Skip to content

Missing fields when using streaming API #54

@zoravur

Description

@zoravur

Tool calls don't contain "role": "assistant" in the completed ChatCompletionAccumulator.Choices[0].message. It does contain it for the non-streaming API responses though. Here is a direct comparison:

> What's the weather in NYC?

Tool call stream finished: 0 get_weather {"location":"New York City"}

streaming:  {
  "id": "chatcmpl-A8iJ5dT3WSaOoJJ1KTPGAyv94OgWY",
  "choices": [
    {
      "finish_reason": "tool_calls",
      "index": 0,
      "logprobs": {
        "content": null,
        "refusal": null
      },
      "message": {
        "role": "",
        "tool_calls": [
          {
            "function": {
              "arguments": "{\"location\":\"New York City\"}",
              "name": "get_weather"
            },
            "id": "",
            "type": ""
          }
        ]
      }
    }
  ],
  "created": 1726640303,
  "model": "gpt-4o-mini-2024-07-18",
  "object": "chat.completion.chunk",
  "service_tier": "",
  "system_fingerprint": "fp_483d39d857",
  "usage": {
    "completion_tokens": 0,
    "prompt_tokens": 0,
    "total_tokens": 0,
    "completion_tokens_details": {
      "reasoning_tokens": 0
    }
  }
}
nonstreaming:  {
  "id": "chatcmpl-A8iJ56XcjwlGPTtCZUkTIWpk2Wf89",
  "object": "chat.completion",
  "created": 1726640303,
  "model": "gpt-4o-mini-2024-07-18",
  "choices": [
    {
      "index": 0,
      "message": {
        "role": "assistant",
        "content": null,
        "tool_calls": [
          {
            "id": "call_NK8PHa5XX4AVPYxdXBpHYvkm",
            "type": "function",
            "function": {
              "name": "get_weather",
              "arguments": "{\"location\":\"New York City\"}"
            }
          }
        ],
        "refusal": null
      },
      "logprobs": null,
      "finish_reason": "tool_calls"
    }
  ],
  "usage": {
    "prompt_tokens": 50,
    "completion_tokens": 16,
    "total_tokens": 66,
    "completion_tokens_details": {
      "reasoning_tokens": 0
    }
  },
  "system_fingerprint": "fp_483d39d857"
}

Here is the code to generate the above output:

package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/openai/openai-go"
)

// Mock function to simulate weather data retrieval
// getWeather simulates a weather-service lookup. A production version
// would query a real weather API for the given location; this mock
// returns a fixed report regardless of input.
func getWeather(location string) string {
	_ = location // intentionally unused in the mock implementation
	report := "Sunny, 25°C"
	return report
}

// main reproduces the issue: it runs the same tool-call request through the
// streaming API (collected via ChatCompletionAccumulator) and the
// non-streaming API, then prints both JSON payloads for comparison.
func main() {
	client := openai.NewClient()
	ctx := context.Background()

	question := "What's the weather in NYC?"
	msgs := []openai.ChatCompletionMessageParamUnion{
		openai.UserMessage(question),
	}

	// Use fmt rather than the builtin print/println: the builtins write to
	// stderr and the spec does not guarantee they stay in the language.
	fmt.Printf("> %s\n\n", question)

	params := openai.ChatCompletionNewParams{
		Messages: openai.F(msgs),
		Seed:     openai.Int(0), // fixed seed for reproducible comparison
		Model:    openai.F(openai.ChatModelGPT4oMini),
		Tools: openai.F([]openai.ChatCompletionToolParam{
			{
				Type: openai.F(openai.ChatCompletionToolTypeFunction),
				Function: openai.F(openai.FunctionDefinitionParam{
					Name:        openai.String("get_weather"),
					Description: openai.String("Get weather at the given location"),
					// JSON Schema describing the tool's single argument.
					Parameters: openai.F(openai.FunctionParameters{
						"type": "object",
						"properties": map[string]interface{}{
							"location": map[string]string{
								"type": "string",
							},
						},
						"required": []string{"location"},
					}),
				}),
			},
		}),
	}

	stream := client.Chat.Completions.NewStreaming(ctx, params)

	// Accumulate chunks so the finished message can be inspected as a whole.
	acc := openai.ChatCompletionAccumulator{}

	for stream.Next() {
		chunk := stream.Current()
		acc.AddChunk(chunk)

		// When this fires, the current chunk value will not contain content data
		if content, ok := acc.JustFinishedContent(); ok {
			fmt.Println("Content stream finished:", content)
			fmt.Println()
		}

		if tool, ok := acc.JustFinishedToolCall(); ok {
			fmt.Println("Tool call stream finished:", tool.Index, tool.Name, tool.Arguments)
			fmt.Println()
		}

		if refusal, ok := acc.JustFinishedRefusal(); ok {
			fmt.Println("Refusal stream finished:", refusal)
			fmt.Println()
		}
	}

	if err := stream.Err(); err != nil {
		panic(err)
	}

	// Dump the accumulated (streaming) result; this is where role/id/type
	// come back empty.
	if data, err := json.MarshalIndent(acc, "", "    "); err != nil {
		panic(err)
	} else {
		fmt.Println("streaming: ", string(data))
	}

	// Same request without streaming, for a side-by-side comparison.
	output, err := client.Chat.Completions.New(ctx, params)
	if err != nil {
		panic(err)
	}
	fmt.Println("nonstreaming: ", output.JSON.RawJSON())
}
  • This seems to happen for responses without tool calls as well.
  • The id and type fields of the tool call are also missing. id is probably the most critical, because without it you can't send the tool-call result back to the model.

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions