Streaming with tools - missing tool id, name

The tool call id and function name are in the first chunk. Or chunk [0], if you will.

data = {
    "id": "chatcmpl-xyz",
    "object": "chat.completion.chunk",
    "created": 1730391658,
    "model": "gpt-4o-mini-2024-07-18",
    "system_fingerprint": "fp_0ba0d124f1",
    "choices": [
        {
            "index": 0,
            "delta": {
                "role": "assistant",
                "content": None,
                "tool_calls": [
                    {
                        "index": 0,
                        "id": "call_xZtWnTjjc0jhgtsMMpq6kC0w",
                        "type": "function",
                        "function": {
                            "name": "get_current_weather",
                            "arguments": ""
                        }
                    }
                ],
                "refusal": None
            },
            "logprobs": None,
            "finish_reason": None
        }
    ],
    "usage": None
}
# Extracting the chunk function name
function_name = data["choices"][0]["delta"]["tool_calls"][0]["function"]["name"]
print(function_name)

You might be seeing this if your earlier code was written to discard chunks that have no “content”: the first chunk is the only one that carries the tool call id and the function name.
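As a minimal sketch (assuming `data` is the parsed chunk dict shown above), the fix is to check the delta for tool_calls separately instead of skipping any chunk whose content is None:

delta = data["choices"][0]["delta"]

# Don't discard a chunk just because "content" is None
if delta.get("content"):
    print(delta["content"], end="")

for tool_call in delta.get("tool_calls") or []:
    # Only the first chunk of a tool call carries "id", "type" and "function.name";
    # later chunks carry only "function.arguments" fragments.
    if tool_call.get("id"):
        print(f'\ntool call {tool_call["index"]}: '
              f'{tool_call["function"]["name"]} ({tool_call["id"]})')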

Chunk-by-chunk dump of a streamed function call, for your understanding:

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_xZtWnTjjc0jhgtsMMpq6kC0w","type":"function","function":{"name":"get_current_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Miami"}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"unit"}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"fahren"}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"heit"}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}

 data: {"id":"chatcmpl-xyz","object":"chat.completion.chunk","created":1730391658,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[],"usage":{"prompt_tokens":143,"completion_tokens":20,"total_tokens":163,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}
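If you are parsing raw SSE lines like those yourself instead of letting the SDK do it, here is a sketch of the splitting (it assumes `raw_lines` is an iterable of decoded lines from the HTTP response):

import json

def iter_chunks(raw_lines):
    """Yield parsed chunk dicts from raw 'data: ...' SSE lines."""
    for line in raw_lines:
        line = line.strip()
        if not line.startswith("data:"):
            continue                            # skip blank / keep-alive lines
        payload = line[len("data:"):].strip()
        if payload == "[DONE]":
            break                               # end-of-stream sentinel
        yield json.loads(payload)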

I use a class object that collects everything from the stream: fields that are simply replaced, fields that must be appended, and fields that need more unwrapping. Here is a simple version:

from typing import Any, Dict, Optional


class ResponseState:
    """Holds the state of the streaming response."""

    def __init__(self) -> None:
        self.content: str = ''                           # assembled assistant text
        self.function_call: Dict[str, str] = {}          # legacy function_call, if used
        self.tool_calls: Dict[int, Dict[str, Any]] = {}  # tool calls keyed by "index"
        self.finish_reason: Optional[str] = None
        self.usage: Optional[Dict[str, Any]] = None      # filled from the final chunk

When [DONE] arrives, the “content” has already been printed as it streamed, and action can then be taken on either the function call or the collected tool calls.
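For completeness, here is a sketch of the collecting side (a hypothetical collect() helper, tool calls only, assuming each chunk is already parsed into a dict like the dump above):

def collect(state: ResponseState, chunk: Dict[str, Any]) -> None:
    if chunk.get("usage"):                       # last chunk: empty "choices", usage only
        state.usage = chunk["usage"]
    if not chunk.get("choices"):
        return
    choice = chunk["choices"][0]
    delta = choice.get("delta") or {}
    if delta.get("content"):
        print(delta["content"], end="")          # print content as it streams in
        state.content += delta["content"]        # ...and keep a copy
    for tool_call in delta.get("tool_calls") or []:
        slot = state.tool_calls.setdefault(
            tool_call["index"], {"id": "", "name": "", "arguments": ""}
        )
        if tool_call.get("id"):                  # present only in the first chunk
            slot["id"] = tool_call["id"]
        function = tool_call.get("function") or {}
        if function.get("name"):                 # present only in the first chunk
            slot["name"] = function["name"]
        slot["arguments"] += function.get("arguments", "")   # append the fragments
    if choice.get("finish_reason"):
        state.finish_reason = choice["finish_reason"]

Once finish_reason comes back as "tool_calls", every entry in state.tool_calls has its id, name, and a complete arguments string ready for json.loads().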