Function Calling Help - Model Doesn't Seem To Accept Function Prompt?

I decided to make the mega-demo. Going beyond this, I might as well just write a chatbot with classes that handle simulated functions and show how to rewrite them to call a real API…


# Imports and set up the OpenAI client object with a shorter timeout
# (30 s fails fast for an interactive demo instead of the SDK default).
from openai import OpenAI
import json
# NOTE(review): presumably the API key comes from the OPENAI_API_KEY
# environment variable — confirm before running.
client = OpenAI(timeout=30)
# Build the tool specification. Using a list we extend() one entry at a
# time keeps it flexible for adding more tools later.
toolspec = []
toolspec.extend([{
    "type": "function",
    "function": {
        "name": "get_weather_forecast",
        "description": "Get weather forecast. AI can make multiple tool calls in one response.",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state.",
                },
                "format": {
                    "type": "string",
                    "enum": ["celsius", "fahrenheit"],
                    "description": "The temperature unit to use.",
                },
                "time_period": {
                    "type": "number",
                    "description": "length in days or portions of day",
                },
            },
            # BUG FIX: "required" previously listed "num_days", which is not
            # one of the declared properties — the model was being told a
            # nonexistent parameter is mandatory while "time_period" was left
            # optional. JSON Schema "required" entries must name property keys.
            "required": ["location", "format", "time_period"],
        },
    },
}])
# Assemble the request body for the chat completions API call.
# Note: the user prompt deliberately asks the preview model for two
# answers at once, to exercise parallel tool calls.
system_message = {"role": "system", "content": "You are a helpful AI assistant."}
user_message = {
    "role": "user",
    "content": (
        "How hot will it be today in Seattle? And in Miami?"
        " Use multi-tool to get both at the same time"
    ),
}
params = {
    "model": "gpt-3.5-turbo-1106",
    "tools": toolspec,
    "messages": [system_message, user_message],
}
# After the first run shows the AI emitting tool calls, we replay those
# calls plus our tool results back to the model for the second run.
# Rename "xparams" to "params" to include the assistant/tool messages,
# then run again to get your AI answer.

xparams = {"messages": []}  # dump disabled messages here

# Reconstruct what the AI previously emitted to us: one assistant turn
# carrying two parallel tool calls, one per city.
_seattle_call = {
    "id": "call_rygjilssMBx8JQGUgEo7QqeY",
    "type": "function",
    "function": {
        "name": "get_weather_forecast",
        "arguments": "{\"location\": \"Seattle\", \"format\": \"fahrenheit\", \"time_period\": 1}",
    },
}
_miami_call = {
    "id": "call_pI6vxWtSMU5puVBHNm5nJhw3",
    "type": "function",
    "function": {
        "name": "get_weather_forecast",
        "arguments": "{\"location\": \"Miami\", \"format\": \"fahrenheit\", \"time_period\": 1}",
    },
}
xparams["messages"].append({
    "role": "assistant",
    "content": "Let me look up the weather in those cities for you...",
    "tool_calls": [_seattle_call, _miami_call],
})

# Return values: what each function call produced, matched to its call
# by tool_call_id. Rename xparams to params here also for the 2nd run.
xparams["messages"].append({
    "role": "tool",
    "tool_call_id": _seattle_call["id"],
    "content": "Seattle 2022-12-15 forecast: high 62, low 42, partly cloudy\n",
})
xparams["messages"].append({
    "role": "tool",
    "tool_call_id": _miami_call["id"],
    "content": "Miami 2022-12-15 forecast: high 77, low 66, sunny\n",
})
# Make the API call to OpenAI.
c = None
try:
    # with_raw_response wraps the normal create() so the HTTP response
    # headers are available on the returned object (used below).
    c = client.chat.completions.with_raw_response.create(**params)
except Exception as e:
    # Top-level demo boundary: report any API/network failure; c stays
    # None so the parsing block below is skipped.
    print(f"Error: {e}")

# If we got the response, load a whole bunch of demo variables
# This is different because of the 'with raw response' for obtaining headers
if c:
    # Copy the response headers into a plain dict, then inject each one
    # into the module namespace as headers_<name> (dashes -> underscores).
    # NOTE(review): writing into globals() is demo-only; in real code keep
    # the headers in the dict and look them up by key.
    headers_dict = c.headers.items().mapping.copy()
    for key, value in headers_dict.items():
        variable_name = f'headers_{key.replace("-", "_")}'
        globals()[variable_name] = value
    # Prove the dynamic variables exist. NOTE(review): this raises
    # NameError if the x-ratelimit-remaining-tokens header was not sent —
    # confirm the endpoint always includes it.
    remains = headers_x_ratelimit_remaining_tokens  # show we set variables
    
    # Decode the raw JSON body and pull out the fields the demo cares about.
    # NOTE(review): the chained .get(...)[0].get(...) assumes 'choices' is a
    # non-empty list — it will raise if the API returned an error body.
    api_return_dict = json.loads(c.content.decode())
    api_finish_str = api_return_dict.get('choices')[0].get('finish_reason')
    usage_dict = api_return_dict.get('usage')
    api_message_dict = api_return_dict.get('choices')[0].get('message')
    api_message_str = api_return_dict.get('choices')[0].get('message').get('content')
    api_tools_list = api_return_dict.get('choices')[0].get('message').get('tool_calls')
    # print any response always
    if api_message_str:
        print(api_message_str)

    # print all tool functions pretty (tool_calls is absent/None when the
    # model answered with plain text only)
    if api_tools_list:
        for tool_item in api_tools_list:
            print(json.dumps(tool_item, indent=2))

"""
AI says to us:
Here are the weather forecasts for today:
- Seattle: High 62°C, Low 42°C, Partly Cloudy
- Miami: High 77°C, Low 66°C, Sunny
"""
7 Likes