Thanks @Foxabilo — the code is below:
import openai
import json
# Example dummy function hard coded to return the same weather
# In production, this could be your backend API or an external API
def get_current_weather(location, unit="fahrenheit"):
    """Return a canned weather report for *location* as a JSON string.

    Dummy stand-in for a real weather backend: it always reports 72
    degrees, sunny and windy, echoing back the requested location and
    unit. In production this would call your own API or an external one.
    """
    report = dict(
        location=location,
        temperature="72",
        unit=unit,
        forecast=["sunny", "windy"],
    )
    return json.dumps(report)
# Step 1, send model the user query and what functions it has access to
def run_conversation():
    """Demonstrate OpenAI function calling end to end.

    Sends a user question, lets the model optionally request the
    ``get_current_weather`` function, executes that function locally,
    then sends the result back so the model can compose a final
    natural-language answer.

    Returns:
        The follow-up ChatCompletion response when the model requested a
        function call; otherwise the first response (the original
        implementation silently returned ``None`` in that branch).
    """
    import os

    # Never hard-code secrets in source; read the key from the environment.
    openai.api_key = os.environ["OPENAI_API_KEY"]

    # Single source of truth for the user turn — it is needed again in
    # the follow-up request, so don't duplicate the literal.
    user_message = {"role": "user", "content": "How's the weather today?"}

    # Step 1: send the model the user query and the functions it may call.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=[user_message],
        functions=[
            {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            }
        ],
        function_call="auto",
    )
    print(response)

    message = response["choices"][0]["message"]

    # Step 2: check whether the model wants to call a function.
    if not message.get("function_call"):
        # The model answered directly; return that answer instead of None.
        return response

    function_name = message["function_call"]["name"]
    # Note: the model's "arguments" string may not be valid JSON; a
    # JSONDecodeError here surfaces that rather than mis-parsing silently.
    function_args = json.loads(message["function_call"]["arguments"])

    # Step 3: call the function. Bug fix: the original passed
    # unit=function_args.get("unit"), which overrode the function's
    # "fahrenheit" default with None whenever the model omitted "unit".
    function_response = get_current_weather(
        location=function_args.get("location"),
        unit=function_args.get("unit", "fahrenheit"),
    )

    # Step 4: send the model the function-call message and its result so
    # it can produce the final user-facing answer.
    second_response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=[
            user_message,
            message,
            {
                "role": "function",
                "name": function_name,
                "content": function_response,
            },
        ],
    )
    return second_response
# Run the demo only when executed as a script, not when imported.
if __name__ == "__main__":
    print(run_conversation())
Below is the result:
{
"choices": [
{
"finish_reason": "function_call",
"index": 0,
"message": {
"content": null,
"function_call": {
"arguments": "{\n \"location\": \"San Francisco, CA\"\n}",
"name": "get_current_weather"
},
"role": "assistant"
}
}
],
"created": 1690640046,
"id": "chatcmpl-7hezmtfIaBzB0PKCTo9irwLND2xBW",
"model": "gpt-3.5-turbo-0613",
"object": "chat.completion",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 80,
"total_tokens": 99
}
}
{
"choices": [
{
"finish_reason": "stop",
"index": 0,
"message": {
"content": "The weather in San Francisco, CA is sunny and windy today with a temperature of 72 degrees.",
"role": "assistant"
}
}
],
"created": 1690640047,
"id": "chatcmpl-7heznMlz7rlj4KrFeyd9fZrrNZXpF",
"model": "gpt-3.5-turbo-0613",
"object": "chat.completion",
"usage": {
"completion_tokens": 20,
"prompt_tokens": 72,
"total_tokens": 92
}
}