Hi openai team,
I have noticed that function calling resets the conversational state when using the Responses API. Is there any way to maintain the conversational context?
I tried using the previous_response_id param, but it returned a Bad Request error.
Sample data can be found below

Thanks and regards,
Shreyas
Could you share an example of your code?
I just tried and it seems to work fine:
import requests
from openai import OpenAI
import json
# The client reads OPENAI_API_KEY from the environment by default.
client = OpenAI()
# Model used for both Responses API calls below.
select_model="gpt-4.1-mini"
def get_weather(latitude, longitude):
    """Return the current 2 m air temperature (Celsius) at the given coordinates.

    Queries the Open-Meteo forecast API and extracts the ``current``
    temperature reading from the JSON response.

    Args:
        latitude: Latitude in decimal degrees.
        longitude: Longitude in decimal degrees.

    Returns:
        The current temperature in degrees Celsius (a number).

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    # BUG FIX: the original URL contained the mojibake "¤t=" where
    # "&current=" was intended ("&curren" is the HTML entity for the
    # currency sign). Without the "current" query parameter the API
    # omits the 'current' block and the lookup below raised KeyError.
    response = requests.get(
        "https://api.open-meteo.com/v1/forecast"
        f"?latitude={latitude}&longitude={longitude}"
        "&current=temperature_2m,wind_speed_10m"
        "&hourly=temperature_2m,relative_humidity_2m,wind_speed_10m"
    )
    # Fail loudly on HTTP errors instead of a confusing KeyError later.
    response.raise_for_status()
    data = response.json()
    return data['current']['temperature_2m']
# Tool schema advertised to the model: one strict function that fetches
# the current temperature for a latitude/longitude pair.
tools = [
    {
        "type": "function",
        "name": "get_weather",
        "description": "Get current temperature for provided coordinates in celsius.",
        "parameters": {
            "type": "object",
            "properties": {
                "latitude": {"type": "number"},
                "longitude": {"type": "number"},
            },
            "required": ["latitude", "longitude"],
            "additionalProperties": False,
        },
        # Strict mode: the model must produce arguments matching the schema.
        "strict": True,
    }
]
# --- Turn 1: ask a question that should trigger the weather tool. ---
input_messages = [{"role": "user", "content": "What's the weather like in Paris today?"}]
response = client.responses.create(
    model=select_model,
    input=input_messages,
    tools=tools,
)
print("id: ",response.id, "output:", response.output)

# ROBUSTNESS FIX: response.output is not guaranteed to contain only the
# function call (other item types may precede it), so select the item by
# type instead of blindly taking output[0].
tool_call = next(item for item in response.output if item.type == "function_call")
args = json.loads(tool_call.arguments)

# Run the tool locally with the model-supplied arguments.
result = get_weather(args["latitude"], args["longitude"])
print("get_weather returned: ", result)

# --- Turn 2: feed the tool result back. previous_response_id chains this
# call to turn 1, so the server retains the conversational context. ---
input_messages = [{
    "type": "function_call_output",
    "call_id": tool_call.call_id,
    "output": str(result)
}]
response_2 = client.responses.create(
    model=select_model,
    previous_response_id=response.id,
    input=input_messages,
    # Tools need not be re-sent when chaining via previous_response_id.
    #tools=tools,
)
print(response_2.output_text)
Thanks! Was able to get it to work after some troubleshooting.
1 Like