I’m trying to make a file_search assistant and use chat completions to allow interaction with the assistant. I have the following code, but am receiving an error I don’t quite understand.
from openai import OpenAI
client = OpenAI(api_key='xxx')
assistant = client.beta.assistants.create(
name = "CDA File Reader",
instructions = "Read the instructions at the top of the file. Be cordial and conversational.",
model = "gpt-4o",
tools = [{"type": "file_search"}],
)
vector_store = client.beta.vector_stores.create(name = "Training Documents")
file_paths = ["/Users/kishanpatel/Library/CloudStorage/OneDrive-CHAMPSSoftwareInc/CDA/test.txt"]
file_streams = [open(path, "rb") for path in file_paths]
file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
vector_store_id = vector_store.id, files = file_streams
)
print(file_batch.status)
print(file_batch.file_counts)
assistant = client.beta.assistants.update(
assistant_id = assistant.id,
tool_resources = {"file_search": {"vector_store_ids": [vector_store.id]}}
)
def chat_with_assistant(user_input):
chat_message = [
{"role": "system", "content": "You are a data assistant. Read the information at the top of the provided information, and respond accordingly."},
{"role": "user", "content": user_input}
]
response = client.chat.completions.create(
model="gpt-4",
messages=chat_message,
tools=[{"type": "file_search", "resource_id": vector_store.id}]
)
assistant_reply = response.choices[0].message.content
return assistant_reply
user_input = input("Input: ")
assistant_response = chat_with_assistant(user_input)
print("Output: " + assistant_response)
The error that I'm getting is this:
Traceback (most recent call last):
File "/Users/kishanpatel/Library/CloudStorage/OneDrive-CHAMPSSoftwareInc/CDA/testing.py", line 6, in <module>
assistant = client.beta.assistants.create(
name="CDA File Reader",
...<7 lines>...
}
)
File "/Library/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages/openai/resources/beta/assistants.py", line 146, in create
return self._post(
~~~~~~~~~~^
"/assistants",
^^^^^^^^^^^^^^
...<18 lines>...
cast_to=Assistant,
^^^^^^^^^^^^^^^^^^
)
^
File "/Library/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages/openai/_base_client.py", line 1278, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Library/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages/openai/_base_client.py", line 955, in request
return self._request(
~~~~~~~~~~~~~^
cast_to=cast_to,
^^^^^^^^^^^^^^^^
...<3 lines>...
retries_taken=retries_taken,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Library/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages/openai/_base_client.py", line 1059, in _request
raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'error': {'message': "Missing required parameter: 'tools[0].function'.", 'type': 'invalid_request_error', 'param': 'tools[0].function', 'code': 'missing_required_parameter'}}
How can I get around this? Any help is appreciated. Thank you!