An error occurs when creating objects via the `client.beta` API (assistants, threads)

I originally planned to create an assistant, but when I call `client.beta.assistants.create`, even without specifying anything beyond the model:

from openai import OpenAI
from config import configs

client = OpenAI(api_key=configs.chatgpt_api_key, base_url=configs.chatgpt_base_url)

assistant = client.beta.assistants.create(
 model="gpt-4o",
)

it fails with the following error:

---------------------------------------------------------------------------
BadRequestError                           Traceback (most recent call last)
Cell In[14], line 6
      2 from config import configs
      4 client = OpenAI(api_key=configs.chatgpt_api_key, base_url=configs.chatgpt_base_url)
----> 6 assistant = client.beta.assistants.create(
      7   model="gpt-4o",
      8 )
     10 # thread = client.beta.threads.create()
     11 
     12 # message = client.beta.threads.messages.create(
   (...)
     29 # else:
     30 #   print(run.status)

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\resources\beta\assistants.py:156, in Assistants.create(self, model, description, instructions, metadata, name, response_format, temperature, tool_resources, tools, top_p, extra_headers, extra_query, extra_body, timeout)
     90 """
     91 Create an assistant with a model and instructions.
     92 
   (...)
    153   timeout: Override the client-level default timeout for this request, in seconds
    154 """
    155 extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
--> 156 return self._post(
    157     "/assistants",
    158     body=maybe_transform(
    159         {
    160             "model": model,
    161             "description": description,
    162             "instructions": instructions,
    163             "metadata": metadata,
    164             "name": name,
    165             "response_format": response_format,
    166             "temperature": temperature,
    167             "tool_resources": tool_resources,
    168             "tools": tools,
    169             "top_p": top_p,
    170         },
    171         assistant_create_params.AssistantCreateParams,
    172     ),
    173     options=make_request_options(
    174         extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
    175     ),
    176     cast_to=Assistant,
    177 )

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\_base_client.py:1240, in SyncAPIClient.post(self, path, cast_to, body, options, files, stream, stream_cls)
   1226 def post(
   1227     self,
   1228     path: str,
   (...)
   1235     stream_cls: type[_StreamT] | None = None,
   1236 ) -> ResponseT | _StreamT:
   1237     opts = FinalRequestOptions.construct(
   1238         method="post", url=path, json_data=body, files=to_httpx_files(files), **options
   1239     )
-> 1240     return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\_base_client.py:921, in SyncAPIClient.request(self, cast_to, options, remaining_retries, stream, stream_cls)
    912 def request(
    913     self,
    914     cast_to: Type[ResponseT],
   (...)
    919     stream_cls: type[_StreamT] | None = None,
    920 ) -> ResponseT | _StreamT:
--> 921     return self._request(
    922         cast_to=cast_to,
    923         options=options,
    924         stream=stream,
    925         stream_cls=stream_cls,
    926         remaining_retries=remaining_retries,
    927     )

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\_base_client.py:1020, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
   1017         err.response.read()
   1019     log.debug("Re-raising status error")
-> 1020     raise self._make_status_error_from_response(err.response) from None
   1022 return self._process_response(
   1023     cast_to=cast_to,
   1024     options=options,
   (...)
   1027     stream_cls=stream_cls,
   1028 )

BadRequestError: Error code: 400 - {'error': {'message': "Missing required parameter: 'messages'.", 'type': 'invalid_request_error', 'param': 'messages', 'code': 'missing_required_parameter'}}

Similarly, here is an example of creating a thread:

thread = client.beta.threads.create()

which fails with this error:

---------------------------------------------------------------------------
BadRequestError                           Traceback (most recent call last)
Cell In[15], line 10
      4 client = OpenAI(api_key=configs.chatgpt_api_key, base_url=configs.chatgpt_base_url)
      6 # assistant = client.beta.assistants.create(
      7 #   model="gpt-4o",
      8 # )
---> 10 thread = client.beta.threads.create()
     12 # message = client.beta.threads.messages.create(
     13 #   thread_id=thread.id,
     14 #   role="user",
   (...)
     29 # else:
     30 #   print(run.status)

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\resources\beta\threads\threads.py:121, in Threads.create(self, messages, metadata, tool_resources, extra_headers, extra_query, extra_body, timeout)
     95 """
     96 Create a thread.
     97 
   (...)
    118   timeout: Override the client-level default timeout for this request, in seconds
    119 """
    120 extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
--> 121 return self._post(
    122     "/threads",
    123     body=maybe_transform(
    124         {
    125             "messages": messages,
    126             "metadata": metadata,
    127             "tool_resources": tool_resources,
    128         },
    129         thread_create_params.ThreadCreateParams,
    130     ),
    131     options=make_request_options(
    132         extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
    133     ),
    134     cast_to=Thread,
    135 )

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\_base_client.py:1240, in SyncAPIClient.post(self, path, cast_to, body, options, files, stream, stream_cls)
   1226 def post(
   1227     self,
   1228     path: str,
   (...)
   1235     stream_cls: type[_StreamT] | None = None,
   1236 ) -> ResponseT | _StreamT:
   1237     opts = FinalRequestOptions.construct(
   1238         method="post", url=path, json_data=body, files=to_httpx_files(files), **options
   1239     )
-> 1240     return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\_base_client.py:921, in SyncAPIClient.request(self, cast_to, options, remaining_retries, stream, stream_cls)
    912 def request(
    913     self,
    914     cast_to: Type[ResponseT],
   (...)
    919     stream_cls: type[_StreamT] | None = None,
    920 ) -> ResponseT | _StreamT:
--> 921     return self._request(
    922         cast_to=cast_to,
    923         options=options,
    924         stream=stream,
    925         stream_cls=stream_cls,
    926         remaining_retries=remaining_retries,
    927     )

File d:\Работа\language_models\myvenv\Lib\site-packages\openai\_base_client.py:1020, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
   1017         err.response.read()
   1019     log.debug("Re-raising status error")
-> 1020     raise self._make_status_error_from_response(err.response) from None
   1022 return self._process_response(
   1023     cast_to=cast_to,
   1024     options=options,
   (...)
   1027     stream_cls=stream_cls,
   1028 )

BadRequestError: Error code: 400 - {'error': {'message': 'you must provide a model parameter', 'type': 'invalid_request_error', 'param': None, 'code': None}}

What might be the problem? The API key itself works — I verified it against the `completions` endpoint, and everything works there. Notably, both error messages ("Missing required parameter: 'messages'" and "you must provide a model parameter") look like responses from the chat-completions endpoint rather than the Assistants API, which suggests the custom `base_url` I am using may not actually support (or correctly route) the beta `/assistants` and `/threads` endpoints.