Running a FastAPI application built in Python with the OpenAI Assistants API

import os
import json
import asyncio

import uvicorn
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse, RedirectResponse
from pydantic import BaseModel
from openai import OpenAI

# Loading the environment variables
from dotenv import load_dotenv
load_dotenv()

os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")

# Create a Pydantic model to validate incoming requests

class ChatRequest(BaseModel):
    question: str


app = FastAPI()

@app.get("/")
def read_root():
    return {"Hello": "World"}

def show_json(obj):
    print(json.dumps(json.loads(obj.model_dump_json()), indent=4))

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])

assistant = client.beta.assistants.create(
    name="AI Tutor",
    tools=[{"type": "code_interpreter"}],
    model="gpt-4o",
)
show_json(assistant)

thread = client.beta.threads.create()
show_json(thread)

@app.post("/ask")
async def ask_question(request: ChatRequest):
    try:
        # Creating a message in the thread with the user question
        message = client.beta.threads.messages.create(
            thread_id="thread_wIyeeSPGTjyg6cmbhGDyldKp",
            role="user",
            content={"type": "text", "text": request.question}
        )

        # Running the assistant to get the response
        run = client.beta.threads.runs.create(

        )

        # Assuming show_json(run) processes the run object correctly
        show_json(run)

        response = run  # Adjust this as needed based on your processing logic
        return JSONResponse(status_code=200, content=response.choices[0].to_dict())

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
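
For comparison, here is a minimal sketch of how I understand this handler is supposed to be wired up, going by the Assistants API reference: the message content is passed as a plain string, runs.create needs both thread_id and assistant_id, the run has to be polled until it leaves the queued/in_progress states, and the reply is read back from the thread's messages (a Run object has no choices attribute). The thread.id and assistant.id below refer to the objects created earlier, the one-second polling loop is only an illustrative assumption, and this is meant as a drop-in replacement for the handler above, not tested code:

@app.post("/ask")
async def ask_question(request: ChatRequest):
    try:
        # 1. Add the user question to the thread; content is a plain string
        client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=request.question,
        )

        # 2. Start a run -- both thread_id and assistant_id are required
        run = client.beta.threads.runs.create(
            thread_id=thread.id,
            assistant_id=assistant.id,
        )

        # 3. Poll until the run is no longer queued or in progress
        while run.status in ("queued", "in_progress"):
            await asyncio.sleep(1)
            run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)

        # 4. Read the assistant's reply from the thread (newest message first)
        messages = client.beta.threads.messages.list(thread_id=thread.id)
        answer = messages.data[0].content[0].text.value
        return JSONResponse(status_code=200, content={"answer": answer})

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))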

if __name__ == "__main__":
    import nest_asyncio
    nest_asyncio.apply()
    uvicorn.run(app, host="0.0.0.0", port=800)

It is showing this error: INFO: 127.0.0.1:63130 - "POST /ask HTTP/1.1" 500 Internal Server Error. I have tried a lot but could not resolve it. Please help me resolve this issue.
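
On the debugging side, since the except block wraps whatever fails into the 500, my understanding is that str(e) should already appear in the "detail" field of the JSON error response, and printing the traceback on the server should show exactly which call is failing. A small sketch of that (assuming the rest of the handler stays as above):

import traceback

@app.post("/ask")
async def ask_question(request: ChatRequest):
    try:
        ...  # handler body as above
    except Exception as e:
        # Print the full traceback to the uvicorn console so the failing call is visible;
        # the client still receives str(e) in the "detail" field of the 500 response.
        print(traceback.format_exc())
        raise HTTPException(status_code=500, detail=str(e))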