How to deploy an OpenAI Assistant as a Flask app?

I would be most grateful for advice on how to deploy my OpenAI Assistant as a Flask app. I’ve been able to run the Assistant in my local Python environment and, previously, was able to build Flask versions of Chat Completions and DALL·E apps. But what worked there does not seem applicable to Assistants: instead of a single request/response call, the Assistants API involves a persistent assistant, a thread, messages, and a run that streams its output through an event handler, and I’m not sure how to map those pieces onto Flask routes. A rough sketch of my current idea follows the quickstart code below.

Below is the code from the Assistants API Quickstart that I’m using as my starting point:

from openai import OpenAI, OpenAIError
import os
from dotenv import load_dotenv
import logging
load_dotenv()
api_key = os.getenv('OPENAI_API_KEY')
client = OpenAI(api_key=api_key)
logging.basicConfig(level=logging.INFO)
  

# STEP 1 - CREATE AN ASSISTANT
assistant = client.beta.assistants.create(
  name="Math Tutor",
  instructions="You are a personal math tutor. Write and run code to answer math questions.",
  tools=[{"type": "code_interpreter"}],
  model="gpt-4o",
)
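# (For the Flask version I assume the assistant should be created once at startup,
# or an existing assistant id reused, rather than creating a new assistant per request.)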


# STEP 2 - CREATE A THREAD
thread = client.beta.threads.create()
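# (In a Flask app I assume each user/session needs its own thread id, stored in the
# session or a database, so that different users' conversations don't get mixed up.)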


# STEP 3 - ADD A MESSAGE TO A THREAD
message = client.beta.threads.messages.create(
  thread_id=thread.id,
  role="user",
  content="I need to solve the equation `3x + 11 = 14`. Can you help me?"
)
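# (In the Flask version this content would presumably come from request.form or
# request.json rather than being hard-coded.)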


# STEP 4 - CREATE A RUN
from typing_extensions import override
from openai import AssistantEventHandler
 
# First, we create an EventHandler class to define
# how we want to handle the events in the response stream.
 
class EventHandler(AssistantEventHandler):    
  @override
  def on_text_created(self, text) -> None:
    print(f"\nassistant > ", end="", flush=True)
      
  @override
  def on_text_delta(self, delta, snapshot):
    print(delta.value, end="", flush=True)
      
  def on_tool_call_created(self, tool_call):
    print(f"\nassistant > {tool_call.type}\n", flush=True)
  
  def on_tool_call_delta(self, delta, snapshot):
    if delta.type == 'code_interpreter':
      if delta.code_interpreter.input:
        print(delta.code_interpreter.input, end="", flush=True)
      if delta.code_interpreter.outputs:
        print(f"\n\noutput >", flush=True)
        for output in delta.code_interpreter.outputs:
          if output.type == "logs":
            print(f"\n{output.logs}", flush=True)
 
# Then, we use the `stream` SDK helper 
# with the `EventHandler` class to create the Run 
# and stream the response.
 
with client.beta.threads.runs.stream(
  thread_id=thread.id,
  assistant_id=assistant.id,
  instructions="Please address the user as Jane Doe. The user has a premium account.",
  event_handler=EventHandler(),
) as stream:
  stream.until_done()
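
For what it’s worth, here is a minimal sketch of how I imagined wrapping the quickstart in Flask: the assistant is created once at startup and each browser session reuses its own thread. The /ask route, the message form field, the session-based thread store, and the plain-text streaming response are all guesses on my part rather than a pattern I’ve seen documented, so please correct anything that is wrong:

import os

from dotenv import load_dotenv
from flask import Flask, Response, request, session
from openai import OpenAI

load_dotenv()
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

app = Flask(__name__)
app.secret_key = os.getenv("FLASK_SECRET_KEY", "dev-only-secret")  # needed for session

# Create the assistant once when the app starts, not on every request.
assistant = client.beta.assistants.create(
  name="Math Tutor",
  instructions="You are a personal math tutor. Write and run code to answer math questions.",
  tools=[{"type": "code_interpreter"}],
  model="gpt-4o",
)

def get_thread_id():
  # Reuse one thread per browser session so the conversation keeps its history.
  if "thread_id" not in session:
    session["thread_id"] = client.beta.threads.create().id
  return session["thread_id"]

@app.route("/ask", methods=["POST"])
def ask():
  user_message = request.form["message"]
  thread_id = get_thread_id()

  # Add the user's message to this session's thread.
  client.beta.threads.messages.create(
    thread_id=thread_id,
    role="user",
    content=user_message,
  )

  def generate():
    # Create the run and stream plain-text deltas back to the browser.
    with client.beta.threads.runs.stream(
      thread_id=thread_id,
      assistant_id=assistant.id,
    ) as stream:
      for delta in stream.text_deltas:
        yield delta

  return Response(generate(), mimetype="text/plain")

if __name__ == "__main__":
  app.run(debug=True)

I capture thread_id and assistant.id before the generator runs because, as far as I understand, the request context may no longer be available while Flask streams the response body. Is combining runs.stream with a Flask streaming Response like this a sensible approach, or is there a better pattern for deploying Assistants behind Flask?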
