Hi everyone! I’m struggling to return the streaming response of an assistant function call in Next.js 14. I’m able to execute the function, but it seems that the tool output is not correctly submitted. Here is my route handler, to better understand:
import OpenAI from "openai";
import { NextRequest } from "next/server";
// Shared OpenAI client for this route module.
// NOTE(review): `|| ""` means a missing OPENAI_API_KEY silently becomes an
// empty string and fails later at request time — consider throwing at startup.
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY || "",
});
// Assistant to run against; empty string if the env var is unset (same caveat as above).
const assistant_id = process.env.OPENAI_ASSISTANT_ID ?? "";
export async function POST(request: NextRequest) {
// parse message from post
const { threadId, content, restaurantId } = await request.json();
// Create a thread if needed
const thread_id = threadId ?? (await openai.beta.threads.create({})).id;
// add new message to thread
await openai.beta.threads.messages.create(thread_id, {
role: "user",
content,
});
// create the run stream
let stream = openai.beta.threads.runs
.stream(thread_id, { assistant_id })
// subscribe to every event
.on("event", async ({ event, data }) => {
if (
event === "thread.run.requires_action" &&
data.status === "requires_action" &&
data.required_action?.type === "submit_tool_outputs"
) {
const tool_outputs = await Promise.all(
data.required_action.submit_tool_outputs.tool_calls.map(async (toolCall) => {
const parameters = JSON.parse(toolCall.function.arguments);
switch (toolCall.function.name) {
case "myFunction": {
const "myParameter" = parameters.myParameter
...
return {
tool_call_id: toolCall.id,
output: `my-awesome-output`,
};
}
default: {
return {
tool_call_id: toolCall.id,
output: `unknown function: ${toolCall.function.name}`,
};
}
}
})
);
await openai.beta.threads.runs.submitToolOutputs(thread_id, data.id, {
tool_outputs,
});
}
});
return new Response(stream.toReadableStream());
}
And here is my client-side code:
// Chat UI state.
// FIX: the original read localStorage directly in the useState argument, which
// (a) re-reads storage on every render and (b) throws during SSR/prerender in
// Next.js, where `localStorage` does not exist. A lazy initializer with a
// `typeof window` guard runs once and is SSR-safe.
const [isLoading, setIsLoading] = useState(false);
const [threadId, setThreadId] = useState<string | null>(() =>
  typeof window !== "undefined" ? localStorage.getItem("chatbot_thread_id") : null
);
const [prompt, setPrompt] = useState("");
const [messages, setMessages] = useState<Message[]>([]);
// Monotonically increasing id for locally-created (user) messages.
const messageId = useRef(0);
const [error, setError] = useState<string | null>(null);
async function handleSubmit(e: React.FormEvent<HTMLFormElement>) {
e.preventDefault();
setIsLoading(true);
// add user message to list of messages
messageId.current++;
setMessages([
...messages,
{
id: messageId.current.toString(),
role: "user",
content: prompt,
created_at: Date.now(),
},
]);
setPrompt("");
const response = await fetch("/api/openai-assistant", {
method: "POST",
body: JSON.stringify({
threadId: threadId,
content: prompt
}),
});
if (!response.body) {
return;
}
const runner = AssistantStream.fromReadableStream(response.body);
runner.on("messageCreated", (message) => {
if (!threadId) setThreadId(message.thread_id);
localStorage.setItem("chatbot_thread_id", message.thread_id);
});
runner.on("messageDone", (message) => {
// get final message content
const { id, role, created_at } = message;
const finalContent = message.content[0].type == "text" ? message.content[0].text.value : "";
// add assistant message to list of messages
messageId.current++;
setMessages((prevMessages) => [...prevMessages, { id, role, created_at, content: finalContent }]);
setIsLoading(false);
});
runner.on("error", (error) => {
console.error(error);
});
}
I know that for Next.js there is the Vercel AI SDK, which simplifies this process with “AssistantResponse” and the “useAssistant” hook, but I want to use only the OpenAI client. Can somebody help me?