I decided to manually manage conversation state because previous_response_id was somewhat confusing to implement. This solution is very basic, since it only needs to power a simple Next.js conversation web page.
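For context, the previous_response_id approach I skipped chains requests together on OpenAI's side instead of resending the whole conversation. This is only a rough sketch of that pattern, not code from my app, and the prompts are placeholders:

import OpenAI from "openai";

const client = new OpenAI();

const first = await client.responses.create({
  model: "gpt-4o-mini",
  input: "Hi there!",
});

const followUp = await client.responses.create({
  model: "gpt-4o-mini",
  // chains onto the server-stored conversation instead of resending every message
  previous_response_id: first.id,
  input: "What did I just say?",
});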
…/page.tsx
// on prompt submit
async function onSubmit(values: UserFormData) {
  const submitCount = form.formState.submitCount;
  const userMessage: ChatCompletionMessageParam = {
    role: "user",
    content: values.prompt,
  };

  // append the user message to the running conversation
  let allMessages: ChatCompletionMessageParam[] = [...messages, userMessage];
  setMessages(allMessages);

  let response = null;
  let aiResponse: ChatCompletionMessageParam;

  // pass the entire conversation state to the OpenAI API route
  if (submitCount === 0) {
    try {
      // first request of the conversation: only the user message exists so far
      response = await axios.post("/api/conversation", [userMessage]);
      // record the AI response so it can be appended to allMessages
      aiResponse = {
        role: "assistant",
        content: response.data,
      };
      // append aiResponse to allMessages
      allMessages = [...allMessages, aiResponse];
      // update the messages state
      setMessages(allMessages);
    } catch (error) {
      console.error("Error posting to /api/conversation", error);
    }
  } else {
    try {
      // not the first request, so send the full conversation so far
      response = await axios.post("/api/conversation", allMessages);
      aiResponse = {
        role: "assistant",
        content: response.data,
      };
      // keep the cycle going: each AI response is appended after its user message
      allMessages = [...allMessages, aiResponse];
      setMessages(allMessages);
    } catch (error) {
      console.error("Error posting to /api/conversation", error);
    }
  }

  // reset form state
  form.reset();
}
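The handler above leans on state and a form object defined elsewhere in the page component. Here is a minimal sketch of what it assumes; react-hook-form is my guess based on form.formState and form.reset, and the exact setup in the real page may differ:

import { useState } from "react";
import { useForm } from "react-hook-form";
import axios from "axios";
import type { ChatCompletionMessageParam } from "openai/resources/chat/completions";

type UserFormData = { prompt: string };

// ...inside the page component body:
const [messages, setMessages] = useState<ChatCompletionMessageParam[]>([]);
const form = useForm<UserFormData>({ defaultValues: { prompt: "" } });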
route.ts
import OpenAI from "openai";
import { NextRequest, NextResponse } from "next/server";

export async function POST(req: NextRequest) {
  // reads the API key from the OPENAI_API_KEY environment variable
  const client = new OpenAI();

  // the request body is the full array of { role, content } messages from the client
  const messages = await req.json();
  console.log("messages:", messages);

  const response = await client.responses.create({
    model: "gpt-4o-mini",
    instructions: "You are a conversationalist that is fun to talk to.",
    input: [...messages],
  });

  // return the model's reply as plain text
  return new NextResponse(response.output_text, { status: 200 });
}
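To sanity-check the route without the UI, you can post a messages array to it directly. A quick sketch, assuming the dev server is running and the route lives at /api/conversation as above:

// hypothetical manual test, e.g. from the browser console on the running app
const res = await fetch("/api/conversation", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify([{ role: "user", content: "Tell me a fun fact." }]),
});
console.log(await res.text()); // the assistant's reply comes back as plain text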