// Assumed setup from the rest of the project (OpenAI Node SDK v3):
// const { Configuration, OpenAIApi } = require("openai");
// const openai = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAI_API_KEY }));
// const router = require("express").Router();

router.post("/chat", async (req, res) => {
  // console.log("body", req.body);
  try {
    // Prompt order: two system messages, then the prior chat history, then the new user message
    const response = await openai.createChatCompletion({
      model: "gpt-3.5-turbo",
      messages: [
        { role: "system", content: req.body.systemMessage || "" },
        { role: "system", content: req.body.instructionPrompt || "" },
        ...(req.body.prevChat || []),
        { role: "user", content: req.body.message },
      ],
      max_tokens: 100,
      temperature: 0,
    });
    res.json({
      data: response.data,
    });
  } catch (error) {
    // Optional chaining so a missing response body (e.g. a network error) doesn't crash the handler
    console.error("Error:", error.response?.data?.error?.message || error.message);
    res.status(500).send("An error occurred");
  }
});
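For context, the request body the handler reads has roughly this shape. The values below are made-up placeholders; only the field names systemMessage, instructionPrompt, prevChat, and message come from the code above:

{
  "systemMessage": "You are a support assistant. FAQ: our refund window is 30 days.",
  "instructionPrompt": "Answer using only the information given above.",
  "prevChat": [
    { "role": "user", "content": "Hi" },
    { "role": "assistant", "content": "Hello! How can I help?" }
  ],
  "message": "How long is the refund window?"
}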
The problem is that when I directly ask a question whose answer is in the system message, the model can't find the answer, but when I tell it that the answer is present in the system message, it answers correctly. This didn't happen earlier when I was experimenting with the API; it has only started occurring recently.
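To illustrate with a made-up example (the FAQ content and the exact wording below are hypothetical, not my real data), using the request shown above:

// 1) Asking directly: the model says it can't find that information
{ role: "user", content: "How long is the refund window?" }

// 2) Pointing at the system message: the model answers from the 30-day FAQ line
{ role: "user", content: "The answer is in the system message. How long is the refund window?" }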