I’ve been struggling with this for over a week, and I’m not sure where my problem is any more. Essentially, I have a rules engine where each rule is likely to depend on the results of previous rules, so they must be executed synchronously, in sequence. If I want to incorporate output from ChatGPT, it seems I need to make an async call from the main synchronous loop, but this doesn’t work — the code simply exits without getting the result:
Response
{
"statusCode": 200,
"headers": {
"Content-type": "application/json"
}
}
Function Logs
START RequestId: 4e7ae355-8ed8-43ab-b4c4-0f0ec48a8e57 Version: $LATEST
2023-04-26T00:17:45.499Z 4e7ae355-8ed8-43ab-b4c4-0f0ec48a8e57 INFO getResponse('what did the fox say?')
2023-04-26T00:17:45.794Z 4e7ae355-8ed8-43ab-b4c4-0f0ec48a8e57 INFO make request
2023-04-26T00:17:45.833Z 4e7ae355-8ed8-43ab-b4c4-0f0ec48a8e57 INFO output: undefined
END RequestId: 4e7ae355-8ed8-43ab-b4c4-0f0ec48a8e57
I’ve appended a stripped-down example below that fails in the same way as my main code; any help or insights would be much appreciated (or ideas on how to avoid the async trap).
David
const { Configuration, OpenAIApi } = require("openai");
exports.handler = async function(event, context, callback) {
var output;
try {
// call synchronous function with the input
output = mycaller(event.prompt);
console.log('output: ' + output);
}
catch (e){
output = e.message;
}
let response = {
statusCode: 200,
headers: { "Content-type" : "application/json" },
body: JSON.stringify(output)
};
return response;
};
// sync function that calls an async one
/**
 * Kicks off the async ChatGPT chain and extracts the completion text.
 *
 * The original bug: the `.then(...)` chain was built but never RETURNED,
 * so this function always returned `undefined` and the result was lost.
 * There is no way to block synchronously on a Promise in Node — the
 * Promise must be returned so the caller can `await` it.
 *
 * @param {string} prompt - text to send to the model
 * @returns {Promise<string>} resolves to the first completion's text
 */
function mycaller(prompt)
{
    return myFunction(prompt).then(res => res.data.choices[0].text);
}
// async function that waits for chatGPT response
/**
 * Thin async pass-through to getResponse.
 * (Awaiting and re-returning is equivalent to returning the Promise.)
 *
 * @param {string} prompt - text to send to the model
 * @returns {Promise<object>} the raw SDK response from getResponse
 */
async function myFunction(prompt) {
    const result = await getResponse(prompt);
    return result;
}
// async function that makes chatGPT call
/**
 * Issues the ChatGPT completion request via the openai v3 SDK.
 *
 * @param {string} prompt - text to send to the model
 * @returns {Promise<object>} axios-style response; the completion text
 *   lives at `res.data.choices[0].text`
 */
async function getResponse(prompt)
{
    console.log("getResponse('" + prompt + "')");
    // SECURITY: never hard-code API keys in source. Read the key from the
    // environment instead (set OPENAI_API_KEY in the Lambda configuration).
    const configuration = new Configuration({
        apiKey: process.env.OPENAI_API_KEY,
    });
    const openai = new OpenAIApi(configuration);
    return openai.createCompletion({
        model: "text-davinci-003",
        prompt: prompt,
        temperature: 0.7,
        max_tokens: 500,
        top_p: 1,
        frequency_penalty: 0,
        presence_penalty: 0,
    });
}