Completion time limit parameter?

You guys ever give any thought to this? For some real-time applications time is sensitive, and quality text generation needs significant time to return. I find myself coding around the uncertainty of waiting for the completion and wishing for a simple parameter that would give the API a max_time to complete. Basically tell the API, ‘after x amount of time, stop and give me what you’ve got so far.’ Possible?

Using streaming is one way to work around the latency of waiting for the full completion — you start receiving tokens as soon as they are generated.
Also, check this

Thanks. Yeah, streaming occurred to me, but there is precious little documentation on it. I’m not the most experienced coder, and SSE connections in Node are a bit over my head. It’s funny — I’m trying to get ChatGPT to teach it to me, but with mixed results. I understand a bit better now, but the bot keeps giving me mostly helpful code with subtle errors that take time to find. It gave incorrect syntax for the OpenAI JS module connection. Ha. I still haven’t got it working. Any idea where I might find a working code example for using EventSource to retrieve a streaming text completion?

It also took me a while to figure out how streaming works in client-side JavaScript. Here is my demo code; walk through submitRequest() to see the details.

<!DOCTYPE html>
<html>

<head>
    <meta charset="utf-8">
    <title>OpenAI API Codex Demo</title>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>
    <script>
        var keypass = null;
        async function submitRequest() {
            resetResponse();

            if (validateInput() == false) {
                return;
            }

            var last_start = 0;
            var key = $("input[name=key]").val();
            var model = $("select[id=OpenAImodel]").val();
            var maxToken = $("input[name=maxToken]").val() ? $("input[name=maxToken]").val() * 1 : 128;
            var request = $("textarea[name=requestPrompt]").val() + ".\n";
            var url = "https://api.openai.com/v1/completions";
            var string = $("#text").val();
            var data = {
                model: model,
                prompt: request,
                temperature: 0,
                max_tokens: maxToken,
                top_p: 1,
                frequency_penalty: 0,
                presence_penalty: 0,
                stream: true,
                stop: ["###"]
            };

            var es = await fetch(url, {
                headers: {
                    "Content-Type": "application/json",
                    "Authorization": "Bearer " + key,
                },
                method: "POST",
                body: JSON.stringify(data),
            }).then((response) => {
                const reader = response.body.getReader();
                const decoder = new TextDecoder();
                return new ReadableStream({
                    start(controller) {
                        return pump();
                        function pump() {
                            return reader.read().then(({ value, done }) => {
                                // When no more data needs to be consumed, close the stream
                                var resposneData = decoder.decode(value);

                                if (resposneData.includes("[DONE]") == true) {
                                    controller.close();
                                    // alert('Finished.');
                                    return;
                                } else {
                                    // console.log(resposneData);
                                    var responseJson = JSON.parse(resposneData.replace("data: ", ""));
                                    console.log(responseJson);
                                    $("#result").text($("#result").text() + responseJson.choices[0].text);
                                }
                                if (done) {
                                    controller.close();
                                    return;
                                }

                                return pump();
                            },
                                e => console.error("The stream became errored and cannot be read from!", e));
                        }
                    },
                    pull(controller) {

                    },
                    cancel() {
                        ReadableStream.cancel();
                    }
                })
            });
        };

        function resetResponse() {
            $("#result").text("");
        };

        function validateInput() {
            var model = $("select[id=OpenAImodel]").val();
            var request = $("textarea[name=requestPrompt]").val();
            if (!model || !request) {
                alert("Missing model or request!");
                return false;
            }
            return true;
        }

    </script>
</head>

<body>
    <input type="text" name="key" placeholder="API Key" value="" required size="60"><br>
    Max token: <input type="number" name="maxToken" placeholder="128" value="128" required><br>
    AI model: <select id="OpenAImodel">
        <option value="">Select a model</option>
        <option value="code-davinci-002">Code Davinchi 002</option>
        <option value="text-davinci-002">Text Davinchi 002</option>
		<option value="text-davinci-003">Text Davinchi 003</option>
    </select><br>
    <textarea id="text" rows="5" cols="100" name="requestPrompt"
        placeholder="/* create a JavaScript dictionary of 5 countries and capitals: */"></textarea>
    <br>
    <button type="button" onclick="submitRequest()">Submit</button>&nbsp;&nbsp;
	<button type="button" onclick="resetResponse()">Clear</button>
    <br>
    <h2>Result</h2>
    <textarea id="result" rows="15" cols="100"></textarea>
</body>

</html>

Thanks. I’m trying to do it on the server in Node, and I’ve been using the OpenAI npm package library. I don’t know so much about the inner workings of Fetch, but I might have to study up on it if I can’t make it work with require(“openai”).