Looking for Help With API Access for School Project

Hey. I’m trying to create a really simple program that uses the gpt-3.5-turbo API and three.js: you type a shape, like “cube,” into a textbox in the browser, and the API sends back code to render it.

I’m still getting into coding so excuse my inexperience, but right now, my project is set up like this:

```
project-root-directory/
├── public/
│   ├── index.html
│   └── scene.js
└── app.js
```

My code for index.html is:

```html
<!DOCTYPE html>
<html>
  <head>
    <title>Your Title</title>
  </head>
  <body>
    <input type="text" id="modelDescription">
    <button onclick="generateModel()">Generate Model</button>
    <script src="/scene.js"></script>
  </body>
</html>
```

For scene.js it's:

```javascript
async function generateModel() {
    const idea = document.getElementById('modelDescription').value;
    try {
        const response = await fetch('/generateThreejsCode', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json'
            },
            body: JSON.stringify({ prompt: idea })
        });
        const data = await response.json();
        if (data.code) {
            eval(data.code);
        } else {
            console.error('Error fetching code from OpenAI:', data.error);
        }
    } catch (error) {
        console.error("Error fetching code from OpenAI: ", error.message);
        console.error("Error details:", error.response ? error.response.data : null);
    }
}
```

and for app.js it's:

```javascript
require('dotenv').config();
const express = require('express');
const bodyParser = require('body-parser');
const { OpenAIApiFactory } = require('openai');

const app = express();
app.use(bodyParser.json());
app.use(express.static('public'));

app.post('/generateThreejsCode', async (req, res) => {
    const userDescription = req.body.prompt;

    const openai = OpenAIApiFactory({ key: process.env.OPEN_API_KEY });

    try {
        const completionResponse = await openai.createCompletion({
            model: "gpt-3.5-turbo",
            prompt: `You are a three.js code writing AI. Provide a Three.js code snippet for ${userDescription}`,
            max_tokens: 4000
        });

        const generatedCode = completionResponse.choices && completionResponse.choices[0]?.text
                              ? completionResponse.choices[0].text
                              : "Sorry, couldn't generate code.";

        res.json({ generatedCode });
    } catch (error) {
        console.error("Error calling OpenAI:", error);
        res.status(500).json({ error: "Failed to generate code." });
    }
});

const PORT = 3000;
app.listen(PORT, () => {
    console.log(`Server is running on localhost:${PORT}`);
});
```

Lastly, I have a file just called ".env" with OPENAI_API_KEY = 123123123 in it (without quotes around the value).
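In other words, the whole file is just this one line, with 123123123 standing in for my actual key:

```
# .env sits in the project root; dotenv loads it into process.env
OPENAI_API_KEY = 123123123
```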

This is what it looks like in the browser (tried to attach a screenshot, not sure if it will load in correctly lol):

I’m new to .env files so maybe that’s the culprit?

My terminal throws this error:

```
andrewd@Andrews-MacBook-Air-6 ~ % cd documents/project-root-directory/
andrewd@Andrews-MacBook-Air-6 project-root-directory % node app.js
Server is running on localhost:3000
Error calling OpenAI: Error: Request failed with status code 401
at createError (/Users/andrewd/node_modules/openai/node_modules/axios/lib/core/createError.js:16:15)
at settle (/Users/andrewd/node_modules/openai/node_modules/axios/lib/core/settle.js:17:12)
at IncomingMessage.handleStreamEnd (/Users/andrewd/node_modules/openai/node_modules/axios/lib/adapters/http.js:322:11)
at IncomingMessage.emit (node:events:526:35)
at endReadableNT (node:internal/streams/readable:1359:12)
at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
config: {
transitional: {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
},
adapter: [Function: httpAdapter],
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
xsrfCookieName: ‘XSRF-TOKEN’,
xsrfHeaderName: ‘X-XSRF-TOKEN’,
maxContentLength: -1,
maxBodyLength: -1,
validateStatus: [Function: validateStatus],
headers: {
Accept: 'application/json, text/plain, */*',
'Content-Type': 'application/json',
'User-Agent': 'axios/0.26.1',
'Content-Length': 131
},
method: 'post',
data: '{"model":"gpt-3.5-turbo","prompt":"You are a three.js code writing AI. Provide a Three.js code snippet for cube","max_tokens":4000}',
url: 'https://api.openai.com/v1/completions'
},
request: <ref *1> ClientRequest {
_events: [Object: null prototype] {
abort: [Function (anonymous)],
aborted: [Function (anonymous)],
connect: [Function (anonymous)],
error: [Function (anonymous)],
socket: [Function (anonymous)],
timeout: [Function (anonymous)],
finish: [Function: requestOnFinish]
},
_eventsCount: 7,
_maxListeners: undefined,
outputData: ,
outputSize: 0,
writable: true,
destroyed: false,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
maxRequestsOnConnectionReached: false,
_defaultKeepAlive: true,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
strictContentLength: false,
_contentLength: 131,
_hasBody: true,
_trailer: ‘’,
finished: true,
_headerSent: true,
_closed: false,
socket: TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
secureConnecting: false,
_SNICallback: null,
servername: ‘api.openaicom’,
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object: null prototype],
_eventsCount: 10,
connecting: false,
_hadError: false,
_parent: null,
_host: ‘api.openaicom’,
_closeAfterHandlingError: false,
_readableState: [ReadableState],
_maxListeners: undefined,
_writableState: [WritableState],
allowHalfOpen: false,
_sockname: null,
_pendingData: null,
_pendingEncoding: ‘’,
server: undefined,
_server: null,
ssl: [TLSWrap],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular *1],
[Symbol(res)]: [TLSWrap],
[Symbol(verified)]: true,
[Symbol(pendingSession)]: null,
[Symbol(async_id_symbol)]: 41,
[Symbol(kHandle)]: [TLSWrap],
[Symbol(lastWriteQueueSize)]: 0,
[Symbol(timeout)]: null,
[Symbol(kBuffer)]: null,
[Symbol(kBufferCb)]: null,
[Symbol(kBufferGen)]: null,
[Symbol(kCapture)]: false,
[Symbol(kSetNoDelay)]: false,
[Symbol(kSetKeepAlive)]: true,
[Symbol(kSetKeepAliveInitialDelay)]: 60,
[Symbol(kBytesRead)]: 0,
[Symbol(kBytesWritten)]: 0,
[Symbol(connect-options)]: [Object]
},
_header: ‘POST /v1/completions HTTP/1.1\r\n’ +
‘Accept: application/json, text/plain, /\r\n’ +
‘Content-Type: application/json\r\n’ +
‘User-Agent: axios/0.26.1\r\n’ +
‘Content-Length: 131\r\n’ +
‘Host: api.openaicom\r\n’ +
‘Connection: close\r\n’ +
‘\r\n’,
_keepAliveTimeout: 0,
_onPendingData: [Function: nop],
agent: Agent {
_events: [Object: null prototype],
_eventsCount: 2,
_maxListeners: undefined,
defaultPort: 443,
protocol: ‘https:’,
options: [Object: null prototype],
requests: [Object: null prototype] {},
sockets: [Object: null prototype],
freeSockets: [Object: null prototype] {},
keepAliveMsecs: 1000,
keepAlive: false,
maxSockets: Infinity,
maxFreeSockets: 256,
scheduling: ‘lifo’,
maxTotalSockets: Infinity,
totalSocketCount: 1,
maxCachedSessions: 100,
_sessionCache: [Object],
[Symbol(kCapture)]: false
},
socketPath: undefined,
method: ‘POST’,
maxHeaderSize: undefined,
insecureHTTPParser: undefined,
joinDuplicateHeaders: undefined,
path: ‘/v1/completions’,
_ended: true,
res: IncomingMessage {
_readableState: [ReadableState],
_events: [Object: null prototype],
_eventsCount: 4,
_maxListeners: undefined,
socket: [TLSSocket],
httpVersionMajor: 1,
httpVersionMinor: 1,
httpVersion: ‘1.1’,
complete: true,
rawHeaders: [Array],
rawTrailers: ,
joinDuplicateHeaders: undefined,
aborted: false,
upgrade: false,
url: ‘’,
method: null,
statusCode: 401,
statusMessage: ‘Unauthorized’,
client: [TLSSocket],
_consuming: false,
_dumped: false,
req: [Circular *1],
responseUrl: 'https://api.openai.com/v1/completions',
redirects: ,
[Symbol(kCapture)]: false,
[Symbol(kHeaders)]: [Object],
[Symbol(kHeadersCount)]: 22,
[Symbol(kTrailers)]: null,
[Symbol(kTrailersCount)]: 0
},
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
host: ‘api.openaicom’,
protocol: ‘https:’,
_redirectable: Writable {
_writableState: [WritableState],
_events: [Object: null prototype],
_eventsCount: 3,
_maxListeners: undefined,
_options: [Object],
_ended: true,
_ending: true,
_redirectCount: 0,
_redirects: ,
_requestBodyLength: 131,
_requestBodyBuffers: ,
_onNativeResponse: [Function (anonymous)],
_currentRequest: [Circular *1],
_currentUrl: 'https://api.openai.com/v1/completions',
[Symbol(kCapture)]: false
},
[Symbol(kCapture)]: false,
[Symbol(kBytesWritten)]: 0,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype] {
accept: [Array],
‘content-type’: [Array],
‘user-agent’: [Array],
‘content-length’: [Array],
host: [Array]
},
[Symbol(errored)]: null,
[Symbol(kHighWaterMark)]: 16384,
[Symbol(kRejectNonStandardBodyWrites)]: false,
[Symbol(kUniqueHeaders)]: null
},
response: {
status: 401,
statusText: ‘Unauthorized’,
headers: {
date: ‘Fri, 21 Jul 2023 17:48:43 GMT’,
‘content-type’: ‘application/json; charset=utf-8’,
‘content-length’: ‘496’,
connection: ‘close’,
vary: ‘Origin’,
‘x-request-id’: ‘1186b4c0e26e8794259de5551bbd0590’,
‘strict-transport-security’: ‘max-age=15724800; includeSubDomains’,
‘cf-cache-status’: ‘DYNAMIC’,
server: ‘cloudflare’,
‘cf-ray’: ‘7ea546c3ce6c2d14-IAD’,
‘alt-svc’: ‘h3=“:443”; ma=86400’
},
config: {
transitional: [Object],
adapter: [Function: httpAdapter],
transformRequest: [Array],
transformResponse: [Array],
timeout: 0,
xsrfCookieName: ‘XSRF-TOKEN’,
xsrfHeaderName: ‘X-XSRF-TOKEN’,
maxContentLength: -1,
maxBodyLength: -1,
validateStatus: [Function: validateStatus],
headers: [Object],
method: ‘post’,
data: '{"model":"gpt-3.5-turbo","prompt":"You are a three.js code writing AI. Provide a Three.js code snippet for cube","max_tokens":4000}',
url: 'https://api.openai.com/v1/completions'
},
request: <ref *1> ClientRequest {
_events: [Object: null prototype],
_eventsCount: 7,
_maxListeners: undefined,
outputData: ,
outputSize: 0,
writable: true,
destroyed: false,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
maxRequestsOnConnectionReached: false,
_defaultKeepAlive: true,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
strictContentLength: false,
_contentLength: 131,
_hasBody: true,
_trailer: ‘’,
finished: true,
_headerSent: true,
_closed: false,
socket: [TLSSocket],
_header: ‘POST /v1/completions HTTP/1.1\r\n’ +
‘Accept: application/json, text/plain, /\r\n’ +
‘Content-Type: application/json\r\n’ +
‘User-Agent: axios/0.26.1\r\n’ +
‘Content-Length: 131\r\n’ +
‘Host: api.openaicom\r\n’ +
‘Connection: close\r\n’ +
‘\r\n’,
_keepAliveTimeout: 0,
_onPendingData: [Function: nop],
agent: [Agent],
socketPath: undefined,
method: ‘POST’,
maxHeaderSize: undefined,
insecureHTTPParser: undefined,
joinDuplicateHeaders: undefined,
path: ‘/v1/completions’,
_ended: true,
res: [IncomingMessage],
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
host: ‘api.openaicom’,
protocol: ‘https:’,
_redirectable: [Writable],
[Symbol(kCapture)]: false,
[Symbol(kBytesWritten)]: 0,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype],
[Symbol(errored)]: null,
[Symbol(kHighWaterMark)]: 16384,
[Symbol(kRejectNonStandardBodyWrites)]: false,
[Symbol(kUniqueHeaders)]: null
},
data: { error: [Object] }
},
isAxiosError: true,
toJSON: [Function: toJSON]
}
```

I would be ecstatic if anyone has any ideas as to what might be causing this. Thanks so much!!

Sorry, I realized my question had bad formatting. This screenshot is what the code actually looks like; not sure why it uploaded like that.

  1. In your app.js, the catch (error) will have the specific error message from OpenAI. I'd look at that log, or return the error to your front end so it's more visible. A detailed error message will make it much easier to figure out what's wrong.
  2. 401 errors are generally a missing or invalid API key. Comment out the OpenAI call for a bit: are you able to successfully log/print the API key from process.env.OPEN_API_KEY in app.js?
  3. You are calling openai.createCompletion with gpt-3.5-turbo. You need to call openai.createChatCompletion and pass a messages array instead of a prompt string if you want to use a chat model like gpt-3.5-turbo. Check the API docs; there's a rough sketch below this list.
  4. (Also: wrap your code blocks in three backticks (```) so they are formatted as code in your post.)
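For points 1 and 3, here is roughly what the route handler could look like. This is only a sketch: it assumes you're on the v3 openai Node package (the axios-based one your stack trace shows) and that the client is built from the Configuration/OpenAIApi exports it provides, so adapt it to however your client is actually constructed:

```javascript
const { Configuration, OpenAIApi } = require('openai');

// Build the client from the key loaded out of .env; the variable name here
// has to match the name used in the .env file exactly.
const configuration = new Configuration({ apiKey: process.env.OPENAI_API_KEY });
const openai = new OpenAIApi(configuration);

app.post('/generateThreejsCode', async (req, res) => {
    try {
        // Chat models like gpt-3.5-turbo go through createChatCompletion
        // and take a messages array rather than a prompt string.
        const completion = await openai.createChatCompletion({
            model: 'gpt-3.5-turbo',
            messages: [
                { role: 'system', content: 'You are a helpful assistant that writes Three.js code.' },
                { role: 'user', content: `Provide a Three.js code snippet for ${req.body.prompt}` }
            ]
        });

        // In the v3 SDK the response body lives on .data
        res.json({ code: completion.data.choices[0].message.content });
    } catch (error) {
        // Surface the real OpenAI error instead of a generic message,
        // both in the server log and in the JSON sent to the front end.
        const details = error.response ? error.response.data : error.message;
        console.error('OpenAI error:', details);
        res.status(500).json({ error: details });
    }
});
```

I'm returning the snippet under a code key because your scene.js checks data.code; whichever key you pick, just make sure the server response and the fetch handler agree on it.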

Got it, you were spot on about the .env not being accessible. I adopted your suggestion and the console.log now prints the key, which is cool!

I updated my code as follows, but it still seems to be throwing errors. I also tried to put the error catch on the front end.

**app.js:**

```javascript
require('dotenv').config();
console.log(process.env.OPENAI_API_KEY);

const express = require('express');
const bodyParser = require('body-parser');
const { OpenAIApiFactory } = require('openai');

const app = express();
app.use(bodyParser.json());
app.use(express.static('public'));

app.post('/generateThreejsCode', async (req, res) => {
    const userDescription = req.body.prompt;

    const openai = OpenAIApiFactory({ key: process.env.OPENAI_API_KEY });
    const messages = [
        { role: "user", content: `Provide a Three.js code snippet for ${userDescription}` },
        { role: "system", content: "You are a helpful assistant that writes Three.js code. Create code that would create a model for the user. It will be directly imported into js." }
    ];

    try {
        const completionResponse = await openai.createChatCompletion({
            model: "gpt-3.5-turbo",
            messages: messages,  // Note: We've replaced 'prompt' with 'messages'
            max_tokens: 4000
        });

        const generatedCode = completionResponse.choices && completionResponse.choices[0]?.message?.content
                              ? completionResponse.choices[0].message.content
                              : "Sorry, couldn't generate code.";

        res.json({ generatedCode });
    } catch (error) {
        console.error("Error calling OpenAI:", error);
        res.status(500).json({ error: "Failed to generate code." });
    }
});

const PORT = 3000;
app.listen(PORT, () => {
    console.log(`Server is running on http://localhost:${PORT}`);
});
```

**scene.js:**

```javascript
async function generateModel() {
    const idea = document.getElementById('modelDescription').value;
    try {
        const response = await fetch('/generateThreejsCode', {

            method: 'POST',
            headers: {
                'Content-Type': 'application/json'
            },
            body: JSON.stringify({ prompt: idea })
        });
        const data = await response.json();
        if (data.code) {
            eval(data.code);
        } else {
            console.error('Error fetching code from OpenAI:', data.error);
        }
    } catch (error) {
        console.error("Error fetching code from OpenAI: ", error.message);
        console.error("Error details:", error.response ? error.response.data : null);
        console.log.error;
    }
}
```

My terminal error:

```
andrew@Andrews-MacBook-Air-6 ~ % cd documents/project-root-directory/
andrew@Andrews-MacBook-Air-6 project-root-directory % node app.js
Server is running on localhost:3000
Error calling OpenAI: Error: Request failed with status code 401
    at createError (/Users/andrew/node_modules/openai/node_modules/axios/lib/core/createError.js:16:15)
    at settle (/Users/andrew/node_modules/openai/node_modules/axios/lib/core/settle.js:17:12)
    at IncomingMessage.handleStreamEnd (/Users/andrew/node_modules/openai/node_modules/axios/lib/adapters/http.js:322:11)
    at IncomingMessage.emit (node:events:526:35)
    at endReadableNT (node:internal/streams/readable:1359:12)
    at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
  config: {
    transitional: {
      silentJSONParsing: true,
      forcedJSONParsing: true,
      clarifyTimeoutError: false
    },
    adapter: [Function: httpAdapter],
    transformRequest: [ [Function: transformRequest] ],
    transformResponse: [ [Function: transformResponse] ],
    timeout: 0,
    xsrfCookieName: 'XSRF-TOKEN',
    xsrfHeaderName: 'X-XSRF-TOKEN',
    maxContentLength: -1,
    maxBodyLength: -1,
    validateStatus: [Function: validateStatus],
    headers: {
      Accept: 'application/json, text/plain, */*',
      'Content-Type': 'application/json',
      'User-Agent': 'axios/0.26.1',
      'Content-Length': 311
    },
    method: 'post',
    data: '{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"Provide a Three.js code snippet for square rotating"},{"role":"system","content":"You are a helpful assistant that writes Three.js code. Create code that would create a model for the user. It will be directly imported into js."}],"max_tokens":4000}',
    url: 'https://api.openai.com/v1/chat/completions'
  },
  request: <ref *1> ClientRequest {
    _events: [Object: null prototype] {
      abort: [Function (anonymous)],
      aborted: [Function (anonymous)],
      connect: [Function (anonymous)],
      error: [Function (anonymous)],
      socket: [Function (anonymous)],
      timeout: [Function (anonymous)],
      finish: [Function: requestOnFinish]
    },
    _eventsCount: 7,
    _maxListeners: undefined,
    outputData: [],
    outputSize: 0,
    writable: true,
    destroyed: false,
    _last: true,
    chunkedEncoding: false,
    shouldKeepAlive: false,
    maxRequestsOnConnectionReached: false,
    _defaultKeepAlive: true,
    useChunkedEncodingByDefault: true,
    sendDate: false,
    _removedConnection: false,
    _removedContLen: false,
    _removedTE: false,
    strictContentLength: false,
    _contentLength: 311,
    _hasBody: true,
    _trailer: '',
    finished: true,
    _headerSent: true,
    _closed: false,
    socket: TLSSocket {
      _tlsOptions: [Object],
      _secureEstablished: true,
      _securePending: false,
      _newSessionPending: false,
      _controlReleased: true,
      secureConnecting: false,
      _SNICallback: null,
      servername: 'api.openaicom',
      alpnProtocol: false,
      authorized: true,
      authorizationError: null,
      encrypted: true,
      _events: [Object: null prototype],
      _eventsCount: 10,
      connecting: false,
      _hadError: false,
      _parent: null,
      _host: 'openaicom',
      _closeAfterHandlingError: false,
      _readableState: [ReadableState],
      _maxListeners: undefined,
      _writableState: [WritableState],
      allowHalfOpen: false,
      _sockname: null,
      _pendingData: null,
      _pendingEncoding: '',
      server: undefined,
      _server: null,
      ssl: [TLSWrap],
      _requestCert: true,
      _rejectUnauthorized: true,
      parser: null,
      _httpMessage: [Circular *1],
      [Symbol(res)]: [TLSWrap],
      [Symbol(verified)]: true,
      [Symbol(pendingSession)]: null,
      [Symbol(async_id_symbol)]: 61,
      [Symbol(kHandle)]: [TLSWrap],
      [Symbol(lastWriteQueueSize)]: 0,
      [Symbol(timeout)]: null,
      [Symbol(kBuffer)]: null,
      [Symbol(kBufferCb)]: null,
      [Symbol(kBufferGen)]: null,
      [Symbol(kCapture)]: false,
      [Symbol(kSetNoDelay)]: false,
      [Symbol(kSetKeepAlive)]: true,
      [Symbol(kSetKeepAliveInitialDelay)]: 60,
      [Symbol(kBytesRead)]: 0,
      [Symbol(kBytesWritten)]: 0,
      [Symbol(connect-options)]: [Object]
    },
    _header: 'POST /v1/chat/completions HTTP/1.1\r\n' +
      'Accept: application/json, text/plain, */*\r\n' +
      'Content-Type: application/json\r\n' +
      'User-Agent: axios/0.26.1\r\n' +
      'Content-Length: 311\r\n' +
      'Host: openaiom\r\n' +
      'Connection: close\r\n' +
      '\r\n',
    _keepAliveTimeout: 0,
    _onPendingData: [Function: nop],
    agent: Agent {
      _events: [Object: null prototype],
      _eventsCount: 2,
      _maxListeners: undefined,
      defaultPort: 443,
      protocol: 'https:',
      options: [Object: null prototype],
      requests: [Object: null prototype] {},
      sockets: [Object: null prototype],
      freeSockets: [Object: null prototype] {},
      keepAliveMsecs: 1000,
      keepAlive: false,
      maxSockets: Infinity,
      maxFreeSockets: 256,
      scheduling: 'lifo',
      maxTotalSockets: Infinity,
      totalSocketCount: 1,
      maxCachedSessions: 100,
      _sessionCache: [Object],
      [Symbol(kCapture)]: false
    },
    socketPath: undefined,
    method: 'POST',
    maxHeaderSize: undefined,
    insecureHTTPParser: undefined,
    joinDuplicateHeaders: undefined,
    path: '/v1/chat/completions',
    _ended: true,
    res: IncomingMessage {
      _readableState: [ReadableState],
      _events: [Object: null prototype],
      _eventsCount: 4,
      _maxListeners: undefined,
      socket: [TLSSocket],
      httpVersionMajor: 1,
      httpVersionMinor: 1,
      httpVersion: '1.1',
      complete: true,
      rawHeaders: [Array],
      rawTrailers: [],
      joinDuplicateHeaders: undefined,
      aborted: false,
      upgrade: false,
      url: '',
      method: null,
      statusCode: 401,
      statusMessage: 'Unauthorized',
      client: [TLSSocket],
      _consuming: false,
      _dumped: false,
      req: [Circular *1],
      responseUrl: 'https://api.openai.com/v1/chat/completions',
      redirects: [],
      [Symbol(kCapture)]: false,
      [Symbol(kHeaders)]: [Object],
      [Symbol(kHeadersCount)]: 22,
      [Symbol(kTrailers)]: null,
      [Symbol(kTrailersCount)]: 0
    },
    aborted: false,
    timeoutCb: null,
    upgradeOrConnect: false,
    parser: null,
    maxHeadersCount: null,
    reusedSocket: false,
    host: 'api.openaicom',
    protocol: 'https:',
    _redirectable: Writable {
      _writableState: [WritableState],
      _events: [Object: null prototype],
      _eventsCount: 3,
      _maxListeners: undefined,
      _options: [Object],
      _ended: true,
      _ending: true,
      _redirectCount: 0,
      _redirects: [],
      _requestBodyLength: 311,
      _requestBodyBuffers: [],
      _onNativeResponse: [Function (anonymous)],
      _currentRequest: [Circular *1],
      _currentUrl: 'https://api.openai.com/v1/chat/completions',
      [Symbol(kCapture)]: false
    },
    [Symbol(kCapture)]: false,
    [Symbol(kBytesWritten)]: 0,
    [Symbol(kNeedDrain)]: false,
    [Symbol(corked)]: 0,
    [Symbol(kOutHeaders)]: [Object: null prototype] {
      accept: [Array],
      'content-type': [Array],
      'user-agent': [Array],
      'content-length': [Array],
      host: [Array]
    },
    [Symbol(errored)]: null,
    [Symbol(kHighWaterMark)]: 16384,
    [Symbol(kRejectNonStandardBodyWrites)]: false,
    [Symbol(kUniqueHeaders)]: null
  },
  response: {
    status: 401,
    statusText: 'Unauthorized',
    headers: {
      date: 'Fri, 21 Jul 2023 21:00:54 GMT',
      'content-type': 'application/json; charset=utf-8',
      'content-length': '496',
      connection: 'close',
      vary: 'Origin',
      'x-request-id': '2b3660ad32c99b5fdfe72f0bed564be0',
      'strict-transport-security': 'max-age=15724800; includeSubDomains',
      'cf-cache-status': 'DYNAMIC',
      server: 'cloudflare',
      'cf-ray': '7ea660478a583035-BOS',
      'alt-svc': 'h3=":443"; ma=86400'
    },
    config: {
      transitional: [Object],
      adapter: [Function: httpAdapter],
      transformRequest: [Array],
      transformResponse: [Array],
      timeout: 0,
      xsrfCookieName: 'XSRF-TOKEN',
      xsrfHeaderName: 'X-XSRF-TOKEN',
      maxContentLength: -1,
      maxBodyLength: -1,
      validateStatus: [Function: validateStatus],
      headers: [Object],
      method: 'post',
      data: '{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"Provide a Three.js code snippet for square rotating"},{"role":"system","content":"You are a helpful assistant that writes Three.js code. Create code that would create a model for the user. It will be directly imported into js."}],"max_tokens":4000}',
      url: 'https://api.openai.com/v1/chat/completions'
    },
    request: <ref *1> ClientRequest {
      _events: [Object: null prototype],
      _eventsCount: 7,
      _maxListeners: undefined,
      outputData: [],
      outputSize: 0,
      writable: true,
      destroyed: false,
      _last: true,
      chunkedEncoding: false,
      shouldKeepAlive: false,
      maxRequestsOnConnectionReached: false,
      _defaultKeepAlive: true,
      useChunkedEncodingByDefault: true,
      sendDate: false,
      _removedConnection: false,
      _removedContLen: false,
      _removedTE: false,
      strictContentLength: false,
      _contentLength: 311,
      _hasBody: true,
      _trailer: '',
      finished: true,
      _headerSent: true,
      _closed: false,
      socket: [TLSSocket],
      _header: 'POST /v1/chat/completions HTTP/1.1\r\n' +
        'Accept: application/json, text/plain, */*\r\n' +
        'Content-Type: application/json\r\n' +
        'User-Agent: axios/0.26.1\r\n' +
        'Content-Length: 311\r\n' +
        'Host: apipenaicom\r\n' +
        'Connection: close\r\n' +
        '\r\n',
      _keepAliveTimeout: 0,
      _onPendingData: [Function: nop],
      agent: [Agent],
      socketPath: undefined,
      method: 'POST',
      maxHeaderSize: undefined,
      insecureHTTPParser: undefined,
      joinDuplicateHeaders: undefined,
      path: '/v1/chat/completions',
      _ended: true,
      res: [IncomingMessage],
      aborted: false,
      timeoutCb: null,
      upgradeOrConnect: false,
      parser: null,
      maxHeadersCount: null,
      reusedSocket: false,
      host: 'apipenaicom',
      protocol: 'https:',
      _redirectable: [Writable],
      [Symbol(kCapture)]: false,
      [Symbol(kBytesWritten)]: 0,
      [Symbol(kNeedDrain)]: false,
      [Symbol(corked)]: 0,
      [Symbol(kOutHeaders)]: [Object: null prototype],
      [Symbol(errored)]: null,
      [Symbol(kHighWaterMark)]: 16384,
      [Symbol(kRejectNonStandardBodyWrites)]: false,
      [Symbol(kUniqueHeaders)]: null
    },
    data: { error: [Object] }
  },
  isAxiosError: true,
  toJSON: [Function: toJSON]
}
```