Uploading files to OpenAI

Could I get some advice on the best way to upload, to OpenAI, a file pulled from an S3 bucket?
I am getting this error in my terminal:

RangeError: Maximum call stack size exceeded
Exception in PromiseRejectCallback:
file:///Users/kobby/Desktop/Projects/BetaResumeBuilder/node_modules/openai/uploads.mjs:143
        await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop)));
                      ^

RangeError: Maximum call stack size exceeded

The code that threw that error:

export default async function POST(req: NextRequest, res: NextResponse) {
  try {
    let assistantId;
    let assistantDetails;
    let path;
    let s3Key;
    let downloadedFileBuffer;

    const form = new formidable.IncomingForm();
    const data = await new Promise<any>((resolve, reject) => {
      form.parse(req, (err, fields, files) => {
        if (err) {
          reject(err);
          return;
        }
        resolve({ fields, files });
      });
    });

    const { job, company, description, uploadMode, resume } = data.fields;

    if (uploadMode === "pdf") {
      const file: formidable.File | null = data.files.resume;

      if (file) {
        const buffer = await fsPromises.readFile(file.path);

        s3Key = `XXX`; 

        try {
          await s3.send(
            new PutObjectCommand({
              Bucket: process.env.NEXT_PUBLIC_AWS_S3_BUCKET_NAME,
              Key: s3Key,
              Body: buffer,
            })
          );
          console.log(`File uploaded to S3 at key: ${s3Key}`);
        } catch (error) {
          console.error("Error uploading file to S3:", error);
          return NextResponse.json({
            success: false,
            error: "Error uploading file to S3",
          });
        }
      } else {
        console.error("No file provided in the request");
        return NextResponse.json({
          success: false,
          error: "No file provided in the request.",
        });
      }

      ........

        const assistant = await openai.beta.assistants.create(assistantConfig);

        assistantDetails = { assistantId: assistant.id, ...assistantConfig };

        await fsPromises.writeFile(
          assistantFilePath,
          JSON.stringify(assistantDetails, null, 2)
        );
        assistantId = assistantDetails.assistantId;
      }
      const thread = await openai.beta.threads.create();

      console.log("Downloading file from the S3 bucket๐Ÿคž๐Ÿพ");

      try {
        const downloadParams = {
          Bucket: process.env.NEXT_PUBLIC_AWS_S3_BUCKET_NAME,
          Key: s3Key,
        };
        downloadedFileBuffer = await s3.send(new GetObjectCommand(downloadParams));
      } catch (error) {
        console.error("Error downloading file from S3:", error);
        return NextResponse.json({
          success: false,
          error: "Error downloading file from S3",
        });
      }

      console.log("File recieved from Storage๐Ÿคž๐Ÿพ");

      console.log("Uploading file to open ai๐Ÿคž๐Ÿพ");

      // streaming a multi-part upload to open ai

      const uploadFile = await openai.files.create({
        file: downloadedFileBuffer.Body,
        purpose: "assistants",
      });
      console.log("Done uploading to openai ๐ŸคŸ๐Ÿพ");

When I instead buffer the payload and try to chunk it, I get:

error:  APIError: 413 The data value transmitted exceeds the capacity limit.
    at APIError.generate (...node_modules/openai/error.mjs:62:16)
    at OpenAI.makeStatusError (...node_modules/openai/core.mjs:244:25)
    at OpenAI.makeRequest (...node_modules/openai/core.mjs:283:30)
    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
    at async POST (webpack-internal:///(api)/./pages/api/generate-resume/index.ts:157:32) {
  status: 413,
  headers: {
    'access-control-allow-origin': '*',
    'alt-svc': 'h3=":443"; ma=86400',
    'cf-cache-status': 'DYNAMIC',
    'cf-ray': '84d741f15b4136ae-YYZ',
    connection: 'keep-alive',
    'content-length': '158',
    'content-type': 'application/json',
    date: 'Tue, 30 Jan 2024 05:19:03 GMT',
    'openai-processing-ms': '37',
    server: 'cloudflare',
    'set-cookie': 'xxxx=; path=/; expires=Tue, 30-Jan-24 05:49:03 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None, _cfuvid=xxxx; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None',
    'strict-transport-security': 'max-age=15724800; includeSubDomains',
    'x-request-id': 'xxxx'
  },
  error: {
    message: 'The data value transmitted exceeds the capacity limit.',
    type: 'server_error',
    param: null,
    code: null
  },
  code: null,
  param: null,
  type: 'server_error'
}

related code:

// ... [previous code]

let downloadedFileBuffer;

try {
  const downloadParams = {
    Bucket: process.env.NEXT_PUBLIC_AWS_S3_BUCKET_NAME,
    Key: s3Key,
  };
  const fileStream = await s3.send(new GetObjectCommand(downloadParams));
  downloadedFileBuffer = await streamToBuffer(fileStream.Body);

} catch (error) {
  console.error("Error downloading file from S3:", error);
  return NextResponse.json({
    success: false,
    error: "Error downloading file from S3",
  });
}

console.log("File received from Storage ๐Ÿคž๐Ÿพ");

console.log("Uploading file to OpenAI ๐Ÿคž๐Ÿพ");

// Helper function to convert a stream into a buffer.
// Drains a Node readable stream, resolving with its full contents as a
// single Buffer; rejects with the stream's error if reading fails.
function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const collected = [];
    stream.on('data', (part) => {
      collected.push(part);
    });
    stream.on('error', reject);
    stream.on('end', () => {
      resolve(Buffer.concat(collected));
    });
  });
}

// Uploading file to OpenAI
try {
  const uploadFile = await openai.files.create({
    file: downloadedFileBuffer,
    purpose: "fine-tune",
  });
  console.log("Done uploading to OpenAI ๐ŸคŸ๐Ÿพ");
} catch (error) {
  console.error("Error uploading file to OpenAI:", error);
  return NextResponse.json({
    success: false,
    error: "Error uploading file to OpenAI",
  });
}

// ... [rest of the code]

1 Like