I'm trying to implement an API endpoint that allows for multiple file uploads.

I don't want to write any file to disk, but to buffer them and pipe to S3.

Here's my code for uploading a single file. Once I attempt to post multiple files to the endpoint in route.js, it doesn't work.

route.js - I'll keep this as framework-agnostic as possible

import Busboy from 'busboy'
import uploadFile from './upload'
// or const Busboy = require('busboy')
// or const uploadFile = require('./upload')

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const chunks = []
    // hoist the file metadata so the 'finish' handler below can see it
    let fileName, fileType, fileEnc
    form.on('file', (field, file, filename, enc, mime) => {
      fileName = filename
      fileType = mime
      fileEnc = enc
      file.on('data', data => {
        chunks.push(data)
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve({
        fileBuffer: Buffer.concat(chunks),
        fileType,
        fileName,
        fileEnc,
      })
    })
    req.pipe(form)
  })
}

export default async (req, res) => {
// or module.exports = async (req, res) => {
  try {
    const { fileBuffer, ...fileParams } = await parseForm(req)
    const result = await uploadFile(fileBuffer, fileParams) // uploadFile returns a promise
    res.status(200).json({ success: true, fileUrl: result.Location })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}

upload.js

import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3() // the client has to be instantiated before use

export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}
josiahwiebe
1 Answer

I couldn't find much documentation for this, but I think I've patched together a solution.

Most implementations appear to write the file to disk before uploading it to S3, but I wanted to be able to buffer the files and upload to S3 without writing to disk.

The implementation in my question could handle a single file upload, but when I attempted to provide multiple files, it merged all of the buffers into one file.

The one limitation I can't seem to overcome is the field name. For example, you could set up the FormData() like this:

const formData = new FormData()
formData.append('file[]', form.firstFile[0])
formData.append('file[]', form.secondFile[0])
formData.append('file[]', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})

This structure is laid out in the FormData.append() MDN example. However, I couldn't work out how to process that repeated field name on the server (see the sketch below for one possible approach). In the end, I set up my FormData() like this:

Form Data

const formData = new FormData()
formData.append('file1', form.firstFile[0])
formData.append('file2', form.secondFile[0])
formData.append('file3', form.thirdFile[0])

await fetch('/api/upload', {
  method: 'POST',
  body: formData,
})

As far as I can tell, this isn't explicitly wrong, but it's not the preferred method.
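
For what it's worth, Busboy emits a separate file event for every part it parses, even when the field names repeat, so the file[] convention should be workable by collecting chunks in an array that's local to each file handler rather than keying an object by field name. A sketch of a drop-in replacement for the form.on('file') handler in the parseForm below; I haven't tested it against the file[] form above:

form.on('file', (field, file, filename, enc, mime) => {
  const chunks = [] // local to this file, so repeated field names can't collide
  file.on('data', data => chunks.push(data))
  file.on('end', () => {
    files.push({
      fileBuffer: Buffer.concat(chunks),
      fileType: mime,
      fileName: filename,
      fileEnc: enc,
    })
  })
})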

Here's my updated code

route.js

import Busboy from 'busboy'
import uploadFile from './upload'
// or const Busboy = require('busboy')
// or const uploadFile = require('./upload')

const parseForm = async req => {
  return new Promise((resolve, reject) => {
    const form = new Busboy({ headers: req.headers })
    const files = [] // create an empty array to hold the processed files
    const buffers = {} // create an empty object to contain the buffers
    form.on('file', (field, file, filename, enc, mime) => {
      buffers[field] = [] // add a new key to the buffers object
      file.on('data', data => {
        buffers[field].push(data)
      })
      file.on('end', () => {
        files.push({
          fileBuffer: Buffer.concat(buffers[field]),
          fileType: mime,
          fileName: filename,
          fileEnc: enc,
        })
      })
    })
    form.on('error', err => {
      reject(err)
    })
    form.on('finish', () => {
      resolve(files)
    })
    req.pipe(form) // pipe the request to the form handler
  })
}

export default async (req, res) => {
// or module.exports = async (req, res) => {
  try {
    const files = await parseForm(req)
    const fileUrls = []
    for (const file of files) {
      const { fileBuffer, ...fileParams } = file
      const result = await uploadFile(fileBuffer, fileParams) // uploadFile returns a promise
      fileUrls.push({ filename: result.Key, url: result.Location })
    }
    res.status(200).json({ success: true, fileUrls })
  } catch (err) {
    console.error(err)
    res.status(500).json({ success: false, error: err.message })
  }
}
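
The for...of loop above runs the uploads one at a time. The uploads don't depend on each other, so they could also run concurrently; a sketch of that variant of the try block, using the same parseForm and uploadFile:

const files = await parseForm(req)
const results = await Promise.all(
  files.map(({ fileBuffer, ...fileParams }) => uploadFile(fileBuffer, fileParams))
)
const fileUrls = results.map(result => ({ filename: result.Key, url: result.Location }))
res.status(200).json({ success: true, fileUrls })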

upload.js

import S3 from 'aws-sdk/clients/s3'
// or const S3 = require('aws-sdk/clients/s3')

const s3 = new S3() // the client has to be instantiated before use

export default (buffer, fileParams) => {
// or module.exports = (buffer, fileParams) => {
  const params = {
    Bucket: 'my-s3-bucket',
    Key: fileParams.fileName,
    Body: buffer,
    ContentType: fileParams.fileType,
    ContentEncoding: fileParams.fileEnc,
  }
  return s3.upload(params).promise()
}
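
One thing to note: I haven't configured the S3 client here, so it picks up credentials and region from the usual sources (environment variables, the shared credentials file, or an instance role). If you need to configure it explicitly, something like this works; the region below is a placeholder:

const s3 = new S3({
  region: 'us-east-1', // placeholder - use your bucket's region
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
})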
josiahwiebe