
I am working on migrating an application from Azure Storage to Google Cloud Storage.

My upload API is as follows:

const multiparty = require("multiparty");

let uploadFile = async (req, res) => {
  try {
    const form = new multiparty.Form();
    form.on("part", async function (part) {
      if (part.filename) {
        // Strip whitespace from the original file name
        const cleanName = part.filename.replace(/\s/g, "");
        const size = part.byteCount - part.byteOffset;
        const container = containerName(part.headers["content-type"]);
        const blobName = fileName(cleanName);

        // Stream the part straight to storage without buffering to disk
        const fileDetails = await GCSService.uploadFile(
          container,
          part,
          blobName,
          part.headers["content-type"],
          part.filename
        );
        Response.success(res, res, status.HTTP_OK, fileDetails);
      }
    });
    form.parse(req);
  } catch (err) {
    console.log("Error Uploading File: ", err);
    throw new Error("file upload error");
  }
};

With Azure Storage, I was able to upload the multipart stream directly using the uploadStream function:

await blockBlobClient.uploadStream(part, 4 * 1024 * 1024, 20, {
  blobHTTPHeaders: {
    blobContentType: mimetype
  },
  onProgress: (ev) => console.log(ev)
});
const blobClient = containerClient.getBlobClient(blobName);

But when I try the same with Google Cloud Storage's createWriteStream, the upload appears to succeed, but the resulting file is 0 bytes.

bucket.file(blobName).createWriteStream({
    resumable: false
}).on('finish', async () => {});

Can anyone point me in the direction of the right Google Cloud Storage implementation?

Thanks


1 Answer


Your snippet never writes any data into the stream returned by createWriteStream, which is why the resulting file is 0 bytes: you need to pipe a readable source into it. Try the example from the GCS API reference:

const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('panda.jpg');

fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
  .pipe(file.createWriteStream())
  .on('error', function(err) {})
  .on('finish', function() {
    // The file upload is complete.
  });
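
In your case the readable source is the multiparty part itself, so you can pipe part into the write stream instead of fs.createReadStream. Here is a minimal sketch of what GCSService.uploadFile could look like; the function and parameter names are illustrative assumptions based on your question, not an existing API:

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

// Hypothetical sketch: pipe the multiparty part (a readable stream)
// into the GCS write stream and resolve once 'finish' fires.
const uploadFile = (bucketName, partStream, blobName, mimetype) =>
  new Promise((resolve, reject) => {
    const file = storage.bucket(bucketName).file(blobName);
    partStream
      .pipe(file.createWriteStream({
        resumable: false,
        metadata: { contentType: mimetype },
      }))
      .on('error', reject)
      .on('finish', () => resolve({ bucket: bucketName, name: blobName }));
  });

Wrapping the stream events in a Promise lets the caller await the upload, the same way uploadStream was awaited with Azure.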

A similar issue is also discussed in another Stack Overflow thread.
