I am working on migrating an application from Azure Storage to Google Cloud Storage.
My upload API is as follows:
const multiparty = require("multiparty");

const uploadFile = async (req, res) => {
  try {
    const form = new multiparty.Form();
    form.on("part", async function (part) {
      if (part.filename) {
        // Strip whitespace from the original filename
        const cleanName = part.filename.replace(/\s/g, "");
        // Remaining size of this part in bytes
        const size = part.byteCount - part.byteOffset;
        const container = containerName(part.headers["content-type"]);
        const blobName = fileName(cleanName);
        // `part` is a readable stream carrying this file's bytes
        const fileDetails = await GCSService.uploadFile(
          container,
          part,
          blobName,
          part.headers["content-type"],
          part.filename
        );
        Response.success(res, res, status.HTTP_OK, fileDetails);
      }
    });
    form.parse(req);
  } catch (err) {
    console.log("Error Uploading File: ", err);
    throw new Error("file upload error");
  }
};
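For context, each multiparty "part" event hands the handler a readable stream for one uploaded file, and that stream is what I pass to GCSService.uploadFile. Stripped of my app's helpers (containerName, fileName, Response), the wiring looks roughly like this (a minimal sketch, not my exact code):

const multiparty = require("multiparty");

// Minimal sketch of the multipart wiring with the app helpers removed.
const handler = (req, res) => {
  const form = new multiparty.Form();
  form.on("part", (part) => {
    if (!part.filename) {
      part.resume(); // drain non-file form fields so parsing continues
      return;
    }
    // `part` is a readable stream of this file's bytes
    console.log("file part:", part.filename, part.headers["content-type"]);
    part.resume(); // the real handler streams this into GCSService.uploadFile
  });
  form.on("close", () => res.end("done"));
  form.parse(req);
};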
With Azure Storage I could upload the multipart stream directly with blockBlobClient.uploadStream:
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
// 4 MiB buffer size, up to 20 concurrent block uploads
await blockBlobClient.uploadStream(part, 4 * 1024 * 1024, 20, {
  blobHTTPHeaders: {
    blobContentType: mimetype
  },
  onProgress: (ev) => console.log(ev)
});
But when I try the equivalent with Google Cloud Storage's createWriteStream, the upload appears to succeed, yet the resulting file is 0 bytes:
bucket.file(blobName).createWriteStream({
  resumable: false
}).on("finish", async () => {});
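Looking at it again, this snippet never actually writes anything into the stream, so I suspect I need to pipe part into it. Is something like the following the right approach? This is just a sketch, assuming part is a readable stream and reusing bucket, blobName, and mimetype from the snippets above:

const { pipeline } = require("stream/promises");

// Sketch only: pipe the multipart `part` stream into the GCS write stream.
// `bucket`, `blobName`, `mimetype`, and `part` are assumed from the code above.
const writeStream = bucket.file(blobName).createWriteStream({
  resumable: false,                    // single-request (non-resumable) upload
  metadata: { contentType: mimetype }  // counterpart of blobContentType on Azure
});

// pipeline() surfaces errors from either stream and resolves when the write finishes
await pipeline(part, writeStream);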
Can anyone point me in the direction of the right Google Cloud Storage implementation?
Thanks