I want to build a cloud function that receives an object like this one:
{
  "files": [
    {
      "url": "https://myUrl/cat.jpg",
      "name": "cat.jpg"
    },
    {
      "url": "https://anyOtherUrl/mouse.jpg",
      "name": "mouse.jpg"
    },
    {
      "url": "https://myUrl2/dog.jpg",
      "name": "dog.jpg"
    }
  ],
  "referenceId": "cute-images"
}
I would like to get those files, compress them into a zip file (with name = referenceId), save that zip file to a bucket, and finally, send the zip URL back as a response.
My main concern is memory usage: I want to make correct use of pipes/streams so that the files are never fully buffered in memory. I would really appreciate pointers to good documentation for this kind of implementation.
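From the docs I've found so far, I believe the basic pattern for streaming a single fetched file into a bucket looks roughly like this (a minimal sketch; "my-bucket" is a placeholder bucket name, and the URL is taken from the example above):

const fetch = require("node-fetch");
const { Storage } = require("@google-cloud/storage");

async function uploadFromUrl() {
  const storage = new Storage();
  // "my-bucket" is a placeholder name.
  const file = storage.bucket("my-bucket").file("cat.jpg");
  const response = await fetch("https://myUrl/cat.jpg");

  // Pipe the response body (a readable stream) straight into the
  // bucket; "finish" on the write stream signals the upload is done.
  await new Promise((resolve, reject) => {
    response.body
      .pipe(file.createWriteStream())
      .on("finish", resolve)
      .on("error", reject);
  });
}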
This is what I have so far, but I don't know if it's any good:
const ZipStream = require("zip-stream");
const fetch = require("node-fetch");
const { Storage } = require("@google-cloud/storage");

exports.zipBuilder = async (req, res) => {
  // Deleted lines of request validation

  const zip = new ZipStream();
  const queue = [...req.body.files];

  // Set up the destination and pipe *before* adding entries, so the
  // archive streams into the bucket instead of buffering in memory.
  // Note: piping into the Bucket object itself fails; a File's write
  // stream is the writable target.
  const storage = new Storage();
  const ourBucket = storage.bucket(process.env.DESTINATION_BUCKET);
  const zipFile = ourBucket.file(`${req.body.referenceId}.zip`);
  const upload = new Promise((resolve, reject) => {
    zip
      .pipe(zipFile.createWriteStream())
      .on("finish", resolve)
      .on("error", reject);
  });

  // zip-stream accepts only one entry at a time, so add the files
  // sequentially.
  async function addFilesToZip() {
    const elem = queue.shift();
    const response = await fetch(elem.url);
    // Pass the response body (a readable stream) instead of buffering
    // the whole file with response.buffer().
    await new Promise((resolve, reject) => {
      zip.entry(response.body, { name: elem.name }, (err) =>
        err ? reject(err) : resolve()
      );
    });
    if (queue.length > 0) await addFilesToZip();
    else zip.finalize();
  }

  await addFilesToZip();
  await upload;

  // Get ZIP URL from bucket
  res.send(zipUrl);
};
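For the last step (the "Get ZIP URL from bucket" part), I'm assuming a signed URL is the way to go, though I'm not sure this is correct; the one-hour expiry below is an arbitrary choice:

// Assuming the bucket is private, request a temporary signed read URL
// for the uploaded zip (zipFile is the File object from above).
const [zipUrl] = await zipFile.getSignedUrl({
  action: "read",
  expires: Date.now() + 60 * 60 * 1000, // valid for one hour
});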
Edit: This question may look like several questions rolled into one. But since this has to work as a single stream, I'm not asking for exact answers, just ideas on what to study to get a better understanding of the solution.