(Updated with solution!) What I want to do is read a file from S3, update some information, and upload it back, all using streams, without having to create a copy of the file on the server. I used the event-stream library to parse the file like this:
const AWS = require('aws-sdk');
const fs = require('fs');
const es = require('event-stream');

const s3 = new AWS.S3();

// Bucket and Key are provided by the caller
let params = {
  Bucket: Bucket,
  Key: Key,
};
let rStream = s3.getObject(params).createReadStream();
let updatedFile = fs.createWriteStream('/tmp/updated-' + Key);

return new Promise((resolve, reject) => {
  let s = rStream
    .pipe(es.split())
    .pipe(es.mapSync(function (data) {
      if (!data) return; // skip empty lines
      s.pause(); // throttle the stream while this line is processed
      let line = data.split(',');
      if (line[1] === 'xyz') {
        line[1] = 'xyz11';
      }
      updatedFile.write(line.join(','));
      updatedFile.write('\n');
      s.resume();
    })
    .on('error', function (err) {
      reject(err);
    })
    .on('end', function () {
      updatedFile.end(); // flush and close the temp file
      // createReadStream from path of updatedFile
      // s3 upload file logic
      // delete tmp file logic
      resolve(true);
    }));
});
My problem is that when I return this stream, it's closed and has readable: false, so I cannot use it:
const updatedStream = fs.createReadStream(tmpfilePath);
params = {
  Bucket: Bucket,
  Key: Key,
  Body: updatedStream
};
// .promise() returns an awaitable Promise; .send() only takes a callback
await s3.upload(params)
  .on('httpUploadProgress', (progress) => {
    console.log('progress', progress);
  })
  .promise();
// A surrounding try/catch cannot catch a throw from inside an async
// callback, so handle the unlink error in the callback itself
fs.unlink(tmpfilePath, function (err) {
  if (err) {
    console.log("Warning: Unable to delete the tmp file.", err);
  } else {
    console.log("Tmp file deleted successfully.");
  }
});
Any thoughts? event-stream closes the stream on end, and that's why it's readable: false when I pass it back.
How can I create a read stream, change the data, and keep the stream readable so that I can pass it to the s3.upload function?
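(For reference: one way to keep the edited data readable end-to-end is a Transform stream piped straight into the upload body, which would avoid the temp file entirely. A minimal, untested sketch, assuming Node's built-in stream module and the same line splitting as above:)

const { Transform } = require('stream');
const es = require('event-stream');

// Edits each CSV line and re-emits it, so the data stays readable
// and can be piped straight into s3.upload as the Body
const lineEditor = new Transform({
  transform(chunk, encoding, callback) {
    if (chunk.length === 0) return callback(); // drop empty pieces from es.split()
    let line = chunk.toString().split(',');
    if (line[1] === 'xyz') {
      line[1] = 'xyz11';
    }
    callback(null, line.join(',') + '\n');
  }
});

const body = s3.getObject({ Bucket: Bucket, Key: Key }).createReadStream()
  .pipe(es.split())
  .pipe(lineEditor);

// inside an async function:
await s3.upload({ Bucket: Bucket, Key: Key, Body: body }).promise();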
SOLUTION
In the end I managed to get this working.
I created a writeStream to a temp file, to which I write after each iteration. When the stream ends, I close the writeStream to close my temporary file.
Then I upload it back to S3 (creating a readStream from my temp file).
Once the upload is done, I delete my temporary file.
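Putting it together, the full flow looks roughly like this (a sketch; processFile is a hypothetical name for the Promise-returning snippet at the top of the question):

const fs = require('fs');

async function updateS3File(Bucket, Key) {
  const tmpfilePath = '/tmp/updated-' + Key;

  // 1. Stream the S3 object through event-stream into the temp file;
  //    resolves after updatedFile.end() has been called
  await processFile(Bucket, Key, tmpfilePath); // hypothetical wrapper name

  // 2. Re-read the temp file and stream it back up to S3
  await s3.upload({
    Bucket: Bucket,
    Key: Key,
    Body: fs.createReadStream(tmpfilePath)
  }).promise();

  // 3. Remove the temp file once the upload has finished
  fs.unlink(tmpfilePath, function (err) {
    if (err) console.log('Warning: Unable to delete the tmp file.', err);
    else console.log('Tmp file deleted successfully.');
  });
}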