I have a use case where I have to move a zip file from EFS to S3 using NodeJs Lambda function. I am ready data using fs.createReadStream. It is creating a zip file on EFS and when I try to move that file to S3 using putObjectCommand it doesn't upload that file to S3. The weird thing is that it doesn't even throw any error. Here is my nodejs Lambda code:
export const lambdaHandler = async (event, context) => {
try {
var efsDirectory = "/mnt/data/";
//var zipFile = '/mnt/data/zipped.zip'
fs.readdirSync(efsDirectory).forEach(async file => {
var filePath = path.join(efsDirectory, file);
console.log("My file is :", file);
console.log("upper file path is :", filePath);
var response = fs.statSync(filePath);
console.log(response.birthtime)
let fileBirthTime = response.birthtime;
console.log(fileBirthTime);
let currentTime = new Date();
console.log("value of current time :", currentTime);
let timeDifference = currentTime - fileBirthTime;
console.log("Time difference is :", timeDifference);
let timeDifferenceInMin = ((timeDifference / 1000) / 60);
console.log(timeDifferenceInMin);
if (timeDifferenceInMin >= 1) {
console.log("File created time is greater than or euqal to 15 mins");
var filePathWithouExt = path.parse(file).name;
var zipFile = path.join(efsDirectory, filePathWithouExt);
console.log("zip file path is this :", zipFile);
var result = ZipLocal.sync.zip(efsDirectory).compress().save(`${zipFile}.zip`);
console.log("files zipped");
var fileStream = fs.createReadStream(`${zipFile}.zip`);
fileStream.on('error', function (error) {
console.log(`error: ${error.message}`);
})
// console.log("EFS stream data :", fileStream)
var s3Key = path.basename(`${zipFile}.zip`);
console.log("s3key value is :", s3Key);
let bucketName = "matomo-lambda-test-bucket";
const params = {
Bucket: bucketName,
Key: `${s3Key}.zip`,
Body: fileStream
};
console.log("Params are :", params)
await s3Client.send(
new PutObjectCommand(params)
).then(res=>{
console.log("res", res)
}).catch(err=>{
console.log("err",err)
})
// console.log("Upload Completed", s3Data);
var bucketUrl = `https://${bucketName}.s3.us-west-2.amazonaws.com/${s3Key}`;
console.log(bucketUrl);
fs.unlinkSync(efsDirectory + file);
console.log("deleted file :", file);
fs.readdirSync(efsDirectory).forEach(file => {
console.log("Reading file :", file);
});
return {
'statusCode': 200,
'body': JSON.stringify({
message: 'hello world lambda',
})
}
}
else {
console.log("File created time is less than 15 min.");
}
});
}
catch (err) {
console.log(err);
return err;
}
};
For each iteration it just zip the file but doesn't trigger s3 putobjectcommand.