The code below fetches CSV file data from AWS S3. After fetching, I need to manipulate the response and return the data from my Node.js backend to the frontend. The problem is that the file contains more than 200k records, so it is not feasible for Node to hold all of it in memory and return it to the frontend in a single response.
const AWS = require('aws-sdk');
const xlsx = require('xlsx');

AWS.config.update({
  accessKeyId: "xxxxxxxxxxxxxxxx",
  secretAccessKey: "xxxxxxxxxxxxxxxxxxxxxxxx",
  region: "--------"
});

const s3 = new AWS.S3();

const params = {
  Bucket: 'bucket',
  Key: 'userFIle/test.csv',
  Range: "bytes=7777-9999"
};

// Inside an async function: fetch the (partial) object and buffer it in memory.
const data = await s3.getObject(params).promise();
const str = data.Body.toString();

// Parse the CSV text with SheetJS and convert every sheet to JSON.
const workBook = xlsx.read(str, { type: 'string' });
const jsonData = workBook.SheetNames.reduce((initial, name) => {
  const sheet = workBook.Sheets[name];
  initial[name] = xlsx.utils.sheet_to_json(sheet);
  return initial;
}, {});

console.log(jsonData, jsonData.Sheet1.length);
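What I think I need is to stream the rows to the frontend instead of materializing all 200k records at once. Below is a minimal sketch of the direction I am considering, assuming an Express handler (the `app.get` route and the naive comma-split parsing are my assumptions, not part of the current code). It pipes the S3 object through Node's built-in `readline` module and writes each row to the response as newline-delimited JSON:

const readline = require('readline');

// Hypothetical Express route; route path and handler are assumptions.
app.get('/users/export', (req, res) => {
  // createReadStream() avoids buffering the whole object in memory.
  const s3Stream = s3
    .getObject({ Bucket: 'bucket', Key: 'userFIle/test.csv' })
    .createReadStream();

  // Read the CSV one line at a time.
  const rl = readline.createInterface({ input: s3Stream });

  res.setHeader('Content-Type', 'application/x-ndjson');

  let header = null;
  rl.on('line', (line) => {
    const cells = line.split(','); // naive split; a real CSV parser handles quoted fields
    if (!header) {
      header = cells; // first line is the column header row
      return;
    }
    // Manipulate the row here, then emit it as one NDJSON line.
    const row = Object.fromEntries(header.map((h, i) => [h, cells[i]]));
    res.write(JSON.stringify(row) + '\n');
  });

  rl.on('close', () => res.end());
  s3Stream.on('error', (err) => res.destroy(err));
});

With this approach the backend's memory stays bounded to roughly one row at a time, and the frontend can consume the response incrementally (for example via `fetch` and a `ReadableStream` reader) instead of waiting for one giant JSON array. Is this a reasonable way to handle it, or is there a better pattern for this?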