ISSUE: I am trying to use Nodejs streams to read a small CSV file (1 row) using the fast-csv module.
The CSV 'rows' are pushed to an array (rows[]) when the 'data' event is emitted. When 'end' is emitted, the data is updated in a DB. However, the 'end' event is triggered before the rows[] array can be populated. This happens intermittently and sometimes the code works as intended.
My guess after reading the Node.js docs is that this is due to the small size of the CSV file. The data is being read in 'flowing' mode, and as soon as the first row is read, the 'end' event is triggered, which seems to happen before the record is pushed to the required array.
Tried using the 'paused' mode, but it didn't work.
I am new with Nodejs and not able to figure out how to make this function work. Any help or guidance would be appreciated.
CODE:
/**
 * Parses a CSV file of (isin, announcement_date, result_date) rows and
 * bulk-updates `result_date` in the earnings table via a single CASE query.
 *
 * @param {string} filename - Name of the CSV file to read.
 * @param {string} tempLocation - Directory containing the file.
 * @returns {Promise<number>} Resolves with the number of changed rows
 *   (0 when the CSV contains no rows); rejects on stream, parse, or DB error.
 */
function updateToDb(filename, tempLocation) {
  const rows = [];
  // NOTE(review): '\\' makes this Windows-only — consider path.join; TODO confirm target OS.
  const fileStream = fs.createReadStream(tempLocation + '\\' + filename);

  return new Promise((resolve, reject) => {
    fileStream
      // fs errors (e.g. missing file) do NOT propagate through pipe();
      // without this handler they would crash the process instead of rejecting.
      .on('error', reject)
      .pipe(csv.parse())
      .on('data', (row) => {
        rows.push(row);
      })
      .on('end', () => {
        if (rows.length === 0) {
          // No rows parsed: the CASE expression below would be invalid SQL.
          resolve(0);
          return;
        }
        // Build one WHEN arm per CSV row. The original appended
        // 'ELSE result_date' after EVERY arm (invalid SQL for >1 row) and
        // omitted the space between consecutive arms; a single ELSE now
        // closes the CASE expression.
        // WARNING: values are interpolated directly into the SQL string —
        // SQL injection risk if the CSV content is untrusted. Prefer
        // parameterized queries if the driver supports them.
        const whenClauses = rows
          .map(
            ([isin, date, result]) =>
              `WHEN isin = '${isin}' AND announcement_date = '${date}' THEN '${result}'`,
          )
          .join(' ');
        const query = `UPDATE ${tables.earnings} SET result_date = CASE ${whenClauses} ELSE result_date END`;
        connection.query(query, (error, results) => {
          if (error) {
            reject(error);
          } else {
            resolve(results.changedRows);
          }
        });
      })
      .on('error', reject);
  });
}