I have an app which is nodejs + express + reactjs. On the Node.js side I am fetching a large volume of data rows using a MySQL stored procedure and sending the response to the client asynchronously. But this is causing delays in the browser because of the large volume. To resolve this, I want to send chunks of data to the client from the Node side — maybe 10K rows at a time — and then keep sending further data, which will be appended to the existing data on the client side. Which technique should I use here?
I tried to stream the data and send it in row sets of 10,000, like below:
return new Promise((resolve, reject)=>{
var request = new sql.Request(dbConn);
request.stream = true;
request.query(<sp>);
let rowsToProcess = [];
request.on('row', row => {
rowsToProcess.push(row);
if (rowsToProcess.length === 10000) {
resolve(rowsToProcess)
request.pause();
processRows();
}
});
request.on('done', () => {
processRows();
});
function processRows() {
// process rows
rowsToProcess = [];
request.resume();
}
});
But it sends only the first 10,000 records. After that, the server side throws the error:
uncaughtException: request.pause is not a function TypeError: request.pause is not a function