On the server side, I have this Node.js code. I have simplified it here to make the question clear:
var express = require('express');
var app = express();
var spawn = require('child_process').spawn;

app.get('/print_N', function(req, res) {
    var child = spawn('python', ['some_tick_data_process.py']);

    // Kill the child process when the client connection closes
    req.on('close', function() {
        console.log('req to close with pid=' + child.pid);
        child.kill('SIGTERM');
    });

    // Stream the child's stdout straight to the HTTP response
    child.stdout.pipe(res);

    child.stdout.on('error', function(err) {
        console.log(child.pid + " error !!!");
        console.log(err);
    });
});

app.listen(3000);
console.log('Listening on port 3000...');
The underlying some_tick_data_process.py is quite I/O intensive. On the client side, I have a small Python application that reads the stream. The problem is that for some processes the request closes early: the "req to close" handler fires and the child is killed. With a small number of processes it is OK. I tried:
var http = require('http');
http.globalAgent.maxSockets = 100;
But it doesn't help. Please share your thoughts, thanks!
Following P.T.'s advice, the fix is:
app.get('/:version/query', function(req, res) {
    res.setTimeout(4 * 60 * 60 * 1000); // raise the Node.js socket timeout to 4 hours
    // ... rest of the handler (spawn, pipe, etc.) as before
});
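For reference, a minimal sketch of an alternative (my assumption, not part of the accepted fix): the same timeout can be applied once for the whole server through the http.Server object that app.listen() returns, instead of per response inside each route handler.

var express = require('express');
var app = express();

// app.listen() returns the underlying http.Server
var server = app.listen(3000);

// Applies to every connection; older Node.js versions default to a
// 2-minute socket timeout, which cuts off long-running streams.
server.setTimeout(4 * 60 * 60 * 1000); // 4 hours

Setting it per response, as in the fix above, keeps the change scoped to the one long-running route rather than the whole server.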