I have written a script in Node.js that takes a screenshot of websites (using SlimerJS). The script takes around 10-20 seconds to complete, and the problem is that the server is stalled until it has finished.
var assert = require('assert');
var path = require('path');
var URL = require('url');
var exec = require('child_process').exec;

app.get('/screenshot', function (req, res, next) {
  var url = req.query.url;
  assert(url, "query param 'url' needed");

  // actual saving happens here
  var fileName = URL.parse(url).hostname + '_' + Date.now() + '.png';
  var command = 'xvfb-run -a -n 5 node slimerScript.js ' + url + ' ' + fileName;

  exec(command, function (err, stdout, stderr) {
    if (err) { return next(err); }
    if (stderr && stderr.indexOf('error') !== -1) { return next(new Error('Error occurred!')); }
    return res.send({
      status: true,
      data: {
        fileName: fileName,
        url: 'http://' + path.join(req.headers.host, 'screenshots', fileName)
      }
    });
  });
});
Since the script spawns a Firefox browser in memory and loads the website, RAM usage can spike up to 600-700 MB, so I cannot run this command for multiple requests concurrently; RAM is expensive on servers.
Is it possible to queue the incoming requests and execute them in FIFO fashion?
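To make the idea concrete, this is roughly what I have in mind: a plain in-process FIFO with concurrency 1 that wraps the same exec call. It is only a sketch; the enqueue/runNext names and the bare-array queue are mine, not from any library.

var exec = require('child_process').exec;

var jobs = [];       // pending jobs: { command, callback }
var running = false; // true while one screenshot command is executing

// push a job and kick the worker; the caller gets the usual exec callback later
function enqueue(command, callback) {
  jobs.push({ command: command, callback: callback });
  runNext();
}

function runNext() {
  if (running || jobs.length === 0) { return; }
  running = true;
  var job = jobs.shift();
  exec(job.command, function (err, stdout, stderr) {
    running = false;
    job.callback(err, stdout, stderr);
    runNext(); // pick up the next waiting request, if any
  });
}

The route handler would then call enqueue(command, ...) instead of exec(command, ...) directly, and jobs.length at enqueue time could be sent back to the user as their position in the queue.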
I looked at packages like kue, bull and bee-queue, but they all seem to assume the job list is already known before the queue is started, whereas my job list depends on users hitting the site. I also want to tell users that they are in a queue and need to wait for their turn. Is this possible with the packages mentioned above?
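For example, with bull I was imagining something along these lines, replacing the exec call in the handler above (the 'screenshots' queue name and the Redis URL are placeholders from my setup, and I am not sure this is how the package is meant to be used; that is part of my question):

var Queue = require('bull');
var URL = require('url');
var exec = require('child_process').exec;

var screenshotQueue = new Queue('screenshots', 'redis://127.0.0.1:6379');

// concurrency 1, so only one headless Firefox is ever in memory
screenshotQueue.process(1, function (job, done) {
  exec(job.data.command, function (err, stdout, stderr) {
    done(err);
  });
});

app.get('/screenshot', function (req, res, next) {
  var url = req.query.url;
  var fileName = URL.parse(url).hostname + '_' + Date.now() + '.png';
  var command = 'xvfb-run -a -n 5 node slimerScript.js ' + url + ' ' + fileName;

  // jobs are added as requests come in, not from a pre-built list
  screenshotQueue.add({ command: command, fileName: fileName })
    .then(function (job) {
      return screenshotQueue.getWaitingCount().then(function (waiting) {
        // tell the user roughly where they are in the queue
        res.send({ status: true, jobId: job.id, position: waiting });
      });
    })
    .catch(next);
});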