I was trying to write a program that takes all of the files that are large enough to benefit from gzip and compresses them using the gzip part of the zlib module, and I came across the same error described in this question: Node.js ran out of file descriptors and was therefore unable to open any other files. That same question describes fixing the error by increasing the number of file descriptors. While trying to do this, however, I've come across a couple of questions that I can't find the answer to.
-
Are file descriptors shared between parent and child processes? Meaning, could we fix this error simply by creating a new child process for programs that use a lot of file descriptors? Does the type of child process matter? - How many file descriptors do processes like zlib use? In my program I was trying to zip 1695 files, but 673 failed. I know that each file uses at least 2 file descriptors (1 for the read stream and 1 for the write stream), but the limit is far above that, so how many does zlib itself create?
Is there any way of changing the file-descriptor limit inside a Node.js JavaScript file, or can it only be changed externally? Can the limit be changed via command-line parameters so that it can be application-specific? - Is it possible to monitor how many file descriptors are currently in use? That might allow you to slow down the creation of new read/write streams, letting older operations complete and free up file descriptors — preferably a way within Node.js itself so it can be easily integrated into a Node JavaScript file.
More for example purposes, here is the code for my program:
// Running tally of files that failed to compress.
var errors=0;
/**
 * Gzip-compress a single file, writing the result next to it as `<file>.gz`.
 *
 * Fixes over the original version:
 *  - The gzip transform stream now has an 'error' handler; previously an
 *    error there was unhandled.
 *  - On any failure, BOTH streams are destroyed so their file descriptors
 *    are released immediately. The original leaked the write-stream FD when
 *    the read stream errored (a likely contributor to FD exhaustion).
 *  - The fs option is spelled `autoClose` (the original `autoclose` was a
 *    typo and had no effect; `autoClose` defaults to true anyway).
 *
 * @param {string} file - Path of the file to compress.
 * @param {string} type - Compression type; anything containing 'gzip' is accepted.
 * @param {function(Error=)} [done] - Optional completion callback, invoked
 *   once with an Error on failure or with null on success/skip. New,
 *   backward-compatible parameter: callers can use it to limit how many
 *   compressions run at once.
 */
function compressFile(file,type,done){
  if(type.indexOf('gzip')<0){
    console.log('not supported compression');
    if(done)done(new Error('not supported compression'));
    return;
  }
  fs.stat(file,function(err,stat){
    if(err){
      console.log(err);
      if(done)done(err);
      return;
    }
    if(stat.size<=1000){
      // Too small to be worth gzipping; skip silently like the original.
      if(done)done(null);
      return;
    }
    var gzip=zlib.createGzip();
    var source=fs.createReadStream(file,{autoClose:true});
    var sink=fs.createWriteStream(file+'.gz',{autoClose:true});
    var settled=false;
    // Single completion point: guarantees `done` fires at most once and
    // that both descriptors are torn down on any failure path.
    function finish(e){
      if(settled)return;
      settled=true;
      if(e){
        source.destroy();
        sink.destroy();
        errors++;
        console.log(errors);
      }
      if(done)done(e||null);
    }
    source.on('error',function(e){
      console.log(file,1);
      finish(e);
    });
    gzip.on('error',finish);
    sink.on('error',function(e){
      console.log(file,2);
      finish(e);
    });
    sink.on('finish',function(){
      finish(null);
    });
    source.pipe(gzip).pipe(sink);
  });
}
/**
 * Walk the current directory, expanding subdirectories in place, and kick
 * off gzip compression for every file that is >= 1000 bytes and not
 * already a `.gz` file.
 *
 * Fix over the original: the loop index `i` was an implicit global (no
 * `var`), so two overlapping calls would have corrupted each other's
 * iteration; it is now properly declared.
 *
 * NOTE(review): `compress` is a free variable expected to hold the
 * compression type string (e.g. 'gzip') and must be defined elsewhere in
 * the file — confirm against the rest of the source.
 */
function compressAll(){
  fs.readdir('./',function(err,files){
    if(err){
      console.log(err);
      return;
    }
    for(var i=0;i<files.length;i++){
      var entry=files[i];
      var stat=fs.statSync('./'+entry);
      if(stat.isDirectory()){
        // Replace the directory entry with its children (prefixed with the
        // directory name) and revisit this index on the next iteration.
        var children=fs.readdirSync(entry).map(function(child){
          return entry+'/' +child;
        });
        files.splice(i,1);
        Array.prototype.push.apply(files,children);
        i--;
      }else if(stat.size<1000){
        // Too small to benefit from gzip; drop it from the work list.
        console.log(entry,stat.size);
        files.splice(i,1);
        i--;
      }else if(path.parse(entry).ext==='.gz'){
        // Already compressed; drop it.
        files.splice(i,1);
        i--;
      }else{
        compressFile(entry,compress);
      }
    }
    console.log(files.length);
  });
}
As I said before, I attempted to run 1695 files through this and received 673 errors, so it's running out of file descriptors somewhere around 1000 files being zipped.
Update: from my new understanding of how file descriptors relate to the OS, I see that my questions 1, 3, and 4 don't apply to Node.js. However, I'm still wondering about 2 and 5: how many file descriptors does zlib use, and is there a way to monitor file-descriptor usage?