I'm dealing with a strange issue: while a file is uploading to the server, I'm not able to make any other POST requests. The error is thrown by nginx, which logs the following (nginx error log):
10855 upstream prematurely closed connection while reading response header from upstream, client
My assumption is that since the upload uses a read stream, the Node.js app somehow 'locks' into a read-stream state and won't accept any other POST requests that would interfere with that stream. However, that's just a wild guess.
Any thoughts?
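To check that guess, something like the following could be dropped into the app as a quick diagnostic (it's not part of the real code): if the heartbeat keeps printing during an upload, the event loop isn't actually blocked by the stream.

// Quick diagnostic sketch (not in the app): if the upload stream really
// 'locked' the process, this heartbeat would stop printing mid-upload.
setInterval(function () {
  console.log('event loop alive:', new Date().toISOString());
}, 1000);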
Nginx config
server {
    listen 80;
    underscores_in_headers on;
    server_name myserver.com;

    location / {
        client_max_body_size 1000M;
        proxy_pass http://localhost:3000;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}
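In case the defaults are relevant: none of the proxy timeouts are set explicitly, so nginx should be using its 60-second defaults. My understanding is they could be raised for long uploads with something like this (not currently in the config):

# Not in my current config; directives I understand to govern long uploads:
proxy_read_timeout 300s;    # how long nginx waits for a response from Node
proxy_send_timeout 300s;    # how long nginx waits while forwarding the body
client_body_timeout 300s;   # time allowed between reads of the client body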
Server code
var Busboy = require('busboy');

module.exports.upload = function (req, res) {
    var busboy = new Busboy({ headers: req.headers });

    busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
        // guid and hash are defined elsewhere in the module (omitted here)
        pipetoS3(file, guid + '_' + filename, mimetype, hash, function (err, result) {
            if (err) {
                console.log(err);
                res.statusCode = 500;
                return res.end();
            }
            console.log("success!");
            res.statusCode = 200;
            res.end();
        });
    });

    // Feed the incoming request into busboy so the 'file' event fires
    req.pipe(busboy);
}
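For context, the handler is wired up roughly like this (a simplified sketch assuming Express; the actual paths and file names differ):

var express = require('express');
// Hypothetical path; the real controller module lives elsewhere
var uploads = require('./controllers/uploads');

var app = express();
app.post('/upload', uploads.upload);
app.listen(3000); // matches the proxy_pass target above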
var AWS = require('aws-sdk');
var s3Stream = require('s3-upload-stream')(new AWS.S3());

const pipetoS3 = function (readStream, key, mimetype, hash, callback) {
    //var compress = zlib.createGzip();
    var upload = s3Stream.upload({
        Bucket: "mybucket",
        Key: hash + '/' + key,
        ACL: "public-read",
        ContentType: mimetype
    });

    // Optional configuration: 20 MB parts, up to 5 uploaded concurrently
    upload.maxPartSize(20971520); // 20 MB
    upload.concurrentParts(5);

    // Handle errors and report them back to the caller.
    upload.on('error', function (error) {
        console.log(error);
        callback(error);
    });

    upload.on('uploaded', function (details) {
        console.log('success!');
        callback(null, details);
    });

    // Pipe the incoming file stream up to S3 (compression currently disabled).
    //readStream.pipe(compress).pipe(upload);
    readStream.pipe(upload);
}
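For completeness, if the commented-out gzip step were re-enabled, my understanding is the S3 object would also need its ContentEncoding set so clients decompress it transparently, roughly:

var zlib = require('zlib');

// Sketch only (compression is disabled above): gzip the stream before S3
// and mark the object so clients know the body is gzip-compressed.
var compress = zlib.createGzip();
var upload = s3Stream.upload({
    Bucket: "mybucket",
    Key: hash + '/' + key,
    ACL: "public-read",
    ContentType: mimetype,
    ContentEncoding: "gzip"
});
readStream.pipe(compress).pipe(upload);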