I'm trying to make a Node https.request to a Docker daemon on a remote machine. Everything works fine when I connect to daemons with around 40 containers (running or not), or when I limit the request so that only around 40 containers are returned.
However, with no limit (all containers should be returned, basically a docker ps -a command), the connection seems to end before the full body has been returned. What I'm wondering is whether this has to do with how much Docker can return in a single response, or whether I need to tell my request to keep the connection alive longer for the larger response.
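In case it helps, the path I'm requesting is roughly like this (simplified from my actual code; all and limit are the query parameters on the Docker remote API's /containers/json endpoint, and dropping limit is what triggers the problem):

// No limit: should behave like docker ps -a
var path = '/containers/json?all=1';
// With a limit of ~40 the full body comes back fine
// var path = '/containers/json?all=1&limit=40';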
Here's my code so far:
var https = require('https');
var fs = require('fs');

var options = {
  hostname: server,
  port: port,
  path: path,
  method: 'GET',
  key: fs.readFileSync('/path/to/**-client-key.pem'),
  cert: fs.readFileSync('/path/to/**-client-cert.pem'),
  rejectUnauthorized: false
};

var request = https.request(options, function(result) {
  console.log("statusCode: ", result.statusCode);
  console.log("headers: ", result.headers);

  // Accumulate the body as chunks arrive.
  var resultBody = '';
  result.on('data', function(d) {
    resultBody += d;
  });

  // Hand the full body to the caller once the response ends.
  result.on('end', function() {
    callback(resultBody);
  });
});

request.on('error', function(e) {
  console.error(e);
});

request.write('data\n');
request.write('data\n');
request.end();
What happens is that the result.on('end') handler is called before all of the data has been received.
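To check whether the body really is incomplete, I'm thinking of something along these lines (just a sketch reusing the same options object; I'm assuming Docker sends a content-length header, which it may not if the response uses chunked encoding):

var https = require('https');

var request = https.request(options, function(result) {
  // May be NaN if the response is chunked and has no content-length header.
  var expected = parseInt(result.headers['content-length'], 10);
  var received = 0;
  var chunks = [];

  result.on('data', function(d) {
    received += d.length;      // d is a Buffer, so .length is the byte count
    chunks.push(d);
  });
  result.on('aborted', function() {
    console.error('response aborted after ' + received + ' bytes');
  });
  result.on('end', function() {
    console.log('end after ' + received + ' bytes, expected ' + expected);
    callback(Buffer.concat(chunks).toString());
  });
});
request.on('error', console.error);
request.end();

If 'aborted' fires, or received stays below expected, that would point to the connection being cut short rather than Docker simply sending a smaller body.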