I'm very new to Node and Lambda, so I'm probably making some dumb mistakes. I've created a Node.js AWS Lambda function which grabs a file from an S3 event. If the file is gzipped, the function decompresses it, uploads it to an SFTP server, then creates and uploads a .sig file to the same SFTP server. It works when everything goes well, but it doesn't seem to correctly trigger errors.
The SFTP commands are chained with .then(), so I would expect any error to reject the chain and skip the subsequent .then() handlers. For example, if I turn off my SFTP server, the SFTP client generates a timeout error, but Lambda never sees an error in the callback — only success. The log does show the error printed to the console, but the function appears to invoke the success callback anyway, after running the rest of the .then() items. Is the connection not correctly returned as a promise?
Sample log:
...
Starting SFTP
Connected to sftp, starting sftp put for lastsub2.dat file.
{ Error: Timed out while waiting for handshake
at Timeout._onTimeout (/var/task/node_modules/ssh2/lib/client.js:687:19)
at ontimeout (timers.js:386:14)
at tryOnTimeout (timers.js:250:5)
at Timer.listOnTimeout (timers.js:214:5) level: 'client-timeout' } 'Error occured during sftp relay.'
END
Example code:
// Logged once per cold start, when the module is first loaded.
console.log('Loading function');
// AWS SDK v2; apiVersion pins the S3 API contract used by this client.
const aws = require('aws-sdk');
const s3 = new aws.S3({
apiVersion: '2006-03-01'
});
const zlib = require('zlib');
const fs = require("fs");
// Connection settings for the downstream SFTP server.
// NOTE(review): credentials are hard-coded placeholders — move these to
// environment variables or a secrets store before deploying.
const connSettings = {
host: 'xxx',
port: '22',
username: 'xxx',
password: 'xxx'
};
exports.handler = function (event, context, callback) {
console.log('Received event:', JSON.stringify(event, null, 2));
console.log('Bucket Name: ' + event.Records[0].s3.bucket.name);
console.log('Object Key: ' + decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' ')));
const bucket = event.Records[0].s3.bucket.name;
const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: bucket,
Key: key,
};
s3.getObject(params, (err, data) => {
if (err) {
console.log(err);
const message = 'Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.';
console.log(message);
callback(message);
} else {
if (data.ContentType == 'application/x-gzip') {
console.log('CONTENT TYPE is application/x-gzip');
var dataStream = s3.getObject(params).createReadStream().pipe(zlib.Unzip());
console.log('Created unzip datastream');
console.log('Starting SFTP');
let Client = require('ssh2-sftp-client');
let sftp = new Client();
sftp.connect(connSettings)
.then(console.log('Connected to sftp, starting sftp put for ' + key.replace('.gz', '.dat') + ' file.'))
.then(() => {
console.log('Finished sftp put for ' + key.replace('.gz', '.dat') + ' file.');
return sftp.put(dataStream, key.replace('.gz', '.dat'), true, 'utf-8');
}).then(() => {
var sigFileName = key.replace('.gz', '.sig');
var sigStream = fs.createWriteStream('/tmp/' + sigFileName);
sigStream.end();
console.log('Created ' + sigFileName + ' sig file.');
var readStream = fs.createReadStream('/tmp/' + sigFileName);
console.log('Uploaded ' + sigFileName + ' sig file.');
return sftp.put(readStream, sigFileName, true, 'utf-8');
}).then(() => {
console.log('Ended sftp connection.');
return sftp.end();
})
.then(callback(null, 'Success'))
.catch ((err) => {
console.log(err, 'Error occured during sftp relay.');
callback('Error', err);
});
} else {
callback(null, 'Uploaded file not in gzip format, will not be processed.');
}
}
});
};