I am implementing a REST API in Node.js that uses the ssh2 package to tunnel to a remote MySQL database. Here is the code I am using to connect over SSH and execute a query:
executeQuery: function (query, callback) {
    var sshConnected = false;
    // ssh2's Client must be instantiated with `new`
    var connection = new (require('ssh2').Client)();

    // Local TCP server that forwards each incoming socket through the SSH
    // tunnel to MySQL (127.0.0.1:3306 on the remote side)
    var server = require('net').createServer(function (sock) {
        if (!sshConnected) return sock.end();
        connection.forwardOut(
            '127.0.0.1',
            sock.remotePort,
            '127.0.0.1',
            3306,
            function (err, stream) {
                if (err) return sock.end();
                stream.pipe(sock).pipe(stream);
            });
    });

    connection.on('ready', function () {
        console.log('Client :: ready');
        sshConnected = true;
        server.listen(3306);

        // config.dbOrg connects through the local end of the tunnel
        var sqlconn = require('mysql').createConnection(config.dbOrg);
        sqlconn.connect(function (conError) {
            if (!conError) {
                console.log(query);
                sqlconn.query({ sql: query.sql, values: query.values }, function (qError, results, fields) {
                    callback(qError, results, fields);
                });
            } else {
                callback(response.dbError, null, null);
            }
            server.close();
        });
    }).connect(config.sshConfig);
}
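
For reference, config.sshConfig is a standard ssh2 connect config and config.dbOrg is a standard mysql connection config pointing at the local end of the tunnel. Roughly like this (all values below are placeholders, credentials omitted):

var config = {
    sshConfig: {            // options accepted by ssh2's connect()
        host: 'ssh.example.com',
        port: 22,
        username: 'user',
        privateKey: require('fs').readFileSync('/path/to/key')
    },
    dbOrg: {                // options accepted by mysql.createConnection()
        host: '127.0.0.1',  // local end of the tunnel
        port: 3306,
        user: 'dbuser',
        password: 'secret',
        database: 'mydb'
    }
};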
However, as you can see, this is not very efficient. For every new query I establish the SSH connection again, start the local TCP server, and stop it after the query completes. Each query takes roughly 3-4 seconds, which is really bad. Any approach that keeps both the SSH connection and the TCP server alive so queries can be executed as they arrive would be of great help. Thanks!
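
Here is a rough, untested sketch of what I have in mind: set up the SSH tunnel and local TCP server once at startup, then reuse a mysql pool for every query. The structure and names here are just my own guess at the approach, not working code:

// Assumed sketch: establish the SSH tunnel and local TCP server once,
// then reuse a mysql connection pool for every query.
var Client = require('ssh2').Client;
var net = require('net');
var mysql = require('mysql');

var ssh = new Client();
var pool = null;

ssh.on('ready', function () {
    // Local TCP server that forwards every connection through the SSH
    // tunnel to the remote MySQL server (127.0.0.1:3306 on the remote side).
    net.createServer(function (sock) {
        ssh.forwardOut('127.0.0.1', sock.remotePort, '127.0.0.1', 3306,
            function (err, stream) {
                if (err) return sock.end();
                stream.pipe(sock).pipe(stream);
            });
    }).listen(3306, function () {
        // The pool connects to the local end of the tunnel and is reused
        // for all subsequent queries.
        pool = mysql.createPool(config.dbOrg);
    });
}).connect(config.sshConfig);

module.exports = {
    executeQuery: function (query, callback) {
        if (!pool) return callback(new Error('tunnel not ready'), null, null);
        pool.query({ sql: query.sql, values: query.values }, callback);
    }
};

Would something along these lines be the right direction, or is there a better pattern for keeping the tunnel and server alive?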