I'm using the below code to reset a password on a linux box using nodejs and ssh2 module:
// FILE * ./workflow/ssh.js
// Question code: tries to reset a user's password over SSH and return
// the remote exit code directly from the function.
var Client = require('ssh2').Client;
// NOTE(review): one module-level connection is shared by every call.
var conn = new Client();
// var opts = require('optimist')
// .options({
// user: {
// demand: true,
// alias: 'u'
// },
// }).boolean('allow_discovery').argv;
// Definition of reset password;
// Resets `user`'s password on `host`, authenticating as loginUser/loginPassword.
var resetPassword = function(user, host, loginUser, loginPassword){
var command = 'echo -e "linuxpassword\nlinuxpassword" | passwd '+ user;
conn.on('ready', function() {
console.log('Client :: ready');
conn.exec(command, function(err, stream) {
// NOTE(review): throwing inside an async callback cannot reach the caller.
if (err) throw err;
stream.on('close', function(code, signal) {
console.log('Stream :: close :: code: ' + code + ', signal: ' + signal);
conn.end();
// NOTE(review): this returns from the 'close' handler only -- the value
// never becomes the return value of resetPassword itself.
return(code);
}).on('data', function(data) {
console.log('STDOUT: ' + data);
}).stderr.on('data', function(data) {
console.log('STDLOG: ' + data);
});
});
}).connect({
host: host,
port: 22,
username: loginUser,
password: loginPassword
});
};
exports.resetPassword = resetPassword;
I'm calling the resetPassword function from another module, say test.js, as below.
var ssh = require('./workflow/ssh.js');
// NOTE(review): resetPassword returns before the SSH exchange finishes,
// so `result` is always undefined here.
result = ssh.resetPassword('test122', '192.168.0.101', 'root' , 'password');
console.log(result)
But the console.log says "undefined". Tried using the process.nextTick, but no luck. Please help.
Welcome to the world of developing with asynchronous operations in node.js. This is a very common mistake when initially learning node.js (and a common question here on StackOverflow). An asynchronous operation completes some indeterminate time in the future and meanwhile, the rest of your code continues to run. In fact your resetPassword() function likely returns BEFORE the stream has finished and before the resetPassword result is available.
Thus you cannot return the result from the resetPassword function directly because the function returns before the result is ready. Instead, you will have to pass in a callback and get the result when the callback is called and the caller of this function will have to use the callback rather than a directly returned result. You can implement that like this:
// FILE * ./workflow/ssh.js
var Client = require('ssh2').Client;

// Definition of reset password.
// Resets `user`'s password on `host` over SSH and reports the remote exit
// code through the node-style callback(err, code).
// A fresh Client is created per call so repeated/concurrent calls do not
// clobber a shared module-level connection.
var resetPassword = function(user, host, loginUser, loginPassword, callback) {
    // NOTE: `user` is interpolated into a shell command; only pass trusted
    // values (a hostile username could inject shell syntax).
    var command = 'echo -e "linuxpassword\nlinuxpassword" | passwd ' + user;
    var conn = new Client();
    var done = false;

    // Ensure the callback fires exactly once, whichever event wins.
    function finish(err, code) {
        if (done) return;
        done = true;
        callback(err, code);
    }

    conn.on('ready', function() {
        console.log('Client :: ready');
        conn.exec(command, function(err, stream) {
            if (err) {
                conn.end();
                finish(err);
                return;
            }
            stream.on('close', function(code, signal) {
                console.log('Stream :: close :: code: ' + code + ', signal: ' + signal);
                conn.end();
                finish(null, code);
            }).on('data', function(data) {
                console.log('STDOUT: ' + data);
            }).stderr.on('data', function(data) {
                console.log('STDLOG: ' + data);
            });
        });
    }).on('error', function(err) {
        // Without this handler a failed connection would never invoke the
        // callback (and ssh2 would raise an unhandled 'error' event).
        finish(err);
    }).connect({
        host: host,
        port: 22,
        username: loginUser,
        password: loginPassword
    });
};
exports.resetPassword = resetPassword;
And, then you can use that like this:
var ssh = require('./workflow/ssh.js');

// The result arrives asynchronously through the node-style callback:
// err first, then the remote exit code.
ssh.resetPassword('test122', '192.168.0.101', 'root', 'password', function(err, code) {
    if (err) {
        return;
    }
    console.log(code);
});
P.S. From your original code, it also doesn't really help you to throw from within an async callback. The exception only goes into whatever node.js infrastructure triggered the async callback, it does not get back to your original code. That's why your code has also been changed to communicate the error via the callback.
It is a node.js convention that callbacks contain at least two arguments. The first argument is an error code which should be null if no error and an appropriate error code or error object if there is an error. The second argument (and any other arguments you want to have) then communicate a successful result. It is useful to follow this convention since it is both consistent with how node.js does it elsewhere and it is compatible with some mechanical error handling such as done when interfaces are modified to use promises.
The return value from the on close event is only returning out of the closure.
You need to provide either a callback hook or a result object as a parameter from which to retrieve the result in the signature of the resetPassword method you export from the module.
I know this answer comes late, as I have only recently studied Node.js. I am building a monitoring tool and using the ssh2 library of Node.js to get the tasks done. Capturing the value in a response for the API was troublesome, so I wrapped the operations in a Promise whose result I can capture in the API call. I hope the code below helps.
// Operations file that the API controller calls.
// Runs params.command over the (module-level) ssh2 connection `conn` and
// resolves with the command's accumulated stdout; rejects on exec failure.
var Poperation = (params) => {
    return new Promise((resolve, reject) => {
        let result = '';
        conn
            .on('ready', function () {
                conn.exec(params['command'], function (err, stream) {
                    if (err) {
                        // Must return: `stream` is unusable when exec failed,
                        // and the original fell through and crashed on it.
                        reject(err);
                        return;
                    }
                    stream
                        .on('close', function (code, signal) {
                            conn.end();
                            resolve(result);
                        })
                        .on('data', function (data) {
                            // Accumulate chunks -- large outputs arrive in
                            // several 'data' events, and keeping only the last
                            // chunk would drop everything before it.
                            result += data;
                        })
                        .stderr.on('data', function (data) {
                        });
                });
            })
            .connect({
                host: params['host'],
                port: params['port'],
                username: params['username'],
                password: params['password'],
            });
    });
};
Controller file for operations
const express = require('express');
const router = express.Router();
const Joi = require('joi');
const opers = require('../operations/ssh_test');
module.exports = router;

router.post('/operations', operations);

// Awaits the SSH operation so the log line runs after the response is sent,
// and failures are reported as HTTP 500 instead of a 200 carrying the error.
async function operations(req, res, next) {
    try {
        const data = await opers.Poperation(req.body);
        res.send(data);
    } catch (err) {
        res.status(500).send(err);
    }
    console.log('finished');
}
Related
How to have express.post to wait for a response from a socket created using net client? net here means the package require("net")
Architecture
browser <-> express with built in unix domain client <-> some unix domain server
My express server serves the front end as usual. Sometimes, it gets information from other services running on the same machine. These services are outside my control. I use net to create a client to connect with them and this works fine.
Tried approaches
All the usual answers about express.post using, for example, a promise, are not applicable because they wait for a reply on the function you call.
For example. fs.readFile will return something related to the completion of fs.readFile and this can easily be promisified using the examples on the internet.
Node.js promise request return
https://www.intuz.com/blog/promises-in-node-js-with-examples
https://dzone.com/articles/how-to-interact-with-a-database-using-promises-in
https://www.turtle-techies.com/using-promises-with-express-js/
The problem
However, with net, the reply comes from somewhere else.
client.write is the method for sending. This method just returns true, not the response from the server.
We become aware of the response at the client.data event and I can't figure how to get a promise to watch the client.data event to fulfill the promise.
My best attempt
Using turtle-techies as an example (all above are similar though differing syntax):
// Promise wrapper around fs.readFile so callers can use .then()/await
// instead of the error-first callback API. Resolves with the file's
// UTF-8 contents; rejects with the underlying fs error.
const readFilePromise = (filename) =>
    new Promise((resolve, reject) => {
        fs.readFile(filename, 'utf-8', (err, data) =>
            err ? reject(err) : resolve(data)
        );
    });
const app = express();

// Route handler: await the promisified read and send the contents back.
// (This only works because readFilePromise resolves with the data you
// need -- client.write does not.)
app.post('/get-file', async (req, res) => {
    try {
        const data = await readFilePromise('important_file.txt');
        res.end(data);
    } catch (err) {
        res.end(`could not serve data: ${err}`);
    }
});
Now I try rewriting the promisified function or method to suit net.client:
var net = require('net');
var socketName = '/tmp/dp.sock';
var client = net.createConnection(socketName);
client.on("connect", function() {
client.write('client on connect');
});
client.on("data", function(data) {
console.log("client.on data: ", data.toString());
//I don't see a way to get a handle here on express.post req,res so
// I think the answer lies elswhere. Anyway, it would be messy to put it here.
});
client.on('close', function() {
console.log('client on close');
});
//========= end of client building ===========
//========= promisified client.write =========
// NOTE(review): this wrapper is intentionally incomplete -- it writes but
// never calls resolve/reject, because the reply arrives on the separate
// 'data' event above. That disconnect is exactly what the question asks about.
const readFilePromise = (filename) => new Promise((resolve, reject) => {
client.write('my message'); //This just returns true which I don't care about
//now what to do here to become aware of client.ondata and handle its data?
});
I have a feeling the answer is in front of my face.
The answer is here: https://techbrij.com/node-js-tcp-server-client-promisify
In principle, wrap the client creation and client.write in a new class in which you can then do any promisification you like.
In case the link breaks, here's what they say (modified by me to be for unix domain socket; techbrij.com did tcp)
// in a module.js file
const net = require('net');
const DEF_socketName = '/tmp/echo.sock';

// Wraps a unix-domain-socket client so request/response pairs can be
// consumed as promises: sendMessage() resolves with the next 'data' event.
class Client {
    constructor(socketName = DEF_socketName) {
        // createConnection is a factory function, not a constructor --
        // calling it with `new` was misleading (and the old
        // `socketName || DEF_socketName` fallback was dead code, since the
        // default parameter already guarantees a value).
        this.socket = net.createConnection(socketName);
        this.address = socketName;
        this.init();
    }

    init() {
        const client = this;
        client.socket.on('connect', function () {
            client.socket.write('hello from unix client!');
        });
        client.socket.on('data', function (data) {
            console.log('client.on data: ', data.toString());
        });
        client.socket.on('close', function () {
            console.log('Connection closed');
        });
    }

    // Send one message and resolve with the next reply. `once` listeners
    // are used so repeated calls don't pile up permanent 'data'/'error'
    // handlers (the original added a fresh `on` listener per call, leaking
    // listeners and re-firing stale resolvers on later replies).
    sendMessage(message) {
        console.log('client.sendMessage: ' + message);
        const client = this;
        return new Promise((resolve, reject) => {
            const onError = (err) => reject(err);
            client.socket.once('error', onError);
            client.socket.once('data', (data) => {
                client.socket.removeListener('error', onError);
                resolve(data);
                if (data.toString().endsWith('exit')) {
                    client.socket.destroy();
                }
            });
            client.socket.write(message);
        });
    }
}
module.exports = Client;
Then in a client.js file:
const Client = require('./client_mod');
const client = new Client();

// Drive the echo conversation sequentially: each message waits for the
// previous reply before sending the next. The 'exit' reply itself is not
// logged, matching the original chain.
(async () => {
    try {
        let data = await client.sendMessage('A');
        console.log(`Received: ${data}`);
        data = await client.sendMessage('B');
        console.log(`Received: ${data}`);
        data = await client.sendMessage('C');
        console.log(`Received: ${data}`);
        await client.sendMessage('exit');
    } catch (err) {
        console.error(err);
    }
})();
I'm using the Bluebird promise library under Node.js, it's great! But I have a question:
If you take a look at the documentation of Node's child_process.exec and child_process.execFile you can see that both of these functions are returning a ChildProcess object.
So what's the recommended way to promisify such functions?
Note that the following works (I get a Promise object):
var Promise = require('bluebird');
// promisify turns the callback API into promise-returning *Async variants --
// but the returned promise replaces the original ChildProcess return value.
var execAsync = Promise.promisify(require('child_process').exec);
var execFileAsync = Promise.promisify(require('child_process').execFile);
But how can one get access to the original return value of the original Node.js functions? (In these cases I would need to be able to access the originally returned ChildProcess objects.)
Any suggestion would be appreciated!
EDIT:
Here is an example code which is using the return value of the child_process.exec function:
// Plain callback-style exec: the ChildProcess return value gives direct
// access to the stdout/stderr streams and lifecycle events.
var exec = require('child_process').exec;
var child = exec('node ./commands/server.js');
child.stdout.on('data', function(data) {
console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
console.log('stderr: ' + data);
});
child.on('close', function(code) {
console.log('closing code: ' + code);
});
But if I would use the promisified version of the exec function ( execAsync from above ) then the return value will be a promise, not a ChildProcess object. This is the real problem I am talking about.
I would recommend using standard JS promises built into the language over an additional library dependency like Bluebird.
If you're using Node 10+, the Node.js docs recommend using util.promisify which returns a Promise<{ stdout, stderr }> object. See an example below:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

// Run `ls` and print its output; a non-zero exit rejects and lands in catch.
async function lsExample() {
    try {
        const result = await exec('ls');
        console.log('stdout:', result.stdout);
        console.log('stderr:', result.stderr);
    } catch (e) {
        console.error(e); // should contain code (exit code) and signal (that caused the termination).
    }
}
lsExample();
Handle errors first from stderr.
It sounds like you'd like to return two things from the call:
the ChildProcess
a promise that resolves when the ChildProcess completes
So "the recommended way to promisify such functions"? Don't.
You're outside the convention. Promise returning functions are expected to return a promise, and that's it. You could return an object with two members (the ChildProcess & the promise), but that'll just confuse people.
I'd suggest calling the unpromisified function, and creating a promise based off the returned childProcess. (Maybe wrap that into a helper function)
This way, it's quite explicit for the next person who reads the code.
Something like:
var Promise = require('bluebird');
var exec = require('child_process').execFile;
// Wrap an already-started ChildProcess in a promise that settles when the
// process finishes: resolves with the exit code on 'exit', rejects with
// the error on 'error'.
function promiseFromChildProcess(child) {
    return new Promise(function (resolve, reject) {
        child.on('error', reject);
        child.on('exit', resolve);
    });
}
var child = exec('ls');

// Completion is observed via the promise...
promiseFromChildProcess(child).then(
    (result) => console.log('promise complete: ' + result),
    (err) => console.log('promise rejected: ' + err)
);

// ...while the live streams/events stay available on the ChildProcess.
child.stdout.on('data', (data) => console.log('stdout: ' + data));
child.stderr.on('data', (data) => console.log('stderr: ' + data));
child.on('close', (code) => console.log('closing code: ' + code));
If you're just wanting to promisify specifically child_process.exec() and child_process.execFile(), in recent node versions there is a better answer here.
Since Node v12 the built-in util.promisify allows access to the ChildProcess object in the returned Promise for built-in functions where it would have been returned by the un-promisified call. From the docs:
The returned ChildProcess instance is attached to the Promise as a child property.
This correctly and simply satisfies the need to access ChildProcess in the original question and makes other answers out of date providing that Node v12+ can be used.
Adapting the example (and concise style) provided by the questioner, access to the ChildProcess can be achieved like:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

// Since Node v12, the promisified exec attaches the underlying
// ChildProcess to the returned promise as a `child` property.
const promise = exec('node ./commands/server.js');
const child = promise.child;
child.stdout.on('data', function(data) {
console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
console.log('stderr: ' + data);
});
child.on('close', function(code) {
console.log('closing code: ' + code);
});
// i.e. can then await for promisified exec call to complete
// (top-level await requires an ES module or an enclosing async function)
const { stdout, stderr } = await promise;
Here's another way:
/**
 * Run a shell command and resolve with its trimmed stdout.
 * Rejects with the exec error (non-zero exit or spawn failure);
 * stderr is intentionally ignored.
 */
function execPromise(command) {
    return new Promise(function(resolve, reject) {
        exec(command, (error, stdout) => {
            if (error) {
                reject(error);
            } else {
                resolve(stdout.trim());
            }
        });
    });
}
Use the function:
// Promise-chain form: log the trimmed stdout, or the failure message.
execPromise(command)
    .then((result) => {
        console.log(result);
    })
    .catch((e) => {
        console.error(e.message);
    });
Or with async/await:
// async/await form: the resolved stdout lands in `result` (declared
// outside the try so it stays visible afterwards, like the original var),
// and a rejection is caught like a synchronous exception.
let result;
try {
    result = await execPromise(command);
} catch (e) {
    console.error(e.message);
}
There's probably not a way to do nicely that covers all use cases. But for limited cases, you can do something like this:
/**
 * Promisified child_process.exec.
 *
 * @param {string} cmd Shell command to run.
 * @param {Object} [opts] See child_process.exec node docs.
 * @param {stream.Writable} [opts.stdout] If defined, child process stdout will be piped to it.
 * @param {stream.Writable} [opts.stderr] If defined, child process stderr will be piped to it.
 *
 * @returns {Promise<{ stdout: string, stderr: string }>}
 */
function execp(cmd, opts = {}) {
    return new Promise((resolve, reject) => {
        const child = exec(cmd, opts,
            (err, stdout, stderr) => err ? reject(err) : resolve({
                stdout: stdout,
                stderr: stderr
            }));
        // Optionally mirror the child's stdio into caller-supplied streams.
        if (opts.stdout) {
            child.stdout.pipe(opts.stdout);
        }
        if (opts.stderr) {
            child.stderr.pipe(opts.stderr);
        }
    });
}
This accepts opts.stdout and opts.stderr arguments, so that stdio can be captured from the child process.
For example:
// Capture the child's stdio through custom writables that log each chunk.
const logStream = (log) =>
    new stream.Writable({
        write: (chunk, enc, next) => {
            log(chunk.toString(enc));
            next();
        }
    });

execp('ls ./', {
    stdout: logStream((text) => console.log(text)),
    stderr: logStream((text) => console.error(text))
}).then(() => console.log('done!'));
Or simply:
// Or pipe straight through to this process's own stdio.
execp('ls ./', { stdout: process.stdout, stderr: process.stderr })
    .then(() => console.log('done!'));
Just want to mention that there's a nice tool that will solve your problem completely:
https://www.npmjs.com/package/core-worker
This package makes it a lot easier to handle processes.
import { process } from "CoreWorker";
import fs from "fs";

// ready(ms): resolves once the process prints the given marker (or times out);
// death(): resolves when the process exits.
// NOTE(review): two `const result` declarations in the same scope would
// clash -- rename one when using both lines together.
const result = await process("node Server.js", "Server is ready.").ready(1000);
const result = await process("cp path/to/file /newLocation/newFile").death();
or combine these functions:
import { process } from "core-worker";

const simpleChat = process("node chat.js", "Chat ready");
setTimeout(() => simpleChat.kill(), 360000); // wait 6 minutes and close the chat
simpleChat.ready(500)
.then(console.log.bind(console, "You are now able to send messages."))
// NOTE(review): `::simpleChat.death` uses the abandoned bind-operator
// proposal; standard JS needs () => simpleChat.death() instead. The empty
// `() => /* handle err */` below is likewise not valid standard syntax.
.then(::simpleChat.death)
.then(console.log.bind(console, "Chat closed"))
.catch(() => /* handle err */);
Here are my two cents. Uses spawn which streams the output and writes to stdout and stderr. The error and standard output is captured in buffers and are returned or rejected.
This is written in TypeScript; feel free to remove the typings if you are using JavaScript:
import { spawn, SpawnOptionsWithoutStdio } from 'child_process'

/**
 * Run a command with spawn, mirroring its output to this process's
 * stdout/stderr while buffering it. Resolves with the buffered stdout;
 * rejects on spawn errors or any non-zero exit code.
 *
 * Note: the command is split on whitespace, so quoted arguments
 * containing spaces are not supported.
 */
const spawnAsync = async (
    command: string,
    options?: SpawnOptionsWithoutStdio
) =>
    new Promise<Buffer>((resolve, reject) => {
        const [spawnCommand, ...args] = command.split(/\s+/);
        const spawnProcess = spawn(spawnCommand, args, options);
        const chunks: Buffer[] = [];
        const errorChunks: Buffer[] = [];
        spawnProcess.stdout.on("data", (data) => {
            process.stdout.write(data.toString());
            chunks.push(data);
        });
        spawnProcess.stderr.on("data", (data) => {
            process.stderr.write(data.toString());
            errorChunks.push(data);
        });
        spawnProcess.on("error", (error) => {
            reject(error);
        });
        spawnProcess.on("close", (code) => {
            // Reject on ANY non-zero exit (the original only checked
            // code === 1, silently "succeeding" on e.g. 2 or 127), and
            // reject with an Error rather than a bare string.
            if (code !== 0) {
                reject(new Error(Buffer.concat(errorChunks).toString()));
                return;
            }
            resolve(Buffer.concat(chunks));
        });
    });
Just another example: you might run into issues when running multiple commands if you destructure into the same const names; you can rename them like this.
const util = require('util');
const exec = util.promisify(require('child_process').exec);

// Runs `ls` three times in sequence. Each destructuring renames the
// stdout/stderr properties so the consts from earlier calls don't clash.
async function runCommands() {
    try {
        const { stdout, stderr } = await exec('ls');
        console.log('stdout:', stdout);
        console.log('stderr:', stderr);

        const { stdout: secondOut, stderr: secondErr } = await exec('ls');
        console.log('stdoutTwo:', secondOut);
        console.log('stderrTwo:', secondErr);

        const { stdout: thirdOut, stderr: thirdErr } = await exec('ls');
        console.log('stdoutThree:', thirdOut);
        console.log('stderrThree:', thirdErr);
    } catch (e) {
        console.error(e); // should contain code (exit code) and signal (that caused the termination).
    }
}
runCommands();
Here's mine. It doesn't deal with stdin or stdout, so if you need those then use one of the other answers on this page. :)
// promisify `child_process`
// Resolve with the exit code on a clean (code 0) exit; reject with the
// error/code otherwise.
this.promiseFromChildProcess = function (child) {
    return new Promise((resolve, reject) => {
        child.addListener('error', (code, signal) => {
            console.log('ChildProcess error', code, signal);
            reject(code);
        });
        child.addListener('exit', (code, signal) => {
            if (code !== 0) {
                console.log('ChildProcess error', code, signal);
                reject(code);
                return;
            }
            resolve(code);
        });
    });
};
As far as my understanding goes this is a stream so it is constantly streaming values to the Oracle database.
I'm wondering if I can do a timeout function to wait about 3 seconds before sending again.
var net = require('net');
var fs = require('fs');
var oracledb = require('oracledb');
var dbConfig = require('./dbconfig.js');
var client = new net.Socket();
// connect() is asynchronous; the log below runs before the socket is
// actually connected (hence "most likely").
client.connect(8080, "192.168.0.7");
console.log("Client most likely connected...");
oracledb.getConnection(
{
user : dbConfig.user,
password : dbConfig.password,
connectString : dbConfig.connectString
},
function(err, connection) {
if (err) {
console.error(err.message);
return;
}
// Every chunk arriving on the socket triggers one INSERT + COMMIT.
client.on('data', function (data) {
var weight_data = Number(data);
console.log('Data: ' + data);
connection.execute("INSERT INTO UNI_SCRAP_SCALE(WEIGHT) VALUES (:weight)", [weight_data], function (err, result) {
// NOTE(review): throwing inside this async callback cannot be caught
// by surrounding code.
if (err) throw err;
console.log("Rows inserted: " + result.rowsAffected);
console.log('Data received from Db:\n');
console.log(result);
connection.commit(
function (err) {
console.log('Done')
});
});
});
});
// NOTE(review): this trailing `});` has no matching opener -- as pasted,
// the snippet does not parse.
});
// client.destroy();
There is a function to set timeout in JavaScript, setTimeout(), here is an example :
// Defer the enclosed work by ~3 seconds. (The original was missing the
// parameter parentheses after `function`, which is a syntax error.)
setTimeout(function () {
    // place your code here
}, 3000); //number of millisecond before executing code
Your code will be executed after 3 seconds.
Documentation :
https://www.w3schools.com/jsref/met_win_settimeout.asp
JavaScript:
// Schedule the continuation to run after a 3-second delay.
setTimeout(() => {
    // code you want to wait for here
}, 3000);
I'm new to NodeJS, and JS in general (mostly a PHP and C# guy), so I could really use some help with this function below.
The goal is to receive a JSON payload, connect to MySQL, and then return the results of the query in a JSON response. I've got it connecting to the DB, I can read the JSON data that it receives (event.fieldname) but for some reason it's not sending back the JSON for the applicant_data variable.
Do I just have the variable in the wrong location? When I run the code below I just get back "{}" as the returned data.
Thanks in advance for the help!
NodeJS Code:
exports.handler = function(event, context, callback) {
console.log('Starting:');
console.log("Request received:\n", JSON.stringify(event));
var mysql = require('mysql');
var jsonconnection = mysql.createConnection({
host: 'servername',
user: 'username',
password: 'password',
database: 'database'
});
jsonconnection.connect();
console.log('Connected to MySQL:');
// NOTE(review): string-built SQL -- event.applicant_id is interpolated
// verbatim; a parameterized query ('... WHERE applicant_id = ?') is safer.
jsonconnection.query('SELECT applicant_id FROM customers WHERE applicant_id = \'' + event.applicant_id + '\'',
function(err,res){
// NOTE(review): throwing inside this async callback cannot reach the
// Lambda runtime's normal error path.
if(err) throw err;
console.log('Row Details:', JSON.stringify(res));
var applicant_data = {
// NOTE(review): `res` is an array of rows, so res.applicant_id is
// undefined -- this is why the function returns "{}".
applicant_id : res.applicant_id
};
jsonconnection.end();
context.succeed(applicant_data);
})
};
I am not familiar with AWS, but base on http://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html, following code may work.
exports.handler = function (event, context, callback) {
console.log('Starting:');
console.log("Request received:\n", JSON.stringify(event));
var mysql = require('mysql');
var jsonconnection = mysql.createConnection({
host: 'servername',
user: 'username',
password: 'password',
database: 'database'
});
// Move applicant_data outside of query as it will be needed at the end in callback
var applicant_data = {};
jsonconnection.connect();
console.log('Connected to MySQL:');
jsonconnection.query('SELECT applicant_id FROM customers WHERE applicant_id = \'' + event.applicant_id + '\'',
function (err, res) {
if (err) throw err;
console.log('Row Details:', JSON.stringify(res));
applicant_data = {
// Only use first row of data
applicant_id: res[0].applicant_id;
};
});
// Move connection end out side of query
jsonconnection.end();
// This should return your data, in JSON form
callback(null, JSON.stringify(applicant_data));
// I assume this is the correct use for succeed
context.succeed();
};
exports.handler = async (event) => {
let jsonResponse = {"hello":"world"}
const response = {
statusCode: 200,
body: JSON.stringify(jsonResponse),
};
return response;
};
How can I call functions one-by-one after completing? I have code like that, which must be executed continuously.
For example, I have:
var Connection = require('ssh2');
var c = new Connection();
// Lifecycle logging for each stage of the SSH session.
c.on('connect', function() {
console.log('Connection :: connect');
});
c.on('ready', function() {
console.log('Connection :: ready');
// First command; its output must be parsed to build the follow-up request.
c.exec('uptime', function(err, stream) {
if (err) throw err;
stream.on('data', function(data, extended) {
console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ')
+ data);
});
stream.on('end', function() {
console.log('Stream :: EOF');
});
stream.on('close', function() {
console.log('Stream :: close');
});
stream.on('exit', function(code, signal) {
console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
c.end();
});
});
});
c.on('error', function(err) {
console.log('Connection :: error :: ' + err);
});
c.on('end', function() {
console.log('Connection :: end');
});
c.on('close', function(had_error) {
console.log('Connection :: close');
});
c.connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
privateKey: require('fs').readFileSync('/here/is/my/key')
});
In what way can I call several functions one-by-one? When the connection to the ssh2 server is established, and the first request is executed, I have to parse the output of this request and send another request with data from the previous request.
I have functions which can do all this; the only problem is how to make another request after the first one is completed.
You can use nested callbacks. In the 'end' handler of each request, create a new connection and setup its end handler. Nest until needed. Similar to:
c.on('end', function() {
    // Once `c` is done, spin up the next connection in the chain.
    var followUp = new Connection();
    followUp.on('end', function() {
        // nest further steps here as needed.
    });
});
Another approach is to use some library for promises, like Q, which clears up the syntax a lot.
You can also take a look at this question: Understanding promises in node.js
From the sounds of it, you might want to look into a control flow node module such as async. Also you shouldn't need to tear down the connection just to execute another command on the same server.