Maintain access to console while running Node.js program - node.js

Can I send commands to Node.js process running a .js script? Currently I just run node and paste my code into console. It's not very convenient.

You can start a repl server (that has access to the script's scope) like this:
var repl = require('repl'),
    net = require('net');

var REPL_PORT = 2323;

var replServer = net.createServer(function(sock) {
  repl.start({
    prompt: 'myrepl> ',
    input: sock,
    output: sock,
    eval: function(cmd, context, filename, callback) {
      var ret, err;
      try {
        ret = eval(cmd);
      } catch (e) {
        err = e;
      }
      if (err)
        callback(err);
      else
        callback(null, ret);
    }
  }).on('exit', function() {
    sock.end();
  });
});

replServer.on('error', function(err) {
  console.log('REPL error: ' + err);
});
replServer.on('close', function(had_err) {
  console.log('REPL shut down ' + (had_err ? 'due to error' : 'gracefully'));
});
replServer.on('listening', function() {
  console.log('REPL listening on port ' + REPL_PORT);
});
replServer.listen(REPL_PORT, '127.0.0.1');
Then just telnet to localhost on port 2323 and you'll get a REPL prompt where you can poke at the variables and functions defined in your script.
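If you don't have telnet handy, a minimal Node client can do the same job by piping your terminal to the socket. This is just a sketch (the file name repl-client.js is made up; it assumes the host and port used above):
// repl-client.js -- connect a terminal to the in-process REPL started above
var net = require('net');

var sock = net.connect(2323, '127.0.0.1', function() {
  console.log('Connected to REPL, type commands (Ctrl+C to quit)');
});

process.stdin.pipe(sock);   // everything you type goes to the remote REPL
sock.pipe(process.stdout);  // REPL output is echoed back to your terminal

sock.on('close', function() {
  process.exit(0);
});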

Related

run a few node.js web servers from within a node.js application

I would like to control a few web sites using a UI to start and stop them, changing the ports that the different web servers listen on.
PM2 is a command-line utility to manage node sites, but it does not have a user interface that I can supply to my customer.
How can I run a node.js web site from within another node.js web application?
The following node.js code does not seem to do the job.
const { exec } = require('child_process');
....
exec('node app.js', (err, stdout, stderr) => {
  if (err) {
    console.log(`err: ${err}`);
    return;
  }
  // the *entire* stdout and stderr (buffered)
  console.log(`stdout: ${stdout}`);
  console.log(`stderr: ${stderr}`);
});
Note: regular Linux commands, instead of the 'node app.js' command, work as expected.
I got the following code to work, in case you want to do the same:
This is the code on the server that will spawn a new web server.
const { spawn } = require('child_process');

app.get('/start', (req, res) => {
  var node = spawn('node', ['app.js &'], { shell: true });
  node.stdout.on('data', function (data) {
    console.log('stdout: ' + data.toString());
  });
  node.stderr.on('data', function (data) {
    console.log('stderr: ' + data.toString());
  });
  node.on('exit', function (code) {
    console.log('child process exited with code ' + code.toString());
  });
  // Notify client
  res.status(200).send({});
});
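Since the question also asks about stopping the sites, a possible companion route could keep a reference to each spawned child and kill it on demand. This is only a sketch (the children map and the /stop/:pid route are illustrative, not from the original post, and it assumes the child is spawned without a shell wrapper so its pid can be killed directly):
const children = {}; // pid -> ChildProcess, filled in the /start handler: children[node.pid] = node;

app.get('/stop/:pid', (req, res) => {
  const child = children[req.params.pid];
  if (!child) return res.status(404).send({ error: 'no such child process' });
  child.kill();                          // sends SIGTERM by default
  delete children[req.params.pid];
  res.status(200).send({ stopped: Number(req.params.pid) });
});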
The simplest alternative would be to keep a collection of ExpressJS instances and create/destroy them as needed within NodeJS:
const express = require("express");
var app1 = express();
app1.use("/", function(req, res){ res.send("APP 1"); });
var app2 = express();
app2.use("/", function(req, res){ res.send("APP 2"); });
// start them
var server1 = app.listen(9001);
var server2 = app.listen(9002);
// close the first one
server1.close();
// port 9001 is closed
// port 9002 is still listening
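To make the "collection of instances" idea concrete, a hedged sketch could key the running servers by port so they can be started and stopped on demand (servers, startSite and stopSite are illustrative names, not an existing API):
const express = require("express");

const servers = {}; // port -> http.Server

function startSite(port, handler) {
  if (servers[port]) return servers[port];   // already running on that port
  const app = express();
  app.use("/", handler);
  servers[port] = app.listen(port);
  return servers[port];
}

function stopSite(port) {
  if (!servers[port]) return;
  servers[port].close();                     // stop accepting new connections
  delete servers[port];
}

// usage
startSite(9001, (req, res) => res.send("APP 1"));
startSite(9002, (req, res) => res.send("APP 2"));
stopSite(9001);                              // 9001 closed, 9002 still listening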
However, if you need to spawn independent processes, you could have:
const { spawn } = require("child_process");
// Prevent Ctrl+C from killing the parent process before the children exit
process.on("SIGINT", () => {
console.log("Terminating the process...");
if (runningProcesses > 0) {
setTimeout(() => process.exit(), 3000);
} else {
process.exit();
}
});
function asyncSpawn(command, parameters = []) {
return new Promise((resolve, reject) => {
runningProcesses++;
console.log(command, ...parameters, "\n");
spawn(command, parameters, {
stdio: "inherit"
})
.on("close", code => {
if (code)
reject(new Error(command + " process exited with code " + code));
else resolve();
})
.on("exit", error => {
runningProcesses--;
if (error) reject(error);
else resolve();
});
});
}
And launch new processes like this:
asyncSpawn("node", ["server.js", "--param1"])
.then(() => console.log("DONE"))
.catch(err => console.error(err));
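If several sites have to be launched together, the same helper combines naturally with Promise.all. A small sketch, reusing asyncSpawn from above (the script paths are made up):
Promise.all([
  asyncSpawn("node", ["site-a/app.js"]),
  asyncSpawn("node", ["site-b/app.js"])
])
  .then(() => console.log("all child processes exited"))
  .catch(err => console.error(err));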

nodejs-serialport => RE-Establish connection to port after closed

As described in my last question (SerialPort 'close' event never fires), I was unable to detect whether the COM port was disconnected, so I created my own way to detect it.
I created a timestamp and check it with setInterval() every second to see if the port is still connected.
When it detects that the COM port is unplugged, I try to re-establish the connection or re-instantiate the port with SerialPort, as you'll see in the code below.
When it tries to reconnect, I get Error: Access denied.
Is there a way to refresh or clean the cache? I think the server still holds the connection when it isn't closed properly.
I've also tried port.close(), and it throws: Error: Port is not open.
var SerialPort = require('serialport');
var Readline = SerialPort.parsers.Readline; // Readline parser shipped with serialport

var comPort = '\\\\.\\COM7',
    lastDataTime,
    lastresult,
    count = 0,
    comStatus,
    error;
var port = new SerialPort(comPort, function (err) {
  if (err) {
    comStatus = false;
    return console.log('Error: ', err.message);
  }
});

const parser = port.pipe(new Readline());

port.on('open', function () {
  console.log('~Port is open.');
  parser.on('data', function (data) {
    comStatus = true;
    lastDataTime = Date.now();
    if (++count == 10) {
      count = 0;
      lastresult = data;
    }
  });
});

setInterval(function () {
  if (Date.now() - lastDataTime > 1000 || !comStatus) {
    comStatus = false;
    port.close();
    port = new SerialPort(comPort, function (err) {
      if (err) {
        error = 'Error: ' + err.message;
        return console.log(error);
      }
    });
  }
}, 1000);

app.get('/', function (req, res) {
  res.send((comStatus) ? lastresult : 'Disconnected - ' + error);
  console.log(lastresult);
})
Thanks!
As you can see in /node_modules/serialport/lib/serialport.js, the close event may not be emitted (unlike disconnect).
You can add console.log calls locally, like below, for simple debugging.
P.S. I tested it on Win7 x32; the close event is emitted.
SerialPort.prototype._disconnected = function(err) {
  this.paused = true;
  this.emit('disconnect', err);
  // add: console.log('1', this.closing);
  if (this.closing) {
    return;
  }
  // add: console.log('2', this.fd);
  if (this.fd === null) {
    return;
  }
  this.closing = true;
  if (process.platform !== 'win32') {
    this.readable = false;
    this.serialPoller.close();
  }
  // add: console.log('3');
  SerialPortBinding.close(this.fd, function(err) {
    // add: console.log('4', this._events.close.toString());
    this.closing = false;
    if (err) {
      debug('Disconnect close completed with error: ', err);
    }
    this.fd = null;
    this.emit('close'); // it's your target
  }.bind(this));
};
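Since disconnect fires even when close may not (as the excerpt above shows), one option is to react to it directly. A minimal sketch, assuming the port and comStatus variables from the question:
port.on('disconnect', function (err) {
  comStatus = false;
  console.log('Port disconnected: ' + (err ? err.message : 'unknown reason'));
  // re-create or re-open the port here, e.g. as in the reconnect example below
});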
Reconnect example
var SerialPort = require('serialport');
var port = new SerialPort('COM1', { autoOpen: false, baudRate: 9600 });

function open() {
  port.open(function (err) {
    if (!err)
      return;
    console.log('Port is not open: ' + err.message);
    setTimeout(open, 10000); // next attempt to open after 10s
  });
}

port.on('open', function() {
  function send() {
    if (!port.isOpen()) // note: in serialport 5.x, isOpen is a property, not a method
      return console.log('Port closed. Data is not sent.');
    port.write(123, function (err) {
      if (err)
        console.log('Error on write: ' + err.message)
      port.drain(() => console.log('DONE'));
    });
  }
  setInterval(send, 1000);
});

port.on('close', function () {
  console.log('CLOSE');
  open(); // reopen
});

port.on('data', (data) => console.log('Data: ' + data));
port.on('error', (err) => console.error('Error: ', err.message));

open(); // open manually
According to serialport.io:
The resume() method causes an explicitly paused, Readable stream to resume emitting 'data' events, switching the stream into flowing mode.
Simply put, when the port closes, the serialport library emits a close event:
serialport.on('close', function(error){
  if(error.disconnected === true){
    console.log("disconnected");
  }
});
which tells us whether the port was disconnected or not.
That means a disconnected port is not available to re-establish the connection, so you have to use the serialport.resume() method to re-enable the connection.
serialport.on('close', function(err){
  console.log("Port closed.");
  if(err.disconnected === true){
    console.log("Disconnected!");
    serialport.resume(function(e){
      reconnectDevice(); // Serial Port Initialization Function. It's your method to declare the serial port.
      console.log("Error on resuming port:", e);
    });
  }
});
After that, it will automatically switch COM ports and you won't get the 'Port Access denied' error.

synchronous actions in node js

I want to run these three actions in an ordered manner:
Opening the serial port
Listening to any changes
Sending commands via serial port
The problem is that, because of the asynchronicity of Node.js, sometimes data is sent before the port starts listening, so this data is lost. Here is my sample code. How can I solve this problem?
var SerialPort = require("serialport").SerialPort
var serialPort = new SerialPort("COM27", {
baudrate: 57600
}, false); // this is the openImmediately flag [default is true]
//***1***open the serialport
serialPort.open(function (error) {
if ( error ) {
console.log('failed to open: '+error);
}
else {
console.log('open');
}
//***2***port is now listening
serialPort.on('data', function (data) {
console.log('data received: ' + data)
})
//***3***send command using serialport
serialPort.write("AJ+APN?\n", function(err, results) {
console.log('err ' + err);
console.log('results ' + results)
})
});
Create a promise for each of the tasks that should execute in series. Then it should be simple chaining of the promises in the required order.
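A minimal sketch of that promise-based ordering, assuming the callback-style serialPort object from the question (openPort and writeCommand are illustrative helper names):
function openPort(port) {
  return new Promise(function (resolve, reject) {
    port.open(function (err) { return err ? reject(err) : resolve(); });
  });
}

function writeCommand(port, cmd) {
  return new Promise(function (resolve, reject) {
    port.write(cmd, function (err, results) { return err ? reject(err) : resolve(results); });
  });
}

// 1) attach the data listener, 2) open, 3) write -- strictly in that order
serialPort.on('data', function (data) { console.log('data received: ' + data); });

openPort(serialPort)
  .then(function () { return writeCommand(serialPort, 'AJ+APN?\n'); })
  .then(function (results) { console.log('results ' + results); })
  .catch(function (err) { console.log('error: ' + err); });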
I don't know about the serialport module you're using, but if it's "standard" in the way it behaves then:
var SerialPort = require("serialport").SerialPort
var serialPort = new SerialPort("COM27", { baudrate: 57600 }, false);
//***1*** listen for events on port
serialPort.on('data', function (data) {
console.log('data received: ' + data);
//***2*** open the serialport
serialPort.open(function (error) {
if ( error ) {
console.log('failed to open: '+error);
} else {
console.log('open');
//***3*** send command using serialport -> after port opened
serialPort.write("AJ+APN?\n", function(err, results) {
console.log('err ' + err);
console.log('results ' + results)
});
}
});

Return value, or stop script

Hello,
I created an API in Windows Azure Mobile Services. In this API script I have a function that connects to another service. My problem is how to return a value, or stop executing my script, once I get a good answer from the service. process.exit(1) doesn't work.
function function1(item, response) {
  var buf = '';
  var net = require('net');
  var HOST = 'xxxx.xxxx.xxxx.xxxx';
  var PORT = xxx;
  var client = new net.Socket();
  client.setTimeout(100000, function() {
    console.log("Timeout");
    response.send(500, "Timeout");
  });
  client.connect(PORT, HOST, function() {
    client.write(item + "\n");
    client.on('data', function(data) {
      buf = buf + data.toString('utf-8');
    });
    client.on('close', function() {
    });
    client.on('end', function() {
      if (buf.length > 1) {
        var result = JSON.parse(buf);
        // if result.Avaiable is true, the function should return the result
        // (or send it to the client) and stop executing the script
        if ( result.Avaiable) {
          response.send(200, result);
          // now I want to send the answer to the client, or return my value (result)
          console.log('Send data');
        }
      }
      client.destroy();
    });
  });
}
One alternative is to have a flag which indicates whether a response has been sent or not. This way, when the first of the alternatives is reached, you can set the flag to true (possibly clearing the timeout so it doesn't linger more than it needs) and in all cases check whether the flag has been set before returning the response. Something along the lines of the code below:
function function1(item, response) {
  var buf = '';
  var net = require('net');
  var HOST = 'xxxx.xxxx.xxxx.xxxx';
  var PORT = xxx;
  var client = new net.Socket();
  var responseSent = false;
  client.setTimeout(100000, function() {
    if (!responseSent) {
      responseSent = true;
      console.log("Timeout");
      response.send(500, { error: "Timeout" });
    }
  });
  client.connect(PORT, HOST, function() {
    client.write(item + "\n");
    client.on('data', function(data) {
      buf = buf + data.toString('utf-8');
    });
    client.on('close', function(had_error) {
      if (!responseSent) {
        client.setTimeout(0); // disable the idle timeout
        responseSent = true;
        console.log('Socket closed');
        response.send(500, { error: had_error ? 'Socket error' : 'unknown' });
      }
    });
    client.on('end', function() {
      if (!responseSent) {
        responseSent = true;
        client.setTimeout(0); // disable the idle timeout
        if (buf.length > 1) {
          try {
            var result = JSON.parse(buf);
            if (result.Available) {
              response.send(200, result);
            } else {
              response.send(500, { error: 'Socket data is not available' });
            }
          } catch (ex) {
            response.send(500, { error: 'error parsing JSON', exception: ex });
          }
        } else {
          // We should always return a response
          response.send(500, { error: 'No data read from socket' });
        }
      }
      client.destroy();
    });
  });
}
Notice that since node.js runs on a single thread, you can assume that there will be no cases where the response is sent twice. Also, you should make sure that the response is always sent once - in the code you had, if there was a socket error, or if buf.length was not greater than 1, or if result.Avaiable was not true, then the timeout response would be sent, but you didn't need to wait for the whole (100 seconds) time to send that response.
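That "send at most once" guard can also be factored into a tiny helper. A sketch (the once wrapper is illustrative, not part of the Azure Mobile Services API; response.send is used as above):
function once(response) {
  var sent = false;
  return function (status, body) {
    if (sent) return;   // single-threaded JS: later calls simply do nothing
    sent = true;
    response.send(status, body);
  };
}

// usage inside function1:
// var reply = once(response);
// reply(200, result);            // first call wins
// reply(500, { error: 'late' }); // ignored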

How to SSH into a server from a node.js application?

var exec = require('child_process').exec;
exec('ssh my_ip', function(err, stdout, stderr) {
  console.log(err, stdout, stderr);
});
This just freezes - I guess because ssh my_ip asks for a password, is interactive, etc. How do I do it correctly?
There's a node.js module for performing tasks over SSH called ssh2, written by mscdex. It can be found here. An example of what you're wanting (from the readme) would be:
var Connection = require('ssh2');

var c = new Connection();
c.on('connect', function() {
  console.log('Connection :: connect');
});
c.on('ready', function() {
  console.log('Connection :: ready');
  c.exec('uptime', function(err, stream) {
    if (err) throw err;
    stream.on('data', function(data, extended) {
      console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ') + data);
    });
    stream.on('end', function() {
      console.log('Stream :: EOF');
    });
    stream.on('close', function() {
      console.log('Stream :: close');
    });
    stream.on('exit', function(code, signal) {
      console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
      c.end();
    });
  });
});
c.on('error', function(err) {
  console.log('Connection :: error :: ' + err);
});
c.on('end', function() {
  console.log('Connection :: end');
});
c.on('close', function(had_error) {
  console.log('Connection :: close');
});
c.connect({
  host: '192.168.100.100',
  port: 22,
  username: 'frylock',
  privateKey: require('fs').readFileSync('/here/is/my/key')
});
The other library on this page has a lower-level API, so I've written a lightweight wrapper for it: node-ssh, which is also available on GitHub under the MIT license.
Here's an example of how to use it.
var driver, ssh;
driver = require('node-ssh');
ssh = new driver();
ssh.connect({
  host: 'localhost',
  username: 'steel',
  privateKey: '/home/steel/.ssh/id_rsa'
})
.then(function() {
  // Source, Target
  ssh.putFile('/home/steel/.ssh/id_rsa', '/home/steel/.ssh/id_rsa_bkp').then(function() {
    console.log("File Uploaded to the Remote Server");
  }, function(error) {
    console.log("Error here");
    console.log(error);
  });
  // Command
  ssh.exec('hh_client', ['--check'], { cwd: '/var/www/html' }).then(function(result) {
    console.log('STDOUT: ' + result.stdout);
    console.log('STDERR: ' + result.stderr);
  });
});
The best way is to use promisify and async/await. Example:
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);

export default async function (req, res) {
  const { username, host, password } = req.query;
  const { command } = req.body;
  let output = {
    stdout: '',
    stderr: '',
  };
  try {
    output = await exec(`sshpass -p ${password} ssh -o StrictHostKeyChecking=no ${username}@${host} ${command}`);
  } catch (error) {
    output.stderr = error.stderr;
  }
  return res.status(200).send({ data: output, message: 'Output from the command' });
}
A pure JS/Node way to SSH into hosts. Special thanks to ttfreeman. This assumes SSH keys are already set up on the host. No need for a request object.
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);
require('dotenv').config()

// SSH into host and run CMD
const ssh = async (command, host) => {
  let output = {};
  try {
    output['stdin'] = await exec(`ssh -o -v ${host} ${command}`)
  } catch (error) {
    output['stderr'] = error.stderr
  }
  return output
}

exports.ssh = ssh
