How to execute an external program from within Node.js? - node.js

Is it possible to execute an external program from within node.js? Is there an equivalent to Python's os.system() or any library that adds this functionality?

var exec = require('child_process').exec;
exec('pwd', function callback(error, stdout, stderr) {
  // result
});
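If what you're after is the blocking behaviour of Python's os.system(), newer Node versions also ship execSync; a minimal sketch:

// Blocking sketch, closer in spirit to os.system(): returns the command's
// stdout as a string and throws if the command exits with a non-zero code.
const { execSync } = require('child_process');

const output = execSync('pwd', { encoding: 'utf8' });
console.log(output);

Like exec, execSync buffers the whole output in memory.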

exec buffers the command's entire output in memory, capped by its maxBuffer option (roughly 200 KB by default in older Node versions, 1 MB in current ones). If the output can exceed that, it is better to use spawn.
With spawn you have access to the stdout of the executed command while it is still running:
var spawn = require('child_process').spawn;
var prc = spawn('java', ['-jar', '-Xmx512M', '-Dfile.encoding=utf8', 'script/importlistings.jar']);

prc.stdout.setEncoding('utf8');
prc.stdout.on('data', function (data) {
  var str = data.toString();
  var lines = str.split(/(\r?\n)/g);
  console.log(lines.join(""));
});

prc.on('close', function (code) {
  console.log('process exit code ' + code);
});

The simplest way is:
const { exec } = require("child_process")
exec('yourApp').unref()
unref() is needed so your process can exit without waiting for "yourApp" to finish.
Here are the exec docs
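If "yourApp" should keep running even after your Node process exits, the child_process docs describe spawning it detached with its stdio ignored; a sketch of that variant ('yourApp' is still just a placeholder command):

// Fully detached sketch: the child gets its own process group and no pipes
// back to the parent, so the parent can exit without waiting for it.
const { spawn } = require('child_process');

const child = spawn('yourApp', [], {
  detached: true,
  stdio: 'ignore'
});
child.unref(); // drop the child from the parent's event-loop reference count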

From the Node.js documentation:
Node provides a tri-directional popen(3) facility through the ChildProcess class.
See http://nodejs.org/docs/v0.4.6/api/child_processes.html
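In recent Node versions (8+), the same exec call can also be wrapped in a promise via util.promisify; a small sketch, not part of the old v0.4 API linked above:

// Promise-based exec sketch for newer Node versions.
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function whereAmI() {
  // resolves with { stdout, stderr }, rejects on a non-zero exit code
  const { stdout, stderr } = await exec('pwd');
  console.log('stdout:', stdout);
  if (stderr) console.error('stderr:', stderr);
}

whereAmI();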

Related

Use node script to start child process, capture child stdout, and kill child on close

I would like to start a node script called myScript.js that starts a child process with npm start, stores the stdout of npm start in a global variable let myVar, and makes sure that if the main program myScript.js exits for any reason, the child process is killed as well. Nothing from the child's stdout should appear in the terminal window after Ctrl-C or similar.
My current solution does not kill on close:
const childProcess = require('child_process');
let myVar = '';

const child = childProcess.spawn('npm', ['start'], {
  detached: false
});

process.on('exit', function () {
  child.stdin.pause();
  child.kill();
});

child.stdout.on('data', (data) => {
  myVar = `${data}`;
});
Can this be accomplished?
Small change, but I think that might look something like this:
const childProcess = require('child_process');
const child = childProcess.spawn('npm', ['start'], { shell: true });

var myVar = '';
child.stdout.setEncoding('utf8');
child.stdout.on('data', function (data) {
  myVar = data.toString();
});

child.on('close', function (exitcode) {
  // on close of the child process, use the captured output or call a function
});

process.on('exit', function () {
  // I don't think pausing stdin is strictly necessary
  child.kill();
});
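One caveat: npm start itself spawns your actual app, so killing only the direct child can leave grandchildren running. On POSIX systems a common workaround is to spawn the child detached, so it gets its own process group, and then signal the whole group; a sketch, assuming a Unix-like OS:

// POSIX-only sketch: a negative pid passed to process.kill signals the
// child's whole process group, so processes started by `npm start` die too.
const childProcess = require('child_process');

const child = childProcess.spawn('npm', ['start'], { detached: true });
child.stdout.setEncoding('utf8');
child.stdout.on('data', (data) => { /* capture into myVar as above */ });

process.on('exit', () => {
  try {
    process.kill(-child.pid, 'SIGTERM');
  } catch (e) {
    // the child may already have exited
  }
});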
Further reading
node's child_process documentation, including event names
geeksforgeeks article on child_process.spawn
This StackOverflow thread on getting and using the output of a child_process' pipe
This StackOverflow thread on the difference between the close and exit events

How to get the output of a spawned child_process in Node.JS?

First of all, I'm a complete noob and started using Node.JS yesterday (it was also my first time using Linux in years) so please be nice and explicit
I'm currently making a Node.JS program which has to, among other things, launch shell commands (mainly: mount a USB drive).
I'm currently using
var spawn = require('child_process').spawnSync;

function shspawn(command) {
  spawn('sh', ['-c', command], { stdio: 'inherit' });
}

shspawn('echo Hello world');
shspawn('mkdir newdir');
etc. which is a really comfortable way to do it for me.
The problem is that I'd like to store the output of, for example, a "ls" command in a variable, in a way like
var result = shspawn('ls -l')
I've read some examples online, but they rarely use spawn, and when they do, it doesn't work for me (I guess I may be doing something wrong, but again, I'm a noob in Node).
If you have a better idea than using child_process.spawnSync I'm open to it, but I'd like to keep my program package-free as far as possible :)
EDIT: I need it to work synchronously! That's why I've started using spawnSync. I will be using commands like dd that take time and need to be fully finished before the program moves on to the next command.
You can do it as shown below.
var spawn = require('child_process').spawn;

// Create a child process
var child = spawn('ls', ['-l']);

child.stdout.on('data', function (data) {
  console.log('ls command output: ' + data);
});

child.stderr.on('data', function (data) {
  // log errors
  console.log('stderr: ' + data);
});

child.on('close', function (code) {
  console.log('child process exited with code ' + code);
});
Update: with spawnSync
var spawn = require('child_process').spawnSync;
var child = spawn('ls' , ['-l','/usr']);
console.log('stdout here: \n' + child.stdout);
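To get something close to the var result = shspawn('ls -l') shape from the question, you can keep the sh -c wrapper but capture the output instead of inheriting stdio; a sketch:

// Sketch: same wrapper as in the question, but returning the captured
// output as a string instead of printing it straight to the terminal.
var spawnSync = require('child_process').spawnSync;

function shspawn(command) {
  var result = spawnSync('sh', ['-c', command], { encoding: 'utf8' });
  return result.stdout; // result.stderr and result.status are also available
}

var listing = shspawn('ls -l');
console.log(listing);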

Retrieve shell error output from child_process.spawn

I'm using child_process.spawn and need to capture the shell error that occurs when the command fails. According to this question, I should be able to do:
var child_process = require('child_process');
var python = child_process.spawn(
  'python', ["script.py", "someParam"]
);

python.on('error', function(error) {
  console.log("Error: bad command", error);
});
When I replace 'python', ["script.py", "someParam"] with banana, like in the linked question, it works, and the error is visible. But in my case, using python with arguments, the 'error' event is never called.
How can I capture shell errors from python?
According to the Node.js docs for the ChildProcess error event, it is only fired in a few situations:
The process could not be spawned, or
The process could not be killed, or
Sending a message to the child process failed for whatever reason.
To capture the shell error output, you can additionally listen to data events on the stdout and stderr of your spawned process:
python.stdout.on('data', function(data) {
  console.log(data.toString());
});

python.stderr.on('data', function(data) {
  console.error(data.toString());
});
To capture the shell error code, you can attach a listener to the exit event:
python.on('exit', function(code) {
  console.log("Exited with code " + code);
});
The thread is a little old, but I ran into this today while working on my test-case library. I realize the accepted answer already has a solution, but for me it wasn't clearly explained, so in case someone needs it, here is what you need to do.
What I realized is that if the Python interpreter encounters an error while executing your code, it writes the error to the standard error stream and exits. So in your Node.js code you need to listen to the stderr stream of the spawned process. In addition, everything passed to the print() function is written to the process's stdout stream.
So here is an example code:
const { spawn } = require('child_process');
const proc = spawn('python', ['main.py', '-c']);

proc.stderr.on('data', (data) => {
  // Here data is of type Buffer
  console.log(data.toString());
});

proc.stdout.on('data', (data) => {
  // Also a Buffer
  console.log(data.toString());
});
What happens should already be clear if you read the first part of my answer. One other thing you could do, instead of writing the data to the console, is redirect it to another stream; this can be really useful if you want to write the output to a file, for example. This is how you could do it:
const fs = require('fs');
const path = require('path');
const { spawn } = require('child_process');

const outputFile = path.join(__dirname, 'output.txt');
const errorFile = path.join(__dirname, 'error.txt');

const outputStream = fs.createWriteStream(outputFile, {
  encoding: "utf8",
  autoClose: true
});

const errorStream = fs.createWriteStream(errorFile, {
  encoding: "utf8",
  autoClose: true
});

const proc = spawn('python', ['main.py', '-c']);

proc.stdout.pipe(outputStream);
proc.stderr.pipe(errorStream);
What is happening here is that, using the pipe function, we send all data from the stdout and stderr of the process to the file streams. Also, you don't have to worry about the files existing; the streams will create them for you.
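If you also want to know when the child (and therefore the files) is done, you can keep the pipes attached and listen for close as well; a small sketch continuing the code above:

// Continuing the example above: the pipes keep writing to the files while
// we also watch for the child to finish, e.g. to log its exit code.
proc.on('close', (code) => {
  console.log('python exited with code ' + code);
});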

Node.js stream data from a terminal command to the client

There were already a few questions here about Node.js executing commands and outputting the data, but I still can't get this working. Using Node.js, I want to execute a Python script that runs for a long time and produces some intermediate output, and stream that output to the client as soon as it is produced. I have tried the following, but I only get the whole output once the command has finished. How can I make it pass the data to the socket in real time? Thanks.
function run_cmd(cmd, args) {
  var spawn = require('child_process').spawn,
      child = spawn(cmd, args);
  return child;
}

io.sockets.on('connection', function (socket) {
  var foo = new run_cmd('python', ['test.py']);
  foo.stdout.setEncoding('utf-8');
  foo.stdout.on('data', function (data) {
    console.log('sending data');
    io.sockets.emit('terminal', { output: data });
  });
});
All your node.js code is okay. Your code sends data only once because it receives data only once.
The point is that puts or print statements alone are not enough to trigger foo.stdout.on('data', ...).
Try adding $stdout.flush at the point where you want to send a chunk in the Ruby code.
You have to tell the program explicitly to flush its output.
Here is my test code.
js
var spawn = require('child_process').spawn;
var cmd = spawn('ruby', ['testRuby.rb']);
var counter = 0;

cmd.stdout.on('data', function(data) {
  counter++;
  console.log('stdout: ' + data);
});

cmd.stderr.on('data', function(data) {
  console.log('stderr: ' + data);
});

cmd.on('exit', function(code) {
  console.log('exit code: ' + code);
  console.log(counter);
});
testRuby.rb
def execute_each_sec(sleep_sec)
  yield
  sleep sleep_sec
end

5.times do
  execute_each_sec(1) do
    puts "CurrentTime:#{Time.now}"
    $stdout.flush
  end
end
As you can see, I'm calling $stdout.flush to output the data explicitly in testRuby.rb.
If I remove that, node.js won't get anything until the execution of testRuby.rb has finished.
Edited:
Lol, my bad, I used Ruby instead of Python. In the case of Python, use sys.stdout.flush() like svkk says.
Edit:
In Python you can also use the -u flag to force it to flush after each print.
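On the Node side that just means passing the flag in the spawn arguments; a sketch for the original Python case from the question:

// Sketch: -u makes Python unbuffered, so each print() arrives as its own
// 'data' event without needing sys.stdout.flush() in the script.
var spawn = require('child_process').spawn;
var foo = spawn('python', ['-u', 'test.py']);

foo.stdout.setEncoding('utf-8');
foo.stdout.on('data', function (data) {
  // emit to the socket here, exactly as in the question's handler
  console.log('chunk: ' + data);
});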

Run synchronous tasks using node.js on windows

I am using the Node FFI module and am trying to run sync tasks on Windows. I can successfully run a task using the following code.
var ffi = require('ffi');
var nativeC = new ffi.Library("Kernel32", {
  "WinExec": ["int32", ["string"]]
});
nativeC.WinExec('ls -lrt');
I presume this is the way to execute sync tasks, but this code always exits after the first 'ls -lrt' command; if I chain a few more commands, they won't work. So, is there a callback function here in the FFI module, or another way to chain commands in node.js on Windows so they run synchronously, one after the other?
I'm not sure you need WinExec to run a Windows command. As Jonathan pointed out, ls isn't available on Windows.
However, if you want to chain commands you could use async.js and exec like this:
var async = require('async'),
    exec = require('child_process').exec,
    commands = ['dir /w', 'echo test'];

var executeCommand = function (command, callback) {
  exec(command, function (err, stdout, stderr) {
    if (err) return callback(err);
    console.log(stdout);
    callback();
  });
};

async.eachSeries(commands, executeCommand, function (err) {
  if (err) console.log('error: ' + err);
});
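If you'd rather not pull in the async package at all, newer Node versions (0.12+) also have execSync, which blocks until each command finishes; a sketch:

// Package-free sketch: execSync runs the commands strictly one after the
// other, throwing if a command exits with a non-zero code.
var execSync = require('child_process').execSync;

['dir /w', 'echo test'].forEach(function (command) {
  try {
    var stdout = execSync(command, { encoding: 'utf8' });
    console.log(stdout);
  } catch (err) {
    console.log('error: ' + err.message);
  }
});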
