I can spawn a process like:
var spawn = require('child_process').spawn;
var topicListener = spawn('python', ['topic_listener.py', 'Node.js'], {
    env: {
        TWITTER_CONSUMER_SECRET: process.env.TWITTER_CONSUMER_SECRET,
        TWITTER_CONSUMER_KEY: process.env.TWITTER_CONSUMER_KEY,
        TWITTER_TOKEN_SECRET: process.env.TWITTER_TOKEN_SECRET,
        TWITTER_ACCESS_TOKEN: process.env.TWITTER_ACCESS_TOKEN
    }
});
topicListener.stdout.on('data', function (data) {
    console.log(data.toString());
});
topicListener.stderr.on('data', function (data) {
    console.log(data.toString());
});
topicListener.on('close', function (code) {
    console.log("EXITED " + code);
});
So of course I can control it all asynchronously with .on('close', ...), but is there any other way to check whether a process is still alive?
topicListener.on('exit', function (code) {
    topicListener = null;
    console.log("EXITED " + code);
});
If topicListener is null, the process is gone.
spawn('python', ['topic_listener.py', 'Node.js'], ...) returns a ChildProcess object. Use topicListener.pid to get the unique ID associated with the process while it's alive.
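If all you have is the PID, a common trick is to send signal 0, which only performs error checking without actually delivering a signal. A minimal sketch (the isAlive helper name is hypothetical, not part of the child_process API):
function isAlive(pid) {
    try {
        process.kill(pid, 0); // signal 0 tests for existence without killing
        return true;
    } catch (e) {
        return false; // e.g. ESRCH: no such process
    }
}
console.log(isAlive(topicListener.pid));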
I have an array with some IDs, like a = [abc, cde, efg]. I pass this array, along with a second one containing the names that fit the IDs, to a child process in Node.
I want to wait until the child process finishes and only then process the next array member.
The code to achieve this is (with the suggested edit):
function downloadSentinel(promObj) {
    return new Promise((resolve, reject) => {
        function makeRequest(url, i, callback) {
            url = promObj.ID;
            name = promObj.Name;
            var sys = require('util'),
                exec = require('child_process').exec,
                child;
            var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
            console.log(directory);
            child = exec(directory + '\\downloadProducts.sh' + promObj.ID[i] + ' ' + promObj.Name[i], function (error, stdout, stderr) {
                child.on("error", function (error) {
                    console.log(error);
                });
                child.stdout.on('data', function (data) {
                    console.log(data.toString());
                });
                child.on('exit', function (exit) {
                    console.log(exit);
                    callback();
                });
            });
        }
        async.eachOfLimit(promObj.requestURLS, 2, makeRequest, function (err) {
            if (err) reject(promObj);
            else {
                resolve(promObj);
            }
        });
    });
}
I am using the async npm module to control the concurrency, because I want to limit the curl requests made inside the shell script. The shell script itself runs without errors. But the script is only called twice, matching the async.eachOfLimit limit; after that, the remaining array IDs are never processed.
EDIT
This is the code I tried last, but then all possible URLs are invoked instead of only 2. I found the approach in another Stack Overflow question. I also tried async.timesLimit.
function downloadSentinel(promObj, req, res) {
    async.eachOfLimit(promObj.requestURLS, 2, function (value, i, callback) {
        console.log('I am here ' + i + ' times');
        url = promObj.requestURLS;
        name = promObj.Name;
        console.log(promObj.requestURLS);
        var sys = require('util'),
            exec = require('child_process').exec,
            child;
        var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
        console.log(directory);
        console.log("executing:", directory + '\\downloadProducts.sh ' + promObj.requestURLS[i] + ' ' + promObj.Name[i]);
        child = exec(directory + '\\downloadProducts.sh' + ' ' + promObj.requestURLS[i] + ' ' + promObj.Name[i], function (error, stdout, stderr) {
            if (error != null) {
                console.log(error);
            }
            // this task is resolved
            return callback(error, stdout);
        });
    }, function (err) {
        if (err) console.log('error');
        else {
            console.log('Done');
        }
    });
}
What have I missed? Thanks in advance.
exec is asynchronous; it does not wait for the launched process to finish.
Move the call to callback() inside the child.on('exit', ...) handler, or use execSync.
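A minimal sketch of the first option, reusing the promObj fields and downloadProducts.sh script from the question (the surrounding setup, with promObj in scope and async required, is assumed to be the same):
function makeRequest(value, i, callback) {
    var exec = require('child_process').exec;
    var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
    var child = exec(directory + '\\downloadProducts.sh ' + promObj.requestURLS[i] + ' ' + promObj.Name[i]);
    child.stdout.on('data', function (data) {
        console.log(data.toString());
    });
    // callback() fires only after the child has exited, so
    // async.eachOfLimit starts the next task no earlier than that.
    child.on('exit', function (code) {
        callback(code === 0 ? null : new Error('exited with code ' + code));
    });
}
async.eachOfLimit(promObj.requestURLS, 2, makeRequest, function (err) {
    // at most 2 downloads run concurrently, and every array member is processed
});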
I have a Node.js server script which prints some messages to the console and then starts the server.
I am creating an automated test using tap to check the messages in the console log and to check that the server has started, i.e. that a PID was generated.
I tried two different methods: child_process.exec and child_process.spawn.
1. Use child_process.exec with a callback function. This does not work, as the server is long-running and never reaches the callback, so I cannot check any stdout. I then used child_process.exec without a callback, which solves the first issue: I can now get the messages back from stdout. The second issue is that the test hangs, since the server is long-running and will not terminate by itself.
Code snippet:
var exec = require('child_process').exec;
tap.test('test server start', function (t) {
    var childProcess = exec('node server');
    console.log('[exec] childProcess.pid: ', childProcess.pid);
    t.notEqual(childProcess.pid, undefined);
    childProcess.stdout.on('data', function (data) {
        console.log('[exec] stdout: ', data.toString());
        t.match(data.toString(), "Example app listening at http://:::3000");
        t.end();
        childProcess.kill('SIGTERM');
    });
    childProcess.stderr.on('data', function (data) {
        console.log('[exec] stderr: ', data.toString());
    });
    childProcess.on('close', function (code) {
        if (code != null)
            console.log('child process exited with code ' + code);
    });
});
2. Use child_process.spawn. Code snippet:
var spawn = require('child_process').spawn;
tap.test('test server start', function (t) {
    var childProcess = spawn('node', ['server']);
    console.log('[spawn] childProcess.pid: ', childProcess.pid);
    t.notEqual(childProcess.pid, undefined);
    childProcess.stdout.on('data', function (data) {
        console.log('[spawn] stdout: ', data.toString());
        t.match(data.toString(), "Example app listening at http://:::3000");
        t.end();
        childProcess.kill('SIGTERM');
    });
    childProcess.stderr.on('data', function (data) {
        console.log('[spawn] stderr: ', data.toString());
    });
    childProcess.on('close', function (code) {
        if (code != null)
            console.log('child process exited with code ' + code);
    });
});
In both 1 & 2, the test will hang since the server is long running,
I need to use child_process.kill() to terminate the test
Is there a better method to achieve this?
Thanks in advance for any improvements.
Well, I think that you can check if the server is alive in a different way (without spawning a new process).
For example, you can start your server waiting for connections:
const net = require('net');
var connections = {};
var server = net.createServer(function (conn) { });
server.listen(3333);
server.on('connection', function (conn) {
    var key = conn.remoteAddress + ':' + conn.remotePort;
    connections[key] = conn;
    conn.on('close', function () {
        delete connections[key];
    });
});
Then, connect some clients (or just one) to the server:
var connected = 0;
for (var i = 0; i < 10; i++) {
    var client = net.connect(3333);
    client.on('connect', function () {
        connected++;
        console.log(connected);
    });
}
So, if you are able to connect to the server, then your server is alive.
And finally, when you want to close the server, just create a new function like this one:
var destroy = function () {
    server.close(function () {
        console.log('ok');
    });
    for (var key in connections) {
        connections[key].destroy();
    }
};
Call it for example after 10 successful connections to the server. Inside the for loop:
for (var i = 0; i < 10; i++) {
var client = net.connect(3333);
client.on('connect',function() {
connected++;
if (connected === 10) {
destroy();
}
});
}
This is a very basic example, but I think that it's enough to understand another way to do what you want to do.
I am trying to spawn a child process that performs CPU-intensive calculations through a job queue with Kue. My code at the moment looks like this:
consumer.js
var kue = require('kue');
var util = require('util');
var spawn = require('child_process').spawn;
var jobs = kue.createQueue();
jobs.process('calc', 2, function (job, done) {
    var work = spawn('Rscript', ['opti2.R', job.data.file]);
    work.stderr.on('data', function (data) {
        job.log('stderr: ' + data);
    });
    work.stdout.on('data', function (data) {
        job.log('stdout: ' + data);
    });
    work.on('exit', function (code, signal) {
        console.log('child process exited with code ' + code + ' with signal ' + signal);
        if (code != 0) {
            done(/* How to get the stderr of the child process as an error here? */);
        } else {
            done();
        }
    });
});
The code somewhat does what I would like it to do, but is there a better way to report the job as failed (to Kue) and to get the stderr from the spawned process?
You can use the job.log method to send data directly to Kue.
I would also recommend switching from .spawn to .exec, because it returns stdout and stderr as strings in its final callback, along with a good error object, which suits your needs well:
var exec = require('child_process').exec;
jobs.process('calc', 2, function (job, done) {
    exec('Rscript opti2.R ' + job.data.file, function (error, stdout, stderr) {
        if (stdout.length > 0) job.log('stdout: ' + stdout);
        if (stderr.length > 0) job.log('stderr: ' + stderr);
        done(error);
    });
});
The solution should work with .spawn as well: simply replace each console.log call in your code with job.log.
You may want to buffer your stderr, though, in order to send it to Kue in one chunk:
jobs.process('calc', 2, function (job, done) {
    var work = spawn('Rscript', ['opti2.R', job.data.file]);
    var stderr = '';
    work.stderr.on('data', function (data) {
        stderr += data;
    });
    work.stdout.on('data', function (data) {
        job.log(data); // sending arriving `stdout` chunks as normal log events
    });
    work.on('close', function (code, signal) {
        console.log('child process exited with code ' + code + ' with signal ' + signal);
        if (code != 0) {
            done(stderr); // sending all collected stderr as an explanation
        } else {
            done();
        }
    });
});
I would also recommend using the close event instead of exit, because close waits for the child's stdio streams.
For more information, see the Event: 'exit' docs:
This event is emitted after the child process ends.
Note that the child process stdio streams might still be open.
and Event: 'close' docs:
This event is emitted when the stdio streams of a child process have all terminated.
I have been tinkering with Node a little, and while trying to learn the child_process module I ran into a problem. I was attempting to serialize many calls to 'ps -eF | grep ssh', but it crashes on my system with the error below. So, two questions. First, is there a better way to do what I am attempting without a library? Second, why isn't it working? :)
events.js:71
throw arguments[1]; // Unhandled 'error' event
^
Error: This socket is closed.
at Socket._write (net.js:519:19)
at Socket.write (net.js:511:15)
at Socket.<anonymous> (/home/me/tmp/test.js:10:16)
at Socket.EventEmitter.emit (events.js:96:17)
at Pipe.onread (net.js:397:14)
function callpsgrep(callback) {
    var spawn = require('child_process').spawn,
        ps = spawn('ls', ['-la']),
        grep = spawn('grep', ['bananas']);
    ps.stdout.on('data', function (data) {
        grep.stdin.write(data);
    });
    ps.stderr.on('data', function (data) {
        console.log('ps stderr: ' + data);
    });
    ps.on('exit', function (code) {
        if (code !== 0) {
            console.log('ps process exited with code ' + code);
        }
        grep.stdin.end();
    });
    grep.stdout.on('data', function (data) {
        console.log('' + data);
    });
    grep.stderr.on('data', function (data) {
        console.log('grep stderr: ' + data);
    });
    grep.on('exit', function (code) {
        if (code !== 0) {
            console.log('grep process exited with code ' + code);
        }
        callback();
    });
}

function series(i) {
    if (i < 1000) {
        callpsgrep(function () {
            return series(i + 1);
        });
    }
}

series(0);
Close grep's stdin on the close event instead of the exit event.
ps.on('exit', function (code) {
    if (code !== 0) {
        console.log('ps process exited with code ' + code);
    }
});
ps.on('close', function (code) {
    grep.stdin.end();
});
Although not very well documented, I read the following in the help file:
Event: 'exit'
Note that the child process stdio streams might still be open.
Event: 'close'
This event is emitted when the stdio streams of a child process have all terminated. This is distinct from 'exit', since multiple processes might share the same stdio streams.
I have this simple script:
var exec = require('child_process').exec;
exec('coffee -cw my_file.coffee', function (error, stdout, stderr) {
    console.log(stdout);
});
where I simply execute a command to compile a CoffeeScript file. But stdout never gets displayed in the console, because the command never ends (because of the -w option of coffee).
If I execute the command directly from the console, I get messages like this:
18:05:59 - compiled my_file.coffee
My question is: is it possible to display these messages with the Node.js exec? If yes, how?
Thanks
Don't use exec. Use spawn, which returns an EventEmitter object. Then you can listen to stdout/stderr events (spawn.stdout.on('data', callback...)) as they happen.
From NodeJS documentation:
var spawn = require('child_process').spawn,
    ls = spawn('ls', ['-lh', '/usr']);
ls.stdout.on('data', function (data) {
    console.log('stdout: ' + data.toString());
});
ls.stderr.on('data', function (data) {
    console.log('stderr: ' + data.toString());
});
ls.on('exit', function (code) {
    console.log('child process exited with code ' + code.toString());
});
exec buffers the output and usually returns it when the command has finished executing.
exec will also return a ChildProcess object that is an EventEmitter.
var exec = require('child_process').exec;
var coffeeProcess = exec('coffee -cw my_file.coffee');
coffeeProcess.stdout.on('data', function (data) {
    console.log(data);
});
OR pipe the child process's stdout to the main stdout.
coffeeProcess.stdout.pipe(process.stdout);
OR inherit stdio using spawn (note that spawn takes the command and its arguments separately):
spawn('coffee', ['-cw', 'my_file.coffee'], { stdio: 'inherit' });
There are already several answers; however, none of them mentions the best (and easiest) way to do this, which is using spawn with the { stdio: 'inherit' } option. It seems to produce the most accurate output, for example when displaying the progress information from a git clone.
Simply do this:
var spawn = require('child_process').spawn;
spawn('coffee', ['-cw', 'my_file.coffee'], { stdio: 'inherit' });
Credit to @MorganTouvereyQuilling for pointing this out in a comment.
Inspired by Nathanael Smith's answer and Eric Freese's comment, it could be as simple as:
var exec = require('child_process').exec;
exec('coffee -cw my_file.coffee').stdout.pipe(process.stdout);
I'd just like to add that one small issue with outputting the buffered strings from a spawned process with console.log() is that it adds newlines, which can spread your spawned process output over additional lines. If you output stdout or stderr with process.stdout.write() instead of console.log(), you'll get the console output from the spawned process 'as is'.
I saw that solution here:
Node.js: printing to console without a trailing newline?
Hope that helps someone using the solution above (which is a great one for live output, even if it is from the documentation).
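For example, a minimal sketch of that approach applied to the coffee command from the question:
var spawn = require('child_process').spawn;
var child = spawn('coffee', ['-cw', 'my_file.coffee']);
child.stdout.on('data', function (data) {
    process.stdout.write(data); // no extra newline appended
});
child.stderr.on('data', function (data) {
    process.stderr.write(data);
});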
I have found it helpful to add a custom exec script to my utilities that does this.
utilities.js
const { exec } = require('child_process')
module.exports.exec = (command) => {
    const child = exec(command) // named `child` to avoid shadowing the global `process`
    child.stdout.on('data', (data) => {
        console.log('stdout: ' + data.toString())
    })
    child.stderr.on('data', (data) => {
        console.log('stderr: ' + data.toString())
    })
    child.on('exit', (code) => {
        console.log('child process exited with code ' + code.toString())
    })
}
app.js
const { exec } = require('./utilities.js')
exec('coffee -cw my_file.coffee')
After reviewing all the other answers, I ended up with this:
var exec = require('child_process').exec;

function oldSchoolMakeBuild(cb) {
    var makeProcess = exec('make -C ./oldSchoolMakeBuild',
        function (error, stdout, stderr) {
            stderr && console.error(stderr);
            cb(error);
        });
    makeProcess.stdout.on('data', function (data) {
        process.stdout.write('oldSchoolMakeBuild: ' + data);
    });
}
Sometimes data will contain multiple lines, so the oldSchoolMakeBuild header will appear once for several lines. But this didn't bother me enough to change it.
child_process.spawn returns an object with stdout and stderr streams.
You can tap the stdout stream to read the data that the child process sends back to Node. Being a stream, stdout has the 'data', 'end', and other events that streams have. spawn is best used when you want the child process to return a large amount of data to Node: image processing, reading binary data, and so on.
So you can solve your problem using child_process.spawn, as shown below.
var spawn = require('child_process').spawn,
    ls = spawn('coffee', ['-cw', 'my_file.coffee']);
ls.stdout.on('data', function (data) {
    console.log('stdout: ' + data.toString());
});
ls.stderr.on('data', function (data) {
    console.log('stderr: ' + data.toString());
});
ls.on('exit', function (code) {
    console.log('code ' + code.toString());
});
Here is an async helper function written in TypeScript that seems to do the trick for me. I guess it will not work for long-lived processes, but it might still be handy for someone.
import * as child_process from "child_process";
async function spawn(command: string, args: string[]): Promise<{ code: number | null, result: string }> {
    return new Promise((resolve, reject) => {
        const child = child_process.spawn(command, args)
        let result = ''
        child.stdout.on('data', (data: any) => {
            if (result) {
                reject(Error('Helper function does not work for long-lived processes'))
            }
            result = data.toString()
        })
        child.stderr.on('data', (error: any) => {
            reject(Error(error.toString()))
        })
        child.on('exit', code => {
            resolve({ code, result })
        })
    })
}
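A hypothetical usage of the helper above, with a short-lived command:
(async () => {
    const { code, result } = await spawn('ls', ['-lh', '/usr'])
    console.log('exited with code ' + code + ': ' + result)
})()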