In my app, I need to spawn a debugger process given a dump file to debug, say "example.dmp". If the dump file is not found, or it is not a valid dump file, the process will be spawned successfully but will exit immediately. I would like to throw an error in an async function so that it can be caught with try/catch later.
/**
 * Spawns a debugger process for the given dump-file configuration.
 *
 * @param {string[]} debuggerConfigs - arguments passed to the debugger binary
 * @returns {ChildProcess} the spawned debugger process
 * @throws the project error object when spawning failed (no pid assigned)
 * NOTE(review): an immediate exit after a successful spawn is NOT detected
 * here — the process already has a pid by the time 'exit' fires.
 */
const spawnDebuggerProcess = (debuggerConfigs) => {
  const debuggerProcess = spawn(config.debuggerName, debuggerConfigs)
  debuggerProcess.on('exit', (code, signal) => {
    console.log(`pid ${debuggerProcess.pid}`)
    console.log(`child process exited with code ${code} and signal ${signal}`)
  })
  // Don't swallow errors silently — an empty handler hides spawn failures.
  debuggerProcess.on('error', (error) => {
    console.error('debugger process error:', error)
  })
  if (debuggerProcess.pid !== undefined) {
    return debuggerProcess
  }
  throw externalError.createError({
    name: externalError.SPAWN_DEBUGGER_PROCESS_ERROR,
    description: 'Failed to spawn debugging process'
  })
}
One idea is to give this function a time window before returning: if the process exits within that window, I should throw an error. But since Node.js is asynchronous, I don't have an idea of how this can be realized. Thanks!
==== EDIT =====
/**
 * Spawns a debugger process and waits a short time window to confirm it did
 * not exit immediately (e.g. because the dump file is missing or invalid).
 *
 * @param {string[]} debuggerConfigs - arguments passed to the debugger binary
 * @returns {Promise<ChildProcess>} the still-running debugger process
 * @throws the project error object when spawning failed, or when the
 *         process exited within the configured time window
 */
const spawnDebuggerProcess = async (debuggerConfigs) => {
  const debuggerProcess = spawn(config.debuggerProcess.debuggerName, debuggerConfigs)
  /** flag flipped by the 'exit' listener */
  let exited = false
  // 'exit' fires at most once — `once` makes that explicit and detaches the listener.
  debuggerProcess.once('exit', (code, signal) => {
    console.log(`pid ${debuggerProcess.pid}`)
    console.log(`child process exited with code ${code} and signal ${signal}`)
    /** set flag to true if exited */
    exited = true
  })
  // Don't swallow errors silently — at least record why the process failed.
  debuggerProcess.on('error', (error) => {
    console.error('debugger process error:', error)
  })
  if (debuggerProcess.pid !== undefined) {
    /** delay this promise for a certain amount of time, acting as the time window */
    await delay(config.debuggerProcess.immediateExitDelay)
    /** check the flag */
    if (exited) {
      throw externalError.createError({
        name: externalError.SPAWN_DEBUGGER_PROCESS_ERROR,
        description: 'Process exited immediately'
      })
    }
    return debuggerProcess
  }
  throw externalError.createError({
    name: externalError.SPAWN_DEBUGGER_PROCESS_ERROR,
    description: 'Failed to spawn debugging process'
  })
}
It seems to be working, but I am not sure if it is a good practice.
Related
When executing the written script, it exit with code 'one' while it should save the dump of the database
const cron = require('node-cron');
const { exit } = require('process');
const spawn = require('child_process').spawn;

// Dump the `testdb` database from the local MongoDB into the current directory.
// NOTE(review): exit code 1 usually means mongodump itself failed — check that
// mongodump is on PATH and the server at localhost:27017 is reachable.
const backupProcess = spawn('mongodump', [
  '--host=localhost',
  '--port=27017',
  '--out=.',
  '--db=testdb',
], { shell: true });

backupProcess.on('exit', (code, signal) => {
  console.log('Function called');
  if (code) {
    // Non-zero exit code: the dump was not written.
    console.log('Backup process exited with code ', code);
  } else if (signal) {
    // Fixed typo in the log message ("singal" -> "signal").
    console.error('Backup process was killed with signal ', signal);
  } else {
    // Fixed typo in the log message ("backedup" -> "backed up").
    console.log('Successfully backed up the database');
  }
});
While executing the same through the VS Code terminal, I get the following output:
Function called
Backup process exited with code 1 — while it should save the dump at the given location.
Actual Result : Backup process exited with code 1
Expected result : Must dump the database
Provided I have the PID for an already running process, how can I detect the exit of that process in Node.js?
For example:
var pid = 1200; // any PID
getProcessByPID(pid).onExit = () => {
// Code on exit goes here
};
I know it's an old post; I just needed the information myself. You can run code when a process exits by listening for the 'exit' event. 'code' is the exit code of the process.
// Fork the worker file and observe both its IPC messages and its exit code.
const forked = fork(`${__dirname}/file.js`)

// Named handlers make it obvious what each event does.
const logMessage = (msg) => console.log('message from child = ', msg)
const logExit = (code) => console.log('Exiting worker with code =', code)

forked.on('message', logMessage)
forked.on('exit', logExit)
I try to spawn a child process performing cpu intensive calculations through a job queue with Kue. My code at the moment looks like this:
consumer.js
var kue = require('kue');
var util = require('util');
var spawn = require('child_process').spawn;

var jobs = kue.createQueue();

// Process 'calc' jobs (concurrency 2): run the R script and fail the job
// with the child's stderr when the script exits non-zero.
jobs.process('calc', 2, function (job, done) {
  var work = spawn('Rscript', ['opti2.R', job.data.file]);
  var stderr = '';
  work.stderr.on('data', function (data) {
    stderr += data; // buffer stderr so it can be reported in one chunk
    job.log('stderr: ' + data);
  });
  work.stdout.on('data', function (data) {
    job.log('stdout: ' + data);
  });
  // 'close' (not 'exit') guarantees the stdio streams have finished,
  // so `stderr` is complete by the time we report the result.
  work.on('close', function (code, signal) {
    console.log('child process exited with code ' + code + ' with signal ' + signal);
    if (code !== 0) {
      done(new Error(stderr)); // report the collected stderr as the failure reason
    } else {
      done(); // success: call done with NO error (done(Error()) would fail the job)
    }
  });
});
The code somewhat does what I would like it to do, but is there a better way to report the job as failed (to Kue) and get the stderr from the spawned process?
You can use job.log method to send data directly to Kue.
I would also recommend you to switch from .spawn to .exec, because it returns stdout and stderr as strings in its final callback along with a good error, which suits your needs well:
var exec = require('child_process').exec;

// Run the R script via exec: stdout/stderr arrive as complete strings in the
// final callback, together with a ready-made error object for done().
jobs.process('calc', 2, function (job, done) {
  var command = 'Rscript opti2.R ' + job.data.file;
  exec(command, function (error, stdout, stderr) {
    if (stdout.length > 0) {
      job.log('stdout: ' + stdout);
    }
    if (stderr.length > 0) {
      job.log('stderr: ' + stderr);
    }
    done(error);
  });
});
Though solution should work with .spawn as well: simply replace each console.log call in your code with job.log.
Though, you may want to bufferize your stderr in order to send it to Kue in one chunk:
// Buffer the child's stderr and report it to Kue in one chunk when the job fails.
jobs.process('calc', 2, function (job, done) {
  var work = spawn('Rscript', ['opti2.R', job.data.file]);
  var stderr = '';
  work.stderr.on('data', function (data) {
    stderr += data;
  });
  work.stdout.on('data', function (data) {
    job.log(data); // sending arriving `stdout` chunks as normal log events
  });
  // 'close' fires after all stdio streams have terminated, so `stderr` is complete.
  work.on('close', function (code, signal) {
    // Fixed typo in the log message ("singal" -> "signal").
    console.log('child process exited with code ' + code + ' with signal ' + signal);
    if (code !== 0) {
      // Wrap in an Error — error callbacks expect an Error object, not a bare string.
      done(new Error(stderr));
    } else {
      done();
    }
  });
});
I would also recommend using close event instead of exit, because it waits for child's stdio streams.
For more information see Event: 'exit' docs:
This event is emitted after the child process ends.
Note that the child process stdio streams might still be open.
and Event: 'close' docs:
This event is emitted when the stdio streams of a child process have
all terminated.
In my code below, the error thrown by processChildOne.js is not getting caught by Parent.js.
// Parent.js
var cp = require('child_process');

// Fork the two worker scripts.
var childOne = cp.fork('./processChildOne.js');
var childTwo = cp.fork('./processChildTwo.js');

// Receive results from the first worker, then hand it some work.
childOne.on('message', function (result) {
  console.log('received1: ' + result);
});
childOne.send('First Fun');

// Receive results from the second worker, then hand it some work.
childTwo.on('message', function (result) {
  console.log('received2: ' + result);
});
childTwo.send('Second Fun');
// processChildOne.js
// Worker: on each message from the parent, open a MongoDB connection and
// report completion back over the IPC channel.
process.on('message', function (msg) {
  var connection = mongoose.createConnection('mongodb://localhost:27017/DB');
  connection.on('error', console.error.bind(console, 'connection error:'));
  // Pass results back to parent process
  process.send("Fun1 complete");
});
If processChildOne.js fails, how to throw error to parent so that processChildOne.js and processChildTwo.js both should be killed. How can we keep track of how many child processes have executed and how many still are in pending.
Thanks in advance
I think what's going on is that your child process is not really throwing an error; it's writing to console.error, so there's no error to catch in the parent process.
You may want to throw an error explicitly in the child, or an error will get thrown anyway by whatever library.. With this, I got the same problem you mention..
node.js
// Fork the child and log whatever arguments arrive with each IPC message.
var cp = require('child_process').fork('./p1.js');
var onMessage = function () {
  console.log('ya', arguments);
};
cp.on('message', onMessage);
p1.js
// console.error only writes to the child's stderr — it does NOT raise an
// error, so the parent's 'error'/'message' handlers never see anything.
console.error('bad stuff man')
But this at least threw the error as expected
p1.js
// An actual throw (even of a bare string) crashes the child process,
// which the parent can then observe.
throw "bad stuff man";
This worked for catching the error in the client and sending to parent process.
node.js
var cp = require('child_process').fork('./p1.js');
// Anything the child forwards via process.send() arrives here.
cp.on('message', function () {
  console.log('error from client', arguments[0]);
});
p1.js
// Catch the error locally and forward it to the parent over the IPC channel.
try {
  throw "bad stuff man";
} catch (err) {
  process.send(err);
}
or for catching ALL errors in the client process and sending them to parent..
p1.js
// Forward every otherwise-uncaught error in this child to the parent.
process.on('uncaughtException', function (err) {
  process.send(err);
});
throw "bad stuff man";
For spawning multiple processes, and keeping track of the number, you should be able to do this..
node.js
// Fork `numprocesses` workers and track how many are still running.
// require() is hoisted out of the loop (it was called on every iteration),
// and const/let replace the per-iteration `var` redeclaration.
var fork = require('child_process').fork;
var numprocesses = 5;
var running = 0;

for (var i = numprocesses; i--;) {
  var cp = fork('./p1.js');
  cp.on('message', function (pid) {
    console.log('error from client', pid, arguments[0]);
  });
  cp.on('exit', function () {
    console.log('done');
    running--;
    console.log('number running', running, ', remaining', numprocesses - running);
  });
  running++;
}
p1.js
// Report any uncaught error to the parent, tagged with this child's pid.
process.on('uncaughtException', function (err) {
  process.send(process.pid + ': ' + err);
});

// simulate this to be a long running process of random length
setTimeout(function () {}, Math.floor(Math.random() * 10000));
throw "bad stuff man";
I have been tinkering around with node a little, and while trying to learn the child_process module, I ran into a problem. I was attempting to serialize many calls to 'ps -eF | grep ssh', but it crashes on my system with the error below. So two questions. First, is there a better way to do what I am attempting without a library? Second, why isn't it working :)
events.js:71
throw arguments[1]; // Unhandled 'error' event
^
Error: This socket is closed.
at Socket._write (net.js:519:19)
at Socket.write (net.js:511:15)
at Socket.<anonymous> (/home/me/tmp/test.js:10:16)
at Socket.EventEmitter.emit (events.js:96:17)
at Pipe.onread (net.js:397:14)
/**
 * Runs `ls -la` piped by hand into `grep bananas`, then invokes callback
 * once grep finishes.
 *
 * FIX: grep's stdin must be closed on ps's 'close' event, not 'exit'.
 * 'exit' can fire while ps's stdout stream is still open, so a late
 * 'data' event would write to an already-ended stdin and crash with
 * "Error: This socket is closed."
 *
 * @param {Function} callback - invoked (with no arguments) after grep exits
 */
function callpsgrep(callback) {
  var spawn = require('child_process').spawn,
      ps = spawn('ls', ['-la']),
      grep = spawn('grep', ['bananas']);

  // Manually pipe ps's stdout into grep's stdin.
  ps.stdout.on('data', function (data) {
    grep.stdin.write(data);
  });
  ps.stderr.on('data', function (data) {
    console.log('ps stderr: ' + data);
  });
  ps.on('exit', function (code) {
    if (code !== 0) {
      console.log('ps process exited with code ' + code);
    }
  });
  // 'close' guarantees all of ps's stdio streams have terminated,
  // so no further 'data' events can arrive after we end grep's stdin.
  ps.on('close', function () {
    grep.stdin.end();
  });

  grep.stdout.on('data', function (data) {
    console.log('' + data);
  });
  grep.stderr.on('data', function (data) {
    console.log('grep stderr: ' + data);
  });
  grep.on('exit', function (code) {
    if (code !== 0) {
      console.log('grep process exited with code ' + code);
    }
    callback();
  });
}
// Run callpsgrep 1000 times, strictly one after another (each call starts
// only after the previous one's callback fires).
function series(i) {
  if (i >= 1000) {
    return;
  }
  callpsgrep(function () {
    series(i + 1);
  });
}
series(0);
Close grep's stdin on the close event instead of the exit event.
// 'exit' only tells us the process itself ended — its stdout may still
// deliver buffered 'data' events after this fires.
ps.on('exit', function (code) {
  if (code !== 0) {
    console.log('ps process exited with code ' + code);
  }
});
// 'close' fires once ALL of ps's stdio streams have terminated, so it is
// now safe to end grep's stdin: no more writes can follow.
ps.on('close', function (code) {
  grep.stdin.end();
});
Although not very well documented, I found the following in the documentation.
Event: 'exit'
Note that the child process stdio streams might still be open.
Event: 'close'#
This event is emitted when the stdio streams of a child process have all terminated. This is distinct from 'exit', since multiple processes might share the same stdio streams.