how to send a Map from child process to parent process - node.js

I fork a child process (worker.js) with the child_process module. I want to return the resulting map from the child process to the parent process after it has done some work, but it doesn't work. The map is pretty large and I am not sure if that's the reason. How do I send a map from the child process to the parent process?
// parent
var cp = require('child_process');

function doWork() {
    var child = cp.fork(__dirname + '/work');
    child.on('message', function (m) {
        console.log('completed: ' + m.results);
    });
    child.send({
        msg: 'do work',
        name: self.myname
    });
}
worker.js
var results = {};

process.on('message', function (m) {
    // work populates the results map
    work(m.name);
    process.send({
        msg: 'DONE',
        // somehow this doesn't work
        results: JSON.stringify(results)
    });
});
If I remove results then it works; I can get the msg in the parent process.
var results = {};

process.on('message', function (m) {
    // work populates the results map
    work(m.name);
    process.send({
        msg: 'DONE'
    });
});
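For what it's worth, process.send() on a forked child already serializes plain objects over the IPC channel (JSON by default), so the JSON.stringify should not be necessary; and if results were an actual ES6 Map, it would serialize to an empty object and would need converting first (e.g. Array.from(results.entries())). Here is a minimal sketch, assuming work() accepts a completion callback (that callback signature is an assumption, not from the question):

var results = {};

process.on('message', function (m) {
    // if work() fills results asynchronously, send only after it finishes,
    // otherwise results is still empty at the time of the send
    work(m.name, function () {
        process.send({
            msg: 'DONE',
            // fork() IPC serializes plain objects itself, no JSON.stringify needed
            results: results
        });
    });
});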

Related

managing multiple node child processes

How can I manage multiple concurrent child processes that have been forked?
In this example, start_child() can be invoked multiple times, and each invocation can run indefinitely. When forking an arbitrary number of child processes like this, how can I communicate with / address each individual child process? Let's say I have 3 forked child processes running, and they keep running indefinitely, but I want to kill (or send a message to) child process number 2. How would I do that?
If stop_child() is invoked, it kills all of the currently running child processes. How do I refactor this code so that I can call stop_child() on an individual child process?
let app = require('express')();
let server = require('http').Server(app);
let io = require('socket.io')(server);
let fork = require('child_process').fork;

server.listen(80);

app.get('/', function(request, response) {
    response.sendFile(__dirname + '/index.html');
});

io.on('connection', function(socket) {
    socket.on('start_child', function () {
        start_child();
    });
    socket.on('stop_child', function () {
        child.kill();
    });
});

function start_child() {
    child = fork('./child_script.js');
    //conditional logic to either run
    //start_child() again or let it end
}
UPDATE
I tried this, based on some of the comments. But if I launch 3 processes and then call, for example, child[0].kill(), I get an error: Cannot read property 'kill' of undefined. I'm guessing my problem is that I'm not correctly passing the i variable to the io.on() call:
let app = require('express')();
let server = require('http').Server(app);
let io = require('socket.io')(server);
let fork = require('child_process').fork;
let i = 0;
let child = [];

server.listen(80);

app.get('/', function(request, response) {
    response.sendFile(__dirname + '/index.html');
});

io.on('connection', function(socket) {
    socket.on('start_child', function (i) {
        start_child(i++);
    });
    //this is hard coded just for testing
    socket.on('stop_child', function () {
        child[0].kill();
    });
});

function start_child(i) {
    child[i] = fork('./child_script.js');
    //conditional logic to either run
    //start_child() again or let it end
}
UPDATE #2
Okay, I figured out that I need to send the incrementing variable from the client side, passed through an object coming from the emit call. Now when I call child[0].kill() there is no error. The problem is that the child process is not killed:
server.js
let app = require('express')();
let server = require('http').Server(app);
let io = require('socket.io')(server);
let fork = require('child_process').fork;
let child = [];

server.listen(80);

app.get('/', function(request, response) {
    response.sendFile(__dirname + '/index.html');
});

io.on('connection', function(socket) {
    socket.on('start_child', function (count) {
        let num = count.count;
        start_child(num);
    });
    //this is hard coded just for testing
    socket.on('stop_child', function (count) {
        let num = count.count;
        child[num].kill();
    });
});

function start_child(num) {
    child[num] = fork('./child_script.js');
    //conditional logic to either run
    //start_child() again or let it end
}
index.html
$(function () {
    let socket = io();
    let i = 0;
    $('#start').on('click', function () {
        socket.emit('start_child', {"count": i++});
    });
    $('#stop').on('click', function () {
        //the count is hard coded here just for testing purposes
        socket.emit('stop_child', {"count": 0});
    });
});
FINAL UPDATE - WITH RESOLUTION
Update #2 (right above this) is actually the solution. The problem I was having after that (where the child.kill() call didn't seem to do anything) was caused by a piece of the code that I had left out (in the code comment: 'conditional logic to either run start_child() again or let it end').
This is what was in there:
if (condition) {
    setTimeout(start_child, 5000);
} else {
    console.log('this child process has ended');
}
And this is what I changed it to (basically, I just had to pass the incrementing variable to the start_child() function so that it would have the same place in the child array when it restarted):
if (condition) {
    setTimeout(function() {
        start_child(num);
    }, 5000);
} else {
    console.log('this child process has ended');
}
fork() returns a ChildProcess object which has methods and events for interprocess communication with that process. So you have to save each ChildProcess object from when you call start_child() and then use the appropriate object in order to communicate with the corresponding process.
You can see the events and methods of the ChildProcess object in the Node.js documentation, along with numerous code examples.
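A minimal sketch of what that can look like, keeping each ChildProcess in a lookup object keyed by an id (the names here are illustrative, not from the question):

const { fork } = require('child_process');

const children = {};   // id -> ChildProcess
let nextId = 0;

function startChild() {
    const id = nextId++;
    const child = fork('./child_script.js');
    children[id] = child;
    child.on('exit', function () {
        delete children[id];           // forget it once it has ended
    });
    return id;
}

function stopChild(id) {
    if (children[id]) children[id].kill();      // kill only this one
}

function messageChild(id, msg) {
    if (children[id]) children[id].send(msg);   // or message only this one
}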
In order to send a message to the child process, use the following example:
parent.js
const { fork } = require('child_process');
const forked = fork('child.js');

forked.on('message', (msg) => {
    console.log('Message from child', msg);
});

forked.send({ hello: 'world' });
child.js
process.on('message', (msg) => {
    console.log('Message from parent:', msg);
});

let counter = 0;

setInterval(() => {
    process.send({ counter: counter++ });
}, 1000);
For further information, refer to this article:
https://medium.freecodecamp.com/node-js-child-processes-everything-you-need-to-know-e69498fe970a

Nodejs events captured in any cluster process sent from cluster creating file

I am stuck here due to a simple event-related issue. Here is the issue:
I have created a cluster using cluster.js and forked server.js from cluster.js.
I have put a timer in cluster.js and after every 1 min I am triggering an event 'testTimer'. I have used an events file to do it.
I am trying to capture this event 'testTimer' from the child process, using the same file imported into server.js and doing a .on('testTimer', callback).
However, the events are not captured in any of the processes. I have also tried making the event global and assigning it to a global symbol, but was still unable to capture the event.
Here is the code:
cluster.js (child process creator)
...require > events.js...
...create cluster logic...

setInterval(function () {
    evt.emit('testTimer', {tester: 'test'});
    evt.tester();
}, 1000);
server.js (child process)
...require > events.js...

evt.on('testTimer', function (data) {
    console.log('Starting Sync ', data);
});
events.js (common file for events)
var util = require("util");
var EventEmitter = require("events").EventEmitter;

function test () {
    EventEmitter.call(this);
}

test.prototype.tester = function () {
    this.emit('testTimer', {missed: 'this'});
};

util.inherits(test, EventEmitter);

module.exports = test;
EventEmitter instances can't reach beyond the bounds of a process. If you want to communicate between parent and children, use worker.send():
// cluster.js
const cluster = require('cluster');

setInterval(function () {
    for (const id in cluster.workers) {
        cluster.workers[id].send({ type: 'testTimer', data: { tester: 'test' } });
    }
}, 1000);

// server.js
process.on('message', function (message) {
    if (message.type === 'testTimer') {
        console.log('Starting Sync ', message.data);
    }
});
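Messages can also flow back the other way. A sketch, assuming the same cluster.js/server.js split ('timerAck' is just an illustrative message type):

// cluster.js
for (const id in cluster.workers) {
    cluster.workers[id].on('message', function (message) {
        if (message.type === 'timerAck') {
            console.log('worker replied:', message.data);
        }
    });
}

// server.js
process.on('message', function (message) {
    if (message.type === 'testTimer') {
        process.send({ type: 'timerAck', data: { received: true } });
    }
});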

How to detect if a Node spawned process is still running?

I can spawn a process like:
var spawn = require('child_process').spawn;

var topicListener = spawn('python', ['topic_listener.py', 'Node.js'], {env: {
    TWITTER_CONSUMER_SECRET: process.env.TWITTER_CONSUMER_SECRET,
    TWITTER_CONSUMER_KEY: process.env.TWITTER_CONSUMER_KEY,
    TWITTER_TOKEN_SECRET: process.env.TWITTER_TOKEN_SECRET,
    TWITTER_ACCESS_TOKEN: process.env.TWITTER_ACCESS_TOKEN
}});

topicListener.stdout.on('data', function (data) {
    console.log(data.toString());
});

topicListener.stderr.on('data', function (data) {
    console.log(data.toString());
});

topicListener.on('close', function (code) {
    console.log("EXITED " + code);
});
So of course I can handle it all asynchronously with .on('close', ...), but is there any other way to check whether a process is still alive?
topicListener.on('exit', function (code) {
    topicListener = null;
    console.log("EXITED " + code);
});
If topicListener is null, the process is gone.
spawn('python', ['topic_listener.py', 'Node.js'], ...) returns a ChildProcess object. Use topicListener.pid to get the unique ID associated with the process while it's alive.
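Another option, once you have the pid, is to probe the process with signal 0, which sends nothing but throws if the process no longer exists. A minimal sketch (isRunning is a hypothetical helper, not part of any API):

function isRunning(pid) {
    try {
        process.kill(pid, 0);   // signal 0 = existence check only, nothing is sent
        return true;
    } catch (e) {
        // ESRCH means no such process; EPERM would mean it exists but belongs to another user
        return e.code === 'EPERM';
    }
}

console.log('still alive?', isRunning(topicListener.pid));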

Node Kue and Child Process - get error from spawned process

I am trying to spawn a child process that performs CPU-intensive calculations through a job queue with Kue. My code at the moment looks like this:
consumer.js
var kue = require('kue');
var util = require('util');
var spawn = require('child_process').spawn;

var jobs = kue.createQueue();

jobs.process('calc', 2, function(job, done){
    var work = spawn('Rscript', ['opti2.R', job.data.file]);
    work.stderr.on('data', function (data) {
        job.log('stderr: ' + data);
    });
    work.stdout.on('data', function (data) {
        job.log('stdout: ' + data);
    });
    work.on('exit', function (code, signal) {
        console.log('child process exited with code ' + code + ' with signal ' + signal);
        if (code != 0) {
            done(/* How to get the stderr of the child process as an error here? */);
        } else {
            done(Error());
        }
    });
});
The code somewhat does what I would like it to do, but is there a better way to report the job as failed (to Kue) and get the stderr from the spawned process?
You can use the job.log method to send data directly to Kue.
I would also recommend switching from .spawn to .exec, because it returns stdout and stderr as strings in its final callback, along with a proper error, which suits your needs well:
var exec = require('child_process').exec;

jobs.process('calc', 2, function(job, done){
    exec('Rscript opti2.R ' + job.data.file, function (error, stdout, stderr) {
        if (stdout.length > 0) job.log('stdout: ' + stdout);
        if (stderr.length > 0) job.log('stderr: ' + stderr);
        done(error);
    });
});
That said, the solution should work with .spawn as well: simply replace each console.log call in your code with job.log.
You may also want to buffer your stderr in order to send it to Kue in one chunk:
jobs.process('calc', 2, function(job, done){
    var work = spawn('Rscript', ['opti2.R', job.data.file]);
    var stderr = '';
    work.stderr.on('data', function (data) {
        stderr += data;
    });
    work.stdout.on('data', function (data) {
        job.log(data); // sending arriving `stdout` chunks as normal log events
    });
    work.on('close', function (code, signal) {
        console.log('child process exited with code ' + code + ' with signal ' + signal);
        if (code != 0) {
            done(stderr); // sending all collected stderr as an explanation
        } else {
            done();
        }
    });
});
I would also recommend using the close event instead of exit, because it waits for the child's stdio streams.
For more information, see the Event: 'exit' docs:
This event is emitted after the child process ends. Note that the child process stdio streams might still be open.
and the Event: 'close' docs:
This event is emitted when the stdio streams of a child process have all terminated.
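A small sketch of why that matters in practice, reusing the Rscript command from above (the file name here is just a placeholder): on 'exit' some stdout data may still be buffered, while on 'close' the streams are guaranteed to be finished.

var spawn = require('child_process').spawn;
var work = spawn('Rscript', ['opti2.R', 'somefile.dat']);

var output = '';
work.stdout.on('data', function (data) {
    output += data;
});

work.on('exit', function (code) {
    // stdio may still be open here, so output can be incomplete
    console.log('exit:', output.length, 'bytes collected so far');
});

work.on('close', function (code) {
    // all stdio streams have ended, output is complete
    console.log('close:', output.length, 'bytes total');
});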

Error handling using fork

In my code below, the error thrown by processChildOne.js is not getting caught by Parent.js.
// Parent.js
var cp = require('child_process');

var childOne = cp.fork('./processChildOne.js');
var childTwo = cp.fork('./processChildTwo.js');

childOne.on('message', function(m) {
    // Receive results from child process
    console.log('received1: ' + m);
});

// Send child process some work
childOne.send('First Fun');

childTwo.on('message', function(m) {
    // Receive results from child process
    console.log('received2: ' + m);
});

// Send child process some work
childTwo.send('Second Fun');

// processChildOne.js
var mongoose = require('mongoose');

process.on('message', function(m) {
    var conn = mongoose.createConnection('mongodb://localhost:27017/DB');
    conn.on('error', console.error.bind(console, 'connection error:'));
    // Pass results back to parent process
    process.send("Fun1 complete");
});
If processChildOne.js fails, how do I propagate the error to the parent so that both processChildOne.js and processChildTwo.js are killed? How can we keep track of how many child processes have finished and how many are still pending?
Thanks in advance.
I think what's going on is that your child process is not really throwing an error; it's writing to console.error, so there's no 'error' to catch in the parent process.
You may want to throw an error explicitly in the child, or an error will get thrown anyway by whatever library you use. With the following, I got the same problem you mention:
node.js
var cp = require('child_process').fork('./p1.js');

cp.on('message', function(){
    console.log('ya', arguments);
});
p1.js
console.error('bad stuff man')
But this at least threw the error as expected
p1.js
throw "bad stuff man";
This worked for catching the error in the child and sending it to the parent process.
node.js
var cp = require('child_process').fork('./p1.js');

cp.on('message', function(){
    console.log('error from client', arguments[0]);
});
p1.js
try {
    throw "bad stuff man";
} catch(e) {
    process.send(e);
}
Or, to catch ALL errors in the child process and send them to the parent:
p1.js
process.on('uncaughtException', function(e){
    process.send(e);
});

throw "bad stuff man";
For spawning multiple processes and keeping track of the number, you should be able to do this:
node.js
var numprocesses = 5, running = 0;

for (var i = numprocesses; i--;) {
    var cp = require('child_process').fork('./p1.js');
    cp.on('message', function(pid){
        console.log('error from client', pid, arguments[0]);
    });
    cp.on('exit', function(){
        console.log('done');
        running--;
        console.log('number running', running, ', remaining', numprocesses - running);
    });
    running++;
}
p1.js
process.on('uncaughtException', function(e){
    process.send(process.pid + ': ' + e);
});

// simulate this to be a long running process of random length
setTimeout(function(){}, Math.floor(Math.random()*10000));

throw "bad stuff man";
