NodeJS: Out of memory when sending messages to child_processes

My goal is to create a parent process that streams messages to its child processes indefinitely. To test this, I tried the code below, sending 150M messages to a child process that does nothing.
(Possible answer edited below.) Why would this code run out of memory?
I am using the ofe module, but I do not know what to look for in the resulting heapdump. What method could I have used to search the heapdump for clues?
Code:
server.js
// After 20M messages I get:
// FATAL ERROR: CALL_AND_RETRY_2 Allocation failed - process out of memory
var cp = require('child_process');
var xrange = require('xrange');
var maxChildren = require('os').cpus().length;
var util = require('util');
require('ofe').call();

var childError = function(err) {
    console.log('Error: ' + err);
};

var childExit = function(code, signal) {
    console.log('Exit Code: ' + code);
    console.log('Exit Signal: ' + signal);
};

var childClose = function(code, signal) {
    console.log('Close Code: ' + code);
    console.log('Close Signal: ' + signal);
};

var childDisconnect = function() {
    console.log('Disconnect');
};

var childMessage = function(msg, handle) {
    console.log('Msg: ' + msg);
};

var createChild = function() {
    var child = cp.fork("./child");
    child.on('error', childError);
    child.on('exit', childExit);
    child.on('close', childClose);
    child.on('disconnect', childDisconnect);
    child.on('message', childMessage);
    console.log("Child Created: " + child.pid);
    return child;
};

var createChildren = function() {
    var children = [];
    xrange(maxChildren).each(function(i) {
        children[i] = createChild();
    });
    return children;
};

var sendMessages = function(children) {
    xrange(150000000).each(function(num) {
        var idx = num % maxChildren;
        if (num % 1000000 == 0) {
            console.log(num);
        }
        children[idx].send(num);
    });
};

// kick everything off (the snippet as posted never invoked these)
sendMessages(createChildren());
child.js (contents)
process.on('message', function(msg) {});
Answer
Since this code is asynchronous, the parent process sends all the messages to the children without waiting for them to be processed, which overloads the children. I believe the solution is to have each child send a message back to 'pull' the next number after it finishes processing. Assuming this is the answer, I have a follow-up question.
1) I'd like to write this, but I'm not sure how to turn the xrange into a generator without the ability to yield (no-harmony Node.js). Is there a callback solution?
function getNextNumber() {
    // pull the next available number from the 150M
}

child.on('message', function(msg) {
    child.send(getNextNumber());
});
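One pre-harmony option is a plain closure over a counter; no yield is needed. A minimal sketch (the 150M limit comes from the question, and null signals exhaustion), which mirrors the getNextFn in the final code below:

var getNextNumber = (function() {
    var i = 0;
    return function() {
        i = i + 1;
        return (i <= 150000000) ? i : null; // null once the range is exhausted
    };
})();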
Followup
If I just wanted to iterate, this seems to be the way: https://stackoverflow.com/a/5784473/1578888.
If I am reading this alternate answer correctly, implementing the xrange call as a true generator is not possible pre-harmony: https://stackoverflow.com/a/7442013/1578888.
This is the code I ended up using (I also renamed server.js to parent.js):
parent.js
var cp = require('child_process');
var xrange = require('xrange');
var maxChildren = require('os').cpus().length;
var util = require('util');
require('ofe').call();

var childError = function(err) {
    console.log('Error: ' + err);
};

var childExit = function(code, signal) {
    console.log('Exit Code: ' + code);
    console.log('Exit Signal: ' + signal);
};

var childClose = function(code, signal) {
    console.log('Close Code: ' + code);
    console.log('Close Signal: ' + signal);
};

var childDisconnect = function() {
    console.log('Disconnect');
};

var childMessage = function(msg, handle) {
    //no output anymore!
    //console.log('Msg: ' + msg);
};

var createChild = function() {
    var child = cp.fork("./child.js");
    child.on('error', childError);
    child.on('exit', childExit);
    child.on('close', childClose);
    child.on('disconnect', childDisconnect);
    child.on('message', childMessage);
    console.log("Child Created: " + child.pid);
    return child;
};

// closure-based stand-in for a generator
var getNextFn = (function() {
    var i = 0;
    return function() {
        i = i + 1;
        if (i < 150000000) {
            return i;
        } else {
            return null;
        }
    };
})();

var createChildren = function() {
    var children = [];
    xrange(maxChildren).each(function(i) {
        var child = createChild();
        child.on('message', function(msg) {
            var next = getNextFn();
            if (next) {
                if (next % 1000000 == 0) {
                    console.log(next + " " + new Date().toISOString());
                }
                child.send(next);
            } else {
                child.kill();
            }
        });
        children[i] = child;
    });
    return children;
};

var c = createChildren();
child.js
process.on('message', function(msg) {
    process.send({});
});
process.send({});
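Note that the trailing process.send({}) in child.js is what primes the loop: each child announces that it is ready, and every reply from the parent triggers the child's next request. As a side note, on later Node versions (4.x and up) child.send() also accepts a completion callback, which offers another way to pace the parent; a minimal sketch, assuming such a Node version (sendNext is a hypothetical helper):

// hypothetical pacing helper built on the send() callback (Node 4+)
var sendNext = function(child) {
    var next = getNextFn();
    if (next === null) {
        child.kill();
        return;
    }
    child.send(next, function(err) {
        // fires once the message has been handed to the IPC channel,
        // so at most one message is queued in the parent at a time
        if (err) {
            console.log('Send failed: ' + err);
            return;
        }
        sendNext(child);
    });
};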

Related

Kafka consumer not consuming messages after pause and resume

I am using the node-rdkafka library to implement a Kafka consumer with the pause and resume methods to handle back-pressure. I have created a small demo where I can pause the consumer and resume it, but the problem is that after resuming, the consumer stops consuming messages.
Here is my code.
const Kafka = require('node-rdkafka');
const topic = 'create_user_channel';
const log_divider = '-----------------------------------';
const consumer = new Kafka.KafkaConsumer({
    'group.id': 'gsuite_consumer',
    'metadata.broker.list': '*******',
    'sasl.mechanisms': 'PLAIN',
    'sasl.username': '********',
    'sasl.password': '********',
    'security.protocol': 'SASL_SSL',
    'enable.auto.commit': false
}, {});

// Connect the consumer.
consumer.connect({timeout: "1000ms"}, (err) => {
    if (err) {
        console.log(`Error connecting to Kafka broker: ${err}`);
        process.exit(-1);
    }
    console.log("Connected to Kafka broker");
});

consumer.on('disconnected', (args) => {
    console.error(`Consumer got disconnected: ${JSON.stringify(args)}`);
});

let max_queue_size = 3;
let current_queue = [];
let is_pause = false;

// register ready handler.
consumer.on('ready', (arg) => {
    console.log('consumer ready: ' + JSON.stringify(arg));
    consumer.subscribe([topic]);
    setInterval(function() {
        console.log('consumer called consume at: ' + timeMs());
        consumer.consume();
    }, 1000);
});

consumer.on('data', async (data) => {
    console.log('************consumer is consuming data***********: ' + timeMs());
    if (!is_pause) {
        is_pause = true;
        if (data && typeof data !== 'undefined') {
            try {
                console.log('consumer has received the data: ' + timeMs());
                consumer.pause([topic]);
                console.log('consumer has paused consuming: ' + timeMs());
                await processMessage(data);
                console.log('consumer is resumed: ' + timeMs());
                consumer.resume([topic]);
                console.log(log_divider);
                is_pause = false;
            } catch (error) {
                console.log('data consuming error');
                console.log(error);
            }
        } else {
            is_pause = false;
        }
    }
});

async function processMessage(data) {
    // await print_bulk(data);
    await processData(0, data);
}

async function print_bulk(data) {
    for (var i = 0; i < data.length; i++) {
        await processData(i, data[i]);
    }
}

/**
 * Wait the specified number of milliseconds.
 * @param ms
 */
async function wait(ms) {
    console.log('wait for ' + ms + ' ms');
    return new Promise((resolve) => setTimeout(resolve, ms));
}

var timeMs = () => {
    var d = new Date();
    var h = addZero(d.getHours(), 2);
    var m = addZero(d.getMinutes(), 2);
    var s = addZero(d.getSeconds(), 2);
    var ms = addZero(d.getMilliseconds(), 3);
    return h + ":" + m + ":" + s + ":" + ms;
};

var addZero = (x, n) => {
    while (x.toString().length < n) {
        x = "0" + x;
    }
    return x;
};

async function processData(i, m) {
    if (m) {
        console.log('processing a data start: ' + timeMs());
        console.log('Received a message:');
        console.log('  message: ' + m.value.toString());
        console.log('  key: ' + m.key);
        console.log('  size: ' + m.size);
        console.log('  topic: ' + m.topic);
        console.log('  offset: ' + m.offset);
        console.log('  partition: ' + m.partition);
        consumer.commitMessage(m);
    }
    await wait(3000);
    console.log('processing a data completed: ' + timeMs());
    // delete current_queue[i];
    // console.log('after deleting, length of current queue: ' + current_queue.length);
    // console.log(log_divider);
    return true;
}
Can anybody help me with what I am doing wrong while resuming the consumer? When I start the consumer it receives only one message, and after resuming it still does not consume any further messages.
I have figured out the issue: along with the consumer.pause() and consumer.resume() methods, I need to use the consumer.assignments() method as well.
So it looks like this:
consumer.pause(consumer.assignments());
consumer.resume(consumer.assignments());
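Applied to the data handler above, only the pause and resume calls change; a sketch of the relevant portion:

consumer.on('data', async (data) => {
    if (!is_pause) {
        is_pause = true;
        try {
            // pause every partition currently assigned to this consumer
            consumer.pause(consumer.assignments());
            await processMessage(data);
            // resume the same assignments once processing is done
            consumer.resume(consumer.assignments());
        } catch (error) {
            console.log('data consuming error');
            console.log(error);
        }
        is_pause = false;
    }
});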

Truncated Output from Nodejs ssh2-exec

Can someone please help me understand why the ssh command output is getting truncated when using result[ip] = data.toString(); but not with console.log(data.toString())? I only get the first 50 bytes or so with the object assignment, but console.log gets everything. The reason I'm using Promise/await is that I'm planning to eventually have this reach out to several hosts simultaneously and gather the results with Promise.all before moving on.
#!/usr/local/bin/node
var fs = require('fs');
var ssh_client = require('ssh2').Client;

var ssh_config = {
    port: 22,
    username: 'username',
    password: 'password',
    readyTimeout: 5000
};

var cmd = 'find /home/username -type f -iname "*.txt"';

async function main() {
    let ip = '10.10.10.110';
    let result = await sshExec(ip);
    console.log(result[ip]);
}

function sshExec(ip) {
    return new Promise(function(resolve, reject) {
        let ssh = new ssh_client();
        let config = Object.assign(ssh_config, {'host': ip});
        let result = {};
        ssh.connect(config);
        ssh.on('ready', function() {
            console.log("Connected to " + ip + ".");
            ssh.exec(cmd, function(err, stream) {
                if (err) throw err;
                stream.on('data', function(data) {
                    //console.log(data.toString());
                    result[ip] = data.toString();
                }).stderr.on('data', function(data) {
                    console.log('STDERR: ' + data);
                }).on('close', function(code, signal) {
                    ssh.end();
                });
            });
        });
        ssh.on('error', function(e) {
            console.log(ip + ', connection failed, ' + e.message);
        });
        ssh.on('close', function(hadError) {
            if (!hadError) {
                console.log('Connection to ' + ip + ' closed without errors.');
                resolve(result);
            } else {
                console.log('Connection to ' + ip + ' closed with errors.');
                reject(result.ip = 'failure');
            }
        });
    });
}

main();
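A likely cause: a stream's 'data' event fires once per chunk, and result[ip] = data.toString() replaces whatever the previous chunk stored, so only one chunk of the output survives. Accumulating the chunks and converting once on 'close' should keep everything; a sketch of just the changed portion (the surrounding connect/close logic stays as above):

ssh.exec(cmd, function(err, stream) {
    if (err) throw err;
    var chunks = [];
    stream.on('data', function(data) {
        chunks.push(data); // collect every chunk; convert once at the end
    }).stderr.on('data', function(data) {
        console.log('STDERR: ' + data);
    }).on('close', function(code, signal) {
        result[ip] = Buffer.concat(chunks).toString();
        ssh.end();
    });
});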

How to use openvpn in nodejs?

I have a NodeJS app and want to use an OpenVPN connection in it.
To do that I found two modules on npm (openvpn-client and openvpn-bin), but neither has good docs or examples; I tried to use them as best I could, without success.
I have an IPVanish account (login/password) with 540 .ovpn files which I can use. I tried this:
var openvpnmanager = require('node-openvpn');
var openvpnBin = require('openvpn-bin');
var path = require('path');

var filePath = path.normalize('../geo/ipvanish/ipvanish-AU-Sydney-syd-a16.ovpn');
var opts = {
    host: 'syd-a16.ipvanish.com', // normally '127.0.0.1', will default to it if undefined
    port: 443, // port of the openvpn management console
    timeout: 60000, // timeout for connection - optional, will default to 1500ms if undefined
    config: filePath
};
var auth = {
    user: 'email#gmail.com',
    pass: 'password'
};

var openvpn = openvpnmanager.connect(opts);
openvpn.on('connected', function() {
    // will be emitted on successful interfacing with the openvpn instance
    console.log('connected');
    openvpnmanager.authorize(auth).then(function(res) {
    });
});
I use this more effective way instead (with it I can handle the OpenVPN connection as a child process, closing and reconnecting on the fly):
var exec = require('child_process').exec;
var psTree = require('ps-tree');

var kill = function (pid, signal, callback) {
    signal = signal || 'SIGKILL';
    callback = callback || function () {};
    var killTree = true;
    if (killTree) {
        psTree(pid, function (err, children) {
            [pid].concat(
                children.map(function (p) {
                    return p.PID;
                })
            ).forEach(function (tpid) {
                try { process.kill(tpid, signal); }
                catch (ex) { }
            });
            callback();
        });
    } else {
        try { process.kill(pid, signal); }
        catch (ex) { }
        callback();
    }
};

var ovpnProcess = null;

// to close an existing connection:
if (ovpnProcess != null) {
    console.log('close connection');
    var isWin = /^win/.test(process.platform); // note: process.platform, not ovpnProcess.platform
    if (!isWin) {
        kill(ovpnProcess.pid);
    } else {
        var cp = require('child_process');
        cp.exec('taskkill /PID ' + ovpnProcess.pid + ' /T /F', function (error, stdout, stderr) {
            // more debug if you need it
            // console.log('stdout: ' + stdout);
            // console.log('stderr: ' + stderr);
            // if (error !== null) {
            //     console.log('exec error: ' + error);
            // }
        });
    }
}

// to open a connection I use this code:
ovpnProcess = exec('openvpn ipvanish/' + account.ip + '.ovpn');
ovpnProcess.stdout.on('data', function(data) {
    console.log('stdout: ' + data);
});
ovpnProcess.stderr.on('data', function(data) {
    console.log('stderr: ' + data);
});
ovpnProcess.on('close', function(code) {
    console.log('closing code: ' + code);
});
You can get help from https://www.npmjs.com/package/node-openvpn or from the question "OpenVPN with node, How it works?".
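One caveat with the child_process.exec approach above: exec buffers the child's entire stdout/stderr in memory and kills the process once the maxBuffer limit is exceeded, which is a poor fit for a long-running daemon like openvpn. child_process.spawn streams the output instead; a minimal sketch, assuming the same .ovpn path as above:

var spawn = require('child_process').spawn;

var ovpnProcess = spawn('openvpn', ['ipvanish/' + account.ip + '.ovpn']);
ovpnProcess.stdout.on('data', function(data) {
    console.log('stdout: ' + data);
});
ovpnProcess.stderr.on('data', function(data) {
    console.log('stderr: ' + data);
});
ovpnProcess.on('close', function(code) {
    console.log('closing code: ' + code);
});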

Nodejs - data transfer between server and client

I was given a task to send JSON strings from client to server and from server to client whenever there is a new record to send.
I decided to build a TCP connection between server and client to transfer data (suggest a better way in Node.js if there is one).
The problem is that I was supposed to use a delimiter to separate the JSON strings from one another, and I am afraid the JSON string might contain the delimiter string inside the object. I am looking for a better way to separate two JSON strings.
Below is my code. Please help me.
Client
var net = require('net')
  , client = new net.Socket();

var chunk = ''
  , dlim_index = -1
  , delimit = '~~';

client.connect(config.Port, config.IpAddress, function () {
    console.log('Server Connected');
    client.write('CLIENTID:' + process.argv[2]);
    client.write(delimit);
});

client.on('data', function (data) {
    var recvData = data.toString().trim();
    chunk += recvData;
    dlim_index = chunk.indexOf(delimit); // was chunk.indexOf(recvData), which never searches for the delimiter
    console.log(data);
    while (dlim_index > -1) {
        var useData = chunk.substring(0, dlim_index);
        if (useData == 'SUCCESS') {
            controller.listenOutQueue(function (dataToSend) {
                var object = JSON.parse(dataToSend);
                client.write(dataToSend);
                client.write(delimit);
            });
        } else {
            var record = JSON.parse(useData);
            controller.insertIntoQueue(record, function (status) {
            });
        }
        chunk = chunk.substring(dlim_index + delimit.length);
        dlim_index = chunk.indexOf(delimit);
    }
});

client.on('close', function () {
    console.log('Connection closed');
});

client.setTimeout(50000, function () {
    //client.destroy();
});
Server
var net = require('net')
  , server = net.createServer()
  , delimit = '~~'
  , clients = [];

controller.listenOutQueue(function (dataToSend) {
    client.write(dataToSend);
    client.write(delimit);
});

server.on('connection', function (socket) {
    var chunk = '';
    var dlim_index = -1;
    socket.on('data', function (data) {
        var recvData = data.toString().trim();
        chunk += recvData;
        dlim_index = chunk.indexOf(delimit);
        while (dlim_index > -1) {
            var useData = chunk.substring(0, dlim_index);
            if (useData.substring(0, 9) == 'CLIENTID:') {
                socket.clientid = useData.replace('CLIENTID:', '');
                console.log('Client Id: ' + socket.clientid);
                clients.push(socket);
                var successMessage = "SUCCESS";
                socket.write(successMessage);
                socket.write(delimit);
            } else {
                controller.insertIntoQueue(JSON.parse(useData), function (status) {
                });
            }
            chunk = chunk.substring(dlim_index + delimit.length);
            dlim_index = chunk.indexOf(delimit);
        }
    });
    socket.on('end', function () {
        console.log('Connection Closed (' + socket.clientid + ')');
    });
    socket.on('error', function (err) {
        console.log('SOCKET ERROR:', err);
    });
});

server.listen(config.Port, config.IpAddress);
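On the delimiter worry: JSON.stringify never emits a raw newline in its output (newlines inside strings are escaped as \n), so a single '\n' is a safe frame separator. A minimal newline-delimited sketch of just the framing logic (the controller/queue calls from above are omitted):

var net = require('net');

var server = net.createServer(function (socket) {
    var chunk = '';
    socket.on('data', function (data) {
        chunk += data.toString();
        var lines = chunk.split('\n');
        chunk = lines.pop(); // keep the trailing partial frame, if any
        lines.forEach(function (line) {
            if (line.length === 0) return;
            var record = JSON.parse(line); // each complete line is one JSON document
            console.log('received:', record);
        });
    });
});

// sender side: one JSON document per line
// socket.write(JSON.stringify(record) + '\n');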

Node.js how to handle packet fragmentation with net.Server

When a net.Server receives data that exceeds 1500 bytes (the default MTU), the 'data' event fires once for each fragment of the packet. Is there a way to receive the whole packet in a single 'data' callback?
Thanks.
Try this
var net = require('net');
var socktimeout = 600000;
var svrport = your_port; // placeholder: your listening port

var svr = net.createServer(function(sock) {
    var mdata = new Buffer(0);
    //console.log('Connected: ' + sock.remoteAddress + ':' + sock.remotePort);
    sock.setTimeout(socktimeout, function() {
        sock.end("timeout");
        sock.destroy();
    });
    sock.on('data', function(data) {
        if (mdata.length != 0) {
            mdata = Buffer.concat([mdata, data]);
        } else {
            mdata = data;
        }
        var len = got_your_packet_length(mdata); // placeholder: derive the full message length
        if (mdata.length == len) {
            do_your_job(mdata); // placeholder: handle one complete message
            mdata = new Buffer(0);
        }
    });
    sock.on('error', function(err) { // Handle the connection error.
        console.log('error: ' + err);
    });
});
svr.listen(svrport);
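If the message length cannot be derived from the payload itself, a common alternative is to prefix every message with a fixed-size length field. A sketch using a 4-byte big-endian prefix and the newer Buffer.alloc API (handleMessage is a placeholder for your own handler):

var net = require('net');

var server = net.createServer(function (sock) {
    var pending = Buffer.alloc(0);
    sock.on('data', function (data) {
        pending = Buffer.concat([pending, data]);
        // a frame is complete once the 4-byte header and the full body have arrived
        while (pending.length >= 4) {
            var bodyLen = pending.readUInt32BE(0);
            if (pending.length < 4 + bodyLen) break; // wait for more data
            var body = pending.slice(4, 4 + bodyLen);
            pending = pending.slice(4 + bodyLen);
            handleMessage(body); // placeholder: process one whole message
        }
    });
});

// sender side: length header first, then the payload
// var payload = Buffer.from(JSON.stringify(record));
// var header = Buffer.alloc(4);
// header.writeUInt32BE(payload.length, 0);
// sock.write(Buffer.concat([header, payload]));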
