Node child process exec async only called once - node.js

I have an array with some IDs like a = [abc,cde,efg]. I pass this array, and a second one containing the names that fit the IDs, to a child process in Node.
I want to wait until the child process has finished and only then process the next array member.
The code to achieve this is (with the suggested edit):
function downloadSentinel(promObj) {
    return new Promise((resolve,reject) => {
        function makeRequest(url, i, callback) {
            url = promObj.ID;
            name = promObj.Name;
            var sys = require('util'),
                exec = require('child_process').exec,
                child;
            var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
            console.log(directory);
            child = exec(directory + '\\downloadProducts.sh' + promObj.ID[i] + ' ' + promObj.Name[i], function (error, stdout, stderr) {
                child.on("error", function (error) {
                    console.log(error);
                })
                child.stdout.on('data', function (data) {
                    console.log(data.toString());
                });
                child.on('exit', function(exit) {
                    console.log(exit);
                    callback();
                })
            })
        }
        async.eachOfLimit(promObj.requestURLS, 2, makeRequest, function (err) {
            if (err) reject(promObj)
            else {
                resolve(promObj);
            }
        });
    });
}
I am using npm-async to control the concurrency because I want to limit the curl requests made inside the shell script. The shell script works without errors. Now the script is only called twice because of the async.eachOfLimit limit, and the remaining array IDs are never processed.
EDIT
This is the code I tried last, but then all possible URLs are invoked instead of only 2. I found this here: Another Stackoverflow question. I also tried with async.timesLimit.
function downloadSentinel(promObj,req,res) {
    async.eachOfLimit(promObj.requestURLS, 2, function (value,i,callback) {
        console.log('I am here ' + i + 'times');
        url = promObj.requestURLS;
        name = promObj.Name;
        console.log(promObj.requestURLS);
        var sys = require('util'),
            exec = require('child_process').exec,
            child;
        var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
        console.log(directory);
        console.log("executing:", directory + '\\downloadProducts.sh ' + promObj.requestURLS[i] + ' ' + promObj.Name[i]);
        child = exec(directory + '\\downloadProducts.sh' + ' ' + promObj.requestURLS[i] + ' ' + promObj.Name[i], function (error, stdout, stderr) {
            if (error != null){
                console.log(error);
            }
            // this task is resolved
            return callback(error, stdout);
        });
    }, function (err) {
        if (err) console.log('error')
        else {
            console.log('Done');
        }
    });
}
What have I missed? Thanks in advance.

exec is asynchronous; it does not wait for the launched process to finish.
Move the call to callback() inside the child.on('exit', ...) handler, or use execSync.
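A minimal sketch of that change, reusing the promObj fields and directory logic from the question inside downloadSentinel (untested; treating a non-zero exit code as an error is an assumption here):

var exec = require('child_process').exec;

function makeRequest(value, i, callback) {
    var directory = __dirname.substring(0, __dirname.indexOf("\\app_api"));
    var child = exec(directory + '\\downloadProducts.sh ' + promObj.requestURLS[i] + ' ' + promObj.Name[i]);

    child.stdout.on('data', function (data) {
        console.log(data.toString());
    });
    child.on('error', function (error) {
        console.log(error);
    });
    child.on('exit', function (code) {
        // the work for this array element is only finished here,
        // so this is where async's per-item callback belongs
        callback(code === 0 ? null : new Error('exit code ' + code));
    });
}

async.eachOfLimit(promObj.requestURLS, 2, makeRequest, function (err) {
    if (err) reject(promObj);
    else resolve(promObj);
});

Alternatively, execSync blocks until the script finishes, after which callback() can be called directly.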

Exit Node Process After Successful fs.appendFile

I'm having trouble creating processes in parallel with Node and exiting when they're done with a simple HTTP GET request. I've noticed that if I fire a process.exit() inside a callback for appendFile, some files will not be created or appended in a Node cluster setup. Ideally, the way below is how I would like to fire events, since the process is exited as soon as the job is done:
var rp = require("request-promise"),
config = require("./config"),
cluster = require("cluster"),
os = require("os"),
fs = require("fs");
var keywordArray = [
'keyword1',
'keyword2',
...
];
if (cluster.isMaster) {
var numCPUs = os.cpus().length;
var clusterDivision = Math.ceil(keywordArray.length/numCPUs);
// Reset the json if previously set
keywordArray.forEach(function(arrayItem) {
fs.unlink(config.dataDirectory + arrayItem + '.json', function(err) {
if (err) console.error(err);
console.log('successfully unlinked ' + arrayItem + '.json from ' + config.dataDirectory);
});
});
// Create a worker for each CPU
// Separate the array out evenly for each worker
for (var j=1;j<=numCPUs;j++) {
var tempArray = [];
var removed = keywordArray.splice(0, clusterDivision);
if (removed.length > 0) {
// The array contains something so let's do something with the keyword
console.log('creating a worker');
cluster.fork().send(removed);
} else {
// We don't need a cluster here
}
}
process.on('exit', function() {
console.log('exited');
});
} else if (cluster.isWorker) {
// Code to run if we're in a worker process
// Send the object we created above from variables so they're available to the workers
process.on('message', function(seperatedArrayItem) {
seperatedArrayItem.forEach(function(arrayItem) {
function radarRequest(err, response, body) {
var responseBody = JSON.parse(body);
console.log(arrayItem);
fs.appendFileSync(config.dataDirectory + arrayItem + '.json', JSON.stringify(responseBody.results, null, '\t'), function (err) {
if (err) console.error(err);
console.log('success writing file');
});
}
rp({
url: config.radarSearchURI +
'?key='+ config.apiKey +
'&location=' + config.latitude + ',' + config.longitude +
'&radius=' + config.searchRadius +
'&keyword=' + arrayItem, headers: config.headers
}, radarRequest);
});
setTimeout(function() {
process.exit(0);
}, 5000);
});
}
The only way I can make sure all files are properly appended is by using a Timeout, which is exactly what I don't want to - and shouldn't - do. Is there another way I can ensure an appendFile has happened successfully and then kill the node process? Here's a way that works (assuming the process doesn't take longer than 5 seconds):
process.on('message', function(seperatedArrayItem) {
seperatedArrayItem.forEach(function(arrayItem) {
function radarRequest(err, response, body) {
var responseBody = JSON.parse(body);
console.log(arrayItem);
fs.appendFile(config.dataDirectory + arrayItem + '.json', JSON.stringify(responseBody.results, null, '\t'), function (err) {
if (err) console.error(err)
console.log('success writing file');
});
}
rp({
url: config.radarSearchURI +
'?key='+ config.apiKey +
'&location=' + config.latitude + ',' + config.longitude +
'&radius=' + config.searchRadius +
'&keyword=' + arrayItem, headers: config.headers
}, radarRequest);
});
setTimeout(function() {
process.exit(0);
}, 5000);
});
You can use an async flow control module like async to kill the process after all files are written. I'd also recommend cluster.worker.disconnect() so that the node process simply exits gracefully, but that isn't a requirement.
async.forEach(seperatedArrayItem, function(item, done){
// append file and call 'done' when it is written.
}, function(){
// Will be called when all item 'done' functions have been called.
cluster.worker.disconnect();
});
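A rough sketch of how that could look in the worker from the question (reusing the same rp, config, fs, and cluster objects assumed there; untested):

var async = require("async");

process.on('message', function (seperatedArrayItem) {
    async.forEach(seperatedArrayItem, function (arrayItem, done) {
        rp({
            url: config.radarSearchURI +
                '?key=' + config.apiKey +
                '&location=' + config.latitude + ',' + config.longitude +
                '&radius=' + config.searchRadius +
                '&keyword=' + arrayItem, headers: config.headers
        }, function (err, response, body) {
            if (err) return done(err);
            // 'done' is only called once the append has actually finished
            fs.appendFile(config.dataDirectory + arrayItem + '.json',
                JSON.stringify(JSON.parse(body).results, null, '\t'), done);
        });
    }, function (err) {
        if (err) console.error(err);
        cluster.worker.disconnect(); // exit gracefully once every file is written
    });
});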
Node's fs.appendFile( ... ) is an asynchronous function, so it expects us to pass a callback through which we learn that it has finished its main operation, whether some error occurred, and so on.
This means we need to call Node's process.exit( ... ) inside the provided callback. I've written this code to test:
'use strict';
var fs = require('fs');
function jsonValue(obj) {
return JSON.stringify(obj, null, '\t');
}
fs.appendFile('file.json', jsonValue(['t', 'e', 's', 't']), function(error) {
if (error) {
throw error;
}
console.log('success writing file'); // no error, so log...
process.exit(); // and exit right now
console.log('exited?'); // this will not be printed
});
Well, it worked as defined.
Another way that works is to use the synchronous version of fs.appendFile( ... ) and call process.exit() sequentially:
fs.appendFileSync('file.json', jsonValue(['t', 'e', 's', 't']));
console.log('success writing file'); // no error (I hope so =), so log...
process.exit(); // and exit right now
console.log('exited?'); // this will not be printed
This is clean code and works, but you lose the robustness and convenience gained with the callback...
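If you take the synchronous route anyway, a small sketch of how errors can still be handled is to wrap the call in try/catch (using the jsonValue helper defined above):

try {
    fs.appendFileSync('file.json', jsonValue(['t', 'e', 's', 't']));
    console.log('success writing file'); // reached only if the write succeeded
    process.exit();
} catch (error) {
    console.error(error); // the sync call reports failures as exceptions
    process.exit(1);
}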

execute a batch file from nodejs

Would it be possible to run a batch file from a nodejs application?
After googling for some time I found that we can use child_process to execute the commands. I tried the same module but without success.
Could somebody guide me?
This creates a NodeJS module with a single function named exec() to execute batch scripts.
var exec = require('child_process').exec,
path = require('path'),
os = require('os'),
fs = require('fs');
// HACK: to make our calls to exec() testable,
// support using a mock shell instead of a real shell
var shell = process.env.SHELL || 'sh';
// support for Win32 outside Cygwin
if (os.platform() === 'win32' && process.env.SHELL === undefined) {
shell = process.env.COMSPEC || 'cmd.exe';
}
// Merges the current environment variables and custom params for the environment used by child_process.exec()
function createEnv(params) {
var env = {};
var item;
for (item in process.env) {
env[item] = process.env[item];
}
for(item in params) {
env[item] = params[item];
}
return env;
}
// scriptFile must be a full path to a shell script
exports.exec = function (scriptFile, workingDirectory, environment, callback) {
var cmd;
if (!workingDirectory) {
callback(new Error('workingDirectory cannot be null'), null, null);
}
if (!fs.existsSync(workingDirectory)) {
callback(new Error('workingDirectory path not found - "' + workingDirectory + '"'), null, null);
}
if (scriptFile === null) {
callback(new Error('scriptFile cannot be null'), null, null);
}
if (!fs.existsSync(scriptFile)) {
callback(new Error('scriptFile file not found - "' + scriptFile + '"'), null, null);
}
// transform windows backslashes to forward slashes for use in cygwin on windows
if (path.sep === '\\') {
scriptFile = scriptFile.replace(/\\/g, '/');
}
// TODO: consider building the command line using a shell with the -c argument to run a command and exit
cmd = '"' + shell + '" "' + scriptFile + '"';
// execute script within given project workspace
exec(cmd,
{
cwd: workingDirectory,
env: createEnv(environment)
},
function (error, stdout, stderr) {
// TODO any optional processing before invoking the callback
callback(error, stdout, stderr);
}
);
};
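A possible way to call this module, assuming it is saved as batch.js (the script path and environment value here are placeholders):

var batch = require('./batch.js');

batch.exec('/full/path/to/myscript.sh', process.cwd(), { NODE_ENV: 'test' },
    function (error, stdout, stderr) {
        if (error) return console.error(error);
        console.log(stdout); // output of the batch/shell script
    });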
I have found the solution for it, and it works fine for me. This opens up a new command window and runs my main Node.js script in a child process. You need not give the full path of cmd.exe; I was making that mistake.
var spawn = require('child_process').spawn,
ls = spawn('cmd.exe', ['/c', 'startemspbackend.bat']);
ls.stdout.on('data', function (data) {
console.log('stdout: ' + data);
});
ls.stderr.on('data', function (data) {
console.log('stderr: ' + data);
});
ls.on('exit', function (code) {
console.log('child process exited with code ' + code);
});
An easier way I know of to execute that is the following code:
function Process() {
const process = require('child_process');
var ls = process.spawn('script.bat');
ls.stdout.on('data', function (data) {
console.log(data);
});
ls.stderr.on('data', function (data) {
console.log(data);
});
ls.on('close', function (code) {
if (code == 0)
console.log('Stop');
else
console.log('Start');
});
};
Process();

First function runs - never called

I am working through Smashing Node.js - the first book example shows a function that brings all the current directory's files into a list, and then file(i) is run without being called explicitly. I don't know why - the input parameter to the function is used within the program and incremented as well, but where does this value come from? How is this function called in the first place?
/**
* Module dependencies.
*/
var fs = require('fs')
, stdin = process.stdin
, stdout = process.stdout
/**
* Read the current directory.
*/
fs.readdir(__dirname, function (err, files) {
console.log('');
if (!files.length) {
return console.log(' \033[31m No files to show!\033[39m\n');
}
console.log(' Select which file or directory you want to see\n');
// called for each file walked in the directory
var stats = {};
function file(i) {
var filename = files[i];
fs.stat(__dirname + '/' + filename, function (err, stat) {
stats[i] = stat;
if (stat.isDirectory()) {
console.log(' '+i+' \033[36m' + filename + '/\033[39m');
} else {
console.log(' '+i+' \033[90m' + filename + '\033[39m');
}
if (++i == files.length) {
read();
} else {
file(i);
}
});
}
// read user input when files are shown
function read () {
console.log('');
stdout.write(' \033[33mEnter your choice: \033[39m');
stdin.resume();
stdin.setEncoding('utf8');
stdin.on('data', option);
}
// called with the option supplied by the user
function option (data) {
var filename = files[Number(data)];
if (!filename) {
stdout.write(' \033[31mEnter your choice: \033[39m');
} else {
stdin.pause();
if (stats[Number(data)].isDirectory()) {
fs.readdir(__dirname + '/' + filename, function (err, files) {
console.log('');
console.log(' (' + files.length + ' files)');
files.forEach(function (file) {
console.log(' - ' + file);
});
console.log('');
});
} else {
fs.readFile(__dirname + '/' + filename, 'utf8', function (err, data) {
console.log('');
console.log('\033[90m' + data.replace(/(.*)/g, ' $1') + '\033[39m');
});
}
}
}
// start by walking the first file
file(0);
});
I must be misunderstanding your question... because it seems so straightforward.
The line at the bottom:
// start by walking the first file
file(0);
is the line that kicks off the file(i) chain. That line is at the bottom of the fs.readdir callback and is reached after the other functions and variables in the main body of that callback have been defined.
Am I missing something in your question?

Node - how to wait on async operations?

Sorry, just starting with node. This might be a very novice question.
Let's say I have some code which reads some files from a directory in the file system:
var fs = require('fs');
fs.readdir(__dirname + '/myfiles', function (err, files) {
if (err) throw err;
files.forEach(function (fileName) {
fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
if (err) throw err;
console.log('finished reading file ' + fileName + ': ' + data);
module.exports.files.push(data);
});
});
});
Note that all of this occurs asynchronously. Let's also say I have a Mocha test which executes this code:
describe('fileProvider', function () {
describe('#files', function () {
it.only('files array not empty', function () {
assert(fileProvider.files.length > 0, 'files.length is zero');
});
});
});
The mocha test runs before the files are finished being read. I know this because I see the console.log statement after I see the little dot that indicates a mocha test being run (at least I think that is what is being indicated). Also, if I surround the assert with a setTimeout, the assert passes.
How should I structure my code so that I can ensure the async file operations are completed? Note that this is not just a problem with testing - I need the files to be loaded fully before I can do real work in my app as well.
I don't think the right answer is to read files synchronously, because that will block the Node request / response loop, right?
Bonus question:
Even if I put the assert in a setTimeout with a 0 timeout value, the test still passes. Is this because just putting it in a setTimeout kicks it to the end of the processing chain or something so the filesystem work finishes first?
You can implement a completion callback that fires after all files have been read.
exports.files = [];
exports.initialize = initialize;
function initialize(callback) {
var fs = require('fs');
fs.readdir(__dirname + '/myfiles', function (err, files) {
if (err) throw err;
files.forEach(function (fileName) {
fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
if (err) throw err;
console.log('finished reading file ' + fileName + ': ' + data);
exports.files.push(data);
if (exports.files.length == files.length) {
callback();
}
});
});
});
}
You can call the file operation method by doing something like:
var f = require('./files.js');
if (f.files.length < 1) {
console.log('initializing');
f.initialize(function () {
console.log('After: ' + f.files.length);
var another = require('./files.js');
console.log('Another module: ' + another.files.length);
});
}
EDIT: Since you want to only have to call this once, you could initialize it once when the application loads. According to Node.js documentation, modules are cached after the first time they are loaded. The two above examples have been edited as well.
To avoid being caught up in nested callbacks, you might want to use async's each, which will allow you to do the tasks asynchronously in a non-blocking manner:
https://github.com/caolan/async#each
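For example, a rough sketch of the same initialize function using async.each (same myfiles directory and exports.files array as above; untested):

var fs = require('fs');
var async = require('async');

exports.files = [];
exports.initialize = function (callback) {
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) return callback(err);
        async.each(files, function (fileName, done) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) return done(err);
                exports.files.push(data);
                done();
            });
        }, callback); // fires once after every file is read, or on the first error
    });
};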
I think that's a good test; the same thing would happen in any app that used your module, i.e. its code could be run before files is set. What you need to do is create a callback like #making3 suggests, or use promises. I haven't used mocha, but there's a section on asynchronous calls. You could export the promise itself:
module.exports.getFiles = new Promise((resolve, reject) => {
var datas = [];
fs.readdir(__dirname + '/myfiles', function (err, files) {
if (err) {
reject(err);
return;
}
files.forEach(function (fileName) {
fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
if (err) {
reject(err);
return;
}
console.log('finished reading file ' + fileName + ': ' + data);
datas.push(data);
if (datas.length == files.length) {
resolve(datas);
}
});
});
});
});
chai-as-promised lets you work directly with promises using eventually, or you can use the callback passed to your test, I think:
describe('fileProvider', function () {
describe('#files', function () {
it.only('files array not empty', function (done) {
fileProvider.getFiles.then(function(value) {
assert(value.length > 0, 'files.length is zero');
done();
}, function(err) {
done(err);
})
});
});
});
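With chai-as-promised the same check could be sketched roughly like this (assuming chai and chai-as-promised are installed; mocha waits for a returned promise):

var chai = require('chai');
chai.use(require('chai-as-promised'));
var expect = chai.expect;

it('files array not empty', function () {
    // returning the assertion lets mocha wait for the promise to settle
    return expect(fileProvider.getFiles).to.eventually.have.length.above(0);
});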

node.js multiple SerialPorts

I am using the serialport module for Node.js and need to be able to open, write to, and read from a variable number of serial ports.
So what I am doing is first creating an array for the SerialPort instances, and then processing them in a loop:
var serialport = require("serialport");
var SerialPort = serialport.SerialPort; // localize object constructor
var devs = ["/dev/tty.SerialPort","/dev/tty.HHW-SPP-1800-2-DevB"];
var ports = [];
for (var i = 0; i < devs.length; i++) {
console.log(devs[i]);
var port = new SerialPort(devs[i],{ baudrate:9600, parser: serialport.parsers.readline("\n") });
ports.push(port);
}
Then I have another function that I call periodically to read / write from the ports:
function minute(){
for (var i = 0; i < ports.length; i++) {
console.log(i);
ports[i].on("open", function (path) {
console.log('opened');
ports[i].write("Helo World\n", function(err,res) {
if(err) console.log('err ' + err);
console.log('results ' + res);
});
ports[i].on("data", function (data) {
console.log("here: "+data);
});
});
}
}
The problem is that the minute() function executes, but it does not attempt to open or read/write the ports.
What am I doing wrong? And is there a better way of doing this?
There are a couple of misconceptions at play here.
Firstly, you don't need to periodically poll your ports. Node.js uses an event loop (more or less) to handle IO and will do the polling for you. So all you need to do is set up the callbacks for the open event once for each port. In your code, it looks like you are re-adding the callbacks each time minute() is called. That is not necessary.
Secondly, JavaScript variables declared with var don't have block scoping. Instead you are inadvertently creating a closure, and your code is in error. In the following block:
for (var i = 0; i < ports.length; i++) {
ports[i].on("open", function (path) {
ports[i].write("Helo World\n", function(err,res) {
if(err) console.log('err ' + err);
console.log('results ' + res);
});
ports[i].on("data", function (data) {
console.log("here: "+data);
});
});
}
When your callback for ports.on("open") is invoked, the value of i in ports[i].write and ports[i].on("data") isn't the value of i at the time the callback was set up, as you are expecting. Instead, because you have created a closure, the value of i isn't read until the callback is executed. In this example, in every one of your callbacks i will be set to ports.length, which was the last evaluated value of i.
I've created a plunkr that illustrates the problem with your for loop.
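In plain text, the effect that plunkr demonstrates is roughly this (not serial-port specific):

var callbacks = [];
for (var i = 0; i < 3; i++) {
    callbacks.push(function () {
        console.log(i); // i is read when the callback runs, not when it is created
    });
}
callbacks.forEach(function (cb) { cb(); }); // prints 3, 3, 3 rather than 0, 1, 2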
One way to fix this problem is to use an immediately invoked anonymous function and bind the value of i to a new local variable. In the code below, (function(index){ ... })(i); executes immediately and binds index to the appropriate value of i.
ports[i].on("open", function (path) {
(function(index) {
ports[index].write("Helo World\n", function(err,res) {
if(err) console.log('err ' + err);
console.log('results ' + res);
});
ports[index].on("data", function (data) {
console.log("here: "+data);
});
})(i);
});
You could also pull that logic out into a separate function. setupHandlers() executes immediately and is bound to the proper port.
for (var i = 0; i < ports.length; i++) {
setupHandlers(ports[i]);
}
function setupHandlers(port) {
port.on("open", function (path) {
port.write("Helo World\n", function(err,res) {
if(err) console.log('err ' + err);
console.log('results ' + res);
});
port.on("data", function (data) {
console.log("here: "+data);
});
});
}
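Another option, not in the original answer, is to avoid the shared loop variable entirely by iterating over the ports with forEach (a sketch using the same handlers):

ports.forEach(function (port) {
    port.on("open", function (path) {
        port.write("Helo World\n", function (err, res) {
            if (err) console.log('err ' + err);
            console.log('results ' + res);
        });
        port.on("data", function (data) {
            console.log("here: " + data);
        });
    });
});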
