Node.js: sequentially running multiple childProcess.execFile processes from Q.all - node.js

I am writing a program to batch print drawings. They need to be printed in a certain order, e.g. drawing A, B, C. The program plots the correct number of prints; it's just that the order is random. I need the first item in the array to complete before the next starts, and so on.
startMe(newPaths)
    .then(function (result) {
    });

function startMe(dwgPaths) {
    return q.all(buildCalls(dwgPaths));
}

var buildCalls = function (dwgPaths) {
    var calls = [];
    var scFiles = [tmpDir + "425011-fab.scr", tmpDir + "425011-pk.scr", tmpDir + "425011-sc.scr"];
    for (var sc in scFiles) {
        for (var i in dwgPaths) {
            calls.push(callAccoreConsole(dwgPaths[i], scFiles[sc]));
        }
    }
    return calls;
};

function callAccoreConsole(dwgPath, scrFile) {
    var deferred = q.defer();
    childProcess.execFile('C:/Program Files/Autodesk/AutoCAD 2015/accoreconsole.exe', ['/i', dwgPath, '/s', scrFile], function (err, data) {
        if (err)
            return deferred.resolve({ success: false, reason: err });
        deferred.resolve({ success: true });
    });
    return deferred.promise;
}
The code below works the way I want. I print 2 files, 3 copies each: file A using script 1, then file B using script 1, then it repeats for the other scripts. I get a total of 6 prints in "three groups", A,B,A,B,A,B, each with the appropriate script run. With the code above I may get B,A,A,B,A,A.
callAccoreConsole(newPaths[0], scFiles2[0])
    .then(function (result) {
        callAccoreConsole(newPaths[1], scFiles2[0])
            .then(function (result) {
                callAccoreConsole(newPaths[0], scFiles2[1])
                    .then(function (result) {
                        callAccoreConsole(newPaths[1], scFiles2[1])
                            .then(function (result) {
                                callAccoreConsole(newPaths[0], scFiles2[2])
                                    .then(function (result) {
                                        callAccoreConsole(newPaths[1], scFiles2[2])
                                            .then(function (result) {
                                            });
                                    });
                            });
                    });
            });
    });
I have been struggling with this for a while. I found the code below and got it to work for my application, but it doesn't seem the most efficient way to write it. If anyone has a more compact way, please let me know. Thanks.
var itemsToProcess = [];
for (var sc in scFiles) {
    for (var i in newPaths) {
        itemsToProcess.push({ file: newPaths[i], script: scFiles[sc] });
    }
}

function getDeferredResult(a) {
    return (function (items) {
        var deferred;
        if (items.length === 0) {
            return q.resolve(true);
        }
        deferred = q.defer();
        var payload = {
            file: items[0].file,
            script: items[0].script
        };
        callAccoreConsole2(payload)
            .then(function (result) {
                deferred.resolve(items.splice(1));
            });
        return deferred.promise.then(getDeferredResult);
    }(a));
}

q.resolve(itemsToProcess)
    .then(getDeferredResult)
    .then(function (result) {
        return res.send({ success: true });
    });

As you are constructing your array of promises, you are simultaneously invoking the execFile method.
function callAccoreConsole(dwgPath, scrFile) {
    var deferred = q.defer();
    childProcess.execFile('C:/Program Files/Autodesk/AutoCAD 2015/accoreconsole.exe', ['/i', dwgPath, '/s', scrFile], function (err, data) {
        if (err)
            return deferred.resolve({ success: false, reason: err });
        deferred.resolve({ success: true });
    });
    return deferred.promise;
}
So, instead of using callAccoreConsole to run the process and return a deferred, you need something that calls that method eventually -
calls.push(q.fcall(callAccoreConsole, dwgPaths[i], scFiles[sc]));
I haven't tried this specifically, but the gist is that you are calling your method at the same time you are creating the deferred for it.
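If you also need the calls to run strictly one after another, rather than merely be created lazily, a common Q pattern is to fold the work items into a single promise chain with reduce. A minimal sketch, assuming the callAccoreConsole, newPaths, and scFiles from the question:

function runSequentially(dwgPaths, scFiles) {
    // Build the {file, script} pairs first, without starting anything.
    var items = [];
    scFiles.forEach(function (script) {
        dwgPaths.forEach(function (file) {
            items.push({ file: file, script: script });
        });
    });
    // Chain them: each call starts only after the previous promise resolves.
    return items.reduce(function (chain, item) {
        return chain.then(function () {
            return callAccoreConsole(item.file, item.script);
        });
    }, q.resolve(true));
}

runSequentially(newPaths, scFiles).then(function () {
    // all prints are done, in A,B,A,B,A,B order
});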

Related

Issue with asynchronous mongodb query

I am trying to loop through an array and find the number of tickets assigned to each person.
Unfortunately, I noticed that my taskcount ends up with the same values but in a different order, because of the asynchronous nature of the queries.
Some queries take longer than others, so whichever finishes first gets inserted first, and hence my array has the same values in a different order. I want to avoid that: only once a query completes should the next value from the array be picked up and used to search the db. How can I modify my existing code?
exports.find_task_count = function (callback) {
    var names = ['Evan', 'Surajit', 'Isis', 'Millie', 'Sharon', 'Phoebe', 'Angel', 'Serah'];
    var taskcount = [];
    var resultsCount = 0;
    for (var i = 0; i < names.length; i++) {
        _tasks.find({ 'assignee': names[i] }, function (err, tickets) {
            resultsCount++;
            if (err) {
                console.log(err);
                return callback(err);
            } else {
                taskcount.push(tickets.length);
                if (resultsCount === names.length) {
                    return callback(taskcount);
                    taskcount = []; // note: unreachable, it comes after the return above
                }
            }
        });
    }
}
You can use the async module, which is designed to handle exactly such scenarios.
I have updated the code as follows:
var async = require('async');

exports.find_task_count = function (callback) {
    var names = ['Evan', 'Surajit', 'Isis', 'Millie', 'Sharon', 'Phoebe', 'Angel', 'Serah'];
    async.map(names, function (name, iterateeCallback) {
        _tasks.find({ 'assignee': name }, function (err, tickets) {
            if (err) {
                return iterateeCallback(err);
            }
            return iterateeCallback(null, tickets.length);
        });
    }, function (error, results) {
        if (error) {
            return callback(error);
        }
        return callback(null, results);
    });
};
As per the documentation of async:
"Note, that since this function applies the iteratee to each item in parallel, there is no guarantee that the iteratee functions will complete in order. However, the results array will be in the same order as the original coll."
If you still want to process the array in series, use mapSeries instead of map in the above code, as shown below.
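A minimal sketch of the series variant (same iteratee as above, assuming the same _tasks collection):

async.mapSeries(names, function (name, iterateeCallback) {
    // each find() starts only after the previous one has completed
    _tasks.find({ 'assignee': name }, function (err, tickets) {
        if (err) {
            return iterateeCallback(err);
        }
        return iterateeCallback(null, tickets.length);
    });
}, function (error, results) {
    // results[i] is the ticket count for names[i], in the original order
});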

NodeJS - How to sequentially create a file and then read from it?

I'm entirely new to NodeJS and this problem has been bugging me for days now. I'm pulling my hair out looking for a working solution. I'm trying to get information from the database and write it to a text file, which I later read from. I cannot do it sequentially: the file is always read first and then created. I don't know what approach I should take to overcome this issue. Any working solution/approach would help tremendously.
My connection file that retrieves information from the database:
this.getInfo = function () {
    return new Promise(function (resolve, reject) {
        db.query('SELECT ai_code from user_code',
            function (err, rows) {
                if (err)
                    return reject(err);
                resolve(rows);
            });
    });
}

module.exports = {
    getInfo: this.getInfo
}
The functions that call the method to receive the data:
function team1Code() {
    db.getInfo().then(function (result) {
        var code = JSON.stringify(result[0]);
        var c = json2plain(code, options);
        c = c.replace('Ai_code:', '');
        fs.writeFile('./scr1.js', c, { overwrite: true, encoding: 'ascii' }, function (err) {
            if (err) return console.log(err);
        });
    });
}

function team2Code() {
    db.getInfo().then(function (result) {
        var code = JSON.stringify(result[1]);
        var c = json2plain(code, options);
        c = c.replace('Ai_code:', '');
        fs.writeFile('./scr2.js', c, { overwrite: true, encoding: 'ascii' }, function (err) {
            if (err) return console.log(err);
        });
    });
}
Finally, this is where we try to read the content of the files.
vmHandler.init = function (apiValues) {
    team1Code();
    team2Code();
    // Team 1
    try {
        vmHandler.team1.scriptCode = fs.readFileSync('./scr1.js');
        vmHandler.team1.script = new vm.Script(vmHandler.team1.scriptCode);
        vmHandler.team1.sandbox = { api: new Api(apiValues, 1) };
        vmHandler.team1.context = new vm.createContext(vmHandler.team1.sandbox);
    } catch (err) {}
    // Team 2
    try {
        vmHandler.team2.scriptCode = fs.readFileSync('./scr2.js');
        vmHandler.team2.script = new vm.Script(vmHandler.team2.scriptCode);
        vmHandler.team2.sandbox = { api: new Api(apiValues, 2) };
        vmHandler.team2.context = new vm.createContext(vmHandler.team2.sandbox);
    } catch (err) {
        console.log("ERROR: " + err);
    }
};
The approach you are taking is slightly unfavorable, since the function calls
team1Code();
team2Code();
are not guaranteed to complete before the next try-catch block executes. Both calls are asynchronous, so the following lines run before they finish, even though they work with promises. Promises themselves are asynchronous; what they guarantee is that the code inside any then won't run until the promise settles, while the rest of the code runs as usual. So, here is a way to do your tasks, with updated code:
function writeFile(fileName, data) {
    return new Promise(function (resolve, reject) {
        var code = JSON.stringify(data);
        var c = json2plain(code, options);
        c = c.replace('Ai_code:', '');
        fs.writeFile(fileName, c, { overwrite: true, encoding: 'ascii' }, function (err) {
            if (err)
                return reject(err);
            resolve();
        });
    });
}
//Finally, this is where we try to read the content of the files.
vmHandler.init = function (apiValues) {
    var files = ['./scr1.js', './scr2.js'];
    db.getInfo().then(function (result) {
        var allPromise = [];
        for (var key in files) {
            allPromise.push(writeFile(files[key], result[key]));
        }
        return Promise.all(allPromise);
    }).then(function (res) {
        // Team 1
        try {
            vmHandler.team1.scriptCode = fs.readFileSync('./scr1.js');
            vmHandler.team1.script = new vm.Script(vmHandler.team1.scriptCode);
            vmHandler.team1.sandbox = { api: new Api(apiValues, 1) };
            vmHandler.team1.context = new vm.createContext(vmHandler.team1.sandbox);
        } catch (err) {}
        // Team 2
        try {
            vmHandler.team2.scriptCode = fs.readFileSync('./scr2.js');
            vmHandler.team2.script = new vm.Script(vmHandler.team2.scriptCode);
            vmHandler.team2.sandbox = { api: new Api(apiValues, 2) };
            vmHandler.team2.context = new vm.createContext(vmHandler.team2.sandbox);
        } catch (err) {
            console.log("ERROR: " + err);
        }
    });
};
In the vmHandler.init function, you are starting 2 asynchronous operations (querying and storing) and then reading from the files that those async operations are supposed to write.
However, the file reading is performed right after the 2 async operations are started. Therefore, it is expected that the files are read before they are written.
To resolve this, make team1Code and team2Code return Promises of their own, and do not read the files until they have been written:
team1Code()
    .then(team2Code)
    .then(readFiles)
Where readFiles is the function that does the file reading, and team1Code, team2Code return Promises that resolve when the files are written.
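A minimal sketch of that change for team1Code (team2Code is analogous, with result[1] and './scr2.js'; json2plain and options come from the question, and readFiles is the hypothetical reader named above):

function team1Code() {
    return db.getInfo().then(function (result) {
        var c = json2plain(JSON.stringify(result[0]), options).replace('Ai_code:', '');
        // Resolve only once the file has been fully written.
        return new Promise(function (resolve, reject) {
            fs.writeFile('./scr1.js', c, { encoding: 'ascii' }, function (err) {
                if (err) return reject(err);
                resolve();
            });
        });
    });
}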
This answer explains the asynchronous callbacks in Javascript.

Send response after callback functions complete execution in nodejs

I have a problem with callback functions and loops in nodejs: how can I ensure the response is sent only after all the callback functions have finished executing?
app.post('/data', function (req, res) {
    var send = [];
    for (var i = 0; i < p_objReq.body.quantity; i++) {
        Instruments.count({ /* condition */ }, function (err, count) {
            // using count and other parameters I am generating the code which is unique
            Instruments.find({ id: 'rondom_generated_code' }, function (err, instrumentsCount) {
                if (instrumentsCount.length == 0) {
                    send.push(rondom_generated_code);
                    if (send.length == p_objReq.body.quantity)
                        p_objRes.json({ success: true, data: send });
                }
                else {
                    Instruments.count({ /* condition */ }, function (err, count) {
                        // using count and other parameters I am generating the code which is unique
                        send.push(rondom_generated_code);
                        if (send.length == p_objReq.body.quantity)
                            p_objRes.json({ success: true, data: send });
                    });
                }
            });
        });
    }
});
When I write it like this, it sends the same random code (the last generated one) every time. I tried moving the whole thing into a separate function with callbacks, but that isn't working either.
One solution is to use Q.js, which is one of the Promise libraries (for more of Q.js's APIs, please refer to the link). Here is a sample that may help, if I understand your question correctly.
var Q = require('q');

app.post('/data', function (req, res) {
    var send = [];
    var p = function () {
        var deferred = Q.defer();
        Instruments.count({ /* condition */ }, function (err, count) {
            // using count and other parameters I am generating the code which is unique
            if (err) {
                deferred.reject(new Error(err));
            } else {
                send.push(randomnumber);
                deferred.resolve(); // settle the promise once the code is stored
            }
        });
        return deferred.promise;
    };
    var ps = [];
    for (var i = 0; i < p_objReq.body.quantity; i++) {
        ps.push(p()); // call p() so the array holds promises, not bare functions
    }
    Q.all(ps).then(function () {
        res.json({ success: true, data: send });
    });
});
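Note that pushing into the shared send array works only by side effect. Since Q.all resolves with an array of each promise's value, in the original order, an alternative (my variation, not tested against your models) is to resolve each deferred with the generated code itself:

// inside p: deferred.resolve(randomnumber) instead of pushing to send
Q.all(ps).then(function (codes) {
    res.json({ success: true, data: codes }); // codes arrive in request order
});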

How to handle callbacks in a for loop (Node.JS)

I am trying to write code with NodeJS where I grab data from an external API and then populate it in MongoDB using Mongoose. In between, I check whether that particular record already exists in Mongo or not. Below is my code.
router.route('/report') // the REST api address
    .post(function (req, res) // calling a POST
    {
        console.log('calling report API');
        var object = "report/" + reportID; // related to the API
        var parameters = '&limit=100'; // related to the API
        var url = link + object + apiKey + parameters; // related to the API
        var data = "";
        https.get(url, function callback(response)
        {
            response.setEncoding("utf8");
            response.on("data", function (chunk)
            {
                data += chunk.toString() + "";
            });
            response.on("end", function ()
            {
                var jsonData = JSON.parse(data);
                var array = jsonData['results']; // data is returned as an array of objects; accessing only a particular array
                var length = array.length;
                console.log(length);
                for (var i = 0; i < length; i++)
                {
                    var report = new Report(array.pop()); // Report is the schema model defined.
                    console.log('^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^');
                    console.log(i);
                    console.log('*****************************');
                    console.log(report);
                    console.log('*****************************');
                    // console.log(report['id']);
                    /*report.save(function(err)
                    {
                        if(err)
                            res.send(err);
                    });*/
                    Report.find({ id: report['id'] }).count(function (err, count) // checks if the id of that specific data already exists in Mongo
                    {
                        console.log(count);
                        console.log('*****************************');
                        if (count == 0) // if the count == 0, meaning it does not exist, then only save
                        {
                            report.save(function (err)
                            {
                                console.log('saved');
                                if (err)
                                    res.send(err);
                            });
                        }
                    });
                }
                res.json({
                    message: 'Grabbed Report'
                });
            });
            response.on("error", console.error);
        });
    })
My problem is that since NodeJS callbacks run in parallel, they are not called sequentially. My end result is something like this:
1. Calling report API
2. console.log(length) = 100
3. ^^^^^^^^^^^^^^^^^^^^^^^^
4. console.log(i) = starts with 0
5. *******************************
6. console.log(report) = the data which will be stored inside Mongo
7. *******************************
8. (numbers 3 - 7 repeat 100 times, as the length equals 100)
9. console.log(count) = either 0 or 1
10. (number 9 repeats 100 times)
11. console.log('saved')
12. (number 11 repeats 100 times)
Lastly, only the last of the 100 records is stored into Mongo.
What I need is some technique or method to handle these callbacks so that they execute sequentially, following the loop, rather than in arbitrary order. I am pretty sure this is the problem, as my other REST APIs are all working.
I have looked into async methods, promises, recursive functions and a couple of others, none of which I could really understand well enough to solve this problem. I really hope someone can shed some light on this matter.
Feel free also to correct me if I made any mistakes in the way I'm asking the question. This is my first question posted on StackOverflow.
This problem is termed "callback hell".
There are lots of other approaches, like using Promise and async libraries, which you'll find plenty of.
I'm more excited about the native async/await that ES7 will bring, which you can actually start using today with the transpiler library Babel.
But by far the simplest approach I've found is the following: you take the long callback functions out and define them outside.
router.route('/report') // the REST api address
    .post(calling_a_POST)

function calling_a_POST(req, res) {
    ...
    var data = "";
    https.get(url, function callback(response) {
        ...
        response.on("end", response_on_end_callback); // --> take out
        response.on("error", console.error);
    });
}

function response_on_end_callback() { // <-- define here
    ...
    for (var i = 0; i < length; i++) {
        var report = new Report(array.pop());
        ...
        Report.find({ id: report['id'] })
            .count(Report_find_count_callback); // --> take out
    }
    res.json({
        message: 'Grabbed Report'
    });
}

function Report_find_count_callback(err, count) { // <-- define here
    ...
    if (count == 0) {
        report.save(function (err) { // !! report is undefined here
            console.log('saved');
            if (err)
                res.send(err); // !! res is undefined here
        });
    }
}
A caveat is that you won't be able to access all the variables inside what used to be the callback, because you've taken them out of the scope. This could be solved with a "dependency injection" wrapper of sorts to pass the required variables.
router.route('/report') // the REST api address
    .post(calling_a_POST)

function calling_a_POST(req, res) {
    ...
    var data = "";
    https.get(url, function callback(response) {
        ...
        response.on("end", function (err, data) { // take these arguments
            response_on_end(err, data, res); // plus the needed variables
        });
        response.on("error", console.error);
    });
}

function response_on_end(err, data, res) { // and pass them to the function defined outside
    ...
    for (var i = 0; i < length; i++) {
        var report = new Report(array.pop());
        ...
        Report.find({ id: report['id'] })
            .count(function (err, count) {
                Report_find_count(err, count, report, res); // same here
            });
    }
    res.json({ // res is now available
        message: 'Grabbed Report'
    });
}

function Report_find_count(err, count, report, res) { // same here
    ...
    if (count == 0) {
        report.save(function (err) { // report is now available
            console.log('saved');
            if (err)
                res.send(err); // res is now available
        });
    }
}
When I execute the response_on_end function, I am getting the undefined:1 unexpected token u error.
I am pretty much sure it has something to do with this line: var jsonData = JSON.parse(data)
My response_on_end is as below: var jsonData = JSON.parse(data); // problem here
I realize I made an error here:
function calling_a_POST(req, res) {
    ...
    var data = "";
    https.get(url, function callback(response) {
        ...
        // response.on("end", function(err, data){
        response.on("end", function (err) { // data shouldn't be an argument here
            response_on_end(err, data, res);
        });
        response.on("error", console.error);
    });
}
Another problem I can foresee, which may not actually arise here but is worth mentioning anyway:
the data variable is a string, which is a primitive type; unlike objects, primitives are "passed by value".
More info
It's better to wrap the variable in an object and pass the object, because objects in javascript are always "passed by reference".
function calling_a_POST(req, res) {
    ...
    // var data = ""; //
    var data_wrapper = {};
    data_wrapper.data = ""; // wrap the string in an object
    https.get(url, function callback(response) {
        ...
        response.on("data", function (chunk) {
            data_wrapper.data += chunk.toString() + ""; // use the dot notation to reference
        });
        response.on("end", function (err) {
            response_on_end(err, data_wrapper, res); // and pass that object
        });
        response.on("error", console.error);
    });
}

function response_on_end(err, data_wrapper, res) {
    var data = data_wrapper.data; // later redefine the variable
    ...
    for (var i = 0; i < length; i++) {
        var report = new Report(array.pop());
        ...
You can use the async library for controlling your execution flow. There are also iterators for working with arrays.

Chaining an arbitrary number of promises in Q

I want to send an HTTP request N times. I want to eventually have information about the results of each of those requests.
Running the request function once works great. Here's the HTTP request function using Q.defer():
function runRequest() {
    var deferred = Q.defer(),
        start = (new Date).getTime(),
        req = HTTP.request(options, function (res) {
            var end = (new Date).getTime(),
                requestDetails = {
                    reqStatus: res.statusCode,
                    reqStart: start,
                    reqEnd: end,
                    duration: end - start
                };
            deferred.resolve(requestDetails);
        });
    req.on('error', function (e) {
        deferred.reject(e.message);
    });
    req.end();
    return deferred.promise;
}
If I do this, I get back the data I expect:
runRequest().then(function (requestDetails) {
    console.log('STATUS: ' + requestDetails.reqStatus);
    console.log('Duration: ' + requestDetails.duration);
    console.log('Start: ' + requestDetails.reqStart);
    console.log('End: ' + requestDetails.reqEnd);
}, function (error) {
    console.log('Problem with request: ' + error);
})
.done();
To iterate, I tried to fit that into a for loop:
function iterateRequests() {
    var deferred = Q.defer();
    var reqResults = [];
    for (var iteration = 0; iteration < requests; iteration++) {
        runRequest()
            .then(function (requestDetails) {
                console.log('STATUS: ' + requestDetails.reqStatus);
                reqResults.push(requestDetails);
            }, function (error) {
                console.log('Problem with request: ' + error);
            });
    }
    deferred.resolve(reqResults);
    return deferred.promise;
}
Then I call it like this:
iterateRequests()
    .then(function (results) {
        console.log(results);
        console.log("in the success callback after iterateRequests");
    }, function () {
        console.log("in the failure callback after iterateRequests");
    })
    .done();
I end up getting into the success callback (i.e., it logs "in the success callback after iterateRequests"). However, the console.log(results) prints before I get the logs from the runRequest().then() callback, and it's an empty array.
Any ideas or some guidance on chaining/iterating over promise-return functions?
Thanks!
Update
Follow-up question in response to @abject_error's answer:
Checked out Q.all. Definitely looks like what I need, and it's much simpler than what I was working with. I made a simple test case to help me figure out how it works:
var Q = require("q");

function returner(number) {
    var deferred = Q.defer();
    deferred.resolve(number);
    return deferred.promise;
}

function parent() {
    return Q.all([
        returner(1),
        returner(2),
        returner(4)
    ]);
}

parent()
    .then(function (promises) {
        // works - promises gives me [1, 2, 4]
        console.log(promises);
    });
So I see how I can use it if I know beforehand the number of times I need to call it (and which functions I'm going to call). Any tips on how to get a dynamic number of calls to returner (in this example) or runRequest (in my original example) in the array?
This answers the update part of the question:
var buildCalls = function () {
    var calls = [];
    for (var i in stories) {
        calls.push(myFunc(i));
    }
    return calls;
}

return Q.all(buildCalls());
Q has other functions to aid in Promise-based workflows. The method you need to use here is Q#all. If you have an array of promises and you want to call a function when all of them have been successfully fulfilled, you do
Q.all(array_of_promises).then(success_callback, failure_callback);
After all the request promises are fulfilled, success_callback is called. If any of them rejects, failure_callback is called immediately.
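For a dynamic number of calls against the original example, the same idea applies: build the array of promises in a loop and hand it to Q.all. A minimal sketch, assuming the runRequest from the question:

function iterateRequests(requests) {
    var promises = [];
    for (var i = 0; i < requests; i++) {
        promises.push(runRequest()); // each request starts immediately, in parallel
    }
    // Resolves with an array of requestDetails objects, in call order.
    return Q.all(promises);
}

iterateRequests(10).then(function (results) {
    console.log(results.length); // 10
}, function (error) {
    console.log('Problem with request: ' + error);
});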
