I have the code below, but as soon as widgetsAddCall is pushed into the array it executes immediately, so Promise.each is of no use.
The widgetsAddCall function makes an async request to the API server and returns a Bluebird-promisified request. I want the API calls to be made one after another, so that each call sends its chunk of data in turn.
var Promise = require('bluebird');
var chunkCnt = Math.ceil(widgetsIds.length / 4000);
var responseT = [];
for (var cntTemp = 0; cntTemp < chunkCnt; cntTemp++) {
    var tempWidgs = widgetsIds.slice(cntTemp * 4000, (cntTemp + 1) * 4000);
    var query = {
        id: tempWidgs.join(',')
    };
    responseT.push(widgetsAddCall(tempWidgs, query, campRemoteId, campaign));
}
return Promise.each(responseT, function(responses) {
    // Use the responses here
    return getWidgets(campRemoteId, campaign).then((ids) => {
        var toRemove = [];
        for (var id of ids) {
            if (widgetsIds.indexOf(id) === -1) {
                toRemove.push(id);
            }
        }
        if (toRemove.length) {
            return removeWidgets(campaign, campRemoteId, toRemove);
        }
    });
});
I want the API calls to be made one after another, so that each call sends its chunk of data in turn.
The simplest way I can think of to serialize all your calls is to prebuild your chunks into an array and then use Bluebird's Promise.mapSeries() to serially iterate through the array:
var Promise = require('bluebird');

// pre-build chunks into an array
var chunks = [];
var chunkCnt = Math.ceil(widgetsIds.length / 4000);
for (var chunkIndex = 0; chunkIndex < chunkCnt; chunkIndex++) {
    chunks.push(widgetsIds.slice(chunkIndex * 4000, (chunkIndex + 1) * 4000));
}

// now serially iterate the array
Promise.mapSeries(chunks, function(item) {
    return widgetsAddCall(item, {id: item.join(',')}, campRemoteId, campaign);
}).then(function(results) {
    // now process the results of widgetsAddCall() here
});
FYI, your original use of Promise.each() did not make sense because you were iterating an array of promises without using any information from the iteration, so there was no point to it. Worse, you weren't actually serializing your calls to widgetsAddCall(): every call was launched in parallel the moment you pushed it into the array.
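If you prefer to keep Promise.each(), the fix is to iterate the raw chunks rather than promises that are already in flight. A minimal sketch, assuming the same chunks array as above:

// Iterate data, not promises: Bluebird waits for the promise returned
// by the iterator before moving on to the next chunk.
Promise.each(chunks, function(chunk) {
    return widgetsAddCall(chunk, {id: chunk.join(',')}, campRemoteId, campaign);
});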
A little info: I have an arp.js file which takes a subnet address such as "192.168.2", gets all strings returned from arp -a, and stores them in an array.
I can't figure out why my arpList function is returning an undefined value in my index.js file.
All the console.logs return the correct values in arp.js when called from index.js, but ipObj comes up undefined, even though the console.log just before the return of ipObj works.
Any help would be greatly appreciated.
var { spawn } = require('child_process');
const arpLs = spawn('arp', ['-a']);
var bufferData = '';

module.exports = {
    arpList: function (subnet) {
        arpLs.stdout.on('data', data => {
            bufferData += data;
        })
        arpLs.stderr.on('data', data => {
            console.log('error: ' + data);
        });
        arpLs.on('exit', function (code) {
            if (code != 0) {
                console.log("Error exiting"); // if an error occurs
            }
            console.log("exit start 1"); // checking internal processes at stages
            var dataArray = bufferData.split(' ');
            var ipArray = [];
            for (i = 0; i < dataArray.length; i++) {
                if (dataArray[i].includes(subnet)) {
                    ipArray.push(dataArray[i]);
                    console.log("loop working");
                }
            }
            var ipObj = { "lanIps": ipArray };
            console.log("Object is there: " + ipObj)
            return ipObj; // this obj should be returned to the index.js call
        })
    },
    sayMyName: function () {
        return "Hello";
    }
}
//arpList(ipSubnet);
//INDEX.js
//the index page looks like this
//var arp = require('./arp.js');
//var ipSubnet = "192.168.2";
//var lanIps = arp.arpList(ipSubnet);
//console.log(lanIps);
I ended up adding a callback parameter to arpList, making it function (subnet, callback). Then, instead of returning the value, I pass it into the callback.
On the index.js side, instead of
var lanIps = arp.arpList(value)
I used
arp.arpList(value, function(res) { lanIps = res; });
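For reference, here is a minimal sketch of what that callback version might look like, assuming the same spawn-and-parse logic as the question:

// arp.js
var { spawn } = require('child_process');

module.exports = {
    arpList: function (subnet, callback) {
        const arpLs = spawn('arp', ['-a']);
        var bufferData = '';
        arpLs.stdout.on('data', data => {
            bufferData += data;
        });
        arpLs.on('exit', function (code) {
            var ipArray = bufferData.split(' ')
                .filter(entry => entry.includes(subnet));
            // hand the result to the caller instead of returning it
            callback({ lanIps: ipArray });
        });
    }
};

// index.js
var arp = require('./arp.js');
var ipSubnet = "192.168.2";
arp.arpList(ipSubnet, function (res) {
    var lanIps = res;
    console.log(lanIps); // lanIps is only usable from inside this callback
});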
return ipObj; // this obj should be returned to the index.js call
It won't be returned. The reference says nothing about a return value; Node-style callbacks rarely work like that, because they are potentially asynchronous and the returned value cannot be taken into account.
This is a special case of this well-known problem. The process is asynchronous and finishes after the arp.arpList(ipSubnet) call, so there's nothing to assign to lanIps. This is a use case for promises; there are already third-party promisified counterparts like child-process-promise.
The problem can also be solved by moving to a synchronous API. child_process functions have synchronous counterparts, including spawnSync.
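A minimal sketch of the promise-based route, again assuming the question's spawn-and-parse logic (only the delivery mechanism changes):

const { spawn } = require('child_process');

function arpList(subnet) {
    return new Promise((resolve, reject) => {
        const arpLs = spawn('arp', ['-a']);
        let bufferData = '';
        arpLs.stdout.on('data', data => { bufferData += data; });
        arpLs.on('exit', code => {
            if (code !== 0) {
                return reject(new Error('arp exited with code ' + code));
            }
            const ipArray = bufferData.split(' ')
                .filter(entry => entry.includes(subnet));
            resolve({ lanIps: ipArray });
        });
    });
}

// usage: the result is only available inside then()
arpList('192.168.2').then(ipObj => console.log(ipObj.lanIps));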
My input is streamed from another source, which makes it difficult to use async.forEach. I am pulling data from an API endpoint, with a limit of 1000 objects per request, and I need to get hundreds of thousands of them (basically all of them). I'll know they're finished when a response contains fewer than 1000 objects. I have tried this approach:
/* List all deposits */
var depositsAll = [];
var depositsIteration = [];
async.doWhilst(this._post(endpoint_path, function (err, response) {
    // check err
    /* Loop through the data and gather only the deposits */
    for (var key in response) {
        // do some stuff
    }
    depositsAll += depositsIteration;
    return callback(null, depositsAll);
}, {limit: 1000, offset: 0, sort: 'desc'}),
response.length > 1000, function (err, depositsAll) {
    // check for err
    // return the complete result
    return callback(null, depositsAll);
});
With this code I get an internal async error saying that iterator is not a function, and in general I'm fairly sure the logic isn't correct either.
In case it's not clear what I'm trying to achieve: I need to perform the request multiple times, appending the response data to a result that at the end contains all the results, so I can return it. And I need to keep making requests until a response contains fewer than 1000 objects.
I also looked into async.queue but could not get the hang of it...
Any ideas?
You should be able to do it like that, but if that example is from your real code, you have misunderstood some of how async works. doWhilst takes three arguments, each of them being a function:
1. The function to be called by async. It gets a callback argument that must be called. In your case, you need to wrap this._post inside another function.
2. The test function (you passed the value of response.length > 1000, i.e. a boolean, which would only work if response were defined).
3. The final function to be called once execution stops.
Example with each needed function separated for readability:
var depositsAll = [];
var responseLength = 1000;
var self = this;

var post = function(asyncCb) {
    self._post(endpoint_path, function(err, res) {
        ...
        responseLength = res.length;
        asyncCb(err, depositsAll);
    });
};

var check = function() {
    return responseLength >= 1000;
};

var done = function(err, deposits) {
    console.log(deposits);
};

async.doWhilst(post, check, done);
Dears,
How can I run promises in Node.js sequentially? In the following example, I'm looping through an array of hours and, for each fetched hour, getting a result from the database. The issue: I do get results, but I want them sequentially, in the same order in which I got the hours.
angular.forEach(SharedVar.getCategories(), function (h) {
    t = h.split('-', 2);
    t = t[0];
    RESTApi.getAnswerdCallsByHour(t).then(function (answerdTotal) {
        $scope.answerdCallsTotalByHour.push(answerdTotal);
        var d = SharedVar.getDataS();
        d[count] = answerdTotal;
        SharedVar.setDataS(d);
        count++;
    });
});
Thanks,
var promise = Promise.resolve(); // make an empty promise the way your promise library does it
angular.forEach(SharedVar.getCategories(), function (h) {
    promise.then(function() {
        return RESTApi.getAnswerdCallsByHour(t).then(function (answerdTotal) {});
    });
});
The way to do it sequentially is to make one request and issue the next request inside the previous promise's then().
I think the better approach by far is to extend your SharedVar.setDataS(d) function so that it does not depend on receiving the data sequentially, e.g. SharedVar.setDataS(d, index), and to use the config argument of the $http.get call (or whatever it is) inside your RESTApi to carry that index all the way through to the promise.
If your RESTApi looks like this:
var RESTApi = {
    getAnswerdCallsByHour: function(hour) {
        var url = "bla.com/myservice?hour=" + hour;
        return $http.get(url).data;
    }
    // Some other methods...
}
Then you need a way to "reorder" your data when it arrives asynchronously. This could be an index you count up, or, in your case, maybe the hour variable:
var RESTApi = {
    getAnswerdCallsByHour: function(hour) {
        var url = "bla.com/myservice?hour=" + hour;
        var config = {};
        config.hour = hour;
        return $http.get(url, config); // return the promise, not just data or a specific field
    }
    // Some other methods...
}
Now, when your promise is fulfilled, you can access your hour variable on the response object like so:
RESTApi.getAnswerdCallsByHour(t).then(function (response) {
    var d = SharedVar.getDataS();
    d[response.config.hour] = response.data;
    SharedVar.setDataS(d);
});
Now you know which piece of data correlates to which request, and you do not need to receive the data in order. The last piece only works properly when hours run sequentially from 0 to 23; if that isn't the case, you need to:
var RESTApi = {
    getAnswerdCallsByHour: function(hour, index) {
        var url = "bla.com/myservice?hour=" + hour;
        var config = {};
        config.index = index;
        return $http.get(url, config);
    }
    // Some other methods...
}
...
var d = SharedVar.getDataS();
d[response.config.index] = response.data;
SharedVar.setDataS(d);
Safari's answer is how I typically handle this. (Sorry, I don't have enough rep to comment yet...) You were experiencing problems with it because the example provided does not capture and use the new promise in subsequent loops. See my comments on the slightly modified version here:
var promise = Promise.resolve();
angular.forEach(SharedVar.getCategories(), function (h) {
    // use var so each iteration's callback captures its own t
    var t = h.split('-', 2);
    t = t[0];
    // You must capture the new promise here; the next loop will wait
    // for the promise returned from getAnswerdCallsByHour to resolve.
    promise = promise.then(function() {
        // Halt downstream promises until this returned promise finishes
        return RESTApi.getAnswerdCallsByHour(t).then(function (answerdTotal) {
            $scope.answerdCallsTotalByHour.push(answerdTotal);
            var d = SharedVar.getDataS();
            d[count] = answerdTotal;
            SharedVar.setDataS(d);
            count++;
        });
    });
});
I've drawn a simple flow chart, which basically crawls some data from the internet and loads it into the database. So far I had thought I was at peace with promises, but now I have an issue I've been working on for at least three days without a single step forward.
Here is the flow:
Consider there is a static string array like so: const courseCodes = ["ATA", "AKM", "BLG", ...].
I have a fetch function; it basically does an HTTP request followed by parsing, and afterwards returns some object array.
fetch works perfectly when invoking its callback with that expected object array; it even worked with promises, which was far tidier.
The fetch function should be invoked with every element in the courseCodes array as its parameter. These calls should be performed in parallel, since the separate fetch calls do not affect each other.
As a result, there should be a results array in the callback (or in the promise's resolve parameter) which holds an array of arrays of objects. With those results, I should invoke my loadCourse function with each object in the results array as its parameter. Those tasks should be performed serially, because loadCourse basically queries the database to check whether a similar object exists, and adds it if it doesn't.
How can I perform these kinds of tasks in Node.js? I could not maintain the asynchronous flow in a scenario like this; I've failed with the caolan/async library and with the bluebird & q promise libraries.
Try something like this, if you can follow it:
const courseCodes = ["ATA", "AKM", "BLG", ... ];

// stores the tasks to be performed.
var parallelTasks = [];
var serialTasks = [];

// keeps track of courses fetched & results.
var courseFetchCount = 0;
var results = {};

// your fetch function.
function fetch(course_code) {
    // your code to fetch & parse.
    // store the result for each course in the results object
    results[course_code] = 'whatever result comes from your fetch & parse code...';
    // call this from your request's completion callback, so it only
    // runs once the asynchronous fetch & parse has actually finished.
    CheckIfAllCoursesFetched();
}

// your load function.
function loadCourse(results) {
    for (var index in results) {
        var result = results[index]; // result for a single course
        var task = (function(result) {
            return function() {
                saveToDB(result);
                // move on to the next task; if saveToDB is asynchronous,
                // call this from its completion callback instead.
                nextInSerial(null, result);
            };
        })(result);
        serialTasks.push(task);
    }
    // execute serial tasks for saving results to the database.
    nextInSerial(null, null);
}

// pseudo function to save a result to the database.
function saveToDB(result) {
    // your code to store in db here.
}

// checks if fetch() is complete for all course codes in your array
// and then starts the serial tasks for saving results to the database.
function CheckIfAllCoursesFetched() {
    courseFetchCount++;
    if (courseFetchCount == courseCodes.length) {
        // now process courses serially
        loadCourse(results);
    }
}

// helper function that executes tasks in serial fashion.
function nextInSerial(err, result) {
    if (err) throw new Error(err.message);
    var nextSerialTask = serialTasks.shift();
    if (nextSerialTask) nextSerialTask(result);
}

// build the parallel tasks for fetching.
for (var index in courseCodes) {
    var course_code = courseCodes[index];
    var task = (function(course_code) {
        return function() {
            fetch(course_code);
        };
    })(course_code);
    parallelTasks.push(task);
}

// start executing the parallel tasks.
for (var task_index in parallelTasks) {
    parallelTasks[task_index]();
}
Or you may refer to the nimble npm module.
I have Node.js files restservice.js and mysql.js.
In mysql.js I have two functions, elementlevelpricing and pricingdetail.
In restservice.js I have an API with the following code:
var workload = req.body;
var workloadinfo = {
    workloadId: workload.workloadId,
    ownerId: workload.ownerId,
    uniqueName: workload.uniqueName,
    name: workload.name
};
if (workload.elements && workload.elements.length > 0) {
    var elementlevelpricingSummary = {};
    var elementArray = [];
    var elementinfo = {};
    var metadataModified = {};
    var pricingDetail = {};
    async.forEachSeries(workload.elements, createResponse, function (err) {
        res.send(workloadinfo);
    });

    function createResponse(elements, callback) {
        var resourceIdentifierArray = [];
        elementinfo = elements;
        resourceIdentifierArray.push(elements.uri);
        var resourceIdentifiers = resourceIdentifierArray.join(',');
        // Get element level pricing summary
        mysql.elementlevelpricing(resourceIdentifiers, function (result) {
            // do some stuff here
            return callback();
        });
    }
}
I need to call the pricingdetail function from mysql.js and append its result to the workloadinfo variable (which should already hold the result set of elementlevelpricing; that is what is sent within forEachSeries). Can anyone suggest a professional way to accomplish this?
Use asynchronous functions. The whole point of Node.js is to avoid blocking. Blocking in Node.js is worse than blocking in threaded environments because there aren't any other threads (though there may be other clustered processes): you're blocking the only event loop available, which means the whole server has to wait, doing absolutely no work, until your I/O is done.
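As a rough sketch (assuming mysql.pricingdetail is asynchronous and takes the same kind of resource-identifier argument and callback as mysql.elementlevelpricing, which is an assumption, not a documented signature), you could nest the second call inside the first and attach both results to workloadinfo before signalling the series callback:

function createResponse(elements, callback) {
    var resourceIdentifiers = [elements.uri].join(',');
    // Get element level pricing summary
    mysql.elementlevelpricing(resourceIdentifiers, function (result) {
        workloadinfo.elementlevelpricingSummary = result;
        // hypothetical call; adjust to pricingdetail's real signature
        mysql.pricingdetail(resourceIdentifiers, function (detail) {
            workloadinfo.pricingDetail = detail;
            return callback(); // move to the next element only after both calls finish
        });
    });
}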