What is the variable scope for async tasks? - node.js

var itemIds, result, taskQueue, _i, _len;
itemIds = [];
taskQueue = async.queue(function(task, callback) {
  console.log('Hello ' + task.name);
  return callback();
}, 10);
for (_i = 0, _len = results.length; _i < _len; _i++) {
  result = results[_i];
  taskQueue.push({}, function(err) {
    var item;
    item = new Item(result);
    return item.save(function(err, new_item) {
      itemIds[itemIds.length] = new_item._id;
      console.log(new_item._id);
      return console.log(itemIds);
    });
  });
}
taskQueue.drain = function() {
  console.log('Queue Done!');
  return console.log(itemIds.length);
};
That is my code, but itemIds shows up as empty when drain runs. This is using the async module for Node.js, by the way.

I'd personally recommend you heavily simplify the code using the after module:
var slice = Array.prototype.slice;

var cb = after(results.length, function() {
  var items = slice.call(arguments);
  console.log("All done");
  console.log(items.length);
});

results.forEach(function(result) {
  var item = new Item(result);
  item.save(function(err, newItem) {
    cb(newItem);
  });
});

The problem isn't variable scope, it's that async.queue doesn't know about all the async functions you're scheduling. Specifically, it doesn't know about the item.save() calls - it only knows about the outer function that schedules item.save(). The actual save and resulting callback invocation are done asynchronously, after drain() has been called, which is why itemIds appears empty. (Make sense?)
To solve this, I would suggest you use the Step module instead of async. Specifically, look at Step's group() feature, which allows you to indicate when nested asynchronous control flows like this have finished.
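If you'd rather stay with the async module, another option is to do the save inside the queue worker itself, so the worker's callback (and therefore drain) only fires once each save has completed. A minimal sketch, reusing the names from the question (not the original answer's code):

var itemIds = [];
var taskQueue = async.queue(function(task, callback) {
  // Do the save inside the worker so the queue tracks it
  var item = new Item(task.result);
  item.save(function(err, newItem) {
    if (err) return callback(err);
    itemIds.push(newItem._id);
    callback();
  });
}, 10);
taskQueue.drain = function() {
  // Runs only after every queued save has finished
  console.log('Queue Done!', itemIds.length);
};
results.forEach(function(result) {
  taskQueue.push({ result: result });
});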

Related

Using promises to control flow is not working properly

I am trying to control the flow of execution in my code below, meaning I want it to run serially.
I am reading and updating data from and to my DB, and of course I want that to happen in the correct order. Below is the function where I call my DB; the query functions are wrapped in callbacks.
I am pretty new to promises, so perhaps the error is something silly I am overlooking. If you need any more information, please ask.
function my_function(array, array2)
{
  var array3 = [];
  return Promise.resolve(true)
    .then(function()
    {
      console.log("1")
      for(var i=0; i< array.length; i++)
      {
        get(array[i], function(results){
          console.log("2")
          array3.push(..);
        });
      }
      return array3;
    }).then(function()
    {
      console.log("3")
      for(var i=0; i< array2.length; i+=2)
      {
        //...
        get(array2[i], function(results){
          console.log("4")
          return array3.push(...);
        });
      }
      return array3;
    }).then(function(array3)
    {
      console.log("5")
      for(var i=0; i<array3.length; i++)
      {
        get(array3[i], function(results){
          console.log("6")
          update(.., function(callb_result){
            return;
          });
        });
      }
    });
}
And here is the way I am calling the queries.
function get(array, callback)
{
  db.get(`SELECT .. FROM .. WHERE ..;`, function(error, row) {
    ...
    return callback(something);
  });
}

function update(.., callback)
{
  db.run(`UPDATE .. SET ...`);
  return callback("updated"); //I don't want to return anything
}
What's printed in the log:
1
3
5
2
4
6
I was thinking perhaps the way I am calling the queries is async and that's messing everything up.
You're using for loops to run asynchronous tasks and return an array that is modified by them. But because they are asynchronous, the return happens before they are finished. Instead, you can create an array of promises, where each promise represents one of the asynchronous tasks and resolves once that task is done. To wait until every task is done, you can call Promise.all with the array of promises, which returns a promise that resolves with an array of the resolved results.
For the first .then you can use Array.prototype.map to easily create the array of promises. For each item in the array, the map callback returns a new Promise that resolves with the result from the callback of get.
.then(function() {
  console.log("1");
  const promiseArray = array.map(function(item) {
    return new Promise(function(resolve) {
      get(item, function(result) {
        console.log("2");
        resolve(result);
      });
    });
  });
  return Promise.all(promiseArray);
})
As you return Promise.all, the next .then will only be executed once all the promises in promiseArray are fulfilled. It will receive the array of results as the first parameter of its function, which means you can use them there. The second .then is similar to the first one, except that you don't want to call get on every item; in this case map is not applicable, so the for loop just creates a promise and adds it to the array of promises. Previously you used array3 to store the results that you want to update, but with promises you don't really need that: you can simply concatenate the results of both arrays.
.then(function(resultsArray) {
  console.log("3");
  const promiseArray2 = [];
  for (var i = 0; i < array2.length; i += 2) {
    const promise = new Promise(function(resolve) {
      get(array2[i], function(results) {
        console.log("4");
        resolve(results);
      });
    });
    promiseArray2.push(promise);
  }
  // Wait for all promises to be resolved
  // Then concatenate both arrays of results
  return Promise.all(promiseArray2).then(function(resultsArray2) {
    return resultsArray.concat(resultsArray2);
  });
})
This returns a promise that resolves with the concatenated array, so you will have all the results (from both .then calls) as an array, which is passed to the next .then function. In the third and final .then you simply call update on each element of the array. You don't need to call get again, as you've already done this and you passed on the results.
.then(function(finalResults) {
  console.log("5");
  for (var i = 0; i < finalResults.length; i++) {
    console.log("6");
    update(finalResults[i], function(result) {
      console.log(result);
    });
  }
});
Full runnable code (get uses a timeout to simulate asynchronous calls)
function myFunction(array, array2) {
  return Promise.resolve(true)
    .then(function() {
      console.log("1");
      const promiseArray = array.map(function(item) {
        return new Promise(function(resolve) {
          get(item, function(results) {
            console.log("2");
            resolve(results);
          });
        });
      });
      return Promise.all(promiseArray);
    })
    .then(function(resultsArray) {
      console.log("3");
      const promiseArray2 = [];
      for (var i = 0; i < array2.length; i += 2) {
        const promise = new Promise(function(resolve) {
          get(array2[i], function(results) {
            console.log("4");
            resolve(results);
          });
        });
        promiseArray2.push(promise);
      }
      return Promise.all(promiseArray2).then(function(resultsArray2) {
        return resultsArray.concat(resultsArray2);
      });
    })
    .then(function(finalResults) {
      console.log("5");
      for (var i = 0; i < finalResults.length; i++) {
        console.log("6");
        update(finalResults[i]);
      }
    });
}

function get(item, cb) {
  // Simply call the callback with the item after 1 second
  setTimeout(() => cb(item), 1000);
}

function update(item) {
  // Log what item is being updated
  console.log(`Updated ${item}`);
}

// Test data
const array = ["arr1item1", "arr1item2", "arr1item3"];
const array2 = ["arr2item1", "arr2item2", "arr2item3"];

myFunction(array, array2);
Improving the code
The code now works as expected, but there are many improvements that make it a lot easier to understand and conveniently also shorter.
To simplify the code you can change your get function to return a promise. This makes things a lot easier, since you don't need to create a promise in every step. And update doesn't need to return a promise, nor does it need a callback, since it's treated as synchronous here.
function get(array) {
  return new Promise(function(resolve, reject) {
    db.get(`SELECT .. FROM .. WHERE ..;`, function(error, row) {
      if (error) {
        return reject(error);
      }
      resolve(something);
    });
  });
}
Now you can use get everywhere you used to create a new promise. Note: I added the reject case for when there is an error; you'll have to take care of rejections with a .catch on the promise.
There are still too many unnecessary .then calls. First of all, Promise.resolve(true) is useless, since you can just return the promise of the first .then call directly; all it did in your example was automatically wrap its result in a promise.
You're also using two .then calls just to create an array of the results. Not only that, but they perform exactly the same call, namely get. Currently you also wait until the first set has finished before you execute the second set, but they can all be executed at the same time. Instead, you can create an array of all the get promises and then wait for all of them to finish.
function myFunction(array, array2) {
  // array.map(get) is equivalent to array.map(item => get(item))
  // which in turn is equivalent to:
  // array.map(function(item) {
  //   return get(item);
  // })
  const promiseArray = array.map(get);
  for (let i = 0; i < array2.length; i += 2) {
    promiseArray.push(get(array2[i]));
  }
  return Promise.all(promiseArray).then(results => results.forEach(update));
}
The myFunction body has been reduced from 32 lines of code (not counting the console.log("1") etc.) to 5.
Runnable Snippet
function myFunction(array, array2) {
  const promiseArray = array.map(get);
  for (let i = 0; i < array2.length; i += 2) {
    promiseArray.push(get(array2[i]));
  }
  return Promise.all(promiseArray).then(results => results.forEach(update));
}

function get(item) {
  console.log(`Starting get of ${item}`);
  return new Promise((resolve, reject) => {
    // Simply resolve with the item after 1 second
    setTimeout(() => resolve(item), 1000);
  });
}

function update(item) {
  // Log what item is being updated
  console.log(`Updated ${item}`);
}

// Test data
const testArr1 = ["arr1item1", "arr1item2", "arr1item3"];
const testArr2 = ["arr2item1", "arr2item2", "arr2item3"];

myFunction(testArr1, testArr2).then(() => console.log("Updated all items"));
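With the real database-backed get from earlier (which rejects on error), the caller would typically also attach a .catch, as noted above. Something like this, for example:

myFunction(testArr1, testArr2)
  .then(() => console.log("Updated all items"))
  .catch(err => console.error("Something went wrong:", err));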

Node.js Parallel calls to same child rest service and aggregating response

I want to call a child REST service from a parent REST service. The number of times the child service is called depends on the parameters passed to the parent service. I call all child service instances concurrently with different parameters, and I want to combine the responses from all of them. I am using the snippet below, but I don't want to rely only on a timeout: the response should be sent either when the timeout fires or when all child service calls have finished, whichever comes first.
for( i=0; i<length; i++)
{
  url=accountID[i] +'+'+sortcode[i] +'+' +accountHolderName[i];
  micro(url ,filter[i],function(resp)
  {
    this.resutlObject[count]=resp;
    console.log("count"+count);
    count=count+1;
  }.bind( {resutlObject: resutlObject} ));
}//end of for

setTimeout(function () {
  console.log("in time out");
  res.end(JSON.stringify(resutlObject || {}, null, 2));
},500);
You could also use Promises. Suppose each service call returns a promise; then you wait until all of them are fulfilled. Node.js supports promises natively starting from v4. If you have an earlier version of Node, just use a promise library.
// Instead of
function micro(url, filter, cb) {
  var resp = "result of async job"; // do some async work
  cb(resp);
}

// Modify your service to return a promise
function micro(url, filter) {
  return new Promise(function(resolve, reject) {
    var resp = "result of async job using `url` and `filter`";
    if (resp) {
      resolve(resp);
    } else {
      reject("reason");
    }
  });
}

// Create a list of service calls
var promises = [];
for( i=0; i<length; i++)
{
  url=accountID[i] +'+'+sortcode[i] +'+' +accountHolderName[i];
  promises.push(micro(url, filter[i]));
}

// Wait for all of them to fulfill
Promise.all(promises)
  .then(function(resultObject) {
    // Response
    res.end(JSON.stringify(resultObject || {}, null, 2));
  }, function(reason) {
    res.sendStatus(500);
    console.error(reason);
  });
You can use the async module. It provides a parallel forEach loop:
var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"};
var configs = {};

async.forEachOf(obj, function (value, key, callback) {
  fs.readFile(__dirname + value, "utf8", function (err, data) {
    if (err) return callback(err);
    try {
      configs[key] = JSON.parse(data);
    } catch (e) {
      return callback(e);
    }
    callback();
  });
}, function (err) {
  if (err) console.error(err.message);
  // configs is now a map of JSON data
  doSomethingWith(configs);
});
In this example it reads the files listed in the parameters; you can do the same for your task.
You could use the async module. It's designed to do exactly the kind of thing you're after. Something like this:
var async = require('async');
var collection = [];

for(i=0;i<length;i++) {
  collection.push(
    (function(i) {
      return function(callback) {
        url=accountID[i] +'+'+sortcode[i] +'+' +accountHolderName[i];
        micro(url ,filter[i],function(resp) {
          callback(null, resp);
        });
      }
    })(i)
  );
}//end of for

async.parallel(collection, function(err, results) {
  console.log(results) // array of results from all requests
})
What happens
async.parallel takes an array of functions as an argument. Each function receives a callback as an argument; the callback is a function that takes an error and a result as its arguments.
After all the callbacks have been executed, async calls the final callback, which receives the array of results from all the other callbacks.
In the loop we are creating just that: a collection of functions. In this example the code is a bit more complex because we use a closure (the immediately invoked function) in order to preserve the value of i for each of these functions.
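As a side note, if you can rely on ES2015, declaring the loop variable with let gives each iteration its own binding, so the wrapping IIFE is no longer needed. A minimal sketch of the same loop under that assumption:

var collection = [];
for (let i = 0; i < length; i++) {
  collection.push(function(callback) {
    var url = accountID[i] + '+' + sortcode[i] + '+' + accountHolderName[i];
    micro(url, filter[i], function(resp) {
      callback(null, resp); // `i` is captured per iteration thanks to `let`
    });
  });
}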

How to get values from a promise with node.js without .then function

I have a problem with a promise using node.js. My code is below:
var p1 = new Promise(function(resolve, reject) {
  // my function here
});

p1.then(function(result){
  // my result
});
This code works, but to get values from p1 I must use the .then method, and the result values can only be accessed inside that p1.then callback. How do I access the p1 values without .then?
Below are my expected results:
var p1 = new Promise(function(resolve, reject) {
  // my function here
});

var abc = NextFunction(p1);
The p1 values will be used afterwards, in code outside of the promise.
p1 is a Promise; you have to wait for it to resolve and consume the values the way promises require.
You can read here: http://www.html5rocks.com/en/tutorials/es6/promises/
Although the result is available only inside the resolution callback, you can expose it through a simple variable:
var res;
p1.then(function(result){
  res = result; // Now you can use res everywhere
});
But be mindful to use res only after the promise has resolved; if you depend on that value, call the next function from inside the .then, like this:
p1.then(function(result){
  var abc = NextFunction(result);
});
You can use await to wait until the promise is resolved or rejected:
function resolveAfter2Seconds(x) {
  return new Promise(resolve => {
    setTimeout(() => {
      resolve(x);
    }, 2000);
  });
}

async function f1() {
  var x = await resolveAfter2Seconds(10);
  console.log(x); // 10
}

f1();
Be aware that an await expression must be inside an async function, though.
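If the surrounding code is not already an async function, a common workaround is to wrap the await in an immediately invoked async function, for example (reusing resolveAfter2Seconds from above):

(async function() {
  // await is allowed here because we are inside an async function
  var x = await resolveAfter2Seconds(10);
  console.log(x); // 10
})();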
You can do this using the deasync module:
var node = require("deasync");

// Wait for a promise without using the await
function wait(promise) {
  var done = 0;
  var result = null;
  promise.then(
    // on value
    function (value) {
      done = 1;
      result = value;
      return (value);
    },
    // on exception
    function (reason) {
      done = 1;
      throw reason;
    }
  );
  while (!done)
    node.runLoopOnce();
  return (result);
}

function test() {
  var task = new Promise((resolve, reject)=>{
    setTimeout(resolve, 2000, 'Hello');
    //resolve('immediately');
  });
  console.log("wait ...");
  var result = wait(task);
  console.log("wait ...done", result);
}
In Node.js 14.8.0+, you are able to use top-level await.
So your script would now look like:
var p1 = await new Promise(function(resolve, reject) {
  // my function here
});
// p1 is now the result of the promise, not a promise
If you are doing this in the REPL, it is not enabled by default, so you have to start the REPL with the --experimental-repl-await flag. Using nvm, I was able to test Node version 12.20.1 as well, and the REPL flag works there. (Outside the REPL, top-level await only works in ES modules, e.g. a .mjs file or a package with "type": "module".)

node.js callback function at the after loop has ended

I have an array of URLs and I want to loop through them and fetch their content. After I have looped through them and fetched the content, I want a callback function to be called.
I know I can do this via the async library, but I want to do it without using any library.
A sample of the kind of code I want is below:
['yahoo.com', 'gmail.com'].each(function(item){
  //code to fetch URL content
}, someCallbackFunctionToBeExecutedAtTheEndOfLoop);
This is typically the type of thing you do using promises (but you would need a library), with code like:
var ops = [];
urls.forEach(function(url) {
  ops.push(fetchUrl(url));
});
P.all(ops).then(callback);

function fetchUrl(url) {
  var defer = P.defer();
  //do stuff
  // call defer.resolve(result);
  return defer.promise;
}
If you don't want to use promises, you can use a counter of operations, like:
var ops = urls.length;
urls.forEach(function(url) {
  // do stuff
  ops--;
  if (ops === 0) {
    callback();
  }
});
If you choose promises, I advise using the p-promise module, which is far more optimized than Q.
If you want to do it without any sort of library like async, then you have to write your own counter to keep track of when all the async responses have been completed:
var request = require('request');

function loadAll(list, fn) {
  var cnt = list.length;
  var responses = [];
  list.forEach(function(url, index) {
    request(url, function(error, response, body) {
      if (error) {
        fn(error);
      } else {
        responses[index] = response;
        --cnt;
        if (cnt === 0) {
          fn(0, responses);
        }
      }
    });
  });
}

loadAll(['http://www.yahoo.com', 'http://www.gmail.com'], function(err, results) {
  if (!err) {
    // process results array here
  }
});
If you're going to be doing many async operations in node.js, then getting a promise library like Bluebird will save you a lot of time. For example, I think you could do the above in something like this (untested):
var Promise = require("bluebird");
var requestP = Promise.promisify(require("request"));

Promise.map(['http://www.yahoo.com', 'http://www.gmail.com'], requestP).then(function(results) {
  // process the array of results here
});

How to deal with promises in loop?

This is what I would like to do
var response = [];
Model.find().then(function(results){
  for(r in results){
    MyService.getAnotherModel(results[r]).then(function(magic){
      response.push(magic);
    });
  }
});
//when finished
res.send(response, 200);
However, it returns just [] because the async calls have not finished yet. I am using Sails.js, which uses Q promises. Any ideas how to send the response only when all the async calls are finished?
https://github.com/balderdashy/waterline#query-methods (promise methods)
Since waterline uses Q, you can use the allSettled method.
You can find more details in the Q documentation.
Model.find().then(function(results) {
  var promises = [];
  for (r in results){
    promises.push(MyService.getAnotherModel(results[r]));
  }
  // Wait until all promises resolve
  Q.allSettled(promises).then(function(result) {
    // Send the response
    res.send(result, 200);
  });
});
You simply can't do that; you have to wait for the asynchronous functions to complete.
You can either create something yourself, use the async module, or use the built-in features noted in Florent's answer, but I'll add the other two here anyway:
var response = [];
Model.find().then(function(results){
  var length = Object.keys(results).length,
      i = 0;
  for(r in results){
    MyService.getAnotherModel(results[r]).then(function(magic){
      response.push(magic);
      i++;
      if (i == length) {
        // all done
        res.send(response, 200);
      }
    });
  }
});
or with async
var response = [];
Model.find().then(function(results){
  var asyncs = [];
  for(r in results){
    // wrap in an IIFE so each task keeps its own value of r
    asyncs.push((function(r) {
      return function(callback) {
        MyService.getAnotherModel(results[r]).then(function(magic){
          response.push(magic);
          callback();
        });
      };
    })(r));
  }
  async.series(asyncs, function(err) {
    if (!err) {
      res.send(response, 200);
    }
  });
});
Take a look at jQuery deferred objects:
http://api.jquery.com/category/deferred-object/
Specifically, .when()
http://api.jquery.com/jQuery.when/
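For what it's worth, a rough sketch of the pattern that answer is pointing at might look like the following. It assumes a jQuery-style $.Deferred / $.when implementation is available in the Node app (an unusual choice next to the Q-based answers above) and that results is an array:

Model.find().then(function(results) {
  var deferreds = results.map(function(result) {
    var d = $.Deferred();
    MyService.getAnotherModel(result).then(function(magic) {
      d.resolve(magic);
    });
    return d.promise();
  });
  // $.when resolves once every deferred has resolved; the resolved values
  // arrive as separate arguments, so collect them back into an array.
  $.when.apply($, deferreds).then(function() {
    var response = Array.prototype.slice.call(arguments);
    res.send(response, 200);
  });
});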
