I have to loop over an array and run a very heavy task (spread across several functions) for every element, and I need the whole array to be processed before the callback is returned. The problem is that Node is not blocking there; it treats this block of code as asynchronous, because the callback is returned before the work is done. What am I missing?
function one(data, callback){
  for(var i=0; i < data.length; i++){
    result = two(data[i]);
  }
  callback(result)
}

function two(data){
  //process
  return three(data);
}

function three(data){
  //heavy task
  return data;
}
callback(result) is called immediately, and I need it to wait until the array has been processed. This code has already been moved to a child process, so it is supposed to work that way (blocking inside the child is fine).
'use strict';
function makePromise(data) {
  return new Promise((resolve, reject) => {
    // here handle data
    if (success) { // replace with your own success check
      resolve(result);
    } else {
      reject(error);
    }
  });
}

Promise.all(data.map((element) => makePromise(element)))
  .then(results => {
    // here results is an array
  })
  .catch(error => {
    console.log(error);
  });
You can use Promises in ES6.
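For instance, here is a minimal sketch (not from the original answer) of how the one() function from the question could be rewritten on top of makePromise, assuming the per-element heavy work happens inside makePromise:

// Sketch: one() now returns a promise instead of taking a callback.
function one(data) {
  // Build one promise per element and wait for all of them.
  return Promise.all(data.map(element => makePromise(element)));
}

// Hypothetical usage:
// one(myArray)
//   .then(results => console.log(results)) // results holds every element's result
//   .catch(err => console.error(err));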
Related
I need to retrieve the actual value from a promise-based function in a Node 6 environment (Azure Functions), so I used co (https://www.npmjs.com/package/co) via generators (instead of the async/await paradigm) to handle the inner promise.
I also need to retry that co/promise function a few times, using setTimeout, before giving up definitively.
I am currently not able to make the following code work as expected. I am not sure where the problem is, but I cannot "yield from the promise returned by co", so in the end the array that is passed around the recursive levels of the stack contains promises of values (1/0) rather than the actual values.
This is the wrapper for the "promise based function" that is handled with a try/catch to make sure we actually always return either 1 or 0.
const wannabeSyncFunc = () => {
  console.log("outside co...");
  return co(function *(){
    console.log("inside co...");
    try {
      console.log("yielding...");
      // promise that could be rejected hence try/catch
      //
      // I can not change this returned promise, so I must treat it
      // as a promise that could potentially be rejected
      let stuff = yield Promise.resolve();
      console.log("stuff?", stuff);
      console.log("returning 1");
      return 1;
    } catch (err) {
      console.log("returning 0");
      return 0;
    }
    console.log("after try/catch...");
  });
}
This is the recursive/setTimeout function that is supposed to try a few times before giving up.
const retryIntervalInMillis = 50;

const wannabeRecursiveFunc = (currTimes, attemptsArray) => {
  return co(function *(){
    console.log("Curr attemptsArray:", attemptsArray);
    console.log("Curr attemptsArray[attemptsArray.length - 1]:", attemptsArray[attemptsArray.length - 1]);
    console.log("Curr Promise.resolve(attemptsArray[attemptsArray.length - 1]):", Promise.resolve(attemptsArray[attemptsArray.length - 1]));
    if (attemptsArray[attemptsArray.length - 1] == Promise.resolve(1)) {
      console.log("Found the solution, returning straight away!")
      return attemptsArray;
    }
    if (currTimes <= 0) {
      console.log("Expired acquiring recursion");
      return attemptsArray;
    }
    currTimes--;
    const currValue = wannabeSyncFunc();
    console.log(`First: currTimes: ${currTimes} currValue: ${currValue} curr attemptsArray: ${attemptsArray}`);
    attemptsArray.push(currValue);
    if (currValue === 1) {
      return attemptsArray;
    }
    console.log(`Then: currTimes: ${currTimes} curr attemptsArray: ${attemptsArray}`);
    return yield setTimeout(wannabeRecursiveFunc, currTimes*retryIntervalInMillis, currTimes, attemptsArray);
    // return Promise.all(attemptsArray);
  });
}
I've tried to invoke this in a few different ways like:
const numberOfAttempts = 3;
let theArray = wannabeRecursiveFunc(numberOfAttempts, []);
console.log(">>>", theArray);
Or, assuming wannabeRecursiveFunc returns a promise, calling .then on it and trying to print theArray from there.
I keep seeing elements like Promise { 1 } inside the array when printing it, but I would like to see either 1 or 0, so that the checks before the recursion can work as expected. At the moment those checks don't work, I think, because I am comparing Promise { 1 } with 1.
However, I am not sure this is the reason the whole thing is not working, and I am not even sure how to fix it. I am not sure whether co is needed at all (even in the Node.js v6 environment), or how to make this promise/setTimeout combination work as expected.
I think I understand your objective: invoke a function that might fail; if it fails, wait a little bit and retry it. Do all of that with promises.
Here are a couple of tools:
a promisified version of setTimeout...
function timeoutPromise(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

timeoutPromise(1000).then(() => {
  console.log('time out expired');
});
A promise-returning dummy function that sometimes fails...
function fnThatMightFail() {
  return new Promise((resolve, reject) => {
    let fail = Math.random() < 0.40;
    (fail) ? reject('bad') : resolve('good');
  });
}

fnThatMightFail().then(result => {
  console.log(result);
}).catch(error => {
  console.log(error);
});
And then, I think here's the recursive idea you're looking for. Pass in a function and a wait time between attempts, call recursively until we succeed...
function fnThatMightFail() {
  return new Promise((resolve, reject) => {
    let fail = Math.random() < 0.40;
    (fail) ? reject('bad') : resolve('good');
  });
}

function timeoutPromise(ms) {
  return new Promise((resolve) => {
    setTimeout(() => resolve(), ms);
  });
}

function fnRetryer(fn, tries, wait) {
  if (tries <= 0) return Promise.reject('bad');
  console.log('attempting fn');
  return fn().then(result => {
    console.log(`success: ${result}`);
    return result;
  }).catch(error => {
    console.log(`error: ${error}, retrying after ${wait}ms`);
    return timeoutPromise(wait).then(result => {
      console.log(`${wait}ms elapsed, recursing...`);
      return fnRetryer(fn, tries-1, wait);
    });
  });
}

fnRetryer(fnThatMightFail, 5, 1000).then(result => {
  console.log(`we tried (and maybe retried) and got ${result}`);
}).catch(error => {
  console.log('we failed after 5 tries, waiting 1s in between each try');
});
Note that the tries parameter caps the number of attempts: it is decremented on each recursive call, and the function stops recursing once it reaches zero. Also note that, on the recursive call, you might opt to lengthen the wait time.
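For example, here is a hedged sketch (not from the original answer) of the same retryer with the wait time doubled on each attempt, i.e. exponential backoff, reusing the timeoutPromise helper above:

function fnRetryerBackoff(fn, tries, wait) {
  if (tries <= 0) return Promise.reject('bad');
  return fn().catch(error => {
    console.log(`error: ${error}, retrying after ${wait}ms`);
    // Double the wait before the next attempt
    return timeoutPromise(wait).then(() => fnRetryerBackoff(fn, tries - 1, wait * 2));
  });
}

// fnRetryerBackoff(fnThatMightFail, 5, 100) would wait 100ms, 200ms, 400ms, ...
// between attempts before giving up.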
I'm writing a function that returns an array of values. Some of the values are calculated in a callback, but I don't know how to handle the asynchronous calls so that all of my results are in the array before it is returned, rather than being added after it has already been sent.
let array = []
for (const stuff of stuffs) {
  if (condition) {
    array.push(stuff)
  } else {
    api.compute(stuff, function(resp) {
      array.push(resp.stuff)
    })
  }
}
res.json({ "stuff": array })
In this example the array is written to the response before the async calls have finished.
How can I make this work asynchronously?
You have to use one of these approaches:
async library
Promise.all
coroutines/generators
async/await
The coolest, I think, is async/await. First we modify your function so that it returns a promise:
const compute = function(stuff) {
  return new Promise( (resolve, reject) => {
    api.compute(stuff, function(resp) {
      resolve(resp.stuff);
    });
  });
};
Then we modify your route to use an async handler:
app.get('/', async function(req, res, next) {
  const array = [];
  for (const stuff of stuffs) {
    if (condition) {
      array.push(stuff);
    } else {
      const computed = await compute(stuff);
      array.push(computed);
    }
  }
  res.json({ stuff: array });
});
Note: you might need to update Node to the latest version.
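As a side note (not part of the original answer): the await inside the for loop waits for each compute call before starting the next one. If the calls are independent, they can run concurrently with Promise.all — a rough sketch, assuming the same compute and condition as above:

app.get('/', async function(req, res, next) {
  // Start every compute call at once; Promise.all preserves the original order.
  const array = await Promise.all(
    stuffs.map(stuff => condition ? stuff : compute(stuff))
  );
  res.json({ stuff: array });
});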
UPDATE:
For those who are not aware of how the event loop works: run this snippet and see for yourself:
const sleep = async function(ms) {
  console.log(`Sleeping ${ms}ms`);
  return new Promise( resolve => setTimeout(resolve, ms));
};

async function job() {
  console.log('start');
  for (let t = 0; t < 10; t++) {
    await sleep(100);
  }
}

job();
console.log('oops did not expect that oO');
You will be surprised.
Here is an answer without any package, using callbacks.
Create a function that's gonna recursively treat all your stuffs.
function getArray(stuffs, callback, index = 0, array = []) {
  // Did we treat all stuffs?
  if (index >= stuffs.length) {
    return callback(array);
  }
  // Treat one stuff
  if (condition) {
    array.push(stuffs[index]);
    // Call next
    return getArray(stuffs, callback, index + 1, array);
  }
  // Get a stuff asynchronously
  return api.compute(stuffs[index], (resp) => {
    array.push(resp.stuff);
    // Call next
    return getArray(stuffs, callback, index + 1, array);
  });
}
How to call it?
getArray(stuffs, (array) => {
  // Here you have your array
  // ...
});
EDIT: more explanation
What we want to do is transform the loop you had into a loop that handles asynchronous function calls.
The idea is that one getArray call treats one index of your stuffs array.
After treating one index, the function will call itself again to treat the next index, until all get treated.
-> Treat index 0 -> Treat index 1 -> Treat index 2 -> Return all result
We are using parameters to pass information through the process: index to know which part of the array we have to treat, and array to keep track of what we have calculated.
EDIT: Improvement to a 100% asynchronous solution
What we have done here is a simple transposition of your initial for loop into asynchronous code. It can be improved by making it totally asynchronous, which makes it better but slightly more difficult.
For example:
// Where we store the results
const array = [];

const calculationIsDone = (array) => {
  // Here our calculation is done
  // ---
};

// Function that's gonna aggregate the results coming asynchronously
// When we have gathered all results, we call a function
const gatherCalculResult = (newResult) => {
  array.push(newResult);
  if (array.length === stuffs.length) {
    calculationIsDone(array);
  }
};

// Function that makes the calculation for one stuff
const makeCalculation = (oneStuff) => {
  if (condition) {
    return gatherCalculResult(oneStuff);
  }
  // Get a stuff asynchronously
  return api.compute(oneStuff, (resp) => {
    gatherCalculResult(resp.stuff);
  });
};

// We trigger all calculations
stuffs.forEach(x => makeCalculation(x));
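One thing to note about this fully asynchronous version (my observation, not part of the original answer): results are pushed in completion order, not in the order of stuffs. If the order matters, a sketch that stores each result at its original index, under the same assumptions (stuffs, condition, api.compute, calculationIsDone), could look like this:

// Where we store the results, keyed by original index
const ordered = new Array(stuffs.length);
let remaining = stuffs.length;

const gatherAtIndex = (index, result) => {
  ordered[index] = result;
  remaining--;
  if (remaining === 0) {
    calculationIsDone(ordered);
  }
};

stuffs.forEach((oneStuff, index) => {
  if (condition) {
    return gatherAtIndex(index, oneStuff);
  }
  api.compute(oneStuff, (resp) => gatherAtIndex(index, resp.stuff));
});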
I am trying to control the flow of the execution in my code below, meaning I want it to be serial.
I am reading data from and writing updates to my DB, and of course I want that to happen in the correct order. Below is the function from which I call my DB; the query functions are wrapped in callbacks.
I am pretty new to promises, so perhaps the error is something silly I am overlooking. If you need to ask anything, please do so.
function my_function(array, array2)
{
  var array3 = [];
  return Promise.resolve(true)
    .then(function()
    {
      console.log("1")
      for(var i=0; i< array.length; i++)
      {
        get(array[i], function(results){
          console.log("2")
          array3.push(..);
        });
      }
      return array3;
    }).then(function()
    {
      console.log("3")
      for(var i=0; i< array2.length; i+=2)
      {
        //...
        get(array2[i], function(results){
          console.log("4")
          return array3.push(...);
        });
      }
      return array3;
    }).then(function(array3)
    {
      console.log("5")
      for(var i=0; i<array3.length; i++)
      {
        get(array3[i], function(results){
          console.log("6")
          update(.., function(callb_result){
            return;
          });
        });
      }
    });
}
And here is the way I am calling the queries.
function get(array, callback)
{
  db.get(`SELECT .. FROM .. WHERE ..;`, function(error, row) {
    ...
    return callback(something);
  });
}

function update(.., callback)
{
  db.run(`UPDATE .. SET ...`);
  return callback("updated"); //I dont want to return anything
}
What's printed in the log:
1
3
5
2
4
6
I was thinking that perhaps the way I am calling the queries is asynchronous and that's messing everything up.
You're using for loops to run asynchronous tasks and return an array that is modified by them. But because they are asynchronous the return happens before they are finished. Instead you can create an array of promises where each promise is one of the asynchronous tasks that resolves once the task is done. To wait until every task is done you can call Promise.all with the array of promises, which returns a promise that resolves with an array of the resolved results.
For the first .then you can use Array.prototype.map to easily create an array of promises. Each item in the array needs to return a new Promise that resolves with the result from the callback of get.
.then(function() {
  console.log("1");
  const promiseArray = array.map(function(item) {
    return new Promise(function(resolve) {
      get(item, function(result) {
        console.log("2");
        resolve(result);
      });
    });
  });
  return Promise.all(promiseArray);
})
Because you return Promise.all, the next .then will only be executed once all the promises in promiseArray are fulfilled. It will receive the array of results as the first parameter of the function, which means you can use them there. The second .then is similar to the first one, except that you don't want to call get on every item. In this case map is not applicable, so the for loop just creates a promise and adds it to the array of promises. Previously you used array3 to store the results that you wanted to update, but with promises you don't really need that: you can simply concatenate the results of both arrays.
.then(function(resultsArray) {
  console.log("3");
  const promiseArray2 = [];
  for (var i = 0; i < array2.length; i += 2) {
    const promise = new Promise(function(resolve) {
      get(array2[i], function(results) {
        console.log("4");
        resolve(results);
      });
    });
    promiseArray2.push(promise);
  }
  // Wait for all promises to be resolved
  // Then concatenate both arrays of results
  return Promise.all(promiseArray2).then(function(resultsArray2) {
    return resultsArray.concat(resultsArray2);
  });
})
This returns a promise that resolves with the concatenated array, so you will have all the results (from both .then calls) as an array, which is passed to the next .then function. In the third and final .then you simply call update on each element of the array. You don't need to call get again, as you've already done this and you passed on the results.
.then(function(finalResults) {
  console.log("5");
  for (var i = 0; i < finalResults.length; i++) {
    console.log("6");
    update(finalResults[i], function(result) {
      console.log(result);
    });
  }
});
Full runnable code (get uses a timeout to simulate asynchronous calls)
function myFunction(array, array2) {
  return Promise.resolve(true)
    .then(function() {
      console.log("1");
      const promiseArray = array.map(function(item) {
        return new Promise(function(resolve) {
          get(item, function(results) {
            console.log("2");
            resolve(results);
          });
        });
      });
      return Promise.all(promiseArray);
    })
    .then(function(resultsArray) {
      console.log("3");
      const promiseArray2 = [];
      for (var i = 0; i < array2.length; i += 2) {
        const promise = new Promise(function(resolve) {
          get(array2[i], function(results) {
            console.log("4");
            resolve(results);
          });
        });
        promiseArray2.push(promise);
      }
      return Promise.all(promiseArray2).then(function(resultsArray2) {
        return resultsArray.concat(resultsArray2);
      });
    })
    .then(function(finalResults) {
      console.log("5");
      for (var i = 0; i < finalResults.length; i++) {
        console.log("6");
        update(finalResults[i]);
      }
    });
}

function get(item, cb) {
  // Simply call the callback with the item after 1 second
  setTimeout(() => cb(item), 1000);
}

function update(item) {
  // Log what item is being updated
  console.log(`Updated ${item}`);
}

// Test data
const array = ["arr1item1", "arr1item2", "arr1item3"];
const array2 = ["arr2item1", "arr2item2", "arr2item3"];

myFunction(array, array2);
Improving the code
The code now works as expected, but there are many improvements that make it a lot easier to understand and conveniently also shorter.
To simplify the code you can change your get function to return a promise. This makes it a lot easier, since you don't need to create a promise in every step. And update doesn't need to be a promise, neither does it need a callback as it's synchronous.
function get(array) {
  return new Promise(function(resolve, reject) {
    db.get(`SELECT .. FROM .. WHERE ..;`, function(error, row) {
      if (error) {
        return reject(error);
      }
      resolve(something);
    });
  });
}
Now you can use get everywhere you used to create a new promise. Note: I added the reject case when there is an error, and you'll have to take care of them with a .catch on the promise.
There are still too many unnecessary .then calls. First of all Promise.resolve(true) is useless since you can just return the promise of the first .then call directly. All it did in your example was to automatically wrap the result of it in a promise.
You're also using two .then calls to create an array of the results. Not only that, but they perform exactly the same call, namely get. Currently you also wait until the first set has finished until you execute the second set, but they can be all executed at the same time. Instead you can create an array of all the get promises and then wait for all of them to finish.
function myFunction(array, array2) {
  // array.map(get) is equivalent to array.map(item => get(item))
  // which in turn is equivalent to:
  // array.map(function(item) {
  //   return get(item);
  // })
  const promiseArray = array.map(get);
  for (let i = 0; i < array2.length; i += 2) {
    promiseArray.push(get(array2[i]));
  }
  return Promise.all(promiseArray).then(results => results.forEach(update));
}
The myFunction body has been reduced from 32 lines of code (not counting the console.log("1") etc.) to 5.
Runnable Snippet
function myFunction(array, array2) {
  const promiseArray = array.map(get);
  for (let i = 0; i < array2.length; i += 2) {
    promiseArray.push(get(array2[i]));
  }
  return Promise.all(promiseArray).then(results => results.forEach(update));
}

function get(item) {
  console.log(`Starting get of ${item}`);
  return new Promise((resolve, reject) => {
    // Simply call the callback with the item after 1 second
    setTimeout(() => resolve(item), 1000);
  });
}

function update(item) {
  // Log what item is being updated
  console.log(`Updated ${item}`);
}

// Test data
const testArr1 = ["arr1item1", "arr1item2", "arr1item3"];
const testArr2 = ["arr2item1", "arr2item2", "arr2item3"];

myFunction(testArr1, testArr2).then(() => console.log("Updated all items"));
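Since the real get shown earlier (the one wrapping db.get) can reject on errors, a caller would typically also attach a .catch to myFunction — a minimal usage sketch:

myFunction(testArr1, testArr2)
  .then(() => console.log("Updated all items"))
  .catch(error => console.error("A query failed:", error));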
I want to call a child REST service from a parent REST service. The number of times the child service is called depends on the parameters of the parent service. I call all the child service instances concurrently with different parameters, and I want to combine the responses from all of them. I am using the snippet below, but I don't want to rely on a fixed timeout: the response should go out either when the timeout fires or when all calls to the child service have finished, whichever comes first.
for( i=0; i<length; i++)
{
  url=accountID[i] +'+'+sortcode[i] +'+' +accountHolderName[i];
  micro(url ,filter[i],function(resp)
  {
    this.resutlObject[count]=resp;
    console.log("count"+count);
    count=count+1;
  }.bind( {resutlObject: resutlObject} ));
}//end of for

setTimeout(function () {
  console.log("in time out");
  res.end(JSON.stringify(resutlObject || {}, null, 2));
},500);
You could also use Promises. Suppose each service call returns a promise; then you wait until all of them are fulfilled. Node.js supports promises starting from v4. If you have an earlier version of Node, just use some library.
// Instead of
function micro(url, filter, cb) {
  var resp = "result of async job"; // do some async work
  cb(resp);
}

// Modify your service to return a promise
function micro(url, filter) {
  return new Promise(function(resolve, reject) {
    var resp = "result of async job using `url` and `filter`";
    if (resp) {
      resolve(resp);
    } else {
      reject("reason");
    }
  });
}

// Create a list of service calls
var promises = [];
for( i=0; i<length; i++)
{
  url=accountID[i] +'+'+sortcode[i] +'+' +accountHolderName[i];
  promises.push(micro(url, filter[i]));
}

// Wait for all of them to fulfill
Promise.all(promises)
  .then(function(resultObject) {
    // Response
    res.end(JSON.stringify(resultObject || {}, null, 2));
  }, function(reason) {
    res.sendStatus(500);
    console.error(reason);
  });
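If you still want the 500ms cap from the original code (respond when either all calls finish or the timeout elapses, whichever comes first), one possible sketch is to race Promise.all against a timeout promise. timeoutAfter below is a hypothetical helper, not something the question's code already has:

// Hypothetical helper: resolves with a fallback value after ms milliseconds
function timeoutAfter(ms, fallback) {
  return new Promise(function(resolve) {
    setTimeout(function() { resolve(fallback); }, ms);
  });
}

// Whichever settles first wins: all responses, or the fallback after 500ms
Promise.race([Promise.all(promises), timeoutAfter(500, [])])
  .then(function(resultObject) {
    res.end(JSON.stringify(resultObject || {}, null, 2));
  }, function(reason) {
    res.sendStatus(500);
    console.error(reason);
  });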
You can use the async module. It provides a parallel forEach loop.
var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"};
var configs = {};

async.forEachOf(obj, function (value, key, callback) {
  fs.readFile(__dirname + value, "utf8", function (err, data) {
    if (err) return callback(err);
    try {
      configs[key] = JSON.parse(data);
    } catch (e) {
      return callback(e);
    }
    callback();
  })
}, function (err) {
  if (err) console.error(err.message);
  // configs is now a map of JSON data
  doSomethingWith(configs);
})
In this example it reads the files listed in the parameters.
You can do the same for your task, as in the sketch below.
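Adapted to the micro(url, filter, cb) calls from the question, a rough sketch using async.map might look like this (it assumes micro's callback only ever receives a response and never an error):

var async = require('async');

// Build the list of inputs first
var inputs = [];
for (var i = 0; i < length; i++) {
  inputs.push({
    url: accountID[i] + '+' + sortcode[i] + '+' + accountHolderName[i],
    filter: filter[i]
  });
}

async.map(inputs, function (input, callback) {
  micro(input.url, input.filter, function (resp) {
    // micro's callback has no error argument, so pass null as the error
    callback(null, resp);
  });
}, function (err, results) {
  // results is in the same order as inputs
  res.end(JSON.stringify(results || {}, null, 2));
});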
You could use the async module. It's designed to do the stuff you're after. Something like this:
var async = require('async');

var collection = [];
for(i=0;i<length;i++) {
  collection.push(
    (function(i) {
      return function(callback) {
        url=accountID[i] +'+'+sortcode[i] +'+' +accountHolderName[i];
        micro(url ,filter[i],function(resp) {
          callback(null, resp);
        });
      }
    })(i)
  );
}//end of for

async.parallel(collection, function(err, results) {
  console.log(results) // array of results from all requests
})
What happens
async.parallel takes an array of functions as an argument. Each function receives a callback as an argument. The callback is a function that takes an error and a result as arguments.
After all the callbacks are executed, async calls the final callback, which receives the array of results from all the other callbacks.
In the loop we are creating just that: a collection of functions. In this example the code is a bit more complex because we use a closure in order to preserve the value of i for each of these functions (see the shorter sketch below).
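As a side note (not part of the original answer): if block-scoped let is available, each iteration of the loop gets its own i, so the wrapping closure is no longer needed. A shortened sketch under the same assumptions (accountID, sortcode, accountHolderName, filter, micro):

var collection = [];
for (let i = 0; i < length; i++) {
  collection.push(function(callback) {
    var url = accountID[i] + '+' + sortcode[i] + '+' + accountHolderName[i];
    micro(url, filter[i], function(resp) {
      callback(null, resp);
    });
  });
}

async.parallel(collection, function(err, results) {
  console.log(results); // array of results, one per pushed function
});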
I have a loop that I need to run inside of a callback; unfortunately, accessing the array outside of the callback leaves me with a blank array. I know why this happens, but I want to know the best way to tackle it.
Gallery.prototype.getGallery = function(cb) {
  self = this;
  var cos = new pb.CustomObjectService();
  var ms = new pb.MediaService();
  var s = [];
  cos.loadTypeByName('Gallery Image', function(err, gallery){
    cos.findByType(gallery._id.toString(), function(err, rpy){
      for(var i = 0; i < rpy.length; i++){
        ms.loadById(rpy[i].Image, function(e,r){
          s.push(r.location);
          console.log(r.location); /* <-- logs expected data */
        });
      }
      console.log(s[0]); /* <-- this is undefined */
    });
  });
};
Replace your for loop with a call to async.*; in this case async.map seems right. Pass a callback to async.map; it will be invoked when all the individual calls to ms.loadById are done, with the array of results.
async.map(
  rpy,
  function(elt, callback) {
    ms.loadById(elt.Image, callback);
  },
  function(err, data) {
    // comes here after all individual async calls have completed
    // check errors; array of results is in data
  }
);
If you want to go into the promises world, then wrap the calls to ms.loadById in a promise. Here's a roll-your-own version, but various versions of what is usually called promisify are also out there.
function loadByIdPromise(elt) {
  return new Promise(function(resolve, reject) {
    ms.loadById(elt.Image, function(err, data) {
      if (err) return reject(err);
      resolve(data);
    });
  });
}
Then do a Promise.all on the resulting promises:
Promise.all(rpy.map(loadByIdPromise))
  .then(function(data) {
    // comes here when all individual async calls complete successfully
    // data is your array of results
  });
Using the promises style, your entire code would look like:
loadTypeByNamePromise('Gallery Image')
  .then(function(gallery) { return findByTypePromise(gallery._id.toString()); })
  .then(function(rpy) { return Promise.all(rpy.map(loadByIdPromise)); })
  .then(function(results) { /* do something with [results] */ });
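Put back into the original getGallery method, a hedged sketch might look like the following. It assumes loadTypeByNamePromise and findByTypePromise are promisified wrappers (built the same way as loadByIdPromise above) around the cos service calls from the question:

Gallery.prototype.getGallery = function(cb) {
  loadTypeByNamePromise('Gallery Image')
    .then(function(gallery) { return findByTypePromise(gallery._id.toString()); })
    .then(function(rpy) { return Promise.all(rpy.map(loadByIdPromise)); })
    .then(function(results) {
      // Keep only the locations, mirroring s.push(r.location) from the question
      cb(null, results.map(function(r) { return r.location; }));
    })
    .catch(function(err) { cb(err); });
};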