In my function I have to call async.series inside async.forEach to compute the final result and create my JSON. Is that possible?
async.series([
    function(callback) {
        // ...
        callback();
    },
    function(callback) {
        async.forEachSeries(temp, function(quest, callback) {
            // process quest, then call callback() to continue
        }, function(err) {
            if (err) return next(err);
            callback();
        });
    }
],
function(err) {
    if (err) return next(err);
    res.json(output);
});
You should be able to nest async functions into each other as deeply as you need; however, it is better to use a naming convention so you can easily track which callbacks are passed where and avoid collisions from shadowed callback names. So basically this should work as you'd expect:
async.series([
    function first(seriesCallback) {
        seriesCallback();
    },
    // other functions in series
    // ...
    function (seriesCallback) {
        var someArray = [];
        async.each(someArray, function (value, eachCallback) {
            // process the value; pass an error (or null) when done
            eachCallback(null);
        }, function (err) {
            // add any additional processing you might need,
            // then pass control back to the parent async method so errors
            // are handled in a more central place: an error here will
            // be processed in onSeriesDone, as will all other errors
            seriesCallback(err);
        });
    }
], function onSeriesDone(err) {
    next(err);
});
I am running a cron job in Node with MongoDB as the database. I am trying to close the db connection and exit the process once the curr_1 each loop has executed completely.
However, exit() is called while function_2 is still executing. I understand this is because the callback is asynchronous in nature.
How do I make sure exit is called only once the curr_1.each is complete?
Any solution without promises?
function function_1(obj){
    var curr_1 = coll_1.find({})
    curr_1.each(function(err, doc) {
        function_2(doc)
    });
    exit(obj)
}

function function_2(obj) {
    coll_2.findOne({}, function(err, document) {
        dosomeprocess(obj)
    })
}

function exit(obj) {
    // Close connection
    console.log('closing connection')
    obj.db.close();
    process.exit();
}
It's a job for Node async....
For example:
async.each(
    curr_1,                    // the collection to iterate over
    function(doc, callback) {  // the function, which is passed each
                               // document of the collection, and a
                               // callback to call when doc handling
                               // is complete (or an error occurs)
        function_2(doc, callback);  // function_2 must invoke the callback
                                    // once it has finished with the doc
    },
    function(err) {            // callback called when all iteratee functions
                               // have finished, or an error occurs
        if (err) {
            // handle errors...
        }
        exit(obj);             // called when all documents have been processed
    }
);
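For the final callback to fire, function_2 has to signal completion. A minimal sketch of it reworked to accept a callback, assuming dosomeprocess is synchronous:

function function_2(obj, callback) {
    coll_2.findOne({}, function(err, document) {
        if (err) return callback(err);
        dosomeprocess(obj);
        callback(null); // tells async.each this doc is finished
    });
}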
Without using any library:
function function_1(obj, callback) {
    var curr_1 = coll_1.find({})
    curr_1.each(function(err, doc) {
        // the driver signals the end of the cursor with a null doc;
        // only then is it safe to clean up and exit (note that any
        // findOne calls still in flight would need their own tracking)
        if (err || doc === null) return exit(err, obj);
        callback(err, doc);
    });
}

function function_2(err, obj) {
    coll_2.findOne({}, function(err, document) {
        dosomeprocess(obj)
    })
}

function exit(err, obj) {
    // Close connection
    console.log('closing connection')
    obj.db.close();
    process.exit();
}

function_1(obj, function_2);
Using the async module:
var async = require('async');

async.waterfall([
    function function_1(callback) {
        var curr_1 = coll_1.find({})
        curr_1.each(function(err, doc) {
            if (err) {
                callback(err, null)
            } else {
                callback(null, doc)
            }
        });
    },
    function function_2(obj, callback) {
        coll_2.findOne({}, function(err, document) {
            if (err) {
                callback(err, null);
            } else {
                dosomeprocess(obj)
                callback(null, obj);
            }
        })
    }
], function done(err) {
    obj.db.close();
    process.exit();
});
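One caveat with this waterfall version: a waterfall step's callback must be invoked exactly once, while curr_1.each fires for every document. A safer sketch collects the documents first with toArray, assuming obj is in scope as in the question:

async.waterfall([
    function function_1(callback) {
        coll_1.find({}).toArray(callback); // yields (err, docs) exactly once
    },
    function function_2(docs, callback) {
        coll_2.findOne({}, function(err, document) {
            if (err) return callback(err);
            docs.forEach(dosomeprocess);
            callback(null, docs);
        });
    }
], function done(err) {
    if (err) console.error(err);
    obj.db.close();
    process.exit();
});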
Simply add a condition in your loop using a counter.
function function_1(obj){
    var curr_1 = coll_1.find({})
    var curr_1Length = curr_1.length;
    var counter = 0;
    curr_1.each(function(err, doc) {
        ++counter;
        function_2(doc)
        // check on every iteration whether this was the last document
        if (counter === curr_1Length) {
            exit(obj)
        }
    });
}
Hope it helps :)
I've got the following code now:
exports.listByUser = function(req, res, next) {
    Attack.find({user: req.user._id}, function(err, attacks) {
        if (err)
            return next(err);
        for (var i in attacks) {
            attacks[i].evaluateFight();
        }
        res.json(attacks);
    });
};
The main problem is that attacks[i].evaluateFight() runs asynchronously. I want to transform the loop to make sure the [i-1] iteration is done before iteration [i] starts, and finally call res.json(attacks). I think it can be done with async, but I don't know how :( Something like this should work, but how can I call the method on each attack?
async.eachSeries(attacks, function (attack, callback) {
    //something??
    callback();
}, function (err) {
    if (err) { throw err; }
    res.json(attacks);
});
You can leverage async's whilst method to implement this. However, I have one question about the callback of evaluateFight: if it executes asynchronously, there has to be some callback associated with it to signal that the previous call succeeded.
The example code could be as follows, assuming evaluateFight invokes a callback when completed:
exports.listByUser = function(req, res, next) {
    Attack.find({user: req.user._id}, function(err, attacks) {
        if (err)
            return next(err);
        var attacksLength = attacks.length;
        var count = 0;
        async.whilst(function () {
            return count < attacksLength;
        },
        function (callback) {
            attacks[count].evaluateFight(function(err, result) {
                count++;
                callback();
            }); // assuming it invokes a callback on success
        },
        function (err) {
            // all the iterations have been successfully called;
            // return the response
            res.json(attacks);
        });
    });
};
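Alternatively, the async.eachSeries sketch from the question can be completed the same way, again assuming evaluateFight takes a completion callback:

async.eachSeries(attacks, function (attack, callback) {
    attack.evaluateFight(function (err, result) {
        callback(err); // continue with the next attack, or abort on error
    });
}, function (err) {
    if (err) return next(err);
    res.json(attacks);
});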
I'm fixing some bugs in a project. I need a synchronous cursor to load data in a loop.
I have code:
var mongo = require('mongojs'),
    dataMaps = [];

mongo.penguin_friends.find({ user_id_1: id_user }, function(err, friends) {
    friends.forEach(function(data) {
        var cursor = mongo.penguin_user_maps3.find({ user_id: data.id_user }, { fields: { maps: 1 } });
        //I need to do dataMaps.push(cursor.data);
    });
    console.log("processingThisSomething()");
    processSomething();
});
I need to complete this request before calling processSomething(), so I need to process the MongoDB query inside the loop synchronously.
It's not possible to make the queries synchronous as the API doesn't support it.
You'll have to provide a callback to .find() or a cursor method to receive the results:
cursor.toArray(function (err, maps) {
    if (!err) {
        dataMaps.push(maps);
    }
});
But you can replace the iterator with one that's asynchronous-aware, such as async.each(), so processSomething() runs only after all of the queries have completed:
async.each(
    friends,
    function (data, callback) {
        // the same find() call as in the question
        var cursor = mongo.penguin_user_maps3.find({ user_id: data.id_user }, { fields: { maps: 1 } });
        cursor.toArray(function (err, maps) {
            if (!err) {
                dataMaps.push(maps);
            }
            callback(err);
        });
    },
    function (err) {
        if (!err) {
            processSomething();
        }
    }
);
I have a situation like:
function smth(data) {
    // save data to db.
    Object.findOne({ _id: ceva['id'] }, function(obj) {
        obj.save();
    });
}
This function is called from various async calls. There is a race condition where the second findOne call runs before a previous save() runs.
Is there a way to work around this? Maybe using the async library to run things in series?
You can make use of one of async's control flows to ensure every iteration of smth() happens in order.
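For example, a queue with a concurrency of 1 guarantees that one smth() call finishes before the next begins. A minimal sketch, assuming smth is reworked to take a completion callback (as in the snippet further below); someData stands in for whatever callers currently pass:

var async = require('async');

// the worker runs one task at a time; concurrency 1 serializes the calls
var queue = async.queue(function (data, done) {
    smth(data, done); // done fires only after the findOne and save complete
}, 1);

// callers push onto the queue instead of invoking smth directly
queue.push(someData);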
If you're not in favor of using a flow control library, you can easily achieve series execution of each event. Consider the following code snippet:
function smth(data, cb) {
    // save data to db.
    findOne({
        id: data.id
    }, function (err, obj) {
        if (!err && obj) {
            savedb(obj, cb);
        } else {
            cb(err);
        }
    });
}

function findOne(filter, cb) {
    // simulate find
    setTimeout(function () {
        cb(null, {
            id: filter.id,
            name: 'test',
            role: 'test'
        });
    }, 500);
}

function savedb(obj, cb) {
    // simulate db save
    setTimeout(function () {
        cb(null, obj);
    }, 500);
}

// iterations count
var count = parseInt(process.argv[2], 10) || 3;

(function iterate(i) {
    console.log(i);
    if (i === count) {
        // iterations complete
        process.exit(1);
    }
    var data = {
        id: 123 + i
    };
    smth(data, function (err, res) {
        console.log(err || res);
        iterate(++i);
    });
})(0);
// make this follow async conventions with the callback argument last
function smth(data, callback) {
    // pseudocode database API here
    db.save(data, function (error) {
        if (error) {
            callback(error);
            return;
        }
        Object.findOne({ _id: ceva['id'] }, function (obj) {
            obj.save(callback);
        });
    });
}
That's the basic callback approach. You can use async.js if you like to clean it up a bit or study callbackhell.com for more ways to avoid the nested functions.
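For instance, the same function flattened with async.waterfall might look like the sketch below, keeping the pseudocode db.save and the ceva lookup from above:

var async = require('async');

function smth(data, callback) {
    async.waterfall([
        function (next) {
            db.save(data, next); // persist the incoming data first
        },
        function (next) {
            // same findOne signature as above (no error argument)
            Object.findOne({ _id: ceva['id'] }, function (obj) {
                next(null, obj);
            });
        },
        function (obj, next) {
            obj.save(next); // the final result flows through to callback
        }
    ], callback);
}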
I wrote a little async script to batch insert a lot of JSON files into a MongoDB sharded cluster. This is my first time with this module (and I'm still learning Node.js). I don't know if I'm doing it right.
The code is the last part of a waterfall (1): previous functions end up with an object with db, coll and files properties. The files array contains hundreds of file paths, and the function to apply to each element of the array is, again, a waterfall (2).
Waterfall (2) is made of the following: read, parse, insert. When this waterfall ends (3) I call complete to finalize the processing of a single item in the array, passing the error (if any).
So far so good, correct?
What I can't understand is what happens inside the async.eachLimit callback (4). From the documentation:
A callback which is called after all the iterator functions have
finished, or an error has occurred.
That is, when all functions have finished, the next() call (5) ends the script. But the same callback (4) is also invoked when a single error occurs, as per the documentation. That is, my script stops when a single file fails.
How can I avoid this?
async.waterfall([ // 1
    // ...
    function (obj, next) {
        async.eachLimit(obj.files, 1000,
            function (file, complete) {
                async.waterfall([ // 2
                    function (next) {
                        fs.readFile(file, {}, function (err, data) {
                            next(err, data);
                        });
                    },
                    function (data, next) { // Parse (assuming all well formed)
                        next(null, JSON.parse(data));
                    },
                    function (doc, next) { // Insert
                        obj.coll.insert(doc, {w: 1}, function (err, doc) {
                            next(err);
                        });
                    }
                ], function (err, result) { // 3
                    complete(err);
                });
            },
            function (err) { // 4
                if (err) console.error(err);
                next(null, obj); // 5
            }
        );
    }
], function (err, obj) { // Waterfall end
    if (err) console.error(err);
    obj.db.close(); // Always close the connection
});
If you don't want it to break in case of an error, you should just invoke the callback with a falsy first parameter, like so (look after // 3).
Is this ok with you / did I understand correctly?
async.waterfall([ // 1
    // ...
    function (obj, next) {
        async.eachLimit(obj.files, 1000,
            function (file, complete) {
                async.waterfall([ // 2
                    function (next) {
                        fs.readFile(file, {}, function (err, data) {
                            next(err, data);
                        });
                    },
                    function (data, next) { // Parse (assuming all well formed)
                        next(null, JSON.parse(data));
                    },
                    function (doc, next) { // Insert
                        obj.coll.insert(doc, {w: 1}, function (err, doc) {
                            next(err);
                        });
                    }
                ], function (err, result) { // 3
                    if (err) {
                        console.log(file + ' threw an error');
                        console.log(err);
                        console.log('proceeding with execution');
                    }
                    complete();
                });
            },
            function (err) { // 4
                next(null, obj); // 5
            }
        );
    }
], function (err, obj) { // Waterfall end
    if (err) console.error(err);
    obj.db.close(); // Always close the connection
});