I'm fixing some bugs in a project and need a synchronous cursor so I can load data inside a loop.
I have this code:
var mongo = require('mongojs'),
    dataMaps = [];

mongo.penguin_friends.find({ user_id_1: id_user }, function (err, friends) {
    friends.forEach(function (data) {
        var cursor = mongo.penguin_user_maps3.find({ user_id: data.id_user }, { fields: { maps: 1 } });
        // I need to do dataMaps.push(cursor.data);
    });
    console.log("processingThisSomething()");
    processSomething();
});
I need this request to complete before calling processSomething(), so I need to process the MongoDB queries inside the loop synchronously.
It's not possible to make the queries synchronous as the API doesn't support it.
You'll have to provide a callback to .find() or a cursor method to receive the results:
cursor.toArray(function (err, maps) {
    if (!err) {
        dataMaps.push(maps);
    }
});
But you can replace the iterator with one that's asynchronous-aware, such as async.each(), and continue once they've all completed:
async.each(
    friends,
    function (data, callback) {
        var cursor = mongo.penguin_user_maps3.find({ user_id: data.id_user }, { fields: { maps: 1 } });
        cursor.toArray(function (err, maps) {
            if (!err) {
                dataMaps.push(maps);
            }
            callback(err);
        });
    },
    function (err) {
        if (!err) {
            processSomething();
        }
    }
);
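Note that async.each runs the iterator for every friend in parallel. If dataMaps needs to end up in the same order as friends, async.eachSeries is a drop-in swap that processes one friend at a time; a minimal sketch reusing the same query:
async.eachSeries(friends, function (data, callback) {
    var cursor = mongo.penguin_user_maps3.find({ user_id: data.id_user }, { fields: { maps: 1 } });
    cursor.toArray(function (err, maps) {
        if (!err) {
            dataMaps.push(maps);
        }
        // signal completion so the next friend is processed
        callback(err);
    });
}, function (err) {
    if (!err) {
        processSomething();
    }
});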
This is probably a case for promises, but I'd like to check with experts before going that route.
Need to do:
1. Read an entire file line by line into MongoDB collection A.
2. Upon completion of step 1, insert/update/delete documents in collection B based on the state of collection A. If a document is not present in A, delete it from B.
Problem: even before step 1 completes, step 2 starts executing and begins deleting records from B.
Tried so far: async.series does not work. My code is below.
MongoClient.connect(config.mongodb.uri, function (err, db) {
    if (err) {
        logger.error('Unable to connect to the mongoDB server. Error:', err);
        reject(err);
    } else {
        let startTime = new Date();
        async.series([
            function (callback) {
                console.log('First in series');
                db.collection('eligibilityStage').drop({}, function (err, oldObject) {
                    debugger;
                    var lr = new LineByLineReader(config.eligibiltyFile.fileRemoteLocation + '/' + latestEligibilityfileName);
                    lr.on('error', function (err) {
                        console.log(err);
                    });
                    var lineCount;
                    lr.on('line', function (line) { // ** --> jumps from here to the second function in the series **
                        if (line.length == config.eligibiltyFile.detailRecordlineWidth) {
                            var document = require('fixy').parse({
                                map: mapData, options: {
                                    skiplines: null, fullwidth: config.eligibiltyFile.detailRecordlineWidth
                                }
                            }, line);
                            db.collection('eligibilityStage').insertOne(document[0], function (err, records) {
                                lineCount++;
                                if (err) {
                                    console.log(err);
                                }
                            });
                        }
                    });
                    lr.on('end', function () {
                        console.log('File is closed, read lines:' + lineCount);
                        console.log('File is closed, rowcount:' + db.eigibilityStage.Count());
                    });
                    callback(null, 'loadStage');
                });
            },
            function (callback) {
                // Deletes
                console.log('Series 2 function, read lines:' + lineCount);
                console.log('Series 2 function, rowcount:' + db.eigibilityStage.Count());
                callback(null, 'processStage');
            }
        ],
        function (err, results) {
        });
    }
})
Am I doing it wrong? Or is this a standard problem that should be solved with promises?
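For what it's worth, the usual culprit in code shaped like this is that callback(null, 'loadStage') runs as soon as the line reader's handlers have been wired up, not when the file has actually been read, so async.series correctly moves on to step 2. A minimal sketch of the fix under that assumption: only hand control back to async.series from the reader's 'end' event.
lr.on('end', function () {
    console.log('File is closed, read lines:' + lineCount);
    // the file is fully read only now, so this is where the
    // first series task should signal completion
    callback(null, 'loadStage');
});
// ...and remove the unconditional callback(null, 'loadStage')
// that previously ran right after registering the handlers.
Note that the insertOne callbacks may still be in flight when 'end' fires; if step 2 depends on every insert having landed, count pending inserts and call callback only when that count reaches zero.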
In my function I have to call async.series inside async.forEach to compute the final result and create my JSON. Is that possible?
async.series([
    function (callback) {
        // ...
        callback();
    },
    function (callback) {
        async.forEachSeries(temp, function (quest, callback) {
            // ...
        }, function (err) {
            if (err) return next(err);
        });
        callback();
    }
],
function (err) {
    if (err) return next(err);
    res.json(output);
});
You should be able to nest async functions as deeply as you need; however, it's better to use a naming convention so you can easily track which callback is passed where and avoid collisions due to shadowing. So basically this should work as you'd expect:
async.series([
    function first(seriesCallback) {
        seriesCallback();
    },
    // other functions in series
    // ...
    function (seriesCallback) {
        var someArray = [];
        async.each(someArray, function (value, eachCallback) {
            // process the value, then signal completion,
            // passing an error if there is one
            eachCallback(null);
        }, function (err) {
            // add any additional processing you might need, then
            // pass control back to the parent async method so errors
            // are handled in a central place: an error here reaches
            // onSeriesDone just like all other errors
            seriesCallback(err);
        });
    }], function onSeriesDone(err) {
        next(err);
    });
I have a requirement to make several API requests and then do some processing on the combined result sets. In the example below, you can see that three requests are made (to /create) by duplicating the same request code, but I would like to be able to specify how many to make. For example, I may wish to run the same API call 50 times.
How can I make n calls without duplicating the API call function n times?
async.parallel([
    function (callback) {
        request.post('http://localhost:3000/create')
            .send(conf)
            .end(function (err, res) {
                if (err) {
                    callback(err, null);
                }
                callback(null, res.body.id);
            });
    },
    function (callback) {
        request.post('http://localhost:3000/create')
            .send(conf)
            .end(function (err, res) {
                if (err) {
                    callback(err, null);
                }
                callback(null, res.body.id);
            });
    },
    function (callback) {
        request.post('http://localhost:3000/create')
            .send(conf)
            .end(function (err, res) {
                if (err) {
                    callback(err, null);
                }
                callback(null, res.body.id);
            });
    }
],
function (err, results) {
    if (err) {
        console.log(err);
    }
    // do stuff with results
});
First, wrap the code that you want to call many times in a function:
var doRequest = function (callback) {
request.post('http://localhost:3000/create')
.send(conf)
.end(function (err, res) {
if (err) {
callback(err);
}
callback(null, res.body.id);
});
}
Then, use the async.times function:
async.times(50, function (n, next) {
    doRequest(function (err, result) {
        next(err, result);
    });
}, function (error, results) {
    // do something with your results
});
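If firing all 50 requests at once is too aggressive for the server, the same shape works with a concurrency cap; a minimal sketch, assuming a version of async that ships timesLimit (async.timesSeries, one call at a time, is the older alternative):
// at most 5 requests in flight at any moment
async.timesLimit(50, 5, function (n, next) {
    doRequest(next);
}, function (error, results) {
    // results still arrives as an array of 50 ids
});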
Create an array with as many references to the function as you need tasks in your workload. Then pass them to async.parallel. For example:
var async = require("async");
var slowone = function (callback) {
setTimeout(function () {
callback(null, 1);
}, 1000);
};
async.parallel(
dd(slowone, 100),
function (err, r) {
console.log(JSON.stringify(r));
}
);
// Returns an array with count instances of value.
function dd(value, count) {
var result = [];
for (var i=0; i<count; i++) {
result.push(value);
}
return result;
}
Note again that there is only one instance of the slow running function, in spite of there being many references to it.
I have an array of documents with unique _id values and I want to insert them into my database. Some of them are already in the db, and for those I want to update an array property (push an item into the array). All of this needs to happen asynchronously, so after everything is inserted/updated I want to send a response back (with a callback) to the client saying that all is OK, or send an error. After googling the subject I found a solution using the async module and tried to implement it for my case. Now my code looks like this:
function processUsers(arr, listName, callback) {
    var users = global.db.collection('vkusers');
    var q = async.queue(function (task, cb) {
        console.log('upsert butch');
        users.insert(task.doc, function (err, doc) {
            if (err) {
                users.update({
                    _id: task.doc._id
                }, {
                    $addToSet: {
                        mLists: listName
                    }
                }, function (error, result) { console.log(error); console.log(result); });
            }
        });
    }, arr.length);
    for (var doc in arr) {
        q.push({
            doc: arr[doc]
        }, function (err) {
            if (err)
                callback(err, null);
        });
    }
    q.drain = function () {
        // this is the queue's callback, called when the queue is empty,
        // i.e. when all your documents have been processed.
        console.log('drain');
        callback(null, { result: "success", upserted: arr.length });
    };
}
The callback has signature callback(error, result), and arr is my array of documents. I've tested it and everything is OK on the database side; I get the right result. But the callback and q.drain never fire!
You need to call async.queue's callback (cb in your code) when your insert/update is complete. Something like this:
var q = async.queue(function (task, cb) {
    console.log('upsert butch');
    users.insert(task.doc, function (err, doc) {
        if (err) {
            users.update({
                _id: task.doc._id
            }, {
                $addToSet: {
                    mLists: listName
                }
            }, function (error, result) {
                console.log(error);
                console.log(result);
                cb(error); // update finished; pass "error" along so it can bubble up if it exists
            });
        } else {
            cb(); // insert succeeded; call cb
        }
    });
}, arr.length);
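As an aside, the insert-then-update-on-error dance can usually be collapsed into a single upsert, which also avoids treating the duplicate-key error as control flow. A minimal sketch against the same collection; the $setOnInsert field shown (name) is a hypothetical stand-in for whatever the full document carries:
users.update(
    { _id: task.doc._id },
    {
        // always add the list name to the set
        $addToSet: { mLists: listName },
        // fields written only when the document is first created
        $setOnInsert: { name: task.doc.name }
    },
    { upsert: true },
    function (err) {
        cb(err); // one round trip, one completion signal
    }
);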
I have a situation like:
function smth(data) {
    // save data to db.
    Object.findOne({ _id: ceva['id'] }, function (obj) {
        obj.save();
    });
}
This function is called from various async calls. There is a race condition where the second findOne call runs before a previous save() runs.
Is there a way to work around this? Maybe using the async library to run things in series?
You can make use of one of async's control flows to ensure every iteration of smth() happens in order.
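For instance, a queue with a concurrency of 1 serializes all calls: each task must finish before the next starts. A minimal sketch, assuming smth takes a completion callback as in the snippets below (data1 and data2 are placeholder payloads):
var async = require('async');
// one worker means strictly ordered, non-overlapping execution
var dbQueue = async.queue(function (data, done) {
    smth(data, done);
}, 1);
// callers push work instead of invoking smth directly
dbQueue.push(data1);
dbQueue.push(data2);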
If you're not in favor of using a flow-control library, you can easily achieve serial execution yourself. Consider the following code snippet:
function smth(data, cb) {
    // save data to db.
    findOne({
        id: data.id
    }, function (err, obj) {
        if (!err && obj) {
            savedb(obj, cb);
        } else {
            cb(err);
        }
    });
}

function findOne(filter, cb) {
    // simulate find
    setTimeout(function () {
        cb(null, {
            id: filter.id,
            name: 'test',
            role: 'test'
        });
    }, 500);
}

function savedb(obj, cb) {
    // simulate db save
    setTimeout(function () {
        cb(null, obj);
    }, 500);
}

// iterations count
var count = parseInt(process.argv[2], 10) || 3;

(function iterate(i) {
    console.log(i);
    if (i === count) {
        // iterations complete
        process.exit(1);
    }
    var data = {
        id: 123 + i
    };
    smth(data, function (err, res) {
        console.log(err || res);
        iterate(++i);
    });
})(0);
// make this follow async conventions, with the callback argument last
function smth(data, callback) {
    // pseudocode database API here
    db.save(data, function (error) {
        if (error) {
            callback(error);
            return;
        }
        Object.findOne({ _id: ceva['id'] }, function (obj) {
            obj.save(callback);
        });
    });
}
That's the basic callback approach. You can use async.js if you'd like to clean it up a bit, or study callbackhell.com for more ways to avoid deeply nested functions.
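For illustration, here is the same flow rewritten with async.waterfall; db.save, Object.findOne, and ceva are the same pseudocode names used above:
var async = require('async');

function smth(data, callback) {
    async.waterfall([
        function (next) {
            // save the incoming data first
            db.save(data, next);
        },
        function (next) {
            // then load the related document; findOne's callback
            // has no error argument in this pseudocode API
            Object.findOne({ _id: ceva['id'] }, function (obj) {
                next(null, obj);
            });
        },
        function (obj, next) {
            // finally persist the loaded document
            obj.save(next);
        }
    ], callback);
}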