Any better solution for asynchronous queries from a main table and a sub table? - node.js

I have two tables, tb_article and tb_attach_url, like this:
==============tb_article=================
id  title   content
1   "echo"  "test"
2   "foo"   "bar"
==============tb_attach_url==============
id  article_id  url_val
1   2           "http://.../foo.png"
2   2           "http://.../bar.png"
3   1           "http://.../test.png"
My scenario is this:
I want to show a list of tb_article rows,
but I also need the related tb_attach_url information for each one.
My solution is like this:
function load_article_urls(id, idx, callback) {
    connection.query("select url_val from tb_attach_url where article_id = ?", [id], function (err, results) {
        if (err) throw err;
        var images = [];
        for (var j = 0; j < results.length; j++) {
            images.push(results[j]["url_val"]);
        }
        callback(idx, images);
    });
}
function load_article(req, res) {
    connection.query('select * from tb_article where id = ?', [req.query.id], function (err, results) {
        if (err) throw err;
        var cnt = 0;
        for (var i = 0; i < results.length; i++) {
            load_article_urls(results[i].id, i, function (idx, urls) {
                results[idx]["urlset"] = urls;
                cnt++;
                if (cnt == results.length) {
                    // waiting for all callbacks to come back
                    res.render('list_article', { article_list: results });
                }
            });
        }
    });
}
I think this implementation is a little ugly, and I'd like to know a better way.
Thanks for any help.

You can use async.map (GitHub) to do something like this:
async.map(results,
    function (article, callback) { /* fetch the urls for this article, then call callback(null, urls) */ },
    function (err, allUrls) {
        // Combine results and allUrls - they are in the same order.
    });
Or use one of the other helpers in async to achieve a similar effect.
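Applied to the code in the question, a minimal sketch with async.map could look like this (assuming the async library has been required; the url_val column name follows the table definition above):
function load_article(req, res) {
    connection.query('select * from tb_article where id = ?', [req.query.id], function (err, articles) {
        if (err) throw err;
        async.map(articles, function (article, callback) {
            connection.query('select url_val from tb_attach_url where article_id = ?', [article.id], function (err, rows) {
                if (err) return callback(err);
                callback(null, rows.map(function (row) { return row.url_val; }));
            });
        }, function (err, allUrls) {
            if (err) throw err;
            // allUrls[i] holds the urls for articles[i], in the same order
            articles.forEach(function (article, i) { article.urlset = allUrls[i]; });
            res.render('list_article', { article_list: articles });
        });
    });
}
Because async.map preserves the order of its results, the manual counter from the original version isn't needed.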

Related

MongoDB Collection loop

I am trying to run a find inside another find and I am not getting any results from the second find operation.
User.find({}, function (err, docs) {
    for (i = 0; i < docs.length; i++) {
        var tmp = '';
        UserGroups.find({userName: docs[i].userName}, function (errin, groups) {
            for (g = 0; g < groups.length; g++) {
                tmp += ", " + groups[g].groupName;
                //console.log(groups[g].groupName);
            }
        });
        console.log(tmp);
        //docs[i].group = that;
        docs[i].username = decrypt(docs[i].username);
        docs[i].password = '';
    }
    res.render('users', {users: docs});
});
Your UserGroups.find runs asynchronously, so console.log(tmp) executes before UserGroups.find has a chance to finish, and your call returns before you get any results. If you want the results of the UserGroups.find, you need to move all of your logic into that callback.
EDIT
I believe this is a far better approach in terms of predictability and query performance. With your previous approach, UserGroups.find is called n times, where n is the number of users in your database. With this approach the database is only queried twice: once to get all the users and once to get all the groups.
User.find({}, function (err, docs) {
    // Get all the usernames before executing the UserGroups query
    var userNames = [];
    docs.forEach(function (element) {
        userNames.push(element.userName);
    });
    UserGroups.find({userName: {$in: userNames}}, function (errin, groups) {
        for (var i = 0; i < docs.length; i++) {
            // get all the groups that belong to this user
            var userGroups = groups.filter(function (value) {
                return value.userName === docs[i].userName;
            });
            var tmp = "";
            userGroups.forEach(function (element) {
                tmp += "," + element.groupName;
            });
            //docs[i].group = that;
            docs[i].username = decrypt(docs[i].username);
            docs[i].password = '';
        }
        res.render('users', {users: docs});
    });
});
Also, since it appears you are using Mongoose, you can use Mongoose's built-in populate feature to "join" collections together.
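A minimal sketch of the populate route, assuming the User schema is changed to store ObjectId references to the group documents (the groups field name here is hypothetical):
var userSchema = new mongoose.Schema({
    userName: String,
    password: String,
    groups: [{ type: mongoose.Schema.Types.ObjectId, ref: 'UserGroup' }]
});
var User = mongoose.model('User', userSchema);

User.find({}).populate('groups').exec(function (err, docs) {
    if (err) throw err;
    // each doc.groups is now an array of UserGroup documents
    res.render('users', { users: docs });
});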

Synchronous for loop in node js

So let's say I have the following for loop
for (var i = 0; i < array.length; i++) {
    Model.findOne({ _id: array[i].id }, function (err, found) {
        // Some stuff
    });
}
How do I make this code work? Every time I run it I get array[i] = undefined, because the MongoDB query is asynchronous and the loop has already iterated 5 times by the time the first query has even completed. How do I go about tackling this issue and waiting for the query to complete before going on to the next iteration?
This doesn't specifically answer your question, but addresses your problem.
I'd use an $in query and do the filtering all at once. 20 calls to the db is pretty slow compared to 1:
// grab your ids
var arrayIds = myArray.map(function (item) {
    return item._id;
});

// find all of them
Model.find({_id: {$in: arrayIds}}, function (error, foundItems) {
    if (error) {
        // error handle
    }
    // set up a map of the found ids
    var foundItemsMap = {};
    foundItems.forEach(function (item) {
        foundItemsMap[item._id] = true;
    });
    // pull out your items that haven't been created yet
    var newItems = [];
    for (var i = 0; i < myArray.length; i++) {
        var arrayItem = myArray[i];
        if (foundItemsMap[arrayItem._id]) {
            // this array item exists in the map of foundIds
            // so the item already exists in the database
        } else {
            // it doesn't exist, push it into the new array
            newItems.push(arrayItem);
        }
    }
    // now you have `newItems`, an array of objects that aren't in the database
});
One of the easiest ways to accomplish something like this is with promises. You could use the q library to do it:
var Q = require('q');

function fetchOne(id) {
    var deferred = Q.defer();
    Model.findOne({ _id: id }, function (err, found) {
        if (err) deferred.reject(err);
        else deferred.resolve(found);
    });
    return deferred.promise;
}

function fetch(ids, action) {
    if (ids.length === 0) return;
    var id = ids.pop();
    fetchOne(id).then(function (model) {
        action(model);
        fetch(ids, action);
    });
}

fetch([1, 2, 3, 4, 5], function (model) { /* do something */ });
It is not the most beautiful implementation, but I'm sure you get the picture :)
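If sequential execution isn't actually required, Q.all could run the lookups in parallel and still hand the results back in order. A sketch, reusing fetchOne from above:
function fetchAll(ids) {
    // ids.map(fetchOne) starts all the queries; Q.all resolves once every one has finished
    return Q.all(ids.map(fetchOne));
}

fetchAll([1, 2, 3, 4, 5]).then(function (models) {
    // models[i] corresponds to ids[i]
}, function (err) {
    // first error that occurred
});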
Not sure if this is the right way, and it could be a bit expensive, but this is how I did it.
I think the trick is to pull all your data and then look for an id match.
Model.find(function (err, data) {
    if (err) { /* handle it */ }
    for (var i = 0; i < array.length; i++) {
        for (var j = 0; j < data.length; j++) {
            if (data[j].id == array[i].id) {
                // do something
            }
        }
    }
});
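The nested loop scans data once per array item; a small variation of the same idea builds a lookup map first, so each item costs a single object lookup rather than a scan. A sketch, not tested against the original schema:
Model.find(function (err, data) {
    if (err) throw err;
    // index the fetched documents by id
    var byId = {};
    data.forEach(function (doc) { byId[doc.id] = doc; });
    array.forEach(function (item) {
        var match = byId[item.id];
        if (match) {
            // do something with the matching document
        }
    });
});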

Asynchronous Iteration of Array of JSON

I'm using Node.js with MongoDB, to be more specific, MongoLab and Mongoose.
In the DB, I have two collections, pours and users. A user object would be linked to multiple pour objects with a shared cid.
I need to iterate through an array of user objects, but a for loop doesn't work with the async functions that I would like to use to modify my array.
express.leaderboard = new Array();
mongoose.Users.find(function (err, users) {
    for (var i = 0; i < users.length; i++) {
        express.leaderboard[express.leaderboard.length] = users[i];
    }
    for (i = 0; i < express.leaderboard.length; i++) {
        updateOunces(i, function (a, fluidOunces) {
            console.log(a);
            express.leaderboard[a].set('totalOunces', fluidOunces);
        });
    }
});
And this is my function that would retrieve the total fluidOunces for a user.
function updateOunces(i, callback) {
    //console.log(express.leaderboard[b].cid);
    mongoose.Pours.find({
        "cid": express.leaderboard[i].cid
    }).exec(function (err, result) {
        var userOunces = 0.0;
        if (!err) {
            for (i = 0; i < result.length; i += 1) {
                for (j = 0; j < result[i].pour.length; j += 1) {
                    userOunces += result[i].pour[j].fluidOunces;
                }
            }
            callback(i, userOunces);
            return;
            express.leaderboard[i].set('totalOunces', userOunces);
        } else {
            console.log(err);
        }
    });
}
Is there a way to iterate and add a new property to each object in the leaderboard array, using async? Thank you!
Use the async library.
Example:
mongoose.Users.find(function (err, users) {
    async.each(users, function (user, callback) {
        // perform updates here, per user.
        callback();
    }, function (err) {
        // everything is complete.
    });
});
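Applied to the leaderboard code from the question, a sketch could look like this (it keeps the Pours query and field names from the original, which are assumptions about the schema):
mongoose.Users.find(function (err, users) {
    if (err) throw err;
    express.leaderboard = users;
    async.each(express.leaderboard, function (user, callback) {
        mongoose.Pours.find({ "cid": user.cid }).exec(function (err, result) {
            if (err) return callback(err);
            var userOunces = 0.0;
            // sum the fluidOunces across every pour document for this user
            result.forEach(function (pourDoc) {
                pourDoc.pour.forEach(function (p) {
                    userOunces += p.fluidOunces;
                });
            });
            user.set('totalOunces', userOunces);
            callback();
        });
    }, function (err) {
        // every user has been updated (or err holds the first failure)
    });
});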

mongodb nodejs each vs toArray

I've had a quick look around and not found anything that satisfies me with an answer, but basically I've started to use node.js with express and mongodb to create a web API rather than the usual .Net MVC Web API route.
One thing I've noted, though, is that in order to return a collection of results I'm doing it in a rather bulky way, or that's how it feels at least.
app.get('/property', function (req, res) {
    var propArray = [];
    MongoClient.connect(settings.connection,
        function (err, db) {
            if (err) throw err;
            var properties = db.collection("PROPERTIES");
            var searchParams = {
                Active: true,
                Deleted: false
            };
            properties.count(searchParams, function (err, count) {
                properties.find(searchParams).toArray(function (err, result) {
                    for (i = 0; i < count; i++)
                        propArray.push(new models.propertyModel(result[i]));
                    db.close();
                    return res.json(propArray);
                });
            });
        }
    );
});
Now I've noted that there's an .each function rather than .toArray, which I would prefer to use as I could cut out the .count call, but obviously you can only return a response once. I wondered if you could enlighten me with some of your Mongo knowledge.
properties.find(searchParams).each(function (err, result) {
    return res.json(result);
});
Something like that, cutting out 6 lines of code and an extra call to the database.
The count() can still be cut out with toArray():
properties.find(searchParams).toArray(function (err, result) {
    var i, count;
    for (i = 0, count = result.length; i < count; i++) {
        propArray.push(new models.propertyModel(result[i]));
    }
    db.close();
    return res.json(propArray);
});
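If each is still preferred over toArray, one way to keep a single response is to collect documents until the cursor signals it is exhausted. A sketch assuming the classic callback-style driver, where each passes a null document once there are no more results:
var propArray = [];
properties.find(searchParams).each(function (err, doc) {
    if (err) throw err;
    if (doc != null) {
        propArray.push(new models.propertyModel(doc));
    } else {
        // null document means the cursor is exhausted - respond exactly once
        db.close();
        res.json(propArray);
    }
});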

recoding nested for loop to use async in node.js

I'm new to Node.js and Async coding. I need to write the equivalent of a nested for loop which will work with Node. I understand that my question is very similar to the one posted here: nested loops asynchronusly in nodejs, next loop must start only after one gets completed, but even after looking at that post in detail, I was unable to fix my code.
I am working with an XML feed. The 'parser' uses the xml2js package. The loop runs exactly as expected if I remove the SQL query (for which I'm using the mysql node package), but when I put the SQL query in, all the orders get processed first, then the "DONE" is output, and then the query fails as it tries to look up items for just the last order repeatedly.
I've tried replacing the for loops with async.forEach loops, but this did not help.
Any help or advice on how to recode this in a way more idiomatic to node would be greatly appreciated.
Many thanks!
Sixhobbits
parser.parseString(data, function (err, result) {
    if (err) throw(err);
    var numOrders = result['Root']['Orders'][0]['Order'].length;
    var curr, currItem, currOrdId, items, sellersCode;
    console.log("Logging IDs of", numOrders, "orders");
    // for each order
    for (var j = 0; j < numOrders; j++) {
        // current order
        curr = result['Root']['Orders'][0]['Order'][j];
        currOrdId = curr['OrderId'][0];
        items = curr['Items'][0]['Item'];
        console.log("Order ID:", currOrdId, "--", items.length, "Items");
        // for each item
        for (var k = 0; k < items.length; k++) {
            currItem = items[k];
            sellersCode = currItem['SellersProductCode'][0];
            var sqlQuery = 'select data_index, fulltext_id, cataloginventory_stock_item.product_id from catalogsearch_fulltext inner join cataloginventory_stock_item where catalogsearch_fulltext.data_index like "' + sellersCode + '|%"' + 'and cataloginventory_stock_item.item_id = catalogsearch_fulltext.product_id';
            var query = connection.query(sqlQuery, function (err, rows, fields) {
                if (err) throw(err);
                console.log("  Item ID      :", currItem['ItemId'][0]);
                console.log("  Full Text ID :", rows[0]['fulltext_id']);
                console.log("  Product ID   :", rows[0]['product_id']);
            });
        } // for
    } // for
    console.log("DONE");
}); // parseString
You were on the right track by looking to use async.forEach. Here's how you would rework this code to use that:
parser.parseString(data, function (err, result) {
    if (err) throw(err);
    var numOrders = result['Root']['Orders'][0]['Order'].length;
    var currOrdId, items, sellersCode;
    console.log("Logging IDs of", numOrders, "orders");
    // for each order
    async.forEach(result['Root']['Orders'][0]['Order'], function (curr, callback1) {
        currOrdId = curr['OrderId'][0];
        items = curr['Items'][0]['Item'];
        console.log("Order ID:", currOrdId, "--", items.length, "Items");
        async.forEach(items, function (currItem, callback2) {
            sellersCode = currItem['SellersProductCode'][0];
            var sqlQuery = 'select data_index, fulltext_id, cataloginventory_stock_item.product_id from catalogsearch_fulltext inner join cataloginventory_stock_item where catalogsearch_fulltext.data_index like "' + sellersCode + '|%"' + 'and cataloginventory_stock_item.item_id = catalogsearch_fulltext.product_id';
            var query = connection.query(sqlQuery, function (err, rows, fields) {
                console.log("  Item ID      :", currItem['ItemId'][0]);
                console.log("  Full Text ID :", rows[0]['fulltext_id']);
                console.log("  Product ID   :", rows[0]['product_id']);
                callback2(err);
            });
        }, callback1);
    }, function (err) {
        console.log("DONE");
    });
}); // parseString
Each iteration of async.forEach must call its callback parameter once all of its async processing has completed. You've got two levels in this case, which makes it a little more difficult to keep track of in your head, but it's the same concept.
This is a classic closure-in-a-loop problem. You need to break the closure by passing currItem as an argument:
for (var k = 0; k < items.length; k++) {
    currItem = items[k];
    sellersCode = currItem['SellersProductCode'][0];
    var sqlQuery = 'select data_index, fulltext_id, cataloginventory_stock_item.product_id from catalogsearch_fulltext inner join cataloginventory_stock_item where catalogsearch_fulltext.data_index like "' + sellersCode + '|%"' + 'and cataloginventory_stock_item.item_id = catalogsearch_fulltext.product_id';
    var query = connection.query(sqlQuery, (function (CI) {
        return function (err, rows, fields) {
            if (err) throw(err);
            console.log("  Item ID      :", CI['ItemId'][0]);
            console.log("  Full Text ID :", rows[0]['fulltext_id']);
            console.log("  Product ID   :", rows[0]['product_id']);
        };
    })(currItem)); // Break closure by passing currItem as argument
} // for
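An equivalent way to break the closure, if the immediately-invoked wrapper feels noisy, is to move the query into a named helper that takes the item as a parameter. A sketch (the logItem helper name is made up; the query string is the same one as in the loop above):
function logItem(currItem, sellersCode) {
    // same query string as in the original loop
    var sqlQuery = 'select data_index, fulltext_id, cataloginventory_stock_item.product_id from catalogsearch_fulltext inner join cataloginventory_stock_item where catalogsearch_fulltext.data_index like "' + sellersCode + '|%"' + 'and cataloginventory_stock_item.item_id = catalogsearch_fulltext.product_id';
    connection.query(sqlQuery, function (err, rows, fields) {
        if (err) throw(err);
        console.log("  Item ID      :", currItem['ItemId'][0]);
        console.log("  Full Text ID :", rows[0]['fulltext_id']);
        console.log("  Product ID   :", rows[0]['product_id']);
    });
}

for (var k = 0; k < items.length; k++) {
    // each call gets its own currItem, so the callbacks no longer share one variable
    logItem(items[k], items[k]['SellersProductCode'][0]);
}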
I realize this is an old post, but you might find this function useful
eachKVAsync = function (elements, userInfo, onKeyValue, ondone) {
    var onDone = ondone;
    var ret = null;
    var done = false;
    var isArray = typeof elements.forEach === 'function';
    var keys = isArray ? null : [],
        values = isArray ? elements : [];
    if (keys) {
        for (var k in elements) {
            keys.push(k);
            values.push(elements[k]);
        }
    }
    var aborted = false;
    var endLoop = function (userInfo) {
        aborted = true;
        if (onDone) {
            onDone(userInfo, aborted);
            onDone = null;
        }
    };
    var i = 0;
    var iterate = function (userInfo) {
        if (i < values.length) {
            var ix = i;
            i++;
            onKeyValue((keys ? keys[ix] : i), values[ix], userInfo, iterate, endLoop);
        } else {
            if (onDone) {
                onDone(userInfo, aborted);
                onDone = null;
                return;
            }
        }
    };
    iterate(userInfo);
};
Usage example:
eachKVAsync(
    elements, {
        aValue: 2004
    },
    function onItem(key, value, info, nextItem, endLoop) {
        if (value.isOk) {
            info.aValue += value.total;
            setTimeout(nextItem, 1000, info);
        } else {
            endLoop(info);
        }
    },
    function afterLastItem(info, loopEndedEarly) {
        if (!loopEndedEarly) {
            console.log(info.aValue);
        }
    }
);
