I've had a quick look around and haven't found an answer that satisfies me. I've started using Node.js with Express and MongoDB to build a web API rather than going the usual .NET MVC Web API route.
One thing I've noticed, though, is that in order to return a collection of results I'm doing it in a rather bulky way, or at least that's how it feels.
app.get('/property', function (req, res) {
    var propArray = [];
    MongoClient.connect(settings.connection, function (err, db) {
        if (err) throw err;
        var properties = db.collection("PROPERTIES");
        var searchParams = {
            Active: true,
            Deleted: false
        };
        properties.count(searchParams, function (err, count) {
            properties.find(searchParams).toArray(function (err, result) {
                for (var i = 0; i < count; i++)
                    propArray.push(new models.propertyModel(result[i]));
                db.close();
                return res.json(propArray);
            });
        });
    });
});
Now I've noticed that there's an .each function rather than .toArray, which I would prefer to use since it would let me cut out the .count call, but obviously you can only return a response once. I wondered if you could enlighten me with some of your Mongo knowledge.
properties.find(searchParams).each(function (err, result) {
    return res.json(result);
});
Something like that, cutting out 6 lines of code and an extra call to the database.
The count() can still be cut out with toArray():
properties.find(searchParams).toArray(function (err, result) {
    var i, count;
    for (i = 0, count = result.length; i < count; i++) {
        propArray.push(new models.propertyModel(result[i]));
    }
    db.close();
    return res.json(propArray);
});
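If you don't need the intermediate loop at all, you can map the documents straight into your models. This is just a sketch of the same idea using Array.prototype.map; it assumes the same models.propertyModel constructor and db handle as above:

properties.find(searchParams).toArray(function (err, result) {
    if (err) throw err;
    // map each raw document straight into a propertyModel instance
    var propArray = result.map(function (doc) {
        return new models.propertyModel(doc);
    });
    db.close();
    return res.json(propArray);
});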
I am in the process of learning Node, Express, and Mongoose and creating a web application. Sometimes, on one page, I need to display data from two or more of my collections. Although it works just fine, right now I use a bunch of nested if statements, and I've realized the code has become very messy-looking.
Example:
app.get("/jobs/:id/edit", function(req, res){
Job.findById(req.params.id, function(err, foundJob){
if (err){
console.log(err)
} else {
User.find({}, function(err, users){
if(err){
console.log(err);
} else {
Client.find({}, function(err, clients){
if(err) {
console.log(err);
} else {
let start_date = foundJob.start_date;
let end_date = foundJob.end_date;
start_date = moment(start_date).format("MM-DD-YYYY");
end_date = moment(end_date).format("MM-DD-YYYY");
// Redirect
res.render("edit_job", {job: foundJob, users: users, clients: clients, start_date, end_date});
}
});
}
});
}
});
});
This example is for a page that displays information from just three collections. Is there a better way to write this kind of code? I feel like an array of collection names and a for loop might work, but I am unsure how I would write that.
As an update, I tried the following logic, but it did not work:
app.get("/", function(req, res){
let collections = [Client, User, Ticket, Job];
let endCollections = [];
for (let i = 0; i < collections.length; i++){
collections[i].find({}, function(err, foundCollection){
if (err) {
console.log(err);
} else {
endCollections[i] = foundCollection;
}
})
}
res.render("dashboard", {clients: endCollections[0]});
No matter what I do, endCollections[i] remains undefined even though I have it set to be foundCollection, which is not undefined.
Thanks.
In the for loop you're executing an asynchronous block of code (collections[i].find()), so JavaScript will not wait for that asynchronous code to finish before running the next block, which is the render. That's why you get an empty array.
You need to use async/await to make JavaScript wait until the asynchronous code has finished before doing the rest.
Just add async to the route handler function so you can use await inside it.
Something like this:
app.get("/", async function(req, res){ // <== note the async keyword here
let collections = [Client, User, Ticket, Job];
let endCollections = [];
for (let i = 0; i < collections.length; i++){
await collections[i].find({}, function(err, foundCollection){ // <== note the await keyword here
if (err) {
console.log(err);
} else {
endCollections[i] = foundCollection;
}
})
}
res.render("dashboard", {clients: endCollections[0]});
Hope it helps.
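For what it's worth, mixing await with a Node-style callback only works because the query object is still awaitable. A cleaner variant (just a sketch, assuming a Mongoose version, 5 or later, where the query returned by find() can be awaited directly when no callback is passed) drops the callback entirely:

app.get("/", async function(req, res){
    let collections = [Client, User, Ticket, Job];
    let endCollections = [];
    try {
        for (let i = 0; i < collections.length; i++){
            // await the query itself instead of passing a callback
            endCollections[i] = await collections[i].find({});
        }
        res.render("dashboard", {clients: endCollections[0]});
    } catch (err) {
        console.log(err);
    }
});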
I'm new to Node.js and currently working on a project using the KeystoneJS CMS and MongoDB. Now I'm stuck on getting data related to multiple collections. Because of these callback functions, I can't return an array with the relational data. My code is something similar to this sample:
var getAgenda = function(id, callback){
    callback = callback || function(){};
    if (id){
        AgendaDay.model.find({summit: id}).exec(function (err, results3) {
            var arr_agenda = [];
            var arr_agenda_item = [];
            for (var key3 in results3){
                AgendaItem.model.find({agendaDay: results3[key3]._id}).exec(function (err, results2){
                    for (var key2 in results2){
                        arr_agenda_item.push({
                            item_id: results2[key2]._id,
                            item_name: results2[key2].name,
                            from_time: results2[key2].time_from,
                            to_time: results2[key2].time_to,
                            desc: results2[key2].description,
                            featured: results2[key2].featured,
                        });
                    }
                    arr_agenda.push({
                        name: results3[key3].name,
                        date: results3[key3].date,
                        description: results3[key3].description,
                        item_list: arr_agenda_item
                    });
                    return callback(arr_agenda);
                });
            }
        });
    }
}
exports.list = function (req, res) {
    var mainarray = [];
    Summit.model.find().exec(function (err, resultssummit) {
        if (err) return res.json({ err: err });
        if (!resultssummit) return res.json('not found');
        Guest.model.find().exec(function (err, resultsguest) {
            for (var key in resultssummit){
                var agen_arr = [];
                for (var i = 0; i < resultssummit[key].guests.length; i++){
                    var sumid = resultssummit[key]._id;
                    // this is the function I'm trying to use to get data and assign it to mainarray
                    getAgenda(sumid, function(arr_agenda){
                        agen_arr = arr_agenda;
                    });
                    mainarray.push({
                        id: resultssummit[key]._id,
                        name: resultssummit[key].name,
                        agenda_data: agen_arr,
                    });
                }
                res.json({
                    summit: mainarray,
                });
            }
        });
    });
}
If anyone can help me out, that would be really great :)
You need to restructure this whole thing. You should not be calling MongoDB queries in a for loop and expecting their output at the end of the loop. Also, your response is sent from inside a for loop; that won't work.
I'll tell you how to do it. I cannot refactor all of that code for you.
Instead of putting MongoDB queries in a for loop, you need to turn them into a single query. Just put the _ids in a single array and fire one query.
AgendaItem.model.find({agendaDay:{$in:ARRAY_OF_IDS}})
You need to do the same thing for AgendaDay.model.find({summit:id}) as well.
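As a rough sketch of that idea (assuming Mongoose-style models and the same field names as the code above), you collect the ids first and then run one query per collection:

// gather the summit ids, then fetch all agenda days and items in one query each
Summit.model.find().exec(function (err, summits) {
    if (err) return res.json({ err: err });
    var summitIds = summits.map(function (s) { return s._id; });
    AgendaDay.model.find({ summit: { $in: summitIds } }).exec(function (err, days) {
        if (err) return res.json({ err: err });
        var dayIds = days.map(function (d) { return d._id; });
        AgendaItem.model.find({ agendaDay: { $in: dayIds } }).exec(function (err, items) {
            if (err) return res.json({ err: err });
            // group items by day and days by summit in memory here, then res.json(...) once
        });
    });
});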
I'm following this link for finding data in MongoDB using Node.js.
My code is:
var counter = 0;

var findMongo = function(db, callback) {
    var cursor = db.collection('new').find( { "_id": ObjectId("56da6fd166efee0350399c21") } );
    //var cursor = db.collection('new').find();
    cursor.each(function(err, doc) {
        counter = counter + 1;
        console.log(counter);
        assert.equal(err, null);
        if (doc != null) {
            //console.dir(doc);
            //console.log(doc);
        } else {
            console.log("in else,not found");
            callback();
        }
    });
};
MongoClient.connect(url, function(err, db) {
    assert.equal(null, err);
    findMongo(db, function() {
        db.close();
    });
});
Since I'm searching the DB with _id, findMongo should only run once.
I'm getting following result:
counter 1
counter 2
in else,not found
Why is the findMongo function called twice?
Two things to notice:
1 - The callback you pass to cursor.each() runs once for every matching document and then one final time with doc set to null to signal the end of the cursor, so counter = counter + 1; executes twice even though only one document matches. That's why you see the counter printed twice before hitting the else branch.
2 - You should use findOne instead of find: since you're querying by _id you're only interested in a single record, although there's no harm in using find.
Here is how to use db.collection.findOne()
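A minimal sketch of that change, reusing the same collection and ObjectId from the question (names assumed from the code above):

var findMongo = function(db, callback) {
    // findOne invokes the callback exactly once, with doc === null when there is no match
    db.collection('new').findOne({ "_id": ObjectId("56da6fd166efee0350399c21") }, function(err, doc) {
        assert.equal(err, null);
        console.log(doc);
        callback();
    });
};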
I'm somewhat new to node, and so far I love it, but maybe I'm running into a case of it being a hammer when I need a wrench.
I've got a bunch of data in text files, and I need to load these files into a database - very simple in a normal imperative language. I want to write idiomatic node for this, so would rather use async fs calls (fs.readdir and fs.readFile rather than fs.readdirSync and fs.readFileSync). However, how do I know when all of those operations are done, so then (and only then) it's safe to close the DB connection?
In short (in pseudo-code):
MongoClient.connect(url, function(err, db) {
    if (err) throw err;
    fs.readdir(path, function(err, files) {
        for file in files {
            if (interesting(file)) {
                fs.readFile(file, function(err, data) {
                    doc = turnDataIntoDocument(data);
                    db.collection('foo').insert(doc);
                });
            }
        }
    });
    // This is the part that won't work right:
    db.close();
});
Obviously, the db.close() could happen at any time, probably before all the files are processed, and usually before the directory is even fully read.
I know there are libraries for dealing with control flow, but I feel like I should understand how to do this at a more fundamental level rather than depending on a library for something so simple: don't close the connection until I'm done with it.
A simple counter should work in this case:
function doInserts(cb) {
    MongoClient.connect(url, function(err, db) {
        if (err) throw err;
        fs.readdir(path, function(err, files) {
            var count = files.length;
            if (count === 0) {
                db.close();
                return cb();
            }
            for (var i = 0; i < files.length; ++i) {
                var file = files[i];
                if (interesting(file)) {
                    fs.readFile(file, function(err, data) {
                        var doc = turnDataIntoDocument(data);
                        db.collection('foo').insert(doc);
                        // last outstanding read just finished: safe to close now
                        if (--count === 0) {
                            db.close();
                            cb();
                        }
                    });
                } else if (--count === 0) {
                    db.close();
                    cb();
                }
            }
        });
    });
}
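Hypothetical usage, just to show when the callback fires:

doInserts(function () {
    // runs only after every interesting file has been read, its insert issued,
    // and the connection closed
    console.log('all done');
});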
Maybe the results ain't weird, but I started using Node 1-2 months ago so for me they are...
I have a loop which sorts out every other value of the array returned by hgetall (Redis command) and in that loop I call a function to get all values from another table with keys stored in the sorted array. This was more difficult to explain than I thought. Here's my code:
Pastebin: http://pastebin.com/tAVhSUV1 (or see below)
function getInfo (cn, callback) {
    var anArray = [];
    redis_client.hgetall('chat_info:' + cn, function (err, vals) {
        if (err) { throw err; }
        for (var i in vals) {
            anArray.push(vals[i]);
        }
        return callback(anArray);
    });
}
redis_client.hgetall('chat_rooms:' + POST.chat_name, function (err, val) {
    if (err) { throw err; }
    var vars = [],
        rArr = [];
    for (var i in val) {
        vars.push(i);
    }
    for (var i = 0; i < vars.length; i += 1) {
        if (i % 2 === 0) {
            getInfo(vars[i], function (hej) {
                rArr.push(hej);
            });
        }
    }
});
The callback from the call to getInfo() is executed after the entire loop. Am I missing something here? Because it can't do that, right? (When I use rArr right after the loop it's empty, but if I log it in the callback it gets logged after everything else written after the loop.)
Yes, that's probably normal.
Understand that the callbacks are executed only once the hgetall calls come back with data. When the Redis client receives something, it invokes the callback; in other words, all the callbacks run later.
Since JavaScript runs in a single thread, the calls to hgetall would have to be blocking for them to execute in order as the for loop runs. But you're almost certainly using async I/O, so the for loop finishes first, and only then does it start calling each callback that was queued in the JavaScript event loop.
Edit
Unfortunately, to achieve what you're trying to do, you have to coordinate all of those callbacks. You can use this project to make it easier: https://github.com/caolan/async
You should be able to install it using npm install async.
You'd have to do something like this:
function getInfo (cn) {
    return function(callback) {
        var anArray = [];
        redis_client.hgetall('chat_info:' + cn, function (err, vals) {
            if (err) { return callback(err); }
            for (var i in vals) {
                anArray.push(vals[i]);
            }
            // async expects node-style callbacks: error first, then the result
            return callback(null, anArray);
        });
    };
}

redis_client.hgetall('chat_rooms:' + POST.chat_name, function (err, val) {
    if (err) { throw err; }
    var vars = [],
        rArr = [],
        callbacks = [];
    for (var i in val) {
        vars.push(i);
    }
    for (var i = 0; i < vars.length; i += 1) {
        if (i % 2 === 0) {
            callbacks.push(getInfo(vars[i]));
        }
    }
    async.series(callbacks, function (err, results) {
        // Final code here: results is an array of the anArray values, in task order
    });
});
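A small note on the design: since each getInfo call is independent of the others, async.parallel has the same task/callback signature and would fire all the hgetall calls at once instead of one after another, and results still comes back in task order. A sketch of the swap:

async.parallel(callbacks, function (err, results) {
    // results[n] corresponds to callbacks[n], regardless of which Redis reply arrived first
});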