Error When Retrieving Multiple Collections in Mongo/Express/Node - node.js

I am building a page which needs to retrieve and list data from several different Mongodb collections. I've been doing separate queries in the Express router function then bunging the results into an array which is passed to the page where the relevant data for each section are accessed. This has worked ok so far with up to three queries, but, if I add a fourth query I get an error.
The router function looks like this:
router.get('/thetest',function(req,res){
var finalResult = {};
var MongoClient = mongodb.MongoClient;
var url = 'mongodb://localhost:27017/myapp';
MongoClient.connect(url, function(err, db){
if(err){
console.log("Connection Error",err);
}else{
var collection = db.collection('cats');
collection.find({}).toArray(function(err,result){
if(err){
console.log("Error retrieving cats");
}else if (result.length){
finalResult.ctlist = result;
console.log("cats OK");
}
});
var collection = db.collection('mice');
collection.find({}).toArray(function(err,result){
if(err){
console.log("Error retrieving mice");
}else if (result.length){
finalResult.mclist = result;
console.log("mices OK");
}
});
var collection = db.collection('cheese');
collection.find({}).toArray(function(err,result){
if(err){
console.log("Error retrieving cheese");
}else if (result.length){
finalResult.chlist = result;
console.log("Cheese OK");
}else{
console.log('No Documents');
}
});
var collection = db.collection('mice');
collection.find({}).sort({tailLength:1}).limit(3).toArray(function(err,result){
if(err){
console.log("Error retrieving alert bookings");
}else if (result.length){
finalResult.mtlist = result;
console.log("Mouse tail length ok");
res.render('thetest',{
"thelist":finalResult
});
}
db.close();
});
}
});
});
(using dummy collection names)
So, there are four queries made to three different collections. The results are listed in a Jade template using an each loop.
If I comment out any one of the four queries and just run the other three it works fine: the results are listed on the page. If I uncomment and run all four queries then Node will choke and, obviously, nothing is listed on the page. None of the collections are above about half a dozen documents in size with a handful of fields in each.
I'm pretty new to all this and I understand that this may not be the best way to do what I am attempting.
Can anybody a) explain where I'm going wrong and/or b) suggest a better approach?
Thanks

I think the problem is due to your code.
Node.js is an asynchronous programming language, so all the operations works parallely.
In your code 'db.close();' close the database connection. For eg. if all the db operation works parallely and the 4th one (in your code) execute firstly then it close the database connection. So it is not a good programming style.
So you can use 'async' library for solving this.
https://github.com/caolan/async
Sample code :
async.parallel([
function(callback){
dbOperation1(query, callback);
},
function(callback){
dbOperation2(query2, callback);
},
function(callback){
dbOperation3(query3, callback);
},
function(callback){
dbOperation4(query4, callback);
}
],
function(err, results){
if (err) {
renderError(err);
} else {
renderHTML(results[0], results[1], results[2], results[4]);
}
});

If you comment all four condition then I am pretty sure that you are commenting res.render('thetest',{
"thelist":finalResult
});
Hence code is chocked. Because express API always wait for response and if you not provide response sentence, then for some time is going to wait and then crash out.
Solution : You must write your response statement out of condition.

Related

What is the best way to query mongodb with mongoose with an array of IDs?

I query one collection (messages) with mongoose. The result is an array of documents. Each document contains an ID for a different collection (users). Now I want to query the users collection for each ID from the messages collection.
The idea is to update each message object with the information from the user collection before returning it to the front end.
I tried using async.each. For some reason the final function is never called even though I am making sure the callback() function is called after each iteration.
app.get('/getmsg', function(req, res){
messages.find({query})
.exec(function(err, ms){
if(err) throw err;
async.each(ms, function(m, callback){
users.findOne({_id : m.userId})
.lean()
.exec(function(err, user){
if(err) {
console.log('error' , err);
callback();
} else {
m.userName = user.name;
// everything is working up to here
callback();
}
}), function(err){
res.send(ms); // this is never returned!
}
});
});
});
Is there a better way of achieving this? I assume this must be a common issue.
Thanks!
You can't use res.send. Instead create a function to get notified about it. Something like this.
// 1st para in async.each() is the array of items
async.each(items,
// 2nd param is the function that each item is passed to
function(item, callback){
// Call an asynchronous function, often a save() to DB
item.someAsyncCall(function (){
// Async call is done, alert via callback
callback();
});
},
// 3rd param is the function to call when everything's done
function(err){
// All tasks are done now
doSomethingOnceAllAreDone();
}
);

Node.js callback unexpected behaviour with Mongoose

I am a nodejs beginner and I ran into some callback behaviour that I don't understand. I am using an Express router to write Mongoose objects to Mongodb using a POST request. In the body of the request I am passing in a nested json structure with two fields - jobDetails and examples. The data in jobDetails is used to create a Job Mongoose object and the data in examples is used to create several Example Mongoose objects. The Job and Example objects are linked in that a Job contains a list of Example objects in one of it's fields.
The way I tried to implement this was with callbacks in the following way. Basically I save the Job object to mongo first, then iterate over the examples - each time creating an Example object and linking it to the job via the .job field and also saving the Example object to mongo. Then in the callback to the Example object save function I updated the Job object with the new Example object and saved the updated version to mongo.
router.post('/jobs', function (req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function (err, job) {
console.log(err);
});
//save examples
for(i=0; i<examples.length;i++){
var eg = new Example({content: examples[i]});
eg.job=job;
eg.save(function (err, eg){
job.examples.push(eg);
job.save(function(err, job){
console.log(err);
});
console.log(err);
});
}
});
This did not perform as I would expect. Specifically, double the number of examples were actually saved to mongo with several duplicates and some missing. I understand that callbacks are asynchronous but to me that still doesn't explain why double the number of examples would be saved and some would be duplicated and some would be missing.
I eventually got it to work correctly without using callbacks at all in the following way.
router.post('/jobs', function (req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function (err, job) {
console.log(err);
});
//save examples
for(i=0; i<examples.length;i++){
var eg = new Example({content: examples[i]});
eg.job=job;
eg.save(function (err, eg){
console.log(err);
});
job.examples.push(eg);
job.save(function(err,job){
console.log(err);
});
}
});
And I'm not sure if this is the optimal solution either. But I would like to know why my original approach resulted in the unintended behaviour.
This should work..
router.post('/jobs', function(req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function(err, result) {
if (!err) {
//save examples
for (i = 0; i < examples.length; i++) {
var eg = new Example({
content: examples[i]
});
eg.job = job;
eg.save(function(err, eg) {
job.examples.push(eg);
job.save(function(err, job) {
if (!err)
job.examples = [];
});
console.log(err);
});
}
}
});
});
I would suggest you use a library like async to perform such save operations in a step by step manner. Follow this approach for better readability of code and better results
var async = require('async');
router.post('/jobs', function(req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
var savedExamples = [];
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
async.eachSeries(examples, function iteratee(example, callback) {
var eg = new Example({
content: example
});
eg.job = job;
eg.save(function(err, savedEg) {
if(!err) {
savedExamples.push(savedEg);
}
callback(err)
});
}, function(err) {
if(err) {
//handle errors
}
job.examples = savedExamples;
job.save(function(err,job) {
if(err) {
//handle errors
}
//success callback
});
});
});
Using this approach you will have to call the save function for job only once after all other operations are completed. If an error is triggered at any point the whole flow is stopped. For more info regarding async library refer this!

save updated models to mongodb

I have following code to fetch some data from the db (mongo).
function getAllUsers(){
var UsersPromise = Q.defer();
UserSchema.find({}, function(err, data){
if(err){
UsersPromise .reject(err);
}else {
UsersPromise .resolve(data);
}
});
return UsersPromise .promise;
}
Then I modify each of these models. I add certain fields to the model depending on the type of user. (This is working correctly).
function buildUsers(users){
// my code iterates over users and adds
// properties as required.
// Working fine.
return users; // updated users.
}
Now I want to save these updated models back to mongo and this is where it's making me pull my hair.
function saveUsers(users){
// here, the users are received correctly. But the following line to save the users fails.
var SaveUsersPromise = Q.defer();
UserSchema.save(users, function(err, data){
if(err){
SaveUsersPromise .reject(err);
} else {
SaveUsersPromise .resolve(data);
}
});
return SaveUsersPromise .promise;
}
Lastly I call these functions like:
DB.connect()
.then(getAllUsers)
.then(buildUsers)
.then(saveUsers)
.catch(errorHandler);
Everything works correctly untill I call UserSchema.save. What could be the problem?
PS: I am using mongoose.
TIA.
UserSchema.save accepts single instance, you have to loop through users and save each. Mongoose doesn't have bulk inserts implemented yet (see issue #723).
Here's simple implementation using async.eachSeries
function saveUsers(users){
var async = require('async'); // <== npm install async --save
var SaveUsersPromise = Q.defer();
async.eachSeries(users, function(user, done){
UserSchema.save(user, done);
// or
user.save(done); // if user is Mongoose-document object
}, function(err){
if(err){
SaveUsersPromise.reject(err);
} else {
SaveUsersPromise.resolve();
}
});
return SaveUsersPromise.promise;
}

node js mongo find last inserted

I am having a hard time trying to find the last inserted element into mongo. I am using an example code I found and trying to make the query and display the item but I am getting an error. I understand I am suppose to do something like this.
db.collectionName.findOne({}, {sort:{$natural:-1}})
But this is what I have so far and it's not working.
var MongoClient = require('mongodb').MongoClient;
MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) {
if(err) { return console.dir(err); }
var collection = db.collection('test');
var doc1 = {'hello':'doc1'};
var doc2 = {'hello':'doc2'};
var lotsOfDocs = [{'hello':'doc3'}, {'hello':'doc4'}];
collection.insert(doc1);
collection.insert(doc2, {w:1}, function(err, result) {});
collection.insert(lotsOfDocs, {w:1}, function(err, result) {});
collection.find({}).toArray(function(err, docs) {
console.log(docs[0]);
});
db.close();
});
This is the error.
nodejs/node_modules/mongodb/lib/mongodb/connection/base.js:246
throw message;
^
TypeError: Cannot read property '0' of null
I checked to make sure the database is not empty so I am not sure why it's returning null.
I've found a possible solution(here) to your problem. It might be due to the fact that the database connection closes before the operations that you have issued finish.
You can fix it by including the db.close() call inside the find query.
collection.find({}).toArray(function(err, docs) {
console.log(docs[0]);
db.close();
});

mongodb native how to work with query

I'm trying to retrieve data from my mongolab DB using Node-mongodb-native
var findAll = function () {
var ddocs;
collection.find({}).each(function (arr, docs) {
ddocs = docs;
});
console.log(ddocs);
};
But it seems that when I log ddocs, it will give me undefined, but if I log docs it will show me the data.
Please help
How should I use this function ?
Thanks
Tzelon Machluf
You're basically trying to create a function that will return all of the documents in a collection? If so, the below should do work. However, I agree with #hgoebl and that you should work on your understanding of node as this is not likely the best way to accomplish what you're trying to do.
var ddocs;
var findAll = collection.find().toArray( function(err, docs) {
if(err)
throw err;
console.log('Collection returned');
return ddocs = docs;
});
setTimeout( function(err) {
if(err) throw err;
console.log(ddocs);
db.close();
}, 1000);
One thing in particular to note: collection.find is asynchronous, so the problem in your code (why it says ddocs is undefined) is that you're logging ddocs before the collection.find ever finished; thus, it's initialized, but no assigned any values. The Node convention is to nest callbacks so that everything happens in the right order and when it's ready.

Resources