MongoDB find function running twice in node.js

I'm following this link for finding data in MongoDB using node.js.
My code is:
var counter = 0;

var findMongo = function(db, callback) {
    var cursor = db.collection('new').find({ "_id": ObjectId("56da6fd166efee0350399c21") });
    //var cursor = db.collection('new').find();
    cursor.each(function(err, doc) {
        counter = counter + 1;
        console.log(counter);
        assert.equal(err, null);
        if (doc != null) {
            //console.dir(doc);
            //console.log(doc);
        } else {
            console.log("in else,not found");
            callback();
        }
    });
};

MongoClient.connect(url, function(err, db) {
    assert.equal(null, err);
    findMongo(db, function() {
        db.close();
    });
});
Since I'm searching the DB by _id, the cursor callback should only run once.
I'm getting the following result:
counter 1
counter 2
in else,not found
Why is the findMongo function called twice?

Two things to notice:
1 - cursor.each() invokes its callback once for every matched document and then one final time with doc set to null to signal the end of the cursor. That is why counter reaches 2 even though only one document matches.
2 - You should use findOne instead of find; it makes sense because you are only interested in finding one record, although there is no harm in using the latter.
Here is how to use db.collection.findOne().
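A minimal sketch of the findOne version of the question's query (assuming the same ObjectId and assert requires as in the original code):

var findMongo = function(db, callback) {
    // findOne passes back at most one document (or null), so the callback
    // fires exactly once and there is no extra end-of-cursor call
    db.collection('new').findOne({ "_id": ObjectId("56da6fd166efee0350399c21") }, function(err, doc) {
        assert.equal(err, null);
        if (doc != null) {
            console.dir(doc);
        } else {
            console.log("not found");
        }
        callback();
    });
};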


How to efficiently close the db connection (mongodb and node)

I have a small program that reads each record and updates each record. Given the async nature of Node and callbacks, what is the efficient and correct way to close the db connection?
Sample Program:
var MongoClient = require('mongodb').MongoClient;
var updateCount = 0;

MongoClient.connect('mongodb://localhost:27017/school', function(err, db) {
    if (err) throw err;

    var query = {};
    // get all the students in the database
    var cursor = db.collection('students').find(query);

    cursor.each(function(err, doc) {
        if (err) throw err;
        if (doc == null) {
            return;
        }
        // filter out only the homework scores
        var homeworksOnly = doc.scores.filter(function(scores) {
            if (scores.type === "homework") return true;
            return false;
        });
        // filter out the non homework scores
        var notHomeWorks = doc.scores.filter(function(scores) {
            if (scores.type !== "homework") return true;
            return false;
        });
        // sort the homework scores to remove the min score from the list
        homeworksOnly.sort(function(a, b) {
            if (a.score > b.score) return 1;
            if (b.score > a.score) return -1;
            return 0;
        });
        console.log("Before removing the min score" + doc._id);
        console.dir(homeworksOnly);
        console.log("After removing the min score" + doc._id);
        homeworksOnly.splice(0, 1);
        console.dir(homeworksOnly);
        console.log("Merge the homework with other scores" + doc._id);
        var newScores = homeworksOnly.concat(notHomeWorks);
        console.dir(newScores);
        console.log("*****");
        // Now update the database for this student with the new scores
        var search = { "_id": doc._id };
        var operator = { '$set': { 'scores': newScores } };
        db.collection('students').update(search, operator, function(err, updated) {
            if (err) throw err;
            updateCount++;
            console.dir("Successfully updated " + updated + " document! count: " + updateCount);
        });
    });
});
Now the program works, but I need to hit Ctrl+C to terminate it. Is there a way to know that all the callbacks have completed so that the program can be terminated?
There are better libraries you can integrate with nodejs to handle the callback flow, but working with just the basic driver as a dependency, all you need is the basic node stream interface, which is already built in to the cursor.
This allows .pause() and .resume() for flow control when processing, and an "end" event when the cursor stream is complete:
var MongoClient = require('mongodb').MongoClient;
var updateCount = 0;

MongoClient.connect('mongodb://localhost:27017/school', function(err, db) {
    if (err) throw err;

    var query = {};
    // get all the students in the database
    var cursor = db.collection('students').find(query);

    // called on errors
    cursor.on("error", function(err) {
        throw err;
    });

    // called on stream complete
    cursor.on("end", function() {
        db.close();
    });

    // process each document in the stream
    cursor.on("data", function(doc) {
        cursor.pause(); // stops the cursor stream while processing
        // filter out only the homework scores
        var homeworksOnly = doc.scores.filter(function(scores) {
            if (scores.type === "homework") return true;
            return false;
        });
        // filter out the non homework scores
        var notHomeWorks = doc.scores.filter(function(scores) {
            if (scores.type !== "homework") return true;
            return false;
        });
        // sort the homework scores to remove the min score from the list
        homeworksOnly.sort(function(a, b) {
            if (a.score > b.score) return 1;
            if (b.score > a.score) return -1;
            return 0;
        });
        console.log("Before removing the min score" + doc._id);
        console.dir(homeworksOnly);
        console.log("After removing the min score" + doc._id);
        homeworksOnly.splice(0, 1);
        console.dir(homeworksOnly);
        console.log("Merge the homework with other scores" + doc._id);
        var newScores = homeworksOnly.concat(notHomeWorks);
        console.dir(newScores);
        console.log("*****");
        // Now update the database for this student with the new scores
        var search = { "_id": doc._id };
        var operator = { '$set': { 'scores': newScores } };
        db.collection('students').update(search, operator, function(err, updated) {
            if (err) throw err;
            updateCount++;
            console.dir("Successfully updated " + updated + " document! count: " + updateCount);
            cursor.resume(); // restarts the stream processing now we are done
        });
    });
});
After the update statement is done, use:
db.collection('students').update(search, operator, function(err, updated) {
    if (err) throw err;
    updateCount++;
    console.dir("Successfully updated " + updated + " document! count: " + updateCount);
});
db.close();
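Note that the update callbacks are asynchronous, so a bare db.close() placed after the loop can run before the updates have finished. If you would rather not use the stream interface, one option is to count outstanding updates and close only when the cursor is exhausted and the count is back to zero. A rough sketch, not from the original answers, reusing the question's cursor and per-document processing (the newScores computation is elided):

var pending = 0;        // updates still in flight
var cursorDone = false; // has the cursor been exhausted?

cursor.each(function(err, doc) {
    if (err) throw err;
    if (doc == null) {
        cursorDone = true;
        if (pending === 0) db.close();
        return;
    }
    // ... compute newScores for this doc as in the question ...
    pending++;
    db.collection('students').update({ "_id": doc._id }, { '$set': { 'scores': newScores } }, function(err) {
        if (err) throw err;
        pending--;
        // close only once the cursor is finished and no update is outstanding
        if (cursorDone && pending === 0) db.close();
    });
});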

Synchronous for loop in node js

So let's say I have the following for loop
for (var i = 0; i < array.length; i++) {
    Model.findOne({ _id: array[i].id }, function(err, found) {
        // Some stuff
    });
}
How do I make this code work? Every time I run it I get array[i] = undefined because the mongo-db query is asynchronous and the loop has already iterated 5 times by the time the first query even completes. How do I go about tackling this issue and waiting for the query to complete before going on to the next iteration?
This doesn't specifically answer your question, but addresses your problem.
I'd use an $in query and do the filtering all at once. 20 calls to the db is pretty slow compared to 1:
// grab your ids
var arrayIds = myArray.map(function(item) {
    return item._id;
});

// find all of them
Model.find({ _id: { $in: arrayIds } }, function(error, foundItems) {
    if (error) {
        // error handle
    }
    // set up a map of the found ids
    var foundItemsMap = {};
    foundItems.forEach(function(item) {
        foundItemsMap[item._id] = true;
    });
    // pull out your items that haven't been created yet
    var newItems = [];
    for (var i = 0; i < myArray.length; i++) {
        var arrayItem = myArray[i];
        if (foundItemsMap[arrayItem._id]) {
            // this array item exists in the map of foundIds
            // so the item already exists in the database
        } else {
            // it doesn't exist, push it into the new array
            newItems.push(arrayItem);
        }
    }
    // now you have `newItems`, an array of objects that aren't in the database
});
One of the easiest ways to accomplish what you want is using promises. You could use the library q to do this:
var Q = require('q');

function fetchOne(id) {
    var deferred = Q.defer();
    Model.findOne({ _id: id }, function(err, found) {
        if (err) deferred.reject(err);
        else deferred.resolve(found);
    });
    return deferred.promise;
}

function fetch(ids, action) {
    if (ids.length === 0) return;
    var id = ids.pop();
    fetchOne(id).then(function(model) {
        action(model);
        fetch(ids, action);
    });
}
fetch([1,2,3,4,5], function(model) { /* do something */ });
It is not the most beautiful implementation, but I'm sure you get the picture :)
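On newer Node versions the same one-at-a-time behavior can be written with async/await instead of Q. A sketch, assuming Model.findOne returns a promise when called without a callback, as Mongoose does:

async function fetchAll(ids, action) {
    for (var i = 0; i < ids.length; i++) {
        // each query finishes before the next iteration starts
        var found = await Model.findOne({ _id: ids[i] });
        action(found);
    }
}

fetchAll([1, 2, 3, 4, 5], function(model) { /* do something */ });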
Not sure if this is the right way, and it could be a bit expensive, but this is how I did it.
I think the trick is to pull all your data and then look for an id match.
Model.find(function(err, data) {
    if (err) {
        // handle it
    }
    for (var i = 0; i < array.length; i++) {
        for (var j = 0; j < data.length; j++) {
            if (data[j].id == array[i].id) {
                // do something
            }
        }
    }
});

Searching then adding record in Redis using Node.js

I may be overtired, but for the life of me I cannot understand why the following is not working. I am trying to check whether a string exists and, if it does not, add it to a Redis database.
options = options || {};
var counter = 1,
    client = redis.getClient();

options.name = options.name || '';

if (_.isEmpty(options.name)) {
    return callback('Cannot add name. No name supplied');
} else {
    options.name = options.name.trim();
}

client.get('mySavedKeys' + options.name, function (err, data) {
    if (err) { return callback(err); }
    if (!_.isNull(data)) {
        console.log('Name found', options.name);
        return callback(null, data);
    } else {
        counter += 1;
        console.log('Name not found', options.name);
        console.log('ID', counter);
        client.set('mySavedKeys' + options.name, counter, function (err) {
            if (err) { return callback(err); }
            console.log('Added', options.name);
            return callback(null, counter);
        });
    }
});
If I run an array of names to add using async.each, it seems to run all the 'get' functions and then run the 'set' functions, so I am getting duplicate insertions.
I'm sure the answer is obvious but I cannot see the problem.
If you use async.eachSeries you would ensure that each get/set pair completes before the next one starts, rather than all the gets running in parallel.
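A sketch of what that could look like, assuming the get/set logic above is wrapped as addName(options, callback); the helper and the names array are illustrative:

var async = require('async');

// process the names one at a time: each get/set pair completes
// before the next name is looked up, so duplicates are not inserted
async.eachSeries(names, function (name, done) {
    addName({ name: name }, done); // the get-then-set logic from above
}, function (err) {
    if (err) { return console.error(err); }
    console.log('All names processed');
});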

mongodb nodejs each vs toArray

I've had a quick look around and not found anything that satisfies me with an answer, but basically I've started to use node.js with express and mongodb to create a web API rather than the usual .NET MVC Web API route.
One thing I've noted though is in order to return a collection of results I'm doing it in a rather bulky way, or that's how it feels at least.
app.get('/property', function (req, res) {
    var propArray = [];
    MongoClient.connect(settings.connection, function (err, db) {
        if (err) throw err;
        var properties = db.collection("PROPERTIES");
        var searchParams = {
            Active: true,
            Deleted: false
        };
        properties.count(searchParams, function (err, count) {
            properties.find(searchParams).toArray(function (err, result) {
                for (i = 0; i < count; i++)
                    propArray.push(new models.propertyModel(result[i]));
                db.close();
                return res.json(propArray);
            });
        });
    });
});
Now I've noted that there's a .each function rather than .toArray, which I would prefer to use as I could cut out the .count call, but obviously you can only return a response once. I wondered if you guys could enlighten me with some of your mongo knowledge.
properties.find(searchParams).each(function (err, result) {
    return res.json(result);
});
Something like that, cutting out 6 lines of code and an extra call to the database.
The count() can still be cut out with toArray():
properties.find(searchParams).toArray(function (err, result) {
    var i, count;
    for (i = 0, count = result.length; i < count; i++) {
        propArray.push(new models.propertyModel(result[i]));
    }
    db.close();
    return res.json(propArray);
});
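If you do want .each, you can still respond exactly once: the callback receives null after the last document, and that is the point where it is safe to send the response. A sketch along the lines of the question's code:

properties.find(searchParams).each(function (err, result) {
    if (err) throw err;
    if (result != null) {
        propArray.push(new models.propertyModel(result));
    } else {
        // a null result means the cursor is exhausted
        db.close();
        return res.json(propArray);
    }
});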

How to drop all collections via mongodb and node.js?

I am new to node.js and mongodb and I have the following problem:
I need to drop all collections in my mongodb from a node.js file. I have the following function:
service.dropCollections = function(db, colls) {
    for (var i = 0; i < colls.length; i++) {
        var name = colls[i].name;
        db.dropCollection(name, function(err) {
            if (!err) {
                console.log(name + " dropped");
            } else {
                console.log("!ERROR! " + err.errmsg);
            }
        });
    }
};
And I am using it in the following function:
service.clearDB = function() {
    var MongoClient = require('mongodb').MongoClient
      , format = require('util').format;

    MongoClient.connect('mongodb://127.0.0.1:27017/shiny_d', function(err, db) {
        if (err) throw err;
        db.collectionNames(function(err, collections) {
            if (!err) {
                service.dropCollections(db, collections);
            } else {
                console.log("!ERROR! " + err.errmsg);
            }
            service.showCollections();
        });
    });
};
As an output I have
!ERROR! ns not found
shiny_db.physicalinfos
Dunno what to do right now. I'll be very thankful for your help.
Ain't it faster, easier and less error prone if you just drop the entire database?
db.dropDatabase();
At least from the Mongo CLI, whenever you access a nonexistent DB, it is persisted as soon as you create data, which is the same as having dropped all collections from it.
I haven't tried MongoDB for anything except studying yet, so I don't know much about permissions. Probably the only problem with dropping the entire DB would be that your users' permissions would be lost (I believe).
If this script you're trying to create is not for production, then you're good to go with dropping the DB.
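For reference, the same thing from Node rather than the Mongo CLI looks roughly like this (a sketch against the question's connection string):

MongoClient.connect('mongodb://127.0.0.1:27017/shiny_d', function(err, db) {
    if (err) throw err;
    // drops every collection in the database in one call
    db.dropDatabase(function(err) {
        if (err) throw err;
        console.log('Database dropped');
        db.close();
    });
});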
I found an answer. First of all, I made a mistake in my connection string: it should be 'mongodb://127.0.0.1:27017/shiny_d'. The second mistake was in the collection names: they come back as 'db_name.coll_name', which is why db.dropCollection(name, callback) couldn't find the collection and I got the error ns not found. So I used the following to separate db_name from coll_name:
var name = colls[i].name.substring('shiny_db.'.length); and I added a check for "system" collections.
The final code looks like the following:
service.clearDB = function() {
    var MongoClient = require('mongodb').MongoClient
      , format = require('util').format;

    MongoClient.connect('mongodb://localhost/shiny_db', function(err, db) {
        if (err) throw err;
        db.collectionNames(function(err, collections) {
            if (!err) {
                service.dropCollections(db, collections);
            } else {
                console.log("!ERROR! " + err.errmsg);
            }
        });
    });
};
service.dropCollections = function(db, colls) {
    for (var i = 0; i < colls.length; i++) {
        var name = colls[i].name.substring('shiny_db.'.length);
        if (name.substring(0, 6) !== "system") {
            // wrap in an IIFE so each drop callback logs its own collection
            // name instead of the loop's final value of `name`
            (function(name) {
                db.dropCollection(name, function(err) {
                    if (!err) {
                        console.log(name + " dropped");
                    } else {
                        console.log("!ERROR! " + err.errmsg);
                    }
                });
            })(name);
        } else {
            console.log(name + " cannot be dropped because it's a system collection");
        }
    }
};
Hope it will help someone!
listCollections gives you an array of collection names as strings.
It looks like you may be confusing it with something that returns an array of collection objects, like maybe db.collections().
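For example, with a 2.x-era driver (a sketch; it assumes each entry returned by listCollections() carries a bare name with no database prefix, which sidesteps the substring workaround entirely):

db.listCollections().toArray(function (err, collections) {
    if (err) throw err;
    collections.forEach(function (coll) {
        if (coll.name.indexOf('system.') === 0) return; // skip system collections
        db.dropCollection(coll.name, function (err) {
            if (err) return console.log("!ERROR! " + err.errmsg);
            console.log(coll.name + " dropped");
        });
    });
});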
