MongoDB expire event on Node.js

I want to catch a MongoDB collection expire event in Node.js using a Mongoose model.
For example, if I have this schema:
var mySchema = mongoose.Schema({
  name: String,
  time: {
    type: Date,
    default: Date.now,
    expires: 60*60,
  }
})
When a document is created, it will be deleted after one hour. I want to call a Node.js function when this document is deleted. For example, I have this function:
function log(name) {
  console.log('document with name ' + name + ' was successfully removed');
}
And I want to call this function for each document when it is removed.
Thank you.

As I mentioned in a comment, MongoDB doesn't have any built-in notification features. You could implement the same method Meteor uses by tailing MongoDB's oplog, but that's going to take a lot of work.
The only fairly simple and reasonable solution is to do this at the application level rather than the database level, using something like node-schedule (npm install node-schedule --save).
Here's an example:
var schedule = require('node-schedule');
var Model = mongoose.model('Model', mySchema);

Model.create({ name: 'Michael' }, function(err, doc) {
  if (err) return console.error(err); // Handle the error
  var now = new Date();
  // setHours returns a timestamp rather than a Date, so build the Date explicitly
  var when = new Date(now.getTime() + 60 * 60 * 1000);
  schedule.scheduleJob(when, function() {
    // This callback will fire in one hour
  });
});
The issue with this is that the callback will fire even if MongoDB has crashed (or similar) and couldn't remove the document. Another issue is that if your app crashes, the scheduled callback is lost.
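One way to soften the first problem is to make the scheduled callback verify the deletion instead of assuming it. A minimal sketch along those lines (the scheduleRemovalLog helper is hypothetical, and the check is scheduled a couple of minutes after expiry because the TTL monitor only runs roughly once a minute):
var schedule = require('node-schedule');
var Model = mongoose.model('Model', mySchema);

// Hypothetical helper: check a while after the expiry time and only
// log once the document has really been removed by the TTL monitor.
function scheduleRemovalLog(doc) {
  var when = new Date(Date.now() + 62 * 60 * 1000);
  schedule.scheduleJob(when, function () {
    Model.findById(doc._id, function (err, found) {
      if (err) return console.error(err);
      if (!found) log(doc.name);
    });
  });
}

Model.create({ name: 'Michael' }, function (err, doc) {
  if (err) return console.error(err);
  scheduleRemovalLog(doc);
});
Jobs are still lost if the app restarts; persisting pending checks, or rescanning the collection on startup, would be needed to cover that case.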

Related

Mongoose find() takes a few minutes before it returns latest documents

If I add a document to the DB (either via my app, or via the DB admin tool RockMongo), I immediately see the documents being added to the DB (as shown by RockMongo), no problem. However, when I call a simple model.find() on the corresponding Mongoose model, the latest additions to the DB are not returned. They eventually show up after a couple of minutes.
It looks like the documents are being read from some kind of cache/buffer which isn't kept updated. Is there anything like that in Mongoose which I'm overlooking?
My backend is something like this:
var mongoose = require('mongoose');
require('./models/Locations');
mongoose.Promise = global.Promise;
mongoose.connect(mongoUrl,{ config: { autoIndex: false } });
var Locations = mongoose.model('Locations');
[...]
app.get('/locations', function(req, res, next)
{
Locations.find(function(err, results)
{
if(err){return res.status(500).json(err);}
res.json(results);
});
});
The model:
var mongoose = require('mongoose');
var LocationsSchema = new mongoose.Schema({
  name: String
});
mongoose.model('Locations', LocationsSchema);
If I add an item to Locations manually in the DB, and point my browser to /locations, I don't see the new item until a few minutes later...
Change this:
Locations.find(function(err, results) {
  if (err) { return res.status(500).json(err); }
  res.json(results);
});
To this:
Locations.find({}, function(err, results) {
  if (err) { return res.status(500).json(err); }
  res.json(results);
});
It's been a long time, but I noticed I never answered this question, so I thought I might in case someone else runs into this. It is very specific to my setup though. The issue was that Gandi uses a caching engine (Varnish) with relatively aggressive settings by default, leading to the slow update I was seeing in my webapp.
To solve this, one can explicitly set a header to deactivate the caching, for instance:
res.setHeader('Cache-Control', 'max-age=0');
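If the whole API should bypass the cache, a minimal Express middleware sketch (assuming the same app object as above) would be:
// Disable downstream caching for every response
app.use(function (req, res, next) {
  res.setHeader('Cache-Control', 'no-cache, max-age=0');
  next();
});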

Node.js + mongoose find freezes node when more than 100 results

I have a simple Mongoose model. When I call find on it with a limit of at most 100, the done callback is called:
this.find({}).limit(100).exec(done);
The callback is never called if I change this line to (or any higher number):
this.find({}).limit(101).exec(done);
There is no error anywhere, the database keeps working, but this Node app freezes and must be restarted.
If I SSH into the server and connect to the same database from the mongo shell, find({}) on the same collection returns all ~700 documents in less than a second.
When I cloned the database to my local PC and ran the app against the local database, it worked; but the app freezes on the server when it connects to the database on that same server.
Any idea how to debug this one?
Edit 1: Added the model file:
'use strict';

let mongoose = require('mongoose');
let Schema = mongoose.Schema;

let foodSchema = new Schema(
  {
    name: Object,
    type: String,
    description: Object,
    price: Number,
    priceBig: Number,
    active: Boolean
  },
  {
    collection: 'foods'
  }
);

let model = mongoose.model('food', foodSchema);

model.getAllFoods = function (done) {
  this.find({}, done);
};
model.getActiveFoods = function (done) {
  this.find({active: true}, done);
};
model.getFoodById = function (id, done) {
  this.findOne({_id: id}, done);
};

module.exports = model;
Usage:
foodModel.getAllFoods(function (err, docs) {
  if (err) {
    res.sendStatus(500);
    return;
  }
  res.send(docs);
});
getActiveFoods works just fine (returns 96 docs)
After the tip from JohnnyK I updated Mongoose from 4.1.11 to 4.3.7 and that fixed the issue.

Incorrect Subdocument Being Updated?

I've got a Schema with an array of subdocuments, I need to update just one of them. I do a findOne with the ID of the subdocument then cut down the response to just that subdocument at position 0 in the returned array.
No matter what I do, I can only get the first subdocument in the parent document to update, even when it should be the 2nd, 3rd, etc. Only the first gets updated no matter what. As far as I can tell it should be working, but I'm not a MongoDB or Mongoose expert, so I'm obviously wrong somewhere.
var template = req.params.template;
var page = req.params.page;
console.log('Template ID: ' + template);
db.Template.findOne({'pages._id': page}, {'pages.$': 1}, function (err, tmpl) {
  console.log('Matched Template ID: ' + tmpl._id);
  var pagePath = tmpl.pages[0].body;
  if (req.body.file) {
    tmpl.pages[0].background = req.body.filename;
    tmpl.save(function (err, updTmpl) {
      console.log(updTmpl);
      if (err) console.log(err);
    });
    // db.Template.findOne(tmpl._id, function (err, tpl) {
    //   console.log('Additional Matched ID: ' + tmpl._id);
    //   console.log(tpl);
    //   tpl.pages[tmpl.pages[0].number].background = req.body.filename;
    //   tpl.save(function (err, updTmpl){
    //     if (err) console.log(err);
    //   });
    // });
  }
});
In the console, all of the IDs match up properly, and even when I return the updTmpl it says it has updated the proper record, even though it has actually updated the first subdocument and not the one it says it has.
The schema just in case:
var envelopeSchema = new Schema({
  background: String,
  body: String
});

var pageSchema = new Schema({
  background: String,
  number: Number,
  body: String
});

var templateSchema = new Schema({
  name: { type: String, required: true, unique: true },
  envelope: [envelopeSchema],
  pagecount: Number,
  pages: [pageSchema]
});

templateSchema.plugin(timestamps);
module.exports = mongoose.model("Template", templateSchema);
First, if you need req.body.file to be set in order for the update to execute, I would recommend checking that before you run the query.
Also, is that a typo, and is req.body.file supposed to be req.body.filename? I will assume it is for the example.
Additionally (I have not done serious testing on this), I believe your call will be more efficient if you specify your Template._id:
var template_id = req.params.template,
    page_id = req.params.page;

if (req.body.filename) {
  db.Template.update({_id: template_id, 'pages._id': page_id},
    { $set: {'pages.$.background': req.body.filename} },
    function(err, res) {
      if (err) {
        // err
      } else {
        // success
      }
    });
} else {
  // return error / missing data
}
Mongoose doesn't understand documents returned with the positional projection operator. It always updates an array of subdocuments positionally, not by id. You may be interested in looking at the actual queries that mongoose is building - use mongoose.set('debug', true).
You'll have to either get the entire array, or build your own MongoDB query and go around mongoose. I would suggest the former; if pulling the entire array is going to cause performance issues, you're probably better off making each of the subdocuments a top-level document - documents that grow without bounds become problematic (at the very least because Mongo has a hard document size limit).
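For the "get the entire array" route, Mongoose document arrays have an .id() helper, so a rough sketch reusing the variables from the question (not the author's exact code) could be:
db.Template.findOne({ 'pages._id': page }, function (err, tmpl) {
  if (err) return console.log(err);
  // Look the subdocument up by its _id instead of assuming position 0
  var pageDoc = tmpl.pages.id(page);
  if (pageDoc && req.body.file) {
    pageDoc.background = req.body.filename;
    tmpl.save(function (err, updTmpl) {
      if (err) console.log(err);
    });
  }
});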
I'm not familiar with mongoose but the Mongo update query might be:
db.Template.update( { "pages._id": page }, { $set: { "pages.$.body" : body } } )

Mongoose ODM use

I read an article at this link: http://theholmesoffice.com/mongoose-and-node-js-tutorial/
It contains this code:
exports.teamlist = function(gname, callback) {
  db.once('open', function() {
    var teamSchema = new mongoose.Schema({
      country: String,
      GroupName: String
    });
    var Team = db.model('Team', teamSchema);
    Team.find({'GroupName': gname}, function (err, teams) {
      if (err) {
        onErr(err, callback);
      } else {
        mongoose.connection.close();
        console.log(teams);
        callback("", teams);
      }
    }); // end Team.find
  }); // end db.once open
};
Here it calls the db.once method, whereas in other places it's used like this:
var mongoose = require('mongoose'),
    Schema = mongoose.Schema,
    ObjectId = Schema.ObjectId;

var postSchema = new Schema({
  thread: ObjectId,
  date: {type: Date, default: Date.now},
  author: {type: String, default: 'Anon'},
  post: String
});

module.exports = mongoose.model('Post', postSchema);
In the router part it's used like this:
exports.show = (function(req, res) {
  Thread.findOne({title: req.params.title}, function(error, thread) {
    var posts = Post.find({thread: thread._id}, function(error, posts) {
      res.send([{thread: thread, posts: posts}]);
    });
  })
});
And in the app.js there is
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/norum');
I don't understand why these two take different approaches, which one is better, and why. Can anyone please help me? One thing I have observed is that the second approach is the one most used, so please help me understand which is the better approach.
I know this mainly concerns the creation of schemas in MongoDB, and so the once method looks a little better, but I'm still not at all sure. Please help.
Basically, the first approach merely shows you what you can do with Mongoose. It presumes that somewhere you opened a connection to the database (presumably a few lines of code earlier). Then it listens for the event and does all the work later. It's only there to show you what you can do.
For instance, this creates a Mongoose model (it doesn't rely on the database connection being open), hence it could easily be included from a separate file and generally be written as a module, as it is in the second approach:
var teamSchema = new mongoose.Schema({
  country: String,
  GroupName: String
});
var Team = db.model('Team', teamSchema);
Later on in the first approach you close the connection to the database, but that's not something you would want to do in a real application. You have a connection pool, and you use it to query the database frequently; closing the connection is only needed when you plan to stop the application.
Team.find({'GroupName': gname}, function (err, teams) {
  if (err) {
    onErr(err, callback);
  } else {
    mongoose.connection.close();
    console.log(teams);
    callback("", teams);
  }
});
First line: you perform a find on the database; if no error was returned, you close the connection, log the results, and so on.
Anyway, to sum up: the first approach is generally a show-off; the second is what it's like in the real world, where you separate logic, make models reusable and includable, and keep the router logic separate.
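As a rough sketch of that second style (file layout and names are illustrative): connect once when the app boots, reuse the pooled connection for every query, and close it only when the process shuts down.
// app.js - open one pooled connection at startup
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/norum');

// models/Team.js - a reusable model module, no connection handling here
var teamSchema = new mongoose.Schema({
  country: String,
  GroupName: String
});
module.exports = mongoose.model('Team', teamSchema);

// back in app.js - close the pool only when the process is told to stop
process.on('SIGINT', function () {
  mongoose.connection.close(function () {
    process.exit(0);
  });
});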

How can I save multiple documents concurrently in Mongoose/Node.js?

At the moment I use save to add a single document. Suppose I have an array of documents that I wish to store as single objects. Is there a way of adding them all with a single function call and then getting a single callback when it is done? I could add all the documents individually but managing the callbacks to work out when everything is done would be problematic.
Mongoose does now support passing multiple document structures to Model.create. To quote their API example, it supports being passed either an array or a varargs list of objects with a callback at the end:
Candy.create({ type: 'jelly bean' }, { type: 'snickers' }, function (err, jellybean, snickers) {
  if (err) // ...
});
Or
var array = [{ type: 'jelly bean' }, { type: 'snickers' }];
Candy.create(array, function (err, jellybean, snickers) {
  if (err) // ...
});
Edit: As many have noted, this does not perform a true bulk insert - it simply hides the complexity of calling save multiple times yourself. There are answers and comments below explaining how to use the actual Mongo driver to achieve a bulk insert in the interest of performance.
Mongoose 4.4 added a method called insertMany
Shortcut for validating an array of documents and inserting them into
MongoDB if they're all valid. This function is faster than .create()
because it only sends one operation to the server, rather than one for each
document.
Quoting vkarpov15 from issue #723:
The tradeoffs are that insertMany() doesn't trigger pre-save hooks, but it should have better performance because it only makes 1 round-trip to the database rather than 1 for each document.
The method's signature is identical to create:
Model.insertMany([ ... ], (err, docs) => {
...
})
Or, with promises:
Model.insertMany([ ... ]).then((docs) => {
...
}).catch((err) => {
...
})
Mongoose doesn't have bulk inserts implemented yet (see issue #723).
Since you know the number of documents you're saving, you could write something like this:
var total = docArray.length,
    result = [];

function saveAll() {
  var doc = docArray.pop();
  doc.save(function (err, saved) {
    if (err) throw err; // handle error
    result.push(saved);
    if (--total) saveAll();
    else {
      // all saved here
    }
  });
}

saveAll();
This, of course, is a stop-gap solution and I would recommend using some kind of flow-control library (I use q and it's awesome).
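For example, with q the same "save everything, then one callback" pattern can be sketched like this (assuming a docArray of Mongoose documents):
var Q = require('q');

// Turn each callback-style save into a promise and wait for all of them
Q.all(docArray.map(function (doc) {
  return Q.ninvoke(doc, 'save');
}))
.then(function (results) {
  // all documents saved
})
.fail(function (err) {
  // at least one save failed
});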
Bulk inserts in Mongoose can be done with .insert() unless you need to access middleware.
Model.collection.insert(docs, options, callback)
https://github.com/christkv/node-mongodb-native/blob/master/lib/mongodb/collection.js#L71-91
Use async.parallel and your code will look like this:
async.parallel([obj1.save.bind(obj1), obj2.save.bind(obj2), obj3.save.bind(obj3)], callback);
Since the convention is the same in Mongoose as in async (err, callback) you don't need to wrap them in your own callbacks, just add your save calls in an array and you will get a callback when all is finished.
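If the documents are in an array rather than in named variables, the same idea can be sketched as follows (the bind keeps each save attached to its own document):
async.parallel(docs.map(function (doc) {
  return doc.save.bind(doc);
}), function (err, results) {
  // err is set if any save failed; otherwise everything was stored
});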
If you use mapLimit you can control how many documents you want to save in parallel. In this example we save 10 documents in parallel until all items have been saved successfully.
async.mapLimit(myArray, 10, function(document, next) {
  document.save(next);
}, done);
I know this is an old question, but it worries me that there are no properly correct answers here. Most answers just talk about iterating through all the documents and saving each of them individually, which is a BAD idea if you have more than a few documents, and worse if that process is repeated on every request.
MongoDB specifically has a batchInsert() call for inserting multiple documents, and this should be used via the native mongodb driver. Mongoose is built on this driver, and it doesn't have support for batch inserts. That probably makes sense, as it is supposed to be an object document modelling tool for MongoDB.
Solution: Mongoose comes with the native MongoDB driver. You can use that driver by requiring it with require('mongoose/node_modules/mongodb') (not too sure about this, but you can always install the mongodb npm package again if it doesn't work, though I think it should) and then do a proper batch insert.
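For what it's worth, in current versions of the native driver the bulk call is collection.insertMany() rather than a literal batchInsert(); a rough sketch using the driver collection that Mongoose exposes (Model and docs are placeholders, and this path skips Mongoose validation, defaults and middleware) might be:
// Sketch only: talk to the driver collection behind a Mongoose model
var docs = [{ name: 'a' }, { name: 'b' }, { name: 'c' }];
Model.collection.insertMany(docs, function (err, result) {
  if (err) return console.error(err);
  console.log('Inserted ' + result.insertedCount + ' document(s).');
});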
Newer versions of MongoDB support bulk operations:
var col = db.collection('people');
var batch = col.initializeUnorderedBulkOp();

batch.insert({name: "John"});
batch.insert({name: "Jane"});
batch.insert({name: "Jason"});
batch.insert({name: "Joanne"});

batch.execute(function(err, result) {
  if (err) console.error(err);
  console.log('Inserted ' + result.nInserted + ' row(s).');
});
Use the insertMany function to insert many documents. This sends only one operation to the server, and Mongoose validates all the documents before hitting the MongoDB server. By default Mongoose inserts items in the order they appear in the array. If you are OK with not maintaining any order, set ordered: false.
Important - error handling:
When ordered: true, validation and error handling happen as a group: if one document fails, everything fails.
When ordered: false, validation and error handling happen per document and the operation continues; errors are reported back in an array of errors.
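A minimal sketch of ordered: false error handling (the User model and data are just examples, assuming a Mongoose version that accepts the ordered option):
var docs = [
  { email: 'a@example.com', name: 'a' },
  { email: 'a@example.com', name: 'dupe' }, // would violate a unique index on email
  { email: 'b@example.com', name: 'b' }
];

User.insertMany(docs, { ordered: false }, function (err, inserted) {
  if (err) {
    // With ordered: false the valid documents are still written;
    // the individual failures are reported on the error (e.g. err.writeErrors)
    console.error(err);
  } else {
    console.log('Inserted ' + inserted.length + ' documents');
  }
});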
Here is another way without using additional libraries (no error checking included)
function saveAll(callback) {
  var count = 0;
  docs.forEach(function(doc) {
    doc.save(function(err) {
      count++;
      if (count == docs.length) {
        callback();
      }
    });
  });
}
You can use the promise returned by Mongoose's save. Promise in Mongoose does not have all, but you can add that feature with this module.
Create a module that enhances the Mongoose promise with all:
var Promise = require("mongoose").Promise;
Promise.all = function(promises) {
var mainPromise = new Promise();
if (promises.length == 0) {
mainPromise.resolve(null, promises);
}
var pending = 0;
promises.forEach(function(p, i) {
pending++;
p.then(function(val) {
promises[i] = val;
if (--pending === 0) {
mainPromise.resolve(null, promises);
}
}, function(err) {
mainPromise.reject(err);
});
});
return mainPromise;
}
module.exports = Promise;
Then use it with mongoose:
var Promise = require('./promise')
...
var tasks = [];
for (var i = 0; i < docs.length; i++) {
  tasks.push(docs[i].save());
}

Promise.all(tasks)
  .then(function(results) {
    console.log(results);
  }, function (err) {
    console.log(err);
  })
Add a file called mongoHelper.js
var MongoClient = require('mongodb').MongoClient;

MongoClient.saveAny = function(data, collection, callback) {
  if (data instanceof Array) {
    saveRecords(data, collection, callback);
  } else {
    saveRecord(data, collection, callback);
  }
}

function saveRecord(data, collection, callback) {
  collection.save(
    data,
    {w: 1},
    function(err, result) {
      if (err)
        throw new Error(err);
      callback(result);
    }
  );
}

function saveRecords(data, collection, callback) {
  save(
    data,
    collection,
    callback
  );
}

function save(data, collection, callback) {
  collection.save(
    data.pop(),
    {w: 1},
    function(err, result) {
      if (err) {
        throw new Error(err);
      }
      if (data.length > 0)
        save(data, collection, callback);
      else
        callback(result);
    }
  );
}

module.exports = MongoClient;
Then in your code, change your require to:
var MongoClient = require("./mongoHelper.js");
Then, when it is time to save, call (after you have connected and retrieved the collection):
MongoClient.saveAny(data, collection, function(){db.close();});
You can change the error handling to suit your needs, pass back the error in the callback etc.
This is an old question, but it came up first for me in google results when searching "mongoose insert array of documents".
There are two options model.create() [mongoose] and model.collection.insert() [mongodb] which you can use. View a more thorough discussion here of the pros/cons of each option:
Mongoose (mongodb) batch insert?
Here is an example of using MongoDB's Model.collection.insert() directly in Mongoose. Please note that if you don't have so many documents, say less than 100 documents, you don't need to use MongoDB's bulk operation (see this).
MongoDB also supports bulk insert through passing an array of
documents to the db.collection.insert() method.
var mongoose = require('mongoose');
var Q = require('q'); // KrisKowal's Q (https://github.com/kriskowal/q)

var userSchema = mongoose.Schema({
  email : { type: String, index: { unique: true } },
  name  : String
});
var User = mongoose.model('User', userSchema);

function saveUsers(users) {
  // Wrap the callback-style insert in a promise so the caller of this
  // function can act upon its success or failure
  var deferred = Q.defer();
  User.collection.insert(users, function callback(error, insertedDocs) {
    if (!error)
      deferred.resolve(insertedDocs);
    else
      deferred.reject({ error: error });
  });
  return deferred.promise;
}

var users = [{email: 'foo@bar.com', name: 'foo'}, {email: 'baz@bar.com', name: 'baz'}];

saveUsers(users).then(function() {
  // handle success case here
})
.fail(function(error) {
  // handle error case here
});
