Get total count along with Mongoose Query skip & limit - node.js

I have JSON data containing many objects. I want to limit the data for pagination, and I also need the total item count. Please help.
Model.find().skip((pageNumber-1)*limit).limit(limit).exec()
I want the count and skipped data in response.

You can use the async library to run the two queries at once: one query to get the number of documents and another for the paginated results.
Example with 'User' model:
var async = require('async');
var User = require('./models/user');

var countQuery = function(callback) {
  User.count({}, function(err, count) {
    if (err) {
      callback(err, null);
    } else {
      callback(null, count);
    }
  });
};

var retrieveQuery = function(callback) {
  User.find({})
    .skip((page - 1) * PAGE_LIMIT)
    .limit(PAGE_LIMIT)
    .exec(function(err, doc) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, doc);
      }
    });
};
async.parallel([countQuery, retrieveQuery], function(err, results) {
  // err is set if any of the functions passed an error
  // results is an array with the result of each function
  // results[0] contains the count from countQuery
  // results[1] contains the documents from retrieveQuery
  // You can send the results as:
  res.json({users: results[1], pageLimit: PAGE_LIMIT, page: page, totalCount: results[0]});
});
async lets you run the queries in parallel, which is faster than running the count query and the retrieval query one after the other.
Hope this helps.

I have solved it with $facet and aggregate() the following way (note that $facet requires MongoDB 3.4+):
const [{ paginatedResult, totalCount: [{ totalCount }] }] = await Model.aggregate([{
  $facet: {
    paginatedResult: [
      { $match: query },
      { $skip: skip },
      { $limit: limit }
    ],
    totalCount: [
      { $match: query },
      { $count: 'totalCount' }
    ]
  }
}])
where totalCount is the total number of records matching the search query, while paginatedResult is only the paginated slice of them.
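Note that the nested destructuring above will throw if nothing matches, because the totalCount facet then comes back as an empty array. A minimal sketch of a safer extraction (query, skip and limit are assumed to be supplied by the caller):

const [result] = await Model.aggregate([{
  $facet: {
    paginatedResult: [
      { $match: query },
      { $skip: skip },
      { $limit: limit }
    ],
    totalCount: [
      { $match: query },
      { $count: 'totalCount' }
    ]
  }
}]);

const paginatedResult = result.paginatedResult;
// totalCount is [] when no document matched, so fall back to 0
const totalCount = result.totalCount.length > 0 ? result.totalCount[0].totalCount : 0;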

The problem with these solutions is that every request performs two queries. This becomes problematic when you have a complex data structure and a large data set, since performance suffers. Consider instead exposing a dedicated handler for the /resource?count=true or /resource/count GET routes that returns only the count, as sketched below.
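As a minimal Express sketch of such an endpoint (the route path and the User model are assumptions for illustration, not part of the original answer):

var express = require('express');
var User = require('./models/user');
var router = express.Router();

// GET /users/count - returns only the total count, no documents
router.get('/users/count', function(req, res, next) {
  User.count({}, function(err, count) {
    if (err) return next(err);
    res.json({ totalCount: count });
  });
});

module.exports = router;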

You need to perform two queries to achieve that: one to get the results and another to get the total item count with .count().
For example code, take a look at a "paginator" plugin for Mongoose, mongoose-paginate.
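A minimal sketch of wiring up mongoose-paginate, assuming a User model (the schema fields are illustrative):

var mongoose = require('mongoose');
var mongoosePaginate = require('mongoose-paginate');

var userSchema = new mongoose.Schema({ name: String });
userSchema.plugin(mongoosePaginate);
var User = mongoose.model('User', userSchema);

// One call returns both a page of documents and the total count
User.paginate({}, { page: 2, limit: 10 }, function(err, result) {
  // result.docs  - the documents for this page
  // result.total - total number of matching documents
  // result.limit, result.page, result.pages
});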

To perform only one query, you can use find() with promises and slice the resulting array. Note that this fetches every matching document and paginates in memory, so it is only suitable for small result sets. A small example (page is the 1-based page number):
getPaginated(query, page, limit) {
  return this.model.find(query)
    .lean()
    .then((value) => {
      if (value.length === 0) return { userMessage: 'Document not found' };
      const count = value.length;
      // page === 0 must be handled
      const start = parseInt(limit) * (parseInt(page) - 1);
      const end = start + parseInt(limit);
      // slicing the array
      value = value.slice(start, end);
      // could return it another way...
      value.push({ 'querySize': count });
      return value;
    })
    .catch((reason) => {
      // ...handling code
    });
}

Related

Query with Mongoose multiple times without nesting

I'm trying to generate a document with node.js that needs to run multiple unrelated database queries from a mongo database.
Here is my current code:
Data.find({}, function(err, results) {
  if (err) return next(err);
  // finished getting data
  res.render('page');
});
The problem is that if I try to run another query, I seem to have to nest it within the first one so that it waits for the first one to finish before starting, and then I have to put res.render() within the innermost nested query (if I don't, res.render() will be called before the database has finished fetching the data, and the page won't be rendered with it).
What I have to do:
Data.find({}, function(err, results) {
  if (err) return next(err);
  // finished getting data
  Data2.find({}, function(err, results2) {
    if (err) return next(err);
    // finished getting data 2
    res.render('page');
  });
});
I am going to have more than 2 queries, so if I keep nesting them it's going to get really messy really fast. Is there a cleaner way to do this, such as a way to make the code wait until all the data is returned and the function is run before continuing with the script?
For mongoose you can probably just do a Promise.all() and use .concat() on the resulting arrays of each query.
As a full demo:
var async = require('async'),
    mongoose = require('mongoose'),
    Schema = mongoose.Schema;

var d1Schema = new Schema({ "name": String });
var Data1 = mongoose.model("Data1", d1Schema);

var d2Schema = new Schema({ "title": String });
var Data2 = mongoose.model("Data2", d2Schema);

mongoose.set('debug', true);
mongoose.connect('mongodb://localhost/test');

async.series(
  [
    // Clean
    function(callback) {
      async.each([Data1, Data2], function(model, callback) {
        model.remove({}, callback);
      }, callback);
    },
    // Setup some data
    function(callback) {
      async.each([
        { "name": "Bill", "model": "Data1" },
        { "title": "Something", "model": "Data2" }
      ], function(data, callback) {
        var model = data.model;
        delete data.model;
        mongoose.model(model).create(data, callback);
      }, callback);
    },
    // Actual Promise.all demo
    function(callback) {
      Promise.all([
        Data1.find().exec(),
        Data2.find().exec()
      ]).then(function(result) {
        console.log([].concat.apply([], result));
        callback();
      }).catch(callback);
    }
  ],
  function(err) {
    if (err) throw err;
    mongoose.disconnect();
  }
);
I'm just mixing in async there for brevity of example, but the meat of it is in:
Promise.all([
  Data1.find().exec(),
  Data2.find().exec()
]).then(function(result) {
  console.log([].concat.apply([], result));
})
Here Promise.all() waits for and combines the two results, which would be an "array of arrays", but the .concat() takes care of that. The result will be:
[
{ _id: 59420fd33d48fa0a490247c8, name: 'Bill', __v: 0 },
{ _id: 59420fd43d48fa0a490247c9, title: 'Something', __v: 0 }
]
Showing the objects from each collection, joined together in one array.
You could also use the async.concat method as an alternate, but unless you are using the library already then it's probably just best to stick to promises.
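For reference, a minimal sketch of that async.concat alternative, reusing the Data1/Data2 models from the demo above (async.concat flattens the per-model result arrays into one):

var async = require('async');

async.concat([Data1, Data2], function(model, callback) {
  // run the query for each model; results are concatenated
  model.find({}).exec(callback);
}, function(err, results) {
  if (err) throw err;
  // results is a single array holding documents from both collections
  console.log(results);
});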

How to update collection value

Assume I have a document in the db like this:
{ name: "alex", id: "1" }
I want to update the collection, adding "Mr." to the value in the name field:
{ name: "Mr.alex", id: "1" }
How can I do this? Should I write 2 queries, as below?
db.collection("user").find({id : "1"}).toArray(function(err, result){
var name = result[0].name;
db.collection("user").updateOne({id : "1"}, {name: "Mr." + name},function(err, result){
})
})
Isn't there a better way to do this, like x = x + 1 in MongoDB?
AFAIK it takes two queries: an update operator can't read another field's value, it needs the value to be provided.
However, if you need to do it for all documents or a large number of documents, you can write a script that iterates a cursor, changes the name on each document, and saves it back with db.users.save(user).
The bottom line remains the same.
Try the below:
var cursor = db.users.find({});
while (cursor.hasNext()) {
  var user = cursor.next();
  user.name = "Mr." + user.name;
  db.users.save(user);
}
Not possible using one query. You have to iterate through the documents and save them with updated result.
Currently there is no way to reference the retrieved document in the same query, which would allow you to find and update a document within the same operation.
Thus, you will have to make multiple queries to accomplish what you are looking for:
/*
* Single Document
*/
// I'm assuming the id field is unique and can only return one document
var doc = db.getCollection('user').findOne({ id: '1' });
try {
  db.getCollection('user').updateOne({ id: '1' }, { $set: { name: 'Mr. ' + doc.name } });
} catch (e) {
  print(e);
}
If you want to handle multiple update operations, you can do so by using a Bulk() operations builder:
/*
* Multiple Documents
*/
var bulk = db.getCollection('user').initializeUnorderedBulkOp();

// cursor.forEach() is synchronous in the mongo shell
db.getCollection('user').find({ id: { $gt: 1 } }).forEach(function(user) {
  bulk.find({ id: user.id }).update({ $set: { name: 'Mr. ' + user.name } });
});

bulk.execute();
The key to updating the collection with an existing field is to loop through the array returned from the find().toArray() cursor method and update your collection using the Bulk API, which allows you to send many update operations within a single request (as a batch).
Let's see with some examples how this pans out:
a) For MongoDB server version 3.2 and above
db.collection("user").find({id : "1"}).toArray(function(err, result){
var operations = [];
result.forEach(function(doc){
operations.push({
"updateOne": {
"filter": {
"_id": doc._id,
"name": doc.name
},
"update": {
"$set": { "name": "Mr." + doc.name }
}
}
});
// Send once in 500 requests only
if (operations.length % 500 === 0 ) {
db.collection("user").bulkWrite(operations, function(err, r) {
// do something with result
}
operations = [];
}
});
// Clear remaining queue
if (operations.length > 0) {
db.collection("user").bulkWrite(operations, function(err, r) {
// do something with result
}
}
})
In the above, you initialise the operations array that holds the update operations for the Bulk API's bulkWrite() function.
The result from the find().toArray() cursor method is then iterated to build the operations array of update objects. The operations are limited to batches of 500.
Choosing a value lower than the default batch limit of 1000 is a deliberate choice. As noted in the documentation, MongoDB by default sends operations to the server in batches of at most 1000, and there is no guarantee that 1000 operations will actually fit under the 16MB BSON limit.
So you still need to stay on the "safe" side and impose a lower batch size that keeps the total payload under the data limit when sending to the server.
b) If using MongoDB v3.0 or below:
// Get the collection
var col = db.collection('user');

// Initialize the unordered batch
var batch = col.initializeUnorderedBulkOp();

// Initialize counter
var counter = 0;

col.find({id : "1"}).toArray(function(err, result) {
  result.forEach(function(doc) {
    batch.find({
      "_id": doc._id,
      "name": doc.name
    }).updateOne({
      "$set": { "name": "Mr. " + doc.name }
    });

    counter++;

    if (counter % 500 === 0) {
      batch.execute(function(err, r) {
        // do something with result
      });
      // Re-initialize batch
      batch = col.initializeUnorderedBulkOp();
    }
  });

  if (counter % 500 !== 0) {
    batch.execute(function(err, r) {
      // do something with result
    });
  }
});

Mongoose text search pagination in MongoDB/Node.js

I have this problem: I don't know how to do pagination with the Mongoose text search module. I am a total beginner, so please help. This is my code:
searchModul.create({ title: title, description: description }, function (err) {
  if (err) return handleError(err);
  searchModul.textSearch(title, function (err, output) {
    if (err) return handleError(err);
    res.render('search', {
      title : 'title',
      results : output.results
    });
  });
});
I also want to know how to display that pagination in the search.ejs view. Thanks in advance.
To implement pagination, use $limit to define a limit for each query, and $skip to navigate pages.
From the docs:
$limit takes a positive integer that specifies the maximum number of documents to pass along.
$skip takes a positive integer that specifies the maximum number of documents to skip.
There are previous questions like this one, check out the answers here and a more detailed tutorial here.
Hope that helps!
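A minimal aggregation sketch of that idea, assuming a text index on the collection and caller-supplied page and pageSize values (both names are illustrative):

var page = 2, pageSize = 50; // illustrative values

searchModul.aggregate([
  { $match: { $text: { $search: title } } }, // $text must be in the first stage
  { $skip: (page - 1) * pageSize },
  { $limit: pageSize }
]).exec(function (err, docs) {
  if (err) return handleError(err);
  // docs holds one page of matching documents
});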
I think you want something like this:
searchModul.find({ $text: { $search: title }}).skip(50).limit(50).exec(function (err, output) {
  if (err) return handleError(err);
  res.render('search', {
    title : 'title',
    results : output
  });
});
This will return the second 50 items that match: .skip(50) is the number of items to skip, and .limit(50) is the number of items to return.
Assuming the model uses a pagination plugin such as mongoose-paginate:
const field = req.query.field;
const search = {};
search[field] = req.query.searchValue;
search["role"] = req.params._role;
search["status"] = parseInt(req.query.status);

user = await Users.paginate(search, {
  limit: parseInt(req.query.limit),
  page: parseInt(req.query.page),
});
OR
var ObjectId = mongoose.Types.ObjectId;
let _id = new ObjectId(req.query.q);

user = await Users.paginate(
  {
    role: req.params._role,
    _id: _id,
  },
  {
    limit: parseInt(req.query.limit),
    page: parseInt(req.query.page),
  });

Return count before limit and skip applied with mongoose

I have a schema defined in mongoose. When I want to retrieve all the items I call
Item
  .find()
  .limit(limit)
  .skip(skip)
  .exec(function (err, Items) {
    if (err) { return handleError(res, err); }
    return res.json(200, Items);
  });
In the future I want to perform some filtering and then count the number of results. How can I include in the response the number of results before limit and skip are applied?
I tried including count() after find, but had no luck making it work. What am I missing?
I believe two queries are necessary; you can execute them in parallel:
var Promise = require('bluebird');

Promise.all([
  Item.find().limit(limit).skip(skip).exec(),
  Item.count().exec()
]).spread(function(items, count) {
  res.json(200, { items: items, count: count });
}, function(err) {
  handleError(res, err);
});
Note that I've adapted the JSON response format in order to fit in a new property containing the count -- I've changed the response from an array to { items: Array, count: Number }.
Calling .exec() without passing a callback returns a Mongoose Promise. In the example above, I'm using Bluebird (npm install bluebird) to manage the asynchronous flow, but you could use any other library of your choice as well.
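For instance, a minimal sketch of the same flow with native promises instead of Bluebird (there is no .spread(), so the results are picked out of the array by index):

Promise.all([
  Item.find().limit(limit).skip(skip).exec(),
  Item.count().exec()
]).then(function(results) {
  var items = results[0];
  var count = results[1];
  res.json(200, { items: items, count: count });
}).catch(function(err) {
  handleError(res, err);
});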

How can I save multiple documents concurrently in Mongoose/Node.js?

At the moment I use save to add a single document. Suppose I have an array of documents that I wish to store as single objects. Is there a way of adding them all with a single function call, and then getting a single callback when it is done? I could add all the documents individually, but managing the callbacks to work out when everything is done would be problematic.
Mongoose does now support passing multiple document structures to Model.create. To quote their API example, it supports being passed either an array or a varargs list of objects with a callback at the end:
Candy.create({ type: 'jelly bean' }, { type: 'snickers' }, function (err, jellybean, snickers) {
  if (err) // ...
});
Or
var array = [{ type: 'jelly bean' }, { type: 'snickers' }];
Candy.create(array, function (err, jellybean, snickers) {
  if (err) // ...
});
Edit: As many have noted, this does not perform a true bulk insert - it simply hides the complexity of calling save multiple times yourself. There are answers and comments below explaining how to use the actual Mongo driver to achieve a bulk insert in the interest of performance.
Mongoose 4.4 added a method called insertMany
Shortcut for validating an array of documents and inserting them into
MongoDB if they're all valid. This function is faster than .create()
because it only sends one operation to the server, rather than one for each
document.
Quoting vkarpov15 from issue #723:
The tradeoffs are that insertMany() doesn't trigger pre-save hooks, but it should have better performance because it only makes 1 round-trip to the database rather than 1 for each document.
The method's signature is identical to create:
Model.insertMany([ ... ], (err, docs) => {
  // ...
})
Or, with promises:
Model.insertMany([ ... ]).then((docs) => {
  // ...
}).catch((err) => {
  // ...
})
Mongoose doesn't have bulk inserts implemented yet (see issue #723).
Since you know the number of documents you're saving, you could write something like this:
var total = docArray.length,
    result = [];

function saveAll() {
  var doc = docArray.pop();
  doc.save(function(err, saved) {
    if (err) throw err; // handle error
    result.push(saved);
    if (--total) saveAll();
    else { /* all saved here */ }
  });
}

saveAll();
This, of course, is a stop-gap solution and I would recommend using some kind of flow-control library (I use q and it's awesome).
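For example, a minimal sketch of the same idea with q (Q.ninvoke adapts each Node-style doc.save() call into a promise):

var Q = require('q');

Q.all(docArray.map(function(doc) {
  // turn each callback-style doc.save() into a promise
  return Q.ninvoke(doc, 'save');
})).then(function(saved) {
  // all documents saved
}).fail(function(err) {
  // handle error
});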
Bulk inserts in Mongoose can be done with .insert() unless you need to access middleware.
Model.collection.insert(docs, options, callback)
https://github.com/christkv/node-mongodb-native/blob/master/lib/mongodb/collection.js#L71-91
Use async.parallel and your code will look like this:
async.parallel([obj1.save.bind(obj1), obj2.save.bind(obj2), obj3.save.bind(obj3)], callback);
Since the callback convention is the same in Mongoose as in async (err, result), you don't need to wrap the saves in your own callbacks; just bind them to their documents so they keep the right this, add them to an array, and you will get a callback when all are finished.
If you use mapLimit you can control how many documents you want to save in parallel. In this example we save 10 documents in parallel until all items are successfully saved.
async.mapLimit(myArray, 10, function(document, next) {
  document.save(next);
}, done);
I know this is an old question, but it worries me that there are no properly correct answers here. Most answers just talk about iterating through all the documents and saving each of them individually, which is a BAD idea if you have more than a few documents, especially when the process is repeated across many requests.
MongoDB specifically has a batchInsert() call for inserting multiple documents, and this should be used from the native mongodb driver. Mongoose is built on this driver, and it doesn't have support for batch inserts. It probably makes sense, as it is supposed to be an object document modelling tool for MongoDB.
Solution: Mongoose comes with the native MongoDB driver. You can use that driver by requiring it with require('mongoose/node_modules/mongodb') (not too sure about this; you can always install the mongodb npm package again if it doesn't work, but I think it should) and then do a proper batch insert.
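A minimal sketch of a batch insert through the native driver underneath Mongoose (the collection name and documents are illustrative; depending on the driver release the array-insert method is insert() or insertMany(), not necessarily batchInsert()):

var mongoose = require('mongoose');

mongoose.connect('mongodb://localhost/test');

mongoose.connection.on('open', function() {
  // grab the raw driver collection that backs the Mongoose model
  var collection = mongoose.connection.db.collection('users');
  var docs = [{ name: 'a' }, { name: 'b' }, { name: 'c' }];

  // one round-trip for the whole array
  collection.insert(docs, function(err, result) {
    if (err) throw err;
    console.log('inserted ' + docs.length + ' documents');
  });
});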
Newer versions of MongoDB support bulk operations:
var col = db.collection('people');
var batch = col.initializeUnorderedBulkOp();

batch.insert({name: "John"});
batch.insert({name: "Jane"});
batch.insert({name: "Jason"});
batch.insert({name: "Joanne"});

batch.execute(function(err, result) {
  if (err) console.error(err);
  console.log('Inserted ' + result.nInserted + ' row(s).');
});
Use the insertMany function to insert many documents. This sends only one operation to the server, and Mongoose validates all the documents before hitting the MongoDB server. By default Mongoose inserts items in the order they appear in the array. If you are OK with not maintaining any order, set ordered: false.
Important - error handling:
When ordered: true, validation and error handling happen as a group: if one document fails, everything fails.
When ordered: false, validation and error handling happen individually and the operation continues; errors are reported back in an array of errors. See the sketch below.
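A minimal sketch of the ordered: false behaviour (the model and documents are illustrative, and the exact error shape can vary by Mongoose version):

const docs = [{ name: 'a' }, { name: 'b' }, { name: 'a' }]; // 'a' duplicated under a unique index

Model.insertMany(docs, { ordered: false })
  .then((inserted) => {
    // every valid document was inserted
  })
  .catch((err) => {
    // with ordered: false the valid documents are still inserted;
    // err.writeErrors (when present) lists the individual failures
  });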
Here is another way without using additional libraries (no error checking included)
function saveAll(callback) {
  var count = 0;
  docs.forEach(function(doc) {
    doc.save(function(err) {
      count++;
      if (count == docs.length) {
        callback();
      }
    });
  });
}
You can use the promise returned by Mongoose's save. The Promise in Mongoose does not have all(), but you can add the feature with this module.
Create a module that enhances the Mongoose promise with all():
var Promise = require("mongoose").Promise;
Promise.all = function(promises) {
var mainPromise = new Promise();
if (promises.length == 0) {
mainPromise.resolve(null, promises);
}
var pending = 0;
promises.forEach(function(p, i) {
pending++;
p.then(function(val) {
promises[i] = val;
if (--pending === 0) {
mainPromise.resolve(null, promises);
}
}, function(err) {
mainPromise.reject(err);
});
});
return mainPromise;
}
module.exports = Promise;
Then use it with mongoose:
var Promise = require('./promise')
...
var tasks = [];
for (var i = 0; i < docs.length; i++) {
  tasks.push(docs[i].save());
}

Promise.all(tasks)
  .then(function(results) {
    console.log(results);
  }, function (err) {
    console.log(err);
  });
Add a file called mongoHelper.js
var MongoClient = require('mongodb').MongoClient;

MongoClient.saveAny = function(data, collection, callback) {
  if (data instanceof Array) {
    saveRecords(data, collection, callback);
  } else {
    saveRecord(data, collection, callback);
  }
};

function saveRecord(data, collection, callback) {
  collection.save(data, {w: 1}, function(err, result) {
    if (err)
      throw new Error(err);
    callback(result);
  });
}

function saveRecords(data, collection, callback) {
  save(data, collection, callback);
}

function save(data, collection, callback) {
  collection.save(data.pop(), {w: 1}, function(err, result) {
    if (err) {
      throw new Error(err);
    }
    if (data.length > 0)
      save(data, collection, callback);
    else
      callback(result);
  });
}

module.exports = MongoClient;
Then in your code, change your require to:
var MongoClient = require("./mongoHelper.js");
Then when it is time to save, call it (after you have connected and retrieved the collection):
MongoClient.saveAny(data, collection, function(){db.close();});
You can change the error handling to suit your needs, pass back the error in the callback etc.
This is an old question, but it came up first for me in google results when searching "mongoose insert array of documents".
There are two options you can use, model.create() [Mongoose] and model.collection.insert() [MongoDB]. View a more thorough discussion of the pros/cons of each option here:
Mongoose (mongodb) batch insert?
Here is an example of using the MongoDB driver's collection.insert() directly from Mongoose. Please note that if you don't have many documents, say fewer than 100, you don't need to use MongoDB's bulk operation (see this).
MongoDB also supports bulk insert through passing an array of
documents to the db.collection.insert() method.
var mongoose = require('mongoose');
var Q = require('q'); // KrisKowal's Q (https://github.com/kriskowal/q)

var userSchema = mongoose.Schema({
  email : { type: String, index: { unique: true } },
  name  : String
});

var User = mongoose.model('User', userSchema);

function saveUsers(users) {
  // Wrap the callback in a promise so that the caller of this
  // function can act upon its success or failure
  var deferred = Q.defer();
  User.collection.insert(users, function callback(error, insertedDocs) {
    if (!error)
      deferred.resolve(insertedDocs);
    else
      deferred.reject({ error: error });
  });
  return deferred.promise;
}
var users = [{email: 'foo@bar.com', name: 'foo'}, {email: 'baz@bar.com', name: 'baz'}];

saveUsers(users).then(function() {
  // handle success case here
})
.fail(function(error) {
  // handle error case here
});
