Mongoose and mapReduce - map function not called - node.js

I'm attempting to use the mapReduce function of Mongodb via Mongoose, but the map function I'm passing in is never called. Here is the data currently contained in the "Post" model collection:
[ { data: 'Tag test data',
name: 'Tag Test',
_id: 5130dff2560105c235000002,
__v: 0,
comments: [],
tags: [ 'tag1', 'tag2', 'tag3' ] },
{ data: 'Testing tags. Again.',
name: 'Another test post',
_id: 5131213b611fe1f443000002,
__v: 0,
comments: [],
tags: [ 'tags', 'test', 'again' ] } ]
Here is the code:
var Schema = mongoose.Schema;
var PostSchema = new Schema ({
name : String
, data : String
, tags : [String]
});
mongoose.model('Post', PostSchema);
var o = {};
o.map = function() {
if (!this.tags) {
//console.log('No tags found for Post ' + this.name);
return;
}
for (index in this.tags) {
emit(this.tags[index], 1);
}
}
o.reduce = function(previous, current) {
var count = 0;
for (index in current) {
count += current[index];
}
return count;
}
o.out = { replace : 'tags'}
o.verbose = true;
var Post = mongoose.model('Post');
Post.mapReduce(o, function(error, model, stats) {
console.log('model: ' + model);
console.log('stats: ' + stats);
});
The "model" and "stats" objects are always undefined, and the log statements in the map function are never called. If I do something like this with the Post model outside of the mapReduce function, I get the data at the top of the post as expected:
Post.find().exec(function(err, posts) {
console.log(posts);
});
Any suggestions? I'm sure something is just slightly off...

You can't call console.log from within the map and reduce functions as it's not supported by Mongo's JavaScript engine.

To debug your map/reduce/finalize functions you can use the MongoDB print statement. The output will be added to your Mongo log file.
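For example, here is a minimal sketch of the map function above with print-based debugging; note this is only an illustration, and the output goes to the mongod log file, not the Node console:
o.map = function () {
  if (!this.tags) {
    // print() runs inside MongoDB's JavaScript engine, so this line
    // appears in the mongod log file rather than in your Node.js console
    print('No tags found for Post ' + this.name);
    return;
  }
  for (var index in this.tags) {
    emit(this.tags[index], 1);
  }
};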

Related

How to push data into existing element of the array of objects in nodejs?

I have this array list of objects.
var list = [{
'ID':1,
'name' : 'Vikas Yadav',
'mobile':8095638475,
'sent':false
},
{
'ID':2,
'name' : 'Rajat Shukla',
'mobile':7486903546,
'sent':false
},
{
'ID':3,
'name' : 'Munna Bhaiya',
'mobile':9056284550,
'sent':false
},
{
'ID':4,
'name' : 'Guddu Pandit',
'mobile':7780543209,
'sent':false
},
{
'ID':5,
'name' : 'Srivani Iyer',
'mobile':8880976501,
'sent':false
}];
Now I want to push two more fields into a specific element of this array via a for loop:
var timeAndOTPArray = {
"time" : new Date(),
"OTP": req.params.ran
}
I am retrieving the list data from cookies in one of the routes.
Below is the code where I am trying to push the data into the element that matches the condition.
var lists = req.cookies.list;
Object.keys(lists).forEach(function(item) {
if(req.params.ID == lists[item].ID){ //look for match with name
(lists[item]).push(timeAndOTPArray);
newAddedList.push(lists[item]);
console.log(item, lists[item]);
}
});
Perhaps it's not the correct way. Please help!
Wish you a happy and a prosperous Diwali.
Cheers!
You can use findIndex and then merge the new properties into the matching object in the list, like this:
//List only with ID, easier to read the code
var list = [{'ID':1,},{'ID':2,}]
//your object
var timeAndOTPArray = {
"time" : new Date(),
"OTP": "otp"
}
//Index where object with ID == 2 is
var index = list.findIndex(obj => obj.ID == 2);
//Append the 'timeAndOTPArray' properties into the object itself
list[index] = {"time": timeAndOTPArray.time, "OTP":timeAndOTPArray.OTP, ...list[index]}
console.log(list)
I guess this will help
var lists = req.cookies.list;
Object.keys(lists).forEach(function(item) {
if(req.params.ID == lists[item].ID){ //look for match with ID
Object.keys(timeAndOTPArray).forEach(key=>{
lists[item][key]=timeAndOTPArray[key];
})
}
});
Good evening! I'd advise that the best option is to update with map:
const listItems = [
{
ID: 1,
name: 'Vikas Yadav',
mobile: 8095638475,
sent: false,
},
{
ID: 2,
name: 'Rajat Shukla',
mobile: 7486903546,
sent: false,
},
{
ID: 3,
name: 'Munna Bhaiya',
mobile: 9056284550,
sent: false,
},
{
ID: 4,
name: 'Guddu Pandit',
mobile: 7780543209,
sent: false,
},
{
ID: 5,
name: 'Srivani Iyer',
mobile: 8880976501,
sent: false,
},
];
const paramId = 4;
const result = listItems.map((item) => {
if (paramId === item.ID) {
return {
...item,
time: new Date(),
OTP: 'smth',
};
}
return item;
});
console.log('result', result);
For appending, you can do this:
lists[index] = Object.assign(lists[index], timeAndOTPArray);
If you are using ES6:
lists[index] = {...lists[index], ...timeAndOTPArray};
Here lists is an array of objects, so lists[item] is an object, and you can't push onto an object.
In your code timeAndOTPArray is also an object, not an array.
Instead, initialize an empty array called timeAndOTPArray on each object in lists, then:
var index = lists.findIndex(function(item){ return item.ID == req.params.ID});
lists[index].timeAndOTPArray.push(timeAndOTPArray);
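A fuller sketch of that idea (illustrative only; it assumes each list entry is created with the empty array up front):
// give every entry an empty timeAndOTPArray array when building the list, e.g.:
var list = [
  { ID: 1, name: 'Vikas Yadav', mobile: 8095638475, sent: false, timeAndOTPArray: [] },
  // ...other entries...
];
// later, in the route:
var index = lists.findIndex(function (item) { return item.ID == req.params.ID; });
if (index !== -1) {
  // push the { time, OTP } object into that entry's array
  lists[index].timeAndOTPArray.push(timeAndOTPArray);
}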

How to search with mongooseJs with nested logic when all parameter are not guaranteed?

I want to search a collection with two parameters, but there is no guarantee that both will be available; either one can be missing, in which case I want to ignore it and search with just the other parameter.
I also want to search in two fields with the second parameter using $or.
My Code
NodeJs Express Mongoose
colec.find({
$and: [{
'address.zip': req.query.p,
$or: [{ 'name': req.query.n }, { 'tags': req.query.n }]
}]
}, function (err, foundProfiles) {
//Some Code
})
My code before adding the tags search:
var terms = {};
if (req.query.q) {
var name = req.query.q;
}
if (req.query.p) {
terms['address.zip'] = req.query.p;
}
colec.find(terms, function(err, foundProfiles){
//some code
})
I got this after searching for a long time.
var dbQueries = [];
if (req.query.q) {
var search = req.query.q;
dbQueries.push({
$or: [
{ name: search },
{ tags: search },
]
});
}
if (req.query.p) {
dbQueries.push({ 'address.zip': req.query.p });
}
dbQueries = { $and: dbQueries }
Collection.find(dbQueries, function (err, foundProfiles) {
//some code
});
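One caveat worth adding (my own note, not from the answer above): if neither req.query.q nor req.query.p is present, dbQueries stays an empty array and MongoDB rejects an empty $and array, so a small guard helps:
// fall back to an empty filter (match everything) when no parameters were given
var filter = dbQueries.length ? { $and: dbQueries } : {};
Collection.find(filter, function (err, foundProfiles) {
    //some code
});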

NodeJS MongoDB Mongoose export nested subdocuments and arrays to XLSX columns

I have query results from MongoDB as an array of documents with nested subdocuments and arrays of subdocuments.
[
{
RecordID: 9000,
RecordType: 'Item',
Location: {
_id: 5d0699326e310a6fde926a08,
LocationName: 'Example Location A'
}
Items: [
{
Title: 'Example Title A',
Format: {
_id: 5d0699326e310a6fde926a01,
FormatName: 'Example Format A'
}
},
{
Title: 'Example Title B',
Format: {
_id: 5d0699326e310a6fde926a01,
FormatName: 'Example Format B'
}
}
],
},
{
RecordID: 9001,
RecordType: 'Item',
Location: {
_id: 5d0699326e310a6fde926a08,
LocationName: 'Example Location C'
},
Items: [
{
Title: 'Example Title C',
Format: {
_id: 5d0699326e310a6fde926a01,
FormatName: 'Example Format C'
}
}
],
}
]
Problem
I need to export the results to XLSX in column order. The XLSX library currently exports only the top-level properties (such as RecordID and RecordType). I also need to export the nested objects and arrays of objects. Given a list of property names, e.g. RecordID, RecordType, Location.LocationName, Items.Title, Items.Format.FormatName, the properties must be exported to XLSX columns in the specified order.
Desired result
Here is the desired 'flattened' structure (or something similar) that
I think should be able to convert to XLSX columns.
[
{
'RecordID': 9000,
'RecordType': 'Item',
'Location.LocationName': 'Example Location A',
'Items.Title': 'Example Title A, Example Title B',
'Items.Format.FormatName': 'Example Format A, Example Format B',
},
{
'RecordID': 9001,
'RecordType': 'Item',
'Location.LocationName': 'Example Location C',
'Items.Title': 'Example Title C',
'Items.Format.FormatName': 'Example Format C',
}
]
I am using the XLSX library to convert the query results to XLSX which works for top-level properties only.
const worksheet: XLSX.WorkSheet = XLSX.utils.json_to_sheet(results.data);
const workbook: XLSX.WorkBook = { Sheets: { 'data': worksheet }, SheetNames: ['data'] };
const excelBuffer: any = XLSX.write(workbook, { bookType: 'xlsx', type: 'array' });
const data: Blob = new Blob([excelBuffer], { type: EXCEL_TYPE });
FileSaver.saveAs(data, new Date().getTime());
POSSIBLE OPTIONS
I am guessing I need to 'flatten' the structure either using aggregation in the query or by performing post-processing when the query is returned.
Option 1: Build the logic in the MongoDB query to flatten the results.
$replaceRoot might work since it is able to "promote an existing embedded document to the top level". Although I am not sure if this will solve the problem exactly, I do not want to modify the documents in place, I just need to flatten the results for exporting.
Here is the MongoDB query I am using to produce the results:
records.find({ '$and': [ { RecordID: { '$gt': 9000 } } ]},
{ skip: 0, limit: 10, projection: { RecordID: 1, RecordType: 1, 'Items.Title': 1, 'Items.Location': 1 }});
Option 2: Iterate and flatten the results on the Node server
This is likely not the most performant option, but might be the easiest if I can't find a way to do so within the MongoDB query.
UPDATE:
I may be able to use MongoDB aggregate $project to 'flatten' the results. For example, this aggregate query effectively 'flattens' the results by 'renaming' the properties. I just need to figure out how to implement the query conditions within the aggregate operation.
db.records.aggregate({
$project: {
RecordID: 1,
RecordType: 1,
Title: '$Items.Title',
Format: '$Items.Format'
}
})
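For reference, a rough sketch (untested) of how the query conditions might fold into the pipeline as a $match stage, assuming the RecordID filter from the original query is the only condition:
db.records.aggregate([
  // same condition as the original find() query
  { $match: { RecordID: { $gt: 9000 } } },
  { $skip: 0 },
  { $limit: 10 },
  {
    $project: {
      RecordID: 1,
      RecordType: 1,
      Title: '$Items.Title',
      Format: '$Items.Format'
    }
  }
]);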
UPDATE 2:
I have abandoned the $project solution because I would need to change the entire API to support aggregation. Also, I would need a substitute for populate, because aggregate does not support it; it uses $lookup instead, which is possible but time-consuming since I would need to build the queries dynamically. I am going back to look into how to flatten the object by creating a function that iterates the array of objects recursively.
Below is a solution for transforming the Mongo data on the server via a function flattenObject which recursively flattens nested objects and returns a 'dot-type' key for nested paths.
Note that the snippet below also contains a function that renders an editable table for preview; however, the important part (downloading the file) is triggered when you run the snippet and click the 'Download' button.
const flattenObject = (obj, prefix = '') =>
Object.keys(obj).reduce((acc, k) => {
const pre = prefix.length ? prefix + '.' : '';
if (typeof obj[k] === 'object') Object.assign(acc, flattenObject(obj[k], pre + k));
else acc[pre + k] = obj[k];
return acc;
}, {});
var data = [{
RecordID: 9000,
RecordType: "Item",
Location: {
_id: "5d0699326e310a6fde926a08",
LocationName: "Example Location A"
},
Items: [{
Title: "Example Title A",
Format: {
_id: "5d0699326e310a6fde926a01",
FormatName: "Example Format A"
}
},
{
Title: "Example Title B",
Format: {
_id: "5d0699326e310a6fde926a01",
FormatName: "Example Format B"
}
}
]
},
{
RecordID: 9001,
RecordType: "Item",
Location: {
_id: "5d0699326e310a6fde926a08",
LocationName: "Example Location C"
},
Items: [{
Title: "Example Title C",
Format: {
_id: "5d0699326e310a6fde926a01",
FormatName: "Example Format C"
}
}]
}
];
const EXCEL_MIME_TYPE = `application/vnd.ms-excel`;
const flattened = data.map(e => flattenObject(e));
const ws_default_header = XLSX.utils.json_to_sheet(flattened);
const ws_custom_header = XLSX.utils.json_to_sheet(flattened, {
header: ['Items.Title', 'RecordID', 'RecordType', 'Location.LocationName', 'Items.Format.FormatName']
});
const def_workbook = {
Sheets: {
'data': ws_default_header
},
SheetNames: ['data']
}
const custom_workbook = {
Sheets: {
'data': ws_custom_header
},
SheetNames: ['data']
}
const def_excelBuffer = XLSX.write(def_workbook, {
bookType: 'xlsx',
type: 'array'
});
const custom_excelBuffer = XLSX.write(custom_workbook, {
bookType: 'xlsx',
type: 'array'
});
const def_blob = new Blob([def_excelBuffer], {
type: EXCEL_MIME_TYPE
});
const custom_blob = new Blob([custom_excelBuffer], {
type: EXCEL_MIME_TYPE
});
const def_button = document.getElementById('dl-def')
/* trigger browser to download file */
def_button.onclick = e => {
e.preventDefault()
saveAs(def_blob, `${new Date().getTime()}.xlsx`);
}
const custom_button = document.getElementById('dl-cus')
/* trigger browser to download file */
custom_button.onclick = e => {
e.preventDefault()
saveAs(custom_blob, `${new Date().getTime()}.xlsx`);
}
/*
render editable table to preview (for SO convenience)
*/
const html_string_default = XLSX.utils.sheet_to_html(ws_default_header, {
id: "data-table",
editable: true
});
const html_string_custom = XLSX.utils.sheet_to_html(ws_custom_header, {
id: "data-table",
editable: true
});
document.getElementById("container").innerHTML = html_string_default;
document.getElementById("container-2").innerHTML = html_string_custom;
<script src="https://cdnjs.cloudflare.com/ajax/libs/xlsx/0.14.3/xlsx.full.min.js"></script>
<head>
<title>Excel file generation from JSON</title>
<meta charset="utf-8" />
<style>
.xport,
.btn {
display: inline;
text-align: center;
}
a {
text-decoration: none
}
#data-table,
#data-table th,
#data-table td {
border: 1px solid black
}
</style>
</head>
<script>
function render(type, fn, dl) {
var elt = document.getElementById('data-table');
var wb = XLSX.utils.table_to_book(elt, {
sheet: "Sheet JS"
});
return dl ?
XLSX.write(wb, {
bookType: type,
bookSST: true,
type: 'array'
}) :
XLSX.writeFile(wb, fn || ('SheetJSTableExport.' + (type || 'xlsx')));
}
</script>
<div>Default Header</div>
<div id="container"></div>
<br/>
<div>Custom Header</div>
<div id="container-2"></div>
<br/>
<table id="xport"></table>
<button type="button" id="dl-def">Download Default Header Config</button>
<button type="button" id="dl-cus">Download Custom Header Config</button>
<script src="https://cdnjs.cloudflare.com/ajax/libs/FileSaver.js/1.3.8/FileSaver.min.js"></script>
I wrote a function that iterates all objects in the results array and creates new flattened objects recursively. The flattenObject function shown here is similar to the previous answer, and I took additional inspiration from this related answer.
The '_id' properties are specifically excluded from being added to the flattened object, since ObjectIds are still returned as BSON types even though I have the lean() option set.
I still need to figure out how to sort the properties so that they are in the given order, e.g. RecordID, RecordType, Items.Title. I believe that might be easiest to achieve with a separate function that iterates the flattened results, although not necessarily the most performant. Let me know if anyone has suggestions on how to sort the keys into a given order, or any improvements to the solution.
const apiCtrl = {};
/**
* Async array iterator
*/
apiCtrl.asyncForEach = async (array, callback) => {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array)
}
}
// Check if a value is an object
const isObject = (val) => {
return typeof val == 'object' && val instanceof Object && !(val instanceof Array);
}
// Check if a value is a date object
const isDateObject = (val) => {
return Object.prototype.toString.call(val) === '[object Date]';
}
/**
* Iterate object properties recursively and flatten all values to top level properties
* @param {object} obj Object to flatten
* @param {string} prefix A string to hold the property name
* @param {object} res A temp object to store the current iteration
* Return a new object with all properties on the top level only
*
*/
const flattenObject = (obj, prefix = '', res = {}) =>
Object.entries(obj).reduce((acc, [key, val]) => {
const k = `${prefix}${key}`
// Skip _ids since they are returned as bson values
if (k.indexOf('_id') === -1) {
// Check if value is an object
if (isObject(val) && !isDateObject(val)) {
flattenObject(val, `${k}.`, acc)
// Check if value is an array
} else if (Array.isArray(val)) {
// Iterate each array value and call function recursively
val.map(element => {
flattenObject(element, `${k}.`, acc);
});
// If value is not an object or an array
} else if (val !== null && val !== undefined) {
// Check if property has a value already
if (res[k]) {
// Check for duplicate values
if (typeof res[k] === 'string' && res[k].indexOf(val) === -1) {
// Append value with a separator character at the beginning
res[k] += '; ' + val;
}
} else {
// Set value
res[k] = val;
}
}
}
return acc;
}, res);
/**
* Convert DB query results to an array of flattened objects
* Required to build a format that is exportable to csv, xlsx, etc.
* #param {array} results Results of DB query
* Return a new array of objects with all properties on the top level only
*/
apiCtrl.buildExportColumns = async (results) => {
const data = results.data;
let exportColumns = [];
if (data && data.length > 0) {
try {
// Iterate all records in results data array
await apiCtrl.asyncForEach(data, async (record) => {
// Convert the multi-level object to a flattened object
const flattenedObject = flattenObject(record);
// Push flattened object to array
exportColumns.push(flattenedObject);
});
} catch (e) {
console.error(e);
}
}
return exportColumns;
}
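As for the column-order question above, one option (my assumption, mirroring the header option used in the first answer's snippet) is to pass the desired property list to json_to_sheet so the XLSX columns come out in that order:
// columnOrder is an illustrative property list; flattened keys not listed are appended after these
const columnOrder = ['RecordID', 'RecordType', 'Location.LocationName', 'Items.Title', 'Items.Format.FormatName'];
// inside an async function:
const flattenedRows = await apiCtrl.buildExportColumns(results);
const worksheet = XLSX.utils.json_to_sheet(flattenedRows, { header: columnOrder });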

mongoose recursive populate

I have been searching for a while and didn't find a good answer. I have an n-deep tree that I am storing in the DB, and I would like to populate all parents so that in the end I get the full tree:
node
-parent
-parent
.
.
-parent
So far I populate to level 2, and as I mentioned I need to get to level n.
Node.find().populate('parent').exec(function (err, items) {
if (!err) {
Node.populate(items, {path: 'parent.parent'}, function (err, data) {
return res.send(data);
});
} else {
res.statusCode = code;
return res.send(err.message);
}
});
you can do this now (with https://www.mongodb.com/blog/post/introducing-version-40-mongoose-nodejs-odm)
var mongoose = require('mongoose');
// mongoose.Promise = require('bluebird'); // it should work with native Promise
mongoose.connect('mongodb://......');
var NodeSchema = new mongoose.Schema({
children: [{type: mongoose.Schema.Types.ObjectId, ref: 'Node'}],
name: String
});
var autoPopulateChildren = function(next) {
this.populate('children');
next();
};
NodeSchema
.pre('findOne', autoPopulateChildren)
.pre('find', autoPopulateChildren)
var Node = mongoose.model('Node', NodeSchema)
var root=new Node({name:'1'})
var header=new Node({name:'2'})
var main=new Node({name:'3'})
var foo=new Node({name:'foo'})
var bar=new Node({name:'bar'})
root.children=[header, main]
main.children=[foo, bar]
Node.remove({})
.then(() => Promise.all([foo, bar, header, main, root].map(p=>p.save())))
.then(_=>Node.findOne({name:'1'}))
.then(r=>console.log(r.children[1].children[0].name)) // foo
A simple alternative, without Mongoose:
function upsert(coll, o){ // takes object returns ids inserted
if (o.children){
return Promise.all(o.children.map(i=>upsert(coll,i)))
.then(children=>Object.assign(o, {children})) // replace the objects children by their mongo ids
.then(o=>coll.insertOne(o))
.then(r=>r.insertedId);
} else {
return coll.insertOne(o)
.then(r=>r.insertedId);
}
}
var root = {
name: '1',
children: [
{
name: '2'
},
{
name: '3',
children: [
{
name: 'foo'
},
{
name: 'bar'
}
]
}
]
}
upsert(mycoll, root)
const populateChildren = (coll, _id) => // takes a collection and a document id and returns this document fully nested with its children
coll.findOne({_id})
.then(function(o){
if (!o.children) return o;
return Promise.all(o.children.map(i=>populateChildren(coll,i)))
.then(children=>Object.assign(o, {children}))
});
const populateParents = (coll, _id) => // takes a collection and a document id and returns this document fully nested with its parents, that's more what OP wanted
coll.findOne({_id})
.then(function(o){
if (!o.parent) return o;
return populateParents(coll, o.parent) // o.parent should be an id
.then(parent => Object.assign(o, {parent})) // replace that id with the document
});
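A usage sketch, reusing the illustrative mycoll collection from above with some known root id:
// fully nest everything under a known root document
populateChildren(mycoll, rootId)
  .then(tree => console.log(JSON.stringify(tree, null, 2)));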
Another approach is to take advantage of the fact that Model.populate() returns a promise, and that you can fulfill a promise with another promise.
You can recursively populate the node in question via:
Node.findOne({ "_id": req.params.id }, function(err, node) {
populateParents(node).then(function(){
// Do something with node
});
});
populateParents could look like the following:
var Promise = require('bluebird');
function populateParents(node) {
return Node.populate(node, { path: "parent" }).then(function(node) {
return node.parent ? populateParents(node.parent) : Promise.resolve(node);
});
}
It's not the most performant approach, but if your N is small this would work.
With Mongoose 4 this can now be done; you can recurse deeper than a single level.
Example
User.findOne({ userId: userId })
.populate({
path: 'enrollments.course',
populate: {
path: 'playlists',
model: 'Playlist',
populate: {
path: 'videos',
model: 'Video'
}
}
})
.populate('degrees')
.exec()
You can find the official documentation for Mongoose Deep Populate from here.
Just don't :)
There is no good way to do that. Even if you do some map-reduce, it will have terrible performance and problems with sharding, if you have it or will ever need it.
Mongo, as a NoSQL database, is really great for storing tree documents. You can store the whole tree and then use map-reduce to get particular leaves from it, if you don't have a lot of "find particular leaf" queries. If this doesn't work for you, go with two collections:
Simplified tree structure: {_id: "tree1", tree: {1: [2, {3: [4, {5: 6}, 7]}]}}. The numbers are just the IDs of nodes. This way you'll get the whole document in one query. Then you just extract all the ids and run a second query.
Nodes: {_id: 1, data: "something"}, {_id: 2, data: "something else"}.
Then you can write a simple recursive function which replaces the node ids from the first collection with the data from the second: two queries and simple client-side processing (see the sketch at the end of this answer).
Small update:
You can extend second collection to be a little more flexible:
{_id: 2, data: "something", children:[3, 7], parents: [1, 12, 13]}
This way you'll be able to start your search from any leaf. And then, use map-reduce to get to the top or to the bottom of this part of tree.
This is a more straightforward take on caub's answer, which is a great solution. I found it a bit hard to make sense of at first, so I put this version together.
Important: you need both the 'findOne' and 'find' middleware hooks in place for this solution to work.
Also, the model definition must come after the middleware definition.
const mongoose = require('mongoose');
const NodeSchema = new mongoose.Schema({
children: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Node' }], // ref is needed so populate('children') knows which model to use
name: String
});
const autoPopulateChildren = function (next) {
this.populate('children');
next();
};
NodeSchema
.pre('findOne', autoPopulateChildren)
.pre('find', autoPopulateChildren)
const Node = mongoose.model('Node', NodeSchema)
const root = new Node({ name: '1' })
const main = new Node({ name: '3' })
const foo = new Node({ name: 'foo' })
root.children = [main]
main.children = [foo]
mongoose.connect('mongodb://localhost:27017/try', { useNewUrlParser: true }, async () => {
await Node.remove({});
await foo.save();
await main.save();
await root.save();
const result = await Node.findOne({ name: '1' });
console.log(result.children[0].children[0].name);
});
I tried @fzembow's solution but it seemed to return the object from the deepest populated path. In my case I needed to recursively populate an object, but then return the very same object. I did it like that:
// Schema definition
const NodeSchema = new Schema({
name: { type: String, unique: true, required: true },
parent: { type: Schema.Types.ObjectId, ref: 'Node' },
});
const Node = mongoose.model('Node', NodeSchema);
// method
const Promise = require('bluebird');
const recursivelyPopulatePath = (entry, path) => {
if (entry[path]) {
return Node.findById(entry[path])
.then((foundPath) => {
return recursivelyPopulatePath(foundPath, path)
.then((populatedFoundPath) => {
entry[path] = populatedFoundPath;
return Promise.resolve(entry);
});
});
}
return Promise.resolve(entry);
};
//sample usage
Node.findOne({ name: 'someName' })
.then((category) => {
if (category) {
recursivelyPopulatePath(category, 'parent')
.then((populatedNode) => {
// ^^^^^^^^^^^^^^^^^ here is your object but populated recursively
});
} else {
...
}
})
Beware, it's not very efficient. If you need to run such a query often or at deep levels, then you should rethink your design.
Maybe I'm a bit late to this, but Mongoose has some documentation on it:
Ancestors Tree Array
Materialized Path Tree Array
I think the first one is more appropriate for you, as you are looking to populate parents.
With that solution, you can, with one regex query, find all the documents matching your desired output tree.
You would set up documents with this schema:
Tree: {
name: String,
path: String
}
The path field would be the absolute path within your tree:
/mens
/mens/shoes
/mens/shoes/boots
/womens
/womens/shoes
/womens/shoes/boots
For example, you could find all the children of your '/mens/shoes' node with one query:
await Tree.find({ path: /^\/mens\/shoes/ })
It would return all the documents where the path starts with /mens/shoes:
/mens/shoes
/mens/shoes/boots
Then you'd only need some client-side logic to arrange the results into a tree structure (a small map/reduce step); a sketch follows below.
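A rough sketch of that client-side step (my own illustration, assuming the returned documents look like { name, path }):
function buildTree(docs) {
  var byPath = {};
  docs.forEach(function (d) { byPath[d.path] = Object.assign({ children: [] }, d); });
  var roots = [];
  docs.forEach(function (d) {
    var parentPath = d.path.slice(0, d.path.lastIndexOf('/'));
    var parent = byPath[parentPath];
    // attach to the parent if it was part of the result set, otherwise treat it as a root
    if (parent) parent.children.push(byPath[d.path]);
    else roots.push(byPath[d.path]);
  });
  return roots;
}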

Order and limit results in a query with a callback

Using Mongoose, I'd like to make a query with MongoDB and order and limit the results I get. I am doing this with Node.js so I am using callbacks.
So far, I have managed to order my results like this:
myModel.find({ $query: {}, $orderby: { created_at : -1 }}, function (err, items) {
callback( null, items )
});
How can I limit the results I get, selecting an offset (index) and the number of items I want to get?
Using mongodb native:
http://mongodb.github.io/node-mongodb-native/api-generated/collection.html#find
myModel.find(filter)
.limit(pageSize)
.skip(skip)
.sort(sort)
.toArray(callback);
You can also specify the options directly in your query:
myModel.find(filter, {sort: {created_at: -1}, limit: 10}, function(err, items){
});
There is no $orderby in node mongodb native, so I'm not sure what library or other tool you're using.
...
Now that you've clarified Mongoose (which in general I recommend against):
myModel.find(filter).limit(10).exec(function(err, items){
//process
});
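A slightly fuller sketch of the same chained style, adding sort and an offset (the paging variables are illustrative):
myModel.find(filter)
  .sort({ created_at: -1 }) // newest first
  .skip(pageIndex * pageSize) // offset into the result set
  .limit(pageSize) // number of items to return
  .exec(function (err, items) {
    // process items
  });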
To sort documents, we can apply sort to a cursor object. To enforce the order of the sort keys, instead of passing an object we need to pass an array to the sort method.
var MongoClient = require('mongodb').MongoClient,
commandLineArgs = require('command-line-args'),
assert = require('assert');
var options = commandLineOptions();
MongoClient.connect('mongodb://localhost:27017/crunchbase', function(err, db) {
assert.equal(err, null);
console.log("Successfully connected to MongoDB.");
var query = queryDocument(options);
var projection = {
"_id": 0,
"name": 1,
"founded_year": 1,
"number_of_employees": 1
};
var cursor = db.collection('companies').find(query);
cursor.project(projection);
cursor.limit(options.limit);
cursor.skip(options.skip);
cursor.sort([
["founded_year", 1],
["number_of_employees", -1]
]);
var numMatches = 0;
cursor.forEach(
function(doc) {
numMatches = numMatches + 1;
console.log(doc.name + "\n\tfounded " + doc.founded_year +
"\n\t" + doc.number_of_employees + " employees");
},
function(err) {
assert.equal(err, null);
console.log("Our query was:" + JSON.stringify(query));
console.log("Documents displayed: " + numMatches);
return db.close();
}
);
});
function queryDocument(options) {
console.log(options);
var query = {
"founded_year": {
"$gte": options.firstYear,
"$lte": options.lastYear
}
};
if ("employees" in options) {
query.number_of_employees = {
"$gte": options.employees
};
}
return query;
}
function commandLineOptions() {
var cli = commandLineArgs([{
name: "firstYear",
alias: "f",
type: Number
}, {
name: "lastYear",
alias: "l",
type: Number
}, {
name: "employees",
alias: "e",
type: Number
}, {
name: "skip",
type: Number,
defaultValue: 0
}, {
name: "limit",
type: Number,
defaultValue: 20000
}]);
var options = cli.parse()
if (!(("firstYear" in options) && ("lastYear" in options))) {
console.log(cli.getUsage({
title: "Usage",
description: "The first two options below are required. The rest are optional."
}));
process.exit();
}
return options;
}
One thing to notice is the order in which MongoDB applies skip, limit and sort
sort
skip
limit
It's also possible to sort the data on the MongoDB side, provided that we've set up the indexing.
Notice that the MongoDB driver only sends the query when we call a cursor method and pass a callback function to process the query results.
