When I execute the following code (a larger example, boiled down to the essentials)
var mongoose = require("mongoose");
var LocationSchema = new mongoose.Schema({
userName: String,
loc: {
'type': { type: String, enum: "Point", default: "Point" },
coordinates: { type: [Number] }
}
})
LocationSchema.index({ category: 1, loc: "2dsphere" });
var Location = mongoose.model("location", LocationSchema);
var mongoDB = 'mongodb://user1:test#ds042417.mlab.com:42417/locationdemo';
mongoose.Promise = global.Promise;
mongoose.connect(mongoDB, { useMongoClient: true });
var testUser = Location({
userName: "Tester",
loc: { coordinates: [12.44, 55.69] }
});
testUser.save(function (err) {
if (err) {
return console.log("UPPPPs: " + err);
}
console.log("User Saved, Try to find him:");
let query = Location.find({
loc:
{
$near:
{
$geometry:
{
type: "Point",
coordinates: [12.50, 55.71]
},
$maxDistance: 600000
}
}
})
query.exec(function (err, docs) {
if (err) {
return console.log("Err: " + err);
}
console.log("Found: " + JSON.stringify(docs))
})
});
I get this error:
Err: MongoError: error processing query: ns=locationdemo.locations
Tree: GEONEAR field=loc maxdist=600000 isNearSphere=0
Sort: {}
Proj: {}
planner returned error: unable to find index for $geoNear query
But the index is there (see line 10 of the code; it also shows up in mLab). What am I doing wrong?
You are breaking a rule of how you can use an index in general. While it is true that there is no restriction that the "2dsphere" key be the "first" property in a compound index, it is very important that your "queries" actually address the first property in order for the index to be selected.
This is covered in Prefixes from the manual on compound indexes. In excerpt:
{ "item": 1, "location": 1, "stock": 1 }
The index has the following index prefixes:
{ item: 1 }
{ item: 1, location: 1 }
For a compound index, MongoDB can use the index to support queries on the index prefixes. As such, MongoDB can use the index for queries on the following fields:
the item field,
the item field and the location field,
the item field and the location field and the stock field.
However, MongoDB cannot use the index to support queries that include the following fields since without the item field, none of the listed fields correspond to a prefix index:
the location field,
the stock field, or
the location and stock fields.
Because your query references "loc" first and does not include "category", the index does not get selected and MongoDB returns the error.
So in order to use the index you have defined, you need to actually query "category" as well. Amending your listing:
var mongoose = require("mongoose");
mongoose.set('debug',true);
var LocationSchema = new mongoose.Schema({
userName: String,
category: Number,
loc: {
'type': { type: String, enum: "Point", default: "Point" },
coordinates: { type: [Number] }
}
})
//LocationSchema.index({ loc: "2dsphere", category: 1 },{ "background": false });
LocationSchema.index({ category: 1, loc: "2dsphere" });
var Location = mongoose.model("location", LocationSchema);
var mongoDB = 'mongodb://localhost/test';
mongoose.Promise = global.Promise;
mongoose.connect(mongoDB, { useMongoClient: true });
var testUser = Location({
userName: "Tester",
category: 1,
loc: { coordinates: [12.44, 55.69] }
});
testUser.save(function (err) {
if (err) {
return console.log("UPPPPs: " + err);
}
console.log("User Saved, Try to find him:");
let query = Location.find({
category: 1,
loc:
{
$near:
{
$geometry:
{
type: "Point",
coordinates: [12.50, 55.71]
},
$maxDistance: 600000
}
}
})
query.exec(function (err, docs) {
if (err) {
return console.log("Err: " + err);
}
console.log("Found: " + JSON.stringify(docs))
})
});
As long as we include "category" everything is fine:
User Saved, Try to find him:
Mongoose: locations.find({ loc: { '$near': { '$geometry': { type: 'Point', coordinates: [ 12.5, 55.71 ] }, '$maxDistance': 600000 } }, category: 1 }, { fields: {} })
Found: [{"_id":"59f8f87554900a4e555d4e22","userName":"Tester","category":1,"__v":0,"loc":{"coordinates":[12.44,55.69],"type":"Point"}},{"_id":"59f8fabf50fcf54fc3dd01f6","userName":"Tester","category":1,"__v":0,"loc":{"coordinates":[12.44,55.69],"type":"Point"}}]
The alternate case is to simply "prefix" the index with the location, making sure to drop previous indexes (or the collection) first:
LocationSchema.index({ loc: "2dsphere", category: 1 },{ "background": false });
You probably should also be in the habit of setting "background": false, as shown above; otherwise you start running into race conditions in unit tests where the index has not finished being created before the test code attempts to use it.
My first solution to this problem was to create the index via the mLab web interface, which worked like a charm.
I have tried the solution suggested by Neil, but it still failed. The detailed instructions about indexes given by Neil did, however, point me toward the solution to the problem.
It was a timing problem (which you do not always see if you run the database locally), related to what my test code did:
It created the index, created a Location document (which, the first time, also creates the collection), and then, in the callback provided by save, tried to find the user. It seems that the index was not yet created at that point, which is what gave the error.
If I delay the find call by a second using setTimeout, it works fine.
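For illustration, a minimal sketch of that workaround applied to the original listing; the one-second delay is just a guess at how long the index build takes, and the commented alternative uses the 'index' event that mongoose models emit once their index builds finish:
testUser.save(function (err) {
    if (err) {
        return console.log("UPPPPs: " + err);
    }
    console.log("User Saved, Try to find him:");

    // Workaround: give the index build a moment to finish before querying.
    setTimeout(function () {
        Location.find({
            loc: {
                $near: {
                    $geometry: { type: "Point", coordinates: [12.50, 55.71] },
                    $maxDistance: 600000
                }
            }
        }).exec(function (err, docs) {
            if (err) {
                return console.log("Err: " + err);
            }
            console.log("Found: " + JSON.stringify(docs));
        });
    }, 1000);

    // Alternative (no guessed delay): mongoose models emit an 'index' event
    // once their index builds complete (or fail).
    // Location.on('index', function (err) { /* safe to run $near queries here */ });
});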
But still, thanks to Neil for valuable information about the right way of using indexes (background) :-)
I have a unique index like this
code: {
type: String,
index: {
unique: true,
partialFilterExpression: {
code: { $type: 'string' }
}
},
default: null
},
state: { type: Number, default: 0 },
but when the state is 2 (archived) I want to keep the code while still being able to reuse it, so it cannot be unique when state is 2.
Is there any way I could accomplish this?
This is possible, though it's through a workaround documented here: https://jira.mongodb.org/browse/SERVER-25023.
In MongoDB 4.7 you will be able to apply different index options to the same field, but for now you can add a non-existent field to separate the two indexes.
Here's an example using the work around.
const mongoose = require('mongoose');

(async () => {
const ItemSchema = mongoose.Schema({
code: {
type: String,
default: null
},
state: {
type: Number,
default: 0,
},
});
// Define a unique index for active items
ItemSchema.index({code: 1}, {
name: 'code_1_unique',
partialFilterExpression: {
$and: [
{code: {$type: 'string'}},
{state: {$eq: 0}}
]
},
unique: true
})
// Define a non-unique index for non-active items
ItemSchema.index({code: 1, nonExistantField: 1}, {
name: 'code_1_nonunique',
partialFilterExpression: {
$and: [
{code: {$type: 'string'}},
{state: {$eq: 2}}
]
},
})
const Item = mongoose.model('Item', ItemSchema)
await mongoose.connect('mongodb://localhost:27017/so-unique-compound-indexes')
// Remove existing documents so the test runs correctly
await Item.deleteMany({})
// Successfully create an item
console.log('\nCreating a unique item')
const itemA = await Item.create({code: 'abc'});
// Throws error when trying to create with the same code
await Item.create({code: 'abc'})
.catch(err => {console.log('\nThrowing a duplicate error when creating with the same code')})
// Change the active code
console.log('\nChanging item state to 2')
itemA.state = 2;
await itemA.save();
// Successfully creates a new doc with the same code
await Item.create({code: 'abc'})
.then(() => console.log('\nSuccessfully created a new doc with the same code'))
.catch(() => console.log('\nThrowing a duplicate error'));
// Throws error when trying to create with the same code
Item.create({code: 'abc'})
.catch(err => {console.log('\nThrowing a duplicate error when creating with the same code again')})
})();
This is not possible using indexes. Even if you use a compound index on code and state, there will still be a case like this:
new document
{
code: 'abc',
state: 0
}
archived document
{
code: 'abc',
state: 2
}
Now, although you have the same code, you will not be able to archive the new document or unarchive the archived document.
You can do something like this:
const checkCode = await this.Model.findOne({ code: 'abc', state: 0 })
if (checkCode) {
    throw new Error('Code has to be unique')
} else {
    // ... do something
}
I have a problem and I can't find a solution. I have some Mongo schemas where I store geolocation data from users. The mobile phone sends me longitude and latitude every 5 minutes. This API is working perfectly.
Mongo-Schema looks like:
// Importing Node packages required for schema
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
//= ===============================
// User Schema
//= ===============================
const GeolocationSchema = new Schema({
loc: {
type: { type: String },
coordinates: { type: [Number], index: '2dsphere' }
},
user: { type: Schema.Types.ObjectId, ref: 'User' }
},
{
timestamps: true
});
module.exports = mongoose.model('Geolocation', GeolocationSchema);
Now I want to find nearby users whose "updatedAt" timestamp is no more than 5 minutes old. That means one or more users who have been within a distance of e.g. 500 m during the last 5 minutes should be a match. For this I use a Mongo aggregate, and I want to iterate over the callback result and extract the user._id from each document to build a match.
This is what I tried:
const Geolocation = require('../models/geolocation'),
    User = require('../models/user'),
    config = require('../config/main');
exports.setGeolocation = function (req, res, next) {
// Only return one message from each conversation to display as snippet
console.log(req.user._id);
var geoUpdate = Geolocation.findOneAndUpdate( { user: req.user._id },
{ loc: {
type: 'Point',
coordinates: req.body.coordinates.split(',').map(Number)
},
user: req.user._id
},
{upsert: true, new: true, runValidators: true}, // options
function (err, doc) { // callback
if (err) {
console.log(err);
}
});
// create dates for aggregate query
var toDate = new Date( (new Date()).getTime());
var fromDate = new Date( (new Date()).getTime() - 5000 * 60 );
var match = Geolocation.aggregate([
{
$geoNear: {
near: {
type: "Point",
coordinates: req.body.coordinates.split(',').map(Number)
},
distanceField: "dist.calculated",
maxDistance: 500,
includeLocs: "dist.location",
uniqueDocs: true,
query: { user: {$ne: req.user._id } , updatedAt: { $gte: fromDate,$lte: toDate }},
num: 5,
spherical: true
}
}], function (err, doc){
//here I'm running into trouble correctly parsing doc
var str = JSON.stringify(doc);
var newString = str.substring(1, str.length-1);
var response = JSON.parse(newString);
console.log(response.user);
});
res.sendStatus(200);
};
As you can see, I'm running into trouble parsing the "doc" callback result in order to iterate over the documents. If I try to parse it as JSON I get a token error at position 1. If I have more than 2 results, I get an error at position 288.
That's why I tried to stringify and re-parse the "doc", but this is not working correctly.
Maybe someone could help me with a solution. I'm not familiar with Mongo functions because I'm just starting out with them; maybe there is a better solution, but I can't find another way to calculate geoNear and then iterate over the results.
Thanks to all who can help...
I have two Schema defined as below:
var WorksnapsTimeEntry = BaseSchema.extend({
student: {
type: Schema.ObjectId,
ref: 'Student'
},
timeEntries: {
type: Object
}
});
var StudentSchema = BaseSchema.extend({
firstName: {
type: String,
trim: true,
default: ''
// validate: [validateLocalStrategyProperty, 'Please fill in your first name']
},
lastName: {
type: String,
trim: true,
default: ''
// validate: [validateLocalStrategyProperty, 'Please fill in your last name']
},
displayName: {
type: String,
trim: true
},
municipality: {
type: String
}
});
And I would like to loop through each student and show its time entries. So far I have this code, which is obviously not right, as I still don't know how to join the WorksnapsTimeEntry collection.
Student.find({ status: 'student' })
.populate('student')
.exec(function (err, students) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
}
_.forEach(students, function (student) {
// show student with his time entries....
});
res.json(students);
});
Does anyone know how I can achieve this?
As of version 3.2, you can use $lookup in the aggregation pipeline to perform a left outer join.
Student.aggregate([{
$lookup: {
from: "worksnapsTimeEntries", // collection name in db
localField: "_id",
foreignField: "student",
as: "worksnapsTimeEntries"
}
}]).exec(function(err, students) {
// students contain WorksnapsTimeEntries
});
You don't want .populate() here; instead you want two queries, where the first matches the Student objects to get the _id values, and the second uses $in to match the respective WorksnapsTimeEntry items for those "students".
Using async.waterfall just to avoid some indentation creep:
var async = require('async');

async.waterfall(
[
function(callback) {
Student.find({ "status": "student" },{ "_id": 1 },callback);
},
function(students,callback) {
WorksnapsTimeEntry.find({
"student": { "$in": students.map(function(el) {
return el._id
}) }
}, callback);
}
],
function(err,results) {
if (err) {
// do something
} else {
// results are the matching entries
}
}
)
If you really must, then you can .populate("student") on the second query to get populated items from the other table.
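For example, a minimal sketch of that variation; studentIds here is assumed to be the array of _id values produced by the first query in the waterfall above:
WorksnapsTimeEntry.find({ "student": { "$in": studentIds } })
    .populate("student")
    .exec(function (err, entries) {
        if (err) {
            // handle the error
        }
        // each entry.student is now the matching Student document instead of just an ObjectId
    });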
The reverse case is to query on WorksnapsTimeEntry and return "everything", then filter out any null results from .populate() with a "match" query option:
WorksnapsTimeEntry.find().populate({
"path": "student",
"match": { "status": "student" }
}).exec(function(err,entries) {
// Now client side filter un-matched results
entries = entries.filter(function(entry) {
return entry.student != null;
});
// Anything not populated by the query condition is now removed
});
So that is not a desirable action, since the "database" is not filtering what is likely the bulk of results.
Unless you have a good reason not to do so, you probably "should" be "embedding" the data instead. That way, properties like "status" are already available on the collection and additional queries are not required.
If you are using a NoSQL solution like MongoDB you should be embracing its concepts, rather than sticking to relational design principles. If you are consistently modelling relationally, then you might as well use a relational database, since you won't be getting any benefit from a solution that has other ways to handle that.
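As a rough sketch only (not the original poster's schema), an embedded design could look something like this; the fields inside timeEntries are illustrative assumptions:
// Time entries are embedded in the student document, so one query returns everything
// and fields like "status" need no join.
var StudentSchema = BaseSchema.extend({
    firstName: { type: String, trim: true, default: '' },
    lastName: { type: String, trim: true, default: '' },
    displayName: { type: String, trim: true },
    municipality: { type: String },
    status: { type: String },
    timeEntries: [{
        recordedAt: { type: Date, default: Date.now }, // illustrative field
        entry: { type: Object }                        // illustrative field
    }]
});

// A single query now covers the original requirement:
Student.find({ status: 'student' }, function (err, students) {
    // students[i].timeEntries is already on the document itself
});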
This is late, but it will help many developers.
Verified with
"mongodb": "^3.6.2",
"mongoose": "^5.10.8",
Join two collections in mongoose
ProductModel.find({})
    .populate('category', 'name') // select only the category name from the joined collection
    //.populate('category')       // select all category fields
    .skip(0).limit(20)
    //.sort({ createdAt: '-1' })
    .exec((err, records) => {
        if (records) {
            // return records
        } else {
            // throw new Error('xyz')
        }
    })
ProductModel Schema
const { Schema, model } = require('mongoose');

const CustomSchema = new Schema({
    category: {
        type: Schema.ObjectId,
        ref: 'Category'
    },
    ...
}, { timestamps: true, collection: 'products' });
module.exports = model('Product', CustomSchema)
Category model schema
const CustomSchema = new Schema({
name: { type: String, required:true },
...
}, {collection: 'categories'});
module.exports = model('Category',CustomSchema)
I'm trying to update a document that has a specific ID with the current date/time, but the code below does not result in the DB being updated, and there are no errors. Any help would be great, thanks.
Schema:
var MerchantSchema = new Schema({
merchant_id: Number,
merchant_aw_id: Number,
merchant_name: String,
merchant_url: String,
merchant_image: String,
product_feed: String,
product_feed_updated: Date,
created_at: {type: Date, default: Date.now},
updated_at: {type: Date, default: Date.now}
});
Update Query:
updateMerchantLastProductUpdate: function (mID) {
now = new Date();
var query = { "merchant_aw_id" : mID };
Merchants.update(query, { "product_feed_updated": now }, function (err) {
if (err) return console.error(err);
})
}
Route
app.get('/queries', function (req, res) {
queries.updateMerchantLastProductUpdate("2926");
});
Example document
{
"_id": {
"$oid": "55997638e4b01f0391cb99aa"
},
"merchant_id": "0003",
"merchant_aw_id": "2926",
"merchant_name": "Multipower",
"merchant_url": "www.multipower.com/uk/",
"merchant_image": "",
"product_feed": "aw",
"product_feed_updated": "",
"created_at": "",
"updated_at": ""
}
The merchant_aw_id field in your mongoose schema is expecting a number, so you need to parse the string to an integer using the parseInt() method in your query. You also need the $set update operator, which replaces the value of a field with the specified value, to update your document, together with the { multi: true } option, which, if set to true, updates multiple documents that meet the query criteria; if set to false, it updates one document. The default value is false:
updateMerchantLastProductUpdate: function (mID) {
var now = new Date(),
query = { "merchant_aw_id" : parseInt(mID) },
update = {
"$set": { "product_feed_updated": now }
},
options = { "multi": true };
Merchants.update(query, update, options, function (err) {
if (err) return console.error(err);
})
}
My error was caused by my model having the ID I was looking for as a Number, while my data in MongoDB was a String.
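A minimal sketch of one way to resolve that mismatch, assuming the stored values should stay as strings (as in the example document above, where merchant_aw_id is "2926"):
// Make the schema type match what is actually stored in MongoDB
var MerchantSchema = new Schema({
    // ...other fields as above...
    merchant_aw_id: String, // was Number; the stored value is the string "2926"
    product_feed_updated: Date
});

// The update then matches without any casting of mID
Merchants.update(
    { "merchant_aw_id": mID },
    { "$set": { "product_feed_updated": new Date() } },
    function (err) {
        if (err) return console.error(err);
    }
);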
I am using mongoose, and so far the query I use gets me all of the critiques based on a docId. Now I would like to group this result by distinct editors; the catch is that my editor field is an object (a reference).
This is what my critique query looks like:
Critique.find({docId:req.params.docId}).populate('editor', 'name username').exec(function(err, critiques){
if(err){
console.error("Cannot find critiques with docId: " + critiques.docId);
}
console.log(critiques);
res.jsonp(critiques);
});
This is my model I am querying:
var CritiqueSchema = new Schema({
className : String,
content: String,
eleId: Number,
type: String,
comments: String,
isAccepted: Boolean,
classes: String,
docId:{
type: Schema.ObjectId,
ref: 'Composition'
},
created: {
type: Date,
default: Date.now
},
editor: {
type: Schema.ObjectId,
ref: 'User'
},
});
UPDATE new query:
Critique.aggregate(
[ {$match : {docId : mongoose.Types.ObjectId(req.params.docId)}},
{$group : { _id : "$editor", critiques: { $push: "$$ROOT" } } }
]).exec(function(error, result){
if(!error)console.log(result);
else console.log(error);
});
What you need is $group in the aggregation framework. But aggregation and population don't go along, so you have two options: populate and group the results yourself by writing a loop, or use $group to group them and then query each editor manually. The second is better, as there will be no duplication in the editor queries, whereas with population there would be significant duplication going on.
Critique.aggregate(
[{
$match:
{
docId: ObjectId(req.params.docid)
}
},
{ $group : { _id : "$editor", critiques: { $push: "$$ROOT" } } }
],
function(err,result){
if(!err){
/* result will be of the form:
[{_id:<an editor's objectid>,critiques:[{<critique1 document>},{<critique2 document>}...]}...]
*/
//you will have to manually query for each distinct editor(result[i]._id) which sucks
//because the call will be asynchronous in the loop and you can't send your response without using async library
//another option would be to use the $in operator on an array of the distinct critiques:
var editors = result.map(function(x) { return x._id } );
User.find({_id:{$in:editors}},{'username':1},function(err,editorDocs){
var editor_ids=editorDocs.map(function(x){return x._id.toString()})
var index;
for(var i=0;i<result.length;i++){
index=editor_ids.indexOf(result[i]._id.toString());
result[i].editor=editorDocs[index].username;
}
//result is your final result. In the editor field of each object you will have the username of the editor
})
}
})
Check the docs for $$ROOT.