Here is one document from my mongoose collection:
{ _id: 55ae7be99e772a7c025a0a7b,
id: 'foo',
isBoolean: true,
kind: 'bar',
values:
[
{ y: 0,
x: Wed Aug 26 2015 11:12:56 GMT+0200 (Mitteleuropäische Sommerzeit),
_id: 55ae7ae05596dd740eb8a204 },
{ y: 0,
x: Wed Aug 26 2015 11:12:57 GMT+0200 (Mitteleuropäische Sommerzeit),
_id: 55ae7ae05596dd740eb8a203 },
{ y: 1,
x: Wed Aug 26 2015 11:12:56 GMT+0200 (Mitteleuropäische Sommerzeit),
_id: 55ae7be91fa1511c1795c5ae }
]
}
So, I need to find all documents that have a specific values.x. After that I need to return each document with all its fields and only the matching values elements.
When I try it with
.find({'values.x': mTime1})
.select({
'_id' : 1 ,
'id' : 1 ,
'kind' : 1 ,
'isBoolean' : 1 ,
'values' : {$elemMatch: {x: mTime1}}
})
I receive just the first found value:
{ ...
values:
[ { exceeds: null,
y: 0,
x: Wed Aug 26 2015 11:12:56 GMT+0200 (Mitteleuropäische Sommerzeit),
_id: 55ae7d86870b92b8056bed4c } ]
}
The next version,
.aggregate(
    {"$unwind" : "$values"},
    {"$match" : {"values.x": mTime1}},
    {"$group" : {
        '_id' : '$_id',
        'values' : {$addToSet: "$values"}
    }}
);
returns all the matched values but drops the other fields...
My goal is:
{ _id: 55ae7be99e772a7c025a0a7b,
id: 'foo',
isBoolean: true,
kind: 'bar',
values:
[
{ y: 0,
x: Wed Aug 26 2015 11:12:56 GMT+0200 (Mitteleuropäische Sommerzeit),
_id: 55ae7ae05596dd740eb8a204 },
{ y: 1,
x: Wed Aug 26 2015 11:12:56 GMT+0200 (Mitteleuropäische Sommerzeit),
_id: 55ae7be91fa1511c1795c5ae }
]
}
Do you have any idea how to achieve that with Mongoose? :)
Update:
Thanks to tsturzl, I solved it with the following function (without changing the model):
self.aggregate(
{'$unwind' : '$values'},
{'$match' : { 'values.x': mTime1} },
{'$group' : {
'_id' : '$_id',
'values' : {$push: '$values'}
}}
)
.exec(
function(err, results) {
if(err) return done(err);
var values = {}; // hashMap to group values
results.forEach(function(item) {
if(item.values) // prevent empty results
values[item._id] = item.values;
});
self.find({_id:{$in: _.keys(values)}})
.exec(function(err, items) {
if(err) return done(err);
var results = items.map(function(item) {
item.values = values[item._id];
return item;
});
done(err, results); // callback results
});
});
The problem with using $elemMatch in a projection is that it can only project a single element. Much like indexing into an array with arr[1], $elemMatch locates the index of the first matching item and projects only the element at that index, so you can only retrieve one sub-document this way.
You can use an aggregation similar to this:
[
{$match: {'values.x': mTime1}}, //match before to reduce size of unwound
{$unwind: '$values'},
{$match: {'values.x': mTime1}},
{$group: {
_id: '$_id',
id: {$first: '$id'},
kind: {$first: '$kind'},
isBoolean: {$first: '$isBoolean'},
values: {$push: '$values'}
}}
]
I've tested this to work fine locally on an array of subdocuments.
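For reference, here is roughly how that pipeline would be run through Mongoose. This is only a sketch; MyModel, mTime1 and done are assumed names rather than anything from the original code:
MyModel.aggregate([
    { $match: { 'values.x': mTime1 } },   // narrow down the documents first
    { $unwind: '$values' },
    { $match: { 'values.x': mTime1 } },   // drop the non-matching array elements
    { $group: {
        _id: '$_id',
        id: { $first: '$id' },
        kind: { $first: '$kind' },
        isBoolean: { $first: '$isBoolean' },
        values: { $push: '$values' }
    }}
]).exec(function(err, docs) {
    if (err) return done(err);
    done(null, docs); // each doc keeps its fields plus only the matching values
});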
It's possible that your approach would be best served by restructuring the data. You could remodel it so that your values live in their own collection and reference the parent by _id; in this case I would store the reference in the values collection.
Remove the values field from this collection:
{ _id: 55ae7be99e772a7c025a0a7b,
id: 'foo',
isBoolean: true,
kind: 'bar'
}
Values Model:
{
y: Number,
x: Date,
parentId: {type: ObjectId, ref: "myColl"} //make sure you require ObjectId and rename the reference
}
You can then do something like this
ValuesModel.find({
    x: mTime1
}).exec(function(err, results) {
    var ids = {};     // hashMap to group values by parentId
    var idsArr = [];  // array of parent ids
    results.forEach(function(item) {
        if(!ids.hasOwnProperty(item.parentId.toString())) {
            ids[item.parentId.toString()] = [];
            idsArr.push(item.parentId);
        }
        ids[item.parentId.toString()].push(item);
    });
    myColl.find({_id: {$in: idsArr}})
        .exec(function(err, items) {
            var results = items.map(function(item) {
                item.values = ids[item._id.toString()];
                return item;
            });
            done(results); // callback results
        });
});
This grabs all the values you queried for, groups them in a hashMap, and pushes each distinct parentId to an array. I then query for that array of parentIds, look each parent up in the hashMap by its id, and attach the grouped values to a new .values field on the parent document. This prevents you from having to use aggregations, which aren't as scalable, and lets you query the values collection easily. If you only want to find a single value you can simply use Mongoose's populate method. The downside of this approach is that you do more work in your code and have two round trips; however, it should still be more efficient than an aggregation.
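For the single-value case mentioned above, a minimal populate sketch could look like this (it assumes the ValuesModel and parentId names from the proposed model, and that mTime1 and a done callback are in scope):
ValuesModel.findOne({ x: mTime1 })
    .populate('parentId') // swaps the stored ObjectId for the full parent document
    .exec(function(err, value) {
        if (err) return done(err);
        done(null, value);
    });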
The two-step lookup can be turned into a reusable method to simplify your code if you query into values a lot:
function queryValues(query, done) {
    ValuesModel.find(query).exec(function(err, results) {
        if(err) return done(err);
        var ids = {};     // hashMap to group values by parentId
        var idsArr = [];  // array of parent ids
        results.forEach(function(item) {
            if(!ids.hasOwnProperty(item.parentId.toString())) {
                ids[item.parentId.toString()] = [];
                idsArr.push(item.parentId);
            }
            ids[item.parentId.toString()].push(item);
        });
        myColl.find({_id: {$in: idsArr}})
            .exec(function(err, items) {
                if(err) return done(err);
                var results = items.map(function(item) {
                    item.values = ids[item._id.toString()];
                    return item;
                });
                done(null, results); // callback results
            });
    });
}
Then you can just call queryValues({x: mTime1}, function(err, results){...}); you can pass any query you want, and the function will handle populating the parent documents without fetching duplicate data, for maximum efficiency.
One thing I might also recommend is that you define this method as a schema static method in your model definition, so you can tuck this code away and not have to worry about it. See: http://mongoosejs.com/docs/api.html#schema_Schema-static
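As a rough sketch of that suggestion (the schema and model names here are assumptions based on the code above, not anything defined in the question), the helper could be attached to the parent schema as a static before the model is compiled:
// In the file where the parent schema is defined.
myCollSchema.statics.queryValues = function(valueQuery, done) {
    var Parent = this; // `this` is the model the static is called on
    ValuesModel.find(valueQuery).exec(function(err, values) {
        if (err) return done(err);
        var byParent = {};
        values.forEach(function(v) {
            (byParent[v.parentId] = byParent[v.parentId] || []).push(v);
        });
        Parent.find({ _id: { $in: Object.keys(byParent) } }).exec(function(err, parents) {
            if (err) return done(err);
            done(null, parents.map(function(p) {
                p.values = byParent[p._id];
                return p;
            }));
        });
    });
};

// Usage, once the model is compiled:
myColl.queryValues({ x: mTime1 }, function(err, results) { /* ... */ });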
Related
I have a collection of documents called 'company'.
Company 1:
{
_id: '1',
data:[
{_id:'11', value: 'emp11'},
{_id:'12', value: 'emp12'}
]
}
Company 2:
{
_id: '2',
data:[
{_id:'21', value: 'emp21'},
{_id:'22', value: 'emp22'}
]
}
Now I want to update the value 'emp11' to 'emp99'.
I'm following this approach:
companyModel.findById('1', function(err, company) {
return company.data.update(
{_id: '11'},
{$set: {value: 'emp99'}}
);
});
I'm able to get the company, but after that it shows an error:
company.data.update is not a function
Please suggest a possible solution.
companyModel.update(
{ "_id" : :"1", "data._id": "11" },
{ "$set": { "data.$.value": "emp99" }},
function(err, company) {
console.log(company)
})
There is no need to use findById; you can use this directly.
In a MongoDB update you need to match both the document and the array field you want to update.
Matching the array field is essential, because that match is what locates the array position the $ positional operator refers to.
However, you can also do it as follows:
companyModel.findById('1', function(err, company) {
    for(var i = 0; i < company.data.length; i++)
        if(company.data[i]._id === "11")
            break;
    if(i < company.data.length)
        company.data[i].value = 'emp99';
    company.save();
});
db.company.update({
_id: "1",
data: {
$elemMatch: {
value: 'emp11'
}
}
}, {
$set: {
'data.$.value': 'emp99'
}
})
I am trying to have my API take an id as input and return results from MongoDB according to the id given.
My example collection looks like this:
{ _id: 1, count: 5 }
{ _id: 2, count: 10 }
My Mongoose schema looks like this:
var tripSchema = new Schema({
_id: Number,
count: Number
},
{collection: 'test'}
);
And I created another file for this route, which is where I think the error lies:
module.exports = function(app) {
app.get('/trips/:id', function(req,res) {
console.log(req.params.id); // Does print the ID correctly
var aggr = Trip.aggregate([
{ "$match": {
"_id": {
"$eq": req.params.id
}
}
},
{
"$project": {
"_id" : 1,
"count": "$count"
}
}
])
aggr.options = { allowDiskUse: true };
aggr.exec(function(err, stations){
if(err)
res.send(err);
res.json(stations);
});
});
}
Now, using Postman, I try to GET /trips/72, but this results in an empty array [], even though there is an entry in the DB for _id 72 with a corresponding count, just like above. My question is whether this is the correct approach and what I am doing wrong here.
Update:
There seems to be something wrong with either the $match stage or the whole aggregation. I opted for Mongoose's findById instead, and with that it works now:
Trip.findById(req.params.id, function (err, doc){
res.json(doc);
});
req.params.id returns your id as a String, while I think in the aggregation's $match stage you need to pass it as an ObjectId. So you should convert it to an ObjectId:
$match: { _id: ObjectId(req.params.id) }
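As an illustration only, here is how that cast could look inside the route from the question. Note that an aggregation pipeline bypasses Mongoose's automatic casting, so the matched value must be in the type actually stored in _id (an ObjectId as suggested here, or Number(req.params.id) if the schema really declares _id as a Number):
var mongoose = require('mongoose');

app.get('/trips/:id', function(req, res) {
    Trip.aggregate([
        // Cast the string route parameter before matching.
        { $match: { _id: new mongoose.Types.ObjectId(req.params.id) } },
        { $project: { _id: 1, count: 1 } }
    ]).exec(function(err, stations) {
        if (err) return res.send(err);
        res.json(stations);
    });
});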
I'm trying to tally a field in a sub-array of a collection, and I want to do it for each month. I had this working in Mongo 2.6, but recently upgrading to 3.0.12 has caused some erroneous results in the query. It almost seems as though the sum is not getting reset between the separate queries.
So currently I am running twelve queries asynchronously and waiting for them all to complete. Again, this was working in 2.6. My document structure is like this:
{
"_id" : ObjectId("<id>"),
"userId" : ObjectId("<id>"),
"accountId" : "1234567890",
"transactionId" : "22222222333",
"date" : ISODate("2016-09-08T04:00:00.000Z"),
"description" : "SUPERCOOL STORE",
"amount" : -123.45,
"allocations" : [
{
"jarId" : ObjectId("566faf1937af9ae11ef84bc4"),
"amount" : -100
},
{
"jarId" : ObjectId("566faf1937af9ae11ef84bc1"),
"amount" : -23.45
}
],
"reconciled" : true,
"tally" : true,
"split" : true
}
And this is my code:
var getTransactionAggregate = function (userId, month) {
var deferred = q.defer();
var nextMonth = moment(month).add(1, 'M');
Transactions.aggregate([
{$match: {
userId: userId,
tally: true,
date: {
$gte: month.toDate(),
$lt: nextMonth.toDate()
}
}},
{$project: { _id: 0, allocations: 1 } },
{$unwind: '$allocations'},
{$group: {_id: '$allocations.jarId', total: {$sum: '$allocations.amount'}}}
]).then(function(data) {
deferred.resolve(data);
})
.catch(function (err) {
logger.error(err);
deferred.reject(err);
});
return deferred.promise;
};
summaryReport = function (req, res) {
Jars.aggregate([
{ $match: {userId: new ObjectID(req.user._id)} },
{ $sort: {name: 1} }
])
.then(function (jars) {
var month = moment(moment().format('YYYY-MM') + '-01T00:00:00.000');
var i, j;
var promises = [];
for (i = 0; i < 12; i++) {
promises.push(getTransactionAggregate(new ObjectID(req.user._id), month));
month.add(-1, 'M');
}
q.allSettled(promises).then(function (data) {
for (i = 0; i < data.length; i++) {
// data[i].value here is returned incorrectly from the queries
........
});
});
};
So essentially what is happening is that the first month includes the correct data, but it appears that the sum continues to include data from all the previous months. If I break the query down, the correct transactions are returned in the date range, and the unwind is working as well; it is just the $group step that seems to be the culprit. The same logic worked fine before I upgraded Mongo to 3.0.12.
Is there a better way to execute this query in one shot or is doing the twelve queries the best way?
It seems to be a problem during the $match phase. Your date field has two expressions, and in this scenario you need to use the $and operator, as specified in the docs:
MongoDB provides an implicit AND operation when specifying a comma-separated list of expressions. Using an explicit AND with the $and operator is necessary when the same field or operator has to be specified in multiple expressions.
So it becomes:
{$match: {
userId: userId,
tally: true,
$and: [
{ date: { $gte : month.toDate() } },
{ date: { $lt: nextMonth.toDate() } }
]
}}
It ended up being related to the $match, although not because of the $and case mentioned in the answer above. It had to do with the date matching: I'm guessing a moment object's .toDate() does not return the same kind of Date object as new Date(), although I thought they were the same.
Anyway, the working logic looks like this:
Transactions.aggregate([
{$match: {
userId: userId,
tally: true,
$and: [
{ date: { $gt : new Date(month.toISOString()) } },
{ date: { $lt: new Date(nextMonth.toISOString()) } }
]
}},
{$unwind: '$allocations'},
{$group: {_id: '$allocations.jarId', total: {$sum: '$allocations.amount'}}}
])
Credit to "Date query with ISODate in mongodb doesn't seem to work" and @Leif for pointing me in the right direction.
Here is a schema that I am working on.
var testSchema = mongoose.Schema({
userCreated : {
type : mongoose.Schema.Types.ObjectId,
ref : "User"
},
points : {type: Number, default: 0},
numVotes : {type: Number, default: 0},
createdAt : Date,
updatedAt : Date,
}, { timestamps : true });
Now, I am trying to write a function that will increment two fields on this document (points and numVotes), as well as an additional points field that exists on the user schema.
Here is my attempt:
testSchema.statics.incrementTest = function(id, ...) {
this.findByIdAndUpdate(id, {$inc : {
points : 5,
numVotes : 1,
'userCreated.points' : 5
}}).exec();
}
Now, this code that I have written does not work. However, when I comment out the 'userCreated.points' : 5 line, the other two fields do increment as expected. My question is, what is the best way using mongoose to update the fields on a document and the fields on a subdocument at the same time?
The data here is contained in different collections, so no single update statement is able to increment counters in both at the same time.
In order to get a consistent view you are going to need to "chain" your update statements and use the return results of each to build the response.
Depending on your needs, you can either use a Promise:
testSchema.statics.incrementTest = function(id) {
var self = this;
return new Promise(function(resolve,reject) {
self.findByIdAndUpdate(
id,
{
"$inc": {
"points": 5,
"numVotes": 1
}
},
{ "new": true }
).then(function(test) {
var userModel = test.schema.path("userCreated").options.ref;
mongoose.model(userModel).findByIdAndUpdate(
test.userCreated,
{ "$inc": { "points": 5 } },
{ "new": true }
).then(function(user) {
test.userCreated = user;
resolve(test);
})
}).catch(reject)
})
};
Which you can then invoke on your model:
Test.incrementTest("56fe279d363ce91765d9e39e").then(function(test) {
console.log(JSON.stringify(test,undefined,2));
}).catch(function(err) {
throw err;
})
Or you can use async.waterfall from the async library if that suits you better:
testSchema.statics.incrementTest = function(id,callback) {
var self = this;
async.waterfall(
[
function(callback) {
self.findByIdAndUpdate(
id,
{
"$inc": {
"points": 5,
"numVotes": 1
}
},
{ "new": true },
callback
)
},
function(test, callback) {
var userModel = test.schema.path("userCreated").options.ref;
mongoose.model(userModel).findByIdAndUpdate(
test.userCreated,
{ "$inc": { "points": 5 } },
{ "new": true },
function(err,user) {
if ( typeof(user) !== "undefined" )
test.userCreated = user;
callback(err,test);
}
);
}
],
callback
);
};
Which has a similar usage:
Test.incrementTest("56fe279d363ce91765d9e39e",function(err,test) {
if (err) throw err;
console.log(JSON.stringify(test,undefined,2));
})
Both should give you back a result that shows the incremented data in both objects, one from each collection:
{ points: 5,
numVotes: 1,
__v: 0,
userCreated: { points: 5, __v: 0, _id: 56ff1aa6dba6d13e798fc894 },
createdAt: Sat Apr 02 2016 12:04:38 GMT+1100 (AEDT),
updatedAt: Sat Apr 02 2016 12:04:38 GMT+1100 (AEDT),
_id: 56fe279d363ce91765d9e39e }
Is there a way to update values in an object?
{
_id: 1,
name: 'John Smith',
items: [{
id: 1,
name: 'item 1',
value: 'one'
},{
id: 2,
name: 'item 2',
value: 'two'
}]
}
Let's say I want to update the name and value fields for the item where id = 2.
I have tried the following with Mongoose:
var update = {name: 'updated item2', value: 'two updated'};
Person.update({'items.id': 2}, {'$set': {'items.$': update}}, function(err) { ...
The problem with this approach is that it updates/sets the entire object, so in this case I lose the id field.
Is there a better way in mongoose to set certain values in an array but leave other values alone?
I have also queried for just the Person:
Person.find({...}, function(err, person) {
person.items ..... // I might be able to search through all the items here and find item with id 2 then update the values I want and call person.save().
});
You're close; you should use dot notation together with the $ positional operator to do that:
Person.update({'items.id': 2}, {'$set': {
'items.$.name': 'updated item2',
'items.$.value': 'two updated'
}}, function(err) { ...
model.update(
{ _id: 1, "items.id": "2" },
{
$set: {
"items.$.name": "yourValue",
"items.$.value": "yourvalue",
}
}
)
MongoDB Document
There is a Mongoose way of doing it:
const itemId = 2;
const query = {
    'items._id': itemId
};

Person.findOne(query).then(doc => {
    const item = doc.items.id(itemId);
    item["name"] = "new name";
    item["value"] = "new value";
    doc.save();
    // send response to client
}).catch(err => {
    console.log('Oh! Dark');
});
There is one thing to remember: when you are searching for the object in the array on the basis of more than one condition, use $elemMatch:
Person.update(
{
_id: 5,
grades: { $elemMatch: { grade: { $lte: 90 }, mean: { $gt: 80 } } }
},
{ $set: { "grades.$.std" : 6 } }
)
Here are the docs.
For each document, the update operator $set can set multiple values, so rather than replacing the entire object in the items array, you can set the name and value fields of the object individually.
{'$set': {'items.$.name': update.name , 'items.$.value': update.value}}
Below is an example of how to update a value in the array of objects more dynamically:
Person.findOneAndUpdate({_id: id},
{
"$set": {[`items.$[outer].${propertyName}`]: value}
},
{
"arrayFilters": [{ "outer.id": itemId }]
},
function(err, response) {
...
})
Note that by doing it that way, you would be able to update even deeper levels of the nested array by adding additional arrayFilters and positional operator like so:
"$set": {[`items.$[outer].innerItems.$[inner].${propertyName}`]: value}
"arrayFilters":[{ "outer.id": itemId },{ "inner.id": innerItemId }]
More usage can be found in the official docs.
A cleaner solution using findOneAndUpdate:
await Person.findOneAndUpdate(
{ _id: id, 'items.id': 2 },
{
$set: {
'items.$.name': 'updated item2',
'items.$.value': 'two updated',
}
},
);
In Mongoose, we can update an array value using $set with dot (.) notation to target a specific element, in the following way:
db.collection.update({"_id": args._id, "viewData._id": widgetId}, {$set: {"viewData.$.widgetData": widgetDoc.widgetData}})
I tried other solutions, which worked fine, but their pitfall is that only fields which already exist would be updated; adding upsert to them would do nothing, so I came up with this:
Person.update(
    {'items.id': 2},
    {$set: {'items': ["item1", "item2", "item3", "item4"]}},
    {upsert: true}
)
I had similar issues. Here is the cleanest way to do it.
const personQuery = {
_id: 1
}
const itemID = 2;
Person.findOne(personQuery).then(item => {
const audioIndex = item.items.map(item => item.id).indexOf(itemID);
item.items[audioIndex].name = 'Name value';
item.save();
});
I found this solution using dot-object and it helped me:
import dot from "dot-object";
const user = await User.findByIdAndUpdate(id, { ...dot.dot(req.body) });
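For clarity, dot.dot() just flattens a nested object into dot-notation keys, so only the listed paths are written and sibling fields are left untouched. A small sketch with a hypothetical nested field:
import dot from "dot-object";

// { profile: { name: "new name" } }  ->  { "profile.name": "new name" }
const flat = dot.dot({ profile: { name: "new name" } });

const user = await User.findByIdAndUpdate(id, flat, { new: true });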
I needed to update an array element with dynamic key-value pairs.
By mapping the update object to new keys containing the $ positional operator, I no longer need to know the keys of the array element in advance, and can instead assemble a new update object on the fly.
update = {
name: "Andy",
newKey: "new value"
}
new_update = Object.fromEntries(
Object.entries(update).map(
([k, v], i) => ["my_array.$." + k, v]
)
)
console.log({
"$set": new_update
})
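A hedged sketch of applying the assembled object (the Person model is taken from the question; my_array and someId are placeholder names):
// Each mapped key targets the matched element of my_array via the positional $.
Person.updateOne(
    { 'my_array.id': someId }, // someId stands in for the element's id
    { $set: new_update },
    function(err) {
        if (err) return console.error(err);
    }
);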
In Mongoose we can also update a simple array, like this:
user.updateInfoByIndex(0, "test");
User.methods.updateInfoByIndex = function(index, info) {
    this.arrayField.set(index, info); // use .set() so Mongoose registers the change
    return this.save();
};
update(
{_id: 1, 'items.id': 2},
{'$set': {'items.$[]': update}},
{new: true})
Here is the doc about $[]: https://docs.mongodb.com/manual/reference/operator/update/positional-all/#up.S[]