Aggregate in MongoDB driver (npm) result is different from Robo 3T result - node.js

This is my aggregate, run in Robo 3T and in my Node app:
db.getCollection('roles').aggregate([
    { $match: { "_id" : ObjectId("63c8ad93eb3b2d975834a6d0") } },
    {
        $lookup: {
            from: 'modules',
            localField: 'moduleAccess.moduleId',
            foreignField: '_id',
            as: 'moduleData'
        }
    },
    {
        $unwind: '$moduleData'
    }
]).toArray()
My results in Robo 3T and Node.js are:
Robo 3T:
/* 1 */
[
{
"_id" : ObjectId("63c8ad93eb3b2d975834a6d0"),
"roleName" : "SuperUser",
"activeStatus" : "Active",
"moduleAccess" : [
{
"moduleId" : ObjectId("63ca80563eea1005982f82d4"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
},
{
"moduleId" : ObjectId("63ca80a13eea1005982f82f8"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
},
{
"moduleId" : ObjectId("63ce1de83eea1005982f8f4f"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
}
],
"moduleData" : {
"_id" : ObjectId("63ca80563eea1005982f82d4"),
"displayName" : "Module List",
"moduleName" : "moduleList",
"moduleIcon" : "fa fa-circle",
"moduleLevel" : 1,
"moduleParent" : ObjectId("63ca80a13eea1005982f82f8"),
"order" : 0,
"Active" : "Active"
}
},
{
"_id" : ObjectId("63c8ad93eb3b2d975834a6d0"),
"roleName" : "SuperUser",
"activeStatus" : "Active",
"moduleAccess" : [
{
"moduleId" : ObjectId("63ca80563eea1005982f82d4"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
},
{
"moduleId" : ObjectId("63ca80a13eea1005982f82f8"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
},
{
"moduleId" : ObjectId("63ce1de83eea1005982f8f4f"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
}
],
"moduleData" : {
"_id" : ObjectId("63ca80a13eea1005982f82f8"),
"displayName" : "Master App",
"moduleName" : "masterApp",
"moduleIcon" : "fa fa-circle",
"moduleLevel" : 0,
"moduleParent" : "",
"order" : 0,
"Active" : "Active"
}
},
{
"_id" : ObjectId("63c8ad93eb3b2d975834a6d0"),
"roleName" : "SuperUser",
"activeStatus" : "Active",
"moduleAccess" : [
{
"moduleId" : ObjectId("63ca80563eea1005982f82d4"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
},
{
"moduleId" : ObjectId("63ca80a13eea1005982f82f8"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
},
{
"moduleId" : ObjectId("63ce1de83eea1005982f8f4f"),
"view" : 1,
"create" : 1,
"update" : 1,
"delete" : 1
}
],
"moduleData" : {
"_id" : ObjectId("63ce1de83eea1005982f8f4f"),
"displayName" : "Role",
"moduleName" : "role",
"moduleIcon" : "fa fa-circle",
"moduleLevel" : 1,
"moduleParent" : ObjectId("63ca80a13eea1005982f82f8"),
"order" : 1,
"Active" : "Active"
}
}
]
The Robo 3T result is the answer I expect; it's correct.
Node.js result:
{
_id: new ObjectId("63c8ad93eb3b2d975834a6d0"),
roleName: 'SuperUser',
activeStatus: 'Active',
moduleAccess: [
{
moduleId: new ObjectId("63ca80563eea1005982f82d4"),
view: 1,
create: 1,
update: 1,
delete: 1
},
{
moduleId: new ObjectId("63ca80a13eea1005982f82f8"),
view: 1,
create: 1,
update: 1,
delete: 1
},
{
moduleId: new ObjectId("63ce1de83eea1005982f8f4f"),
view: 1,
create: 1,
update: 1,
delete: 1
}
],
moduleData: {
_id: new ObjectId("63ca80563eea1005982f82d4"),
displayName: 'Module List',
moduleName: 'moduleList',
moduleIcon: 'fa fa-circle',
moduleLevel: 1,
moduleParent: new ObjectId("63ca80a13eea1005982f82f8"),
order: 0,
Active: 'Active'
}
}
In Node.js only one unwound document is returned.
What am I doing wrong in my configuration?
mongodb(npm) : 4.13.0
mongodb version : 5.0.7
node version : 16.14.2
Please help me configure my app so that the Node.js output is the same as in Robo 3T.
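The Node.js code that produced this result isn't shown, so this is only an assumption: the driver's aggregate() returns a cursor, and reading just one document from it (for example with next()) yields exactly one unwound document. Draining the cursor with toArray(), as the Robo 3T snippet does, should return all three documents. A minimal sketch against the mongodb 4.x driver; the connection string and database name are placeholders:
const { MongoClient, ObjectId } = require('mongodb');

async function main() {
    // Placeholder connection string and database name.
    const client = await MongoClient.connect('mongodb://localhost:27017');
    const db = client.db('myapp');

    const cursor = db.collection('roles').aggregate([
        { $match: { _id: new ObjectId('63c8ad93eb3b2d975834a6d0') } },
        {
            $lookup: {
                from: 'modules',
                localField: 'moduleAccess.moduleId',
                foreignField: '_id',
                as: 'moduleData'
            }
        },
        { $unwind: '$moduleData' }
    ]);

    // toArray() drains the cursor and returns every document,
    // matching the .toArray() call used in Robo 3T.
    const docs = await cursor.toArray();
    console.log(docs.length); // expect 3 with the data shown above

    await client.close();
}

main().catch(console.error);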

Related

High load on simple Mongo find with indices

I have a MongoDB that I'm querying from Node.js (using mongoose).
In this particular case I'm querying a bunch of collections and piping the data as CSV into archiverjs to create a zip file. So there is an incoming request, the data gets queried through mongoose and a Mongo cursor, piped into a pipeline that ends in archiverjs and finally in the HTTP response delivering the zip file to the user.
async function getSortedQueryCursor(...) {
    ...
    const query = MODEL_LOOKUP[fileType]
        .find(reducer)
        .sort({ [idString]: 'asc' });
    return query.cursor();
}

async function getData(...) {
    const cursor = await getSortedQueryCursor(...);
    return cursor
        .pipe(filter1Stream)
        .pipe(filter2Stream)
        .pipe(filter3Stream)
        .pipe(csvStringifyStream);
}

router.post('/:scenarioId', async (request, response) => {
    ...
    const archive = Archiver(...);
    archive.pipe(response);
    const result = await getData(...);
    archive.append(stream, { name: filepath });
    return archive.finalize();
});
As soon as a particular collection is involved (it holds roughly 40 million documents), the query takes very long (>15 s) and I can see the mongod process at 100% CPU during that time. This is even more surprising as the result set is empty (no documents match the query).
It's a rather simple query:
items.find({ scenarioId: 'ckqf5ulg38gu208eecxlf95fc' }, { sort: { dataId: 1 } })
I have indices on scenarioId and dataId. If I run the query on the shell it returns in 30ms.
An explain() results in:
[
{
"queryPlanner": {
"plannerVersion": 1,
"namespace": "data.items",
"indexFilterSet": false,
"parsedQuery": {
"scenarioId": {
"$eq": "ckqf5ulg38gu208eecxlf95fc"
}
},
"winningPlan": {
"stage": "SORT",
"sortPattern": {
"itemId": 1
},
"memLimit": 104857600,
"type": "simple",
"inputStage": {
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"scenarioId": 1
},
"indexName": "scenarioId_1",
"isMultiKey": false,
"multiKeyPaths": {
"scenarioId": []
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"scenarioId": [
"[\"ckqf5ulg38gu208eecxlf95fc\", \"ckqf5ulg38gu208eecxlf95fc\"]"
]
}
}
}
},
"rejectedPlans": [
...
]
},
"executionStats": {
"executionSuccess": true,
"nReturned": 0,
"executionTimeMillis": 0,
"totalKeysExamined": 0,
"totalDocsExamined": 0,
"executionStages": {
"stage": "SORT",
"nReturned": 0,
"executionTimeMillisEstimate": 0,
"works": 3,
"advanced": 0,
"needTime": 1,
"needYield": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"sortPattern": {
"dataId": 1
},
"memLimit": 104857600,
"type": "simple",
"totalDataSizeSorted": 0,
"usedDisk": false,
"inputStage": {
"stage": "FETCH",
"nReturned": 0,
"executionTimeMillisEstimate": 0,
"works": 1,
"advanced": 0,
"needTime": 0,
"needYield": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"docsExamined": 0,
"alreadyHasObj": 0,
"inputStage": {
"stage": "IXSCAN",
"nReturned": 0,
"executionTimeMillisEstimate": 0,
"works": 1,
"advanced": 0,
"needTime": 0,
"needYield": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"keyPattern": {
"scenarioId": 1
},
"indexName": "scenarioId_1",
"isMultiKey": false,
"multiKeyPaths": {
"scenarioId": []
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"scenarioId": [
"[\"ckqf5ulg38gu208eecxlf95fc\", \"ckqf5ulg38gu208eecxlf95fc\"]"
]
},
"keysExamined": 0,
"seeks": 1,
"dupsTested": 0,
"dupsDropped": 0
}
}
},
...
},
"serverInfo": {
...
"version": "4.4.6",
"gitVersion": "72e66213c2c3eab37d9358d5e78ad7f5c1d0d0d7"
},
...
}
]
It tells me (I'm not very experienced in interpreting those results) that the query is quite cheap ("executionTimeMillisEstimate": 0), as it's not running a document scan ("docsExamined": 0).
Next I connected to the mongo server and ran db.currentOp({"secs_running": {$gte: 5}}) to get some information from this side:
{
"type" : "op",
...
"clientMetadata" : {
"driver" : {
"name" : "nodejs|Mongoose",
"version" : "3.6.5"
},
"os" : {
"type" : "Linux",
"name" : "linux",
"architecture" : "x64",
"version" : "5.8.0-50-generic"
},
"platform" : "'Node.js v14.17.0, LE (unified)",
"version" : "3.6.5|5.12.3"
},
"active" : true,
"secs_running" : NumberLong(16),
"microsecs_running" : NumberLong(16661409),
"op" : "query",
"ns" : "data.items",
"command" : {
"find" : "items",
"filter" : {
"scenarioId" : "ckqf5ulg38gu208eecxlf95fc"
},
"sort" : {
"itemId" : 1
},
"projection" : {
},
"returnKey" : false,
"showRecordId" : false,
"lsid" : {
"id" : UUID("be3ce18b-5365-4680-b734-543d06418301")
},
"$clusterTime" : {
"clusterTime" : Timestamp(1625498044, 1),
"signature" : {
"hash" : BinData(0,"AAAAAAAAAAAAAAAAAAAAAAAAAAA="),
"keyId" : 0
}
},
"$db" : "data",
"$readPreference" : {
"mode" : "primaryPreferred"
}
},
"numYields" : 14701,
"locks" : {
"ReplicationStateTransition" : "w",
"Global" : "r",
"Database" : "r",
"Collection" : "r"
},
"waitingForLock" : false,
"lockStats" : {
"ReplicationStateTransition" : {
"acquireCount" : {
"w" : NumberLong(14702)
}
},
"Global" : {
"acquireCount" : {
"r" : NumberLong(14702)
}
},
"Database" : {
"acquireCount" : {
"r" : NumberLong(14702)
}
},
"Collection" : {
"acquireCount" : {
"r" : NumberLong(14702)
}
},
"Mutex" : {
"acquireCount" : {
"r" : NumberLong(1)
}
}
},
"waitingForFlowControl" : false,
"flowControlStats" : {
}
}
Any ideas how to improve the performance or find the bottleneck in my application? Since the load is high on the Mongo side and no documents are found or passed to the application, I guess it's Mongo having trouble ...
EDIT: I've logged the whole process on the DB side using db.setProfilingLevel(2) and db.system.profile.find().pretty(). Here we can see that the whole collection is examined (or am I misinterpreting "docsExamined" : 39612167?):
{
"op" : "query",
"ns" : "data.items",
"command" : {
"find" : "items",
"filter" : {
"scenarioId" : "ckqf5ulg38gu208eecxlf95fc"
},
"sort" : {
"dataId" : 1
},
"projection" : {
},
...
"$db" : "data",
"$readPreference" : {
"mode" : "primaryPreferred"
}
},
"keysExamined" : 39612167,
"docsExamined" : 39612167,
"cursorExhausted" : true,
"numYield" : 39613,
"nreturned" : 0,
"queryHash" : "B7F40289",
"planCacheKey" : "BADED068",
"locks" : {
"ReplicationStateTransition" : {
"acquireCount" : {
"w" : NumberLong(39615)
}
},
"Global" : {
"acquireCount" : {
"r" : NumberLong(39615)
}
},
"Database" : {
"acquireCount" : {
"r" : NumberLong(39614)
}
},
"Collection" : {
"acquireCount" : {
"r" : NumberLong(39614)
}
},
"Mutex" : {
"acquireCount" : {
"r" : NumberLong(1)
}
}
},
"flowControl" : {
},
"storage" : {
},
"responseLength" : 242,
"protocol" : "op_msg",
"millis" : 48401,
"planSummary" : "IXSCAN { dataId: 1 }",
"execStats" : {
"stage" : "CACHED_PLAN",
"nReturned" : 0,
"executionTimeMillisEstimate" : 48401,
"works" : 1,
"advanced" : 0,
"needTime" : 0,
"needYield" : 0,
"saveState" : 39613,
"restoreState" : 39613,
"isEOF" : 1,
"inputStage" : {
"stage" : "FETCH",
"filter" : {
"scenarioId" : {
"$eq" : "ckqf5ulg38gu208eecxlf95fc"
}
},
"nReturned" : 0,
"executionTimeMillisEstimate" : 6270,
"works" : 39612168,
"advanced" : 0,
"needTime" : 39612167,
"needYield" : 0,
"saveState" : 39613,
"restoreState" : 39613,
"isEOF" : 1,
"docsExamined" : 39612167,
"alreadyHasObj" : 0,
"inputStage" : {
"stage" : "IXSCAN",
"nReturned" : 39612167,
"executionTimeMillisEstimate" : 2151,
"works" : 39612168,
"advanced" : 39612167,
"needTime" : 0,
"needYield" : 0,
"saveState" : 39613,
"restoreState" : 39613,
"isEOF" : 1,
"keyPattern" : {
"dataId" : 1
},
"indexName" : "dataId_1",
"isMultiKey" : false,
"multiKeyPaths" : {
"dataId" : [ ]
},
"isUnique" : false,
"isSparse" : false,
"isPartial" : false,
"indexVersion" : 2,
"direction" : "forward",
"indexBounds" : {
"dataId" : [
"[MinKey, MaxKey]"
]
},
"keysExamined" : 39612167,
"seeks" : 1,
"dupsTested" : 0,
"dupsDropped" : 0
}
}
}
(As usual) it seems like the indices are not set up properly. I've created a new compound index:
{
"dataId" : 1,
"scenarioId": 1
}
and now the query returns within milliseconds ...
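For reference, a compound index like that can be created from the shell as follows (assuming the collection is items in the data database, as the profiler output above suggests):
// Assumes the namespace data.items from the profiler output; adjust to your collection.
db.items.createIndex({ dataId: 1, scenarioId: 1 });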
EDIT: What still makes me wonder is that the shell query returned in milliseconds while the mongoose query took ages. Even though the queries seem identical (from my point of view), Mongo treats them differently.
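One thing that may explain the difference (an assumption, not something verified in the question): the slow run's plan was a CACHED_PLAN with planSummary IXSCAN { dataId: 1 }, while the explain() from the shell chose the scenarioId_1 index. Mongoose queries can pin an index with hint() to rule the plan cache out; a sketch reusing the names from the code above:
// Hypothetical diagnostic: force the scenarioId index for this query.
const query = MODEL_LOOKUP[fileType]
    .find(reducer)
    .sort({ [idString]: 'asc' })
    .hint({ scenarioId: 1 });
return query.cursor();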

Query Mongodb works with the terminal but not with node js

I am facing a problem that I do not know how to solve.
The purpose of the query: compare the document of the selected person with the document of the user making the request. If the id of the selected person is present in friend, then is_friend should be equal to true; otherwise it should be false.
I got this query:
users.aggregate({
    $match: {
        search: /f/
    }
}, {
    $lookup: {
        from: "users",
        let: { user: "$_id" },
        pipeline: [
            {
                $match: {
                    $expr: {
                        $and: [
                            { $in: ["$$user", "$friend.id"] },
                            { $eq: ["$_id", ObjectId("5bd22f28f77cfb1f6ce503ca")] }
                        ]
                    }
                }
            },
            { $limit: 1 },
            { $project: { email: 0, password: 0 } }
        ],
        as: "is_friend"
    }
}, {
    $project: {
        name: 1, search: 1, desc: 1, color: 1, profil: 1, banner: 1, date: 1, friend: 1,
        is_friend: {
            $cond: {
                if: {
                    $eq: [{ $arrayElemAt: ["$is_friend", 0] }, undefined]
                },
                then: "false",
                else: "true"
            }
        }
    }
})
So there is 1 document of users:
{
"_id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"search" : "flarize",
"name" : "flarize",
"email" : "flarize.73#gmail.com",
"password" : "$2a$10$eYeOtEkEUyD7TFkjKvhZOuSSpvBolkL17TrPHuoHhOT8JrsQR0UKW",
"color" : 0,
"profil" : "",
"banner" : "",
"desc" : "",
"date" : 1540501286109,
"friend" : [
{
"id" : ObjectId("5bd19a92da24674fdabd26b6"),
"date" : 1540676931288
},
{
"id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"date" : 1540676931288
}
]
}
When I run this query in the terminal, I get this result:
{
"_id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"search" : "flarize",
"name" : "flarize",
"color" : 0,
"profil" : "",
"banner" : "",
"desc" : "",
"date" : 1540501286109,
"friend" : [
{
"id" : ObjectId("5bd19a92da24674fdabd26b6"),
"date" : 1540666689579
},
{
"id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"date" : 1540666689579
}
],
"is_friends" : "true"
}
That's the result I want.
But from Node.js I get this:
{
"_id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"search" : "flarize",
"name" : "flarize",
"color" : 0,
"profil" : "",
"banner" : "",
"desc" : "",
"date" : 1540501286109,
"friend" : [
{
"id" : ObjectId("5bd19a92da24674fdabd26b6"),
"date" : 1540666689579
},
{
"id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"date" : 1540666689579
}
],
is_friend : {
"_id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"search" : "flarize",
"name" : "flarize",
"color" : 0,
"profil" : "",
"banner" : "",
"desc" : "",
"date" : 1540501286109,
"friend" : [
{
"id" : ObjectId("5bd19a92da24674fdabd26b6"),
"date" : 1540676931288
},
{
"id" : ObjectId("5bd22f28f77cfb1f6ce503ca"),
"date" : 1540676931288
}
]
}
}
How can I solve this problem?
EDIT
my Node.js code:
users.aggregate({
    $match: {
        search: new RegExp(req.body.search, 'i')
    }
}, {
    $lookup: {
        from: "users",
        let: { user: "$_id" },
        pipeline: [
            {
                $match: {
                    $expr: {
                        $and: [
                            { $in: ["$$user", "$friend.id"] },
                            { $eq: ["$_id", new ObjectId(decoded["_id"])] }
                        ]
                    }
                }
            },
            { $limit: 1 },
            { $project: { email: 0, password: 0 } }
        ],
        as: "is_friend"
    }
}, {
    $project: {
        name: 1, search: 1, desc: 1, color: 1, profil: 1, banner: 1, date: 1, friend: 1,
        is_friend: {
            $cond: {
                if: {
                    $eq: [{ $arrayElemAt: ["$is_friend", 0] }, undefined]
                },
                then: "false",
                else: "true"
            }
        }
    }
}).toArray(function(err, result) {
    if (err) throw err;
    ress.send(result);
});
Thank you for helping me
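A hedged note rather than a confirmed fix: in the mongo shell, undefined is serialized as the deprecated BSON undefined type, while the Node.js driver serializes JavaScript undefined as null or drops it (depending on its ignoreUndefined option), so the $eq ... undefined comparison may not behave the same in both environments. Rewriting the $cond to test the size of the $lookup result avoids undefined entirely; a sketch of just the final $project stage:
// Sketch: compute is_friend from the length of the $lookup output instead of comparing to undefined.
{
    $project: {
        name: 1, search: 1, desc: 1, color: 1, profil: 1, banner: 1, date: 1, friend: 1,
        is_friend: {
            $cond: {
                if: { $gt: [{ $size: "$is_friend" }, 0] },
                then: "true",
                else: "false"
            }
        }
    }
}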

Mongodb Node.js $lookup with date and $match

Can anyone help? I am using $lookup in MongoDB to join and get all orders where the date is between date "X" and date "Y" and chef_id is "P". Something is wrong in the "date" part: it does not return data for those dates. When I use the same date condition in a single find query it works fine and returns data between the dates, but it returns nothing when I apply it together with the $lookup join.
Here is my query
Order.aggregate([{
"$lookup": {
"localField": "user_id",
"from": "users",
"foreignField": "_id",
"as": "order_data"
}
},
{
"$match": {
"$and": [
{ "chef_id": mongoose.Types.ObjectId(req.body.chef_id) },
{ "booking_datetime": { $gte: start_time,
$lte: end_time } }
]
}
}
], function(err, gettt) {
if (err) {
res.json({ 'message': "Error", 'status': false, 'data': err });
return false;
} else {
if (gettt.length != 0) {
res.json({ 'message': "Orders Data", 'status': true, 'data': gettt });
} else {
res.json({ 'message': "No Orders for this date", 'status': false, 'data': gettt });
}
}
});
Can anyone help me out.
Here is my Order collection:
{
"_id" : ObjectId("5a4256cc3f76bc45065021fc"),
"order_status" : 2,
"total_order_amount" : "160",
"booking_datetime" : ISODate("2017-12-29T23:24:00.000Z"),
"customer_address" : "121/161, South Extension part",
"user_id" : ObjectId("5a3cb4a8a188f2074714f1de"),
"chef_id" : ObjectId("5a390b07f0b3563db59cb3ca"),
"updated_at" : ISODate("2017-12-26T14:03:56.742Z"),
"created_at" : ISODate("2017-12-26T14:03:56.342Z"),
"products" : [
{
"product_id" : "5a3a50fcefc0c972377c3012",
"product_name" : "sweet corn",
"quantity" : "12",
"_id" : ObjectId("5a4256cc3f76bc45065021fd"),
"updated_at" : ISODate("2017-12-26T14:03:56.736Z"),
"created_at" : ISODate("2017-12-26T14:03:56.736Z")
},
{
"product_id" : "5a3a5119efc0c972377c3013",
"product_name" : "chilly paneer",
"quantity" : "10",
"_id" : ObjectId("5a4256cc3f76bc45065021fe"),
"updated_at" : ISODate("2017-12-26T14:03:56.736Z"),
"created_at" : ISODate("2017-12-26T14:03:56.736Z")
},
{
"product_id" : "5a3a512cefc0c972377c3014",
"product_name" : "Gulab jamun",
"quantity" : "20",
"_id" : ObjectId("5a4256cc3f76bc45065021ff"),
"updated_at" : ISODate("2017-12-26T14:03:56.736Z"),
"created_at" : ISODate("2017-12-26T14:03:56.736Z")
},
{
"product_id" : "5a3a50fcefc0c972377c3012",
"product_name" : "ali baba",
"quantity" : "56",
"_id" : ObjectId("5a4256cc3f76bc4506502200"),
"updated_at" : ISODate("2017-12-26T14:03:56.736Z"),
"created_at" : ISODate("2017-12-26T14:03:56.736Z")
}
],
"__v" : 1
and here is my User Collection
{
"_id" : ObjectId("5a623f67eaa08537fe0dba02"),
"salt" : "73824ba53291740e15d26c300c997ce1436ac678299101171af74f4980433285",
"hash" : "8f78291ac737dac15f59f5438033a61de75282a3c671a8d0231406a8374adec140b4cb2dd30b852f05241c6f9900443906fafec22ad58c983dacaed8f9ef4f9039e72b748d9c63d924239aa40372923d824a9cc796079556c8bc5eb0b0f6b17e7fd4c35b8780c870d1b4b819e641e56ce2f88fb0a7fdfbfd91d15921e9b7441a7051523903b43b930f56057852e41ffdbdc044cc09b14ebaac77940576b483d58ff1e18c381d40a143abcd1a180ca208aac6a13eb5c819b97e7e5753bd6fc40fcc1e19b55cb816879b3fedbf187110e84149bad0918672bd2de49bc323a32f04dd0e55aded9a0157fd5eea7db645303eb4cf461e47ca905e1f196618814b88421a3cab9463dac01d5bf6aebcace6e4b1215c3cf07aaae1cac07c94dc28432d223407778f4c6b12b089e09d56a59b1f00084c727f06247c1799c1a8616c74693e2d7057a5026e3c02b9ef73bf867873508575a33fc1e956bd3c704c54e6cc38ffb22e7a04ade70db134ec87e9ed3f43a7273db115127470f8ca5d8def49ba47fe7852cdf0cbd3140b19d5fe358d29eb84519365eea6353fa34c7a6757fbd9ec2ba93eca802f21944da58cd72b5d0d7000f9fd6f231f0668b7e621117a18fcedf977515e181325a9210380e01892891fcc420a67cb5246688eb6e577fccb6d41e719b426fa20c4689af9a9485d0ae0cf026845de8b4f12c7277b9cc506b5e29224",
"email" : "eduardo.llano#geocampo.co",
"firstname" : "Pedro",
"lastname" : "Peez",
"dob" : "1980-01-19",
"phone" : "3185311158",
"gender" : "male",
"latitude" : "4.6936225",
"longitude" : "-74.0730777",
"address" : "Bogota",
"divice_token" : "dcd8cf3ceefc39b8",
"prossing_form" : "1",
"status" : true,
"role" : "chef",
"updated_at" : ISODate("2018-02-01T16:09:27.465Z"),
"created_at" : ISODate("2018-01-19T18:56:39.070Z"),
"products" : [
{
"product_name" : "Producto 1",
"product_price" : "100",
"discount" : "10",
"product_ingredients" : "Pepper",
"product_description" : "Nsjdjd jsjdjdjx",
"minimum_order" : "2",
"tags" : "Indian food",
"status" : "1",
"product_image0" : "https://s3-us-west-2.amazonaws.com/rafahoproject/a47df980-7221-4fde-97cd-977cb3dd1dcf.jpg",
"product_image1" : "https://s3-us-west-2.amazonaws.com/rafahoproject/3a282cae-028c-4d4a-9a22-4c2c43d440f1.jpg",
"product_image2" : "https://s3-us-west-2.amazonaws.com/rafahoproject/1ab4373e-ec63-43fe-9b94-7b85d289c7f9.jpg",
"_id" : ObjectId("5a6602aa135fae732d5ce3d7")
},
{
"product_name" : "Bandeja Paisa",
"product_price" : 100,
"discount" : 20,
"cuisine" : "Continental",
"minimum_order" : 5,
"cooking_time_at_chef_place" : 8,
"cooking_time_at_user_home" : 10,
"tags" : "Bandeja-Paisa,Bandeja,Paisa",
"status" : "1",
"product_image0" : "https://s3-us-west-2.amazonaws.com/rafahoproject/0252161e-0e2a-4682-b7ab-0dce355be794.jpg",
"_id" : ObjectId("5a71d268947536411def9b11")
},
{
"product_name" : "Fritanga",
"product_price" : 200,
"discount" : 30,
"cuisine" : "continental",
"minimum_order" : 6,
"cooking_time_at_chef_place" : 3,
"cooking_time_at_user_home" : 5,
"tags" : "Fritanga,platter-of-grilled-meats",
"status" : "1",
"product_image0" : "https://s3-us-west-2.amazonaws.com/rafahoproject/14c39e2f-4d02-4cd1-8aa7-2f7179b5ea0c.jpg",
"_id" : ObjectId("5a71d3c6947536411def9b12")
},
{
"product_name" : "Dominican Sancocho",
"product_price" : 300,
"discount" : 50,
"cuisine" : "continental",
"minimum_order" : 5,
"cooking_time_at_chef_place" : 1,
"cooking_time_at_user_home" : 2,
"tags" : "Dominican-Sancocho,Dominican,Sancocho",
"status" : "1",
"product_image0" : "https://s3-us-west-2.amazonaws.com/rafahoproject/40a701ff-f868-492d-bee1-d65004fff024.jpg",
"_id" : ObjectId("5a71d4da947536411def9b13")
},
{
"product_name" : "Sudado de Pollo",
"product_price" : 60,
"discount" : 10,
"cuisine" : "Continental",
"minimum_order" : 3,
"cooking_time_at_chef_place" : 3,
"cooking_time_at_user_home" : 2,
"tags" : "Sudado-de-Pollo,Sudado,de-Pollo",
"status" : "1",
"product_image0" : "https://s3-us-west-2.amazonaws.com/rafahoproject/544c2ceb-839b-4263-b482-e2262c228948.jpg",
"_id" : ObjectId("5a71d6e9947536411def9b14")
}
],
"loc" : {
"coordinates" : [
-74.0730777,
4.6936225
],
"type" : "Point"
},
"__v" : 9,
}
Your syntax looks good. I still suspect there is some data type mismatch or erroneous format. I have a similar aggregate that matches a date range, and I use new Date() to convert my strings to dates.
if (typeof dateBeginning === "string" && dateBeginning != "" && typeof dateEnding === "string" && dateEnding != "" && dateEnding >= dateBeginning) {
query.date = { $gte: new Date(dateBeginning), $lte: new Date(dateEnding) };
}
Here is an SO question which was resolved this way: Date query with ISODate in mongodb doesn't seem to work.
Also, Veeram is correct: you should put your $match first. That way, MongoDB will use any indexes on the match fields that are available.
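Putting that advice together, here is a sketch of the pipeline with $match first and the bounds converted to Date objects. It assumes start_time and end_time arrive as date strings (for example from req.body), which is not shown in the original code:
// Assumption: start_time and end_time come in as ISO date strings.
const start_time = new Date(req.body.start_time);
const end_time = new Date(req.body.end_time);

Order.aggregate([
    {
        "$match": {
            "chef_id": mongoose.Types.ObjectId(req.body.chef_id),
            "booking_datetime": { $gte: start_time, $lte: end_time }
        }
    },
    {
        "$lookup": {
            "localField": "user_id",
            "from": "users",
            "foreignField": "_id",
            "as": "order_data"
        }
    }
], function(err, gettt) {
    // same response handling as in the original code
});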

Mongoose/MongoDB sum up fields of an array for subdocuments

I am rather new to MongoDB/Mongoose and I have no idea how I am supposed to get the result I am looking for. Basically I have a document like this:
{
"playerId" : "",
"sessions" : [
{
"start" : "",
"end" : "",
"join" : "",
"leave" : "",
"rounds" : [,
{
"name" : "roundName",
"weapons" : [
{
"name" : "weaponName",
"kills" : 1,
"assits" : 1,
"deaths" : 1,
"shots" : 1,
"headshots" : 1
},
{
"name" : "weaponName",
"kills" : 1,
"assits" : 1,
"deaths" : 1,
"shots" : 1,
"headshots" : 1
}
]
},
{
"name" : "roundName",
"weapons" : [
{
"name" : "weaponName",
"kills" : 1,
"assits" : 1,
"deaths" : 1,
"shots" : 1,
"headshots" : 1
},
{
"name" : "weaponName",
"kills" : 1,
"assits" : 1,
"deaths" : 1,
"shots" : 1,
"headshots" : 1
}
]
},
{
"name" : "roundName",
"weapons" : [
{
"name" : "weaponName",
"kills" : 1,
"assits" : 1,
"deaths" : 1,
"shots" : 1,
"headshots" : 1
},
{
"name" : "weaponName",
"kills" : 1,
"assits" : 1,
"deaths" : 1,
"shots" : 1,
"headshots" : 1
}
]
}
]
}
]
}
What I want is a query that adds fields to each round (kills, deaths, assists, headshots, shots) which contain the sum of all their weapons. The same should happen for the entire session if possible. Any help would be greatly appreciated!
The following should give you a list of stats grouped by session → rounds.
The assumptions here are: 1) the collection is named shootingGame, and 2) the field path is $sessions.Rounds.Weapons.
db.shootingGame.aggregate(
    [
        { $group: {
            _id: "$sessions.Rounds",
            "sumkills": { $sum: "$sessions.Rounds.Weapons.kills" },
            "sumasists": { $sum: "$sessions.Rounds.Weapons.assists" },
            "sumdeaths": { $sum: "$sessions.Rounds.Weapons.deaths" },
            "sumheadshots": { $sum: "$sessions.Rounds.Weapons.headshots" },
            "sumshots": { $sum: "$sessions.Rounds.Weapons.shots" }
        }}
    ]
)
But for further grouping per session, you have to store this result into another document and run another $sum group per round
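If the goal is to keep the document shape and simply add the per-round totals (rather than regroup), another option is $addFields with nested $map, summing over the array paths. This is a sketch only: it assumes the collection is named players, uses the lowercase field names from the sample document (sessions.rounds.weapons), and needs MongoDB 3.4+ for $addFields and 3.6+ for $mergeObjects. Note the sample document spells the assists field "assits".
db.players.aggregate([
    {
        $addFields: {
            sessions: {
                $map: {
                    input: "$sessions",
                    as: "s",
                    in: {
                        $mergeObjects: ["$$s", {
                            rounds: {
                                $map: {
                                    input: "$$s.rounds",
                                    as: "r",
                                    in: {
                                        $mergeObjects: ["$$r", {
                                            // "$$r.weapons.kills" resolves to the array of per-weapon
                                            // values, so $sum adds them up for the round.
                                            kills: { $sum: "$$r.weapons.kills" },
                                            assists: { $sum: "$$r.weapons.assits" },
                                            deaths: { $sum: "$$r.weapons.deaths" },
                                            shots: { $sum: "$$r.weapons.shots" },
                                            headshots: { $sum: "$$r.weapons.headshots" }
                                        }]
                                    }
                                }
                            }
                        }]
                    }
                }
            }
        }
    }
])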

Get info from another collection based on _id and merge - MongoDB

I have two collections. Lets call one baskets and the other one fruits.
In baskets we have the following document:
[{
basket_name: "John's Basket",
items_in_basket: [
{
fruit_id: 1,
comment: "Delicious!"
},
{
fruit_id: 2,
comment: "I did not like this"
}
]
}]
And in fruits we have the following documents:
[{
_id: 1,
fruit_name: "Strawberry",
color: "Red"
},
{
_id: 2,
fruit_name: "Watermelon",
color: "Green"
}]
How do I get information on each fruit in John's Basket?
The result should look like this:
[{
fruit_id: 1,
comment: "Delicious!",
fruit_name: "Strawberry",
color: "Red"
},
{
fruit_id: 2,
comment: "I did not like this",
fruit_name: "Watermelon",
color: "Green"
}]
There's no "join" in MongoDB. You either could:
consider using a MapReduce function to create a new structure that contains the merged data
write the code necessary to fetch each fruit instance on demand and merge it in your client code with a basket document.
denormalize the data and include the details for each fruit in the basket document. This poses it's own set of issues as data is duplicated and updates to a particular fruit would then need to be made to every usage in the collection.
Both have their pros and cons.
You might find this Q/A helpful, and also this documentation for MongoDB.
This is no longer true. Since version 3.2, MongoDB has the $lookup aggregation stage.
https://docs.mongodb.com/manual/reference/operator/aggregation/lookup/
db.orders.insert([
{ "_id" : 1, "item" : "almonds", "price" : 12, "quantity" : 2 },
{ "_id" : 2, "item" : "pecans", "price" : 20, "quantity" : 1 },
{ "_id" : 3 }
])
db.inventory.insert([
{ "_id" : 1, "sku" : "almonds", description: "product 1", "instock" : 120 },
{ "_id" : 2, "sku" : "bread", description: "product 2", "instock" : 80 },
{ "_id" : 3, "sku" : "cashews", description: "product 3", "instock" : 60 },
{ "_id" : 4, "sku" : "pecans", description: "product 4", "instock" : 70 },
{ "_id" : 5, "sku" : null, description: "Incomplete" },
{ "_id" : 6 }
])
db.orders.aggregate([
{
$lookup:
{
from: "inventory",
localField: "item",
foreignField: "sku",
as: "inventory_docs"
}
}
])
returns:
{
"_id" : 1,
"item" : "almonds",
"price" : 12,
"quantity" : 2,
"inventory_docs" : [
{ "_id" : 1, "sku" : "almonds", "description" : "product 1", "instock" : 120 }
]
}
{
"_id" : 2,
"item" : "pecans",
"price" : 20,
"quantity" : 1,
"inventory_docs" : [
{ "_id" : 4, "sku" : "pecans", "description" : "product 4", "instock" : 70 }
]
}
{
"_id" : 3,
"inventory_docs" : [
{ "_id" : 5, "sku" : null, "description" : "Incomplete" },
{ "_id" : 6 }
]
}
