const page = 1;
const limit = 10;
const searchResultsArray = await mongoose.connection.db
  .collection('game_activity')
  .aggregate([
    {
      $match: {
        game_id
      }
    },
    {
      $addFields: {
        activities: {
          $filter: {
            input: '$activity',
            as: 'activity',
            cond: {
              $and: condition
            }
          }
        }
      }
    },
    {
      $project: {
        activities: 1,
        activity_number: { // <--- can't read property error
          $size: '$activities'
        }
      }
    },
    { $limit: 50 },
    {
      $facet: {
        paginatedResult: [{ $sort: searchFilter }, { $skip: (page - 1) * limit }]
      }
    }
  ])
  .toArray();
const searchResultsObject = searchResultsArray[0];
if (
  searchResultsObject.paginatedResult[0].activity_number === undefined ||
  searchResultsObject.paginatedResult[0].activity_number == 0
) {
  const result = {
    no_activities: true
  };
  res.json(result);
} else {
  const result = {
    search_result: {
      total_activities: searchResultsObject.paginatedResult[0].activity_number,
      current_page: page,
      last_page: Math.ceil(searchResultsObject.paginatedResult[0].activity_number / 10),
      searched_activities: searchResultsObject.paginatedResult[0].activities
    },
    no_activities: false
  };
  res.json(result);
}
I have this .aggregate() search function and I'm trying to apply pagination to the result. This format worked on another search, but on this .aggregate() search I have two problems.
console.log(searchResultsObject.paginatedResult[0]);
-------- result --------
{
  search_result: {
    total_activities: 16,
    current_page: 1,
    last_page: 2,
    searched_activities: [
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object]
    ]
  },
  no_activities: false
}
const limit = 10 is not applied here; it returns all of the activities.
When I set const page = 2;, I get TypeError: Cannot read property 'activity_number' of undefined, and console.log(searchResultsObject.paginatedResult[0]); shows undefined.
I want to return 10 gaming activities per page, so in this case, if I set const page = 2;, it should return the remaining 6 activities. I couldn't quite figure out how to fix this, since the same format works on another .aggregate() search that I have.
------------ MongoDB Document ---------
{
  "_id": {
    "$oid": "601942d93aca6ee8cb300327"
  },
  "location_id": "ddff23323443",
  "activity": [{
    "activity_id": "VVINxmhRHsnMwvfT",
    "created_by": {
      "id": "aa332"
    },
    "activity_type": "league_of_legends",
    "activity_info": {
      "location_id": "ddff23323443",
      "activity_time": "2021-02-02 05:45:00.000Z",
      "game_code": "6"
    },
    "attendee": ["aa332"]
  }, {
    "activity_id": "EBZNKmsFKDgdeDz0",
    "created_by": {
      "id": "aa332"
    },
    "activity_type": "league_of_legends",
    "activity_info": {
      "id": "ddff23323443",
      "activity_time": "2021-02-02 05:45:00.000Z",
      "game_code": "6"
    },
    "attendee": ["aa332"]
  }, {
    "activity_id": "j8J1Jlk8MtWPi1HT",
    "created_by": {
      "id": "aa332"
    },
    "activity_type": "league_of_legends",
    "activity_info": {
      "location_id": "bvvsd33",
      "activity_time": "2021-02-02 05:45:00.000Z",
      "game_code": "6"
    },
    "attendee": ["aa332"]
  }]
}
----- expectation -----
const page = 1
{
  search_result: {
    total_activities: 16,
    current_page: 1,
    last_page: 2,
    searched_activities: [
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object],
      [Object], [Object]
    ]
  },
  no_activities: false
}
const page = 2
{
  search_result: {
    total_activities: 16,
    current_page: 2,
    last_page: 2,
    searched_activities: [
      [Object], [Object],
      [Object], [Object],
      [Object], [Object]
    ]
  },
  no_activities: false
}
Your expectation is not clear from your attempt, so let me explain the basic pagination process.
Pagination configs:
const page = 1;
const limit = 10;
// If you are starting page number from 1 then you need to minus 1
const skip = (page - 1) * limit;
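With the numbers from the question (16 matching activities, limit = 10) this works out to:
// page = 1 → skip = 0  → activities 1–10
// page = 2 → skip = 10 → the remaining 6 activities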
$unwind deconstructs the activities array
$replaceRoot replaces the root with each activities object
$sort sorts the documents
$facet separates the result into data and count; $skip should come before $limit
const searchResultsArray = await mongoose.connection.db.collection('game_activity').aggregate([
  { $match: { game_id } },
  {
    $project: {
      activities: {
        $filter: {
          input: '$activity',
          as: 'activity',
          cond: { $and: condition }
        }
      }
    }
  },
  { $unwind: "$activities" },
  { $replaceRoot: { newRoot: "$activities" } },
  { $sort: searchFilter },
  {
    $facet: {
      searched_activities: [
        { $skip: skip },
        { $limit: limit }
      ],
      total_activities: [
        { $count: "count" }
      ]
    }
  }
]).toArray();
No-data validation response:
// No data found!
if (!searchResultsArray[0].total_activities.length) {
  res.json({ no_activities: true });
  return;
}
Success response
res.json({
  search_result: {
    total_activities: searchResultsArray[0].total_activities[0].count,
    current_page: page,
    last_page: Math.ceil(searchResultsArray[0].total_activities[0].count / limit),
    searched_activities: searchResultsArray[0].searched_activities
  },
  no_activities: false
});
Combine the above code together in sequence and try it.
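For reference, $facet always returns a single document whose fields are arrays, which is why the count is read as total_activities[0].count. Roughly (a sketch, assuming the field names above):
// searchResultsArray[0] looks like:
// {
//   searched_activities: [ /* up to `limit` filtered activity objects */ ],
//   total_activities: [ { count: 16 } ] // an empty array when nothing matched
// }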
I ran the following query in Mongoose and it is a successful query:
const recmessages = await messages.aggregate([
  {
    $match: {
      conversationId: {
        $in: mapedId
      }
    }
  },
  {
    $sort: {
      time: -1
    }
  },
  {
    $group: {
      _id: "$conversationId",
      messages: {
        $push: "$$ROOT"
      }
    }
  },
  {
    $project: {
      messages: {
        $slice: [
          "$messages",
          5
        ]
      }
    }
  }
])
The returned data, however, has the following form. If I use { $unwind: '$messages' }, each message becomes a plain object, but messages is no longer an array.
[ { _id: 5e7bb66d99aa010ca9bfeed9,
messages: [ [Object], [Object], [Object], [Object] ] },
{ _id: 5e7ba776c55fc40b216b0d12,
messages: [ [Object], [Object], [Object], [Object] ] } ]
My question: how can I end up with the same query result, with the messages field as a deconstructed array of items instead of objects?
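Note that [Object] in output like this is just Node's console truncating nested values at depth 2; the underlying documents are intact. A quick way to verify (a sketch using the variable name above) is to print with unlimited depth:
const util = require('util');
// Show the full nested structure instead of [Object] placeholders
console.log(util.inspect(recmessages, { depth: null }));
// or equivalently: console.dir(recmessages, { depth: null });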
How are you doing?
I'm having trouble with an aggregation in my project; my aggregation result is different in Robo3T and Node.
db.getCollection('companies').aggregate([
  { '$match': { _id: { '$eq': ObjectId("5e30a4fe11e6e80d7fb544a4") } } },
  { $unwind: '$jobVacancies' },
  {
    $project: {
      jobVacancies: {
        _id: 1,
        name: 1,
        city: 1,
        openingDate: 1,
        closingDate: 1,
        createdAt: 1,
        quantity: 1,
        steps: {
          $filter: {
            input: '$jobVacancies.steps',
            as: 'step',
            cond: {
              $and: [
                { $eq: ['$$step.order', 0] },
                { $ne: ['$$step.users', undefined] },
                { $ne: ['$$step.users', null] },
                { $ne: ['$$step.users', []] },
              ],
            },
          },
        },
      },
    },
  },
  { $match: { 'jobVacancies.steps': { $ne: [] } } },
])
In Robo3T this returns 1 object, but in Node the same aggregation returns 6 objects. Can you help me? Thank you!
EDIT
Node.js:
The first $match builds the ObjectId filter for the company (in the context of GraphQL, based on my filters).
const companies = await this.MongoClient.db
  .collection('companies')
  .aggregate([
    {
      $match: await this.getFilterObject(
        filters.filter(f => !f.field.includes('$$jobVacancy') && !f.field.includes('StepOrder')),
      ),
    },
    { $unwind: '$jobVacancies' },
    {
      $project: {
        jobVacancies: {
          _id: 1,
          name: 1,
          city: 1,
          openingDate: 1,
          closingDate: 1,
          createdAt: 1,
          quantity: 1,
          steps: {
            $filter: {
              input: '$jobVacancies.steps',
              as: 'step',
              cond: {
                $and: [
                  { $eq: ['$$step.order', order] },
                  { $ne: ['$$step.users', undefined] },
                  { $ne: ['$$step.users', null] },
                  { $ne: ['$$step.users', []] },
                ],
              },
            },
          },
        },
      },
    },
    { $match: { 'jobVacancies.steps': { $ne: [] } } },
  ])
  .toArray();
EDIT 3
This is the result of console.dir (with { depth: null }) on the pipeline:
[
  {
    '$match': {
      _id: {
        '$eq': ObjectID {
          _bsontype: 'ObjectID',
          id: Buffer [Uint8Array] [
            94, 48, 164, 254, 17,
            230, 232, 13, 127, 181,
            68, 164
          ]
        }
      }
    }
  },
  { '$unwind': '$jobVacancies' },
  {
    '$project': {
      jobVacancies: {
        _id: 1,
        name: 1,
        city: 1,
        openingDate: 1,
        closingDate: 1,
        createdAt: 1,
        quantity: 1,
        steps: {
          '$filter': {
            input: '$jobVacancies.steps',
            as: 'step',
            cond: {
              '$and': [
                { '$eq': [ '$$step.order', 0 ] },
                { '$ne': [ '$$step.users', undefined ] },
                { '$ne': [ '$$step.users', null ] },
                { '$ne': [ '$$step.users', [] ] }
              ]
            }
          }
        }
      }
    }
  },
  { '$match': { 'jobVacancies.steps': { '$ne': [] } } }
]
I think I found the solution. The document is created with these properties:
jobVacancies: {
  steps: {
    users: []
  }
}
But sometimes the users array is undefined in MongoDB, so I was verifying that with:
{ '$ne': [ '$$step.users', undefined ] }
I think JS undefined is different from MongoDB undefined, so I initialized all steps with an empty array of users, removed this verification, and it worked!
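A minimal backfill sketch of that fix, assuming the collection and field names from the pipeline above (arrayFilters requires MongoDB 3.6+, and $[] errors on documents where jobVacancies is missing or not an array):
// Give every step an explicit empty users array where it is missing,
// so the $ne / undefined checks become unnecessary.
await this.MongoClient.db.collection('companies').updateMany(
  {}, // assumes every document has a jobVacancies array; narrow this filter if not
  { $set: { 'jobVacancies.$[].steps.$[step].users': [] } },
  { arrayFilters: [{ 'step.users': { $exists: false } }] }
);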
Consider the schema:
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const EightWeekGamePlanSchema = new Schema({
  Week: {
    type: Number,
    required: true
  },
  LeadId: {
    type: String,
    required: true
  },
  SupplierGeneralId: {
    type: String,
    required: true
  },
  // ... More properties
  TotalClaimsLeftToBeClaimedByClientType: {
    // increased by 1 every time it's claimed
    type: Number,
    required: true
  },
  InsertDate: {
    type: Date
    // default: Date.now
  }
});
module.exports = EightWeekGamePlan = mongoose.model(
  "eightweekgameplan",
  EightWeekGamePlanSchema
);
And consider the Mongo query:
EightWeekGamePlan.aggregate(
  [
    // 1. Group by supplier
    // 2. Within each supplier group by month & year
    //    Use count & sum
    {
      $project: {
        month: { $month: "$InsertDate" },
        year: { $year: "$InsertDate" }
      }
    },
    {
      $group: {
        _id: {
          SupplierGeneralId: "$SupplierGeneralId",
          month: "$month",
          year: "$year"
        },
        ClaimsSummary: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
        // TotalLeadsPerSupplierAndDate: ...
        // Here I want to group again, by LeadId, and count all the
        // unique LeadIds
      }
    }
  ]
);
I want to group by SupplierGeneralId and the month + year of InsertDate,
summarize for each month the TotalClaimsLeftToBeClaimedByClientType,
then group again, but this time by LeadId, and count all the unique LeadIds for each supplier (previously grouped by SupplierGeneralId, month, year).
However, I'm getting:
[ { _id: { month: 1, year: 2020 }, ClaimsSummary: 0 } ]
...even though there is data.
What's wrong with the pipeline? And how can I group again to get the unique LeadIds for each supplier?
Thanks
EDIT:
I've added more fields to the $project, but now I'm getting empty objects in the $push:
EightWeekGamePlan.aggregate(
  [
    // 1. Group by supplier
    // 2. Within each supplier group by month & year
    //    Use count & sum
    { $sort: { SupplierGeneralId: 1 } },
    {
      $project: {
        month: { $month: "$InsertDate" },
        year: { $year: "$InsertDate" },
        SupplierGeneralId: "$SupplierGeneralId",
        TotalClaimsLeftToBeClaimedByClientType:
          "$TotalClaimsLeftToBeClaimedByClientType"
      }
    },
    {
      $group: {
        _id: {
          SupplierGeneralId: "$SupplierGeneralId",
          month: "$month",
          year: "$year"
        },
        LeadsCollection: {
          $push: {
            LeadId: "$LeadId"
          }
        },
        ClaimsSummary: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
      }
    }
  ]
);
Output:
[
  {
    _id: {
      SupplierGeneralId: 'qCwHWFD1cBvrfPp5hdBL6M',
      month: 1,
      year: 2020
    },
    LeadsCollection: [
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {},
      {}, {}, {}, {}, {}, {}, {}, {}
    ],
    ClaimsSummary: 4
  }
]
SECOND EDIT:
EightWeekGamePlan.aggregate(
  [
    { $sort: { SupplierGeneralId: 1 } },
    {
      $group: {
        _id: {
          SupplierGeneralId: "$SupplierGeneralId",
          month: { $month: "$InsertDate" },
          year: { $year: "$InsertDate" }
        },
        LeadsUploaded: {
          $push: {
            LeadId: "$LeadId"
          }
        },
        Sells: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
      }
    },
    {
      $project: {
        Sells: "$Sells",
        LeadsUploaded: {
          $reduce: {
            input: { $setUnion: "$LeadsUploaded.LeadId" },
            initialValue: [],
            in: {
              $concatArrays: [
                "$$value",
                [
                  {
                    Lead_ID: "$$this"
                  }
                ]
              ]
            }
          }
        }
      }
    }
  ]
);
You should just drop the $project stage; you're grouping right after, so there's no real point to it, and it only makes the pipeline less efficient. In your first pipeline the $project passed only month and year through, so SupplierGeneralId, LeadId, and TotalClaimsLeftToBeClaimedByClientType no longer existed in the later stages; that's why you got a single group with ClaimsSummary: 0, and why your second attempt (whose $project still dropped LeadId) pushed empty objects.
Rewrite your pipeline as:
EightWeekGamePlan.aggregate(
  [
    // 1. Group by supplier
    // 2. Within each supplier group by month & year
    //    Use count & sum
    { $sort: { SupplierGeneralId: 1 } },
    {
      $group: {
        _id: {
          SupplierGeneralId: "$SupplierGeneralId",
          month: { $month: "$InsertDate" },
          year: { $year: "$InsertDate" }
        },
        LeadsCollection: {
          $push: {
            LeadId: "$LeadId"
          }
        },
        ClaimsSummary: { $sum: "$TotalClaimsLeftToBeClaimedByClientType" }
      }
    }
  ]
);
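To also get the unique LeadIds per supplier that the question asks about, one option (a sketch reusing the field names above) is to append a stage that de-duplicates the pushed LeadIds and counts them:
// Append after the $group stage:
{
  $addFields: {
    // $setUnion de-duplicates, $size counts the distinct LeadIds per supplier/month
    TotalUniqueLeads: { $size: { $setUnion: ["$LeadsCollection.LeadId", []] } }
  }
}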
I'm working on an application and am having an issue with Mongoose limiting array size on update. I'm not too familiar with MongoDB or Mongoose, but I'm hoping this project will grow my knowledge. I tried a few other solutions I found on SO, but none appeared to work for me. Here is my breakdown of the problem...
Library: Mongoose 5.7.7
Summary: In a document of logs, I wish to keep only the 10 latest logs, with older entries being pushed off the array.
Issue: With the current query I have, the $slice appears not to be limiting the size of the array. The array just continues to grow.
ActivityLog.updateOne(
  { guild },
  {
    $push: {
      logs: {
        $each: [{ ...log }]
      },
      $slice: -10
    }
  },
  { upsert: true }
);
This might actually be more of a problem in your implementation. Here's a basic practical example to show that it does indeed work:
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost:27017/test';
const options = { useNewUrlParser: true, useUnifiedTopology: true };

mongoose.set("debug", true);
mongoose.set("useFindAndModify", false);
mongoose.set("useCreateIndex", true);

const demoSchema = new Schema({
  _id: Number,
  fifo: [Number]
}, { _id: false });

const Demo = mongoose.model('Demo', demoSchema, 'demo');

const log = data => console.log(JSON.stringify(data, undefined, 2));

(async function() {
  try {
    const conn = await mongoose.connect(uri, options);
    await Promise.all(
      Object.values(conn.models).map(m => m.deleteMany())
    );
    let counter = 0;
    await new Promise((resolve, reject) =>
      setInterval(async () => {
        try {
          let result = await Demo.findByIdAndUpdate(
            1,
            { "$push": { "fifo": { "$each": [counter], "$slice": -3 } } },
            { upsert: true, new: true }
          );
          log(result);
        } catch (e) {
          reject(e)
        }
        counter++;
      }, 2000)
    );
  } catch (e) {
    console.error(e);
  } finally {
    mongoose.disconnect();
  }
})()
On a few iterations (and using -3 for brevity here) you would see:
Mongoose: demo.deleteMany({}, {})
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 0 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    0
  ],
  "_id": 1,
  "__v": 0
}
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 1 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    0,
    1
  ],
  "_id": 1,
  "__v": 0
}
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 2 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    0,
    1,
    2
  ],
  "_id": 1,
  "__v": 0
}
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 3 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    1,
    2,
    3
  ],
  "_id": 1,
  "__v": 0
}
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 4 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    2,
    3,
    4
  ],
  "_id": 1,
  "__v": 0
}
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 5 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    3,
    4,
    5
  ],
  "_id": 1,
  "__v": 0
}
Mongoose: demo.findOneAndUpdate({ _id: 1 }, { '$setOnInsert': { __v: 0 }, '$push': { fifo: { '$each': [ 6 ], '$slice': -3 } }}, { upsert: true, remove: false, projection: {}, returnOriginal: false })
{
  "fifo": [
    4,
    5,
    6
  ],
  "_id": 1,
  "__v": 0
}
So this does indeed keep the array at the specified $slice length, taken from the end of the array because of the negative value: the array grows to the set size, and then all but the last added members are removed.
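Comparing that with the update in the question, the likely culprit is the placement of $slice: it is a modifier of $push, so it has to sit inside the per-field object alongside $each, not as a sibling of logs. A corrected version of the original call (a sketch based on the question's code) would be:
ActivityLog.updateOne(
  { guild },
  {
    $push: {
      logs: {
        $each: [{ ...log }],
        $slice: -10 // the modifier belongs here, next to $each
      }
    }
  },
  { upsert: true }
);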
I'm using node.js and lodash.
I have data like this:
[
  {
    to: [ 'foo@bar.com', 'foo1@bar.com' ],
    submittedSubs: [ [Object] ]
  },
  {
    to: [ 'foo@bar.com', 'foo2@bar.com' ],
    submittedSubs: [ [Object], [Object], [Object] ]
  }
]
I'd like to turn it into data like this, where it's "sorted" by to:
[
  {
    to: 'foo@bar.com',
    submittedSubs: [ [Object], [Object], [Object], [Object] ]
  },
  {
    to: 'foo1@bar.com',
    submittedSubs: [ [Object] ]
  },
  {
    to: 'foo2@bar.com',
    submittedSubs: [ [Object], [Object], [Object] ]
  }
]
How can I do this?
I've tried this:
spam[0].to.push('foo@bar.com');
spam[0].to.push('foo1@bar.com');
spam[1].to.push('foo@bar.com');
spam[1].to.push('foo2@bar.com');
console.log('data is', spam);
var byUser = [];
_.each(spam, function(data) {
  _.each(data.to, function(addr) {
    byUser.push({ to: addr, submittedSubs: data.submittedSubs });
  });
});
console.log('attempt', _.merge(byUser));
But that gives me this:
[ { to: 'foo@bar.com', submittedSubs: [ [Object] ] },
  { to: 'foo1@bar.com', submittedSubs: [ [Object] ] },
  { to: 'foo@bar.com', submittedSubs: [ [Object], [Object], [Object] ] },
  { to: 'foo2@bar.com', submittedSubs: [ [Object], [Object], [Object] ] } ]
This'll work for you:
var unique = {};
byUser.forEach(function(user) {
  unique[user.to] = unique[user.to] || [];
  unique[user.to] = unique[user.to].concat(user.submittedSubs);
});
unique = Object.keys(unique).map(function(key) {
  return { to: key, submittedSubs: unique[key] };
});
/*
[ { to: 'foo@bar.com', submittedSubs: [ [Object], [Object], [Object], [Object] ] },
  { to: 'foo1@bar.com', submittedSubs: [ [Object] ] },
  { to: 'foo2@bar.com', submittedSubs: [ [Object], [Object], [Object] ] } ]
*/
I stand by the claim that this should be achievable using the callback feature of _.uniq, but I couldn't get it to work the way you needed it to.
You should be able to use _.uniq from lodash on your final array:
_.uniq(byUser, "to");
/*
[ { to: 'foo@bar.com', submittedSubs: [ [Object] ] },
  { to: 'foo1@bar.com', submittedSubs: [ [Object] ] },
  { to: 'foo2@bar.com', submittedSubs: [ [Object], [Object], [Object] ] } ]
*/
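Two caveats worth noting: _.uniq keeps only the first occurrence per key, so a duplicate recipient keeps the first entry's submittedSubs rather than having them merged (which differs from the desired output for foo@bar.com); and in lodash 4.x the iteratee overload moved to a separate function:
// lodash 4.x equivalent of _.uniq(byUser, "to")
_.uniqBy(byUser, 'to');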
I imagine there are some nice lodash facilities to shorten this up a bit, but here's a vanilla JS solution:
var data = [
  {
    to: [ 'foo@bar.com', 'foo1@bar.com' ],
    submittedSubs: [{ id: 'sub1' }]
  },
  {
    to: [ 'foo@bar.com', 'foo2@bar.com' ],
    submittedSubs: [{ id: 'sub2' }, { id: 'sub3' }, { id: 'sub4' }]
  }
];
var emailSubsMap = data.reduce(function(result, record) {
  record.to.forEach(function(email) {
    result[email] = (result[email] || [])
      .concat(record.submittedSubs);
  });
  return result;
}, {});
var formatted = Object.keys(emailSubsMap).map(function(email) {
  return { to: email, submittedSubs: emailSubsMap[email] };
}).sort(function(a, b) {
  return a.to <= b.to ? -1 : 1;
});
console.log(JSON.stringify(formatted));
(Formatted) console output:
[
  {
    "to": "foo1@bar.com",
    "submittedSubs": [
      { "id": "sub1" }
    ]
  },
  {
    "to": "foo2@bar.com",
    "submittedSubs": [
      { "id": "sub2" },
      { "id": "sub3" },
      { "id": "sub4" }
    ]
  },
  {
    "to": "foo@bar.com",
    "submittedSubs": [
      { "id": "sub1" },
      { "id": "sub2" },
      { "id": "sub3" },
      { "id": "sub4" }
    ]
  }
]
Note that I mocked up what the submittedSubs objects might look like, simply for testing purposes.
A couple of notes about the sorting:
My first version wasn't sorting correctly... it's been updated. :)
The sort order you're requesting doesn't follow JavaScript's "native" string sort order. E.g., ['foo@bar.com', 'foo2@bar.com', 'foo1@bar.com'].sort() --> ['foo1@bar.com', 'foo2@bar.com', 'foo@bar.com'], so if you really want foo@bar.com to come before foo1@bar.com, you'll need to define your sort criteria in a little more detail.
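For instance, a numeric-aware comparator on the local part (a sketch; the split rule is an assumption about your address scheme) produces the order the question implies:
var emails = ['foo@bar.com', 'foo2@bar.com', 'foo1@bar.com'];
emails.sort(function (a, b) {
  // Compare the part before the @ with numeric awareness: 'foo' < 'foo1' < 'foo2'
  return a.split('@')[0].localeCompare(b.split('@')[0], undefined, { numeric: true });
});
console.log(emails); // [ 'foo@bar.com', 'foo1@bar.com', 'foo2@bar.com' ]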