How to update an object of an element in an array in MongoDB? - Node.js

This is the structure I have. I want to update a nested array element when an object key matches; for example, I want to match grnno: "10431000" and update the other keys of that object, like vehicle_no, invoice_no, etc.
{
    "_id" : ObjectId("5f128b8aeb27bb63057e3887"),
    "requirements" : [
        {
            "grns" : [
                {
                    "invoice_no" : "123",
                    "vehicle_no" : "345",
                    "req_id" : "5f128c6deb27bb63057e388a",
                    "grnno" : "10431000"
                },
                {
                    "invoice_no" : "abc",
                    "vehicle_no" : "def",
                    "req_id" : "5f128c6deb27bb63057e388a",
                    "grnno" : "10431001"
                }
            ]
        }
    ]
}
I have tried this code:
db.po_grn.update({
    "requirements.grns.grnno": "10431001"
}, {
    $set: {
        "requirements.$.grns": { "invoice_no": "test", vehicle_no: "5455" }
    }
})
But this changes the structure I have, like this:
"requirements" : [
{
"grns" : {
"invoice_no" : "test",
"vehicle_no":"5455"
},
"req_id" : ObjectId("5f128b8aeb27bb63057e3886")
}
],
The grns key should stay an array, and the update should apply only to the particular object that matches the key grnno. Please help me out. Thanks.
==Edit==
var grnno = req.body.grnno;
var data = req.body; // invoice_no and vehicle_no come from the request body
db.po_grn.find({
    "requirements.grns.grnno": grnno
}).toArray(function(err, po_grn) {
    console.log("po_grn", po_grn);
    if (po_grn.length > 0) {
        console.log("data.grn.grnno ", grnno);
        var query = {
            requirements: {
                $elemMatch: {
                    "grns.grnno": grnno
                }
            }
        };
        var update = {
            $set: {
                'requirements.$[].grns.$[inner].invoice_no': data.invoice_no,
                'requirements.$[].grns.$[inner].vehicle_no': data.vehicle_no,
            }
        };
        var options = {
            arrayFilters: [
                { "inner.grnno": grnno }
            ]
        };
        db.po_grn.update(query, update, options,
            function(er, grn) {
                console.log("grn", grn, "er", er);
                res.send({
                    status: 1,
                    message: "Grn updated successfully"
                });
            }
        );
    } else {
        res.send({
            status: 0,
            message: "Grn not found"
        });
    }
})

Use a combination of the $[] all-positional operator and arrayFilters to update your inner nested document.
var query = {
    requirements: {
        $elemMatch: {
            "grns.grnno": "10431001"
        }
    }
};
var update = {
    $set: {
        'requirements.$[].grns.$[inner].invoice_no': "test",
        'requirements.$[].grns.$[inner].vehicle_no': "5455",
    }
};
var options = {
    arrayFilters: [
        { "inner.grnno": "10431001" }
    ]
};
db.collection.update(query, update, options);
Update -
Node.js native MongoDB driver code attached, which works fine:
const { MongoClient } = require('mongodb');
const url = "mongodb://localhost:27017/";
MongoClient.connect(url, function(err, db) {
    if (err) {
        throw err;
    }
    const dbo = db.db("test");
    (async () => {
        const query = {
            requirements: {
                $elemMatch: {
                    "grns.grnno": "10431001"
                }
            }
        };
        const update = {
            $set: {
                'requirements.$[].grns.$[inner].invoice_no': "test",
                'requirements.$[].grns.$[inner].vehicle_no': "5455",
            }
        };
        const options = {
            arrayFilters: [
                { "inner.grnno": "10431001" }
            ],
            multi: true
        };
        try {
            const updateResult = await dbo.collection("collection").update(query, update, options);
        } catch (err) {
            console.error(err);
        }
        db.close();
    })();
});
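As a side note, on recent driver versions update() is deprecated in favour of updateOne()/updateMany(); the same arrayFilters approach should also work with updateMany(). A minimal sketch, assuming the same test database and a po_grn collection:
const { MongoClient } = require('mongodb');
const url = "mongodb://localhost:27017/";

MongoClient.connect(url, async function(err, client) {
    if (err) {
        throw err;
    }
    try {
        const result = await client.db("test").collection("po_grn").updateMany(
            { "requirements.grns.grnno": "10431001" },        // outer match: documents containing the grn
            {
                $set: {
                    "requirements.$[].grns.$[inner].invoice_no": "test",
                    "requirements.$[].grns.$[inner].vehicle_no": "5455"
                }
            },
            { arrayFilters: [{ "inner.grnno": "10431001" }] } // restrict the inner update to the matching grn
        );
        console.log("modified:", result.modifiedCount);
    } finally {
        client.close();
    }
});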

Related

Unable to update mongodb document with nested object property from node api

I have the following mongodb document:
{
    "_id" : ObjectId("5ee95b41ca023a3deb252ef2"),
    "uid" : "jdoe",
    "name" : "John Doe",
    "employee_hire_date" : "2012-06-20",
    "three_month_review_target" : "2012-09-20",
    "six_month_review_target" : "2012-12-20",
    "three_month_review_status" : {
        "is_scheduled" : null,
        "is_team_member_emailed" : null,
        "is_review_executed" : null
    },
    "six_month_review_status" : {
        "is_scheduled" : null,
        "is_team_member_emailed" : null,
        "is_review_executed" : null
    }
}
I would like to update the three_month_review_status.is_scheduled nested property to true. I am using a PUT request to accomplish this:
const mongoose = require('mongoose');
const Reviews = require('../modules/reviews/models/reviews');

module.exports = (app) => {
    app.put('/api/reviews/isScheduled', async (req, res) => {
        console.log('body', req.body)
        console.log('uid', req.body.uid)
        console.log('is_scheduled', req.body.three_month_review_status.is_scheduled)
        Reviews.findOneAndUpdate(
            { 'uid': req.body.uid },
            { $set: { 'three_month_review_status.is_scheduled': req.body.is_scheduled } },
            (err, result) => {
                if (err) {
                    console.log('error', err)
                } else {
                    console.log('updated', result);
                    res.status(200).send(result);
                }
            }
        )
    });
}
To test this, I execute this PUT request through Postman with the following request body:
{
    "uid": "jdoe",
    "three_month_review_status": {
        "is_scheduled": "true"
    }
}
However, when the request gets executed, the other two nested objects are removed and is_scheduled remains null. This is the document after the request is executed:
{
    "_id" : ObjectId("5ee95b41ca023a3deb252ef2"),
    "uid" : "jdoe",
    "name" : "John Doe",
    "employee_hire_date" : "2012-06-20",
    "three_month_review_target" : "2012-09-20",
    "six_month_review_target" : "2012-12-20",
    "three_month_review_status" : {
        "is_scheduled" : null
    },
    "six_month_review_status" : {
        "is_scheduled" : null,
        "is_team_member_emailed" : null,
        "is_review_executed" : null
    }
}
What am I doing wrong? Here is my reviewsSchema for more context:
const { Schema, model } = require('mongoose');

const reviewsSchema = new Schema({
    uid: String,
    name: String,
    employee_hire_date: String,
    three_month_review_target: String,
    six_month_review_target: String,
    three_month_review_status: {
        is_scheduled: Boolean,
        is_team_member_emailed: Boolean,
        is_review_executed: Boolean,
    },
    six_month_review_status: {
        is_scheduled: Boolean,
        is_team_member_emailed: Boolean,
        is_review_executed: Boolean,
    },
})

const Reviews = model('review', reviewsSchema);
module.exports = Reviews;
In Mongoose you don't need to specify $set. Also, based on the sample JSON that you send from Postman, you need to read req.body.three_month_review_status.is_scheduled instead of req.body.is_scheduled in the update. Finally, you need to add { new: true } if you want findOneAndUpdate to return the updated document.
So you can update the query like this:
Reviews.findOneAndUpdate(
{ uid: req.body.uid },
{
"three_month_review_status.is_scheduled":
req.body.three_month_review_status.is_scheduled,
},
{ new: true },
(err, result) => {
if (err) {
console.log("error", err);
} else {
console.log("updated", result);
res.status(200).send(result);
}
}
);
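Equivalently, keeping $set explicit also works, since Mongoose adds it for you when it is omitted. A sketch assuming the same request body:
Reviews.findOneAndUpdate(
    { uid: req.body.uid },
    {
        $set: {
            // dot notation updates only this path; sibling keys stay untouched
            "three_month_review_status.is_scheduled":
                req.body.three_month_review_status.is_scheduled,
        },
    },
    { new: true }, // return the updated document
    (err, result) => {
        if (err) {
            console.log("error", err);
        } else {
            res.status(200).send(result);
        }
    }
);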

Adding array of objects to mongodb issue

I'm having issues adding an array of objects to MongoDB. The problem shows up when I fetch the posts in ngOnInit(): there is already an entry for _id (shown in the screenshot) before I start adding anything to inviteGroup.
If I add this.inviteGroup = [] to get rid of that first _id entry, then I can successfully add my invite to the database, as in the second screenshot. Is there a way to not have that _id entry that comes from my Mongoose schema?
But naturally this.inviteGroup = [] means I can have only one entry at a time, since it erases everything on page load. How can I make that _id entry go away, so that .push() isn't thrown off after a page reload? I want to have multiple entries in the database for each invite. Is my Mongoose model the issue? I appreciate any help!
mongoose schema definition
inviteGroup: {
bidderId: { type: String, lowercase: true, trim: true },
username: { type: String, lowercase: true, trim: true }
}
app.js
app.patch("/api/listings/:id", (req, res) => {
console.log("INVITE GRdddOUP IS");
console.log(req.body);
console.log(req.body[0].biddingUserId);
let invites;
if (req.body[0].biddingUserId) {
invites = req.body;
console.log("INVITE IS");
}
console.log(invites);
if (invites) {
console.log("INVITE GROUP IS");
console.log(req.params.id);
Post.findByIdAndUpdate(
{ _id: req.params.id },
{
inviteGroup: invites
},
function(err, docs) {
if (err) {
console.log(err);
res.json(err);
} else {
return true;
console.log(docs);
}
}
);
component.ts
import {
Component,
OnInit,
ViewChild,
OnDestroy,
AfterViewInit
} from "#angular/core";
import { Router } from "#angular/router";
import {
MatTableDataSource,
MatPaginator,
MatSort,
MatDialog
} from "#angular/material";
import { NgForm, FormControl } from "#angular/forms";
import { SubmitListingService } from "../submit-listing/submit-auction.service";
import { BidderInvite } from "./bidder-invite.model";
import { Observable, Subject } from "rxjs";
import { startWith, map, takeUntil } from "rxjs/operators";
import { Page } from "ngx-pagination/dist/pagination-controls.directive";
import { BidderInviteRetrieved } from "./bidder-invite-retrieved";
@Component({
selector: "app-private-auction-invite",
templateUrl: "./private-auction-invite.component.html",
styleUrls: ["./private-auction-invite.component.css"]
})
export class PrivateAuctionInviteComponent
implements OnInit, AfterViewInit, OnDestroy {
allMyPeopleAreInvited: boolean;
auctionId: string;
dataSource: MatTableDataSource<any> = new MatTableDataSource();
timeout: any = null;
posts: BidderInviteRetrieved[];
artistId: string;
bidderId: string;
inviteGroup: BidderInvite[] = [];
test: any[] = [];
value: string;
usernameFound: string;
userSearched: string;
invites: BidderInvite[] = [];
destroy = new Subject();
inviteName: string;
filteredOptions: Observable<string[]>;
myControl = new FormControl();
selectedValue: string;
url: string;
displayedColumnsInvites: string[] = ["User", "revokeInvite"];
options: string[] = [];
@ViewChild(MatSort, { static: false }) set sort(sort: MatSort) {
this.dataSource.sort = sort;
}
@ViewChild(MatPaginator, { static: false }) set paginator(
paginator: MatPaginator
) {
this.dataSource.paginator = paginator;
}
constructor(
private router: Router,
private submitListingService: SubmitListingService
) {}
ngOnInit() {
this.inviteGroup = [];
this.dataSource.paginator = this.paginator;
this.dataSource.sort = this.sort;
this.allMyPeopleAreInvited = false;
this.url = this.router.url;
const value = this.router.url.split("/");
this.auctionId = value[2];
this.artistId = value[3];
this.submitListingService
.getPrivateAuctionInviteList(this.auctionId)
.pipe(takeUntil(this.destroy))
.subscribe(res => {
this.inviteGroup = res.posts;
console.log("res");
console.log(res);
console.log(this.inviteGroup);
if (this.inviteGroup["_id"].length > 2) {
this.inviteGroup = [];
console.log(this.inviteGroup);
}
});
this.filteredOptions = this.myControl.valueChanges.pipe(
startWith(""),
map(value => this._filter(value))
);
}
ngAfterViewInit() {
this.dataSource.paginator = this.paginator;
this.dataSource.sort = this.sort;
this.dataSource = new MatTableDataSource(this.inviteGroup);
this.dataSource.data = this.inviteGroup;
}
sendInvite(form: NgForm) {
if (form.invalid) {
return;
}
let counter: number;
counter = 0;
console.log("USER " + this.value);
console.log("POST LEGNTH: " + this.posts.length);
for (let i = 0; i < this.posts.length; i++) {
counter = counter++;
console.log("post");
console.log(form.value.username);
let user = this.posts[i].username.trim().toLowerCase();
let enteredUser = form.value.username.trim().toLowerCase();
console.log("COUNTER LOOP NUMBER: " + counter);
if (enteredUser === user) {
this.bidderId = this.posts[i].id;
console.log(this.inviteGroup);
let invites = this.inviteGroup;
console.log("INVITE LENGTH = " + this.inviteGroup.length);
console.log(invites.indexOf);
this.inviteGroup.push({
biddingUserId: this.bidderId,
username: this.posts[i].username
});
console.log(this.inviteGroup);
console.log("invite group");
console.log(this.inviteGroup);
//this.posts = [];
this.dataSource.data = this.inviteGroup;
console.log("invite group");
}
}
console.log("BIDDER ID " + this.bidderId);
if (this.bidderId === null || this.bidderId === undefined) {
console.log("SOMETHING WENT WRONG");
}
console.log("made it to next section");
let invites = this.inviteGroup;
console.log("invites[0].username");
console.log("filtering....");
invites = invites.filter((obj, pos, arr) => {
return (
arr.map(mapObj => mapObj["bidderId"]).indexOf(obj["bidderId"]) === pos
);
});
console.log("invites");
console.log(invites);
this.submitListingService
.sendPrivateAuctionInvite(this.auctionId, invites)
.pipe(takeUntil(this.destroy))
.subscribe(res => {
console.log("res");
console.log(res);
});
}
private onKeySearch(event: any) {
console.log("EVENT IS ");
console.log(event);
clearTimeout(this.timeout);
var $this = this;
this.timeout = setTimeout(function() {
if (event.keyCode !== 13) {
$this.executeListing(event.target.value);
}
}, 1000);
}
private executeListing(bidderName: string) {
console.log("BIDDERNAME");
console.log(bidderName);
if (bidderName === "[Object object]") {
return;
}
if (bidderName.length < 4) {
return;
}
if (bidderName.length > 3) {
this.submitListingService
.getUserIdAutoComplete(bidderName)
.pipe(takeUntil(this.destroy))
.subscribe(res => {
console.log("res");
console.log(res);
this.posts = res.posts;
console.log(this.posts);
// this.artists = res.posts;
});
}
}
private _filter(value: string): string[] {
const filterValue = value.toLowerCase();
return this.options.filter(
option => option.toLowerCase().indexOf(filterValue) === 0
);
console.log("OPTION IS " + filterValue);
}
storeUserPrivaeAuctionInvite(user: Page) {
console.log("USER VALUE I S" + user);
}
ngOnDestroy() {
this.destroy.next();
this.destroy.complete();
}
}
angular service
sendPrivateAuctionInvite(id: string, inviteGroup1: BidderInvite[]) {
// console.log(inviteGroup1);
return this.http.patch(
`http://localhost:3000/api/listings/${id}/`,
inviteGroup1
);
}
BidderInvite model
export interface BidderInvite {
biddingUserId: string;
username: string;
}
Is your schema definition supposed to be:
inviteGroup: {
    type: [inviteSchema],
    default: undefined // if you want to unset []
},
invite: {
    bidderId: { type: String, lowercase: true, trim: true },
    username: { type: String, lowercase: true, trim: true }
}
(See https://mongoosejs.com/docs/schematypes.html#arrays)
Try caching to prevent a reload (see https://github.com/isaacs/node-lru-cache and "How to stop MongoDB from reloading data every time I refresh a page?").
Try a projection to exclude _id from the query output with _id: 0. See https://docs.mongodb.com/v3.2/tutorial/project-fields-from-query-results/#return-all-but-the-excluded-field
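For instance, a minimal sketch of such a projection with Mongoose's select(), using the Post model and field names from the question:
// Return inviteGroup without _id fields in the result
Post.findById(req.params.id)
    .select({ _id: 0, 'inviteGroup._id': 0 })
    .exec((err, doc) => {
        if (err) {
            return console.log(err);
        }
        console.log(doc.inviteGroup);
    });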
Try this. Hope it will help you.
Post.findByIdAndUpdate(
    { _id: req.params.id },
    {
        inviteGroup: invites
    },
    { select: { _id: 0 } }, // sets the document fields to return
    function(err, docs) {
        if (err) {
            console.log(err);
            res.json(err);
        } else {
            console.log(docs);
            return true;
        }
    }
);
If I understood your question right, the problem is the lack of an update operator in the update call.
When you use something like:
Post.findByIdAndUpdate(
{ _id: req.params.id },
{
inviteGroup: invites // <-- Update statement
},
function(err, docs) {
//...
}
);
It will replace the full value of the inviteGroup field.
In order to add an item to an existing array in the database, you will need to use the $push or $addToSet operator, along with the $each operator.
The $push and $addToSet operators only add/append one item at a time, so $each is necessary to handle every item present in the invites array. In the following examples I will include it, because I believe that is what you will need. But please take the time to read the linked documentation of every operator so you can find more samples.
The $push operator appends a specified value to an array, without checking whether the value being added already exists in the field. As in:
//document on mongodb, before the update
// { _id : "1", inviteGroup : [] }
//Invites from the request
// invites = [ { bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"} ];
//update method
Post.findByIdAndUpdate(
{ _id: req.params.id }, //req.params.id = "1"
{ $push : { inviteGroup: { $each : invites } } },
function(err, docs) {
//...
}
);
//document on mongodb, after the update
/*
{
_id : "1",
inviteGroup : [ { bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"} ]
}
*/
If you call the update method again with the same values:
Post.findByIdAndUpdate(
{ _id: req.params.id }, //req.params.id = "1"
{ $push : { inviteGroup: { $each : invites } } },
function(err, docs) { }
);
// the end document will be like:
/*
{
_id : "1",
inviteGroup : [
{ bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"},
{ bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"}
]
}
*/
In the same way, the $addToSet operator adds a value to an array unless the value is already present, in which case $addToSet does nothing to that array. Like:
//document on mongodb, before the update
// { _id : "1", inviteGroup : [] }
//Invites from the request
// invites = [ { bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"} ];
//update method
Post.findByIdAndUpdate(
{ _id: req.params.id }, //req.params.id = "1"
{ $addToSet : { inviteGroup: { $each : invites } } },
function(err, docs) {
//...
}
);
//document on mongodb, after the update
/*
{
_id : "1",
inviteGroup : [ { bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"} ]
}
*/
If you call the update method again with the same values:
Post.findByIdAndUpdate(
{ _id: req.params.id }, //req.params.id = "1"
{ $addToSet : { inviteGroup: { $each : invites } } },
function(err, docs) { }
);
//the end document will be the same because the same value was already on the list:
/*
{
_id : "1",
inviteGroup : [ { bidderId:"5e2350c7f88cfb331c4f67de", username:"artist1"} ]
}
*/
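One extra note: by default findByIdAndUpdate returns the document as it was before the update, so if you want the freshly appended invites back you can pass { new: true }. A minimal sketch:
Post.findByIdAndUpdate(
    req.params.id,
    { $addToSet: { inviteGroup: { $each: invites } } },
    { new: true }, // return the updated document instead of the old one
    function(err, updatedDoc) {
        if (err) {
            return res.json(err);
        }
        console.log(updatedDoc.inviteGroup);
    }
);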
Well, I hope that was what you were looking for. =]

Querying nested object using $find by applying conditions in Mongodb

This is my JSON object:
{
    "account_id" : "1",
    "sections" : [
        {
            "name" : "sec1",
            "label" : {
                "label1" : "text1",
                "label2" : "text2"
            }
        },
        {
            "name" : "sec2",
            "label" : {
                "label3" : "text3",
                "label4" : "text4",
                "label5" : "text5"
            }
        }
    ]
}
So in this JSON I want to query the label object where the section name is "sec1". I have used the code below, but it didn't work.
var getData = (db, query) => {
    return db
        .collection(TABLE_NAME)
        .find(query, { account_id: { sections: { label: 1 } } })
        .toArrayAsync();
};

var dataList = (db, event) => {
    let dataQuery = {
        account_id: id,
        'sections.name': event.params.section
    };
    return getData(db, dataQuery);
};

module.exports.getData = (event, cb) => {
    return using(connectDatabase(), db => {
        return dataList(db, event);
    }).then(data => cb(null, responseObj(data, 200)), err =>
        cb(responseObj(err, 500)));
};
Could someone kindly help me? Thanks in advance.
Try something like this. With $project we can selectively remove or retain fields, reassign existing field values, and derive entirely new values. After projecting the labels and the name, do a $match to extract the document by name. One thing to notice is that $project automatically keeps the document's _id unless you exclude it.
var dataList = (db, event) => {
    return db
        .collection(TABLE_NAME)
        .aggregate([
            {
                $match: { account_id: your_id }
            },
            {
                $unwind: '$sections'
            },
            {
                $project: { labels: '$sections.label', name: '$sections.name' }
            },
            {
                $match: { name: section_name }
            }
        ]).toArray();
};
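Applied to the sample document with section_name = "sec1", the pipeline above should return roughly the following (a sketch derived from the question's data; the _id is carried through because $project keeps it by default):
[
    {
        "_id" : ObjectId("..."),
        "labels" : {
            "label1" : "text1",
            "label2" : "text2"
        },
        "name" : "sec1"
    }
]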
You have to use the aggregate method with a $unwind stage to find an item in an array of objects.
var dataList = (db, event) => {
    return db
        .collection(TABLE_NAME)
        .aggregate([
            {
                $match: {
                    account_id: id,
                }
            },
            { $unwind: "$sections" },
            {
                $match: {
                    'sections.name': event.params.section
                }
            }
        ])
        .toArrayAsync();
};
Result:
[{
"account_id": "1",
"sections": {
"name": "sec2",
"label": {
"label3": "text3",
"label4": "text4",
"label5": "text5"
}
}
}]

MongoDB and Sails aggregate don't work with Node.js

I'm using MongoDB with the Sails framework. Production.find({}) works normally,
but Production.aggregate([]) returns an error:
Production.aggregate() is not a function
module.exports = {
    list: function(req, res) {
        Production.aggregate([{
            $project: {
                data: { $substr: ["$pt", 0, 10] },
                prodTempo: { $substr: ["$sis", 0, 10] }
            }
        }])
        .exec(function(err, collection) {
            if (err) {
                res.send(500, { error: "DataBase Error" });
            }
            res.view('list', { producao: collection });
        });
    }
};
As of Sails v1.0 the .native() method is deprecated in favor of getDatastore().manager.
https://sailsjs.com/documentation/reference/waterline-orm/models/native
Due to a bug in the current version of sails-mongo (v1.0.1), which doesn't support the newly required cursor method, I've actually switched to using Mongo views to manage aggregate queries.
The pattern below is "supposed" to work but currently returns no results, because toArray() on the result of aggregate() is not properly supported: it returns an AggregateCursor, which does not support the toArray() method.
WHAT I ENDED UP DOING
const myView = sails.getDatastore().manager.collection("view_name");
myView.find({...match/filter criteria...}).toArray((err, results) => {
if (err) {
// handle error 2
}
// Do something with your results
});
The entire aggregate query I put into the MongoDB view, and I added additional columns to support filter/match capabilities as needed. The only portion of the match I did not place into Mongo is the dynamic fields, which I use above in the find() method. That's why you need the additional fields, since find() can only query the columns exposed by the view, not the underlying model.
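For illustration, such a view could be created once in the mongo shell along these lines (a sketch with assumed names; the pipeline would hold whatever aggregate stages were previously run in code):
db.createView(
    "view_name",      // the view the Sails code queries via manager.collection("view_name")
    "production",     // assumed source collection
    [
        {
            $project: {
                data: { $substr: ["$pt", 0, 10] },
                prodTempo: { $substr: ["$sis", 0, 10] }
            }
        }
    ]
);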
WHAT SHOULD HAVE WORKED
So the pattern for aggregate would now be as follows:
const aggregateArray = [
{
$project: {
data: { $substr: ['$pt', 0, 10] },
prodTempo: { $substr: ['$sis', 0, 10] }
}
}
];
sails.getDatastore('name of datastore').manager.collection('collection name')
.aggregate(aggregateArray)
.toArray((err, results) => {
if (err) {
// handle error 2
}
// Do something with your results
});
For aggregations you need to call the native function first. Then it looks like this:
const aggregateArray = [
{
$project: {
data: { $substr: ['$pt', 0, 10] },
prodTempo: { $substr: ['$sis', 0, 10] }
}
}
];
Production.native(function(err, prodCollection) {
if (err) {
// handle error 1
} else {
prodCollection
.aggregate(aggregateArray)
.toArray((err, results) => {
if (err) {
// handle error 2
}
// Do something with your results
});
}
});
const regexForFileName = '.*' + fileName + '.*';
var db = model.getDatastore().manager;
var rawMongoCollection = db.collection(model.tableName);
rawMongoCollection.aggregate(
[
{
$project : {
"_id" : 0,
"fileId" : 1,
"fileName" : 1,
"fileSize" : 1,
"createdTime" : 1
}
},
{
$match : {
"fileName" : {
$regex: regexForFileName,
$options: 'i'
}
}
},
{
$sort: {
"createdTime" : -1
}
},
{
$skip: pageNumber * numberOfResultsPerPage
},
{
$limit: numberOfResultsPerPage
}
]
).toArray((err, results) => {
if (err) {
console.log(err);
}
console.log("results: " + JSON.stringify(results));
});

Mongo + check multiple fields existing

I am working with Mongo and Node.js.
I have this array list:
var checkFields = ["field1","field2","field3"];
I am trying to get the count of records that have any of the fields in the array list and whose user field is equal to "admin".
Sample data:
[
{
"checkFields": {
"field1": "00124b3a5c31",
"user": "admin"
}
},
{
"checkFields": {
"field2": "00124b3a5c31",
"user": "admin"
}
},
{
"checkFields": {
"field1": "00124b3a5c31",
"user": "regular"
}
}
]
Query:
db.collection_name.find(
{"checkFields.user" : "admin"}
{ "checkFields.field1": { $exists: true} }
)
Expected Result:
The result should be the count of documents matching any of the fields in the array list (checkFields) for the admin user.
Building up an $or array for the list of field-existence checks is the right approach, but assuming you're on a current Node.js version you can simplify the query creation to:
var checkFieldsLists = checkFields.map(field => ({
['checkFields.' + field]: {$exists: true}
}));
var query = {
$or: checkFieldsLists,
'checkFields.user': 'admin'
}
This removes the superfluous $or for the "user is admin" check which lets you also remove the outer $and, so that the generated query is:
{ '$or':
[ { 'checkFields.field1': { '$exists': true } },
{ 'checkFields.field2': { '$exists': true } },
{ 'checkFields.field3': { '$exists': true } } ],
'checkFields.user': 'admin' }
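Since the question asks for a count, the generated query can then be run with countDocuments (a sketch assuming a connected Node.js driver db handle and a collection named collection_name):
db.collection('collection_name').countDocuments(query, (err, count) => {
    if (err) {
        return console.error(err);
    }
    console.log('Matching documents:', count);
});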
I tried the following code. It's working, but I don't know if it's a good solution performance-wise. If anyone has a better answer, please post it.
var checkFields = ["field1", "field2", "field3"];
var checkFieldsLists = [];
for (var i = 0; i < checkFields.length; i++) {
var jsObj = {};
jsObj['checkFields.' + checkFields[i]] = {};
jsObj['checkFields.' + checkFields[i]].$exists = true;
checkFieldsLists.push(jsObj);
}
var query = {
"$and" : [{
"$or" : checkFieldsLists
}, {
"$or" : [{
"checkFields.user" : "admin"
}]
}]
};
console.log(JSON.stringify(query));
//console log will return
/*
{"$and":[{
"$or" : [{
"checkFields.field1" : {
"$exists" : true
}
}, {
"checkFields.field2" : {
"$exists" : true
}
}, {
"checkFields.field3" : {
"$exists" : true
}
}]
}, {
"$or" : [{
"checkFields.user" : "admin"
}]
}]
}
*/
collection.find(query);
Here is a solution using an aggregate query.
var Db = require('mongodb').Db, Server = require('mongodb').Server, assert = require('assert');
var db = new Db('test', new Server('localhost', 27017)); // first argument is the database name, not the host
var checkFields = ["field1", "field2", "field3"];
var checkFieldsLists = [];
for (var i = 0; i < checkFields.length; i++) {
var jsObj = {};
jsObj['checkFields.' + checkFields[i]] = {};
jsObj['checkFields.' + checkFields[i]].$exists = true;
checkFieldsLists.push(jsObj);
}
var query = {
"$and" : [{
"$or" : checkFieldsLists
}, {
"$or" : [{
"checkFields.user" : "admin"
}]
}]
};
var matchQuery = {
"$match" : {
"checkFields.user" : "admin",
"$or" : checkFieldsLists
}
};
var groupQuery = {
$group : {
_id : null,
count : {
$sum : 1
}
}
};
var aggregateCheckFields = function(db, callback) {
console.log("Match query is ====>" + JSON.stringify(matchQuery));
console.log("Group query is ====>" + JSON.stringify(matchQuery));
db.collection('checkfields').aggregate([ matchQuery, groupQuery ]).toArray(
function(err, result) {
assert.equal(err, null);
console.log("Result is ===>" + JSON.stringify(result));
if (result.length > 0) {
console.log("Count is ===>" + result[0].count);
}
callback(result);
});
};
db.open(function(err, db) {
aggregateCheckFields(db, function() {
db.close();
});
});
Output:-
Result is ===>[{"_id":null,"count":3}]
Count is ===>3
