MongoError unknown group operator - node.js

I am using the MongoDB driver for Node.js and I am running into a problem with aggregation.
The error is
{"name":"MongoError","message":"unknown group operator '_id'","ok":0,"errmsg":"unknown group operator '_id'","code":15952}
for the script below:
MongoClient.connect(url, function (err, db) {
    if (err) {
        console.log('Unable to connect to the mongoDB server. Error:', err);
        return;
    }
    var collName = "order";
    var whereParas = {};
    var groupParas = { "_id": null, total: { $sum: "$Value" } };
    var havingParas = {};
    db.collection(collName).aggregate([
        { $match: whereParas },
        {
            $group: { groupParas }
        },
        { $match: havingParas }
    ]).toArray(function (err, result) {
        console.log("err");
        console.log(err);
        console.log("result");
        console.log(result);
    });
});
The data is a set of order documents, each with a Value field. The desired output is the sum of the Values.
In SQL, I would have written:
SELECT SUM(Value) FROM order

The $group stage should be { $group: groupParas } instead of { $group: { groupParas } }. The extra braces wrap groupParas in another document, so MongoDB tries to interpret the nested document within the object as an accumulator expression and rejects _id as an unknown group operator, hence the error you are getting.
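
Applied to the script above, a minimal corrected sketch (same variable names as the question; the empty $match stages are only placeholders):

var groupParas = { _id: null, total: { $sum: "$Value" } };

db.collection("order").aggregate([
    { $match: {} },          // whereParas
    { $group: groupParas },  // pass the object directly, do not wrap it in another document
    { $match: {} }           // havingParas
]).toArray(function (err, result) {
    if (err) return console.log(err);
    console.log(result);     // e.g. [ { _id: null, total: <sum of Value> } ]
});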

Related

mongodb changestream “pipeline” not working nodejs

I have the following change stream, but it does not work: 'changed' is never logged when I update a document using MongoDB Compass.
var pipeline = [
    { $match: { _id: ObjectId(id) } }
];
try {
    const collection = client.db("mydb").collection("shop");
    const changeStream = collection.watch(pipeline);
    changeStream.on('change', (next) => {
        //console.log(next);
        console.log('changed');
    }, err => {
        console.log(err);
    });
} catch (err) {
    console.log(err);
}
Is the problem that you don't normally update the _id of a document in a collection? If, for some reason, you are updating the _id, then maybe the problem is in how you're referencing your $match. This works for me:
const pipeline01 = [
    { $match: { 'updateDescription.updatedFields.fieldIamInterestedIn': { $ne: undefined } } },
    { $project: { 'fullDocument._id': 1, 'fullDocument.anotherFieldIamInterestedIn': 1 } },
];
theCollectionIamWatching.watch(pipeline01, { fullDocument: 'updateLookup' }).on('change', async (data) => {
    // do the thing I want to do using data.fullDocument
});
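
If the goal in the question was simply to watch changes to one specific document, note that in a change event the document's _id lives under documentKey._id rather than at the top level. A sketch of that variant (not taken from the answer above, using the same ObjectId and client as the question):

const pipeline = [
    // match on the event's documentKey, not a top-level _id
    { $match: { 'documentKey._id': ObjectId(id) } }
];

client.db('mydb').collection('shop')
    .watch(pipeline, { fullDocument: 'updateLookup' })
    .on('change', (next) => {
        console.log('changed', next.operationType);
    });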

MongoDB average aggregation using NodeJS

Good afternoon everyone.
I have an array inside a MongoDB document. I want to read the Volume field inside it and compute an average of Volume across the whole collection.
Document inside MongoDB:
As you can see, I have an array inside an array. There will be many documents of the same shape, and I need to compute an average over every document on the Volume parameter.
Problematic area of the code (this is not working, just showing my attempt):
function simplePipeline(db, callback) {
    const collection = db.collection('ADABTC');
    collection.aggregate(
        [
            { '$group': { 'Volume': { '$avg': 'Array.Volume' } } }
        ],
        function (err, cursor) {
            assert.equal(err, null);
            cursor.toArray(function (err, documents) {
                console.log(documents);
                callback(documents);
            });
        }
    );
}
Just in case: I connect to the DB with username:password (shown here just as an example).
How should I specify this in Node.js?
NodeJS Full Code
const MongoClient = require('mongodb').MongoClient;
const assert = require('assert');

// Connection URL
const url = 'mongodb://username:password#ip.adress.port/<dbname>?retryWrites=true&w=majority';

// Database Name
const dbName = 'Crypto';

// Create a new MongoClient
const client = new MongoClient(url, { useUnifiedTopology: true });

// Use connect method to connect to the Server
client.connect(function (err, client) {
    assert.equal(null, err);
    console.log("Connected correctly to server");
    const db = client.db(dbName);
    simplePipeline(db, function () {
        client.close();
    });
});

function simplePipeline(db, callback) {
    const collection = db.collection('ADABTC');
    collection.aggregate(
        [
            { '$group': { 'Volume': { '$avg': 'Array.Volume' } } }
        ],
        function (err, cursor) {
            assert.equal(err, null);
            cursor.toArray(function (err, documents) {
                console.log(documents);
                callback(documents);
            });
        }
    );
}
Your $group syntax is just wrong. From the documentation:
Groups input documents by the specified _id expression and for each distinct grouping, outputs a document.
A $group stage must have an _id field specified. To group the entire collection, just put any constant there:
{
    '$group': {
        _id: null,
        'Volume': {
            '$avg': '$Array.Volume'
        }
    }
}
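
Putting that stage back into the question's simplePipeline, a minimal sketch (collection and field names as in the question) would be:

function simplePipeline(db, callback) {
    const collection = db.collection('ADABTC');
    collection.aggregate(
        [
            // _id: null groups the whole collection into a single output document
            { '$group': { _id: null, 'Volume': { '$avg': '$Array.Volume' } } }
        ],
        function (err, cursor) {
            assert.equal(err, null);
            cursor.toArray(function (err, documents) {
                console.log(documents); // e.g. [ { _id: null, Volume: <average> } ]
                callback(documents);
            });
        }
    );
}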

How to get all data matched each record from mongodb using mongoose and nodejs

I tried to get the value of product_name from MongoDB using Mongoose, but I don't know how to do it.
My DB Data collection:
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe805"),
    product_name: "Test1"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe806"),
    product_name: "Test2"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe807"),
    product_name: "Test3"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe808"),
    product_name: "Test4"
},
{
    _id: ObjectId("5ecea02ebb6f3c19e86fe809"),
    product_name: "Test5"
}
data.controller.js:
module.exports.getData = (req, res, next) => {
    var tableCate = mongoose.model("Product");
    tableCate.find({ product_name }, function (err, docs) {
        if (err) {
            console.log(err);
            return;
        } else {
            console.log(docs); // output should be Test1,Test2,Test3,Test4,Test5
        }
    });
};
db.collection.find returns a Cursor, which is a pointer to the result set of a query. To access the results you can use db.collection.find({}).toArray() to get an array of documents, or iterate the cursor directly:
.forEach(function (item) {
    // print or do whatever you want with each item
})
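
Since the question itself uses Mongoose, where Model.find with a callback already hands back an array of documents (no toArray needed), a minimal sketch of pulling out just the product_name values (model and field names taken from the question) would be:

module.exports.getData = (req, res, next) => {
    var tableCate = mongoose.model("Product");
    // an empty filter {} matches every document; the projection keeps only product_name
    tableCate.find({}, 'product_name', function (err, docs) {
        if (err) {
            console.log(err);
            return;
        }
        var names = docs.map(function (doc) { return doc.product_name; });
        console.log(names); // [ 'Test1', 'Test2', 'Test3', 'Test4', 'Test5' ]
    });
};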

Nodejs AggregationCursor.each() and rewind() bug

I need to perform a complicated aggregation on a MongoDB collection, and I need to go through the results twice. After the first pass, I call rewind() on the cursor and then try to perform the second pass. This is where I get the error. What is strange is that if I replace each() with a couple of next() calls, everything works as expected.
Is it a bug in each() that I should submit to the MongoDB bug tracker, or is it some sort of error on my part?
A much simplified sample that still reproduces the error:
var MongoClient = require('mongodb').MongoClient;
var ObjectId = require('mongodb').ObjectId;

MongoClient.connect('mongodb://localhost:27017/test', function (err, db) {
    // Create a collection
    let collection = db.collection('aggregation_each_example');
    // collection.drop();

    // Some docs for insertion
    function createData(callback) {
        let docs = [
            {
                "oid": new ObjectId("59883f2e7d8c6325200b81e4"),
                "tod": new Date("2017-08-07T10:21:34.877Z")
            },
            {
                "veryImportanta": "newOne",
                "oid": new ObjectId("59883f2e7d8c6325200b81e4")
            }
        ];
        // Insert the docs
        collection.insertMany(docs, callback);
    }

    function getData(callback) {
        return function (err) {
            if (err) {
                console.error(err);
            } else {
                let cursor = collection.aggregate([
                    {
                        "$match": {
                            "oid": new ObjectId("59883f2e7d8c6325200b81e4"),
                            "tod": { "$exists": 0 }
                        }
                    }
                ], { cursor: { batchSize: 1 } });
                let count = 0;
                cursor.each(function (err, doc) {
                    if (doc) {
                        console.log(doc);
                        count++;
                    } else {
                        console.log(cursor.isClosed());
                        cursor.rewind();
                        console.log(cursor.isClosed());
                        callback(count, cursor);
                    }
                });
            }
        };
    }

    createData(getData(function (count, cursor) {
        console.log("Count: " + count);
        console.log("Cursor is closed: " + cursor.isClosed());
        cursor.next(function (err, doc) {
            if (err) console.log(err);
            else console.log(doc);
            // db.dropDatabase();
            db.close();
        });
    }));
});
Output:
{ _id: 598851ad48a1841c18b50bcf,
veryImportanta: 'newOne',
oid: 59883f2e7d8c6325200b81e4 }
true
false
Count: 1
Cursor is closed: false
{ MongoError: Cursor is closed
at Function.MongoError.create (error.js:31:11)
at nextObject (node_modules\mongodb\lib\cursor.js:669:112)
at AggregationCursor.Cursor.next (node_modules\mongodb\lib\cursor.js:269:12)
at error.js:61:12
at error.js:50:13
at handleCallback (node_modules\mongodb\lib\utils.js:120:56)
at node_modules\mongodb\lib\cursor.js:748:16
at handleCallback (node_modules\mongodb\lib\utils.js:120:56)
at node_modules\mongodb\lib\cursor.js:682:5
at handleCallback (node_modules\mongodb-core\lib\cursor.js:171:5) name: 'MongoError', message: 'Cursor is closed', driver: true }
Environment:
nodejs: v6.6.0
mongodb: 2.2.30
os: windows 10
mongodb engine: 3.4
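
For comparison, a sketch of the next()-based loop that the question says works as expected (same cursor as above; the helper name is mine, not from the original):

// Drain the cursor with next() instead of each()
function countWithNext(cursor, callback) {
    let count = 0;
    (function step() {
        cursor.next(function (err, doc) {
            if (err) return callback(err);
            if (doc) {
                count++;
                return step();
            }
            // end of results: rewind and hand the cursor back for the second pass
            cursor.rewind();
            callback(null, count, cursor);
        });
    })();
}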

How to Improve the Performance of mongodb long running Query

I am new to MongoDB. I am trying to update fields on each of around 10 to 15k records. While the update is running, the query blocks the whole database and no other read or write operations go through until the execution completes. Is there any way to improve the performance of this kind of query?
Here is my code:
var ExistingData = [
    {
        "isActive": true,
        "barcode": "8908001921015",
        "mrp": 2000
    },
    {
        "isActive": true,
        "barcode": "7808001921019",
        "mrp": 1000
    }
    // ....15k records in total
];
var updatedResult = [];
async.forEach(ExistingData, function (item, innerCallback) {
    exports.populateData(item, function (err, populatedResult) {
        if (err) {
            innerCallback(err);
            return;
        }
        if (populatedResult == true)
            totalRecordsUpdated++;
        else
            totalIgnoredRecords++;
        innerCallback();
    });
}, function (err) {
    console.log("FinalDone");
    var h1 = {};
    h1['totalRecordsUpdated'] = totalRecordsUpdated;
    h1['totalIgnoredRecords'] = totalIgnoredRecords;
    updatedResult.push(h1);
    updateCheck(null, updatedResult);
});
exports.populateData = function (item, mainCallback) {
    var updated = false;
    async.parallel([
        function (callback1) {
            Test.update({
                $and: [
                    { 'barcode': item['barcode'] },
                    { 'mrp': { $lt: parseInt(item['mrp']) } }
                ]
            }, { $set: { 'mrp': parseInt(item['mrp']) } }, function (err, result) {
                if (err) {
                    console.log(err);
                    callback1();
                } else {
                    if (result['nModified'] == 1) {
                        console.log("Its Updated");
                        console.log(item);
                        updated = true;
                        callback1();
                    } else {
                        callback1();
                    }
                }
            });
        }
    ], function done(err) {
        mainCallback(null, updated);
    });
};
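
One common way to cut down the per-record round trips (not something from the original post, just a sketch) is to batch the same updates into a single bulkWrite call. Assuming Test is a Mongoose model, as the update call suggests:

// Build one updateOne operation per record and send them as a single batch
var ops = ExistingData.map(function (item) {
    return {
        updateOne: {
            filter: { barcode: item.barcode, mrp: { $lt: parseInt(item.mrp) } },
            update: { $set: { mrp: parseInt(item.mrp) } }
        }
    };
});

Test.bulkWrite(ops, { ordered: false })
    .then(function (result) {
        console.log(result); // counts of matched/modified documents
    })
    .catch(function (err) {
        console.log(err);
    });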
