MongoDB and Sails aggregate don't work with Node.js

I'm using MongoDB with the Sails framework. Production.find({}) works normally,
but Production.aggregate([...]) returns an error:
Production.aggregate() is not a function
module.exports = {
  list: function(req, res) {
    Production.aggregate([{
      $project: {
        data: { $substr: ["$pt", 0, 10] },
        prodTempo: { $substr: ["$sis", 0, 10] }
      }
    }])
    .exec(function(err, collection) {
      if (err) {
        return res.send(500, { error: "DataBase Error" });
      }
      res.view('list', { producao: collection });
    });
  }
};

As of Sails v1.0 the .native() method is deprecated in favor of getDatastore().manager.
https://sailsjs.com/documentation/reference/waterline-orm/models/native
Due to a bug in the current version of sails-mongo (v1.0.1), which doesn't support the newly required cursor method, I've actually switched to using MongoDB views to manage aggregate queries.
The pattern below is "supposed" to work but currently returns no results, because toArray() on the result of aggregate() is not properly supported yet: aggregate() returns an AggregateCursor which does not support the toArray() method.
WHAT I ENDED UP DOING
const myView = sails.getDatastore().manager.collection("view_name");
myView.find({...match/filter criteria...}).toArray((err, results) => {
  if (err) {
    // handle error
  }
  // Do something with your results
});
I moved the entire aggregate query into a MongoDB view and added additional columns to support filter/match capabilities as needed. The only portion of the "match" I did not place into the view is the dynamic fields, which I use above in the find() method. That's why you need the additional columns: find() can only query the columns exposed by the view, not the underlying model.
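For reference, creating such a view in the Mongo shell might look like this minimal sketch (the view name, the source collection name, and the extra pt column are assumptions for illustration):
db.createView("view_name", "production", [
  {
    $project: {
      data: { $substr: ["$pt", 0, 10] },
      prodTempo: { $substr: ["$sis", 0, 10] },
      pt: 1  // expose an extra column so find() can match/filter on it later
    }
  }
]);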
WHAT SHOULD HAVE WORKED
So the pattern for aggregate would now be as follows:
const aggregateArray = [
  {
    $project: {
      data: { $substr: ['$pt', 0, 10] },
      prodTempo: { $substr: ['$sis', 0, 10] }
    }
  }
];

sails.getDatastore('name of datastore').manager.collection('collection name')
  .aggregate(aggregateArray)
  .toArray((err, results) => {
    if (err) {
      // handle error
    }
    // Do something with your results
  });

For aggregations you need to call the native function first. Then it looks like this:
const aggregateArray = [
  {
    $project: {
      data: { $substr: ['$pt', 0, 10] },
      prodTempo: { $substr: ['$sis', 0, 10] }
    }
  }
];

Production.native(function(err, prodCollection) {
  if (err) {
    // handle error 1
  } else {
    prodCollection
      .aggregate(aggregateArray)
      .toArray((err, results) => {
        if (err) {
          // handle error 2
        }
        // Do something with your results
      });
  }
});

// Build a case-insensitive "contains" regex from the requested file name
const regexForFileName = '.*' + fileName + '.*';

// Get the raw MongoDB collection behind the Waterline model
var db = model.getDatastore().manager;
var rawMongoCollection = db.collection(model.tableName);

rawMongoCollection.aggregate(
  [
    {
      $project: {
        "_id": 0,
        "fileId": 1,
        "fileName": 1,
        "fileSize": 1,
        "createdTime": 1
      }
    },
    {
      $match: {
        "fileName": {
          $regex: regexForFileName,
          $options: 'i'
        }
      }
    },
    {
      $sort: {
        "createdTime": -1
      }
    },
    // Paginate the results
    {
      $skip: pageNumber * numberOfResultsPerPage
    },
    {
      $limit: numberOfResultsPerPage
    }
  ]
).toArray((err, results) => {
  if (err) {
    console.log(err);
  }
  console.log("results: " + JSON.stringify(results));
});

Related

How to update an object of an element in an array in MongoDB?

This is the structure I have. I want to update a nested array element if an object key matches: for example, I want to match grnno: "10431000" and update the other keys of that object, like vehicle_no, invoice_no, etc.
{
  "_id" : ObjectId("5f128b8aeb27bb63057e3887"),
  "requirements" : [
    {
      "grns" : [
        {
          "invoice_no" : "123",
          "vehicle_no" : "345",
          "req_id" : "5f128c6deb27bb63057e388a",
          "grnno" : "10431000"
        },
        {
          "invoice_no" : "abc",
          "vehicle_no" : "def",
          "req_id" : "5f128c6deb27bb63057e388a",
          "grnno" : "10431001"
        }
      ]
    }
  ]
}
I have tried this code:
db.po_grn.update({
  "requirements.grns.grnno": "10431001"
}, {
  $set: {
    "requirements.$.grns": { "invoice_no": "test", vehicle_no: "5455" }
  }
})
But this changes the structure I have, like this:
"requirements" : [
{
"grns" : {
"invoice_no" : "test",
"vehicle_no":"5455"
},
"req_id" : ObjectId("5f128b8aeb27bb63057e3886")
}
],
The grns key should remain an array, and the update should only affect the particular object whose "grnno" matches. Please help me out. Thanks.
==Edit==
var grnno = req.body.grnno;
db.po_grn.find({
  "requirements.grns.grnno": grnno
}).toArray(function(err, po_grn) {
  console.log("po_grn", po_grn);
  if (po_grn.length > 0) {
    console.log("data.grn.grnno ", grnno);
    var query = {
      requirements: {
        $elemMatch: {
          "grns.grnno": grnno
        }
      }
    };
    var update = {
      $set: {
        'requirements.$[].grns.$[inner].invoice_no': data.invoice_no,
        'requirements.$[].grns.$[inner].vehicle_no': data.vehicle_no,
      }
    };
    var options = {
      arrayFilters: [
        { "inner.grnno": grnno }
      ]
    };
    db.po_grn.update(query, update, options,
      function(er, grn) {
        console.log("grn", grn, "er", er);
        res.send({
          status: 1,
          message: "Grn updated successfully"
        });
      }
    );
  } else {
    res.send({
      status: 0,
      message: "Grn not found "
    });
  }
})
Use a combination of the $[] all-positional operator and array filters to update the inner nested document.
var query = {
  requirements: {
    $elemMatch: {
      "grns.grnno": "10431001"
    }
  }
};
var update = {
  $set: {
    'requirements.$[].grns.$[inner].invoice_no': "test",
    'requirements.$[].grns.$[inner].vehicle_no': "5455",
  }
};
var options = {
  arrayFilters: [
    { "inner.grnno": "10431001" }
  ]
};
db.collection.update(query, update, options);
Update:
Node.js native MongoDB driver code attached, which works fine:
const { MongoClient } = require('mongodb');
const url = "mongodb://localhost:27017/";

MongoClient.connect(url, function(err, db) {
  if (err) {
    throw err;
  }
  const dbo = db.db("test");
  (async () => {
    const query = {
      requirements: {
        $elemMatch: {
          "grns.grnno": "10431001"
        }
      }
    };
    const update = {
      $set: {
        'requirements.$[].grns.$[inner].invoice_no': "test",
        'requirements.$[].grns.$[inner].vehicle_no': "5455",
      }
    };
    const options = {
      arrayFilters: [
        { "inner.grnno": "10431001" }
      ],
      multi: true
    };
    try {
      const updateResult = await dbo.collection("collection").update(query, update, options);
    } catch (err) {
      console.error(err);
    }
    db.close();
  })();
});
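Side note: collection.update() is deprecated in newer versions of the Node.js driver. A minimal sketch of the same call using updateMany(), reusing the query and update objects above (the collection name remains a placeholder), might look like:
const updateResult = await dbo.collection("collection").updateMany(query, update, {
  arrayFilters: [
    { "inner.grnno": "10431001" }
  ]
});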

Mongoose - replace all array elements

I want to replace all of the array elements in the 'prices' field, shown below:
{
  "name": "My customer name",
  "taxCode": 123456,
  "prices": [
    {
      "name": "Chocolate",
      "unitPrice": 10
    },
    {
      "name": "Cookie",
      "unitPrice": 9
    }
  ]
}
The JSON used to change 'prices' is:
{
  "prices": [
    {
      "name": "Chocolate1",
      "unitPrice": 10
    },
    {
      "name": "Candy",
      "unitPrice": 5
    }
  ]
}
And here is my code to replace the 'prices' array:
router.route('/:obj/:id')
  .put((req, res) => {
    const PObj = require('../models/customer');
    PObj.findById(req.params.id, (err, doc) => {
      if (err) {
        console.log('Lookup error: ' + err);
        res.status(500).send('Error');
      } else if (doc) {
        doc.update({ $set: req.body }, (err, task) => {
          res.status(200).json(task);
        });
      } else {
        res.status(404).send('Something is wrong');
      }
    });
  });
The code executes without errors, but nothing changes in MongoDB. Please help me correct my code. Thanks!
If your req.body prints that prices array, then it has to be req.body.prices. Also, rather than fetching the document and then updating it (which is a two-step process), you can try this:
router.route("/:obj/:id").put((req, res) => {
  const PObj = require("../models/customer");
  PObj.findByIdAndUpdate(
    req.params.id, /** this 'req.params.id' has to be the `_id` value of the doc, in string format */
    /** internally mongoose will send this as { $set: { prices: req.body.prices }}, which replaces the `prices` array with the new array.
     * Just in case you wanted to push new values instead, you have to manually do { $push: { prices: { $each: req.body.prices } } } */
    { prices: req.body.prices },
    { new: true }, /** returns the updated doc; this option is not needed if you don't need the doc - by default it returns the old doc */
    (err, doc) => {
      if (err) {
        console.log("Lookup error: " + err);
        res.status(500).send("Error");
      } else if (doc) {
        res.status(200).json(doc);
      } else { /** `doc` will be null if no doc is found for the given id */
        res.status(404).send("Something is wrong");
      }
    }
  );
});
Ref : .findByIdAndUpdate()
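As a side note, if the goal were to append the incoming objects instead of replacing the whole array, a minimal sketch (assuming req.body.prices is an array, with the same callback handling as above) would use $push with $each:
PObj.findByIdAndUpdate(
  req.params.id,
  { $push: { prices: { $each: req.body.prices } } }, // append each incoming price object
  { new: true },
  (err, doc) => { /* same error/doc handling as above */ }
);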

Set Incremental Values from Array of Items

How do I update multiple documents in MongoDB and set the value of an element in increasing order?
I have the following documents:
{ "_id" : ObjectId("5b162a31dfaf342dc44c920d") }
{ "_id" : ObjectId("5b162a31dfaf342dc44c920f") }
{ "_id" : ObjectId("5b162a31dfaf342dc44c920c") }
How can I update all of the documents with a single query so that each document gets a new field called "order" with an increasing value, as below?
{ "_id" : ObjectId("5b162a31dfaf342dc44c920d"), "order": 1 }
{ "_id" : ObjectId("5b162a31dfaf342dc44c920f"), "order": 2 }
{ "_id" : ObjectId("5b162a31dfaf342dc44c920c"), "order": 3 }
Currently I am using the following approach to solve the problem:
for (let i = 0; i < req.body.id.length; i++) {
  const queryOpts = {
    _id: ObjectId(req.body.id[i])
  };
  const updateOpts = {
    $set: {
      'order': i + 1
    }
  };
  const dataRes = await req.db.collection('GalleryImage').updateOne(queryOpts, updateOpts);
  if (i === req.body.id.length - 1) {
    return commonHelper.sendResponseMessage(res, dataRes, {
      _id: req.body.id
    }, moduleConfig.message.updateGalleryOrder);
  }
}
Is there any better way to do this, so that it is not an expensive operation when there are a large number of documents?
Use bulkWrite() with Array.map() to construct the statement:
try {
  let response = await req.db.collection('GalleryImage').bulkWrite(
    req.body.id.map((_id, order) =>
      ({ updateOne: {
        filter: { _id: ObjectId(_id) },
        update: {
          $set: { order: order + 1 }
        }
      }})
    )
  );
} catch(e) {
  // deal with any errors
}
Array.map() has the "index" of the array element being processed as its second function argument, so simply use that to derive the order and set it on each statement.
Rather than writing to and waiting on the database n times, this only needs to happen "once".
There is no other way to get a "sequence" other than introducing it yourself, but at least we can do it with "one" write this way instead of several. Note also to "trap your possible errors" when using async/await syntax.
Example listing
const { MongoClient, ObjectID: ObjectId } = require('mongodb');
const uri = 'mongodb://localhost:27017';

const data = [
  "5b162a31dfaf342dc44c920d",
  "5b162a31dfaf342dc44c920f",
  "5b162a31dfaf342dc44c920c"
];

const log = data => console.log(JSON.stringify(data, undefined, 2));

(async function() {
  try {
    const client = await MongoClient.connect(uri);
    let db = client.db('test');

    // Set up
    await db.collection('gallery').deleteMany({});
    await db.collection('gallery').insertMany(
      data.map(_id => ({ _id: ObjectId(_id) }))
    );

    // Update with indexes
    let response = await db.collection('gallery').bulkWrite(
      data.map((_id, idx) =>
        ({
          updateOne: {
            filter: { _id: ObjectId(_id) },
            update: { $set: { order: idx + 1 } }
          }
        })
      )
    );
    log({ response });

    let items = await db.collection('gallery').find().toArray();
    log({ items });

    client.close();
  } catch(e) {
    console.error(e);
  } finally {
    process.exit();
  }
})()
And the output
{
  "response": {
    "ok": 1,
    "writeErrors": [],
    "writeConcernErrors": [],
    "insertedIds": [],
    "nInserted": 0,
    "nUpserted": 0,
    "nMatched": 3,
    "nModified": 3,
    "nRemoved": 0,
    "upserted": [],
    "lastOp": {
      "ts": "6563535160225038345",
      "t": 18
    }
  }
}
{
  "items": [
    {
      "_id": "5b162a31dfaf342dc44c920d",
      "order": 1
    },
    {
      "_id": "5b162a31dfaf342dc44c920f",
      "order": 2
    },
    {
      "_id": "5b162a31dfaf342dc44c920c",
      "order": 3
    }
  ]
}
This clearly shows nMatched: 3 and nModified: 3, just as expected.

Node Mongo - find multiple parameters

I'm trying to find out whether a session number already exists in a collection, to avoid duplicates. dadosORS.email and dadosORS.sessao (which is 3) come from a form. So when I do this:
mongoClient.collection('registosORS', function(err, collection) {
  collection.find({ email: { $eq: dadosORS.email } }, { sessao: { $eq: dadosORS.sessao } }).toArray(function(err, result) {
    try {
      console.log(result);
    } catch (err) {
      console.log(err);
    }
    if (result) {
      // callback(false)
      return
    } else {
I get result = undefined. If I change the query to
collection.find({ email: dadosORS.email }, { sessao: dadosORS.sessao }).toArray(function(err, result) {
it lists every occurrence of the email:
[ { _id: 5a37b4c3da53ff1e825f94b4, sessao: '1' },
{ _id: 5a37b4e6da53ff1e825f94b6, sessao: '1' },
{ _id: 5a37b57ce500ca1ea5522e22, sessao: '2' } ]
So, how can I see if the dadosORS.sessao for that dadosORS.email already exists?
Just do an AND query:
collection.find( { email : dadosORS.email, sessao : dadosORS.sessao } )
or can be expressed as
collection.find( { $and: [ { email : dadosORS.email }, { sessao : dadosORS.sessao } ] } )
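Applied to the code above, a minimal sketch (reusing the asker's variable names) for checking whether that email/sessao pair already exists:
collection.find({ email: dadosORS.email, sessao: dadosORS.sessao }).toArray(function(err, result) {
  if (err) {
    // handle the error
  } else if (result && result.length > 0) {
    // a registration with this email and session number already exists
  } else {
    // no duplicate found, safe to insert
  }
});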

MongoDB Node.js native driver silently swallows `bulkWrite` exception

The script below has a bug in the Mongo bulkWrite op syntax: $setOnInsert: { count: 0 } is unnecessary (it conflicts with the $inc on the same field), so Mongo throws the exception "Cannot update 'count' and 'count' at the same time".
The problem is that the Node.js driver doesn't seem to catch it; this script logs "Success!" to the console.
(async () => {
  let db = await require('mongodb').MongoClient.connect('mongodb://localhost:27017/myNewDb');
  let mongoOps = [{
    updateOne: {
      filter: { foo: "bar" },
      update: {
        $setOnInsert: { count: 0 },
        $inc: { count: 1 },
      },
      upsert: true,
    }
  }];
  try {
    await db.collection("myNewCollection").bulkWrite(mongoOps);
    console.log("Success!");
  } catch(e) {
    console.log("Failed:");
    console.log(e);
  }
})();
Examining db.system.profile.find({}) with db.setProfileLevel(2) we can see the exception:
{
  "op" : "update",
  "ns" : "myNewDb.myNewCollection",
  "query" : {
    "foo" : "bar"
  },
  "updateobj" : {
    "$setOnInsert" : {
      "count" : 0
    },
    "$inc" : {
      "count" : 1
    }
  },
  "keyUpdates" : 0,
  "writeConflicts" : 0,
  "numYield" : 0,
  "locks" : {
    "Global" : {
      "acquireCount" : {
        "r" : NumberLong(1),
        "w" : NumberLong(1)
      }
    },
    "Database" : {
      "acquireCount" : {
        "w" : NumberLong(1)
      }
    },
    "Collection" : {
      "acquireCount" : {
        "w" : NumberLong(1)
      }
    }
  },
  "exception" : "Cannot update 'count' and 'count' at the same time",
  "exceptionCode" : 16836,
  "millis" : 0,
  "execStats" : {},
  "ts" : ISODate("2017-10-12T01:57:03.008Z"),
  "client" : "127.0.0.1",
  "allUsers" : [],
  "user" : ""
}
Why is the driver swallowing errors like this? It definitely seems like a bug, but I figured I'd ask here first just to be sure.
So as commented, "It's a bug". Specifically the bug is right here:
// Return a Promise
return new this.s.promiseLibrary(function(resolve, reject) {
  bulkWrite(self, operations, options, function(err, r) {
    if(err && r == null) return reject(err);
    resolve(r);
  });
});
The problem is that the "response" (or r) in the callback being wrapped into a Promise is not actually null. So even though the error is present, the condition is not true, reject(err) is never called, and resolve(r) is sent instead; hence this is not treated as an exception.
Correcting this would need some triage, but you can either "work around" it by inspecting the writeErrors property in the response from the current bulkWrite() implementation (a sketch of that follows), or consider one of the alternatives below.
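A minimal sketch of that inspection workaround, inside the async function from the question (the exact shape of the response is an assumption here and may differ between driver versions):
let r = await db.collection("myNewCollection").bulkWrite(mongoOps);
// hypothetical inspection: check both the top-level property and the raw result
let writeErrors = r.writeErrors || (r.result && r.result.writeErrors) || [];
if (writeErrors.length > 0) {
  // the server rejected the operation even though no exception was thrown
  console.log("Failed:", writeErrors);
}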
Using the Bulk API methods directly:
const MongoClient = require('mongodb').MongoClient,
      uri = 'mongodb://localhost:27017/myNewDb';

(async () => {
  let db;
  try {
    db = await MongoClient.connect(uri);

    let bulk = db.collection('myNewCollection').initializeOrderedBulkOp();
    bulk.find({ foo: 'bar' }).upsert().updateOne({
      $setOnInsert: { count: 0 },
      $inc: { count: 0 }
    });

    let result = await bulk.execute();
    console.log(JSON.stringify(result, undefined, 2));
  } catch(e) {
    console.error(e);
  } finally {
    db.close();
  }
})();
This is perfectly fine, but of course it does not naturally fall back to the legacy API methods on server implementations without Bulk API support.
Wrapping the Promise Manually
(async () => {
  let db = await require('mongodb').MongoClient.connect('mongodb://localhost:27017/myNewDb');
  let mongoOps = [{
    updateOne: {
      filter: { foo: "bar" },
      update: {
        $setOnInsert: { count: 0 },
        $inc: { count: 1 },
      },
      upsert: true,
    }
  }];
  try {
    let result = await new Promise((resolve, reject) => {
      db.collection("myNewCollection").bulkWrite(mongoOps, (err, r) => {
        if (err) return reject(err);
        resolve(r);
      });
    });
    console.log(JSON.stringify(result, undefined, 2));
    console.log("Success!");
  } catch(e) {
    console.log("Failed:");
    console.log(e);
  }
})();
As noted, the problem lies within the implementation of how bulkWrite() is returned as a Promise, so instead you can use the callback form and do your own Promise wrapping in order to make it behave the way you expect.
Again, as noted, this needs a JIRA issue and triage to decide the correct way to handle these exceptions, but hopefully it gets resolved soon. In the meantime, pick an approach from above.
