Mongoose query to keep only 50 documents - node.js

How do I make a query in mongoose to find if a user has 50 documents then remove the oldest one and add in the new one, if not just add in the new one?
This is my attempt:
// Attempt: check whether the user already has 50 notifications; if so, evict
// the oldest before saving the new one.
// NOTE(review): findOne() yields a single document (or null), NOT an array —
// `results.length` is undefined here, so `undefined < 50` is always false.
Notifications.findOne({_id: userId}, function(err, results) {
if(err) throw err;
if(results.length < 50) {
saveNotification();
} else {
// NOTE(review): incomplete — the payload was never filled in, and the
// operator should be `$pull`, not `pull`.
Notifications.findByIdAndUpdate(userId, {pull: //WHAT GOES HERE),
function(err, newNotify) {
// NOTE(review): `error` is not defined in this scope; the callback
// parameter is named `err`.
if(error) throw error;
saveNotification();
});
}
});
// Persists a new Notification built from values in the enclosing scope
// (creatorId, newmsg, dateLastPosted). Invokes the outer callback `cb` on
// success; on failure the error is only logged.
function saveNotification() {
  var notification = new Notification({
    notifyToUserId: creatorId,
    notifyFromUserId: null,
    notifyMsg: newmsg,
    dateNotified: dateLastPosted
  });
  notification.save(function(err, results) {
    if (err) {
      console.log("Error creating notification " + err);
      return;
    }
    console.log('notification has been added to the db');
    cb(null, resultObject);
  });
}

As @Pio mentioned, I don't think you can do it in one query with your current schema. But if you have the chance to change the schema, you can use the fixed-size array pattern described in the following article: Limit Number of Elements in an Array after an Update
Basically you can keep the notifications of users in one document. Key of the document will be userId, and notifications will be stored in an array. Then the following query would achieve your goal.
// Atomically append one notification and cap the array at the 50 most recent:
// $each wraps the new element, $sort orders the array by dateNotified
// ascending, and the negative $slice keeps only the LAST (newest) 50 entries.
Notifications.update(
{ _id: userId },
{
$push: {
notifications: {
$each: [ notificationObject ], // insert your new notification
$sort: { dateNotified: 1 }, // sort by insertion date
$slice: -50 // retrieve the last 50 notifications.
}
}
}
)

I am not sure you can do it in one query, but you can
.count({user: yourUser}) and then, depending on the count, .insert(newDocument) or update the oldest one, so you won't need a remove + insert.

Capped collections do what you want by nature. If you define a capped collection with max: 50 it will only keep 50 documents and will overwrite old data when you insert more.
check
http://mongoosejs.com/docs/guide.html#capped
http://docs.mongodb.org/manual/core/capped-collections/
new Schema({..}, { capped: { size: 50, max: 50, autoIndexId: true } });
Remember that when working with capped collections you can only make in-place updates. Updating a whole document may change its size, which can cause other documents to be removed from the collection.

I ended up using cubbuk's answer and expanding it to add a notification if there is no array to start with along with upsert...
// Append the new notification to the user's array — creating the document on
// first use via upsert — and keep only the 50 most recent entries.
// FIX: with `{upsert: true}` and the default `new: false`, an upsert that
// INSERTS returns null, and the original null-branch then saved a SECOND
// Notification document, duplicating the one the upsert had just created.
// Requesting `new: true` returns the post-update (or freshly inserted)
// document, so no manual fallback save is needed.
Notification.findOneAndUpdate({notifyToUserId: to}, {
$push: {
notifyArray: {$each: [newNotificationObject], // insert your new notification
$sort: { dateNotified: 1 }, // sort by insertion date
$slice: -50 // retrieve the last 50 notifications.
}
}
}, {upsert: true, new: true}, function(err, result) {
if(err) throw err;
cb(null, resultObject);
});

Related

$setOnInsert doesn't work with bulkWrite in MongoDB

I have millions of records in the source collection. I am trying to perform a bulkWrite operation with $setOnInsert, so that if an incoming record already exists in the destination collection it is simply skipped.
But somehow this operation is not working, I don't know if there is change in behavior of $setOnInsert or it doesn't work in bulk update case.
If I perform same operation without bulkWrite then its working as expected.
This is code part of code where I tried with batch update
// Queue one upsert per merged record: match on the composite business key,
// and write the document only when no match exists ($setOnInsert is a no-op
// on a matched document — that is the intended "skip if exists" behavior).
bulkArr.push({
updateOne: {
filter: {
date: { $gte: new Date(from).getTime(), $lte: new Date(to).getTime() }, "uid": mergedObj.uid, "baseId": mergedObj.baseId,
"lessonId": mergedObj.lessonId, "pageId": mergedObj.pageId, "site": mergedObj.site
},
update: {
$setOnInsert: mergedObj
},
upsert: true
}
});
// Flush a full batch before reading more from the stream.
if (bulkArr.length % batchSize === 0) {
batchCounter = batchCounter + 1;
stream.pause();
db.collection('saq_temp_jan').bulkWrite(bulkArr, (err, result) => {
if (err) {
// NOTE(review): on error the stream is never resumed and bulkArr is
// never cleared — processing stalls here permanently. Resume/clear in
// both branches (or rethrow) to avoid a hang.
console.log(err);
} else {
bulkArr = [];
// NOTE(review): newer drivers expose result.upsertedCount /
// result.matchedCount; nUpserted/nMatched may be undefined — verify
// against the driver version in use.
console.log('Upserted count>>>', result.nUpserted);
console.log('Matched count>>>', result.nMatched);
console.log('Batch counter>>>', batchCounter);
stream.resume();
}
});
Any suggestion would be most welcome.

how does one update numbers inside mongoose model nested array

can someone please explain what I am doing wrong. I am attempting to update a number value inside a nested array on my mongoose schema by adding two numbers
here is the section in question
$set: {
"shareHolders.$.shares": Number(req.existingStock) + Number(req.stock)
}
req.existingStock is, say, 100 and req.stock is formatted as a string but equals, say, 100 — so, in short, the new value for the shares should be 200.
BUT when i run the code the shares of the said shareholder does not change it remains the original value.
here is the full snippet
// Adds an agreement to the matched shareholder AND updates their share count.
// FIX: the original passed `$push` and `$set` as two SEPARATE objects, so the
// `$set` landed in MongoDB's *options* argument position and was silently
// ignored — which is exactly why the share count never changed. Both
// operators belong in the single update document.
module.exports.updateShareHolder = function(req, callback) {
console.log('updateShareHolder');
console.log(req);
console.log(req.existingStock + Number(req.stock));
Company.update({
"_id": req.companyID,
"shareHolders.userId": req.userID
}, {
$push: {
"shareHolders.$.agreements": {
agreementID: req.agreementID
}
},
$set: {
// Coerce both operands so string inputs like "100" add numerically
// instead of concatenating.
"shareHolders.$.shares": Number(req.existingStock) + Number(req.stock)
}
}, function(err) {
if (err) {
console.log(err);
callback(err, err);
} else {
console.log('updateShareHolder');
callback(null, 'success');
}
})
};
Convert to a number before doing your update.
const updatedStock = Number(req.existingStock) + Number(req.stock)
then
$set: {
"shareHolders.$.shares": updatedStock
}

MongoDB Query Optimize Search Text

I have an application developed in NodeJS, which works as a REST API and consumes data from MongoDB
In MongoDB I have a collection called 'ftp' with more than 10 million documents with the following structure
{
"_id" : ObjectId("59e7c66911506bd1725cf145"),
"ip" : "72.32.177.76",
"timestamp" : "2017-10-16T02:30:26-04:00",
"data" : {
"banner" : "220-FileZilla Server version 0.9.41 beta\r\n"
}
}
The "data.banner" field is a hased index
From NodeJS I make an aggregate query that filters by a string of text using a regular expression, then groups and counts the results.
// Aggregates the top-5 most common "data.banner" values matching `textSearch`
// and sends them, with timing info, via the Express-style `res`.
// FIX: the original $match — `{ 'data.banner': $regex:textSearch }` — is not a
// legal object literal (syntax error); the regex operator must be wrapped in
// its own object.
// NOTE(review): a *hashed* index cannot serve $regex queries. Only an
// anchored, case-sensitive prefix regex on a regular (ordered) index can use
// the index; for word search over ~10M docs consider a text index with $text.
function getData(timeInit, table, textSearch, res) {
  MongoClient.connect(url, function (err, db) {
    if (err) throw err;
    db.collection(table).aggregate([
      {
        // Proper $regex form; the original line did not parse.
        $match: { 'data.banner': { $regex: textSearch } }
      },
      {
        $group: {
          _id: '$data.banner',
          num: { $sum: 1 },
        }
      },
      {
        $sort: {
          num: -1
        }
      },
      {
        $limit: 5
      }
    ], {
      allowDiskUse: true // grouping 10M+ docs can exceed the memory limit
    }).toArray(function (err, result) {
      if (err) throw err;
      var timeFinal = new Date();
      var data = {
        result: result,
        timeLapse: (timeFinal - timeInit) / 1000, // seconds elapsed
        numResult: result.length
      };
      res.send(data);
      db.close();
    });
  });
}
The query with regular expression takes about 8 seconds to return results, an excessive time in my opinion, since the regular expressions are not optimal.
My question is how should I make the filter to search for documents that contain text in an optimal way reducing the response time.
If someone knows how to optimize this type of query I would appreciate it a lot.

Selecting only modified subdocument from Mongo

I have a mongoose query like this:
// Replace the matched participant subdocument in place; `new: true` makes the
// callback receive the post-update event document.
// NOTE(review): findOneAndUpdate returns the FULL document — `result.participants`
// is the whole array, and index [0] is simply the first participant, not
// necessarily the one that was modified (hence the question below).
var query = Events.findOneAndUpdate({ '_id': event._id,'participants._id':participant._id},{'$set': {'participants.$': participant}}, {upsert:false,new: true},function(err,result){
if(err){
return res.status(500).jsonp({
error: 'Unable to update participant'
});
}
console.log(result.participants[0]);
res.jsonp(result.participants[0]);
});
and the query works properly modifying the participants subdocument inside Events collection.
The problem:
I need only the modified participant to be returned as JSON and I am not in need of the entire participants array but I am not able to achieve this since I get all the participants when I do console.log(result.participants);
How do I get only the modified subdocument after the query?
You may have to use the native JS filter() method as in the following:
// Update the matched participant, then pick just that subdocument out of the
// returned (full) participants array.
// FIX: Mongo `_id` values are ObjectIds, and `===` compares object identity —
// two ObjectId instances holding the same hex value are NOT strictly equal,
// so the original filter could return undefined. Compare their string forms
// (or use ObjectId#equals) so the match is by value.
Events.findOneAndUpdate(
{ '_id': event._id, 'participants._id': participant._id },
{ '$set': { 'participants.$': participant } },
{ upsert: false, new: true },
function(err, result){
if(err){
return res.status(500).jsonp({
error: 'Unable to update participant'
});
}
var modified = result.participants.filter(function(p){
return String(p._id) === String(participant._id);
})[0];
console.log(modified);
res.jsonp(modified);
}
);

Upserting a document with MongoDB, incrementing a field and setting it to 0 if not existent

I'm using MongoDB in node.js
What I would like is to upsert a document in a collection. The document has an unique ID, a lastAccess field, which stores the date of the last time accessed, and a timesAccessed field, which should be set to 0 on document creation and incremented by 1 if updating.
I tried:
// coll is a valid collection
// NOTE(review): this update document mixes top-level plain fields (`user`,
// `lastAccess`) with update operators ($inc, $setOnInsert). MongoDB rejects
// that combination with "Modifiers and non-modifiers cannot be mixed" — the
// exact error reported below. Additionally, $inc and $setOnInsert may not
// both target the same field (`timesAccessed`) in one update.
coll.update(
{user: accountInfo.uid},
{user: accountInfo.uid,
lastAccess: new Date(),
$inc: {timesAccessed: 1},
$setOnInsert: {timesAccessed: 0}
},
{upsert: true, w: 1},
function(err, result) {
if (err) throw err;
console.log("Record upserted as " + result);
});
but node says:
MongoError: Modifiers and non-modifiers cannot be mixed
What is a concise and safe way to do this?
You should either $set the values or update/replace the whole object. So either update(find_query, completely_new_object_without_modifiers, ...) or update(find_query, object_with_modifiers, ...)
Plus, you cannot $set and $setOnInsert with the same field name, so you will start counting from 1 :) Oh, and you don't need to add the find_query items to the update_query, they will be added automatically.
Try:
// Upsert the user's access record: $set refreshes the timestamp, $inc bumps
// the counter (on an upsert-insert, $inc seeds the missing field from 0, so
// the first write stores 1). The filter's `user` field is copied into the new
// document automatically on insert.
// FIX: the original was missing the comma between the $set and $inc blocks —
// a syntax error that would prevent the snippet from parsing at all.
col1.update({
user: accountInfo.uid
}, {
$set: {
lastAccess: new Date()
},
$inc: {
timesAccessed: 1
}
}, {
upsert: true,
w: 1
}, function(err, result) {
if (err) {
throw err;
}
console.log("Record upsert as", result);
});

Resources