Two queries in FindOneAndUpdate don't work with arrays - node.js

I have this schema in Mongoose:
var CoinAmountSchema = new Schema({
  user: [{ type: Schema.ObjectId, ref: 'User' }],
  coinAmounts: [{
    _id: false,
    coinID: { type: Number, ref: 'Coin' },
    amount: Number
  }]
})
I am writing this query, which checks the userID and the coinID and should update only that coin's amount.
exports.coin_amount_update = [
  (req, res, next) => {
    CoinAmount.update({
      "user": req.params.userId,
      "coinAmounts.coinID": req.params.coinId
    },
    {
      '$set': {
        'coinAmounts.$.amount': req.body.amount
      }
    },
    function (err, model) {
      if (err) {
        console.log(err)
        return res.send(err)
      }
      return res.json(model)
    })
  }]
But as written, it only updates the amount of the first coin in the array. BUT, if I delete the line "user": req.params.userId, it finds and updates the right coin. I still need to check for the user as well, though, so how can I make it work?
Is there something wrong with the query or the way the data is structured?
EDIT: I send the request from React Native:
fetch(`${apiBaseURL}/users/${getState().user._id}/coins/${id}/update`, {
  method: 'PUT',
  headers: {
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    amount: getState().coins[id].amount
  }),
})
If the request is /users/:userID/coins/0/update (with amount: 1)
then the result will be
{ _id: 5a579d0d44e7390ba3029327,
  __v: 0,
  coinAmounts:
   [ { coinID: 0, amount: 1 },
     { coinID: 1, amount: 0 },
     { coinID: 2, amount: 0 },
     { coinID: 3, amount: 0 },
     { coinID: 4, amount: 0 } ],
  user: [ 5a579d0d44e7390ba3029326 ] }
The result is the same if the request is /users/:userID/coins/1/update with the same amount. But, as mentioned before, if I remove the check for userID, the request /users/:userID/coins/1/update produces this:
{ _id: 5a579d0d44e7390ba3029327,
  __v: 0,
  coinAmounts:
   [ { coinID: 0, amount: 0 },
     { coinID: 1, amount: 1 },
     { coinID: 2, amount: 0 },
     { coinID: 3, amount: 0 },
     { coinID: 4, amount: 0 } ],
  user: [ 5a579d0d44e7390ba3029326 ] }
Hope I was clear.

It looks like a bug when the filter matches two arrays and you use a $ positional update: the $ ends up holding the matched index from the user array (index 0 here) instead of the index from coinAmounts, which is why only the first coin gets updated.
I tried the two workarounds below; both update the correct coinID.
Workaround 1: using arrayFilters (requires MongoDB 3.6+)
db.coins.update(
  { "user": "5a579d0d44e7390ba3029326" }, // user
  {
    $set: { "coinAmounts.$[elem].amount": 1 } // update
  },
  {
    multi: false,
    arrayFilters: [
      { "elem.coinID": 2 } // coinID
    ]
  }
)
Workaround 2: using $elemMatch on the user array
db.coins.update(
  {
    "coinAmounts.coinID": 1, // coinID
    "user": { $elemMatch: { $eq: "5a579d0d44e7390ba3029326" } } // user
  },
  { $set: { "coinAmounts.$.amount": 1 } } // update
)
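For completeness, here is a minimal sketch of workaround 1 folded back into the original Mongoose route handler. This assumes Mongoose 5.x against MongoDB 3.6+ (arrayFilters needs both); the Number() cast is there because route params are strings while coinID is a Number in the schema:
exports.coin_amount_update = [
  (req, res, next) => {
    CoinAmount.updateOne(
      // match on the user array only, and let $[elem] do the coin matching
      { user: req.params.userId },
      { $set: { 'coinAmounts.$[elem].amount': req.body.amount } },
      { arrayFilters: [{ 'elem.coinID': Number(req.params.coinId) }] },
      function (err, model) {
        if (err) {
          console.log(err)
          return res.send(err)
        }
        return res.json(model)
      })
  }]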

Related

I want to write a MongoDB query in NodeJS that returns the matching documents as well as the count of documents

I want to write a MongoDB query in NodeJS that returns the matching documents as well as the count of those documents. For example, consider the code below:
const result = await Student.aggregate([
  {
    $match: {
      ...filter
    }
  },
  {
    $project: {
      _id: 1,
      payment: 1,
      type: 1,
      BirthDate: 1
    }
  },
  {
    $sort: { StudentData: -1 }
  },
  {
    $count: 'count'
  },
  {
    $skip: skip
  },
  {
    $limit: limit
  }
]);
Here I want to save two things in the result variable: the number of documents, and all the documents individually.
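You can get both in a single round trip with $facet, which runs the paging and the count as parallel sub-pipelines over the same matched set: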
let [{ totalItems, result }] = await Student.aggregate([
  {
    $match: {
      ...filter
    }
  },
  {
    $project: {
      _id: 1,
      payment: 1,
      type: 1,
      BirthDate: 1
    }
  },
  {
    $facet: {
      result: [
        {
          $sort: { BirthDate: -1 }
        },
        {
          $skip: skip
        },
        {
          $limit: limit
        }
      ],
      totalItems: [{ $count: 'count' }]
    }
  },
  {
    $addFields: {
      totalItems: {
        $arrayElemAt: ["$totalItems.count", 0]
      }
    }
  }
]);
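One caveat: if the filter matches no documents, the totalItems facet is an empty array, $arrayElemAt produces no value, and totalItems comes back undefined after destructuring, so guard for that before treating it as a number.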

Decrement a field and delete the entire object if the field's value reaches 0 in MongoDB & Node.js

I have an array of objects, like a shopping list. For example, I added 2 products to my list:
cart:[{id:1,quantity:15},{id:2,quantity:5}]
I can increment and decrement quantities fine, but what I want is this: when I decrement a quantity to 0, the item should no longer show in my cart. I know it's possible with $pull, and I tried the code below but it fails. I don't know what I'm doing wrong; when I console.log(user) it gives me null, because the query doesn't see "cart.quantity": 0, and I don't know why not. The user info and layout are OK, by the way.
router.get("/:productId/decrement", auth, async (req, res) => {
  const user = await User.findOne({
    id: req.user._id,
    "cart.quantity": 0,
  });
  if (user) {
    await User.findOneAndUpdate(
      {
        id: req.user._id,
        "cart.id": req.params.productId,
        "cart.quantity": 0,
      },
      { $pull: { cart: { id: req.params.productId } } }
    );
  } else {
    await User.findOneAndUpdate(
      {
        id: req.user._id,
        "cart.id": req.params.productId,
      },
      { $inc: { "cart.$.quantity": -1 } },
      { new: true },
      (err, doc) => {
        if (err) {
          return res.json({ success: false, err });
        }
        return res.json({ success: true, cart: doc });
      }
    );
  }
});
and here is my user model:
const userSchema = mongoose.Schema({
  name: {
    type: String,
    maxlength: 50,
  },
  email: {
    type: String,
    trim: true,
    unique: 1,
  },
  password: {
    type: String,
    minlength: 5,
  },
  lastname: {
    type: String,
    maxlength: 50,
  },
  cart: { type: Array, default: [] },
});
On MongoDB version >= 3.2:
router.get("/:productId/decrement", auth, async (req, res) => {
  try {
    let bulkArr = [
      {
        updateOne: {
          filter: {
            id: req.user._id,
            "cart.id": req.params.productId,
            "cart.quantity": 1, // You can use { $lte: 1 }
          },
          update: { $pull: { cart: { id: req.params.productId } } },
        },
      },
      {
        updateOne: {
          filter: {
            id: req.user._id,
            "cart.id": req.params.productId,
          },
          update: { $inc: { "cart.$.quantity": -1 } },
        },
      },
    ];
    await User.bulkWrite(bulkArr);
    return res.json({ success: true });
  } catch (error) {
    console.log("Error ::", error);
    return res.json({ success: false, ...{ error } });
  }
});
Note:
I believe you're mixing async/await and callbacks; please try to avoid that.
In bulkWrite, every filter is evaluated, but each update operation only runs when its own filter matches (they behave like individual ops), and it all happens in one DB call.
bulkWrite returns a write result, not documents; if you need the resulting doc you have to make a separate .find() call. But since the whole point of using bulkWrite is to avoid multiple calls to the DB, if the result has no errors you can just send a success message to the front end and let it show the decremented count.
Ref: .bulkWrite()
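As a minimal sketch of that last point, assuming the surrounding route handler is async as above, you can read the write result instead of re-fetching:
const result = await User.bulkWrite(bulkArr);
// bulkWrite is ordered by default, so at most one of the two ops modifies
// the doc: modifiedCount is 1 on success, 0 if no cart item matched
return res.json({ success: result.modifiedCount > 0 });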
On MongoDB version >= 4.2:
Since updates can use aggregation pipelines from 4.2 onward, you can do something like the below:
User.findOneAndUpdate(
  { id: req.user._id, 'cart.id': req.params.productId },
  [
    {
      $addFields: {
        cart: {
          $reduce: {
            input: "$cart",
            initialValue: [],
            in: {
              $cond: [
                { $and: [ { $eq: [ "$$this.id", req.params.productId ] }, { $gt: [ "$$this.quantity", 1 ] } ] }, // condition
                // condition is true, so we decrement and push the object to the accumulator array
                { $concatArrays: [ "$$value", [ { $mergeObjects: [ "$$this", { quantity: { $add: [ "$$this.quantity", -1 ] } } ] } ] ] },
                // condition failed, so push only objects where id != input productId
                { $cond: [ { $eq: [ "$$this.id", req.params.productId ] }, "$$value", { $concatArrays: [ "$$value", [ "$$this" ] ] } ] }
              ]
            }
          }
        }
      }
    }
  ],
  { new: true })
Ref: aggregation-pipeline-for-updates, $reduce
Test the aggregation pipeline here: mongoplayground
Note:
A few MongoDB clients may throw an error saying 'update operations may only contain atomic operators'; in that case you can use .update(), or test it from code, as most drivers that support 4.2 don't throw such errors.
Try this:
Create a partial index that only indexes documents where the field value equals or is less than 0 (depending on your needs)
Add a TTL of "expire after 0 seconds" on a field that is set when the value reaches 0 (i.e. all indexed documents are set to expire immediately).
In Java it would look like:
new IndexModel(
    Indexes.ascending(ZERO_FIELD),
    new IndexOptions()
        .expireAfter(0L, TimeUnit.SECONDS)
        .partialFilterExpression(Filters.lte(VALUE_FIELD, 0))
)
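Since the question is Node.js, a rough equivalent with the official Node driver is sketched below. The field names here are hypothetical, and note two caveats: TTL indexes only work on Date-typed fields, and they delete whole documents, not array elements:
// 'zeroedAt' (hypothetical) is a Date you set when the value hits 0;
// 'value' (hypothetical) is the field being decremented
await db.collection('items').createIndex(
  { zeroedAt: 1 },
  {
    expireAfterSeconds: 0, // expire as soon as the indexed date passes
    partialFilterExpression: { value: { $lte: 0 } } // index only zeroed docs
  }
);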

mongodb insert data into the array of objects and update it

I need to implement voting. It is stored as an array of objects, each holding the user's ID and the value they set.
If the user has already voted but changed their value, the rate in the array of objects needs to change for that user.
So I need an array of objects into which data is inserted like this: {rate: 3, user: "asdr2r24f2f42f24"}, and if the user has already voted in this array, the rate value for that user should be updated instead.
I already tried something, but it seems it could be written better. Can you help?
JSON https://jsoneditoronline.org/?id=442f1dae0b2d4997ac69d44614e55aa6
router.post('/rating', (req, res) => {
  console.log(req.body)
  // { id: 'f58482b1-ae3a-4d8a-b53b-ede80fe1e225',
  //   rating: 5,
  //   user: '5e094d988ddbe02020e13879' }
  Habalka.find({
    _id: req.body.id
  })
    .then(habalka => {
      // here I need to check whether the user has already voted, and
      // depending on that either push a new object or update the number
      Habalka.updateOne(
        { _id: req.body.id },
        { $push: { rating: { rate: req.body.rating, user: req.body.user } } }
      )
        .then(e => {
          console.log(e)
        })
    });
});
Schema
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const HabalkaSchema = new Schema({
  _id: {
    type: String
  },
  bio: {
    firstname: String,
    lastname: String,
    middlename: String,
    company: String
  },
  rating: [],
  files: [
    {
      _id: { type: String },
      destination: { type: String },
      filename: { type: String },
      path: { type: String },
      folder: { type: String },
      info: {
        size: { type: Number },
        mimetype: { type: String },
        encoding: { type: String },
        originalname: { type: String },
        fieldname: { type: String },
      },
      date: {
        type: Date,
        default: Date.now
      },
      bio: { type: Object },
      userId: String,
      guessId: {},
    }
  ],
  date: {
    type: Date,
    default: Date.now
  }
});
module.exports = Habalka = mongoose.model('habalka', HabalkaSchema);
This is an aggregation query which inserts a new user, or updates the rating of an existing user, in the rating array.
The req.body.id, req.body.user and req.body.rating are set as follows for the example code:
var ID = 1, INPUT_USER = "new user", INPUT_RATE = 5;

const matchStage = { $match: { _id: ID } };

const facetStage = {
  $facet: {
    new_user: [
      { $match: { "rating.user": { $not: { $eq: INPUT_USER } } } },
      { $addFields: { rating: { $concatArrays: [ "$rating", [ { user: INPUT_USER, rate: INPUT_RATE } ] ] } } },
    ],
    user: [
      { $match: { "rating.user": INPUT_USER } },
      {
        $addFields: {
          rating: {
            $map: {
              input: "$rating",
              as: "r",
              in: {
                $cond: [
                  { $eq: [ "$$r.user", INPUT_USER ] },
                  { user: "$$r.user", rate: { $add: [ "$$r.rate", INPUT_RATE ] } },
                  "$$r"
                ]
              }
            }
          }
        }
      }
    ]
  }
};

const projectStage = {
  $project: {
    result: { $arrayElemAt: [ { $concatArrays: [ "$user", "$new_user" ] }, 0 ] }
  }
};

const queryPipeline = [ matchStage, facetStage, projectStage ];

// Run the aggregation query and get the modified document after applying
// the user and rate data in the rating array. The result of the
// aggregation is then used to update the collection.
col.aggregate(queryPipeline).toArray((err, docs) => {
  console.log("Aggregation output:");
  console.log(JSON.stringify(docs[0]));
  // Write the aggregate result back to the collection.
  col.updateOne(
    { _id: docs[0].result._id },
    { $set: { rating: docs[0].result.rating } },
    (err, updateResult) => {
      console.log('Updated count: ', updateResult.matchedCount);
    }
  );
  callback(docs);
});
Example collection document:
{ "_id" : 1, "rating" : [ { "user" : "user1", "rate" : 2 } ] }
If the input is var ID = 1, INPUT_USER = "new user", INPUT_RATE = 5; the updated document will be:
{ "_id" : 1, "rating" : [ { "user" : "user1", "rate" : 2 }, { "user" : "new user", "rate" : 5 } ] }
If the input is var ID = 1, INPUT_USER = "user1", INPUT_RATE = 5; the updated document will be:
{ "_id" : 1, "rating" : [ { "user" : "user1", "rate" : 7 } ] }

MongoDB, Node and Express: return all records that don't exist in a separate collection that has a large amount of data

I've been pulling my hair out for weeks over this one.
I have a collection (this is a cut down version):
const SubscriberSchema = new Schema({
  publication: { type: Schema.Types.ObjectId, ref: "publicationcollection" },
  buyer: { type: Schema.Types.ObjectId, ref: "buyercollection" },
  postCode: { type: String },
  modifiedBy: { type: String },
  modified: { type: Date }
});
I also have a collection containing the 1.75 million UK Postcodes
const PostcodeSchema = new Schema({
  postcode: { type: String }
});
What I want to do is to return any record in the Subscriber collection which doesn't exist within the Postcode collection.
When I try a very simple aggregation using Mongoose on anything >100 records in the Subscriber collection, I'm getting either a timeout or a >16MB return error.
Here's what I've tried so far:
router.get(
  "/badpostcodes/:id",
  passport.authenticate("jwt", { session: false }),
  (req, res) => {
    const errors = {};
    Subscriber.aggregate([
      {
        $match: {
          publication: mongoose.Types.ObjectId(req.params.id),
          postCode: { $ne: null, $exists: true }
        }
      },
      {
        $lookup: {
          from: 'postcodescollections',
          localField: 'postCode',
          foreignField: 'postcode',
          as: 'founditem'
        }
      },
      // {
      //   $unwind: '$founditem'
      // },
      {
        $match: {
          founditem: { $eq: [] }
        }
      }
    ], function (err, result) {
      if (err) {
        console.log(err);
      } else {
        if (result.length > 0) {
          res.json(result);
        } else {
          res.json("0");
        }
      }
    })
  }
);
The unwind didn't seem to do anything, but I've left it commented out to show I tried it.
I've also tried using a pipeline inside the lookup instead, but that didn't work either; it was similar to the following (sorry, I don't have my original attempt, so this is from memory only):
$lookup: {
  from: 'postcodescollections',
  let: { postcode: "$postCode" },
  pipeline: [
    {
      $match: {
        postcode: { $exists: false }
      }
    },
    {
      $unwind: "$postCode"
    }
  ],
  as: 'founditem'
}
Thanks in advance so I can hopefully retain some hair!
You are matching all postcodes that don't match and then unwinding those: that would be 1.75m documents for each subscriber! The syntax in the $lookup also looks incorrect, I think.
I think you can try something like the following; adjust accordingly for your data:
Do a $lookup to find a matching postcode in postcodes, then do a $match to keep only the subscribers that don't have any founditem elements: "founditem.0": {$exists: false}
See an example:
db.getCollection("subscribers").aggregate([
  // Stage 1: keep only subscribers that actually have a postcode
  {
    $match: {
      postCode: { $ne: null, $exists: true }
    }
  },
  // Stage 2: project just the fields the lookup needs
  {
    $project: {
      _id: 1,
      postCode: 1
    }
  },
  // Stage 3: look each postCode up in the postcodes collection
  {
    $lookup: {
      from: "postcodescollections",
      let: { p: "$postCode" },
      pipeline: [
        {
          $match: {
            $expr: { $eq: ["$$p", "$postcode"] }
          }
        },
        { $project: { _id: 1 } }
      ],
      as: "founditem"
    }
  },
  // Stage 4: keep only subscribers with no match
  {
    $match: {
      "founditem.0": { $exists: false }
    }
  }
]);
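One performance note: with 1.75 million postcodes, the $lookup runs its sub-pipeline once per subscriber, so an index on the postcode field of the lookup collection is effectively mandatory for this to finish quickly. Assuming the collection name used above, a one-liner in the shell:
db.postcodescollections.createIndex({ postcode: 1 })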

How can I upsert multiple objects with MongoDB & Node.js?

Let's say I have an array of Movie genres like so:
[
{ id: 28, name: 'Action' },
{ id: 12, name: 'Adventure' },
{ id: 16, name: 'Animation' },
{ id: 35, name: 'Comedy' },
{ id: 80, name: 'Crime' },
{ id: 99, name: 'Documentary' },
{ id: 18, name: 'Drama' },
{ id: 10751, name: 'Family' },
{ id: 14, name: 'Fantasy' },
{ id: 10769, name: 'Foreign' },
{ id: 36, name: 'History' },
{ id: 27, name: 'Horror' },
{ id: 10402, name: 'Music' },
{ id: 9648, name: 'Mystery' },
{ id: 10749, name: 'Romance' },
{ id: 878, name: 'Science Fiction' },
{ id: 10770, name: 'TV Movie' },
{ id: 53, name: 'Thriller' },
{ id: 10752, name: 'War' },
{ id: 37, name: 'Western' }
]
and I have a connection to a MongoDB (v3.2) instance: db, and I'm using the standard mongodb Node.js driver (const mongodb = require('mongodb').MongoClient).
What I want to be able to do is one bulk upsert operation onto a collection, say genres, where the _id field maps to the id field of our genre objects.
Now, I know I could loop through each item in the array, and do a simple upsert:
for (let i = 0; i < genres.length; i++) {
  await db.collection('genres').update(
    { _id: genres[i].id },
    genres[i],
    { upsert: true }
  );
}
But this feels wasteful and wrong.
Is there an easier way to do what should be a relatively simple task?
Thanks
Use the bulkWrite API to carry out the updates:
var bulkUpdateOps = genres.map(function(doc) {
  return {
    "updateOne": {
      "filter": { "_id": doc.id },
      "update": { "$set": { "name": doc.name } },
      "upsert": true
    }
  };
});

db.collection('genres').bulkWrite(bulkUpdateOps, function(err, r) {
  // do something with the result
})
If you're dealing with larger arrays, i.e. > 1000 elements, consider sending the writes to the server in batches of 500. This gives you better performance because you're not making a round trip for every document, just one for every 500 operations:
var bulkUpdateOps = [],
    counter = 0;

genres.forEach(function(doc) {
  bulkUpdateOps.push({
    "updateOne": {
      "filter": { "_id": doc.id },
      "update": { "$set": { "name": doc.name } },
      "upsert": true
    }
  });
  counter++;

  if (counter % 500 == 0) {
    db.collection('genres').bulkWrite(bulkUpdateOps, function(err, r) {
      // do something with the result
    });
    bulkUpdateOps = [];
  }
})

if (counter % 500 != 0) {
  db.collection('genres').bulkWrite(bulkUpdateOps, function(err, r) {
    // do something with the result
  });
}
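If your driver version returns promises, the same batching idea reads more simply with async/await; a minimal sketch, assuming an async context:
const BATCH_SIZE = 500;
for (let i = 0; i < genres.length; i += BATCH_SIZE) {
  // build the ops for this 500-element slice, then flush it in one call
  const ops = genres.slice(i, i + BATCH_SIZE).map(doc => ({
    updateOne: {
      filter: { _id: doc.id },
      update: { $set: { name: doc.name } },
      upsert: true
    }
  }));
  await db.collection('genres').bulkWrite(ops);
}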
I would try:
db.collection('genres').update(genres, {upsert: true, multi: true});
Note: untested code...
UPDATE: to remap id field to _id:
var _genres = genres.map(function(genre) {
  return { _id: genre.id, name: genre.name };
});

db.collection('genres').update(_genres, {upsert: true, multi: true});
