Updating Mongo DB entries by params - node.js

The goal of this program is to add a property called 'userResponse' to the problem objects that reflects the input from the user. The problem object in Mongo DB is held in an array of objects, called 'problems', that is held in an object called 'session':
{
    "_id" : ObjectId("59df5ee7adb378237377dbb4"),
    "updatedAt" : ISODate("2017-10-12T12:24:07.269Z"),
    "createdAt" : ISODate("2017-10-12T12:24:07.269Z"),
    "userId" : "59df5edbadb378237377dbb3",
    "problems" : [
        {
            "correctResponse" : 23,
            "problem" : "20 + 3",
            "secondTerm" : 3,
            "firstTerm" : 20
        }
    ]
}
Here is the logic for the endpoint that I have been using:
router.patch('/session/:sessionId/:index', passport.authenticate('jwt', { session: false }), (req, res) => {
    Session.findOne({_id: req.params.sessionId})
        .then((item) => {
            item.problems[req.params.index].userResponse = req.body.userResponse;
            Session.update({_id: req.params.sessionId}, item).then((updated) => {
                res.json(updated.problems[req.params.index]);
            });
        })
})
I looked at some other examples (one, two) but they do not seem relevant, since this is a patch to a single element of the array as identified by the params.
This approach works, in that it successfully updates the database with the new properties on the objects, but during the execution the server logs this error:
(node:10955) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 1): TypeError: Cannot read property '0' of undefined
My understanding is that this means the object cannot be found in the database at some point, but since the database does include the new properties it must be resolved later. Is that correct?
Additionally it seems brittle to have two chained promises with no catch statements...
Can anyone offer suggestions on how to improve this process? This logic does successfully update the database, but with errors along the way. Is there a better way?
Thank you for your time.

Well, I can see multiple issues in the code that could cause your problem:
1. From the structure of your session document, _id is of type ObjectId, but you are querying by the raw id, which is probably a string; you will need to use ObjectId(req.params.sessionId).
2. From the error, I can tell that at some point the value being read does not contain a problems array, so .problems is undefined and indexing into undefined throws the TypeError.
3. You don't need to find the session document and then write the whole item back to update the array; I updated the code to set the userResponse value inside the array with a single update operation.
4. For better error handling and code maintainability you need to handle promise rejections, so chain a catch just as you already chain then.
So from the above comments I would update your code to be:
var mongoose = require('mongoose');

router.patch('/session/:sessionId/:index', passport.authenticate('jwt', { session: false }), (req, res) => {
    var index = req.params.index;

    // Build the positional path ("problems.<index>.userResponse") as a computed key
    var update = { $set: {} };
    update.$set['problems.' + index + '.userResponse'] = req.body.userResponse;

    // findOneAndUpdate with { new: true } resolves with the updated document,
    // so the response can include the modified problem
    Session.findOneAndUpdate({ _id: mongoose.Types.ObjectId(req.params.sessionId) }, update, { new: true })
        .then((updated) => {
            console.log(updated);
            res.json(updated.problems[index]);
        })
        .catch((err) => {
            console.log(err.message);
            res.json({ status: "error", message: err.message });
        });
})
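For reference, a client-side call to this endpoint might look like the following sketch (superagent, as used elsewhere on this page; the host, mount path, session id, index, and token handling are assumptions about your setup):

const request = require('superagent');

const jwtToken = '<your JWT here>'; // placeholder

request
    .patch('http://localhost:3000/session/59df5ee7adb378237377dbb4/0')
    .set('Authorization', 'Bearer ' + jwtToken) // however your passport-jwt strategy expects the token
    .send({ userResponse: 23 })
    .then(res => console.log(res.body)) // the updated problem object
    .catch(err => console.error(err.message));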

Related

How to Update only passed value in mongodb using mongoose?

I have built an API that updates records in MongoDB using mongoose. Currently, if I pass only 4 field values in the JSON body from Postman and try to update, all the other values are updated to null except the 4 fields I passed. Can anyone help me pass dynamic fields and values so that only the passed fields of the collection are updated, not all of them?
Passed JSON:
{
    "preferance_id": "60fe9ba1766d10d65c64083c",
    "is_active": true,
    "price_blur": true,
    "affiliate_commission": 27,
    "language_code": "en"
}
Update call which I have in Node.js:
transaction.update(PreferencesMasterName,
    { _id: new mongoose.Types.ObjectId(preferance_id) }, {
        subscriptin_vat: subscriptin_vat,
        popular_normal: popular_normal,
        popular_crawled: popular_crawled,
        price_blur: price_blur,
        blur_rule: blur_rule,
        affiliate_commission: affiliate_commission,
        red_lock: red_lock,
        automatic_dummy_price: automatic_dummy_price,
        ...
        is_active: is_active
    })
I want to pass dynamic fields and values here instead, because with this approach the other values are set to null. Does anyone have an idea how to do this?
You can do something like this:
const data = req.body; // should be an object containing only the fields that need to be updated
transaction.update({ _id: new mongoose.Types.ObjectId(preferance_id) }, data, { new: true }, (error, obj) => {
    if (error) {
        console.error(JSON.stringify(error));
    }
    console.log(obj);
});
In certain cases new doesn't work; you can use { returnOriginal: false } instead.
For more details, you can check this thread; there are multiple ways you can do this.
Please also check the documentation for update to see how to use it.
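If you would rather not pass the request body straight through, a minimal sketch of the same idea with an explicit whitelist (the field list and the PreferencesMaster model name here are assumptions, not code from the question) could look like this:

// Build the update object only from the fields that were actually sent,
// so fields that are not in the request keep their current values.
const allowedFields = ['is_active', 'price_blur', 'affiliate_commission', 'language_code'];

const updates = {};
for (const field of allowedFields) {
    if (req.body[field] !== undefined) {
        updates[field] = req.body[field];
    }
}

PreferencesMaster.findByIdAndUpdate(
    req.body.preferance_id,
    { $set: updates },
    { new: true }
)
    .then(doc => res.json(doc))
    .catch(err => res.status(500).json({ message: err.message }));

Because only the keys present in updates appear in the $set, the untouched fields are left alone instead of being overwritten with null.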

How to avoid two concurrent API requests breaking the logic behind document validation?

I have an API where, in order to insert a new item, the item needs to be validated. The validation is basically a type validator (string, number, Date, etc.) plus a query against the database that checks whether the "user" already has an "item" on the same date; if it does, the validation fails.
Pseudocode goes like this:
const Item = require("./models/item");

async function post(newDoc) {
    let errors = await checkForDocErrors(newDoc)

    if (errors) {
        throw errors;
    }

    let itemCreated = await Item.create(newDoc);
    return itemCreated;
}
My problem is if I do two concurrent requests like this:
const request = require("superagent");

// Inserts a new Item
request.post('http://127.0.0.1:5000/api/item')
    .send({
        "id_user": "6c67ea36-5bfd-48ec-af62-cede984dff9d",
        "start_date": "2019-04-02",
        "name": "Water Bottle"
    })

/*
  Inserts a new Item, which it shouldn't do, resulting in two items having
  the same date.
*/
request.post('http://127.0.0.1:5000/api/item')
    .send({
        "id_user": "6c67ea36-5bfd-48ec-af62-cede984dff9d",
        "start_date": "2019-04-02",
        "name": "Toothpick"
    })
Both will be successful, which shouldn't happen, since a "user" cannot have two "items" on the same date.
If I execute the second one after the first is finished, everything works as expected.
request.post('http://127.0.0.1:5000/api/item') // Inserts a new Item
    .send({
        "id_user": "6c67ea36-5bfd-48ec-af62-cede984dff9d",
        "start_date": "2019-04-02",
        "name": "Water Bottle"
    })
    .then((res) => {
        // It is not successful since there is already an item with that date,
        // as expected
        request.post('http://127.0.0.1:5000/api/item')
            .send({
                "id_user": "6c67ea36-5bfd-48ec-af62-cede984dff9d",
                "start_date": "2019-04-02",
                "name": "Toothpick"
            })
    })
To avoid this I send one request with an array of documents, but I want to prevent this issue, or at least make it less likely to happen.
SOLUTION
I set up a Redis server, used the package redis-lock, and wrapped the POST route with it.
var client = require("redis").createClient()
var lock = require("redis-lock")(client);

var itemController = require('./controllers/item');

router.post('/', function(req, res){
    let userId = "";
    if (typeof req.body === 'object' && typeof req.body.id_user === 'string') {
        userId = req.body.id_user;
    }

    lock('POST ' + req.path + userId, async function(done){
        try {
            let result = await itemController.post(req.body)
            res.json(result);
        } catch (e) {
            res.status(500).send("Server Error");
        }
        done()
    })
})
Thank you.
Explanation:
That is a race condition.
two or more threads can access shared data and they try to change it at the same time
What is a race condition?
Solution:
There are many ways to prevent conflicting data in this case; a lock is one option.
You can lock at the application level or at the database level... but I suggest you read this thread before choosing either of them.
Optimistic vs. Pessimistic locking
Quick solution: pessimistic-lock https://www.npmjs.com/package/redis-lock
You should create a composite index or a composite primary key that includes the id_user and the start_date fields. This will ensure that no documents for the same user with the same date can be created, and the database will throw an error if you try to do it.
Composite index with mongoose
You could also use transactions. To do it, you should execute the find and the create methods inside a transaction, to ensure that no concurrent queries on the same document will be executed.
Mongoose transactions tutorial
More info
I would go with a unique composite index, which in your specific case would be something like:
mySchema.index({id_user: 1, start_date: 1}, {unique: true});
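As a rough sketch of how that plays out end to end (the schema shape and error message here are assumptions, not code from the question), the unique index makes the second concurrent create fail with MongoDB's duplicate key error, code 11000, which the insert logic can translate into a validation failure:

const mongoose = require('mongoose');

// Assumed shape of the Item schema; only id_user and start_date matter here.
const itemSchema = new mongoose.Schema({
    id_user: { type: String, required: true },
    start_date: { type: Date, required: true },
    name: String
});

// One document per user per date: duplicates are rejected by MongoDB itself,
// even when two requests arrive at the same time.
itemSchema.index({ id_user: 1, start_date: 1 }, { unique: true });

const Item = mongoose.model('Item', itemSchema);

async function post(newDoc) {
    try {
        return await Item.create(newDoc);
    } catch (err) {
        if (err.code === 11000) { // MongoDB duplicate key error
            throw new Error('User already has an item on that date');
        }
        throw err;
    }
}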

Sequelize query to count and update a data if count value returns 1

I am building a registration system in my Node.js application.
I am using Sequelize.js as the ORM.
This is my controller:
exports.activateaccount = function(req, res, next){
    var token = req.params.token;
    indexmodel.activateemail(token, function(err, result){
        if (err) throw err;
        else {
            res.send('Activation Completed');
        }
    })
}
I am taking the token and sending it to the model as:
exports.activateemail = function(token, callback){
    partner.count({where: {token: token}}).then((partner) => {
        Sequelize.query("UPDATE partners WHERE token='" + token + "' SET confirmed=1").then(partner => {
            callback();
        })
    })
}
I want to set the 'confirmed' column to 1 for the row whose token matches.
Currently the default value is 0.
Does anyone know how to do this?
I tried using Sequelize.query but it is not a function.
Instead of using a raw query like:
Sequelize.query("UPDATE partners WHERE token='"+token+"' SET confirmed=1")
You should be using the Sequelize way; this way you can handle it better.
By better I mean:
In the future you can switch to any dialect
You can use hooks
And others ...
But with the method you have used you can't do such things.
Here you go:
partner.update({ 'confirmed': 1 }, { where: { 'token': token, 'confirmed': 0 } })
    .then(...)   // handle update
    .catch(...); // handle error
For delete:
partner.destroy({ where: { 'token': token, 'confirmed': 1 } });
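Tying that back to the activateemail function from the question (keeping its callback signature, which is an assumption about how you want to call it), Model.update resolves with an array whose first element is the number of affected rows, so you can also tell whether a matching token existed:

exports.activateemail = function(token, callback) {
    partner.update(
        { confirmed: 1 },
        { where: { token: token, confirmed: 0 } }
    )
    .then(([affectedCount]) => {
        if (affectedCount === 0) {
            // no unconfirmed row with that token was found
            return callback(new Error('Invalid or already confirmed token'));
        }
        callback(null, affectedCount);
    })
    .catch(callback);
}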

mongoose-encryption and updating objects

I know the mongoose-encryption doc states:
update will work fine on unencrypted and unauthenticated fields, but will not work correctly if encrypted or authenticated fields are involved.
And I've observed that when I use the mongoose create method my fields are encrypted into the _ct field. However, if I then use findByIdAndUpdate to update my object, I see the fields are stored in plain text (as output from the mongodb console via the find command).
From save
> db.tenants.find().pretty()
{
    "_id" : ObjectId("554b7f8e7806c204e0c7589e"),
    "_ac" : BinData(0,"YdJjOUJhzDWuDE5oBU4SH33O4qM2hbotQTsF6NzDnx4hWyJfaWQiLCJfY3QiXQ=="),
    "_ct" : BinData(0,"YaU4z/UY3djGCKBcgMaNIFHeNp8NJ9Woyh9ahff0hRas4WD80V80JE2B8tRLUs0Qd9B7IIzHsq6O4pYub5VKJ1PIQA+/dbStZpOH/KfvPoDC6DzR5JdoAu+feU7HyFnFCMY81RZeJF5BKJylhY1+mG4="),
    "__v" : 0
}
After findByIdAndUpdate
> db.tenants.find().pretty()
{
    "_id" : ObjectId("554b7f8e7806c204e0c7589e"),
    "_ac" : BinData(0,"YdJjOUJhzDWuDE5oBU4SH33O4qM2hbotQTsF6NzDnx4hWyJfaWQiLCJfY3QiXQ=="),
    "_ct" : BinData(0,"YaU4z/UY3djGCKBcgMaNIFHeNp8NJ9Woyh9ahff0hRas4WD80V80JE2B8tRLUs0Qd9B7IIzHsq6O4pYub5VKJ1PIQA+/dbStZpOH/KfvPoDC6DzR5JdoAu+feU7HyFnFCMY81RZeJF5BKJylhY1+mG4="),
    "__v" : 0,
    "userId" : ObjectId("55268f43cbfc87be221cd611"),
    "social" : "123-45-6789",
    "last" : "bar",
    "first" : "foo"
}
Is there a recommended strategy for updating objects and maintaining the encryption with mongoose-encryption?
As you quoted, the documentation for mongoose-encryption clearly states that it does not work for update.
https://github.com/joegoldbeck/mongoose-encryption
The Mongoose update hook is a little tricky as well.
What you could potentially do is model your collection so that the fields which need to be encrypted live in a separate collection altogether, and in the parent collection just link them via ids.
Person = {
    _id: <ObjectId>
    name: Blah
    ..
    ..
    documents: [
        { 'doc_id': <ObjectId1> },
        { 'doc_id': <ObjectId2> },
    ]
}

Documents = [
    {
        "_id" : <ObjectId1>,
        "_ac" : BinData(0,"YdJjOUJhzDWuDE5oBU4SH33O4qM2hbotQTsF6NzDnx4hWyJfaWQiLCJfY3QiXQ=="),
        "_ct" : BinData(0,"YaU4z/UY3djGCKBcgMaNIFHeNp8NJ9Woyh9ahff0hRas4WD80V80JE2B8tRLUs0Qd9B7IIzHsq6O4pYub5VKJ1PIQA+/dbStZpOH/KfvPoDC6DzR5JdoAu+feU7HyFnFCMY81RZeJF5BKJylhY1+mG4="),
        "__v" : 0
    }
    ...
    ...
]
This will increase code reuse as well.
I have implemented a strategy that I don't think is the most efficient, but it works.
I need to have all my data in the database encrypted, so I can't use the above approach.
What I did is create an update function that finds the document I want to modify, then constructs a new schema object and assigns the _id of the found document to the new object.
Then I delete the original document and after that save the new object, which has the original _id. The only problem I found is that mongoose throws a duplicate _id error that is printed to the console, but it still works and the _ids aren't duplicated.
I have tried replacing the _id and tracking the document with another property, but it still throws that error; either way, the data is stored as expected.
exports.update = (req, res, next) => {
    Solucion.findOne({_id: req.params.id})
        .then(document => {
            if (!document) {
                res.status(404).json({
                    message: notFoundMessage,
                    data: null,
                    error: null
                })
            } else {
                const solucion = new Solucion({
                    _id: document._id,
                    identificacion: document.identificacion,
                    informacion: document.informacion,
                    estado: req.body
                })
                Solucion.deleteOne({_id: document._id})
                    .then(() => { return solucion.save() })
                    .then(result => {
                        return res.status(201).json({
                            message: editedSavedMessage,
                            data: result,
                            error: null
                        });
                    })
                    .catch(err => {
                        errorHandler.errorHandler(err, res);
                    })
            }
        })
};
UPDATE 29/07/2020
I have found that if you use the save method with the same _id, the data is stored encrypted, but Mongo also creates your schema structure with all values set to null.
Beyond that it seems to work as expected, as the data is not visible in the DB.
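For comparison, here is a minimal sketch of a simpler variant of the same idea, assuming the mongoose-encryption plugin encrypts in its save hook (the route shape and field names mirror the snippet above but are only illustrative): load the document, assign the changed fields, and save it, so the fields are re-encrypted without deleting and recreating the document.

exports.update = (req, res) => {
    Solucion.findById(req.params.id)
        .then(document => {
            if (!document) {
                return res.status(404).json({ message: notFoundMessage, data: null, error: null });
            }
            // Modify the loaded document in memory, then save it so the
            // plugin's save-time encryption runs again on the changed fields.
            document.estado = req.body;
            return document.save()
                .then(result => res.status(200).json({ message: editedSavedMessage, data: result, error: null }));
        })
        .catch(err => errorHandler.errorHandler(err, res));
};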

Programmatically modify existing mongo document

Resolved: the issue was with the PUT request; I had to specify the header properly and send one field at a time.
curl -X PUT -H "Accept: application/json" -d "prenom=Maxwell" localhost:3000/contact/51df5cec5e88a2bbce7fac05
I'm trying to modify a mongodb document via coffeescript in a node.js application I'm currently working on, and when I try:
curl -X PUT -d '{"nom": "House", "prenom": "Maxwell"}' localhost:3000/contact/51ddb907ae3267d6154a3e64
on this
{
    "_id": "51ddb907ae3267d6154a3e64",
    "archive": 1,
    "nom": "Bir",
    "prenom": "Sim"
}
The ID and the routes are correct, so I'm pretty sure the error lies in the JavaScript logic, but I can't seem to find the right angle to work it. Here is the defective code:
exports.modifyContact = (req, res) ->
  db.collection "data-center", (erreur, collection) ->
    collection.update { _id: new mongo.BSONPure.ObjectID(req.params.id) }, { $set: req.body }, (erreur, resultat) ->
      res.send resultat
      res.end()
and the result is
{
    "_id" : ObjectId("51df4ad424f6d9207cc3e2d5"),
    "nom" : "Bir",
    "nom": "House",
    "prenom": "Maxwell" : "",
    "prenom" : "Sim"
}
I can't seem to find an effective way to set missing values and modify values already in there. What is wrong?
Give this a try instead:
exports.modify = (req, res) ->
  fields = if (typeof req.body == 'string') then JSON.parse(req.body) else req.body
  db.collection "data-center", (erreur, collection) ->
    # check erreur here
    collection.update { _id: new mongo.BSONPure.ObjectID(req.params.id) }, { $set: fields }, (erreur, resultat) ->
      # check erreur here
      res.send resultat
      res.end()
It is important to note that inserting data directly from req.body without some sort of validation/content checking is not secure and should be avoided (even if it works as expected).
If you want to just "merge" a set of fields into an existing object, you do it this way:
test:PRIMARY> db.t2.insert({b:1})
test:PRIMARY> db.t2.update({b:1}, {$set: {a:1, c:2}})
test:PRIMARY> db.t2.find({b:1})
{ "_id" : ObjectId("520a3f10e2d66ef50d3b042b"), "a" : 1, "b" : 1, "c" : 2 }
Ah yeah, and for the JSON issue: you cannot return the "original" object as JSON. You need to perform a findOne to retrieve the updated object from MongoDB, or change to using findAndModify with the new parameter to get the changed object in a single operation. The existing object might well have circular references.
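As a sketch of that last suggestion, in plain JavaScript rather than CoffeeScript, and assuming a reasonably recent node driver (3.x/4.x) with db being an open database handle as in the question, findOneAndUpdate applies the $set and hands back the updated document in one call:

const { ObjectId } = require('mongodb');

exports.modifyContact = (req, res) => {
    db.collection('data-center').findOneAndUpdate(
        { _id: new ObjectId(req.params.id) },
        { $set: req.body },
        { returnOriginal: false } // on newer drivers: { returnDocument: 'after' }
    )
        .then(result => res.json(result.value)) // result.value is the updated document
        .catch(err => res.status(500).json({ message: err.message }));
};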
