Asynchronous CRUD operations with express - node.js

I have a basic CRUD application using HTML forms, Node.js/Express, and MongoDB. I have been learning about synchronous vs. asynchronous code via callbacks, promises, and async/await, and to my understanding, for a CRUD application you would want the operations to be asynchronous so multiple users can perform them at the same time. I am trying to implement async/await with my Express CRUD operations and am not sure whether they are executing synchronously or asynchronously.
Here is my update function, which lets a user type in the _id of the blog they want to change, then enter a new title and new body for the blog and submit it. In its current state, to my knowledge, it executes synchronously:
app.post('/update', (req, res) => {
    const oldValue = { _id: new mongodb.ObjectId(String(req.body.previousValue)) }
    const newValues = { $set: { blogTitle: req.body.newValue, blogBody: req.body.newValue2 } }
    db.collection("miscData").updateOne(oldValue, newValues, function (err, result) {
        if (err) throw err;
        console.log("1 document updated");
        res.redirect('/')
    });
})
This is how I was going to change it to be asynchronous:
app.post('/update', async (req, res) => {
    const oldValue = { _id: new mongodb.ObjectId(String(req.body.previousValue)) }
    const newValues = { $set: { blogTitle: req.body.newValue, blogBody: req.body.newValue2 } }
    await db.collection("miscData").updateOne(oldValue, newValues, function (err, result) {
        if (err) throw err;
        console.log("1 document updated");
        res.redirect('/')
    });
})
Both blocks of code work. However, I am not sure whether the second block does what I intend, which is to let a user update a blog without blocking the call stack, or whether the second block would only make sense if I were running more functions after the await. Does this achieve the intended purpose, and if not, how could/should I do that?

db.collection(...).updateOne is always asynchronous, so you need not worry that a long-running database operation might block your application. There are two ways to obtain the asynchronous result:
With a callback function
db.collection(...).updateOne(oldValues, newValues, function(err, result) {...});
console.log("This happens synchronously");
The callback function with the two parameters (err, result) will be called asynchronously, after the database operation has completed (and after the console.log). Either err contains a database error message or result contains the database result.
With promises
try {
    var result = await db.collection(...).updateOne(oldValues, newValues);
    // Do something with result
} catch (err) {
    // Do something with err
}
console.log("This happens asynchronously");
The updateOne function without a callback function as third parameter returns a promise that must be awaited. The statements that do something with result will be executed asynchronously, after the database operation has successfully completed. If a database error occurs, the statements in the catch block are executed instead. In either case (success or error), the console.log is only executed afterwards.
(If updateOne does not have a two-parameter version, you can write
var result = await util.promisify(db.collection(...).updateOne)(oldValues, newValues);
using util.promisify.)
Your second code snippet contains a mixture of both ways (third parameter plus await), which does not make sense.
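Applied to your route, a corrected version would drop the callback and rely on the returned promise, handling errors with try/catch (a sketch, assuming db and mongodb are set up as in your snippets):
app.post('/update', async (req, res) => {
    const oldValue = { _id: new mongodb.ObjectId(String(req.body.previousValue)) }
    const newValues = { $set: { blogTitle: req.body.newValue, blogBody: req.body.newValue2 } }
    try {
        // No third argument: updateOne now returns a promise we can await
        await db.collection("miscData").updateOne(oldValue, newValues);
        console.log("1 document updated");
        res.redirect('/');
    } catch (err) {
        // Respond with an error instead of throwing inside the route handler
        res.status(500).send('Database error');
    }
});
This does not make the database call any "more asynchronous" than the callback version; it only changes how you consume the asynchronous result.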

Related

.then statements not executing sequentially

I have an application using Node.js/Express. Within this code I have the following promise designed to check if an email already exists in my (PostGres) database:
//queries.js
const checkEmail = function(mail) {
    return new Promise(function(resolve, reject) {
        pool.query('SELECT * FROM clients WHERE email = $1', [mail], function(error, results) {
            if (error) {
                reject(new Error('Client email NOT LOCATED in database!'));
            } else {
                resolve(results.rows[0]);
            }
        }); //pool.query
    }); //new promise
}
In my 'main (server.js)' script file, I have a route which is called upon submission of a 'signup' form. When the post to this route is processed...I run the script above to check if the passed email address is already located in the database, along with various other 'hashing' routines:
My code is as follows:
//server.js
const db = require('./queries');
const traffic = require('./traffic');
const shortid = require('shortid');
...
app.post('/_join_up', function(req, res) {
    if (!req.body) {
        console.log('ERROR: req.body has NOT been returned...');
        return res.sendStatus(400)
    }
    var newHash, newName;
    var client = req.body.client_email;
    var creds = req.body.client_pword;
    var newToken = shortid.generate();
    var firstname = req.body.client_alias;
    db.sanitation(client, creds, firstname).then(
        function(direction) {
            console.log('USER-SUPPLIED DATA HAS PASSED INSPECTION');
        }
    ).then(
        db.checkEmail(client).then(
            function(foundUser) {
                console.log('HEY THERE IS ALREADY A USER WITH THAT EMAIL!', foundUser);
            },
            function(error) {
                console.log('USER EMAIL NOT CURRENTLY IN DATABASE...THEREFORE IT IS OK...');
            }
        )
    ).then(
        traffic.hashPassword(creds).then(
            function(hashedPassword) {
                console.log('PASSWORD HASHED');
                newHash = hashedPassword;
            },
            function(error) {
                console.log('UNABLE TO HASH PASSWORD...' + error);
            }
        )
    ).then(
        traffic.hashUsername(firstname).then(
            function(hashedName) {
                console.log('NAME HASHED');
                newName = hashedName;
            },
            function(error) {
                console.log('UNABLE TO HASH NAME...' + error);
            }
        )
    ).then(
        db.createUser(client, newName, newHash, newToken).then(
            function(data) {
                console.log('REGISTERED A NEW CLIENT JOIN...!!!');
                res.redirect('/landing'); //route to 'landing' page...
            },
            function(error) {
                console.log('UNABLE TO CREATE NEW USER...' + error);
            }
        )
    ).catch(function(error) {
        console.log('THERE WAS AN ERROR IN THE SEQUENTIAL PROCESSING OF THE USER-SUPPLIED INFORMATION...' + error);
        res.redirect('/');
    });
}); //POST '_join_up' is used to register NEW clients...
My issue is the '.then' statements do not appear to run sequentially. I was under the impression such commands only run one after the other...with each running only when the previous has completed. This is based upon the logs which show the readout of the 'console.log' statements:
USER-SUPPLIED DATA HAS PASSED INSPECTION
PASSWORD HASHED
NAME HASHED
UNABLE TO CREATE NEW USER...Error: Unable to create new CLIENT JOIN!
USER EMAIL NOT CURRENTLY IN DATABASE...THEREFORE IT IS OK...
As mentioned previously, I am under the impression the '.then' statements should run synchronously, therefore the last statement ("USER EMAIL NOT CURRENTLY IN DATABASE...THEREFORE IT IS OK...") should in fact be after the first...before the "PASSWORD HASHED" according to the layout of the '.then' statements. Is this normal behavior...or do I have an error in my code?
Sorry for my confusion however I find '.then' statements and promises to be somewhat confusing for some reason. I thank you in advance.
TLDR - You must pass a function reference to .then() so the promise infrastructure can call that function later. You are not doing that in several places in your code.
A more specific example from your code:
You have several structures like this:
.then(db.createUser().then())
This is incorrect. This tells the interpreter to run db.createUser() immediately and pass its return result (a promise) to .then(). .then() will completely IGNORE anything you pass it that is not a function reference, and your promises will not be properly chained.
Instead, you must pass a function reference to .then() something like this (not sure what execution logic you actually want):
.then(() => { return db.createUser().then() })
The main point here is that if you're going to sequence asynchronous operations, you must chain their promises, which means you must not execute the second until the first calls the function you pass to .then(). You weren't passing a function to .then(); you were executing a function immediately and passing the resulting promise to .then(), which was completely ignored, so your function was executed before the parent promise resolved.
FYI, sequencing a bunch of asynchronous operations (which it appears you are trying to do here) can take advantage of await instead of .then() and end up with much simpler looking code.
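To illustrate, here is a sketch of that route with async/await. It is one possible interpretation of the intended flow, assuming db.sanitation, db.checkEmail, traffic.hashPassword, traffic.hashUsername and db.createUser all return promises as in your code; the duplicate-email handling is an assumption:
app.post('/_join_up', async function(req, res) {
    if (!req.body) {
        console.log('ERROR: req.body has NOT been returned...');
        return res.sendStatus(400);
    }
    var client = req.body.client_email;
    var creds = req.body.client_pword;
    var newToken = shortid.generate();
    var firstname = req.body.client_alias;
    try {
        await db.sanitation(client, creds, firstname);
        console.log('USER-SUPPLIED DATA HAS PASSED INSPECTION');
        var foundUser = null;
        try {
            foundUser = await db.checkEmail(client);
        } catch (error) {
            console.log('USER EMAIL NOT CURRENTLY IN DATABASE...THEREFORE IT IS OK...');
        }
        if (foundUser) {
            console.log('HEY THERE IS ALREADY A USER WITH THAT EMAIL!', foundUser);
            return res.redirect('/'); // assumption: bail out on a duplicate email
        }
        var newHash = await traffic.hashPassword(creds);
        console.log('PASSWORD HASHED');
        var newName = await traffic.hashUsername(firstname);
        console.log('NAME HASHED');
        await db.createUser(client, newName, newHash, newToken);
        console.log('REGISTERED A NEW CLIENT JOIN...!!!');
        res.redirect('/landing');
    } catch (error) {
        console.log('THERE WAS AN ERROR IN THE SEQUENTIAL PROCESSING OF THE USER-SUPPLIED INFORMATION...' + error);
        res.redirect('/');
    }
});
Each await suspends the route handler until the previous step finishes, which is exactly the sequencing the .then() chain was supposed to provide.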

How to store mongodb output in variable using nodejs in lambda function?

In a lambda function, I have the following code:
var user;
exports.handler = function uploadToS3(event, context, callback) {
    var name = event["username"];
    MongoClient.connect(uri, { useNewUrlParser: true }, (error, client) => {
        if (error) return 1; // Checking the connection
        db = client.db(databasename);
        db.collection("user_profile").findOne({ username: name }, function(err, result) {
            if (err) throw err;
            user = result._id;
            console.log(user); // 1st console.log
        });
    });
    console.log(user); // 2nd console.log
};
In the above code, I have declared user as a global variable. The 1st console.log displays the value, but the 2nd console.log prints undefined. Find the output of the lambda function below.
Function Logs:
2019-08-23T15:23:34.610Z 83141f62-f840-4e52-9440-35f3be7b0dc8
5d5eaa9f921ed00001ee1c3f
2019-08-23T15:23:34.192Z 83141f62-f840-4e52-9440-35f3be7b0dc8
undefined
How can I get a value in the second case?
The problem is not so much storing the MongoDB output in a variable as it is synchronous vs. asynchronous behavior. JavaScript by design is synchronous, but has the capability to handle asynchronous tasks. The method that performs the Mongo query is asynchronous. Read it this way: JavaScript calls findOne(), which returns a 'pending' promise, then your script continues on to call console.log(user), which is still undefined. When the response from MongoDB comes back, JavaScript resolves the promise and executes any further actions and/or callbacks.
The second console.log is evaluated BEFORE the Mongo client returns a response and assigns a new value to your variable. If you look at the timestamps of the responses, the undefined one comes back before the one with the value. It looks like you are using Mongoose, which should return a promise, so you can try putting that second call inside a .then or a .done block, e.g.:
var user;
exports.handler = function uploadToS3(event, context, callback) {
    var name = event["username"];
    MongoClient.connect(uri, { useNewUrlParser: true }, (error, client) => {
        if (error) return 1; // Checking the connection
        db = client.db(databasename);
        db.collection("user_profile").findOne({ username: name }, function(err, result) {
            if (err) throw err;
            user = result._id;
            console.log(user); // 1st console.log
        })
        .done(function() {
            console.log(user); // 2nd console.log
        });
    });
};
If not using mongoose... make your own promise, or use a callback, or just try Mongoose (it rocks!) :)
*note that I put the .done after the findOne(), but I believe you could attach a .done() to the .connect() as well. (Don't quote me on that. You would have to test it, see when that promise resolves exactly)
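For completeness, here is a sketch without Mongoose that leans on the native driver returning promises when you omit the callback (true for reasonably recent driver versions; treat this as an illustration, not a drop-in):
exports.handler = async function uploadToS3(event, context) {
    var name = event["username"];
    // With no callback supplied, connect() and findOne() return promises
    var client = await MongoClient.connect(uri, { useNewUrlParser: true });
    try {
        var db = client.db(databasename);
        var result = await db.collection("user_profile").findOne({ username: name });
        var user = result._id;
        console.log(user); // defined here, because we waited for the query
        return user;
    } finally {
        await client.close();
    }
};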
Additionally, I would suggest storing this value outside of your lambda somehow. You might not get the same container bootstrapped for each lambda execution. You could have some issues with this down the line.
Check out:
AWS Lambda caching issues with Global Variables - https://medium.com/tensult/aws-lambda-function-issues-with-global-variables-eb5785d4b876
Improving Performance From Your Lambda Function From the Use of Global Variables - https://blog.ruanbekker.com/blog/2018/08/27/improving-performance-from-your-lambda-function-from-the-use-of-global-variables/
AWS Lambda best practices - https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html

nodejs multi threading async parallel

Currently I have a dashboard that lists a bunch of records in a table. Users can select one record and hit execute, and I send an AJAX POST request to my routes middleware, which executes 3 functions inside async.waterfall and returns a 200 response back to my client if everything works correctly. This async waterfall usually takes about 40-55 seconds to finish executing (fn_1, fn_2 and fn_3) and works perfectly fine.
router.post('/url', function(req, res, next) {
    try {
        async.waterfall([
            fn_1,
            fn_2,
            fn_3
        ], function (err, body) {
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.end(JSON.stringify({"error": err, "result": body}));
        });
        function fn_1(callback) {
            callback(null, response);
        }
        function fn_2(result, callback) {
            callback(err, result);
        }
        function fn_3(result, callback) {
            callback(null, result);
        }
    }
    catch (err) {
        console.log(err)
    }
});
But if I were to give users the ability to select MULTIPLE records and send that as an array back to my route middleware, how can I execute multiple async.waterfall chains, one for each item in the array, in parallel?
I can run a loop and execute the waterfall inside the loop, but then it will wait for each item to complete before starting the next iteration. This is not what I want.
Is this doable in Node/Express? What's the easiest way to achieve this? Or are there modules/plugins that can help solve this case?
Here is an abbreviated version of your code and how it could be changed to suit your needs. If none of your calls need data from any of the other calls, you can just run them in parallel with promises and use Promise.all to capture the result.
function fn_1(callback) {
    // See function fn_3 for structure
}
function fn_2(result, callback) {
    // See function fn_3 for structure
}
function fn_3(result, callback) {
    return new Promise((resolve, reject) => {
        resolve(result)
    })
    .then(d => {
        // Instead of callbacks, use a "then"
        // block/statement.
        //
        // Do something with d here.
    })
}
Promise.all([fn_1(), fn_2(), fn_3()])
    .then(v => {
        // Do something with v
    })
    .catch(e => {
        // Do something with e
    })
I tend to advocate the use of native Promises over libraries like async; however, since you're already using async...
You can use parallel and map each item in the array to a waterfall handler e.g.
async.parallel(
    myArray.map(val => cb => async.waterfall([fn_1, fn_2, fn_3], cb)),
    (err, results) => {
        // return consolidated response
    }
)
You would need to rework your waterfall handlers to not send a response but instead just propagate any errors.
It should also be noted that parallel is only useful if you are in fact running I/O-bound code; if the code is anything like your example, then you won't really gain anything from using parallel over something like async.each.
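If you do go the native-promise route, here is a sketch of the whole thing. It assumes fn_1, fn_2 and fn_3 are rewritten to return promises and to accept the previous step's result, and that the selected records arrive as req.body.records, which is an assumption about your payload:
// Hypothetical promise-returning versions of your three steps
async function processRecord(record) {
    const a = await fn_1(record); // step 1 for this record
    const b = await fn_2(a);      // step 2 uses step 1's result
    return fn_3(b);               // step 3 uses step 2's result
}

router.post('/url', async function(req, res) {
    try {
        // Each record's 3-step chain runs sequentially,
        // but the records themselves are processed in parallel
        const results = await Promise.all(req.body.records.map(processRecord));
        res.status(200).json({ error: null, result: results });
    } catch (err) {
        res.status(500).json({ error: String(err), result: null });
    }
});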

Recover an object requested with MongoDB Driver on node JS

I am trying to retrieve an object from a MongoDB database in a Node.js file, and it doesn't work.
In a file called db.js, I have written the following code:
var MongoClient = require('mongodb').MongoClient;
module.exports = {
    FindinColADSL: function() {
        return MongoClient.connect("mongodb://localhost/sdb").then(function(db) {
            var collection = db.collection('scollection');
            return collection.find({"type" : "ADSL"}).toArray();
        }).then(function(items) {
            return items;
        });
    }
};
And I try to use it in the file server.js:
var db = require(__dirname+'/model/db.js');
var collection = db.FindinColADSL().then(function(items) {
    return items;
}, function(err) {
    console.error('The promise was rejected', err, err.stack);
});
console.log(collection);
In the result I have "Promise { }". Why?
I just want to obtain an object from the database in order to manipulate it in the other functions in the server.js file.
The then function called on promises returns a promise. If a value is returned within a promise handler, the object the promise evaluates to is another promise which resolves to the returned value. Take a look at this question for a full explanation of how it works.
If you want to verify that your code is successfully getting the items, you will have to restructure your code to account for the structure of promises.
var db = require(__dirname+'/model/db.js');
var collection = db.FindinColADSL().then(function(items) {
    console.log(items);
    return items;
}, function(err) {
    console.error('The promise was rejected', err, err.stack);
});
That should log your items after they are retrieved from the database.
Promises work this way to make working asynchronously more simple. If you put more code below your collection code, it would run at the same time as your database code. If you have other functions within your server.js file, you should be able to call them from within the body of your promises.
As a rule, remember a promise will always return a promise.
The callback functions created in the then() are asynchronous, thus making the console.log command execute before the promise is even resolved. Try placing it inside the callback function instead like below:
var collection = db.FindinColADSL().then(function(items) {
    console.log(items)
    return items;
}, function(err) {
    console.error('The promise was rejected', err, err.stack);
});
Or, for the sake of another example, use the logger functions themselves as the callbacks; this also shows that the last console.log call will actually run before the others:
db.FindinColADSL()
    .then(console.log)
    .catch(console.error)
console.log('This function is triggered FIRST')
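If you would rather use async/await than .then, here is a sketch of server.js that keeps FindinColADSL exactly as exported above:
var db = require(__dirname + '/model/db.js');

async function main() {
    try {
        // Wait for the query; items is the actual array, not a Promise
        var items = await db.FindinColADSL();
        console.log(items);
        // ...pass items to the other functions in server.js here
    } catch (err) {
        console.error('The promise was rejected', err, err.stack);
    }
}

main();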

Testing asynchronous middleware functionality with Mongoose

I'm using a save middleware in Mongoose to create a log of activity in the DB whenever some action is taken. Something like
UserSchema.post("save", function (doc) {
mongoose.model("Activity").create({activity: "User created: " + doc._id});
});
This appears to work fine, but the problem is that I can't test it because there is no way to pass a callback to post (which probably would not make sense). I test this out using mocha with:
User.create({name: "foo"}, function (err, user) {
Activity.find().exec(function (err, act) {
act[0].activity.should.match(new RegExp(user._id));
done();
});
});
The problem is that the Activity.create apparently does not finish before .find is called. I can get around this by wrapping .find in setTimeout, but this seems hacky to me. Is there any way to test asynchronous mongoose middleware operations?
Unfortunately, there's not a way to reliably interleave these two asynchronous functions in the way you'd like (as there aren't threads, you can't "pause" execution). They can complete in an inconsistent order, which leaves you to solutions like a timeout.
I'd suggest you wire up an event handler to the Activity class so that when an Activity is written or fails, it checks a list of queued (hashed?) Activities that should be logged. So, when an activity is created, add it to the list ("onactivitycreated"). Then, when it is eventually written ("onactivitywritten"), compare and maybe remove successes (not sure what makes sense with mocha). When your tests are complete, you could check whether the list is empty.
You can use util.inherits(Activity, EventEmitter) for example to extend the Activity class with event functionality.
Now, you'll still need to wait/timeout on the list, if there were failures that weren't handled through events, you'd need to handle that too.
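A minimal sketch of that idea, using a standalone EventEmitter instead of util.inherits for brevity (the emitter and event names here are hypothetical, not part of Mongoose):
var EventEmitter = require('events').EventEmitter;

// Hypothetical emitter that the post-save hook notifies
var activityEvents = new EventEmitter();

UserSchema.post("save", function (doc) {
    mongoose.model("Activity").create({activity: "User created: " + doc._id},
        function (err, act) {
            if (err) return activityEvents.emit("activityfailed", err);
            activityEvents.emit("activitywritten", act);
        });
});

// In the mocha test, wait for the event instead of a setTimeout
it("logs an activity when a user is created", function (done) {
    activityEvents.once("activitywritten", function (act) {
        act.activity.should.match(/User created/);
        done();
    });
    User.create({name: "foo"}, function (err) {
        if (err) done(err);
    });
});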
Edit -- Ignore the suggestion below as an interesting demo of async that won't work for you. :)
If you'd like to test them, I'd have a look at a library like async where you can execute your code in a series (or waterfall in this case) so that you can first create a User, and then, once it completes, verify that the correct Activity has been recorded. I've used waterfall here so that values can be passed from one task to the next.
async.waterfall([
    function(done) {
        User.create({name: "foo"}, function (err, user) {
            if (err) { done(err); return; }
            done(null, user._id); // 2nd param sent to next task as 1st param
        });
    },
    function(id, done) { // got the _id from above
        // substitute efficient method for finding
        // the corresponding activity document (maybe it's another field?)
        Activity.findById(id, function (err, act) {
            if (err) { done(err); return; }
            if (act) { done(null, true); }
            else { done(null, false); } // not found?!
        });
    }
], function(err, result) {
    console.log("Success? " + result);
});
Async post-save middleware will apparently be available in Mongoose 4.0.0:
https://github.com/LearnBoost/mongoose/issues/787
https://github.com/LearnBoost/mongoose/issues/2124
For now, you can work around this by monkey-patching the save method on the document so that it supports async post-save middleware. The following code is working for me in a similar scenario to yours.
var async = require('async');
var Model = require('mongoose/lib/model');

// put any functions here that you would like to run post-save
var postSave = [
    function(next) {
        console.log(this._id);
        return next();
    }
];

// monkey patch the save method
FooSchema.methods.save = function(done) {
    return Model.prototype.save.call(this, function(err, result) {
        if (err) return done(err, result);
        // bind the postSave functions to the saved model
        var fns = postSave.map(function(f) { return f.bind(result); });
        return async.series(fns, function(err) { done(err, result); });
    });
};
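With that patch in place, a test can rely on save's callback firing only after the post-save functions have finished. A sketch (assuming the Activity-creating logic from the original post("save") hook is moved into one of the postSave functions so it can take a next callback; Foo and the assertions are illustrative):
it("runs post-save functions before the save callback fires", function (done) {
    var doc = new Foo({ name: "foo" });
    doc.save(function (err, saved) {
        if (err) return done(err);
        // async.series has completed by now, so anything created
        // in postSave can be queried without a setTimeout
        Activity.find().exec(function (err, acts) {
            if (err) return done(err);
            acts[0].activity.should.match(new RegExp(saved._id));
            done();
        });
    });
});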
