Node.js async middleware is not working

I am trying to build an OAuth authentication flow myself with Node.js, Express, and MongoDB. Before each route I want to check whether the client exists in my MongoDB, so I made a middleware function that checks whether the client id and client secret are present in my DB. Here is my middleware function:
module.exports = function() {
  return function(req, res, next) {
    // Implement the middleware function based on the options object
    winston.info("check client")
    User.getClient(req.get("clientId"), req.get("clientSecret"), function (err, client) {
      winston.info("checking time");
      if (client) {
        return next();
      } else {
      }
    });
    winston.info("check client done")
  }
}
But since I am calling my Mongoose model asynchronously, I think the middleware isn't working properly: the request passes through before the async call finishes.
What I want to achieve is that the middleware should only let the request pass if the client is available in our DB, so the middleware has to move forward only after it gets the response from Mongoose. Here is my router code:
var express = require('express')
, router = express.Router()
, clientVerify = require("../middlewares/clientVerify");
router.use(clientVerify())

I used a promise to solve the issue. Thanks to @Vassilis Pallas for his guidance in the comments.
return new Promise(function (resolve, reject) {
  User.getClient(req.get("clientId"), req.get("clientSecret"), function (err, client) {
    winston.info(client);
    if (client) {
      resolve(client);
    } else {
      res.status(401).send({
        status: 401,
        message: "unauthorized"
      });
    }
  });
}).then(function (saveResult) {
  saveResult = saveResult && typeof saveResult == 'object' ? saveResult.toJSON() : saveResult;
  winston.info(saveResult);
  if (saveResult)
    return next();
});
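For reference, the same check can also be done without the Promise wrapper by keeping every decision inside the Mongoose callback; a minimal sketch, assuming User.getClient keeps the signature used above:

module.exports = function() {
  return function(req, res, next) {
    User.getClient(req.get("clientId"), req.get("clientSecret"), function (err, client) {
      if (err) {
        // database error: pass it to Express' error handler instead of hanging
        return next(err);
      }
      if (!client) {
        // unknown client: stop the chain with a 401
        return res.status(401).send({ status: 401, message: "unauthorized" });
      }
      // client found: continue to the next middleware / route handler
      next();
    });
  };
};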

Related

Why is my sinon stub acting like it's calling the real function?

I'm trying to follow this example: https://www.alexjamesbrown.com/blog/development/stubbing-middleware-testing-express-supertest/ but the sinon stub doesn't seem to be executing the wrapped code. I've seen lots of stackoverflow posts regarding this issue but none of the answers have helped me figure out what I'm doing wrong. Whenever I run my test I get the following error:
0 passing (42ms)
1 failing

1) GET /api/config/buildPro/sites
   should return a list of sites:
   Error: expected 200 "OK", got 403 "Forbidden"
at Test._assertStatus (node_modules\supertest\lib\test.js:268:12)
at Test._assertFunction (node_modules\supertest\lib\test.js:283:11)
at Test.assert (node_modules\supertest\lib\test.js:173:18)
at Server.localAssert (node_modules\supertest\lib\test.js:131:12)
at emitCloseNT (net.js:1655:8)
at processTicksAndRejections (internal/process/task_queues.js:83:21)
This leads me to believe it's not calling the stub code but instead executes the actual authorization function. Here's my code:
app.js
const express = require('express');
const app = express();
const authorization = require('./security/authorization');
const configRoutes = require('./api/routes/config');
app.all('/api/*', authorization.authorize);
app.use('/api/config', configRoutes);
module.exports = app;
authorization.js
const aad = require('azure-ad-jwt');
module.exports.authorize = (req, res, next) => {
  if (!req.headers.authorization) {
    res.status(403).json({
      message: "Auth failed"
    });
    return;
  }
  const jwtToken = req.headers.authorization.replace('Bearer ', '');
  aad.verify(jwtToken, null, function (err, result) {
    if (result) {
      next();
    } else {
      res.status(401).json({
        message: "Auth failed"
      });
    }
  });
};
config.spec.js
const request = require('supertest');
const sinon = require('sinon');
const app = require('../app');
const authorization = require('../security/authorization');

var agent;

describe('GET /api/names', () => {
  before(() => {
    ensureAuthenticatedSpy = sinon.stub(authorization, 'authorize');
    ensureAuthenticatedSpy.callsArgWithAsync(2);
    agent = require('supertest')
      .agent(require('../app'));
  });
  it('should return a list of names', done => {
    agent
      .get('/api/config/buildPro/sites')
      .expect(200)
      .end((err, res) => {
        if (err) return done(err);
        done();
      });
  });
});
"instead executes the actual authorization"
This is exactly what is happening.
Note this code in the server:
app.all('/api/*', authorization.authorize);
This resolves the authorize function reference at that particular point in the program, and Express will keep using that particular function (the original one!) for the rest of the program's lifetime.
This:
ensureAuthenticatedSpy = sinon.stub(authorization, 'authorize');
is called later, and since Sinon has no way to change the reference to the original authorize that was captured earlier, it is effectively a no-op.
In other words, dependency injection in plain JavaScript apps is not as simple as one might want.
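A tiny illustration of that reference capture, with no Express or Sinon involved:

const authorization = { authorize: function original() {} };

const captured = authorization.authorize;        // what app.all() effectively stored at startup

authorization.authorize = function stubbed() {}; // what sinon.stub() effectively does later

console.log(captured === authorization.authorize); // false: the stored reference still points at the original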
To work around this, you can change the original route in app.js:
app.all('/api/*', (req, res, next) => authorization.authorize(req, res, next));
Now your closure resolves authorization.authorize every time it's called, which lets you mock/spy on the function you're interested in. However, this solution is far from elegant.
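For completeness, a sketch of how the test setup could look once that wrapper is in place (callsFake is used here instead of callsArgWithAsync; it is assumed your Sinon version provides it):

// app.js: the wrapper re-reads authorization.authorize on every request
app.all('/api/*', (req, res, next) => authorization.authorize(req, res, next));

// config.spec.js
before(() => {
  // replace authorize before any request reaches the app
  sinon.stub(authorization, 'authorize').callsFake((req, res, next) => next());
  agent = request.agent(app);
});

after(() => {
  // put the real implementation back for other test files
  authorization.authorize.restore();
});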

Define/Use a promise in Express POST route on node.js

I currently have a POST route defined in an Express Node.js application as so:
var locationService = require("../app/modules/locationservice.js");
app.post('/createstop', isLoggedIn, function(req, res) {
  locationService.createStop(res, req.body);
});
(for this question, please assume the routing and DB work; my record is created on form submission, it's the response I am struggling with)
In the locationservice.js module I then currently have:
var models = require('../models');
exports.createStop = function(res, formData) {
  models.location.build({ name: formData.name })
    .save()
    .then(function(locationObj) {
      res.json({ dbResult : locationObj });
    });
};
So as you can see, my route invokes the exported createStop function, which uses the Sequelize persistence layer to insert a record asynchronously, after which I can put the result on the response in the promise's then().
So at the moment this only works by passing the response object into the locationservice.js method and setting res.json in the then() there. This is sub-optimal with regard to my service classes and doesn't feel right.
What I would like is to be able to treat my createStop method as a promise (or give it a callback) so I can just return the new location object (or an error) and deal with it in the calling method, since future uses of this method might not have a response object to pass in and populate.
Therefore in the route I would do something more like:
var locationService = require("../app/modules/locationservice.js");
app.post('/createstop', isLoggedIn, function(req, res) {
  locationService.createStop(req.body)
    .then(function(dataBack) {
      res.json(dataBack);
    });
});
That way I could call createStop from elsewhere in the future and react to the result in that promise handler. But this is currently beyond me. I have done my due diligence research, but some expert input on my specific case would be much appreciated.
Your locationservice.js could look like this:
exports.createShop = function(data) {
  // here I have used create instead of build -> save
  return models.location.create(data).then(function(location) {
    // here you return the instance of the saved location
    return location;
  });
};
And then your post() route should look like the one below:
app.post('/createstop', isLoggedIn, function(req, res) {
  locationService.createShop(req.body).then(function(location) {
    // here you access the location created and saved in the createShop function
    res.json(location);
  }).catch(function(error) {
    // handle the error
  });
});
Wrap your createStop function with a promise like so:
exports.createStop = function(formData) {
  return new Promise(function(resolve, reject) {
    models.location.build({ name: formData.name })
      .save()
      .then(function(locationObj) {
        resolve({ dbResult: locationObj });
      });
    // in case of error, call reject();
  });
};
This will allow you to use the .then after the createStop within your router.
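Since Sequelize's build().save() (and create()) already return promises, the explicit new Promise wrapper above is not strictly needed; a minimal sketch that simply returns the promise Sequelize gives you:

exports.createStop = function(formData) {
  // return Sequelize's own promise and let the caller attach .then/.catch
  return models.location
    .build({ name: formData.name })
    .save()
    .then(function(locationObj) {
      return { dbResult: locationObj };
    });
};

The route then stays exactly as in the first answer: call the service, send the result with res.json in .then, and handle failures in .catch.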

Node, express, Monk, mongodb: 400 bad request error on post request. Why am I getting this error?

I've been trying to get a simple nodejs API with CRUD functionality working. I'm using express, and 'monk' package for communicating with mongodb. I've successfully pulled data with a GET request.
I'm unable to get a post() route to work. I'm able to insert new documents into Mongo when the insert is called from a GET route that adds a doc every time it's hit. However, no matter what I do, with or without an actual insert, my POST returns a 400.
Here's my route file:
var express = require('express');
var router = express.Router();

/* GET hours page. (for users to submit hours) */
router.get('/', function (req, res) {
  var db = req.db;
  var collection = db.get('entries');
  collection.find({}, /*{limit:20}, */ function (err, docs) {
    if (err) {
      console.log('couldn\'t load entries');
    }
    res.json(docs);
  });
});

/* POST hours page. (for users to submit hours) */
router.post('/', function (req, res) {
  if (!(req.body.job || req.body.code || req.body.hours)) {
    handleError(res, 'Invalid user input', 'Must complete input', 400);
  }
  var db = req.db;
  var collection = db.get('entries');
  var newEntry = req.body;
  collection.insert(newEntry, function (err, docs) {
    if (err) {
      handleError(res, err.message, 'Failed to create new entry');
    }
    res.json(docs);
  });
});

module.exports = router;
I really don't know why every single POST request is returning a 400. I'm thinking it's a problem with my main file, but it has barely been altered from the initial Express-generated file.
Your if condition is wrong. !(req.body.job || req.body.code || req.body.hours) should be !(req.body.job && req.body.code && req.body.hours).
And are you sure you have really posted anything? Check the Content-Type of your requests, which should be application/x-www-form-urlencoded.
I recommend using supertest to test your app. The usage is very simple.
import request = require("supertest");
import should = require("should");
import app = require("../youApp");

describe("POST /foo", () => {
  it("should post something", done => {
    request(app)
      .post("/foo")
      .send({ job: "my job", code: "...", hours: "..." })
      .expect(200, done);
  });
});
Something more:
- replace var with const.
- use arrow functions instead of function(req, res).
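One more thing worth checking when req.body is empty: the 400 can simply be your own validation firing because no body-parsing middleware is registered. A sketch of the usual setup, assuming Express 4.16+ (older versions need the body-parser package), together with the corrected condition:

// app.js
app.use(express.json());                          // parse application/json bodies
app.use(express.urlencoded({ extended: false })); // parse form bodies

// route
router.post('/', function (req, res) {
  if (!(req.body.job && req.body.code && req.body.hours)) {
    // return so the insert below is never reached on invalid input
    return handleError(res, 'Invalid user input', 'Must complete input', 400);
  }
  // ... insert into the collection as before
});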

Express keeping connection open?

I'm using Node.js, Express and PostgreSQL as the backend.
This is the approach I used to make a rest API:
exports.schema = function (inputs, res) {
  var query = knex('schema')
    .orderBy('sch_title', 'asc')
    .select();
  query.exec(function (err, schemas) {
    if (err) {
      var response = {
        message: 'Something went wrong when trying to fetch schemas',
        thrownErr: err
      };
      console.error(response);
      res.send(500, response);
    }
    if (schemas.length === 0) {
      var message = 'No schemas was found';
      console.error(message);
      res.send(400, message);
      return;
    }
    res.send(200, schemas);
  });
};
It works but after a while postgres logs an error and it's no longer working:
sorry, too many clients already
Do I need to close each request somehow? I could not find anything about this in the Express docs. What could be wrong?
This error only occurs on the production server, not on the development machine.
Update
The app only breaks in one 'module'. The rest of the app works fine, so it's only some queries that give the error.
Just keep one connection open for your whole app. The docs show an example of how to do this.
This code goes in your app.js...
var Knex = require('knex');
Knex.knex = Knex.initialize({
  client: 'pg',
  connection: {
    // your connection config
  }
});
And when you want to query in your controllers/middlewares...
var knex = require('knex').knex;
exports.schema = function (req, res) {
  var query = knex('schema')
    .orderBy('sch_title', 'asc')
    .select();
  // more code...
};
If you place Knex.initialize inside an app.use or app.VERB, it gets called repeatedly for each request, so you'll end up connecting to PG multiple times.
For most cases, you don't need to do an open+query+close for every HTTP request.
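If connections still pile up on the production server, the same config object can also cap the pool size (a sketch; the exact pool options depend on the knex version you are running):

Knex.knex = Knex.initialize({
  client: 'pg',
  connection: {
    // your connection config
  },
  pool: { min: 2, max: 10 } // never hold more than 10 PG connections
});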

Export a function from a function

I have an ExpressJS app with an api.js in routes that manages connecting to Couchbase and then emits a couchbaseConnected event, which is awaited by the init() function inside api.js.
Inside init() I want to define exports such as exports.someFunction = function (req, res) { return something; }. But when I put these exports inside the init() function, I get the error .get() requires callback functions but got a [object Undefined], so it seems like I am doing it wrong.
The question is: how can I export functions from inside another function in Node.js?
Here is the code:
// connecting to couchbase and emitting an event on connection
couchbase.connect(dbConfiguration, function (err, bucket) {
  if (err) {
    console.log(err);
  }
  cb = bucket;
  eventEmitter.emit('couchbaseConnected');
});

// listening for the event and firing init() when it arrives
eventEmitter.on('couchbaseConnected', function (e) {
  console.log('Connected to Couchbase.'.green);
  init();
});

function init() {
  exports.getUserData = function (req, res) {
    if (req.user != undefined && req.user.meta != undefined) {
      res.json(200, {result: 'ok'})
    }
    else {
      res.json(401, {error: 'Unauthorized request.'})
    }
  };
}
Here is the ExpressJS .get() that is located in app.js:
app.get('/api/user/data/:type', api.getUserData);
The error message .get() requires callback functions but got a [object Undefined] makes it quite clear what happens: you require the API module, it starts connecting to the DB, and you define your Express app by passing a non-existent property to .get, which fails because init has not yet been called and getUserData has not yet been assigned. What you need to do is:
var api = require('api');
eventEmitter.on('couchbaseConnected', function () {
  app.get('/api/user/data/:type', api.getUserData); // now it is available
});
However, this does not look like good code. Instead of loosely coupling them via that couchbaseConnected event, you should rather use explicit callbacks that are invoked with the requested values (i.e. the cb bucket, or the getUserData method), or at least pass them as parameters of the emitted event.
Also, your setup is unconventional. I don't see why getUserData would need to be defined asynchronously; it should always be available. If the Couchbase connection failed, I would not expect the /api/user/data/ service to not exist, but to respond with some 500 internal server error message.
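A sketch of that alternative: export getUserData unconditionally and let it check whether the bucket is available (cb is the module-level bucket variable from the question; the 500 response shape is just an assumption):

exports.getUserData = function (req, res) {
  if (!cb) {
    // couchbase connection failed or is not ready yet
    return res.json(500, { error: 'Database connection not available.' });
  }
  if (req.user != undefined && req.user.meta != undefined) {
    res.json(200, { result: 'ok' });
  } else {
    res.json(401, { error: 'Unauthorized request.' });
  }
};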
In this answer I have made some assumptions, as the data you provided is not sufficient to know what you are doing when you start your app.
I would suggest the following change in the code:
module.exports.connect = function (callback) {
  couchbase.connect(dbConfiguration, function (err, bucket) {
    if (err) {
      console.log(err);
      return callback(err);
    }
    cb = bucket;
    module.exports.getUserData = getUserData; // export the function reference, don't call it
    callback(null); // just call back with no error, as you don't need to send the database
  });
};
function getUserData(req, res) {
  if (req.user != undefined && req.user.meta != undefined) {
    res.json(200, { result: 'ok' });
  } else {
    res.json(401, { error: 'Unauthorized request.' });
  }
}
and in your app.js file, where you start the app, just do this:
var api = require('./api');
api.connect(function (error) {
  if (error) throw error;
  app.listen(3000);
  console.log('Express started on port 3000');
});
And then you can continue doing exactly what you were doing. It should work.
I have assumed that you are not explicitly calling connect (or its equivalent) when you start the app, which is why you're getting that error.
