I'm trying to set up an Express app that catches any thrown error in one central function instead of wrapping every handler in try/catch.
var app = express();
var tenantsRouter = express.Router();

tenantsRouter.get('/my_endpoint', async function (req, res, next) {
  var result = await methodThatCouldFail();
  res.status(HttpStatus.OK).json({ result });
});

app.use(apiPrefix + '/tenants', tenantsRouter);
var error_handler = function (err, req, res, next) {
  console.error(`general error catcher - ${err}.`);
  return res.status(HttpStatus.BAD_REQUEST).json({
    'error': 'we are on it.'
  });
};
// error handler
app.use(error_handler)
The thing is, unless I use an explicit try/catch inside my_endpoint, the error_handler never catches an error thrown inside methodThatCouldFail().
The only API Express provides for passing errors down the chain of handlers is the next() function. You need to wrap your async handler in something that does the try/catch and calls next() for you:
function asyncHandler(f) {
  return function (req, res, next) {
    f(req, res, next).catch(next);
  };
}
Now you can do:
tenantsRouter.get('/my_endpoint', asyncHandler(async function (req, res, next) {
  var result = await methodThatCouldFail();
  res.status(HttpStatus.OK).json({ result });
}));
This works exactly the way you expected it to.
There are several implementations of this small helper on npm if you don't feel like writing it yourself, including this one: https://www.npmjs.com/package/express-async-handler
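For example, usage with that package looks roughly like this (a sketch, reusing HttpStatus and methodThatCouldFail from the question):
const asyncHandler = require('express-async-handler');

// asyncHandler wraps the async function and forwards any rejection to next()
tenantsRouter.get('/my_endpoint', asyncHandler(async function (req, res) {
  const result = await methodThatCouldFail();
  res.status(HttpStatus.OK).json({ result });
}));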
Related
I am currently working on a Node.js + Express + MongoDB project. I am trying to handle the error that occurs when data cannot be retrieved from the database. I am simulating this by terminating the mongod process in the console and then calling the GET endpoint in Postman. Sadly, instead of getting an error response in Postman I only get an Unhandled Promise Rejection in the console. I read a lot of posts about error handling and implemented it according to this guide: https://expressjs.com/en/guide/error-handling.html. I would be grateful for any idea of how I can fix this.
The code:
Printing all courses:
router.get("/", async (req, res, next) => {
try {
const courses = await Course.find().sort("dishName");
res.send(courses);
} catch (ex) {
next(ex);
}
});
error.js:
module.exports = function (err, res, req, next) {
  res.status(500).send(`500 Error`);
};
index.js
const error = require(`./middleware/error`);
app.use(error);
app.use(error) is placed as the last app.use
There is a minor mistake in your code: the req and res parameters in the error handler are swapped. The signature must be (err, req, res, next):
// Error.js
module.exports = function (err, req, res, next) {
  res.status(500).send(`500 Error`);
};
Let's look at the modified Restify example below, which now uses Node 7/8's async/await support.
I have slight concerns about whether this is the proper way to implement it in Express/Restify/etc. My concern is the promise hanging around in the event loop for longer than it needs to... I know this isn't exactly a promise, but should I be concerned about this implementation? I haven't noticed any issues yet.
'use strict';

const restify = require('restify');
const User = require('./models/User');

const server = restify.createServer({
  name: 'myapp',
  version: '1.0.0'
});

server.use(restify.acceptParser(server.acceptable));
server.use(restify.queryParser());
server.use(restify.bodyParser());

server.get('/echo/:name', async function (req, res, next) {
  try {
    const user = await User.findOne({
      name: req.params.name
    });
    res.send(user.get({plain: true}));
  } catch (error) {
    console.error(error);
    res.send(500);
  }
  return next();
});

server.listen(8080, function () {
  console.log('%s listening at %s', server.name, server.url);
});
There is a problem with using an async function instead of a regular function that accepts a callback, because errors are handled differently.
In callback-style functions (aka "err-backs") the callback must be called regardless of whether execution succeeded, with an error object as the first parameter.
An async function simply returns a rejected promise in case of any error (synchronous or asynchronous).
By default, Express.js/Restify expect a regular err-back. If you pass an async function instead and it fails, Express.js/Restify keep waiting for the callback to be called and ignore the rejected promise; they are simply not aware of the returned promise and never handle it.
In the end the callback won't be called at all and the endpoint will time out.
So you won't be able to handle the error properly.
You can try it out:
server.get('/echo/:name', async function (req, res, next) {
  throw new Error();
});
As a rule of thumb, I'd recommend not mixing the concepts: never pass callbacks into async functions. It's a red flag.
To fix this you need to use a wrapper, for example:
const wrap = function (fn) {
  return function (req, res, next) {
    return fn(req, res, next).catch(function (err) {
      return next(err);
    });
  };
};
server.get('/echo/:name', wrap(async function (req, res, next) {
  throw new Error();
}));
You will get a proper status code and there will be no timeout anymore.
There are also a couple of modules you can use if you don't want to wrap it yourself:
Express.js: express-async-wrap
Restify: restify-async-wrap
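Assuming express-async-wrap exposes a wrap(fn) helper similar to the hand-rolled one above (check its README to confirm), usage would look roughly like this:
// sketch: assumes express-async-wrap exports a wrap(fn) function like the one above
const wrap = require('express-async-wrap');
const app = require('express')();

app.get('/echo/:name', wrap(async function (req, res) {
  throw new Error(); // the rejection is caught by the wrapper and passed to next()
}));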
Until now I've defined my get and post handlers with just (req, res) as arguments, with the assumption being that I put these handlers last in the chain of middleware, and make sure that I handle any responses and error handling properly within these handlers... hence it doesn't matter that I don't make any reference to next.
Is this a valid and sensible approach, or is it good practice always to call next() even if (at present) there is nothing coming afterwards? For example, perhaps in the future you might want to do some handling after these routes... or maybe there's a reason I haven't yet come across why it's good practice to always call next().
For example, there is the following simple example in the express routing guide:
app.get('/example/b', function (req, res, next) {
  console.log('the response will be sent by the next function ...')
  next()
}, function (req, res) {
  res.send('Hello from B!')
})
Of course, I appreciate that this is a very simple example to illustrate that handlers can be chained, and is not intended to provide a complete framework for a get handler, but would it be better to define and use next even in the second handler, as follows?
app.get('/example/b', function (req, res, next) {
  console.log('the response will be sent by the next function ...')
  next()
}, function (req, res, next) {
  res.send('Hello from B!')
  next()
})
Or is it actually common practice to assume that a handler function that sends a response back to the client should not call next()... i.e. the assumption should be that the chain will end at the handler that actually sends the response?
Or is there no established practice on this point?
I'm even wondering whether it might be common not to send any response in the get handler but to defer that to a dedicated response handler coming after... by which I mean an OK response handler rather than an error response handler (for which it seems to be common practice to defined a final error handler and call next(err)). So, in a non-error situation, you would call next() and in the following middleware you would do your res.status(200).send(req.mydata) where req.mydata is added in your get handler.
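Something like this, for example (just a sketch of that idea; req.mydata is an arbitrary property name):
app.get('/example/c', function (req, res, next) {
  // do the actual work here and attach the result to the request...
  req.mydata = { message: 'Hello from C!' };
  next();
}, function (req, res) {
  // ...and let a dedicated handler send the OK response
  res.status(200).send(req.mydata);
});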
No. You should only call next() if you want something else to handle the request. Usually it's like saying that your route may match that request but you want to act like it didn't. For example you may have two handlers for the same route:
app.get('/test', (req, res, next) => {
  if (something) {
    return next();
  }
  // handle request one way (1)
});

app.get('/test', (req, res) => {
  // handle request other way (2)
});
The first matching handler is always called, so for a GET /test request the first handler will be called, but it can choose to pass control to the second handler, as if it didn't match the request.
Note that if the second handler doesn't intend to pass the request to the next handler, it doesn't even have next in its arguments.
If there was no second handler, then the standard 404 handler would be used if the first one called next().
If you pass an argument to next() then an error handling middleware will be called.
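For example, a minimal sketch of that last point:
app.get('/fail', (req, res, next) => {
  next(new Error('boom')); // skips the remaining regular handlers
});

// the four-argument signature marks this as error-handling middleware
app.use((err, req, res, next) => {
  res.status(500).send(err.message);
});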
My rule of thumb is to handle the response in the handler if you're going to return a 2xx (success) status code, and in centralized error handling if not. That looks something like this in practice:
// ./routes/things.js
const express = require('express');
const Thing = require('../models/thing');
const router = express.Router();
// note, the handlers might get pulled out into a controllers file, if they're getting more complex.
router.param('thingId', (req, res, next, id) => {
  Thing.findById(id, (e, thing) => {
    if (e) return next(e);
    // let's say we have defined a NotFoundError that has a 'statusCode' property which equals 404
    if (!thing) return next(new NotFoundError(`Thing ${id} not found`));
    req.thing = thing;
    return next();
  });
});
router.get('/', (req, res, next) => {
  // possibly pull in some sort, limit, and filter stuff
  Thing.find({}, (e, things) => {
    if (e) return next(e);
    res.send(things);
  });
});
router.route('/:thingId')
  .get((req, res) => {
    // if you get here, you've already got a thing from the param fn
    return res.send(req.thing);
  })
  .put((req, res, next) => {
    const { name, description } = req.body; // pull whitelist of changes from body
    let thing = req.thing;
    thing = Object.assign(thing, { name, description }); // copy new stuff into the old thing
    thing.save((e) => {
      if (e) return next(e);
      return res.send(thing); // return updated thing
    });
  });
Keeping each logical chunk in its own file can reduce repetition:
// ./routes/index.js then mounts the subrouters to the main router
const thingsRoute = require('./things');
const express = require('express');
const router = express.Router();
/* .... other routes **/
router.use('/things', thingsRoute);
Error handling is then centralized, and can be mounted either in its own file or right on the app:
// in ./index.js (main app entry point)
const express = require('express');
// this will require by default ./routes/index.js
const routes = require('./routes');
const app = express();
const log = require('./log'); // I prefer debug.js to console.log, and ./log.js is my default config file for it
/* ... other app setup stuff */
app.use(routes);
// you can mount several of these, passing next(e) if you don't handle the error and want the next error handler to do so.
app.use((err, req, res, next) => {
  // you can tune log verbosity, this is just an example
  if (err.statusCode === 404) {
    return res.status(404).send(err.message);
  }
  log.error(err.message);
  log.verbose(err.stack); // don't do stack traces unless log levels are set to verbose
  return res.status(500).send(err.message);
});
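For completeness, the NotFoundError assumed above could be a small custom error class along these lines (a sketch, it isn't shown in the code above):
// ./errors/not-found-error.js
class NotFoundError extends Error {
  constructor(message) {
    super(message);
    this.name = 'NotFoundError';
    this.statusCode = 404; // checked by the centralized error handler
  }
}

module.exports = NotFoundError;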
app.use(function (req, res, next) {
  throw new Error('critical');
})
makes the Express server catch the critical error and output it, while I want it to crash.
Adding an error handler doesn't replace the default handler.
How can Express error handling be disabled for critical errors?
If you want your server to crash in the event of a critical error, you can define an error-handling middleware. This is done by defining a function with 4 parameters, the first being the error. This will be called when an error is thrown. You can check the error and determine if it's critical, and if so, call process.exit.
const app = require('express')()

app.use('/', (req, res) => {
  throw new Error('critical')
})

app.use((err, req, res, next) => {
  if (err.message === 'critical') {
    process.exit(1)
  } else {
    // carry on listening
  }
})
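In the non-critical branch you would typically pass the error along so another error handler (or Express's default one) can still respond, for example:
app.use((err, req, res, next) => {
  if (err.message === 'critical') {
    process.exit(1)
  } else {
    next(err) // let the next error handler (or the default one) send the response
  }
})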
I have a few middlewares that I want to combine into one middleware. How do I do that?
For example...
// I want to shorten this...
app.use(connect.urlencoded())
app.use(connect.json())
// ...into this:
app.use(combineMiddleware([connect.urlencoded, connect.json]))
// ...without doing this:
app.use(connect.urlencoded()).use(connect.json())
I want it to work dynamically -- I don't want to depend on which middleware I use.
I feel like there's an elegant solution other than a confusing for loop.
Express accepts arrays for app.use if you have a path:
var middleware = [connect.urlencoded(), connect.json()];
app.use('/', middleware)
However, if you want a generic combineMiddleware function, you can build a helper easily without any additional libraries. This basically takes advantage of the fact that next is simply a function which takes an optional error:
/**
 * Combine multiple middleware together.
 *
 * @param {Function[]} mids functions of form:
 *   function(req, res, next) { ... }
 * @return {Function} single combined middleware
 */
function combineMiddleware(mids) {
  return mids.reduce(function (a, b) {
    return function (req, res, next) {
      a(req, res, function (err) {
        if (err) {
          return next(err);
        }
        b(req, res, next);
      });
    };
  });
}
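Usage then looks like the call from the question (note the middleware factories are invoked, since combineMiddleware expects ready middleware functions):
app.use(combineMiddleware([connect.urlencoded(), connect.json()]));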
If you like fancy stuff, here is one possible solution:
var connect = require('connect')
var app = connect()

function compose(middleware) {
  return function (req, res, next) {
    connect.apply(null, middleware.concat(next.bind(null, null))).call(null, req, res)
  }
}

function a(req, res, next) {
  console.log('a')
  next()
}

function b(req, res, next) {
  console.log('b')
  next()
}

app.use(compose([a, b]))

app.use(function (req, res) {
  res.end('Hello!')
})

app.listen(3000)
Here is what it does: the compose function takes an array of middleware and returns a composed middleware. connect itself is basically a middleware composer, so you can create another connect app with the middleware you want: connect.apply(null, middleware). A connect app is itself a middleware; the only problem is that it doesn't call next() at the end, so any subsequent middleware would be unreachable. To solve that, we append one last middleware that just calls next: connect.apply(null, middleware.concat(last)). Since last only calls next, we can use next.bind(null, null) instead. Finally, we call the resulting function with req and res.
Old question, but the need is still frequent for anything that uses middleware, like connect, express, or custom-made req/res/next patterns.
This is a very elegant and purely functional solution:
File ./utils/compose-middleware.js:
function compose(middleware) {
  if (!middleware.length) {
    return function (_req, _res, next) { next(); };
  }

  var head = middleware[0];
  var tail = middleware.slice(1);

  return function (req, res, next) {
    head(req, res, function (err) {
      if (err) return next(err);
      compose(tail)(req, res, next);
    });
  };
}

module.exports = compose;
The final result of the compose(middlewareList) is a single middleware that encapsulates the whole chain of middleware initially provided.
Then simply import it and use like this:
File app.js:
var connect = require('connect');
var compose = require('./utils/compose-middleware');
var middleware = compose([
  connect.urlencoded(),
  connect.json()
]);
var app = connect();
app.use(middleware);
A simple and native way, and you don't need to install anything.
const {Router} = require('express')
const combinedMiddleware = Router().use([middleware1, middleware2, middleware3])
Then you can use the combinedMiddleware where you want. For example, you may want to run different set of middlewares/handlers for the same route depending on some conditions (a request attributes, for example):
app.get('/some-route', (req, res, next) => {
  req.query.someParam === 'someValue'
    ? combinedMiddleware1(req, res, next)
    : combinedMiddleware2(req, res, next)
})
If you're willing to use a library:
https://www.npmjs.org/package/middleware-flow
var series = require('middleware-flow').series;
var app = require('express')();
app.use(series(mw1, mw2, mw3)); // equivalent to app.use(mw1, mw2, mw3);
Make a list and use a loop.
const connect = require('connect')
const { urlencoded, json } = require('body-parser')
const app = connect(); // note the semicolon: without it the array below would be parsed as an index into connect()

[ urlencoded(), json() ].forEach(app.use, app)
The second argument of .forEach is used as this here, but if you like you can also do the same with:
[ urlencoded(), json() ].forEach(app.use.bind(app))