I have a server.js module that exports a start() function to start my server.
I require this module and start the server from index.js.
I'm trying to unit test the server.js module in isolation (with Mocha) by starting the server in a child_process.fork call but I don't see how to call the exported start function.
It's currently working by passing 'index.js' to the fork call but then I don't see how to pass options through to the server.js module (sending a port number for example).
Here's my server.js and the unit test that uses index.js (which only requires and calls server.start()).
I'd like to test server.js directly so I can pass environment variables to it.
====EDIT====
I'm not sure what I thought I would gain by starting the server in a separate process.
I've changed the test to just start the server in the before block.
Suggestions welcome.
var assert = require("assert");
var request = require("request");

describe("Server", function () {
  var server;
  var port = 4008;

  before(function (done) {
    server = require("../server");
    // NOTE(review): assumes server.start(port) binds synchronously; if
    // start() is async, pass `done` through as a callback instead — confirm.
    server.start(port);
    done();
  });

  it('listens on specified port (' + port + ')', function (done) {
    request('http://localhost:' + port, function (err, res, body) {
      // BUG FIX: fail loudly on a connection error instead of crashing on
      // `res.statusCode` when `res` is undefined (which masked the cause).
      assert.ifError(err);
      // Strict comparison with a useful diff on failure.
      assert.strictEqual(res.statusCode, 200);
      done();
    });
  });
});
You may want to use the cluster module for this, which makes handling processes a little simpler. The following may be along the lines of what you need.
var cluster = require('cluster');

// Runs only on the master process.
if (cluster.isMaster) {
  // Environment variables are how options (e.g. the port) reach the worker.
  var environmentVariables = { PORT: 2020 };
  var worker = cluster.fork(environmentVariables);

  // Catch a message from the worker, and then destroy it.
  worker.on('message', function (message) {
    // BUG FIX: the worker sends `testResults` (plural); the original master
    // checked for `testResult` and therefore never matched any message.
    if (message.hasOwnProperty('testResults')) {
      // Kill the worker. (worker.destroy() is the legacy name; modern Node
      // calls this worker.kill() — confirm against your Node version.)
      worker.destroy();
      // Then do something with the result.
    }
  });
}

// Runs only on a worker process.
if (cluster.isWorker) {
  var server = require('./server');
  server.start();
  // Do some stuff for tests.
  // Send the test result.
  process.send({ testResults: 'pass', failReason: null });
}
I haven't tested this, but hopefully the gist is clear. You can pass in custom environment variables when the worker process is spawned, and then have the worker process message the master with the result. You probably need to handle exit events and a time out for when the worker crashes or hangs up.
As an aside, you should probably be wrapping the process.env.PORT in a parseInt.
Related
I make two 'supertest' requests one after each other like so:
const request = require('supertest');
const server = require('#bin/www');
it('should do something', async () => {
// prepare data
const data = { ... some data }
// create business
const res = await request(server)
.post('/v2/businesses')
.send({
...data
});
// store the returned business id
const b_id = res.body.data.id;
// now the critical point - to make another http request to the api,
// in order to fetch the business and expect it to be defined.
const res1 = await request(server)
.get('/v2/businesses')
.query({
b_id
});
// expectations...
expect(res.body.data.business).toBeDefined();
});
The test passes on first time, after pressing "Enter" to test again, I got the
following error:
listen EADDRINUSE: address already in use :::3002
The only solution is to kill the whole test process and start it all over again.
The error also occurs on different test suites as well.
This is the command to run the tests using JEST (in the package.json):
"test": "jest --forceExit --detectOpenHandles --watchAll --maxWorkers=1"
But this doesn't work as expected.
There are the imported files from where the server object comes from:
// bin/www
var app = require('#app/app');
var server = require('#app/server');
server.listen(... some callback)
// NOTE(review): this module exports the Express `app`, NOT the `server`
// created above — which is exactly why `require('#bin/www').close()` fails
// with "server.close is not a function" in the tests.
module.exports = app;
// app/app
// Bare Express application, shared by bin/www and app/server.
var app = require('express')();
module.exports = app;
// app/server
// Wraps the shared app in an http.Server (created but not yet listening).
var app = require('#app/app');
var server = require('http').createServer(app);
module.exports = server;
As I understand, the server is probably keep running even after the test finishes, then when firing another test, the server status is already active and in use, so therefore this error.
How can I close the server after each test?
I have tried:
afterEach(() => server.close())
But got an error that server.close is not a function.
Just use conditional listen:
// Turn off listen, when you are testing
if (process.env.NODE_ENV !== "test") {
app.listen(process.env.PORT || 4000);
}
Jest runs with a special NODE_ENV value, "test", so you don't need a listen call there. Also, supertest binds its own ephemeral port for every app instance.
I am trying to get my MEAN application ready for production. The application was built on the Mean.js boilerplate. From my understanding, MEAN.js uses Forever.js to restart the application after an error (although documentation on preparing Mean.js for production is severely lacking); however, it appears the suggested way to handle the application crashing is using Node's Domains in conjunction with clusters. Here are a few references:
This is from Node's webpage on the deprecated uncaughtException Event:
Note that uncaughtException is a very crude mechanism for exception handling.
Don't use it, use domains instead.
Node.js domains : https://nodejs.org/api/domain.html
http://shapeshed.com/uncaught-exceptions-in-node/
etc.
Although I have found many suggestions for using domains, I have yet to find one that tells me what needs to be done to incorporate domains in an application, especially one that has already been developed.
The Questions
What do I need to do to integrate node domains into a Mean.js application? From what I have gathered (from the Node.js domains webpage and here), you would go into server.js in the root of the Mean.js project and do something similar to this:
// NOTE(review): this is the QUESTION's sketch; the answer below states
// "Do not use the code in the question, it won't work as is."
var cluster = require('cluster');
var PORT = +process.env.PORT || 1337;
if (cluster.isMaster) {
//Fork the master as many times as required.
cluster.fork();
cluster.fork();
// Replace any worker that disconnects (triggered by the error handler below).
cluster.on('disconnect', function(worker) {
console.error('disconnect!');
cluster.fork();
});
} else {
// Worker: run the whole server inside a domain so any uncaught error is
// funneled into one handler. (The `domain` module is deprecated in modern
// Node — TODO confirm before relying on it.)
var domain = require('domain');
var d = domain.create();
d.on('error', function(er) {
console.error('error', er.stack);
try {
// make sure we close down within 30 seconds
var killtimer = setTimeout(function() {
process.exit(1);
}, 30000);
// But don't keep the process open just for that!
killtimer.unref();
// stop taking new requests.
// NOTE(review): `server` is never defined in this scope — as written this
// line throws a ReferenceError.
server.close();
// Let the master know we're dead. This will trigger a
// 'disconnect' in the cluster master, and then it will fork
// a new worker.
cluster.worker.disconnect();
// try to send an error to the request that triggered the problem
// NOTE(review): `res` is also undefined here; a per-request domain is
// needed for `res` to be in scope.
res.statusCode = 500;
res.setHeader('content-type', 'text/plain');
res.end('Oops, there was a problem!\n');
} catch (er2) {
// oh well, not much we can do at this point.
console.error('Error sending 500!', er2.stack);
}
});
d.run(function() {
//Place the current contents of server.js here.
});
}
Do I need to wrap all of the backend controllers in domain.run()?
This answer was found by experimenting and a lot more digging. I had to edit both server.js and config/express.js to use domains. The domain is added part of the Express middleware for each request. Do not use the code in the question, it won't work as is.
First, the changes I made to server.js:
var init = require('./config/init')(),
    config = require('./config/config'),
    mongoose = require('mongoose'),
    cluster = require('cluster');

var processes = 4; // Number of processes to run at the same time.

if (cluster.isMaster) {
  // Master: fork the requested number of workers and replace any that die.
  for (var i = 0; i < processes; i++) {
    cluster.fork();
  }
  cluster.on('disconnect', function (worker) {
    console.error("Disconnect!");
    cluster.fork();
  });
} else {
  /**
   * Main application entry file.
   * Please note that the order of loading is important.
   */

  // Bootstrap db connection
  var db = mongoose.connect(config.db, function (err) {
    if (err) {
      console.error('\x1b[31m', 'Could not connect to MongoDB!');
      console.log(err);
    }
  });

  // Init the express application
  var expressConfig = require('./config/express');
  var app = expressConfig.initialize(db);

  // Last-resort Express error handler.
  app.use(function (err, req, res, next) {
    console.error(err);
    // BUG FIX: the original chained res.send(401).json(...), which sends the
    // response twice ("headers already sent"). Set the status once, then
    // send the JSON body once.
    res.status(401).json({ your_message_buddy: "Nice try, idiot." });
  });

  // Bootstrap passport config
  require('./config/passport')();

  // Start the app by listening on <port>, handing the live server object to
  // the express config so its domain error handler can close() it later.
  expressConfig.setServer(app.listen(config.port));

  // Expose app
  exports = module.exports = app;

  // Logging initialization
  console.log('MEAN.JS application started on port ' + config.port);
}
The necessary changes for config/express.js:
// Per-request domain wiring for the Express app. NOTE(review): the `domain`
// module is deprecated in modern Node — TODO confirm before upgrading.
var domain = require('domain'),
cluster = require('cluster');
// The live http.Server, supplied later via setServer(); needed so the domain
// error handler can stop accepting new connections after a crash.
var appServer = null;
module.exports = {};
/**
 * Since we begin listening for requests in server.js, we need a way to
 * access the server returned from app.listen() if we want to close the
 * server after an error. To accomplish this, I added this function to
 * pass the server object after we begin listening.
 */
module.exports.setServer = function(server) {
appServer = server;
};
// NOTE(review): `express`, `config` and `path` below are required elsewhere
// in the original file (elided from this excerpt).
module.exports.initialize = function(db) {
//Initialize express app
var app = express();
//Globbing model files
config.getGlobbedFiles('./app/models/**/*.js').forEach(function(modelPath) {
require(path.resolve(modelPath));
});
//Set up domain for request BEFORE setting up any other middleware.
app.use(function(req, res, next) {
//Create domain for this request
var reqdomain = domain.create();
reqdomain.on('error', function(err) {
console.error('Error: ', err.stack);
try {
//Shut down the process within 30 seconds to avoid errors.
var killtimer = setTimeout(function() {
console.error("Failsafe shutdown.");
process.exit(1);
}, 30000);
//No need to let the process live just for the timer.
killtimer.unref();
//No more requests should be allowed for this process.
appServer.close();
//Tell master we have died so he can get another worker started.
if(cluster.worker) {
cluster.worker.disconnect();
}
//Send an error to the request that caused this failure.
res.statusCode = 500;
res.setHeader('Content-Type', 'text/plain');
res.end('Oops, there was a problem. How embarrassing.');
} catch(err2) {
//Well, something is pretty screwed up. Not much we can do now.
console.error('Error sending 500!\nError2: ', err2.stack);
}
});
//Add request and response objects to domain.
reqdomain.add(req);
reqdomain.add(res);
//Execute the rest of the request chain in the domain.
reqdomain.run(next);
});
//The rest of this function, which used to be module.exports, is the same.
};
I am trying to set up a Node.js job queue on Heroku. I am using RabbitMQ with the jackrabbit npm module for this task.
After following the example in the repository, I have the following files:
server.js. Node simple web server.
worker.js. Worker.
As I understand it, on Heroku I start a web process and a worker process in the Procfile:
// Procfile
web: node server.js
worker: node worker.js
In the worker.js file, I do the following:
// worker.js
var http = require('http');
var throng = require('throng');
var jackrabbit = require('jackrabbit')
// want to use "worker: node worker.js" in heroku Procfile
// Connect to RabbitMQ (CloudAMQP on Heroku, a local broker in dev).
var queue = jackrabbit(process.env.CLOUDAMQP_URL || 'amqp://localhost')
http.globalAgent.maxSockets = Infinity;
var start = function() {
// NOTE(review): 'connected'/create/handle are the jackrabbit 1.x API;
// newer jackrabbit versions use queue()/consume() instead — confirm
// against the installed version.
queue.on('connected', function() {
// prefetch: 5 — handle at most five unacknowledged jobs at once.
queue.create('myJob', { prefetch: 5 }, function() {
queue.handle('myJob', function(data, ack) {
console.log(data)
console.log("Job completed!")
// Acknowledge so the broker removes the job from the queue.
ack()
})
})
})
}
// NOTE(review): throng(fn, options) is the legacy throng signature; current
// versions take throng({ worker, count, ... }) — confirm.
throng(start, {
workers: 1,
lifetime: Infinity,
grace: 4000
})
Now I want a situation where I can push data to a job from the web application. So I in a middleware function I have,
// middleware.js
var jackrabbit = require('jackrabbit')
// NOTE(review): this opens a SECOND broker connection, separate from the
// worker's; publishing before this connection is established (and before the
// queue exists) may be why the request appears to hang — confirm.
var queue = jackrabbit(process.env.CLOUDAMQP_URL || 'amqp://localhost')
app.get('/example', function(req, res, next) {
queue.publish('myJob', { data: 'my_data' })
// NOTE(review): res.send(status, body) is the deprecated Express 3 form;
// Express 4 uses res.status(200).send(...).
res.send(200, { data: 'my_data' })
})
On development, I start worker.js and app.js in separate terminal windows. When I call the /example method, I expect to see the worker terminal window show the relevant console logs. However, the request just hangs. The same thing happens on Heroku.
I feel like there is something really fundamental I am missing here in terms of my understanding of jackrabbit and ampq, and how to set up a job queuing system using that.
Any help on this would be greatly appreciated as this is obviously new to me.
I have an http server created using:
var server = http.createServer()
I want to shut down the server. Presumably I'd do this by calling:
server.close()
However, this only prevents the server from receiving any new http connections. It does not close any that are still open. server.close() takes a callback, and that callback does not get executed until all open connections have actually disconnected. Is there a way to force close everything?
The root of the problem for me is that I have Mocha tests that start up an http server in their setup (beforeEach()) and then shut it down in their teardown (afterEach()). But since just calling server.close() won't fully shut things down, the subsequent http.createServer() often results in an EADDRINUSE error. Waiting for close() to finish also isn't an option, since open connections might take a really long time to time out.
I need some way to force-close connections. I'm able to do this client-side, but forcing all of my test connections to close, but I'd rather do it server-side, i.e. to just tell the http server to hard-close all sockets.
You need to
subscribe to the connection event of the server and add opened sockets to an array
keep track of the open sockets by subscribing to their close event and removing the closed ones from your array
call destroy on all of the remaining open sockets when you need to terminate the server
You also have the chance to run the server in a child process and exit that process when you need.
For reference for others who stumble across this question, the https://github.com/isaacs/server-destroy library provides an easy way to destroy() a server (using the approach described by Ege).
I usually use something similar to this:
var express = require('express');
var server = express();

/* a dummy route */
server.get('/', function (req, res) {
  res.send('Hello World!');
});

/* handle SIGTERM and SIGINT (ctrl-c) nicely */
process.once('SIGTERM', end);
process.once('SIGINT', end);

var listener = server.listen(8000, function (err) {
  if (err) throw err;
  var host = listener.address().address;
  var port = listener.address().port;
  console.log('Server listening at http://%s:%s', host, port);
});

/* track every open socket so they can be force-destroyed on shutdown */
var lastSocketKey = 0;
var socketMap = {};
listener.on('connection', function (socket) {
  /* generate a new, unique socket-key */
  var socketKey = ++lastSocketKey;
  /* add socket when it is connected */
  socketMap[socketKey] = socket;
  socket.on('close', function () {
    /* remove socket when it is closed */
    delete socketMap[socketKey];
  });
});

/* destroy all live sockets, then close the listener and exit */
function end() {
  /* loop through all sockets and destroy them */
  Object.keys(socketMap).forEach(function (socketKey) {
    socketMap[socketKey].destroy();
  });
  /* after all the sockets are destroyed, we may close the server! */
  listener.close(function (err) {
    /* BUG FIX: the original wrote `throw err()`, which CALLS the Error
       object as a function; rethrow the error object itself. */
    if (err) throw err;
    console.log('Server stopped');
    /* exit gracefully */
    process.exit(0);
  });
}
it's like Ege Özcan says, simply collect the sockets on the connection event, and when closing the server, destroy them.
I've rewriten original answers using modern JS:
// Track every live socket for server1 so they can be force-destroyed later.
const server1 = http.createServer(/*....*/);
const server1Sockets = new Set();

server1.on("connection", socket => {
  // Remember the socket while it is open...
  server1Sockets.add(socket);
  // ...and forget it again as soon as it closes.
  socket.once("close", () => server1Sockets.delete(socket));
});

// Force-close every socket in the given collection.
function destroySockets(sockets) {
  sockets.forEach(socket => socket.destroy());
}

destroySockets(server1Sockets);
My approach comes from this one and it basically does what #Ege Özcan said.
The only addition is that I set a route to switch off my server because node wasn't getting the signals from my terminal ('SIGTERM' and 'SIGINT').
Well, node was getting the signals from my terminal when doing node whatever.js but when delegating that task to a script (like the 'start' script in package.json --> npm start) it failed to be switched off by Ctrl+C, so this approach worked for me.
Please note I am under Cygwin and for me killing a server before this meant to close the terminal and reopen it again.
Also note that I am using express for the routing stuff.
var http = require('http');
var express = require('express');

var app = express();

// Liveness route.
app.get('/', function (req, res) {
  res.send('I am alive but if you want to kill me just go to /exit');
});

// Visiting /exit shuts the whole server down (handler defined below;
// function declarations are hoisted, so this forward reference is fine).
app.get('/exit', killserver);

var server = http.createServer(app).listen(3000, function () {
  console.log('Express server listening on port 3000');
  /*console.log(process);*/
});

// Maintain a hash of all connected sockets
var sockets = {};
var nextSocketId = 0;

server.on('connection', function (socket) {
  // Add a newly connected socket
  var socketId = nextSocketId++;
  sockets[socketId] = socket;
  console.log('socket', socketId, 'opened');

  // Remove the socket when it closes
  socket.on('close', function () {
    console.log('socket', socketId, 'closed');
    delete sockets[socketId];
  });

  // Extend socket lifetime for demo purposes
  socket.setTimeout(4000);
});

// close the server and destroy all the open sockets
function killserver() {
  console.log("U killed me but I'll take my revenge soon!!");

  // Close the server
  server.close(function () {
    console.log('Server closed!');
  });

  // Destroy all open sockets
  Object.keys(sockets).forEach(function (socketId) {
    console.log('socket', socketId, 'destroyed');
    sockets[socketId].destroy();
  });
}
There is now a closeAllConnections() method in v18.2.0
I use Mocha to test my Node/Express.js services, and I'd like to automate these with Grunt to run against a test instance of the server (i.e., identical configuration just listening on a different port).
While I can fire up a new (read: unconfigured) instance of the server using grunt-contrib-connect, there doesn't seem to be a way to utilize my existing app.js directives that contain all the API paths, middleware, etc. I see a couple options, neither of which are attractive:
According to the documentation and examples -- https://github.com/gruntjs/grunt-contrib-connect/blob/master/Gruntfile.js -- I could pass all of the relevant statements from my config file to the 'middleware' option, but this seems to be as clear a case of reinventing the wheel as there can be.
On the other hand, I could use grunt-exec -- https://github.com/jharding/grunt-exec -- to fire up node, passing the configuration file as normal, along with an environment variable (e.g., NODE_ENV=test) that would cause said config file to bind to a different port. Unfortunately, this command blocks the execution of the tests, and would require another hack to shut it down when finished.
Thus, SO, I'm open to ideas! What is the most elegant way to automatically start my node server with full config directives so that I can test them with grunt and mocha?
We configure our app.js to start on a different port when being run from tests, so that we can keep the dev server running (using nodemon) on their regular ports at the same time. Here's our code:
// Start server.
// Tests run on ports 3500/4500 so the regular dev server (3000/4000, e.g.
// under nodemon) can keep running at the same time. An explicit PORT from
// the environment (e.g. Heroku) always wins.
var isTest = process.env.TEST === 'true'; // strict compare; env values are strings
var port = process.env.PORT || (isTest ? 3500 : 3000); // http on localhost / Heroku
process.env['PORT'] = process.env.PORT || (isTest ? 4500 : 4000); // https on localhost

http.createServer(app).listen(port, function () {
  console.log("Express server listening on port %d in %s mode", this.address().port, app.settings.env);
});

// Run separate https server if not on heroku
if (process.env.HEROKU !== 'true') {
  https.createServer(options, app).listen(process.env.PORT, function () {
    console.log("Express server listening with https on port %d in %s mode", this.address().port, app.settings.env);
  });
}
Then, a mocha file, such as this one testing the serving of the favicon, looks like this:
process.env['TEST'] = 'true'; // Use test database
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0" // Avoids DEPTH_ZERO_SELF_SIGNED_CERT error for self-signed certs

var request = require("request").defaults({ encoding: null });
var crypto = require('crypto');
var fs = require('fs');
var expect = require('expect.js');
var app = require("../../app.js");

// BUG FIX: the original function body had no `return`, so hash() always
// yielded undefined and the favicon comparison below compared undefined with
// undefined — i.e. the test could never fail.
var hash = function (file) {
  return crypto.createHash('sha1').update(file).digest('hex');
};

describe("Server", function () {
  after(function () {
    process.env['TEST'] = 'false'; // Stop using test database.
  });

  describe("Static tests", function () {
    it("should serve out the correct favicon", function (done) {
      var favicon = fs.readFileSync(__dirname + '/../../../public/img/favicon.ico')
      request.get("https://localhost:" + process.env.PORT + "/favicon.ico", function (err, res, body) {
        // console.log(res)
        expect(res.statusCode).to.be(200);
        expect(hash(body)).to.be(hash(favicon));
        done();
      });
    });
  });
});
Also note that while grunt is a great tool, you can call mocha from your package.json scripts section and then just npm test to run it.