Using Node's Domains in Mean.js

I am trying to get my MEAN application ready for production. The application was built on the MEAN.js boilerplate. From my understanding, MEAN.js uses Forever to restart the application after an error (although documentation on preparing MEAN.js for production is severely lacking); however, it appears the suggested way to handle the application crashing is to use Node's domains in conjunction with clusters. Here are a few references:
This is from Node's webpage on the deprecated uncaughtException Event:
Note that uncaughtException is a very crude mechanism for exception handling.
Don't use it, use domains instead.
Node.js domains: https://nodejs.org/api/domain.html
http://shapeshed.com/uncaught-exceptions-in-node/
etc.
Although I have found many suggestions for using domains, I have yet to find one that tells me what needs to be done to incorporate domains in an application, especially one that has already been developed.
The Questions
What do I need to do to integrate node domains into a Mean.js application? From what I have gathered (from the Node.js domains webpage and here), you would go into server.js in the root of the Mean.js project and do something similar to this:
var cluster = require('cluster');
var PORT = +process.env.PORT || 1337;

if (cluster.isMaster) {
  // Fork the master as many times as required.
  cluster.fork();
  cluster.fork();

  cluster.on('disconnect', function(worker) {
    console.error('disconnect!');
    cluster.fork();
  });
} else {
  var domain = require('domain');
  var d = domain.create();

  d.on('error', function(er) {
    console.error('error', er.stack);
    try {
      // make sure we close down within 30 seconds
      var killtimer = setTimeout(function() {
        process.exit(1);
      }, 30000);
      // But don't keep the process open just for that!
      killtimer.unref();
      // stop taking new requests.
      server.close();
      // Let the master know we're dead. This will trigger a
      // 'disconnect' in the cluster master, and then it will fork
      // a new worker.
      cluster.worker.disconnect();
      // try to send an error to the request that triggered the problem
      res.statusCode = 500;
      res.setHeader('content-type', 'text/plain');
      res.end('Oops, there was a problem!\n');
    } catch (er2) {
      // oh well, not much we can do at this point.
      console.error('Error sending 500!', er2.stack);
    }
  });

  d.run(function() {
    // Place the current contents of server.js here.
  });
}
Do I need to wrap all of the backend controllers in domain.run()?

This answer was found by experimenting and a lot more digging. I had to edit both server.js and config/express.js to use domains. The domain is added as part of the Express middleware for each request. Do not use the code in the question; it won't work as is.
First, the changes I made to server.js:
var init = require('./config/init')(),
    config = require('./config/config'),
    mongoose = require('mongoose'),
    cluster = require('cluster');

var processes = 4; // Number of processes to run at the same time.

if (cluster.isMaster) {
  for (var i = 0; i < processes; i++) {
    cluster.fork();
  }

  cluster.on('disconnect', function(worker) {
    console.error("Disconnect!");
    cluster.fork();
  });
} else {
  /**
   * Main application entry file.
   * Please note that the order of loading is important.
   */

  // Bootstrap db connection
  var db = mongoose.connect(config.db, function(err) {
    if (err) {
      console.error('\x1b[31m', 'Could not connect to MongoDB!');
      console.log(err);
    }
  });

  // Init the express application
  var expressConfig = require('./config/express');
  var app = expressConfig.initialize(db);

  app.use(function(err, req, res, next) {
    console.error(err);
    res.status(401).json({your_message_buddy: "Nice try, idiot."});
  });

  // Bootstrap passport config
  require('./config/passport')();

  // Start the app by listening on <port>
  expressConfig.setServer(app.listen(config.port));

  // Expose app
  exports = module.exports = app;

  // Logging initialization
  console.log('MEAN.JS application started on port ' + config.port);
}
The necessary changes for config/express.js:
var domain = require('domain'),
    cluster = require('cluster');

var appServer = null;

module.exports = {};

/**
 * Since we begin listening for requests in server.js, we need a way to
 * access the server returned from app.listen() if we want to close the
 * server after an error. To accomplish this, I added this function to
 * pass the server object after we begin listening.
 */
module.exports.setServer = function(server) {
  appServer = server;
};

module.exports.initialize = function(db) {
  // Initialize express app
  var app = express();

  // Globbing model files
  config.getGlobbedFiles('./app/models/**/*.js').forEach(function(modelPath) {
    require(path.resolve(modelPath));
  });

  // Set up domain for request BEFORE setting up any other middleware.
  app.use(function(req, res, next) {
    // Create domain for this request
    var reqdomain = domain.create();

    reqdomain.on('error', function(err) {
      console.error('Error: ', err.stack);
      try {
        // Shut down the process within 30 seconds to avoid errors.
        var killtimer = setTimeout(function() {
          console.error("Failsafe shutdown.");
          process.exit(1);
        }, 30000);
        // No need to let the process live just for the timer.
        killtimer.unref();
        // No more requests should be allowed for this process.
        appServer.close();
        // Tell the master we have died so it can get another worker started.
        if (cluster.worker) {
          cluster.worker.disconnect();
        }
        // Send an error to the request that caused this failure.
        res.statusCode = 500;
        res.setHeader('Content-Type', 'text/plain');
        res.end('Oops, there was a problem. How embarrassing.');
      } catch (err2) {
        // Well, something is pretty screwed up. Not much we can do now.
        console.error('Error sending 500!\nError2: ', err2.stack);
      }
    });

    // Add request and response objects to domain.
    reqdomain.add(req);
    reqdomain.add(res);

    // Execute the rest of the request chain in the domain.
    reqdomain.run(next);
  });

  // The rest of this function, which used to be module.exports, is the same.
};

Related

See client download progress from server

Currently, I have a lot of Linux-based clients downloading firmware updates from my web server.
After a client has successfully downloaded the firmware file, my server needs to execute a few scripts, which log in to the client and perform some tasks.
Is there a way for a Node server to keep track of the clients' download progress, so I can execute the needed scripts once the file has been downloaded?
OK, so I will try.
If you serve your firmware as static files through Apache/Nginx with a direct URL call, you don't get the progress inside your Node.js script.
If you serve your files via a stream inside an Express controller, you can listen to the progress. Look at this answer: https://stackoverflow.com/a/42273080/3168392
You will have to use a socket connection to make sure the Node server gets updates from the client on the progress of the file being downloaded.
Something like this:
CLIENT_CODE:
var socket = io('http://localhost');

socket.on('connect', function() {});

// 'data_reciving' is the custom progress event used here; parse_data is a placeholder
// for whatever turns the payload into a percentage.
socket.on('data_reciving', function(percentage) {
  if (parse_data(percentage) === 100) {
    socket.emit('downloadCompleted', { fileName: 'test' });
  } else {
    // do nothing
  }
});
SERVER_CODE:
sockets.on('connection', function (socket) {
  // listen to the event from client
  socket.on('downloadCompleted', function (data) {
    connect_to_client();
    do_some_operation();
    socket.emit('ALLDONE', {some_data});
  });
});
I guess this helps; you can use this post for reference.
If you just want to run some code when a download has finished, you can use on-finished:
const onFinished = require('on-finished');

app.use((req, res, next) => {
  onFinished(res, (err, res) => {
    // ...log some data, perform some housekeeping, etc...
  });
  next();
});
As is, it will attach a "finished" listener to all responses, which is probably not what you want. Since this is plain Express middleware, you can attach it to specific routes instead (but how depends on how exactly the files are being served).
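For example, if the firmware lives behind a dedicated download route, the listener can be attached only to that route. A minimal sketch, assuming a hypothetical /firmware/:name route, file directory, and runPostDownloadScripts helper:

const onFinished = require('on-finished');

app.get('/firmware/:name', (req, res) => {
  onFinished(res, (err) => {
    if (!err) {
      // The response has finished streaming to the client, so kick off the follow-up work.
      runPostDownloadScripts(req.params.name); // hypothetical helper that logs in and runs the scripts
    }
  });
  // In real code, validate/sanitize the file name before building the path.
  res.download('/firmware-files/' + req.params.name);
});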
I found some code that seems to fit my needs.
With the code below, I can detect both the progress of a user's download from the server-side, and fire an event once the file transfer completes.
var http = require("http");
var fs = require("fs");

var filename = "./large-file";

var serv = http.createServer(function (req, res) {
  var sent = 0;
  var lastChunkSize = 0;
  var stat = fs.statSync(filename);

  res.setHeader('Content-disposition', 'attachment; filename=large-file.iso');
  res.setHeader('Accept-Ranges', 'bytes');
  res.setHeader('Keep-Alive', 'timeout=5, max=100');
  res.writeHead(200, { "Content-Length": stat.size });

  var fReadStream = fs.createReadStream(filename, { highWaterMark: 128 * 1024 });

  fReadStream.on('data', function (chunk) {
    if (!res.write(chunk)) {
      fReadStream.pause();
      lastChunkSize = chunk.length;
      console.log('Sent', sent, 'of', stat.size);
    }
  });

  fReadStream.on('end', function () {
    console.log('Transfer complete.');
    res.end();
  });

  res.on("drain", function () {
    sent += lastChunkSize;
    fReadStream.resume();
  });
});

serv.listen(3001);

How to customise time-out response using Express and connect-timeout?

I want to customise the response sent to the users when a timeout error is fired. More specifically, I want to redirect them to a static page explaining why a timeout error has been fired.
I want to write something like:
var express = require('express');
var timeout = require('connect-timeout');

var app = express();
var port = process.env.PORT || 8080;

app.use(timeout(10, { "respond": true }));
app.use(haltOnTimedout);

// GET ROUTES HERE

app.listen(port, function() {
  console.log('Our app is running on port ' + port);
});

function haltOnTimedout(req, res, next) {
  if (req.timedout) {
    res.redirect('/timedout.html');
  } else {
    next();
  }
}
The code above is not working as intended: even if a timeout fires, it does not redirect the user to the static page timedout.html; instead it throws the error ServiceUnavailableError: Response timeout.
Is it possible to do something like this, or am I missing something?
I finally found the answer and am posting it here in case someone has the same question.
I placed the following code at the very end of my server.js:
app.use(haltOnTimedout);

function haltOnTimedout(err, req, res, next) {
  if (req.timedout === true) {
    if (res.headersSent) {
      next(err);
    } else {
      res.redirect('/timedout.html');
    }
  } else {
    next();
  }
}
and added if (!req.timedout) { res.send(something); } in all routes in order to avoid sending the headers twice.
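For example, a route with a slow asynchronous step would look roughly like this (a sketch; fetchReport stands in for whatever slow work the route actually does):

app.get('/report', function(req, res) {
  fetchReport(function(err, report) { // fetchReport is a hypothetical slow operation
    // If the request already timed out, connect-timeout (or the error handler above)
    // has responded, so writing to res again would cause a double-send error.
    if (!req.timedout) {
      res.send(report);
    }
  });
});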

How to verify Cluster working in node js?

I am new to Node.js and currently playing with its features. One of the important features I came across is cluster, and I tried to implement it in my sample application using Express, Angular, and Node.js.
Cluster code:
var cluster = require('cluster');

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length;
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork();
  }
} else {
  var express = require('express');
  var app = express();
  var exportRouter = require('./routers/exportRouter');
  var process = require('process');

  fakeDB = []; // intentionally global so the router can access it

  app.use(express.static(__dirname + '/public'));
  app.use(require('./routers/exportRouter.js'));

  console.log('process Id:', process.pid);

  app.listen(3000, function() {
    console.log('running on 3000');
  });
}
I have added the following code in my router to block the event loop, so when I make the first request it will block one Node.js worker. That way, if another user makes a call while the first worker is blocked, a second worker should pick it up.
Router code:
var express = require('express');
var exportRouter = express.Router();
var process = require('process');

exportRouter.get('/getMe', function(req, res) {
  console.log('I am using process ', process.pid);
  console.log('get is called');

  fakeDB.push(req.query.newName + ' ' + process.pid);
  res.send(req.query.newName + ' ' + process.pid);
  console.log('New name received ', fakeDB);

  // Busy-wait for 10 seconds to block this worker's event loop.
  var d = new Date().getTime();
  console.log('old ', d);
  var x = d + 10000;
  console.log('should stop post ', x);
  while (true) {
    var a = new Date().getTime();
    //console.log('new ', a)
    if (x < a) {
      break;
    }
  }
  console.log('I am releasing event loop for ', process.pid);
});

module.exports = exportRouter;
It does not serve the other request using another worker and instead waits for the blocked worker. BTW, I am using Node.js version 0.12.7 (64-bit) and 4 CPUs.
Thanks in advance.
it does not serve other request using another worker and waits for blocked node worker
Your testing methodology is probably wrong. Here's a simplified version of your sample.
var cluster = require('cluster')

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork()
  }
} else {
  var express = require('express')
  var app = express()

  console.log('process Id:', process.pid)

  app.get('/', function (req, res) {
    console.log('pid', process.pid, 'handler start, blocking CPU')
    var i = 0;
    while (i < 10e9) {
      i++
    }
    console.log('pid', process.pid, 'unblocked, responding')
    res.send('thanks')
  })

  app.listen(3003, function () {
    console.log('running on 3003')
  })
}
If I run this in one terminal, then open two other terminals and as quickly as possible fire off a curl localhost:3003 in each terminal, I can see the second request arrives and begins processing before the first request gets a response:
pid 53434 handler start, blocking CPU
pid 53437 handler start, blocking CPU
pid 53434 unblocked, responding
pid 53437 unblocked, responding

How to use socket.io with the latest mean.io?

I have fetched a copy of the latest Mean.io and noted quite a number of changes compared to the previous version I had been working with. Now, what I am doing is creating a very basic chat application that uses socket.io with rooms. Following the basic setup in the Socket.IO documentation, I have to implement the following:
var app = require('express')()
  , server = require('http').createServer(app)
  , io = require('socket.io').listen(server);

server.listen(80);

app.get('/', function (req, res) {
  res.sendfile(__dirname + '/index.html');
});

io.sockets.on('connection', function (socket) {
  socket.emit('news', { hello: 'world' });
  socket.on('my other event', function (data) {
    console.log(data);
  });
});
Where would I define the basic socket room setup?
socket.set("log level", 1);
var people = {};
var rooms = {};
var clients = [];
You can set socket.io to listen on your server in /server/config/system/bootstrap.js.
Require the socket.io module:
var express = require('express'),
    appPath = process.cwd(),
    io = require('socket.io');
Now set socket.io to listen on your app:
// Express settings
var app = express();
require(appPath + '/server/config/express')(app, passport, db);

io = io(app.listen(3000));

return io;
Then you need to inject the socket.io object into your app in the bootstrapDependencies() function.
function bootstrapDependencies() {
  ...
  // Register socket.io dependency
  mean.register('io', function() {
    return io;
  });
}
Mean.io uses this project for its dependency injection: https://www.npmjs.org/package/dependable
Finally, you need to configure your app to listen for every socket connection. You probably want to do this in your main app's router at /server/routes/index.js.
Sample connection handler
var io = require('meanio').io;

io.on('connection', function (socket) {
  // emit data to the clients
  socket.emit('news', { hello: 'world' });

  // event listeners
  socket.on('my other event', function (data) {
    // call your controller function here
    Controller.action(data);
  });
});
And more importantly, don't forget to set up socket.io on the client side.
<!-- in /server/views/includes/foot.html -->
<script src='/socket.io/socket.io.js'></script>
<script>
  var socket = io();
</script>
I've just responded to another SO post (Mean.io framework with socket.io).
Note: I'm using mean.io v0.5.26 and socket.io v1.1.0.
Pasting my answer again, here.
I also faced the same issue and it took me about a week to finally get it right. I'll try to explain what I did:
app.js
In this file, I just invoke the code that creates and sets up a socket.io object for me, which is then passed to the routes module.
'use strict';

/*
 * Defining the Package
 */
var Module = require('meanio').Module;

var MeanSocket = new Module('chat');

/*
 * All MEAN packages require registration
 * Dependency injection is used to define required modules
 */
MeanSocket.register(function(app, http) {

  var io = require('./server/config/socketio')(http);

  // We enable routing. By default the Package Object is passed to the routes
  MeanSocket.routes(io);

  return MeanSocket;
});
server/config/socketio.js
This file simply configures the socket.io object. Please note that I had to upgrade the meanio module to version 0.5.26 for this to work, as the http object (express server) is not available in older meanio versions. Moreover, in case you want to use SSL, you can inject https instead of http.
'use strict';

var config = require('meanio').loadConfig(),
    cookie = require('cookie'),
    cookieParser = require('cookie-parser'),
    socketio = require('socket.io');

module.exports = function(http) {

  var io = socketio.listen(http);

  io.use(function(socket, next) {
    var data = socket.request;

    if (!data.headers.cookie) {
      return next(new Error('No cookie transmitted.'));
    }

    var parsedCookie = cookie.parse(data.headers.cookie);
    var sessionID = parsedCookie[config.sessionName];
    var parsedSessionID = cookieParser.signedCookie(parsedCookie[config.sessionName], config.sessionSecret);

    // signedCookie() returns the raw value unchanged when it is not a signed cookie,
    // so equality here means no valid signed session cookie was sent.
    if (sessionID === parsedSessionID) {
      return next(new Error('Cookie is invalid.'));
    }

    next();
  });

  return io;
};
routes/chat.js
Finally, use the routes file to define the socket events, etc.
'use strict';

// The Package is passed automatically as first parameter
module.exports = function(MeanSocket, io) {

  io.on('connection', function(socket) {
    console.log('Client Connected');

    socket.on('authenticate', function(data, callback) {

    });
  });
};
Hope this helps!
The latest update, v0.4.0, requires another strategy to get socket.io set up. I'm currently in discussion with one of the project contributors to validate my solution. I'll make sure to update my response once I'm 100% sure.
The meanio package is now where the bootstrap functionality is located, as well as where the Express setup is called from.
Looks like the mean.io guys have recently released an official Socket.io implementation that integrates directly with their stack. Check it out on Github.

Node.js calling function on child_process.fork

I have a server.js module that exports a start() function to start my server.
I require this module and start the server from index.js.
I'm trying to unit test the server.js module in isolation (with Mocha) by starting the server in a child_process.fork call but I don't see how to call the exported start function.
It's currently working by passing 'index.js' to the fork call but then I don't see how to pass options through to the server.js module (sending a port number for example).
Here's my server.js and the unit test that uses index.js (which only requires and calls server.start()).
I'd like to test server.js directly so I can pass environment variables to it.
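To illustrate what I mean, I imagine something along these lines, where server-runner.js would be a small hypothetical entry file that reads process.env.PORT and calls server.start():

var fork = require('child_process').fork;

// server-runner.js (hypothetical) would contain something like:
//   require('./server').start(process.env.PORT);
var child = fork('./server-runner.js', [], {
  env: { PORT: 4008 }
});

child.on('exit', function(code) {
  console.log('server process exited with code', code);
});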
====EDIT====
I'm not sure what I thought I would gain by starting the server in a separate process.
I've changed the test to just start the server in the before block.
Suggestions welcome.
var assert = require("assert");
var request = require("request");

describe("Server", function() {
  var server;
  var port = 4008;

  before(function(done) {
    server = require("../server");
    server.start(port);
    done();
  });

  it('listens on specified port (' + port + ')', function(done) {
    request('http://localhost:' + port, function(err, res, body) {
      assert(res.statusCode == 200);
      done();
    });
  });
});
You may want to use the cluster module for this, which makes handling processes a little simpler. The following may be along the lines of what you need.
var cluster = require('cluster');

// Runs only on the master process.
if (cluster.isMaster) {
  var environmentVariables = { PORT: 2020 };
  var worker = cluster.fork(environmentVariables);

  // Catch a message from the worker, and then destroy it.
  worker.on('message', function(message) {
    if (message.hasOwnProperty('testResults')) {
      // Kill the worker.
      worker.destroy();
      // Then do something with the result.
    }
  });
}

// Runs only on a worker process.
if (cluster.isWorker) {
  var server = require('./server');
  server.start();

  // Do some stuff for tests.

  // Send the test result.
  process.send({ testResults: 'pass', failReason: null });
}
I haven't tested this, but hopefully the gist is clear. You can pass in custom environment variables when the worker process is spawned, and then have the worker process message the master with the result. You probably need to handle exit events and a time out for when the worker crashes or hangs up.
As an aside, you should probably be wrapping the process.env.PORT in a parseInt.
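Something like this, assuming start() takes the port as its argument:

// parseInt avoids passing the port along as a string; fall back to a default if PORT is unset.
var port = parseInt(process.env.PORT, 10) || 3000;
server.start(port);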
