Running socket.io and the nodejs cluster module - node.js

Server
var cluster = require('cluster');
const express = require('express');

// Code to run if we're in the master process
if (cluster.isMaster) {
    // Count the machine's CPUs
    var cpuCount = require('os').cpus().length;

    // Create a worker for each CPU
    for (var i = 0; i < cpuCount; i += 1) {
        cluster.fork();
    }

    // Listen for terminating workers
    cluster.on('exit', function (worker) {
        // Replace the terminated workers
        console.log('Worker ' + worker.id + ' died :(');
        cluster.fork();
    });

// Code to run if we're in a worker process
} else {
    const app = express();
    const server = require('http').createServer(app);
    const io = require('socket.io')(server);
    app.set('socketio', io);

    const port = process.env.PORT || 9090;
    server.listen(port, () => {
        console.log('Server running at http://127.0.0.1:' + port + '/');
    });

    io.on('connection', function (socket) {
        console.log('CONNECTED');
    });
}
Client
import io from 'socket.io-client'

const socket = io('http://localhost:9090');

socket.on('notification', (data) => {
    if (props.user && data.user._id === props.user._id) {
        this.setNotification(data.notification);
    }
});
Error message
http://localhost:9090/socket.io/?EIO=3&transport=polling&t=MaDRz1u&sid=VzBUqt22usNbdqKCAAAb 400 (Bad Request)
When I remove the if/else and keep only the code that is in the else branch, everything works. What do I need to add so that the session ID is no longer unknown?
The response body is {"code":1,"message":"Session ID unknown"}.

This happens because the child processes created by cluster are not in sync and do not know about each other, so a polling request can land on a worker that has no record of the session another worker created. To overcome this you need an adapter so the workers can communicate, plus sticky routing so that all requests for a given connection reach the same worker.
Refer to the Socket.io documentation on using multiple nodes to resolve this.
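For illustration only (not the asker's code), here is a minimal sketch of what the worker setup could look like with the socket.io-redis adapter, assuming that package is installed and a Redis server runs locally on the default port 6379:
var cluster = require('cluster');

if (cluster.isMaster) {
    var cpuCount = require('os').cpus().length;
    for (var i = 0; i < cpuCount; i += 1) {
        cluster.fork();
    }
    cluster.on('exit', function (worker) {
        cluster.fork();
    });
} else {
    const express = require('express');
    const app = express();
    const server = require('http').createServer(app);
    const io = require('socket.io')(server);

    // Let all workers broadcast/emit to each other through Redis.
    io.adapter(require('socket.io-redis')({ host: 'localhost', port: 6379 }));

    io.on('connection', function (socket) {
        console.log('CONNECTED to worker', cluster.worker.id);
    });

    const port = process.env.PORT || 9090;
    server.listen(port, () => console.log('Worker listening on ' + port));
}
Note that the adapter only lets workers exchange events; the 400 "Session ID unknown" error from long polling goes away only when requests from the same client are routed to the same worker, for example with the sticky-session approach shown further down this page, or by forcing the client to use only the websocket transport.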

Related

How can I take advantage of multi-core systems in Node using the cluster module

I read about the cluster module in the Node.js documentation. According to my understanding, we can use this module to take advantage of multi-core systems,
i.e.
I can handle more load (a higher number of hits on my system) on the same port, but spread across different CPUs.
To verify this I am running a simple test on my system.
Here is my code with the cluster module:
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;
const express = require('express');
const path = require('path');
const servers = [];
const workers = [];

if (cluster.isMaster) {
    console.log(`Master ${process.pid} is running`);
    console.log("total no of cpus", numCPUs);

    // Fork workers.
    for (let i = 0; i < numCPUs; i++) {
        workers.push(cluster.fork());
        workers[i].on('listening', (address) => {
            console.log("address", JSON.stringify(address));
        });
    }

    cluster.on('exit', (worker, code, signal) => {
        console.log(`worker ${worker.process.pid} died`);
    });
} else {
    let app = express();

    // Workers can share any TCP connection
    app.listen(2121, (serverInfo) => {
        console.log("server listening at port 2121", JSON.stringify(serverInfo));
    })

    app.get('/', (req, res, next) => {
        res.json({ success: 'success' })
    })

    app.use(express.static(path.join(__dirname)))

    app.get('/loaderio-edc7fc83ca1554036ee53a6807d5efb5', (req, res, next) => {
        res.sendFile('./loaderio-edc7fc83ca1554036ee53a6807d5efb5.txt')
    })

    console.log(`Worker ${process.pid} started`);
}
And here is the code without clustering:
const express = require('express');
let app = express();
const path = require('path');

app.listen(2121, (serverInfo) => {
    console.log("server listening at port 2121", JSON.stringify(serverInfo));
})

app.get('/', (req, res, next) => {
    res.json({ success: 'success' })
})

app.use(express.static(path.join(__dirname)))

app.get('/loaderio-edc7fc83ca1554036ee53a6807d5efb5', (req, res, next) => {
    res.sendFile('./loaderio-edc7fc83ca1554036ee53a6807d5efb5.txt')
})
I then tested it with loader.io and got the results below:
result using the cluster module
result without the cluster module
I am not getting the results I expected: I got 24 timeouts when using the cluster module.
Is my approach incorrect? If so, how can I take better advantage of multi-core systems in Node.js, or is there anything I have missed?
Try this:
const cluster = require('cluster');
const express = require('express');
const numCPUs = require('os').cpus().length;
const PORT = process.env.PORT || 2121; // same port as in the question

if (cluster.isMaster) {
    console.log(`Master ${process.pid} is running in mode: ` + (process.env.NODE_ENV || 'dev'));
    for (let i = 0; i < numCPUs; i++) {
        cluster.fork();
    }
    cluster.on('exit', (worker, code, signal) => {
        console.log(`worker ${worker.process.pid} died, restarting!`);
        cluster.fork();
    });
} else {
    const app = express();
    app.listen(PORT);
    console.log(`worker ${process.pid} started`);
    console.log('Listening on port ' + PORT);
}

How to use cluster on socket.io-client?

I want to use cluster on both sides (server and client). I have had success on the server side, but I am unable to do the same on the client side. I am using Node.js on both the server side and the client side.
I am using the code below on the server side:
var express = require('express'),
    cluster = require('cluster'),
    sio = require('socket.io');

var port = 3000,
    num_processes = require('os').cpus().length;

if (cluster.isMaster) {
    for (var i = 0; i < num_processes; i++) {
        cluster.fork();
    }
} else {
    var app = new express();
    var server = app.listen(port),
        io = sio(server);

    io.on('connection', function (client) {
        client.on('evnt', function (data) {
            console.log('evnt' + process.pid, data);
        });
    });
}
and this one on the client side:
var url = 'http://localhost:3000/';
var socket = require('socket.io-client')(url);

socket.on('connect', function () {
    console.log('Connected with ', url);
    setInterval(function () {
        socket.emit('evnt', {sham: 'sakdf'});
    }, 500);
});

socket.on('disconnect', function () {
    console.log('Disconnected');
});
There is no need for the cluster module on the client side; you can simply start multiple processes (or spawn child processes) if needed. We use cluster on the server side because we have to bind multiple processes to the same IP and port.
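For illustration only (not part of the original answer): a minimal sketch that forks several copies of a client script with the built-in child_process module. The file name client.js and the count of 4 are assumptions.
// launch-clients.js - starts several independent socket.io clients
var fork = require('child_process').fork;

var NUM_CLIENTS = 4; // arbitrary number of client processes
for (var i = 0; i < NUM_CLIENTS; i++) {
    // Each forked process runs the client code from the question
    // (the file name client.js is an assumption).
    var child = fork(__dirname + '/client.js');
    child.on('exit', function (code) {
        console.log('client process exited with code', code);
    });
}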

How to verify cluster is working in Node.js?

I am new to Node.js and currently playing with its features. One of the important features I came across is cluster. I tried to implement it in my sample application using Express, Angular and Node.js.
Cluster code:
var cluster = require('cluster');

if (cluster.isMaster) {
    var cpuCount = require('os').cpus().length;
    for (var i = 0; i < cpuCount; i += 1) {
        cluster.fork();
    }
} else {
    var express = require('express');
    var app = express();
    var exportRouter = require('./routers/exportRouter');
    var process = require('process');

    fakeDB = [];

    app.use(express.static(__dirname + '/public'));
    app.use(require('./routers/exportRouter.js'));

    console.log('process Id :', process.pid);

    app.listen(3000, function(){
        console.log('running on 3000');
    });
}
I have added the following code to my router to block the event loop, so that when I make the first request it blocks one Node.js worker. If another user makes a call while the first worker is blocked, a second worker should pick that request up.
Router code:
var express = require('express');
var exportRouter = express.Router();
var process = require('process');

exportRouter.get('/getMe', function (req, res) {
    console.log('I am using process ', process.pid);
    console.log('get is called');

    fakeDB.push(req.query.newName + ' ' + process.pid);
    res.send(req.query.newName + ' ' + process.pid);
    console.log('New name received ', fakeDB);

    var d = new Date().getTime();
    console.log('old ', d);
    var x = d + 10000;
    console.log('should stop post ', x);

    // Busy-wait for ~10 seconds to keep the event loop blocked.
    while (true) {
        var a = new Date().getTime();
        //console.log('new ', a)
        if (x < a) {
            break;
        }
    }
    console.log('I am releasing event loop for ', process.pid);
});

module.exports = exportRouter;
It does not serve the other request using another worker; it waits for the blocked worker instead. BTW, I am using Node.js version 0.12.7 (64-bit) and 4 CPUs.
Thanks in advance.
It does not serve the other request using another worker; it waits for the blocked worker instead.
Your testing methodology is probably wrong. Here's a simplified version of your sample.
var cluster = require('cluster')

if (cluster.isMaster) {
    var cpuCount = require('os').cpus().length
    for (var i = 0; i < cpuCount; i += 1) {
        cluster.fork()
    }
} else {
    var express = require('express')
    var app = express()

    console.log('process Id:', process.pid)

    app.get('/', function (req, res) {
        console.log('pid', process.pid, 'handler start, blocking CPU')
        var i = 0;
        while (i < 10e9) {
            i++
        }
        console.log('pid', process.pid, 'unblocked, responding')
        res.send('thanks')
    })

    app.listen(3003, function () {
        console.log('running on 3003')
    })
}
If I run this in one terminal, then open two other terminals and as quickly as possible fire off a curl localhost:3003 in each terminal, I can see the second request arrives and begins processing before the first request gets a response:
pid 53434 handler start, blocking CPU
pid 53437 handler start, blocking CPU
pid 53434 unblocked, responding
pid 53437 unblocked, responding
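If you prefer to drive the test from Node.js instead of two terminals, a small script like the following (my own sketch, not part of the original answer; port 3003 matches the sample above) fires two requests back to back so they land on different workers:
// test-cluster.js - fire two requests at (almost) the same time
var http = require('http');

function hit(label) {
    var start = Date.now();
    http.get('http://localhost:3003/', function (res) {
        res.resume(); // drain the response body
        res.on('end', function () {
            console.log(label, 'answered after', Date.now() - start, 'ms');
        });
    }).on('error', function (err) {
        console.error(label, 'failed:', err.message);
    });
}

hit('request 1');
hit('request 2');
If the cluster is working, both requests should finish after roughly the same time; if they were serialized on a single blocked worker, the second one would take about twice as long.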

Node.js Chat Application Cluster Server using Socket.IO

I have created the code below for a chat application.
It works fine on a single server, but with a cluster of servers these errors keep appearing in the console:
client not handshaken client should reconnect, socket.io in cluster
web socket invalid
Can anyone please help me get this working with a cluster of servers?
Thanks.
Edit: Code
var cluster = require('cluster');
var http = require('http');
var numCPUs = require('os').cpus().length;
var sio = require('socket.io');
var server;
var io;

if (cluster.isMaster) {
    for (var i = 0; i < numCPUs; i++) {
        cluster.fork();
    }
    cluster.on('online', function (worker) {
        console.log('A worker with #' + worker.id);
    });
    cluster.on('listening', function (worker, address) {
        console.log('A worker is now connected to ' + address.address + ':' + address.port);
    });
    cluster.on('exit', function (worker, code, signal) {
        console.log('worker ' + worker.process.pid + ' died');
    });
} else {
    server = http.createServer(function (req, res) {
        res.writeHead(200);
        res.end('hello world\n');
    }).listen(8000);
    _socketServer();
}

function _socketServer() {
    io = sio.listen(server);
    io.set("log level", 1);
    io.sockets.on("connection", function (socket) { });
}
Error: client not handshaken client should reconnect, socket.io in cluster
Try using a Redis server with the cluster and subscribe to the events it exposes: configure socket.io with a Redis-backed store so that all workers share handshake and session data. It is one way to solve this problem.
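As an illustration (my own sketch, not the answerer's code), with the socket.io 0.9.x API used in the question this amounts to replacing the plain _socketServer with a Redis-backed store, assuming a Redis server runs locally on its default port. The io and server variables are the ones declared in the question's code:
var sio = require('socket.io');
var RedisStore = require('socket.io/lib/stores/redis');
var redis = require('socket.io/node_modules/redis');

function _socketServer() {
    // "server" is the worker's HTTP server created above.
    io = sio.listen(server);

    // Share handshake and session data between all cluster workers via Redis.
    io.set('store', new RedisStore({
        redisPub: redis.createClient(),
        redisSub: redis.createClient(),
        redisClient: redis.createClient()
    }));

    io.set('log level', 1);
    io.sockets.on('connection', function (socket) { });
}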
Edit:
Some code sample/architecture/frameworks used and all other information would be very helpful
You can store socket information for all workers this way:
var io = require("socket.io").listen(server);
var ClusterStore = require('strong-cluster-socket.io-store')(io);
io.set('store', new ClusterStore());
Use npm to install the 'strong-cluster-socket.io-store' module.

Socket.io, cluster, express and sync events

I have had a big problem for a week now. I am trying to convert my Node.js project, which currently runs on a single core, to multi-core with cluster.
With websockets I have no problems with events at the moment, but with xhr-polling or jsonp-polling I have big problems with socket.io in cluster mode.
This is my server configuration:
00-generic.js
'use strict';

var http = require('http'),
    os = require('os'),
    cluster = require('cluster');

module.exports = function(done) {
    var app = this.express,
        port = process.env.PORT || 3000,
        address = '0.0.0.0';

    if (this.env == 'test') {
        port = 3030;
    }

    var self = this;
    var size = os.cpus().length;

    if (cluster.isMaster) {
        console.info('Creating HTTP server cluster with %d workers', size);

        for (var i = 0; i < size; ++i) {
            console.log('spawning worker process %d', (i + 1));
            cluster.fork();
        }

        cluster.on('fork', function(worker) {
            console.log('worker %s spawned', worker.id);
        });
        cluster.on('online', function(worker) {
            console.log('worker %s online', worker.id);
        });
        cluster.on('listening', function(worker, addr) {
            console.log('worker %s listening on %s:%d', worker.id, addr.address, addr.port);
        });
        cluster.on('disconnect', function(worker) {
            console.log('worker %s disconnected', worker.id);
        });
        cluster.on('exit', function(worker, code, signal) {
            console.log('worker %s died (%s)', worker.id, signal || code);
            if (!worker.suicide) {
                console.log('restarting worker');
                cluster.fork();
            }
        });
    } else {
        http.createServer(app).listen(port, address, function() {
            var addr = this.address();
            console.log('listening on %s:%d', addr.address, addr.port);
            self.server = this;
            done();
        });
    }
};
03-socket.io.js
"use strict";
var _ = require('underscore'),
socketio = require('socket.io'),
locomotive = require('locomotive'),
RedisStore = require("socket.io/lib/stores/redis"),
redis = require("socket.io/node_modules/redis"),
v1 = require(__dirname + '/../app/socket.io/v1'),
sockets = require(__dirname + '/../../app/socket/socket'),
config = require(__dirname + '/../app/global'),
cluster = require('cluster');
module.exports = function () {
if (!cluster.isMaster) {
this.io = socketio.listen(this.server);
var pub = redis.createClient(),
sub = redis.createClient(),
client = redis.createClient();
this.io.enable('browser client minification'); // send minified client
this.io.enable('browser client etag'); // apply etag caching logic based on version number
this.io.enable('browser client gzip'); // gzip the file
this.io.set("store", new RedisStore({
redisPub : pub,
redisSub : sub,
redisClient : client
}));
this.io.set('log level', 2);
this.io.set('transports', [
'websocket',
'jsonp-polling'
]);
this.io.set('close timeout', 24*60*60);
this.io.set('heartbeat timeout', 24*60*60);
this.io.sockets.on('connection', function (socket) {
console.log('connected with ' + this.io.transports[socket.id].name);
// partie v1 #deprecated
v1.events(socket);
// partie v1.1 refaite
_.each(sockets['1.1'], function(Mod) {
var mod = new Mod();
mod.launch({
socket : socket,
io : this.io
});
}, this);
}.bind(this));
}
};
With polling, the client sometimes connects to a different process than the one that set up its listeners, and the same thing happens for server-to-client communication with emit.
After a little searching, I found that socket.io needs a store in order to share connection data across processes. So I set up socket.io's RedisStore as shown in the documentation, but even with that, events do not arrive reliably and I still get this error message:
warn: client not handshaken client should reconnect
EDIT
Now the warn message is no longer logged. I changed the RedisStore to socket.io-clusterhub, BUT events are still not always fired. Sometimes it is as if the polling request was captured by a worker other than the one that set up the listeners, so nothing happens. Here is the new configuration:
'use strict';

var http = require('http'),
    locomotive = require('locomotive'),
    os = require('os'),
    cluster = require('cluster'),
    config = require(__dirname + '/../app/global'),
    _ = require('underscore'),
    socketio = require('socket.io'),
    v1 = require(__dirname + '/../app/socket.io/v1'),
    sockets = require(__dirname + '/../../app/socket/socket');

module.exports = function(done) {
    var app = this.express,
        port = process.env.PORT || 3000,
        address = '0.0.0.0';

    if (this.env == 'test') {
        port = 3030;
    }

    var self = this;
    var size = os.cpus().length;

    this.clusterStore = new (require('socket.io-clusterhub'));

    if (cluster.isMaster) {
        for (var i = 0; i < size; ++i) {
            console.log('spawning worker process %d', (i + 1));
            cluster.fork();
        }

        cluster.on('fork', function(worker) {
            console.log('worker %s spawned', worker.id);
        });
        cluster.on('online', function(worker) {
            console.log('worker %s online', worker.id);
        });
        cluster.on('listening', function(worker, addr) {
            console.log('worker %s listening on %s:%d', worker.id, addr.address, addr.port);
        });
        cluster.on('disconnect', function(worker) {
            console.log('worker %s disconnected', worker.id);
        });
        cluster.on('exit', function(worker, code, signal) {
            console.log('worker %s died (%s)', worker.id, signal || code);
            if (!worker.suicide) {
                console.log('restarting worker');
                cluster.fork();
            }
        });
    } else {
        var server = http.createServer(app);

        this.io = socketio.listen(server);
        this.io.configure(function() {
            this.io.enable('browser client minification'); // send minified client
            this.io.enable('browser client etag'); // apply etag caching logic based on version number
            this.io.enable('browser client gzip'); // gzip the file

            this.io.set('store', this.clusterStore);
            this.io.set('log level', 2);
            this.io.set('transports', [
                'websocket',
                'jsonp-polling'
            ]);
            //this.io.set('close timeout', 24*60*60);
            //this.io.set('heartbeat timeout', 24*60*60);
        }.bind(this));

        this.io.sockets.on('connection', function (socket) {
            console.log('connected with ' + this.io.transports[socket.id].name);
            console.log('connected to worker: ' + cluster.worker.id);

            // v1 part (deprecated)
            v1.events(socket);

            // v1.1 part (rewritten)
            _.each(sockets['1.1'], function(Mod) {
                var mod = new Mod();
                mod.launch({
                    socket: socket,
                    io: this.io
                });
            }, this);
        }.bind(this));

        server.listen(port, address, function() {
            var addr = this.address();
            console.log('listening on %s:%d', addr.address, addr.port);
            self.server = this;
            done();
        });
    }
};
From that source: http://socket.io/docs/using-multiple-nodes/
If you plan to distribute the load of connections among different
processes or machines, you have to make sure that requests associated
with a particular session id connect to the process that originated
them.
This is due to certain transports like XHR Polling or JSONP Polling
relying on firing several requests during the lifetime of the
“socket”.
To route connections to the same worker every time:
sticky-session
This is, in the socket.io documentation, the recommended way to route requests to the same worker every time.
https://github.com/indutny/sticky-session
A simple performant way to use socket.io with a cluster.
Socket.io is doing multiple requests to perform handshake and
establish connection with a client. With a cluster those requests may
arrive to different workers, which will break handshake protocol.
var sticky = require('sticky-session');

sticky(function() {
    // This code will be executed only in slave workers
    var http = require('http'),
        io = require('socket.io');

    var server = http.createServer(function(req, res) {
        // ....
    });
    io.listen(server);

    return server;
}).listen(3000, function() {
    console.log('server started on 3000 port');
});
To pass messages between nodes:
socket.io-redis
This is, in the socket.io documentation, the recommended way to share messages between workers.
https://github.com/automattic/socket.io-redis
By running socket.io with the socket.io-redis adapter you can run
multiple socket.io instances in different processes or servers that
can all broadcast and emit events to and from each other.
socket.io-redis is used this way:
var io = require('socket.io')(3000);
var redis = require('socket.io-redis');
io.adapter(redis({ host: 'localhost', port: 6379 }));
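For completeness, here is a minimal sketch of my own (not from the linked docs) that combines both pieces, assuming the sticky-session and socket.io-redis packages are installed and Redis runs locally on port 6379; the sticky-session call follows the same API as the README snippet quoted above:
var sticky = require('sticky-session');
var redisAdapter = require('socket.io-redis');

sticky(function() {
    // Executed only in the workers: each worker gets its own HTTP server,
    // but sticky-session routes a given client to the same worker every time.
    var http = require('http');
    var server = http.createServer(function(req, res) {
        res.end('ok');
    });

    var io = require('socket.io')(server);
    // Let every worker broadcast/emit to sockets held by other workers.
    io.adapter(redisAdapter({ host: 'localhost', port: 6379 }));

    io.on('connection', function(socket) {
        socket.emit('hello', { pid: process.pid });
    });

    return server;
}).listen(3000, function() {
    console.log('server started on port 3000');
});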
Also
I think you are not using socket.io v1.0.0. You might want to update your version in order to get more stability.
You can check their migration guide at http://socket.io/docs/migrating-from-0-9/
There is a step missing from the socket.io docs when using
var io = require('socket.io')(3000);
var redis = require('socket.io-redis');
io.adapter(redis({ host: 'localhost', port: 6379 }));
You need to tell the client that you want to use 'websocket' as the only form of transport or it will not work... so in the client constructor use
io.connect(yourURL, { transports: ['websocket'] });
See my answer to a similar question here (my answer might be more appropriate on this thread):
https://stackoverflow.com/a/30791006/4127352
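For example, with the Node.js socket.io-client package the same option looks roughly like this (my own sketch, assuming the server runs on localhost:3000):
var socket = require('socket.io-client')('http://localhost:3000', {
    transports: ['websocket'] // skip the polling upgrade, so no cross-worker handshake requests
});

socket.on('connect', function () {
    console.log('connected over websocket only');
});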
The code below works for me: socket.io runs inside the cluster workers, and I set config.clusterSticky to true to enable compatibility between cluster and socket.io (sticky sessions).
'use strict';

/*
var cl = console.log;
console.log = function(){
    console.trace();
    cl.apply(console,arguments);
};
*/

var cluster = require('cluster'),
    config = require('./config/all'),
    deferred = require('q').defer(),
    express = require('express'),
    app = express(),
    http = require('http'),
    sticky = require('socketio-sticky-session'),
    io = require('socket.io');

// Code to run if we're in the master process or if we are not in debug mode / running tests
if ((cluster.isMaster) &&
    (process.execArgv.indexOf('--debug') < 0) &&
    (process.env.NODE_ENV !== 'test') && (process.env.NODE_ENV !== 'development') &&
    (process.execArgv.indexOf('--singleProcess') < 0) &&
    (!config.clusterSticky)) {

    console.log('for real!');

    // Count the machine's CPUs
    var cpuCount = process.env.CPU_COUNT || require('os').cpus().length;

    // Create a worker for each CPU
    for (var i = 0; i < cpuCount; i += 1) {
        console.log('forking ', i);
        cluster.fork();
    }

    // Listen for dying workers
    cluster.on('exit', function (worker) {
        // Replace the dead worker, we're not sentimental
        console.log('Worker ' + worker.id + ' died :(');
        cluster.fork();
    });

// Code to run if we're in a worker process
} else {
    var port = config.http.port;
    var workerId = 0;
    if (!cluster.isMaster) {
        workerId = cluster.worker.id;
    }

    var server = http.createServer(app);
    io.listen(server);

    //TODO routes etc (core)

    server.on('listening', function () {
        console.log('Slave app started on port ' + port + ' (' + process.env.NODE_ENV + ') cluster.worker.id:', workerId);
    });

    if (config.clusterSticky && (process.env.NODE_ENV !== 'test') && (process.env.NODE_ENV !== 'development')) {
        sticky(server).listen(port);
    } else {
        server.listen(port);
    }

    deferred.resolve(server);
}

module.exports = deferred.promise;
