I'm using Express with Socket.IO, and express-session together with express-socket.io-session, which lets me attach the session to my socket instance.
Here's the code I used for clustering:
var cluster = require('cluster')

if (cluster.isMaster) {
  // imports
  const http = require('http'),
    socketIO = require('socket.io')
  var server = http.createServer(),
    io = socketIO(server)
  for (var i = 0; i < 3; i++) {
    cluster.fork()
  }
  cluster.on('exit', function (worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died code: ' + code + ' signal: ' + signal)
  })
}

if (cluster.isWorker) {
  var express = require('express'),
    app = express(),
    deploy = require('./main.js'),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server)
  // add store here.
  // deploy and listen
  deploy(app, io)
  console.log(cluster.worker.id)
  app.listen(8080)
}
The deploy function sets up things like the session middleware, routes, etc.
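For context, a deploy(app, io) function of that shape could look roughly like this (the secret, the store comment and the route below are placeholders, not taken from the question):

var session = require('express-session')
var sharedSession = require('express-socket.io-session')

module.exports = function deploy(app, io) {
  var sessionMiddleware = session({
    secret: 'replace-me',        // placeholder
    resave: false,
    saveUninitialized: false
    // a shared store (e.g. Redis) would go here when clustering
  })
  app.use(sessionMiddleware)
  // expose req.session to sockets as socket.handshake.session
  io.use(sharedSession(sessionMiddleware))

  app.get('/', function (req, res) {
    res.send('hello')
  })
  io.on('connection', function (socket) {
    console.log('session for socket:', socket.handshake.session)
  })
}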
When the client tries to connect, socket.io gives:
POST http://localhost:8080/socket.io/?EIO=3&transport=polling&t=Lz9Ey8p 404 (Not Found)
I found my answer, but I'll post it publicly so that anyone who runs into this problem in the future can set it up quickly.
You need to use a sticky cluster; refer to this config for Express.
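A minimal sketch of that setup, assuming the sticky-session package (that package choice is my assumption; the deploy(app, io) call is the same function from the question):

var express = require('express'),
    sticky = require('sticky-session'),
    deploy = require('./main.js'),   // same deploy(app, io) as above
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server)

if (!sticky.listen(server, 8080)) {
  // master process: sticky-session forks the workers itself and routes
  // each client to the same worker based on its IP
  server.once('listening', function () {
    console.log('server started on port 8080')
  })
} else {
  // worker process: session middleware, routes and socket handlers
  deploy(app, io)
}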
Related
I have a cluster.js with the following code:
var numOfCpus = 16;
var cluster = require('cluster');
var express = require('express');   // app was not shown in the original snippet
var app = express();

if (cluster.isMaster) {
  for (var i = 0; i < numOfCpus; i++) {
    cluster.fork();
  }
  console.log("master is running");
} else {
  console.log('Worker %d started', cluster.worker.id);
  var server = app.listen(8887);
  var io = require('socket.io').listen(server);
  var live_data = io.of('/live_data');
  live_data.on('connection', function (socket) {
    console.log('Connected: %s', socket.id);
  });
}
My client application works fine when numOfCpus = 1 in cluster.js. With anything more than 1, socket.io starts giving the following error:
Do I have to do anything special to make socket.io work with multiple node workers? Any help will be highly appreciated. Thanks.
The issue was solved using sticky sessions, as pointed out by robertklep in the comments. However, the package I used for the purpose is socketio-sticky-session.
My final cluster.js looks like the following:
var sticky = require('socketio-sticky-session')
var cluster = require('cluster');
var os = require('os');
var express = require('express');   // app was not shown in the original snippet
var app = express();

var options = {
  proxy: false,
  num: os.cpus().length
}

var server = sticky(options, function () {
  var server = app.listen();
  var io = require('socket.io').listen(server);
  var live_data = io.of('/live_data');
  live_data.on('connection', function (socket) {
    console.log('Connected: %s', socket.id);
  });
  return server
}).listen(8887, function () {
  console.log((cluster.worker ? 'WORKER ' + cluster.worker.id : 'MASTER') + ' | HOST ' + os.hostname() + ' | PORT ' + 8887)
})
Details about how sticky sessions work and how to implement them can be read at https://github.com/elad/node-cluster-socket.io
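For the curious, here is a rough sketch of the approach that repo describes: the master owns the listening socket, hashes each client's IP to pick a worker, and hands the raw connection over. Names like workerIndex are mine and the port matches the example above; treat this as a sketch, not the library's exact code.

var net = require('net');
var cluster = require('cluster');
var numWorkers = require('os').cpus().length;

if (cluster.isMaster) {
  var workers = [];
  for (var i = 0; i < numWorkers; i++) {
    workers.push(cluster.fork());
  }

  // Turn the client IP into a stable worker index so the same client
  // always lands on the same worker (required by socket.io polling).
  function workerIndex(ip, len) {
    var sum = 0;
    for (var j = 0; j < ip.length; j++) {
      if (!isNaN(Number(ip[j]))) sum += Number(ip[j]);
    }
    return sum % len;
  }

  // The master owns port 8887 and passes each raw connection to a worker.
  net.createServer({ pauseOnConnect: true }, function (connection) {
    var worker = workers[workerIndex(connection.remoteAddress, numWorkers)];
    worker.send('sticky-session:connection', connection);
  }).listen(8887);
} else {
  var express = require('express');
  var app = express();
  // Workers listen on a random local port so they never fight over 8887.
  var server = app.listen(0, 'localhost');
  var io = require('socket.io').listen(server);

  process.on('message', function (message, connection) {
    if (message !== 'sticky-session:connection') return;
    // Feed the handed-over connection into this worker's HTTP server.
    server.emit('connection', connection);
    connection.resume();
  });
}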
I am new to Node.js and am currently playing with its features. One of the important features I came across is cluster, and I tried to implement it for my sample application using Express, Angular and Node.js.
Cluster code:
var cluster = require('cluster');

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length;
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork();
  }
} else {
  var express = require('express');
  var app = express();
  var exportRouter = require('./routers/exportRouter');
  var process = require('process');
  fakeDB = [];  // intentionally global so the router can use it; each worker has its own copy
  app.use(express.static(__dirname + '/public'));
  app.use(require('./routers/exportRouter.js'));
  console.log('process Id :', process.pid);
  app.listen(3000, function () {
    console.log('running on 3000');
  });
}
I have added the following code in my router to block the event loop, so my first request will block one Node.js worker. If another user makes a call while the first worker is blocked, a second worker should pick it up.
Router code:
var express = require('express');
var exportRouter = express.Router();
var process = require('process');

exportRouter.get('/getMe', function (req, res) {
  console.log('I am using process ', process.pid);
  console.log('get is called');
  fakeDB.push(req.query.newName + ' ' + process.pid);
  res.send(req.query.newName + ' ' + process.pid);
  console.log('New name received ', fakeDB);
  // busy-wait for 10 seconds to keep this worker's event loop blocked
  var d = new Date().getTime();
  console.log('old ', d)
  var x = d + 10000;
  console.log('should stop post ', x);
  while (true) {
    var a = new Date().getTime();
    //console.log('new ',a)
    if (x < a) {
      break;
    }
  }
  console.log('I am releasing event loop for ', process.pid);
});

module.exports = exportRouter;
It does not serve the other request using another worker; it waits for the blocked worker. BTW, I am using Node.js version 0.12.7 (64-bit) and 4 CPUs.
Thanks in advance.
It does not serve the other request using another worker; it waits for the blocked worker
Your testing methodology is probably wrong. Here's a simplified version of your sample.
var cluster = require('cluster')

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork()
  }
} else {
  var express = require('express')
  var app = express()
  console.log('process Id:', process.pid)
  app.get('/', function (req, res) {
    console.log('pid', process.pid, 'handler start, blocking CPU')
    var i = 0;
    while (i < 10e9) {
      i++
    }
    console.log('pid', process.pid, 'unblocked, responding')
    res.send('thanks')
  })
  app.listen(3003, function () {
    console.log('running on 3003')
  })
}
If I run this in one terminal, then open two other terminals and fire off curl localhost:3003 in each as quickly as possible, I can see that the second request arrives and begins processing before the first request gets a response:
pid 53434 handler start, blocking CPU
pid 53437 handler start, blocking CPU
pid 53434 unblocked, responding
pid 53437 unblocked, responding
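If you'd rather script the race than juggle terminals, something like this (a hypothetical helper, not part of the original answer) fires two requests at once against the server above:

var http = require('http');

function fire(label) {
  http.get('http://localhost:3003/', function (res) {
    res.resume();  // drain the body
    res.on('end', function () {
      console.log(label, 'finished at', new Date().toISOString());
    });
  });
}

// Both requests go out immediately; with the cluster running you should
// see two different pids log "handler start" right away.
fire('request 1');
fire('request 2');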
Does the latest Node.js version (4.2.3) support multi-threading now? It's hard to find useful answers, as most relevant discussions are from years ago.
How would a Node.js cloud server handle web app requests? Say a 2-core server runs a Node.js app: does that mean that no matter how heavy the traffic is, it all goes through a single core while the other core stays idle?
If the answer is yes, are there any recommendations for third-party libs that could achieve multi-threading for Node.js web apps?
Thanks for the help. In addition to the above question: I adopted the cluster code into an Express.js web app, and it turns out the else block is not executed in the code section below.
var app = require('../app');
var debug = require('debug')('NewExpress:server');
var http = require('http');
var cluster = require('cluster');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Fork workers.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  cluster.on('exit', function (worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died');
  });
} else {
  /**
   * Get port from environment and store in Express.
   */
  var port = normalizePort(process.env.PORT || '3000');
  app.set('port', port);

  /**
   * Create HTTP server.
   */
  var server = http.createServer(app);

  /**
   * Listen on provided port, on all network interfaces.
   * normalizePort, onError and onListening are defined elsewhere in this file (not shown).
   */
  server.listen(port);
  server.on('error', onError);
  server.on('listening', onListening);
}
Any ideas on how I should implement cluster here?
You can use each of your CPUs like this:
var cluster = require('cluster');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
} else {
  startServer();  // your normal server setup goes here (see the full example below)
}
Update
To give a fuller example, you can throw this in index.js:
var express = require('express');
var app = express();
var cluster = require('cluster');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
} else {
  startServer();
}

function startServer() {
  var server = app.listen(3000, function () {
    var port = server.address().port;
    console.log('Example app listening at %s', port);
  });
}
Run node index.js in a terminal to start it up, and on my 8-CPU machine it logs this:
Example app listening at 3000
Example app listening at 3000
Example app listening at 3000
Example app listening at 3000
Example app listening at 3000
Example app listening at 3000
Example app listening at 3000
Example app listening at 3000
Update 2
After seeing the question edit, I adapted your example. This should work:
var http = require('http');
var express = require('express');
var app = express();
var cluster = require('cluster');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Fork workers.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  cluster.on('exit', function (worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died');
  });
} else {
  console.log('in else statement');
  var port = '3000';
  app.set('port', port);
  var server = http.createServer(app);
  server.listen(port);
}
and it should show this in the console:
in else statement
in else statement
in else statement
in else statement
in else statement
in else statement
in else statement
in else statement
Let me know if this is not running for you.
I'm trying to use socket.io & sticky-session to pass messages to my clients.
The problem is that a client connected to one of the processes won't get messages from other processes, only from the process it is connected to.
How can I make web sockets work across all processes?
Server.js:
var cluster = require('cluster');
var app = require('./config/express')(db);

// Init the server to run according to server CPUs
if (cluster.isMaster) {
  for (var i = 0; i < 4; i++) {
    cluster.fork();
  }
} else {
  app.listen(config.port, function () {
    console.log('Process ' + process.pid + ' is listening to all incoming requests');
  });
}
Process.js:
var http = require('http');
var express = require('express');
var app = express();
var server = http.createServer(app);
var io = require('socket.io').listen(server);
var ns = io.of('/ns');
var sticky = require('sticky-session');

if (!sticky.listen(server, 8080)) {
  // Master code
  server.once('listening', function () {
    console.log('server started on 8080 port');
  });
}
client.js:
var io = require('socket.io-client');
var serverUrl = 'http://localhost:8080/ns';
var conn = io.connect(serverUrl);

conn.on('malware', function (infectedProcess) {
  console.log('infectedProcess: ' + infectedProcess);
});
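One common way to get events delivered to clients on every worker is to plug a shared adapter into socket.io, for example the socket.io-redis package (this assumes a Redis server is available; the host and port below are placeholders):

var http = require('http');
var express = require('express');
var redisAdapter = require('socket.io-redis');

var app = express();
var server = http.createServer(app);
var io = require('socket.io').listen(server);

// Every worker publishes its events through Redis, so an emit from one
// process reaches clients whose sticky session landed on another process.
io.adapter(redisAdapter({ host: 'localhost', port: 6379 }));

var ns = io.of('/ns');
ns.emit('malware', 'example-process');  // now broadcast across all workers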
I'm trying to use the cluster module to handle multiple http requests concurrently with Express.
With the code below I'm able to spawn multiple workers and have all of them listen on the same port. The large for loop is there to simulate heavy load on the web server.
What I'd like to see is that if a worker is busy processing one http request when a second request comes in, a different worker will get invoked and handle that second request. Instead, when I try to issue multiple requests using curl, all requests are processed sequentially by one single worker; no other workers are ever invoked even though they've been forked.
Could it be that I'm using Express incorrectly? Thanks in advance!
var cluster = require('cluster');

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length;
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork();
  }
} else {
  var http = require('http'),
    app = require('express')();
  http.createServer(app).listen(31415, function () {
    console.log(process.pid + " listening on 31415");
  });
  app.get('/', function (req, res) {
    var t = 0;
    for (var i = 0; i < 100000000; i++) {
      t++;
    }
    res.send('done');
  });
}
How about not using the built-in cluster module? You can fork the workers with child_process and hand them the listening handle yourself:
master.js
var cp = require('child_process');
var net = require('net');

// create a tcp server and listen on a port
var tcp = net.createServer();
tcp.listen(8000, function () {
  // detect the cpu count, and fork one child process per cpu,
  // passing the listening handle so they all share the port
  for (var i = 0; i < require('os').cpus().length; i++) {
    var worker = cp.fork('child.js');
    worker.send(i, tcp._handle);
  }
  tcp.close();
});
child.js
var express = require('express');
var app = express();

process.on('message', function (id, handle) {
  app.get('/', function (req, res) {
    console.log(process.pid + ' is handling the request ...');
    res.send('handled by ' + process.pid);
  });
  // listen on the handle passed down from the master
  app.listen(handle, function () {
    console.log(process.pid + ' started');
  });
});
This works fine with Express 3.x.