I'm using node + express as my web server, and have a cluster of 4 workers.
I tried several ways to deliberately kill a worker:
process.exit() in a controller, triggered by a browser action. I thought this would only affect a single worker process, but it turned out all workers were killed.
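The controller in question was shaped roughly like this (a stripped-down sketch; the /exit route and port are placeholders, not the actual project code):

const express = require('express');
const app = express();

// sketch of the controller: respond, then exit the worker that handled the request
app.get('/exit', function (req, res) {
  res.send('exiting worker ' + process.pid);
  process.exit();
});

app.listen(3000);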
Again in a controller, I let a worker send a suicide announcement to the master:
process.send('suicide');
and here is my master process:
if (cluster.isMaster) {
  console.log(`Master cluster setting up ${numWorkers} workers...`);
  for (let i = 0; i < numWorkers; i++) {
    const worker = cluster.fork();
    worker.on('message', msg => {
      console.log(worker.process.pid + ' wants to suicide');
      worker.kill();
      process.kill(worker.process.pid);
    });
  }
}
It turned out that worker.kill() had no effect at all, and process.kill(worker.process.pid) killed all 4 workers again. Also, the console.log appeared 4 times, which I don't understand. I used a browser to trigger an action that in turn triggers the suicide announcement; shouldn't this be a single worker's behavior?
I'm also using WebSockets in the project and keep a connection open; I don't know if this matters. Any help is appreciated!
EDIT:
Thanks to @Mia, I found the reason: when I put process.exit() in the else branch (when cluster.isWorker) it works fine, but when it is put in a specific controller, it turns out to affect all the workers. I don't know how to solve this yet. Shouldn't the controller affect only one specific worker?
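For clarity, the placement that behaves as expected looks roughly like this (a stripped-down sketch, not the actual app):

const cluster = require('cluster');

if (cluster.isMaster) {
  for (let i = 0; i < 4; i++) {
    cluster.fork();
  }
} else {
  // process.exit() placed directly in the worker branch
  // takes down only this single worker
  process.exit();
}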
I am sorry to write here, I am unable to comment due to my reputation.
I have written it like the following and it works fine.
First, workers are created and each opens a server. Whenever a worker gets a request on app.get("/"), it exits 2 seconds later via process.exit.
You can confirm that this works by printing out the remaining workers.
'use strict';

const cluster = require('cluster');
const os = require('os');

if (cluster.isMaster) {
  const cpus = os.cpus().length;
  for (let i = 0; i < cpus; i++) {
    cluster.fork();
  }
  cluster.on('exit', (worker, code) => {
    console.log(`${worker.process.pid} is killed`);
    console.log("remaining workers");
    const workers = Object.keys(cluster.workers);
    for (let worker of workers) {
      console.log(cluster.workers[worker].process.pid);
    }
  });
} else {
  console.log("process id ", process.pid);
  const express = require("express");
  const app = express();
  const path = require("path");
  const fs = require("fs");
  const http = require('http').Server(app);
  const io = require('socket.io')(http);
  const bodyParser = require('body-parser');

  app.use(bodyParser.urlencoded({ extended: false }));
  app.use(bodyParser.json());
  app.use(express.static('public'));

  app.get('/', function (req, res) {
    console.log("connected via", process.pid);
    setTimeout(() => {
      process.exit();
    }, 2000);
    res.sendFile(path.join(__dirname + "/public/view/index.html"));
  });

  http.listen(3000);
}
----------------------------code separated into modules---------------
Root (server.js)
'use strict';

const cluster = require('cluster');
const os = require('os');

if (cluster.isMaster) {
  const cpus = os.cpus().length;
  for (let i = 0; i < cpus; i++) {
    cluster.fork();
  }
  cluster.on('exit', (worker, code) => {
    console.log(`${worker.process.pid} is killed`);
    console.log("remaining workers");
    const workers = Object.keys(cluster.workers);
    for (let worker of workers) {
      console.log(cluster.workers[worker].process.pid);
    }
  });
} else {
  console.log("process id ", process.pid);
  const express = require('./modules/express');
  const http = require('http').Server(express);
  http.listen(3000);
}
Express
const express = require("express");
const app = express();
const bodyParser = require('body-parser');
const route = require('../route');

app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(express.static('public'));

route(app);

module.exports = app;
Route (index.js)
const HomeController = require('../controllers/HomeController');

module.exports = function (app) {
  app.get('/', HomeController.renderIndex);
  app.get('/killProcess', HomeController.killProcess);
};
HomeController
const path = require('path');

exports.renderIndex = function (req, res) {
  console.log("connected via", process.pid);
  res.sendFile(path.join(__dirname + "/../public/view/index.html"));
};

exports.killProcess = function (req, res) {
  res.write(`current process ${process.pid} is killed`);
  res.end();
  process.kill(process.pid);
};
Client side (browser)
$("#kill").on("click",function(e){
$.ajax({
url:"/killProcess"
}).success(function(data){
console.log(data);
})
})
Related
I use cluster and have 2 worker processes running. Each worker has one API endpoint whose handler takes around 5-6 seconds.
For example, in cluster.js:
const cluster = require("cluster");

if (cluster.isMaster) {
  for (let i = 0; i < 2; i++) {
    cluster.fork();
  }
} else {
  require("./server");
}
and here is server.js:
const express = require("express");
const app = express();
const port = 8080;

app.get("/", async (req, res) => {
  await sleep(5000);
  res.send('handled by process: ' + process.pid);
});

async function sleep(ms) {
  if (ms <= 0) return;
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

app.listen(port, () => console.log(`port: ${port}`));
If I send a request to the API, the first worker receives it and processes it for around 5-6 seconds. Before it finishes processing and responds, I send a second request to the API, but that request also goes to the first worker, not the second. How do I implement a node cluster so that the second request is handled by the second worker?
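For reference, this is roughly how the two requests are fired (a minimal sketch; it assumes the clustered server above is already listening on port 8080):

const http = require('http');

// send two requests almost simultaneously and print which worker answered each
for (let i = 1; i <= 2; i++) {
  http.get('http://localhost:8080/', (res) => {
    let body = '';
    res.on('data', (chunk) => { body += chunk; });
    res.on('end', () => console.log(`request ${i}: ${body}`));
  });
}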
I am new to Node.js and currently playing with its features. One of the important features I came across is cluster, and I tried to implement it for my sample application using Express, Angular, and Node.js.
Cluster code:
var cluster = require('cluster');

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length;
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork();
  }
} else {
  var express = require('express');
  var app = express();
  var exportRouter = require('./routers/exportRouter');
  var process = require('process');

  fakeDB = [];

  app.use(express.static(__dirname + '/public'));
  app.use(require('./routers/exportRouter.js'));

  console.log('process Id :', process.pid);

  app.listen(3000, function () {
    console.log('running on 3000');
  });
}
I have added the following code in my router to block the event loop, so when I make the first request it will block one Node.js worker; if another user makes a call while the first worker is blocked, a second worker should pick it up.
Router code:
var express = require('express');
var exportRouter = express.Router();
var process = require('process');

exportRouter.get('/getMe', function (req, res) {
  console.log('I am using process ', process.pid);
  console.log('get is called');

  fakeDB.push(req.query.newName + ' ' + process.pid);
  res.send(req.query.newName + ' ' + process.pid);
  console.log('New name received ', fakeDB);

  // busy-wait for 10 seconds to block this worker's event loop
  var d = new Date().getTime();
  console.log('old ', d);
  var x = d + 10000;
  console.log('should stop post ', x);
  while (true) {
    var a = new Date().getTime();
    //console.log('new ', a)
    if (x < a) {
      break;
    }
  }
  console.log('I am releasing event loop for ', process.pid);
});

module.exports = exportRouter;
It does not serve the other request using another worker; it waits for the blocked worker. By the way, I am using Node.js version 0.12.7 (64-bit) and 4 CPUs.
Thanks in advance.
It does not serve the other request using another worker; it waits for the blocked worker
Your testing methodology is probably wrong. Here's a simplified version of your sample.
var cluster = require('cluster')

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork()
  }
} else {
  var express = require('express')
  var app = express()

  console.log('process Id:', process.pid)

  app.get('/', function (req, res) {
    console.log('pid', process.pid, 'handler start, blocking CPU')
    var i = 0;
    while (i < 10e9) {
      i++
    }
    console.log('pid', process.pid, 'unblocked, responding')
    res.send('thanks')
  })

  app.listen(3003, function () {
    console.log('running on 3003')
  })
}
If I run this in one terminal, then open two other terminals and as quickly as possible fire off a curl localhost:3003 in each terminal, I can see the second request arrives and begins processing before the first request gets a response:
pid 53434 handler start, blocking CPU
pid 53437 handler start, blocking CPU
pid 53434 unblocked, responding
pid 53437 unblocked, responding
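If juggling terminals is awkward, the same check can be scripted; here's a minimal sketch that assumes the clustered server above is listening on 3003 (watch the server's console to see which pid picks up each request):

const http = require('http')

// fire two requests back-to-back from one process
for (let i = 1; i <= 2; i++) {
  http.get('http://localhost:3003/', (res) => {
    res.resume() // drain the response; the interesting output is on the server side
    res.on('end', () => console.log('request', i, 'answered'))
  })
}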
I'm trying to use socket.io & sticky-session to pass messages to my clients.
The problem is that a client which connects to one of the processes won't get messages from other processes, only from the process it is connected to.
How can I make WebSockets work across all processes?
Server.js:
var cluster = require('cluster');
var app = require('./config/express')(db);

// Init the server to run according to server CPU's
if (cluster.isMaster) {
  for (var i = 0; i < 4; i++) {
    cluster.fork();
  }
} else {
  app.listen(config.port, function () {
    console.log('Process ' + process.pid + ' is listening to all incoming requests');
  });
}
Process.js:
var http = require('http');
var express = require('express');
var app = express();
var server = http.createServer(app);
var io = require('socket.io').listen(server);
var ns = io.of('/ns');
var sticky = require('sticky-session');

if (!sticky.listen(server, 8080)) {
  // Master code
  server.once('listening', function () {
    console.log('server started on 8080 port');
  });
}
client.js:
var io = require('socket.io-client');
var serverUrl = 'http://localhost:8080/ns';
var conn = io.connect(serverUrl);

conn.on('malware', function (infectedProcess) {
  console.log('infectedProcess: ' + infectedProcess);
});
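The only approach I've come across so far is a shared adapter such as socket.io-redis, along the lines of the sketch below, but I haven't verified it. It would go in Process.js after io and ns are created, and it assumes a Redis server running on localhost:6379:

// addition to Process.js (sketch): route pub/sub through Redis so that
// an emit from any worker reaches clients connected to every worker
var redisAdapter = require('socket.io-redis');
io.adapter(redisAdapter({ host: 'localhost', port: 6379 }));

// e.g. a namespace broadcast now fans out across all processes
ns.emit('malware', process.pid);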
I'm trying to use the cluster module to handle multiple http requests concurrently with Express.
With the code below I'm able to spawn multiple workers and have all of them listen on the same port. The large for loop is there to simulate heavy load on the web server.
What I'd like to see is that if a worker is busy processing one http request when a second request comes in, a different worker will get invoked and handle that second request. Instead, when I try to issue multiple requests using curl, all requests are processed sequentially by one single worker; no other workers are ever invoked even though they've been forked.
Could it be that I'm using Express incorrectly? Thanks in advance!
var cluster = require('cluster');

if (cluster.isMaster) {
  var cpuCount = require('os').cpus().length;
  for (var i = 0; i < cpuCount; i += 1) {
    cluster.fork();
  }
} else {
  var http = require('http'),
      app = require('express')();

  http.createServer(app).listen(31415, function () {
    console.log(process.pid + " listening on 31415");
  });

  app.get('/', function (req, res) {
    var t = 0;
    for (var i = 0; i < 100000000; i++) {
      t++;
    }
    res.send('done');
  });
}
How about not using the built-in cluster module?
master.js
var cp = require('child_process');
var net = require('net');

// create a tcp server listening on a port
var tcp = net.createServer();
tcp.listen(8000, function () {
  // detect the cpu count and fork one child process per cpu
  for (var i = 0; i < require('os').cpus().length; i++) {
    var worker = cp.fork('child.js');
    worker.send(i, tcp._handle);
  }
  tcp.close();
});
child.js
var express = require('express');
var app = express();

process.on('message', function (id, handle) {
  app.get('/', function () {
    console.log(process.pid + ' is listening ...');
  });
  app.listen(handle, function () {
    console.log(process.pid + ' started');
  });
});
This works fine with Express 3.x.
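A note of caution: tcp._handle is a private property. The documented way to share a listening socket is to pass the net.Server object itself as the second argument to send(). A bare-bones sketch of that pattern, using raw sockets rather than Express (adapted from the child_process docs; I haven't checked whether it drops into the Express example above unchanged):

// master.js (sketch)
var cp = require('child_process');
var net = require('net');

var server = net.createServer();
server.on('connection', function (socket) {
  socket.end('handled by parent ' + process.pid);
});
server.listen(8000, function () {
  for (var i = 0; i < require('os').cpus().length; i++) {
    // the server object is the documented "sendHandle"; each child
    // ends up sharing the same underlying listening socket
    cp.fork('child.js').send('server', server);
  }
});

// child.js (sketch)
process.on('message', function (msg, server) {
  if (msg === 'server') {
    server.on('connection', function (socket) {
      socket.end('handled by child ' + process.pid);
    });
  }
});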
I'm doing a little OJT on my first node project and, while I can stand up a simple server, the app is going to get hammered so using cluster seems like a good idea. I've cobbled together some code snippets that I've found in various searches (including SO), but the server won't start. I'm sure my inexperience with node has me doing something stupid, but I don't see it.
var express = require( 'express' );
var cluster = require( 'cluster' );
var path = require( 'path' );
var cCPUs = require( 'os' ).cpus().length;
var port = 3000;
var root = path.dirname( __dirname );

if( cluster.isMaster ) {
  for( var i = 0; i < cCPUs; i++ ) {
    cluster.fork();
  }
  cluster.on( 'death', function( worker ) {
    console.log( 'Worker ' + worker.pid + ' died.' );
  });
}
else {
  // eyes.inspect( process.env );
  console.log( 'Worker: %s', process.env.NODE_WORKER_ID );
  var app = express();
  var routes = require( './routes' )( app );
  app
    .use( cluster.repl( root + 'cluster.repl' ) )
    .use( cluster.stats({ connections: true, requests: true }) )
    .use( cluster.reload( root ) )
    .listen( port );
}
RESULT:
TypeError: Object #<Cluster> has no method 'repl'
If I remove the use calls, the workers start up correctly, but process.env.NODE_WORKER_ID is undefined. Inspecting process.env shows me that it's definitely not there. Maybe the snippet I used was from an old version, but I'm not sure how else to identify the worker process.
If anyone can unscramble whatever I've scrambled, I'd really appreciate it.
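On the NODE_WORKER_ID point: current versions of the cluster module expose a worker id directly, so the env variable isn't needed. A minimal sketch, assuming a reasonably recent Node release:

const cluster = require('cluster');

if (cluster.isWorker) {
  // cluster.worker.id is a small integer assigned by the master;
  // process.pid identifies the worker's OS process
  console.log('Worker: %s (pid %d)', cluster.worker.id, process.pid);
}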
For anyone searching later, here's what I ended up with:
const cluster = require('cluster');
const express = require('express');
const path = require('path');
const port = 3000;
const root = path.dirname(__dirname);
const cCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Create a worker for each CPU
  for (let i = 0; i < cCPUs; i++) {
    cluster.fork();
  }
  cluster.on('online', function (worker) {
    console.log('Worker ' + worker.process.pid + ' is online.');
  });
  cluster.on('exit', function (worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died.');
  });
} else {
  const app = express();
  const routes = require('./routes')(app);
  app.use(express.bodyParser()).listen(port);
}
I'm still very early on the Node learning curve, but the server starts and appears to have a worker running on each core. Thanks to JohnnyH for getting me on the right track.
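One caveat for anyone on Express 4 rather than 3: express.bodyParser() is no longer bundled, so the worker branch needs the standalone body-parser package instead. A rough sketch of the adjusted branch, reusing express, routes, and port from the snippet above:

// Express 4 variant of the worker branch (sketch)
const app = express();
const bodyParser = require('body-parser');
const routes = require('./routes')(app);

app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.listen(port);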
Also take a look at cluster2. It's used by eBay and has an Express example:
var Cluster = require('cluster2'),
    express = require('express');

var app = express.createServer();

app.get('/', function (req, res) {
  res.send('hello');
});

var c = new Cluster({
  port: 3000,
});

c.listen(function (cb) {
  cb(app);
});
Here is my draft of a Cluster.js class. Note that we should catch port conflicts when the master process starts.
/*jslint indent: 2, node: true, nomen: true, vars: true */
'use strict';

module.exports = function Cluster(options, resources, logger) {
  var start = function () {
    var cluster = require('cluster');

    if (cluster.isMaster) {
      require('portscanner').checkPortStatus(options.express.port, '127.0.0.1', function (error, status) {
        if (status === 'open') {
          logger.log.error('Master server failed to start on port %d due to port conflict', options.express.port);
          process.exit(1);
        }
      });

      // Each core to run a single process.
      // Running more than one process in a core does not add to the performance.
      require('os').cpus().forEach(function () {
        cluster.fork();
      });

      cluster.on('exit', function (worker, code, signal) {
        logger.log.warn('Worker server died (ID: %d, PID: %d)', worker.id, worker.process.pid);
        cluster.fork();
      });
    } else if (cluster.isWorker) {
      var _ = require('underscore');
      var express = require('express');
      var resource = require('express-resource');

      // Init App
      var app = express();

      // App Property
      app.set('port', process.env.PORT || options.express.port);
      app.set('views', options.viewPath);
      app.set('view engine', 'jade');
      app.set('case sensitive routing', true);
      app.set('strict routing', false);

      // App Middleware
      app.use(express.favicon(options.faviconPath));
      app.use(express.logger({ stream: logger.stream() }));
      app.use(express.bodyParser());
      app.use(express.methodOverride());
      app.use(express.responseTime());
      app.use(app.router);
      app.use(require('stylus').middleware(options.publicPath));
      app.use(express['static'](options.publicPath));
      if (options.express.displayError) {
        app.use(express.errorHandler());
      }

      // App Format
      app.locals.pretty = options.express.prettyHTML;

      // App Route Handler
      if (!_.isUndefined(resources) && _.isArray(resources)) {
        _.each(resources, function (item) {
          if (!_.isUndefined(item.name) && !_.isUndefined(item.path)) {
            app.resource(item.name, require(item.path));
          }
        });
      }

      // Start Server
      var domain = require('domain').create();
      domain.run(function () {
        require('http').createServer(app).listen(app.get('port'), function () {
          logger.log.info('Worker server started on port %d (ID: %d, PID: %d)', app.get('port'), cluster.worker.id, cluster.worker.process.pid);
        });
      });
      domain.on('error', function (error) {
        logger.log.error(error.stack);
      });
    }
  };

  return {
    start: start
  };
};
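A usage sketch, assuming the class above is saved as Cluster.js next to an entry file; the option values, the 'users' resource, and the logger shape are placeholders that simply mirror what the class reads (it also assumes Express 3-era middleware, as the class itself does):

// app.js (sketch)
var Cluster = require('./Cluster');

var options = {
  express: { port: 3000, displayError: true, prettyHTML: true },
  viewPath: __dirname + '/views',
  publicPath: __dirname + '/public',
  faviconPath: __dirname + '/public/favicon.ico'
};

// each resource is mounted via express-resource: app.resource(name, require(path))
var resources = [
  { name: 'users', path: __dirname + '/resources/users' }
];

// the class expects logger.log.{info,warn,error} and logger.stream()
var logger = {
  log: console,
  stream: function () { return process.stdout; }
};

Cluster(options, resources, logger).start();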