Does Express use cluster by default? - node.js

I try to use JMeter running benchmark on pure node.js and express server.
And the result is :
node.js = 600 RPS, express = 1200 RPS
While running the benchmark, the node.js server always uses 1 CPU, but the express server uses all of them.
Does this mean that the express server uses cluster by default?
UPDATE : benchmark code
node.js
var http = require('http');

// Benchmark baseline: a bare Node HTTP server on port 8000.
var Server = http.createServer(handleRequest).listen(8000);

// Reply with a fixed body for every request.
function handleRequest(req, res) {
  res.write('I heard you !');
  res.end();
}
Express (3.8)
var express = require('express');
var app = express();

// ----------- middleware -------------
// Express 3.x: configure() invokes its callback synchronously.
var registerMiddleware = function () {
  app.use(express.bodyParser());
  app.use(express.cookieParser());
  app.use('/public', express.static(__dirname + '/files'));
};
app.configure(registerMiddleware);

// ----------- route -------------
var rootHandler = function (req, res) {
  res.send('I heard you!');
};
app.get('/', rootHandler);

app.listen(8000);

Express.js does not use cluster.
The reason you see more than a single core being busy is most likely that Node.js (with or without Express) offloads some operations — such as file and network I/O — to background threads via asynchronous code, not that Express forks extra processes.
You can use cluster with Express, but as long as you are not having scaling issues it is just unnecessary hassle.
Below is an example code (source):
// Include the cluster module
var cluster = require('cluster');

if (cluster.isMaster) {
  // Master process: spawn one worker per CPU core.
  var cores = require('os').cpus().length;
  for (var n = 0; n < cores; n++) {
    cluster.fork();
  }
} else {
  // Worker process: run a minimal Express application.
  var express = require('express');
  var app = express();

  // Basic route – index page.
  app.get('/', function (req, res) {
    res.send('Hello World!');
  });

  // Bind to a port.
  app.listen(3000);
  console.log('Application running!');
}

Related

Node.js memory leak in streams

I detected a strange increase of the memory usage in an express.js application and after a lot of investigation I discovered that the problem was caused while writing to a file using writable streams.
To isolate the problem I created this simple app:
var express = require('express');
var http = require('http');
var path = require('path');
var fs = require('fs');

var app = express();
app.set('port', process.env.PORT || 3000);

// A single append-mode stream shared by every request.
var writable = fs.createWriteStream('/tmp/stream', {flags: 'a'});
var i = 1;

// Each hit appends the next counter value to the file.
app.get('/', function (req, res) {
  i += 1;
  writable.write(i + '\n');
  res.end();
});

// Closing the stream flushes any buffered data.
app.get('/flush', function (req, res) {
  writable.end();
  res.end();
});

http.createServer(app).listen(app.get('port'), function () {
  console.log('Express server listening on port ' + app.get('port'));
});
After launching several requests with a benchmark tool and analyzing the memory used with 'top', I realized that it grows and is never released.
The call to the flush route after finishing the requests doesn't free the memory either.
Is this a memory leak, or am I doing something wrong?
I've made the tests with node 0.8.6, 0.10.37 and 0.12.7 with the same results.

How to run multiple node.js servers

I have two Express servers running on different ports. The first server (web server) provides a website and the second server (data server) emits data every 100 ms using socket.io.
When I open the website in the browser the data appears as expected. But when I then open the website in a second browser, all the emitted data in the first browser slows remarkably down until the second browser is done loading the site.
I should probably mention that the application is running on a Raspberry Pi. The website loading time is not so critical, but the data emission is.
Is there some way to run the node.js servers so they won't be affected by the load on each other?
Right now the web server is in the main app.js file that is run by node and the data server is a module required by the web server. Something like this:
app.js:
var express = require('express');
// Requiring data-server.js also STARTS the data server as a side effect
// (it calls listen(3001) at module load time).
var data = require('data-server.js');
var app = express();
// Web server setup and handling (mostly Express defaults)
app.listen(3000);
module.exports = app;
data-server.js:
// Data server: a second Express app plus socket.io on its own port (3001).
// NOTE: both servers still share one Node process and event loop, so heavy
// work on one can delay the other.
var app = require('express')();
var server = require('http').Server(app).listen(3001)
var io = require('socket.io')(server);
// Socket.io code that emits data every 100 ms
module.exports = io;
Update
Turns out it was my database query that was holding on to node so it couldn't emit because the emission function was waiting for the database to be free.
You should use the same port and the same node process for both your regular HTTP traffic and your websocket traffic. It would minimise the performance impact on your raspberry pi. Example :
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);

// The page and the websocket share one server and one port.
var serveIndex = function (req, res) {
  res.sendfile('index.html');
};
app.get('/', serveIndex);

var onConnection = function (socket) {
  console.log('a user connected');
};
io.on('connection', onConnection);

http.listen(3000, function () {
  console.log('listening on *:3000');
});
Method 1: InfiniteServers.js: start many servers (one per port from 3000 to 4000 — 1001 in total) in one run
const express = require('express')

// One Express app per port in [3000, 4000].
// FIX: the original `new Array(10)` was mis-sized — indices run
// 3000..4000, leaving 0..2999 as holes. An empty array grows as needed.
var app = []
for (let i = 3000; i <= 4000; i++) {
  app[i] = express()
  // Each server reports its own port.
  app[i].get('/', (req, res) => res.send({'port': i}))
  app[i].listen(i, () => console.log('port', i, "started"))
}
Method 2: MultiServers.js : Start 2 servers:
const express = require('express')

// FIX: the original assigned `server` and `server2` without declaring
// them, creating implicit globals (a ReferenceError in strict mode /
// ES modules). Declare them properly.
const server = express()
const server2 = express()

server.get('/', (req, res) => res.send({"server": "3001", "name": "aakash4dev", "data": "some data 1"}))
server2.get('/', (req, res) => res.send({"server": "3002", "name": "aakash4dev", "data": "some data 2"}))
// NOTE: the route path '/somethig' is kept verbatim (likely a typo for
// '/something'); changing it would change the public URL.
server2.get('/somethig', (req, res) => res.send({"server": "3002", "name": "aakash4dev", "data": "some data 2"}))

server.listen(3001)
server2.listen(3002)
Method 3: SepetareInfiniteServers.js
If you want to keep each server in its own file (e.g. port1.js, port2.js, port3.js, …, port1000.js) and start them all in a single run, use module.exports (official docs: https://nodejs.org/api/modules.html#moduleexports ) together with the InfiniteServers.js code above. The code will look like this:
var portsData = new Array(3)
const express = require('express')
const cors = require('cors')
var app = new Array(3)

for (let i = 1; i <= 3; i++) {
  // Load this port's module once, up front.
  portsData[i] = require('./port' + i)
  app[i] = express()
  app[i].use(cors())
  app[i].get('/', (req, res) => {
    let req_requestedData = {"name": "aakash4dev"}
    // FIX: use the module loaded for THIS port. The original always
    // re-required './port1.js', so every server answered with port 1's
    // data and `portsData` went unused.
    res.send(portsData[i].fun1({req_requestedData, "our-Response": "Im server"}))
  })
  // NOTE: as in the original, this listens on ports 1..3, which are
  // privileged on most systems — confirm the intended port range.
  app[i].listen(i, () => console.log('port', i, "started"))
}
Note: import the cors module and call app[i].use(cors()) (or app.use(cors()) for a single server) to avoid the CORS error "No 'Access-Control-Allow-Origin' header is present".

NodeJS Express - separate routes on two ports

I have an express server, and while building it created several "helper" functions on their own routes. I'd like those routes to be accessed on a different port. Is there anyway to do this in express?
In the code below, the "/factory" route (and other functionality) would be on one port, and the helper routes of "/killallthings", "/listallthings", and "/killserver" would be on a separate port.
Here is a simplified version of the code:
// Simplified server: the main "/factory" route and the admin/helper
// routes all share a single HTTPS server on one port.
var express = require('express');
var things = [];
var app = express();
var port = 8080;
app.post('/factory/', function(req, res) {
//Create a thing and add it to the thing array
});
//Assume more functions to do to things here....
app.post('/killallthings/', function(req, res) {
//Destroy all the things in the array
});
app.post('/listallthings/', function(req, res) {
// Return a list of all the things
});
app.post('/killserver/', function(req,res){
//Kills the server after killing the things and doing clean up
});
//Assume https options properly setup.
// NOTE: `options` and `logger` are assumed to be defined elsewhere.
var server = require('https').createServer(options, app);
server.listen(port, function() {
logger.writeLog('Listening on port ' + port);
});
Is this possible with express?
Based on Explosion Pills suggestion above, I modified the code in roughly this way:
// Two Express apps in one process: `app` serves the public "/factory"
// route on `port`, while `admin_app` serves the helper routes on
// `admin_port`, each behind its own HTTPS server.
var express = require('express');
var things = [];
var app = express();
var admin_app = express();
var port = 8080;
var admin_port = 8081;
app.post('/factory/', function(req, res) {
//Create a thing and add it to the thing array
});
//Assume more functions to do to things here....
admin_app.post('/killallthings/', function(req, res) {
//Destroy all the things in the array
});
admin_app.post('/listallthings/', function(req, res) {
// Return a list of all the things
});
admin_app.post('/killserver/', function(req,res){
//Kills the server after killing the things and doing clean up
});
//Assume https options properly setup.
// NOTE: `options` and `logger` are assumed to be defined elsewhere.
var server = require('https').createServer(options, app);
server.listen(port, function() {
logger.writeLog('Listening on port ' + port);
});
var admin_server = require('https').createServer(options, admin_app);
admin_server.listen(admin_port, function() {
logger.writeLog('Listening on admin port ' + admin_port);
});
I wish I knew how to give Explosion Pills the credit for the answer! :)
If you are trying to create multiple servers then why not crate multiple bin/www files with different ports and configurations. Another way could be pass port number directly from command line.

How to use sticky-session with cluster in express - node js

I created a cluster-dependent app with reference to this question, but I started facing issues with session handling. How do I use sticky-session in Express with cluster?
I tried to use this npm module, but it resulted in the same situation. How do I fix this session issue?
// Broken attempt from the question: the stray ';' before the final ')'
// on the last line is a SyntaxError, and the value returned by sticky()
// is never told to listen.
sticky(http.createServer(app).listen(app.get('port'), function () {
console.log('Express server listening on port ' + app.get('port'));
}););
I finally found a solution — just try the code below. It maintains stickiness while still using all the CPUs (worker processes) for other clients. You can get sticky-session here: https://github.com/indutny/sticky-session
var http = require('http');
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');
var express = require('express');

var app = express();

// Log which cluster worker handled the request, then reply.
var hello = function (req, res) {
  console.log('worker: ' + cluster.worker.id);
  res.send('Hello World!');
};
app.get('/', hello);

// sticky-session forks the workers and routes each client back to the
// same worker on every request.
var server = http.createServer(app);
sticky.listen(server, 3000);
It has nothing to do with Express.
You just forgot the listen() on the sticky function.
// FIX: the original had a stray ';' after the inner '})', which is a
// SyntaxError inside sticky()'s argument list. The inner listen()
// starts the HTTP server; the outer listen() belongs to sticky().
sticky(
  http.createServer(app).listen(app.get('port'), function () {
    console.log('Express server listening on port ' + app.get('port'));
  })
).listen(app.get('port'), function () {
  console.log('Sticky server started on port' + app.get('port'));
});

node cluster with socket.io and expressjs

Here's my server code:
I'm trying to use cluster with socket.io and expressjs. I'm testing this on my quad core desktop.
// Cluster setup: the master forks one worker per CPU; every worker runs
// the same Express + socket.io server defined in the else-branch below.
var cluster = require('cluster')
var numCPUs = require('os').cpus().length
if (cluster.isMaster) {
// Fork workers.
for (var i = 0; i < numCPUs; i++) {
cluster.fork()
}
console.log(new Date());
}
else {
// get required modules
var connect = require('connect');
var express = require('express');
var mongo = require('mongodb');
var MongoStore = require('connect-mongo')(express);
var server;
var redis = require('redis');
var http = require('http');
// create server object
// NOTE(review): `exports.module` is probably meant to be
// `module.exports`; as written it only adds a `module` property to
// `exports`. `app` is also an implicit global — confirm intent.
app = exports.module = express();
function configureServer() {
//configure environments
app.configure('production', function() {
app.use(express.errorHandler());
});
//configure server instance
app.configure(function() {
app.use(express.compress())
// set jade as default view engine
app.set('view engine', 'jade');
app.use(express.bodyParser());
app.use(express.cookieParser("refdgfd"));
app.use(express.methodOverride());
app.use(app.router);
// use express logger
});
loadModules();
}
// Each worker binds to port 80; the cluster module shares the listening
// socket between the workers.
var server = http.createServer(app).listen(80);
socket = require('socket.io').listen(server);
// socket.io 0.9-style RedisStore: pub/sub state is shared via Redis so
// events can reach clients connected to any worker.
var RedisStore = require('socket.io/lib/stores/redis')
, pub = redis.createClient()
, sub = redis.createClient()
, client = redis.createClient();
socket.set('store', new RedisStore({
redisPub : pub
, redisSub : sub
, redisClient : client
}));
socket.configure(function() {
socket.set('log level', 1);
socket.set('transports', ['websocket']);
});
// NOTE(review): configureServer() — and loadModules(), defined elsewhere —
// runs only AFTER the server has already started listening.
configureServer();
}
Is there anything that stands out to be really wrong?
The logs indicate that everything happens 4 times. Which seems correctly. But my socket.io's channel users also indicate that whenever I connect to a page, I connect 4 times, (create 4 user sockets). That seems wrong. How do I fix it and are there other things wrong with my code. Thanks.
At this moment socket.io has no full support for node.js cluster. It works fine for me with the websocket transport, but xhr-polling fails. You can read more in these issues: #939, #917. The problem is that an xhr-polling request can be routed to a process different from the one in which it was originally authorized.

Resources