Maximum performance for node HTTP server?

I'm running a test to see the maximum delivery speed I can get from a Node HTTP server. It's a simple server.
In my test, 50K virtual clients establish a permanent connection with the server (I run ulimit -n 99999 beforehand). Then, on a separate trigger (an HTTP connection to a different port), the server sends one message to each virtual client, and in the end every client receives its message.
Sending all the messages takes minutes in my tests. Are there any recommendations that would help me improve these measurements, so that I can send the 50K messages in seconds instead of minutes?
The server is running on an m1.medium AWS instance. The idea is to improve performance on the same platform.
Copying the server code:
var http = require("http");

var connectionResponse = [];
var connectionIndex = 0;

http.createServer(function(request, response) {
  console.log("Received connection " + connectionIndex);
  response.setTimeout(1200000, function() {
    console.log("Socket timeout");
  });
  connectionResponse[connectionIndex] = response;
  connectionIndex++;
}).listen(8888);

http.createServer(function(request, response) {
  console.log("8887 connected - Will respond");
  response.writeHead(200, {"Content-Type": "text/plain"});
  response.write("Triggered all responses");
  response.end();
  console.log("Begin notifications:" + new Date().toISOString());
  for(var i = 0; i < connectionIndex; i++) {
    connectionResponse[i].writeHead(200, {"Content-Type": "text/plain", "Content-Length": 4, "transfer-encoding": ""});
    connectionResponse[i].write("CAFE");
    connectionResponse[i].end();
  }
  console.log("End notifications" + new Date().toISOString());
}).listen(8887);

Set agent.maxSockets (http://nodejs.org/api/http.html#http_agent_maxsockets) to a sufficiently high number:
var http = require('http');
http.globalAgent.maxSockets = xxx;
var https = require('https');
https.globalAgent.maxSockets = xxx;
Use the Node.js cluster module (http://nodejs.org/api/cluster.html).
Now, regarding clustering, it really depends on what you want to do. The default example can go a long way before you have to tweak it. An example would be:
var cluster = require('cluster');
var http = require('http');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Fork workers.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  cluster.on('exit', function(worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died');
  });
} else {
  // Workers can share any TCP connection.
  // In this case it is an HTTP server.
  http.createServer(function(req, res) {
    res.writeHead(200);
    res.end("hello world\n");
  }).listen(8000);
}
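Applied to the notification server from the question, a rough sketch (not a drop-in fix) could look like the following. One caveat: each worker keeps its own connectionResponse array, so a trigger hitting port 8887 only flushes the clients that particular worker happened to accept; notifying all 50K clients from a single trigger would need extra coordination between the workers (for example, messaging through the master).
var cluster = require('cluster');
var http = require('http');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
} else {
  // Per-worker list of parked responses; only this worker's clients are here.
  var connectionResponse = [];

  // Port 8888: clients park a long-lived request here.
  http.createServer(function(request, response) {
    response.setTimeout(1200000);
    connectionResponse.push(response);
  }).listen(8888);

  // Port 8887: a trigger flushes the pending responses held by this worker.
  http.createServer(function(request, response) {
    response.writeHead(200, {"Content-Type": "text/plain"});
    response.end("Triggered all responses");
    connectionResponse.forEach(function(res) {
      res.end("CAFE");
    });
    connectionResponse = [];
  }).listen(8887);
}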

Related

multiple instances of same node.js app on AWS

Is there an option on AWS to run multiple instances of the same node.js app?
When I try to run multiple instances of my node.js app on my computer, I get an EADDRINUSE error. When I change the port number in my code and save it as a new js file, I am able to run multiple instances. But how can I set this up on AWS? Does AWS Elastic Beanstalk perhaps have an option for this? I couldn't find one.
This is the part of my app where I communicate with clients, and where I change the port number:
http.createServer(function(request, response) {
  var path = url.parse(request.url).pathname;
  if (path == "/getjson") {
    response.writeHead(200, {'Content-Type': 'text/plain'});
    response.end('_testcb(\'' + string + '\')');
    console.log(new Date() + " - response sent to client");
  }
}).listen(port);
Or maybe you have a better solution for running multiple instances of the same node.js app? Note that the instances need to share a small database.
You may want to use Node's cluster API; see the documentation at http://nodejs.org/api/cluster.html.
var cluster = require('cluster');
var http = require('http');
var url = require('url');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Fork one worker per CPU.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  cluster.on('exit', function(worker, code, signal) {
    console.log('Cluster worker ' + worker.process.pid + ' died');
    cluster.fork(); // replace the dead worker
  });
} else {
  // All workers listen on the same port; connections are distributed among them.
  http.createServer(function(request, response) {
    var path = url.parse(request.url).pathname;
    if (path == "/getjson") {
      response.writeHead(200, {'Content-Type': 'text/plain'});
      response.end('_testcb(\'' + string + '\')');
      console.log(new Date() + " - response sent to client");
    }
  }).listen(port);
}
Have you tried Amazon Elastic Load Balancing? A load balancer can also work with Auto Scaling to add or remove instances depending on traffic; see the AWS documentation for details.
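If the goal is specifically Elastic Beanstalk, one common approach is to avoid hard-coding the port at all and read it from the environment instead. This is only a minimal sketch, assuming the standard Node.js platform, which passes the listener port to the application through process.env.PORT:
// Minimal sketch: take the port from the environment when the platform
// provides one (e.g. process.env.PORT on Elastic Beanstalk), and fall back
// to a local default, so the same file runs unmodified in both places.
var http = require('http');

var port = parseInt(process.env.PORT, 10) || 3000;

http.createServer(function(request, response) {
  response.writeHead(200, {'Content-Type': 'text/plain'});
  response.end('ok\n');
}).listen(port, function() {
  console.log('Listening on port ' + port);
});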

Nodejs for analytics data dumping in flat file

We are planning to use Node.js instead of Tomcat or Jetty, but in the tests we ran, Node performed slower than Jetty. The test ran on an 8-core machine with 32 GB of RAM, using 1,000,000 requests at a concurrency of 1,000. We used the cluster, url, http and os modules together with a write stream. Do these modules add any overhead? Is there any optimization we could do to improve the performance? The Node code follows.
var http = require("http");
var url = require("url");
var cluster = require('cluster');
var fs = require('fs');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  cluster.on('exit', function(worker) {
    console.log('worker ' + worker.process.pid + ' died');
  });
} else {
  var server = http.createServer(function(request, response) {
    response.end();
  });

  // One log file per worker; start listening once the file is open.
  var stream = fs.createWriteStream("nodejsCluster" + cluster.worker.id + ".txt");
  stream.once('open', function(fd) {
    server.listen(8084);
  });

  server.on('request', function(request, response) {
    var parsedUrl = url.parse(request.url, true, true);
    var queryAsObject = parsedUrl.query;
    stream.write((new Date()) + 'This is the content to write into file ' + queryAsObject.test + "\n");
  });

  console.log("Server is listening " + cluster.worker.id);
}
Thanks

how does node.js clustering work?

I am trying to understand the following piece of code, which is used to create multiple servers to make use of a multi-core CPU.
var cluster = require("cluster");
var http = require("http");
var numCPUs = require("os").cpus().length;
var port = parseInt(process.argv[2]);

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  cluster.on("exit", function(worker, code, signal) {
    cluster.fork();
  });
} else {
  http.createServer(function(request, response) {
    console.log("Request for: " + request.url);
    response.writeHead(200);
    response.end("hello world\n");
  }).listen(port);
}
My question is: given that every created server listens on the same port, what guarantees that a request won't be served by more than one server?
In node v0.10, the OS kernel chooses which child gets each incoming connection; in node v0.11+ and io.js v1.0.0+, the master process does round-robin scheduling by default (except on Windows for now). Either way, each connection is handed to exactly one worker, so a request is never served by more than one server. This default behavior is configurable via cluster.schedulingPolicy or the NODE_CLUSTER_SCHED_POLICY environment variable, as sketched below.
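For reference, a minimal sketch of choosing the scheduling policy explicitly; the policy must be set before the first cluster.fork() call, and the same switch is available through the NODE_CLUSTER_SCHED_POLICY environment variable with the values "rr" or "none".
var cluster = require('cluster');
var http = require('http');

// Pick the scheduling policy before forking: SCHED_RR for round-robin in the
// master, SCHED_NONE to leave the choice to the operating system.
cluster.schedulingPolicy = cluster.SCHED_RR;

if (cluster.isMaster) {
  cluster.fork();
  cluster.fork();
} else {
  http.createServer(function(req, res) {
    res.end('handled by worker ' + cluster.worker.id + '\n');
  }).listen(8000);
}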

Node.js Chat Application Cluster Server using Socket.IO

I have created the code below for a chat application.
The code works fine on a single server, but on a cluster of servers it gives the error:
client not handshaken client should reconnect, socket.io in cluster
Can anyone please help me get this working on a cluster of servers?
Thanks.
Edit: Code
var cluster = require('cluster');
var http = require('http');
var numCPUs = require('os').cpus().length;
var sio = require('socket.io');
var server;
var io;

if (cluster.isMaster) {
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  cluster.on('online', function(worker) {
    console.log('A worker with #' + worker.id);
  });
  cluster.on('listening', function(worker, address) {
    console.log('A worker is now connected to ' + address.address + ':' + address.port);
  });
  cluster.on('exit', function(worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died');
  });
} else {
  server = http.createServer(function(req, res) {
    res.writeHead(200);
    res.end('hello world\n');
  }).listen(8000);
  _socketServer();
}

// Declared as a function declaration so it is hoisted and available when the
// worker branch above calls it.
function _socketServer() {
  io = sio.listen(server);
  io.set("log level", 1);
  io.sockets.on("connection", function(socket) { });
}
Error: client not handshaken client should reconnect, socket.io in cluster
Try using a Redis server with the cluster and subscribing to the events exposed by Redis. That is one way to solve this problem.
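As a rough sketch of what that could look like with socket.io 0.9.x and its bundled RedisStore (the require path below follows the 0.9-era module layout and assumes a Redis server running locally; newer socket.io versions use a different adapter mechanism):
var http = require('http');
var sio = require('socket.io');
var redis = require('redis');
var RedisStore = require('socket.io/lib/stores/redis');

var server = http.createServer(function(req, res) {
  res.writeHead(200);
  res.end('hello world\n');
}).listen(8000);

var io = sio.listen(server);
io.set('log level', 1);

// Back socket.io with Redis so that handshake/session state written by one
// cluster worker is visible to the others.
io.set('store', new RedisStore({
  redisPub: redis.createClient(),
  redisSub: redis.createClient(),
  redisClient: redis.createClient()
}));

io.sockets.on('connection', function(socket) {
  // chat handlers go here
});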
Edit:
Some code samples, the architecture/frameworks used, and any other information would be very helpful.
Code: (the same code as posted above)
Actually I am trying to create a chat application using socket.io. For a single server it works fine, but when I try to run it as a cluster it does not work for me. These errors keep appearing in the console:
client not handshaken client should reconnect, socket.io in cluster
web socket invalid
Can someone help me get socket.IO working on a cluster of servers?
You can store the socket information for all the workers this way:
var io = require('socket.io').listen(server);
var ClusterStore = require('strong-cluster-socket.io-store')(io);
io.set('store', new ClusterStore());
Use npm to install the 'strong-cluster-socket.io-store' module.

Node.js Cluster not listening

I am trying to build a "Hello World" multi-process node.js HTTP server.
Using the code sample provided in the node docs here, I can't get the "listening" event to fire, and so the HTTP server never responds to requests. The "online" event does fire, however.
How can I get this server to respond to requests?
I'm running on OSX 10.8.4, node v0.10.7.
Here's what I have:
var cluster = require('cluster');
var http = require('http');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Fork workers.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  cluster.on('online', function(worker) {
    console.log('A worker with #' + worker.id);
  });
  cluster.on('listening', function(worker, address) {
    console.log('A worker is now connected to ' + address.address + ':' + address.port);
  });
  cluster.on('exit', function(worker, code, signal) {
    console.log('worker ' + worker.process.pid + ' died');
  });
} else {
  // Workers can share any TCP connection.
  // In this case it is an HTTP server.
  http.createServer(function(req, res) {
    res.writeHead(200);
    res.end('hello world\n');
  }).listen(8000);
}
I fixed the issue by running the file directly. Previously, I ran server.js, which was a one-liner:
server.js:
require("./server/app.js");
server/app.js contains the code snippet from the question.
By running node app.js instead of node server.js, the cluster started listening. I'm not sure why this made a difference.
