Node.js memory leak in streams - node.js

I noticed a strange increase in memory usage in an express.js application, and after a lot of investigation I discovered that the problem was caused by writing to a file using writable streams.
To isolate the problem I created this simple app:
var express = require('express');
var http = require('http');
var path = require('path');
var fs = require('fs');

var app = express();
app.set('port', process.env.PORT || 3000);

var writable = fs.createWriteStream('/tmp/stream', {flags: 'a'});
var i = 1;

app.get('/', function (req, res) {
    i = i + 1;
    writable.write(i + '\n');
    res.end();
});

app.get('/flush', function (req, res) {
    writable.end();
    res.end();
});

http.createServer(app).listen(app.get('port'), function () {
    console.log('Express server listening on port ' + app.get('port'));
});
After launching several requests with a benchmark tool and analyzing the memory usage with 'top', I realized that it grows and is never released.
Calling the /flush route after the requests finish doesn't free the memory either.
Is this a memory leak, or am I doing something wrong?
I've run the tests with node 0.8.6, 0.10.37 and 0.12.7, with the same results.
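For reference, here is a backpressure-aware variant of the write route (a sketch, not necessarily the fix): fs write streams buffer data in memory whenever write() returns false, and emit 'drain' once that buffer has been flushed, so respecting the return value keeps the buffer bounded:
app.get('/', function (req, res) {
    i = i + 1;
    // write() returns false once data starts queuing in the stream's internal buffer
    var flushed = writable.write(i + '\n');
    if (flushed) {
        res.end();
    } else {
        // answer only after the buffer drains, so writes cannot pile up
        // faster than the disk absorbs them
        writable.once('drain', function () {
            res.end();
        });
    }
});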

Related

concurrency using node.js application

This is my sample code for a web server which hosts a 100MB file and provides users a link to download it. This application works for a few download requests:
var http = require('http');
var path = require('path');
var express = require('express'); // The ExpressJS framework
var fs = require('fs');
var console = require('console');

var app = express();
app.set('port', process.env.PORT || 3000);
app.set('view engine', 'ejs');

app.get('/f1', function(req, res){
    var filenameWithPath = __dirname + "/uploaded_files/f1";
    res.setHeader("content-type", "binary");
    console.log(filenameWithPath);
    res.download(filenameWithPath, function (err, data) {
        if (err) return console.error(err);
        console.log(data);
    });
});

app.get('/', function(req, res){
    res.write('This is lift-downloadable app home page');
    res.end();
});

var server = http.createServer(app);
server.listen(app.get('port'), function(){
    console.log('Express server listening on port ' + app.get('port'));
});
However, when I run the concurrent test using this script:
#!/bin/bash
cd tmp
rm *
for i in `seq 1 20`;
do
    wget https://lift-downloadable.mybluemix.net/f1 &
done
It only downloads the files partially. The observations are:
1) It takes time to start the download, and the start delay increases as we increase the number of concurrent threads.
2) Suppose 2 out of 15 download threads have started downloading; when the 3rd one starts downloading, the first two's downloads end abruptly, and subsequently all other threads also end abruptly after starting their downloads.
3) I am also not able to see the total download size during a file download. It always says something like 2MB out of unknown.
I have verified that the download speed is quite good, so network bandwidth is fine, and we do not see memory or CPU issues on the server hosting this application.
Can you please help?
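For comparison, here is a sketch that streams the file manually, sets Content-Length up front (which is what lets the browser show the total size), and relies on pipe() for backpressure. The /f1-stream route name is made up for illustration; the file path is the same one used above:
app.get('/f1-stream', function (req, res) {
    var filenameWithPath = __dirname + "/uploaded_files/f1";
    fs.stat(filenameWithPath, function (err, stat) {
        if (err) {
            res.statusCode = 500;
            return res.end();
        }
        res.setHeader('Content-Type', 'application/octet-stream');
        res.setHeader('Content-Length', stat.size);
        res.setHeader('Content-Disposition', 'attachment; filename="f1"');
        // pipe() pauses the read stream whenever the client cannot keep up,
        // so each concurrent download holds only a small buffer in memory
        fs.createReadStream(filenameWithPath).pipe(res);
    });
});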

socket.io not working node.js

I am not able to run socket.io code in node.js; console.log() output is also not displayed when running the code. Below is the code.
app.js
var express = require('express');
var http = require('http');

var app = express();
app.set('port', process.env.PORT || 3000);

app.post('/testStream', test.testStream);

var server = http.createServer(app).listen(app.get('port'), function(){
    console.log('Express server listening on port ' + app.get('port'));
});

module.exports.appServer = server;
and I have created a test.js file where I am accessing this exported variable appServer.
var server = require('../app.js');

exports.testStream = function(req, res){
    var io = require('socket.io').listen(server.appServer);
    io.on('connection', function(socket){
        console.log("in socket");
        fs.readFile('E:/temp/testimg.png', function(err, buf){
            socket.emit('image', {image: true, buffer: buf});
            console.log("test image");
        });
    });
};
When the code runs, it gets stuck and does not show the console.log() output. What am I doing wrong here? Any help is very much appreciated.
I would suggest following the code structure suggested in the socket.io docs.
Also, you should not be calling io.listen or io.on('connection') inside your testStream Express middleware. These are things you should only do once, ideally during startup inside app.js, not in reaction to a POST request. In fact, I'm not sure what the purpose of your testStream middleware is; it's not even returning any response (e.g. res.end()).
If you want to handle socket connections in a separate module you can, but instead of exporting your app's server the way you are, try passing the io instance as a variable to your submodule. In short, try this:
app.js
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var test = require('./test')(io);

app.set('port', process.env.PORT || 3000);

server.listen(app.get('port'), function() {
    console.log('Express server listening on port ' + app.get('port'));
});
test.js
var fs = require('fs'); // needed for fs.readFile below

module.exports = function(io) {
    io.on('connection', function(socket) {
        console.log("in socket");
        fs.readFile('E:/temp/testimg.png', function(err, buf) {
            socket.emit('image', {
                image: true,
                buffer: buf
            });
            console.log("test image");
        });
    });
};
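For completeness, a hypothetical browser-side counterpart (not part of the original question): it assumes the page loads the client script that the socket.io server exposes at /socket.io/socket.io.js, and listens for the 'image' event emitted by test.js above:
// Hypothetical client-side handler, runs in the browser
var socket = io();
socket.on('image', function (data) {
    // data.image and data.buffer match the object emitted by the server
    console.log('received image event, image flag: ' + data.image);
});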

How to use sticky-session with cluster in express - node js

I created a cluster-based app with reference to this question.
But I started facing issues with session handling. How do I use sticky-session in Express with cluster?
I was trying to use this npm module, but it resulted in the same situation. How do I fix this session issue?
sticky(http.createServer(app).listen(app.get('port'), function () {
    console.log('Express server listening on port ' + app.get('port'));
}));
Finally found a solution; just try this code. It maintains sticky sessions, and it also uses all the CPUs (one process each) for other clients. You can set up Express cluster sticky sessions using the following code. You can get sticky-session here: https://github.com/indutny/sticky-session
var http = require('http');
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');
var express = require('express');

var app = express();

app.get('/', function (req, res) {
    console.log('worker: ' + cluster.worker.id);
    res.send('Hello World!');
});

var server = http.createServer(app);
sticky.listen(server, 3000);
It has nothing to do with Express.
You just forgot the listen() on the sticky function.
sticky(
    http.createServer(app)
).listen(app.get('port'), function () {
    console.log('Sticky server started on port ' + app.get('port'));
});

Does Express use cluster by default?

I tried using JMeter to run a benchmark against a pure node.js server and an Express server.
The result is:
node.js = 600 RPS, express = 1200 RPS
While running the benchmark, the node.js server always uses 1 CPU, but the Express server uses all of them.
Does this mean that the Express server uses cluster by default?
UPDATE: benchmark code
node.js
var http = require('http');
var Server = http.createServer(function(req, res){
    res.write('I heard you !');
    res.end();
}).listen(8000);
Express (3.8)
var express = require('express');
var app = express();

// ----------- middleware -------------
app.configure(function(){
    app.use(express.bodyParser());
    app.use(express.cookieParser());
    app.use('/public', express.static(__dirname + '/files'));
});

// ----------- route -------------
app.get('/', function(req, res){
    res.send('I heard you!');
});

app.listen(8000);
Express.js does not use cluster.
The reason you see more than a single core busy is very likely that node.js offloads work (for example file I/O) to background threads through its async APIs, not that Express forks extra processes.
You can use cluster with an Express app, but as long as you are not having scaling issues it is just unnecessary hassle.
Below is some example code (source):
// Include the cluster module
var cluster = require('cluster');

// Code to run if we're in the master process
if (cluster.isMaster) {
    // Count the machine's CPUs
    var cpuCount = require('os').cpus().length;
    // Create a worker for each CPU
    for (var i = 0; i < cpuCount; i += 1) {
        cluster.fork();
    }
// Code to run if we're in a worker process
} else {
    // Include Express
    var express = require('express');
    // Create a new Express application
    var app = express();
    // Add a basic route – index page
    app.get('/', function (req, res) {
        res.send('Hello World!');
    });
    // Bind to a port
    app.listen(3000);
    console.log('Application running!');
}

nodejs + https + express - Error 324 (net::ERR_EMPTY_RESPONSE)

I have a nodejs server structured like so:
(app.js):
var fs = require('fs'),
    http = require('http'),
    https = require('https'),
    express = require('express'),
    connect = require('express/node_modules/connect'),
    app = module.exports = express();

var ssl_options = {
    key: fs.readFileSync('/etc/nginx/ssl/server.key'),
    cert: fs.readFileSync('/etc/nginx/ssl/server.crt')
};

var server = https.createServer(ssl_options, app);

// EXPRESS
// app.set('view options', { layout: false });

var auth_token = "asdfasfdasdfasdf";
var express_session_id = "express.sid";

app.configure(function () {
    app.use(express.cookieParser());
    app.use(express.session({secret: auth_token, key: express_session_id}));
    app.set("view options", {layout: false});
});

app.get('/', function (req, res) {
    console.log(req.headers);
    fs.readFile(__dirname + '/index.html', function(err, data){
        res.writeHead(200, {'Content-Type': 'text/html'});
        res.write(data, 'utf8');
        res.end();
    });
});

app.listen = function(port) {
    server.listen(port);
    // console_log appears to be a custom logger defined elsewhere
    console_log.log('info', "COTTONMOUTH server listening on port " + port);
};
I have a cluster.js running the above app.js:
var cluster = require('cluster'),
    os = require('os'),
    workers = {};

if (cluster.isMaster) {
    cluster.on('exit', function(worker, code, signal) {
        if (worker.suicide === false) {
            var exitCode = worker.process.exitCode;
            console.error('worker ' + worker.process.pid + ' died (' + exitCode + '). restarting...');
            cluster.fork();
        }
    });

    var n = process.env.PROC || os.cpus().length;
    for (var i = 0; i < n; i++) {
        cluster.fork();
    }

    process.on('SIGTERM', function() {
        console.log('[MASTER] Going for shutdown...');
        for (var id in cluster.workers) {
            console.log('\tkilling worker: ' + cluster.workers[id].process.pid);
            cluster.workers[id].destroy();
        }
        console.log("[MASTER] Here's looking at you, kid.");
    });
} else {
    require('./app').listen(process.env.PORT || 3456);
}
My problem is that this setup works fine in my local VirtualBox environment (Ubuntu running on a Mac host). I am able to access the node.js server at dev.domain.com:3456.
However, when I move this to my production Rackspace server (same environment config and setup) and try to access it at prod.domain.com:3456,
the browser hangs for a bit and then returns Error 324 (net::ERR_EMPTY_RESPONSE): The server closed the connection without sending any data.
I did some research and found some leads, but they weren't too helpful.
Any ideas?
UPDATE:
When I lower the port down to 90, it seems to work, which is interesting.
I am going to leave it at port 90 for now, but I'd still appreciate an answer as to why this is.
Thanks
I got this error message when my request grew too large (>65K). My solution was to split the data into several smaller snippets.
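That idea as a minimal hypothetical sketch; payload and sendSnippet are placeholders for the real data and transport:
// Hypothetical sketch: split a large payload into ~32KB snippets so no
// single request exceeds the size that triggered the empty response
var SNIPPET_SIZE = 32 * 1024;
for (var offset = 0; offset < payload.length; offset += SNIPPET_SIZE) {
    sendSnippet(payload.slice(offset, offset + SNIPPET_SIZE));
}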
Temporary Workaround:
When I lower the port down to 90, it seems to work, which is interesting. I am going to leave it at port 90 for now, but if someone has an answer as to why this is, please share.
Thanks
This issue can also be encountered when you add an extra '/' from the frontend when calling a backend API.
For example:
my route is
baseUrl/api/customer
but I'm sending the request to
baseUrl/api/customer/
It will not work; at least this was the case for me. I was too dumb to notice it.
Maybe this can be helpful to someone.
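One way a trailing slash can matter inside Express itself, as a sketch: with strict routing enabled (it is off by default, in which case Express treats both forms as the same route), /api/customer and /api/customer/ become distinct routes:
var express = require('express');
var app = express();
// with strict routing on, only the exact path below matches
app.enable('strict routing');
app.get('/api/customer', function (req, res) {
    res.send('customer route');
});
// GET /api/customer/ now falls through to the 404 handler
app.listen(3000);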
