nodejs - running multiple child_process, but some requests got closed unexpectedly - node.js

On the server side, I have this nodejs code. I simplified the code here to make the question clear:
var express = require('express');
var app = express();
var spawn = require('child_process').spawn;

app.get('/print_N', function(req, res) {
  // Long-running, I/O-intensive child whose stdout is streamed to the client.
  var child = spawn('python', ['some_tick_data_process.py']);

  // FIX: Node's default socket timeout (2 minutes) closes slow responses and
  // fires 'close' on the request, killing healthy long-running streams.
  // Raise it so I/O-heavy children get time to finish.
  res.setTimeout(4 * 60 * 60 * 1000); // 4 hours

  // 'close' fires when the connection drops (client gave up or timeout hit);
  // reap the child so it does not keep running with nowhere to write.
  req.on('close', function() {
    console.log('req to close with pid=' + child.pid);
    child.kill('SIGTERM');
  });

  child.stdout.pipe(res);
  child.stdout.on('error', function(err) {
    console.log(child.pid + " error !!!");
    console.log(err);
  });
});

app.listen(3000);
console.log('Listening on port 3000...');
The underlying some_tick_data_process.py is quite I/O intensive. On the client side, I have a small python application to read the stream. The problem is that some processes will run into error "req to close". With a small number of processes, it is OK. I tried:
var http = require('http');
http.globalAgent.maxSockets = 100;
But it doesn't help. Please share your thoughts, thanks!
After leveraging on P.T.'s advice, the fix is:
app.get('/:version/query', function(req, res) {
  // Raise the per-response socket timeout (Node default: 2 minutes) to
  // 4 hours so long streaming responses are not cut off mid-stream.
  res.setTimeout(4 * 60 * 60 * 1000); // nodejs socket timeout 4-hour
}); // FIX: the original snippet was missing the closing `);` of app.get

The express request 'close' events mean that the connection closed (I believe the express request inherits this from the underlying http.IncomingMessage).
This could be either because the client gave up, or because the nodejs socket timeout hit 2 minutes.
You need to figure out which side of the connection is giving up. I would've thought it was the client side, but was surprised to discover the nodejs timeout. See http://nodejs.org/api/http.html#http_request_setsocketkeepalive_enable_initialdelay for setting the socket timeout on the request's socket.
See also: node.js http.IncomingMessage does not fire 'close' event

Related

How does Sticky session works?

I have this code (copied from GitHub) which creates four (the number of CPU cores) child instances of the master. I have used sticky sessions for my socket connection to connect properly to the backend, with the Redis adapter as the message broker. But I can't understand how it is working.
Can someone please explain, line by line, what is happening?
// FIX: declare every module binding explicitly. The original snippet began
// with a bare `http = require('http'),` comma-chain (implicit globals), and
// `sticky` was used below without ever being required.
var http = require('http');
var express = require('express');
var socketIO = require('socket.io');
var cluster = require('cluster');
var sticky = require('sticky-session'); // was missing: sticky.listen is called below
var socket_redis = require('socket.io-redis');
var port = process.env.PORT || 3000;

var app = express();
var io;
var server = http.Server(app);

app.set('view engine', 'ejs');
app.get('/', function (req, res) {
  console.log('send message by worker: ' + cluster.worker.id);
  res.render('abhi');
});

io = socketIO(server);
// The Redis adapter relays socket.io packets between workers, so a message
// emitted in one worker reaches sockets owned by the others.
io.adapter(socket_redis({ host: 'localhost', port: '6379' }));

io.on("connection", s => {
  s.on("new", data => {
    console.log("hey you");
  });
});

// sticky.listen() returns false in the master process (which forks one worker
// per CPU and routes each client IP to a fixed worker) and true in each
// worker (which actually handles connections).
if (!sticky.listen(server, port)) {
  server.once('listening', function () {
    console.log('Server started on port ' + port);
  });
  if (cluster.isMaster) {
    console.log('Master server started on port ' + port);
  }
} else {
  console.log('- Child server started on port ' + port + ' case worker id=' + cluster.worker.id);
}
It's hard to explain sticky session based on your code you provided. But, in a short explanation; I believe socket.io keeps the connected sockets as a thread/object in it's thread. So when you send a packet/request to a socket(.io) server that packet/request has to reach to the process/server that you made your handshake. Or else it'll most likely fail.
If you are looking for a sticky session and clustering I'd advise you to check out socket.io-redis, it makes things quite easy and smooth.
And to run your nodejs process many times to utilize your entire cpu, you can either use Docker and summon containers for each thread/cpu depending on your setup(which I did for a while ago and they seemed quite good) or you can use nodejs' cluster library.

Nodejs request proxy stream(mjpeg) connection never ends

(unnecessary backstory)
I have a nodejs server with expressjs framework that's proxy streaming a webcam feed. The reason I need this is because the mjpg stream must come from this server due to complex CORS issues.
//proxy from webcam server to avoid CORS complaining
app.get('/stream1', function (req, res) {
  var url = "http://camera.nton.lviv.ua/mjpg/video.mjpg";
  // FIX: keep a handle on the upstream request itself -- .pipe() returns the
  // destination, not the request -- because an mjpeg stream never ends on its
  // own and must be aborted when the client goes away.
  var upstream = request(url);
  upstream.pipe(res);
  upstream.on('error', function (err) {
    console.log(err);
  });
  // When the browser closes the connection (tab closed, navigation), abort
  // the upstream camera request; otherwise the proxy connection leaks forever.
  req.on('close', function () {
    upstream.abort();
  });
});
question :
The issue is simple. request(url).pipe(res) never closes, because the source is mjpeg which literally never ends. I need to find a way to force close this pipe when the client(browser; the destination) is no longer available - as in, closes the window.
The other answers did not work for me.
This line var pipe=request(url).pipe(res);
returns the pipe instead of the request object. So I needed to break the line up.
The request object is needed to abort. Calling the .end() didn't work either, but the .abort() did the trick. It took me hours to find the answer that worked for me, so I thought I would share.
app.get('/cam/frontdoor', function (req, res) {
  // Options for the upstream mjpeg camera (credentials redacted).
  var upstreamOptions = {
    auth: {
      user: '',
      pass: ''
    },
    url: 'http:/xx.xx.xx.xx/mjpg/video.mjpg'
  };

  // Hold the request object itself (not the pipe return value) so .abort()
  // is available when the client disconnects.
  var upstream = request(upstreamOptions);
  upstream.pipe(res);
  upstream.on('error', function (e) {
    console.log(e);
  });

  // Client finished normally.
  req.on('end', function () {
    console.log('end');
    upstream.abort();
  });

  // Client went away unexpectedly.
  req.on('close', function () {
    console.log('close');
    upstream.abort();
  });
});
Use socket.io to monitor the remote connection
// install it on your project:
//   npm install socket.io

// require it on server side
var socket = require('socket.io');

// listen for sockets from your server
var mysocks = socket.listen(myexpressappvar);

// keep collection of sockets for use if needed
// its good practice
var connectedSockets = [];

// add event handlers on server side
mysocks.sockets.on("connection", function (socket) {
  // add socket to our collection
  connectedSockets.push(socket);
  // you will need to bind their stream id here.
  // exe.....

  // listen for disconnected
  socket.on("disconnect", function () {
    // FIX: the original spliced from `connections`, which is never defined;
    // the tracked array is `connectedSockets`.
    connectedSockets.splice(connectedSockets.indexOf(socket), 1);
    // destroy stream here
    // exe...
  });
});

// last thing: add socket.io to the client side ...
//   <script src="/socket.io/socket.io.js"></script>
// ... then call connect from a client side js file:
//   var socket = io.connect();
I have found out a simpler way. Add a event listener for client connection closing, and force close the pipe when it happens.
app.get('/stream1', function (req, res) {
  var url = "http://camera.nton.lviv.ua/mjpg/video.mjpg";
  var pipe = request(url).pipe(res);
  // Without an error handler, the pipe throws once pipe.end() is called.
  pipe.on('error', function () {
    console.log('error handling is needed because pipe will break once pipe.end() is called');
  }); // FIX: each handler registration below was missing its closing `)`
  //client quit normally
  req.on('end', function () {
    pipe.end();
  });
  //client quit unexpectedly
  req.on('close', function () {
    pipe.end();
  });
});

Socket.io and Vagrant can't establish websocket connection

I've got a Vagrant box set up to port-forwards a socket.io application from internal port 5000 to external port 8081; when I try to connect from the client it starts long-polling the connection but I don't see any kind of response from the server and the server app never registers a connection attempt. The connection doesn't fail or return any error response code though, it just returns a 200 code with a blank response.
// Utilities
var http = require('http');
var socketIO = require('socket.io');
var querystring = require('querystring');

// Plain-HTTP fallback handler: anything that is not a websocket upgrade
// just gets a JSON notice back.
var server = http.createServer(function baseHandler(req, res) {
  res.writeHead(200);
  var notice = {
    message: 'This server only supports WebSocket connections'
  };
  res.end(JSON.stringify(notice));
});

// Attach socket.io to the same HTTP server.
var io = socketIO(server);

server.listen(process.env.socket_port || 5000, function () {
  var sockets = [];
  console.log('App connected');
});

io.on('connection', function (socket) {
  console.log('Socket connected');
  console.log('Socket in rooms ' + socket.rooms.join(', '));
});
The same app works just fine when I'm trying to connect from the app running directly on my PC, so my code doesn't seem to be the problem here, especially given how it's basically duplicating the basic example in the docs; not really sure how to solve this from here.
This is one of those really stupid bugs which crop up when you're working on two different problems with the same codebase at the same time. Here's the client-side code line which was breaking:
var socket = io('127.0.0.1:8081/?access_token=1d845e53c4b4bd2e235a66fe9c042d75ae8e3c6ae', {path: '/auth/socket.io'});
Note the path key is set to point to a subdirectory, /auth, which is a leftover from my work to get an nginx folder proxying to an internal port which the server was working on.

ConnectJS + RequireJS hanging after a few reloads

I got a fairly massive requirejs based app that runs unbundled locally. I have a few hundred js files that get loaded in async. This is pretty quick locally and generally not a big deal. After maybe 10->20 page refreshes connectjs starts hanging for some reason. I got a half decent message once when I opened a different page and chrome indicated "waiting for available socket."
I'm guessing that at some point something ends up hanging and the connection never ends. At some point enough of these connections results in Node + connect to not accept any more requests. Has anyone experienced this and what is the solution? Is there a way to time out or reject requests from the server side?
Here is my connectjs server script:
var connect = require('connect');
var http = require('http');

var app = connect()
  .use(connect['static'](__dirname))
  .use(function (req, res) {
    'use strict';
    res.setHeader('Access-Control-Allow-Origin', '*');
    // used to stub out ajax requests
    if (req.url.indexOf('ajax/') !== -1) {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({}));
      return;
    }
    // FIX: the original never ended the response for non-ajax URLs that fell
    // through the static middleware, so those connections stayed open until
    // the browser exhausted its socket pool ("waiting for available socket").
    res.writeHead(404, { 'Content-Type': 'text/plain' });
    res.end('Not Found');
  });

var server = http.createServer(app);
server.listen(3000, function () {
  'use strict';
  console.log('server is listening on port 3000');
});

Force close all connections in a node.js http server

I have an http server created using:
var server = http.createServer()
I want to shut down the server. Presumably I'd do this by calling:
server.close()
However, this only prevents the server from receiving any new http connections. It does not close any that are still open. http.close() takes a callback, and that callback does not get executed until all open connections have actually disconnected. Is there a way to force close everything?
The root of the problem for me is that I have Mocha tests that start up an http server in their setup (beforeEach()) and then shut it down in their teardown (afterEach()). But since just calling server.close() won't fully shut things down, the subsequent http.createServer() often results in an EADDRINUSE error. Waiting for close() to finish also isn't an option, since open connections might take a really long time to time out.
I need some way to force-close connections. I'm able to do this client-side, but forcing all of my test connections to close, but I'd rather do it server-side, i.e. to just tell the http server to hard-close all sockets.
You need to
subscribe to the connection event of the server and add opened sockets to an array
keep track of the open sockets by subscribing to their close event and removing the closed ones from your array
call destroy on all of the remaining open sockets when you need to terminate the server
You also have the chance to run the server in a child process and exit that process when you need.
For reference for others who stumble across this question, the https://github.com/isaacs/server-destroy library provides an easy way to destroy() a server (using the approach described by Ege).
I usually use something similar to this:
var express = require('express');
var server = express();

/* a dummy route */
server.get('/', function (req, res) {
  res.send('Hello World!');
});

/* handle SIGTERM and SIGINT (ctrl-c) nicely */
process.once('SIGTERM', end);
process.once('SIGINT', end);

var listener = server.listen(8000, function (err) {
  if (err) throw err;
  var host = listener.address().address;
  var port = listener.address().port;
  console.log('Server listening at http://%s:%s', host, port);
});

/* track every open socket so they can be destroyed on shutdown */
var lastSocketKey = 0;
var socketMap = {};
listener.on('connection', function (socket) {
  /* generate a new, unique socket-key */
  var socketKey = ++lastSocketKey;
  /* add socket when it is connected */
  socketMap[socketKey] = socket;
  socket.on('close', function () {
    /* remove socket when it is closed */
    delete socketMap[socketKey];
  });
});

function end() {
  /* loop through all sockets and destroy them */
  Object.keys(socketMap).forEach(function (socketKey) {
    socketMap[socketKey].destroy();
  });
  /* after all the sockets are destroyed, we may close the server! */
  listener.close(function (err) {
    /* FIX: the original did `throw err()` -- CALLING the Error object --
       which raises a TypeError instead of rethrowing the actual error. */
    if (err) throw err;
    console.log('Server stopped');
    /* exit gracefully */
    process.exit(0);
  });
}
it's like Ege Özcan says, simply collect the sockets on the connection event, and when closing the server, destroy them.
I've rewritten the original answers using modern JS:
const server1 = http.createServer(/* ... */);

// Live sockets for server1, tracked so they can be torn down on demand.
const server1Sockets = new Set();
server1.on("connection", socket => {
  server1Sockets.add(socket);
  socket.on("close", () => server1Sockets.delete(socket));
});

// Destroy every socket in the given collection.
function destroySockets(sockets) {
  for (const socket of sockets.values()) {
    socket.destroy();
  }
}

destroySockets(server1Sockets);
My approach comes from this one and it basically does what #Ege Özcan said.
The only addition is that I set a route to switch off my server because node wasn't getting the signals from my terminal ('SIGTERM' and 'SIGINT').
Well, node was getting the signals from my terminal when doing node whatever.js but when delegating that task to a script (like the 'start' script in package.json --> npm start) it failed to be switched off by Ctrl+C, so this approach worked for me.
Please note I am under Cygwin and for me killing a server before this meant to close the terminal and reopen it again.
Also note that I am using express for the routing stuff.
var http = require('http');
var express = require('express');
var app = express();

// A liveness route plus a route that shuts the whole server down.
app.get('/', function (req, res) {
  res.send('I am alive but if you want to kill me just go to /exit');
});
app.get('/exit', killserver);

var server = http.createServer(app).listen(3000, function () {
  console.log('Express server listening on port 3000');
  /*console.log(process);*/
});

// Maintain a hash of all connected sockets, keyed by a running counter.
var sockets = {};
var nextSocketId = 0;
server.on('connection', function (socket) {
  // Register the newly connected socket under a fresh id.
  var socketId = nextSocketId++;
  sockets[socketId] = socket;
  console.log('socket', socketId, 'opened');

  // Drop the socket from the hash once it closes on its own.
  socket.on('close', function () {
    console.log('socket', socketId, 'closed');
    delete sockets[socketId];
  });

  // Extend socket lifetime for demo purposes
  socket.setTimeout(4000);
});

// Close the server, then destroy every socket that is still open.
function killserver() {
  console.log("U killed me but I'll take my revenge soon!!");
  // Stop accepting new connections.
  server.close(function () { console.log('Server closed!'); });
  // Destroy all open sockets so close() can actually complete.
  Object.keys(sockets).forEach(function (socketId) {
    console.log('socket', socketId, 'destroyed');
    sockets[socketId].destroy();
  });
}
There is now a closeAllConnections() method in v18.2.0

Resources