I am having problems getting the sticky-session module to work properly with socket.io, even in a simple example. Following the very minimal example given in the readme (https://github.com/indutny/sticky-session), I am just trying to get this example to work:
var cluster = require('cluster');
var sticky = require('sticky-session');
var http = require('http');

if (cluster.isMaster) {
  for (var i = 0; i < 4; i++) {
    cluster.fork();
  }
  Object.keys(cluster.workers).forEach(function(id) {
    console.log("Worker running with ID : " +
      cluster.workers[id].process.pid);
  });
}

if (cluster.isWorker) {
  var anotherServer = http.createServer(function(req, res) {
    res.end('hello world!');
  });
  anotherServer.listen(3000);
  console.log('http server on 3000');
}

sticky(function() {
  var io = require('socket.io')();
  var server = http.createServer(function(req, res) {
    res.end('socket.io');
  });
  io.listen(server);

  io.on('connection', function onConnect(socket) {
    console.log('someone connected.');
    socket.on('sync', sync);
    socket.on('send', send);

    function sync(id) {
      socket.join(id);
      console.log('someone joined ' + id);
    }

    function send(id, msg) {
      io.sockets.in(id).emit(msg);
      console.log('someone sent ' + msg + ' to ' + id);
    }
  });

  return server;
}).listen(3001, function() {
  console.log('socket.io server on 3001');
});
and a simple client:
var socket = require('socket.io-client')('http://localhost:3001');

socket.on('connect', function() {
  console.log('connected');
  socket.emit('sync', 'secret');
});
The workers start up fine. The HTTP servers work fine. But when the client connects, the console logs 'someone connected' and nothing more. The client never fires its connect event, so I suspect the upgrade/handshake is failing. If anyone can spot what I am doing wrong, that would help a lot.
Thanks!
#jordyyy: I was facing the same issue, and after some googling I found the answer.

Socket.IO completes its handshake over more than one request, and when you run with sticky sessions you are using multiple processes (one per core). The handshake requests can therefore be distributed across different processes, which cannot talk to each other (they are separate child processes with no IPC), so most of the time the connection fails or is lost (connect/disconnect events occur frequently).

So what is the solution? The solution is socketio-sticky-session.

socketio-sticky-session manages connections based on IP. When a client makes a request, the client's IP address is recorded against a process/worker, and all further requests from that client are forwarded to the same process/worker, so the connection is established properly.

And when you use the Redis adapter, you can actually share socket connection data between all processes/workers.
For more information: https://github.com/elad/node-cluster-socket.io
(you need a small patch to the worker_index method if your server supports IPv6)
Just knowledge bytes. :) :)
One more thing: you don't need to fork the processes yourself; the sticky session module does that for you.
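For reference, a rough sketch of what that looks like. socketio-sticky-session follows the same callback style as the sticky-session example in the question, and the Redis adapter (socket.io-redis here) is what shares rooms and emits between workers. Check the module's README for the exact options (IP header, worker count, etc.); the call below only shows the default form:

var sticky = require('socketio-sticky-session');
var redisAdapter = require('socket.io-redis');

sticky(function() {
  // runs only in the workers; the master just routes connections by client IP
  var http = require('http');
  var server = http.createServer(function(req, res) {
    res.end('socket.io');
  });

  var io = require('socket.io')(server);
  io.adapter(redisAdapter({ host: 'localhost', port: 6379 })); // share rooms/emits across workers

  return server;
}).listen(3001, function() {
  console.log('socket.io server on 3001');
});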
This was super old and wasn't really answered when I needed it, but my solution was to drop this confusing module (and any other like it) and just use pub/sub with the Redis adapter. The only other step was to force transports to websockets; if that bothers anyone, use something else. For my purposes this solution was simple, readable, didn't mess with the 'typical' Socket.IO API, and best of all it worked extremely well.
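For anyone landing here later, a minimal sketch of that approach, assuming the socket.io-redis adapter package and a local Redis instance (event names here are illustrative):

var http = require('http');
var server = http.createServer();

var io = require('socket.io')(server, {
  transports: ['websocket'] // skip HTTP long-polling, so the multi-request handshake problem disappears
});

// every worker/process publishes and subscribes through Redis,
// so io.emit() and rooms work across all of them
var redisAdapter = require('socket.io-redis');
io.adapter(redisAdapter({ host: 'localhost', port: 6379 }));

io.on('connection', function(socket) {
  socket.on('sync', function(id) {
    socket.join(id);
  });
  socket.on('send', function(id, msg) {
    io.to(id).emit('message', msg); // reaches sockets in the room even if they live on another worker
  });
});

server.listen(3001);

On the client side you would pass { transports: ['websocket'] } as well, so the connection never falls back to polling.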
I have a socket running in Node.js and use it from an HTML page. This works fine, but sometimes I receive an error in the developer console like
failed: Connection closed before receiving a handshake response. When that happens, my update is not reflected on the user's screen. Whenever changes are made on the admin screen, I have written logic in Laravel to store the values in Redis and fire a Laravel event broadcast; in Node.js, socket.io reads the Redis change and pushes the values to the user screens.
I have the following code in Laravel.
Laravel controller:
public function updatecommoditygroup(Request $request)
{
    $request_data = array();
    parse_str($request, $request_data);
    app('redis')->set("ssahaitrdcommoditydata", json_encode($request_data['commodity']));
    event(new SSAHAITRDCommodityUpdates($request_data['commodity']));
}
In the controller above, when the API call is received, the values are stored under this Redis key and the event is broadcast.
In my event class,
public $updatedata;

public function __construct($updatedata)
{
    $this->updatedata = $updatedata;
}

public function broadcastOn()
{
    return ['ssahaitrdupdatecommodity'];
}
Finally, I have written my socket.io file as below:
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var Redis = require('ioredis');
var redis = new Redis({ port: 6379 });

redis.subscribe('ssahaitrdupdatecommodity', function(err, count) {
});

io.on('connection', function(socket) {
  console.log('A client connected');
});

redis.on('pmessage', function(subscribed, channel, data) {
  data = JSON.parse(data);
  io.emit(channel + ':' + data.event, data.data);
});

redis.on('message', function(channel, message) {
  message = JSON.parse(message);
  io.emit(channel + ':' + message.event, message.data);
});

http.listen(3001, function() {
  console.log('Listening on Port 3001');
});
When I update data from the admin screen it is passed to the Laravel controller; the controller stores the received data in Redis and fires the event broadcast. The event broadcast passes the values to the socket server, and the socket server pushes the data to the client page whenever the Redis key changes.
On the client page I have written the code below:
<script src="../assets/js/socket.io.js"></script>

var socket = io('http://ip:3001/');
socket.on("novnathupdatecommodity:App\\Events\\NOVNATHCommodityUpdates", function(data) {
  // received data processing in client
});
Everything works fine most of the time, but sometimes I face an issue like
**VM35846 socket.io.js:7 WebSocket connection to 'ws://host:3001/socket.io/?EIO=3&transport=websocket&sid=p8EsriJGGCemaon3ASuh' failed: Connection closed before receiving a handshake response**
Because of this issue, the user's page does not get updated with the new data. Could anyone please help me solve this and suggest the best solution for it?
I think this is because your socket connection is timing out.
new io({
  path: ...,
  serveClient: ...,
  origins: ...,
  pingTimeout: ...,
  pingInterval: ...
});
The above is the socket configuration. If you don't configure the socket it sometimes behaves strangely. I do not know the root cause, but I too have faced similar issues, and implementing the socket configuration solved them.
Socket.io Server
A similar configuration should be done on the client side; there is a timeout option on the client.
Socket.io Client
For example, say this is your front-end code. You connect to the socket server using the following command:
io('http://ip:3001', { path: '/demo/socket' });
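A client-side sketch with the timeout option mentioned above might look like this (the URL and path are just the ones from this example):

var socket = io('http://ip:3001', {
  path: '/demo/socket',
  timeout: 6000 // ms to wait for the connection to be established before emitting connect_error
});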
On your server side, when creating the connection:

const io = require("socket.io");

const socket = new io({
  path: "/demo/socket",
  serveClient: false, /* whether to serve the client files (true/false) */
  origins: "*", /* allow cross-origin connections, i.e. it helps when the page is served from a different host */
  pingTimeout: 6000, /* how many ms to wait for a ping/pong from a client before the server closes that client's connection */
  pingInterval: 6000 /* how many ms before sending a new ping packet */
});

socket.listen(http);
Note: to avoid complications, start your HTTP server first and then start your sockets.
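As a rough illustration of that ordering note (same port and path as this example, purely for illustration):

const http = require('http').createServer();
http.listen(3001); // start the HTTP server first...

const io = require('socket.io')(http, { path: '/demo/socket' }); // ...then attach socket.io to it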
There are other options available, but the above are the most common ones. I am just describing what I see in the socket.io documentation available on GitHub (socket_config). Hope this helps.
I have a Node.js script which is supposed to regularly access a SailsJS application via a socket connection. Client and server run on physically different machines on different networks. The SailsJS application is proxied behind nginx. That works in general. However, at random times, the connection is established but the first post request within the websocket connection never reaches its destination.
The code looks basically like this:
var socketIOClient = require('socket.io-client');
var sailsIOClient = require('sails.io.js');

var io = sailsIOClient(socketIOClient);

io.sails.url = 'https://foo.foo:443';
io.sails.rejectUnauthorized = false;

io.socket.on('connect', function() {
  console.log("Connected!");
  io.socket.post('/someroute', { someOptions: "foo" },
    function(data) {
      console.log(data);
    });
});

io.socket.on('disconnect', function() {
  console.log("Disconnected!");
});

io.socket.on('connect_error', function() {
  console.log("connect_error!");
});
In case of a failure, simply nothing happens after console.log("Connected!"). Nothing appears in nginx's logs (in contrast to successful cases), and the callback of io.socket.post never gets executed.
The most important question for me is: At which side is the problem? Client or server?
How can I debug this and narrow down the problem? Could it be a networking issue? Or something wrong with the configuration, the implementation, or the script itself?
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  console.log(`Master ${process.pid} is running`);

  // Fork workers.
  for (let i = 0; i < numCPUs; i++) {
    cluster.fork();
  }

  cluster.on('exit', (worker, code, signal) => {
    console.log(`worker ${worker.process.pid} died`);
  });
} else {
  // Workers can share any TCP connection
  // In this case it is an HTTP server
  var sticky = require('sticky-session');
  var express = require('express');
  var app = express();

  app.get('/', function (req, res) {
    console.log('worker: ' + cluster.worker.id);
    res.send('Hello World!');
  });

  var server = http.createServer(app);
  sticky.listen(server, 3000);

  console.log(`Worker ${process.pid} started`);
}
I looked up the documentation for Node.js clustering and sticky-session, and another Stack Overflow answer regarding this:
var cluster = require('cluster');
var http = require('http');
var sticky = require('sticky-session');
var express = require('express');

var app = express();

app.get('/', function (req, res) {
  console.log('worker: ' + cluster.worker.id);
  res.send('Hello World!');
});

var server = http.createServer(app);
sticky.listen(server, 3000);
If this snippet is run without forking, it works fine, but it never works in the clustered example shown above: the workers are started, but the server is never initialised.
I read that sticky-cluster is an alternative. Can somebody give a proper, authoritative answer on this topic that will be useful for people looking into the same problem? Another major issue that comes with clustering is the app.locals object, which is used to store variables for an app instance; with multiple server instances this breaks, because values will differ across instances, and the app breaks. When answering, please don't just copy-paste some code; please give a detailed answer describing the approach, its benefits, and its shortcomings.

I am not looking for an answer limited to the sticky-session Node.js module; I welcome any other approach in which all cores of the processor are used while ensuring session continuity.

If it involves a RedisStore or a MongoDB store, that's OK. What I want to know is a standard approach for a Node.js application with clustering and session continuity.
https://github.com/indutny/sticky-session
https://nodejs.org/api/cluster.html
https://stackoverflow.com/a/37769107/3127499
There is a small problem in your code.

The sticky-session module already uses the Node.js cluster module internally. You don't need to call fork() because sticky-session will already do it for you. Let's find out how:
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');

var server = require('http').createServer(function(req, res) {
  res.end('worker: ' + cluster.worker.id);
});

sticky.listen(server, 3000);
Calling sticky.listen() already spawns workers for you. See the listen() implementation below:
function listen(server, port, options) {
  if (!options)
    options = {};

  if (cluster.isMaster) {
    var workerCount = options.workers || os.cpus().length;

    var master = new Master(workerCount, options.env);
    master.listen(port);
    master.once('listening', function() {
      server.emit('listening');
    });

    return false;
  }

  return true;
}
The line var master = new Master(workerCount, options.env) is responsible for spawning workers. See the Master() implementation below:
function Master(workerCount, env) {
  net.Server.call(this, {
    pauseOnConnect: true
  }, this.balance);

  this.env = env || {};

  this.seed = (Math.random() * 0xffffffff) | 0;
  this.workers = [];

  debug('master seed=%d', this.seed);

  this.once('listening', function() {
    debug('master listening on %j', this.address());

    for (var i = 0; i < workerCount; i++)
      // spawning workers
      this.spawnWorker();
  });
}
So when you call sticky.listen(server, port) you are effectively calling cluster.fork(); hence you should not explicitly call fork() again.
Now your code should look like:
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');

var server = require('http').createServer(function(req, res) {
  res.end('worker: ' + cluster.worker.id);
});

// sticky.listen() will return false if Master
if (!sticky.listen(server, 3000)) {
  // Master code
  server.once('listening', function() {
    console.log('server started on 3000 port');
  });
} else {
  // Worker code
}
One important thing to remember is that each spawned worker has its own event loop and memory, so resources are not shared between them.

You can use Redis or other npm modules such as memored to share resources among different workers; a small sketch follows.
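For instance, a minimal sketch with the ioredis client, assuming a Redis server on localhost:6379 (the key name is just an example):

const Redis = require('ioredis');
const redis = new Redis(); // defaults to localhost:6379

// any worker can write a value...
function saveSharedValue(key, value) {
  return redis.set(key, JSON.stringify(value));
}

// ...and any other worker can read it back
function loadSharedValue(key) {
  return redis.get(key).then(function (raw) {
    return raw ? JSON.parse(raw) : null;
  });
}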
Hope this solves both your issues.
I think you are confusing sticky session with shared memory store.
Let me try to help:
The sticky-session module balances requests using their IP address. Thus a client will always connect to the same worker server, and socket.io will work as expected, but across multiple processes!
Implementing sticky sessions means that you now have multiple nodes accepting connections. However, it DOES NOT guarantee that these nodes SHARE the same memory, as each worker has its own event loop and internal memory state.
In other words, data being processed by one node may not be available to other worker nodes, which explains the issue you pointed out.
...another main issue that comes with this is the app.locals object, which is used to store variables for an app instance; multiple server instances cause this to break, as values will be different across different instances, so this approach causes a big issue and the app breaks...
Thus, to resolve this, we would require using something like Redis so that data can be shared across multiple nodes.
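As one concrete illustration for the session case: with express-session plus a Redis-backed store, every worker reads and writes the same session data. This sketch assumes the older connect-redis 3.x/4.x factory-style API (newer releases export the store class directly):

const express = require('express');
const session = require('express-session');
const RedisStore = require('connect-redis')(session); // 3.x/4.x style
const Redis = require('ioredis');

const app = express();
app.use(session({
  store: new RedisStore({ client: new Redis() }), // shared by all workers
  secret: 'replace-me', // placeholder
  resave: false,
  saveUninitialized: false
}));

app.get('/', function (req, res) {
  // persists no matter which worker handles the request
  req.session.views = (req.session.views || 0) + 1;
  res.send('views: ' + req.session.views);
});

app.listen(3000);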
Hope this helps!
If I understand your questions correctly, you are dealing with in-memory data storage or session storage. This is one of the known problems with session-based authentication in a multi-node setup or cluster: suppose you make a call to Node A and get a session called sessionA, but the next call goes to Node B; Node B knows nothing about sessionA. People try to solve this with sticky sessions, but that is not enough. A good practice is to use an alternative approach such as JWT or OAuth2. I prefer JWT for service-to-service communication. JWT does not store anything and is stateless, so it works brilliantly with REST, which is also stateless. The specification of JWT is here: https://www.rfc-editor.org/rfc/rfc7519. If you need some sort of refresh token, then you do need to consider storage, which can be anything like Redis, MongoDB, or any SQL-based DB. For further clarification about JWT in Node.js (a short sketch follows the links):
https://jwt.io/
https://jwt.io/introduction/
https://www.npmjs.com/package/jsonwebtoken
https://cloud.google.com/iot/docs/how-tos/credentials/jwts#iot-core-jwt-refresh-nodejs
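A minimal sketch with the jsonwebtoken package (the secret and claim names below are placeholders):

const jwt = require('jsonwebtoken');

const SECRET = 'replace-with-a-real-secret'; // use an environment variable in practice

// issue a token after the user authenticates (any worker can do this)
function issueToken(userId) {
  return jwt.sign({ sub: userId }, SECRET, { expiresIn: '1h' });
}

// verify it on any other worker -- no shared session storage needed
function verifyToken(token) {
  try {
    return jwt.verify(token, SECRET); // returns the decoded payload
  } catch (err) {
    return null; // expired or tampered token
  }
}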
I created a server with Node.js:
var net = require('net');
var PORT = 8181;

var server = net.createServer(function(socket) {
  console.log(this.address());

  socket.on('data', function(data) {
    var msg = data.toString().replace(/\n$/, '');
    console.log('got: ' + msg);
  });

  process.stdin.on('readable', function() {
    var chunk = process.stdin.read();
    if (chunk !== null) {
      socket.write(chunk);
    }
  });

  socket.write('heyyo\n');
});

server.listen(PORT);
Now, when multiple connections come in, this server sends the typed-in line only to the first connection.
I have two questions:
what is a standard way to handle this, i.e. to store the incoming sockets into an array?
exactly what happens that causes the readable event not to reach the other connections' callback function?
I would highly recommend using a library like socket.io. It makes handling connect/disconnect, as well as placing sockets in rooms, very simple. Additionally, you can get the full list of available rooms and connected sockets through the adapter class it offers. A functional example is available in the docs.
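As a rough sketch of that idea applied to the stdin example above (socket.io assumed to be installed; the event names are my own):

const http = require('http').createServer().listen(8181);
const io = require('socket.io')(http);

io.on('connection', function (socket) {
  socket.emit('message', 'heyyo\n');
  socket.on('message', function (msg) {
    console.log('got: ' + msg);
  });
});

// one stdin listener, broadcasting to every connected socket
process.stdin.on('readable', function () {
  const chunk = process.stdin.read();
  if (chunk !== null) {
    io.emit('message', chunk.toString());
  }
});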
I'm building a settings manager for my http server. I want to be able to change settings without having to kill the whole process. One of the settings I would like to be able to change is change the port number, and I've come up with a variety of solutions:
Kill the process and restart it
Call server.close() and then do the first approach
Call server.close() and initialize a new server in the same process
The problem is, I'm not sure what the repercussions of each approach are. I know that the first will work, but I'd really like to accomplish these things:
Respond to existing requests without accepting new ones
Maintain data in memory on the new server
Lose as little uptime as possible
Is there any way to get everything I want? The API for server.close() gives me hope:
server.close(): Stops the server from accepting new connections.
My server will only be accessible by clients I create and by a very limited number of clients connecting through a browser, so I will be able to notify them of a port change. I understand that changing ports is generally a bad idea, but I want to allow for the edge-case where it is convenient or possibly necessary.
P.S. I'm using connect if that changes anything.
P.P.S. Relatively unrelated, but what would change if I were to use UNIX server sockets or change the host name? This might be a more relevant use-case.
P.P.P.S. This code illustrates the problem of using server.close(). None of the previous servers are killed, but more are created with access to the same resources...
var http = require("http");

var server = false,
    curPort = 8888;

function OnRequest(req, res) {
  res.end("You are on port " + curPort);
  CreateServer(curPort + 1);
}

function CreateServer(port) {
  if (server) {
    server.close();
    server = false;
  }
  curPort = port;
  server = http.createServer(OnRequest);
  server.listen(curPort);
}

CreateServer(curPort);
Resources:
http://nodejs.org/docs/v0.4.4/api/http.html#server.close
I tested the close() function. It seems to do absolutely nothing; the server still accepts connections on its port. Restarting the process was the only way for me.
I used the following code:
var http = require("http");
var server = false;

function OnRequest(req, res) {
  res.end("server now listens on port " + 8889);
  CreateServer(8889);
}

function CreateServer(port) {
  if (server) {
    server.close();
    server = false;
  }
  server = http.createServer(OnRequest);
  server.listen(port);
}

CreateServer(8888);
I was about to file an issue on the Node GitHub page when I decided to test my code thoroughly to see if it really is a bug (I hate filing bug reports when it's user error). I realized that the problem only manifests itself in the browser, because browsers apparently do some kind of HTTP keep-alive where they can still reach dead ports because there's still an open connection to the server.
What I've learned is this:
Browser caches keep ports alive unless the process on the server is killed
Utilities that do not keep caches by default (curl, wget, etc) work as expected
HTTP requests in node also don't keep the same type of cache that browsers do
For example, I used this code to prove that node http clients don't have access to old ports:
Client-side code:
var http = require('http'),
    client,
    request;

function createClient(port) {
  client = http.createClient(port, 'localhost');

  request = client.request('GET', '/create');
  request.end();

  request.on('response', function (response) {
    response.on('end', function () {
      console.log("Request ended on port " + port);
      setTimeout(function () {
        createClient(port);
      }, 5000);
    });
  });
}

createClient(8888);
And server-side code:
var http = require("http");
var server,
curPort = 8888;
function CreateServer(port){
if(server){
server.close();
server = undefined;
}
curPort = port;
server = http.createServer(function (req, res) {
res.end("You are on port " + curPort);
if (req.url === "/create") {
CreateServer(curPort);
}
});
server.listen(curPort);
console.log("Server listening on port " + curPort);
}
CreateServer(curPort);
Thanks everyone for the responses.
What about using cluster?
http://learnboost.github.com/cluster/docs/reload.html
It looks interesting!
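The linked LearnBoost cluster module is quite old now, but the underlying idea can be approximated with Node's built-in cluster module: replace workers one at a time so the master keeps accepting connections while the worker code (and its settings) changes. A rough, untested sketch:

const cluster = require('cluster');
const http = require('http');

if (cluster.isMaster) {
  for (let i = 0; i < 2; i++) cluster.fork();

  // on SIGUSR2, replace every worker with a freshly forked one
  process.on('SIGUSR2', function () {
    Object.keys(cluster.workers).forEach(function (id) {
      const oldWorker = cluster.workers[id];
      cluster.fork().on('listening', function () {
        oldWorker.disconnect(); // finish in-flight requests, then exit
      });
    });
  });
} else {
  http.createServer(function (req, res) {
    res.end('served by worker ' + cluster.worker.id);
  }).listen(8888);
}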