I have this code (copied from GitHub) which creates four (the number of CPU cores) child instances of the master. I have used sticky sessions so my socket connection reaches the right backend process, and the Redis adapter as a message broker. But I can't understand how it works.
Can someone please explain, line by line, what is happening?
var http = require('http'),
    express = require('express'),
    socketIO = require('socket.io'),
    cluster = require('cluster'),
    sticky = require('sticky-session'),        // missing from the original snippet, but sticky.listen() is used below
    socket_redis = require('socket.io-redis'),
    port = process.env.PORT || 3000;
var app = express(), io;
var server = http.Server(app);
app.set('view engine', 'ejs');
app.get('/', function (req, res) {
    // Log which worker handled this HTTP request
    console.log('send message by worker: ' + cluster.worker.id);
    res.render('abhi');
});
io = socketIO(server);
// The Redis adapter lets the socket.io instances in different workers broadcast to each other
io.adapter(socket_redis({ host: 'localhost', port: '6379' }));
io.on('connection', s => {
    s.on('new', data => {
        console.log('hey you');
    });
});
// Add your socket.IO connection logic here
// sticky.listen() returns false in the master process and true in each worker
if (!sticky.listen(server, port)) {
    // Master: forks one worker per CPU and routes each client to the same worker by hashing its IP
    server.once('listening', function () {
        console.log('Server started on port ' + port);
    });
    if (cluster.isMaster) {
        console.log('Master server started on port ' + port);
    }
} else {
    // Worker: serves the actual HTTP and socket traffic
    console.log('- Child server started on port ' + port + ' case worker id=' + cluster.worker.id);
}
It's hard to explain sticky sessions based on just the code you provided, but in short: socket.io keeps the state of each connected socket inside the process that accepted it. So any packet/request for that socket has to reach the same process you made your handshake with, or it will most likely fail.
If you are looking for sticky sessions and clustering, I'd advise you to check out socket.io-redis; it makes things quite easy and smooth.
And to run your Node.js process multiple times and utilize your entire CPU, you can either use Docker and spin up a container per core (which I did a while ago and it worked quite well), or you can use Node.js's cluster module.
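If it helps to see those pieces together, here is a minimal sketch (not your exact code) that combines the cluster module with the socket.io-redis adapter; the worker count, port, event names, and Redis address are assumptions:
// Sketch: one socket.io server per CPU core, all wired to the same Redis instance
// so broadcasts reach clients connected to any worker.
var cluster = require('cluster');
var os = require('os');
var http = require('http');
var socketIO = require('socket.io');
var redisAdapter = require('socket.io-redis');
if (cluster.isMaster) {
    // Fork one worker per core; a sticky load balancer in front is still needed
    // so a given client always lands on the same worker during its handshake.
    os.cpus().forEach(function () { cluster.fork(); });
} else {
    var server = http.createServer();
    var io = socketIO(server);
    io.adapter(redisAdapter({ host: 'localhost', port: 6379 })); // assumes Redis on localhost:6379
    io.on('connection', function (socket) {
        socket.on('new', function () {
            io.emit('news', 'seen by clients on every worker'); // relayed through Redis to all workers
        });
    });
    server.listen(3000); // workers share the port because the cluster master forked them
}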
Related
I created an app with Node.js and Socket.IO. It works fine on localhost (port 3000), but when I deploy it to my server, the app runs on port 3000 there, yet the client side throws a timeout. How can I solve it?
var fs = require('fs');
var https = require('https');
var options = {
key: fs.readFileSync('ssl.my-key.key'),
cert: fs.readFileSync('ssl.my-cert.crt')
};
var server = https.createServer(options);
var io = require('socket.io').listen(server);
var port = 3000;
const database = require('./Database');
io.on('connection', (socket) => {
socket.on('message', async (msg) => {
// I do some action here.
});
socket.on('disconnect', (msg) => {
// some action in here too
});
});
server.listen(port, () => {
console.log('listening on *:' + port);
});
It seems like your issue is with port forwarding.
In order for your server to be publicly accessible, the relevant port needs to be forwarded appropriately, both locally (in the machine's firewall) and on the router.
Check this link to learn more about how to port forward on Linux: https://linuxacademy.com/guide/11630-internal-port-forwarding-on-linux-using-the-firewall/
And this one to learn more about router port forwarding, though the details will depend on your router:
https://www.noip.com/support/knowledgebase/general-port-forwarding-guide/
However, I don't recommend hosting on your own machine(s). I suggest you use Heroku; you can opt in for their free tier, so you don't need to pay, and Heroku's own documentation covers deploying Node.js apps.
Let's debug your Node.js app:
1) Add some logging around the database connection and http.createServer, and catch/log the error wherever those calls can fail.
2) Open port 3000 in the CentOS firewall before starting your Node.js app.
3) Test with the server's domain name or IP address, not just localhost.
As per your comment you got a connection timeout; that means the client is trying to reach the Node.js server on port 3000 but cannot connect, so the request eventually fails with a timeout.
Also send the sample code of your main index file so we can investigate your problem.
Thanks.
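For point 1, here is a rough sketch of what that extra logging could look like, reusing the HTTPS options and the ./Database module from your snippet (the log wording is just an example):
// Sketch: make startup, listen errors, and database failures visible in the logs.
var fs = require('fs');
var https = require('https');
var options = {
    key: fs.readFileSync('ssl.my-key.key'),
    cert: fs.readFileSync('ssl.my-cert.crt')
};
var server = https.createServer(options);
server.on('error', function (err) {
    // EADDRINUSE, EACCES, etc. end up here instead of failing silently
    console.error('Server error:', err);
});
server.listen(3000, '0.0.0.0', function () {
    console.log('HTTPS server listening on', server.address());
});
try {
    // The ./Database module from the question; log whether it loads at all
    const database = require('./Database');
    console.log('Database module loaded');
} catch (err) {
    console.error('Database setup failed:', err);
}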
Please excuse any noobiness, I'm learning. :)
I have Socket.IO set up so that I can use io.sockets.emit inside of my routes, and I have that working. There are a few problems.
(SOLVED? SEE EDIT 3) To use it, I cannot start with the word socket; I have to start with io or I get "ReferenceError: socket is not defined." I'd like to be able to use socket.broadcast.emit to emit the event to all clients except the current user. Right now I'm having to do a check on the client side to skip the event if it's the current user, and it's becoming a real headache as I have to emit more events as my project progresses.
(SOLVED, SEE EDIT 1 & 2) I have to run the application with node app.js and restart the server manually every time I make a server-side change. When I run nodemon, I get "Port 3000 is already in use." I feel that this must be related to the following...
(SOLVED, SEE EDIT 2) When pushing to Heroku, I have the port from the code below changed from 3000 to 80 in bin/www and app.js, but it does not work (I can see a 404 error for sockets in the console). If this and #2 are caused by dealing with http/ports in both places, how do I properly set this up and why does node app.js work?
I only need to run Socket.IO on the route shown below (battlefield). Am I already doing this with require('./routes/battlefield')(io)?
bin/www
var app = require('../app');
var port = normalizePort(process.env.PORT || '3000');
app.set('port', port);
var server = http.createServer(app);
server.listen(port);
app.js
var app = express();
var http = require('http').Server(app);
http.listen(3000);
var io = require('socket.io')(http);
app.set('socketio', io);
var battlefield = require('./routes/battlefield')(io);
battlefield.js
var express = require('express');
var router = express.Router();
var returnRouter = function(io) {
router.get('/', function(req, res, next) {
// other stuff
io.sockets.emit('message', 'This works');
socket.broadcast.emit('message', 'Socket is undefined');
})
return router;
};
module.exports = returnRouter;
I tried wrapping my routes in io.on('connection', function (socket) { to be able to use socket, and instead of 'Socket is undefined,' the event does not occur.
var returnRouter = function(io) {
io.on('connection', function (socket) {
router.get('/', function(req, res, next) {
// other stuff
socket.emit('message', 'This is never emitted');
})
})
return router;
};
I apologize for such a lengthy question. THANK YOU for any help! 💜
EDIT1: Writing out this question helped me understand the problem better. I commented out server.listen(port); in my bin/www and nodemon now works. However, the app crashes on Heroku. My Procfile is web: node ./bin/www... does that need to be changed?
EDIT2: After figuring out Edit1 and a bit of Googling, I found that I can't have server.listen(); (bin/www) and http.listen(3000); (app.js).
In bin/www, I removed server.listen();.
In app.js, for clarity's sake I changed var http = ... to var server = ... and had it listen for process.env.PORT || '3000';, taken from bin/www. I also removed app.set('socketio', io); because it looks like that was doing nothing... I wonder why it was in there.
app.js
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var port = process.env.PORT || '3000';
server.listen(port);
This also makes Heroku work because of process.env.PORT, hurray! I'm guessing node app.js worked because I was initializing the app with app.js, I guess bin/www is not executed when you do that?
I still need help with #1 (using socket.broadcast.emit) 😇.
EDIT 3: Well, it took me literally the entire day but I believe I have it figured out with one quirk. Of course I couldn't use socket, it is a parameter given on connect. I also need to access socket across different routes and found this SO question. This is what I ended up doing in battlefield.js:
var returnRouter = function(io) {
var socket;
router.get('/', authenticatedUser, function(req, res, next) {
io.on('connection', function(client){
socket = client;
});
// other stuff
res.render('battlefield', {/* data */});
setTimeout(function(){socket.emit('test', 'It works!')}, 500);
});
router.post('/', function(req, res, next) {
// socket can be accessed
});
return router;
};
module.exports = returnRouter;
(Note: I took out a lot of my own code, so I don't know if this is copy-and-paste ready, and you should probably check that socket is not null.)
Without setTimeout, socket is undefined on GET '/'. To my understanding, the page must render first... Strange that 200 ms sometimes doesn't work for me and 500 ms does. I can leave it at 500 ms, but this is for a game so time is pretty important.
My questions now are:
Can this be improved / is there a way I can do this without setTimeout? Am I 'connecting' clients properly with this code and am I (question #4 up there^) using Socket.IO efficiently?
P.S. If no one answers ^ these questions, I'll edit this, answer the question, and accept my answer as best answer.
Using sockets inside Node routing is not that useful.
Whenever you navigate to a different page (e.g. www.example.com --> www.example.com/some-page), your front-end variables are gone and you need to resend them. This works great if you pass an object along with the GET request for that page, but it doesn't need sockets.
It's done like this in your router file:
var canAlsoBePassed = {some: "things"};
router.get('/', function(req, res, next) {
res.render('index', { items: "Can be passed directly", variables: canAlsoBePassed });
});
For sockets the best kind of applications are for single page apps or to replace AJAX requests. Another great thing sockets allows is for the server to be able to push information without the client asking for it.
To answer your question about setTimeout: no, you don't need it.
Make sure the socket script on your client side waits for the document to be loaded, e.g.:
$(document).ready(function () { var socket = io(); });
When an io.on('connection') event fires on your server side, you know you have a new client to serve.
Emit an event from the server side, something like a welcome event, that makes the client join a specific room. Once you have them in that room you can listen for any events emitted to that room.
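As a rough sketch of that flow (the 'welcome' and 'join-room' event names are purely illustrative, not a fixed API):
// Server side: greet new clients, let them ask to join a room, then target that room.
io.on('connection', function (socket) {
    socket.emit('welcome', { id: socket.id });
    socket.on('join-room', function (roomName) {
        socket.join(roomName); // rooms live on the server; the client just asks to join
        io.to(roomName).emit('message', socket.id + ' joined ' + roomName);
    });
});
// Client side (browser):
// var socket = io();
// socket.on('welcome', function () { socket.emit('join-room', 'battlefield'); });
// socket.on('message', function (msg) { console.log(msg); });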
See socket.io official info
Custom namespaces
To set up a custom namespace, you can call the of function on the server-side:
var nsp = io.of('/my-namespace');
nsp.on('connection', function(socket){
console.log('someone connected');
});
nsp.emit('hi', 'everyone!');
On the client side, you tell Socket.IO client to connect to that namespace:
var socket = io('/my-namespace');
Might not be the most accurate answer to your questions but I hope it pushes you in the right direction.
I've got a Vagrant box set up to port-forward a socket.io application from internal port 5000 to external port 8081; when I try to connect from the client, it starts long-polling the connection, but I don't see any kind of response from the server and the server app never registers a connection attempt. The connection doesn't fail or return an error response code, though; it just returns a 200 with a blank response.
// Import utilities
var http = require('http'),
socketIO = require('socket.io'),
querystring = require('querystring');
// Init servers/external connections
var server = http.createServer(function baseHandler(req, res) {
// console.log(req.headers);
res.writeHead(200);
res.end(JSON.stringify({
message: 'This server only supports WebSocket connections'
}));
}),
io = socketIO(server);
server.listen(process.env.socket_port || 5000, function() {
var sockets = [];
console.log('App connected');
});
io.on('connection', function (socket) {
console.log('Socket connected');
console.log('Socket in rooms '+ socket.rooms.join(', '));
});
The same app works just fine when I'm trying to connect from the app running directly on my PC, so my code doesn't seem to be the problem here, especially given how it's basically duplicating the basic example in the docs; not really sure how to solve this from here.
This is one of those really stupid bugs which crop up when you're working on two different problems with the same codebase at the same time. Here's the client-side code line which was breaking:
var socket = io('127.0.0.1:8081/?access_token=1d845e53c4b4bd2e235a66fe9c042d75ae8e3c6ae', {path: '/auth/socket.io'});
Note that the path key is set to point under a subdirectory, /auth, which is a leftover from my earlier work getting nginx to proxy that folder to the internal port the server was running on.
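Presumably the fix is just to drop that leftover path option so the client falls back to the default /socket.io path, something like:
// Corrected client-side connection: same URL and access token, default socket.io path
var socket = io('127.0.0.1:8081/?access_token=1d845e53c4b4bd2e235a66fe9c042d75ae8e3c6ae');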
I created a cluster-based app with reference to this question.
But I started facing issues with session handling: how do I use sticky-session in Express.js with cluster?
I was trying to use this npm module, but it resulted in the same situation. How do I fix this session issue?
sticky(http.createServer(app).listen(app.get('port'), function () {
console.log('Express server listening on port ' + app.get('port'));
}););
Finally found a solution; just try this code. It maintains stickiness and still uses all the CPUs (processes) for other clients. You can run an Express cluster with sticky sessions using the following code. You can get sticky-session here: https://github.com/indutny/sticky-session
var http = require('http');
var cluster = require('cluster'); // Only required if you want the worker id
var sticky = require('sticky-session');
var express = require('express');
var app = express();
app.get('/', function (req, res) {
console.log('worker: ' + cluster.worker.id);
res.send('Hello World!');
});
var server = http.createServer(app);
sticky.listen(server,3000);
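If you also want to log whether the master or a worker started, sticky.listen returns false in the master process and true in each worker, so you can branch on its return value; a rough variant of the snippet above:
// Variant: branch on sticky.listen() so master and workers log their own startup.
var http = require('http');
var cluster = require('cluster');
var sticky = require('sticky-session');
var express = require('express');
var app = express();
app.get('/', function (req, res) {
    res.send('Hello World!');
});
var server = http.createServer(app);
if (!sticky.listen(server, 3000)) {
    // Master process: forks the workers and hands connections to them
    server.once('listening', function () {
        console.log('Sticky server started on port 3000');
    });
} else {
    // Worker process: handles the actual requests
    console.log('Worker ' + cluster.worker.id + ' started');
}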
It has nothing to do with Express.
You just forgot the listen() on the sticky function.
sticky(
    http.createServer(app).listen(app.get('port'), function () {
        console.log('Express server listening on port ' + app.get('port'));
    })
).listen(app.get('port'), function () {
    console.log('Sticky server started on port ' + app.get('port'));
});
So I switched from child_process.fork to cluster.fork to spawn the chatbots I run, since cluster workers can share TCP ports. However, I can't seem to get the workers to listen on the same port.
code:
var cluster = require('cluster');
var express = require('express');
if (cluster.isMaster) {
cluster.fork({path:'hello'});
cluster.fork({path:'goodbye'});
} else {
var web = express();
web.get("/"+process.env.path,function (req,res){
return res.end("hello, "+process.env.path);
});
web.listen(3000);
}
This is half working. I get no EADDRINUSE errors now, but only one of the paths is showing up.
It's not working for several reasons:
express.express doesn't exist. You want the plain express() call.
process.evn doesn't exist. You want process.env.
You are not returning anything to the client in your route. Use res.end or another response method; check the http module documentation or the Express one.
The workers can share TCP connections; the master cannot.
Some code that works:
var cluster = require('cluster');
var express = require('express');
if (cluster.isMaster) {
cluster.fork({path:'hello'});
} else {
//this is in a required file
var web = express();
web.get("/"+process.env.path, function(req,res){
res.end("hello world!");
});
web.listen(3000);
}
If you want to use more than one worker, just fork more in the if(cluster.isMaster) condition.
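For example, a sketch (same idea, not your chatbot code) that forks one identical worker per CPU core:
var cluster = require('cluster');
var os = require('os');
var express = require('express');
if (cluster.isMaster) {
    // One identical worker per core; they all register the same route,
    // so any worker can answer any request on the shared port.
    os.cpus().forEach(function () {
        cluster.fork({ path: 'hello' });
    });
} else {
    var web = express();
    web.get('/' + process.env.path, function (req, res) {
        res.end('hello from worker ' + cluster.worker.id);
    });
    web.listen(3000); // all workers share port 3000 via the cluster master
}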