So I switched from child_process.fork to cluster.fork to spawn the chatbots that I run, since cluster workers can share TCP ports. However, I can't seem to get the clusters to listen on the same port.
code:
var cluster = require('cluster');
var express = require('express');
// Master: fork one worker per bot, passing the route path in via the
// worker's environment (the object given to cluster.fork() is merged
// into the worker's process.env, which is why the worker can read
// process.env.path below).
if (cluster.isMaster) {
cluster.fork({path:'hello'});
cluster.fork({path:'goodbye'});
} else {
// Worker: registers ONLY its own route, then listens on the shared
// port 3000 (the cluster master shares the port with the workers,
// which is why there is no EADDRINUSE).
var web = express();
web.get("/"+process.env.path,function (req,res){
return res.end("hello, "+process.env.path);
});
// NOTE(review): connections are distributed across ALL workers, but a
// given worker only knows its own path — a request for /hello that is
// handed to the 'goodbye' worker will 404. Likely why only one path
// appears to work; confirm against the cluster docs' scheduling policy.
web.listen(3000);
}
This is half working. I get no EADDRINUSE errors now, but only one of the paths is showing up.
It's not working for lots of reasons.
express.express doesn't exist. You are looking for the plain express() method.
process.evn doesn't exist. You are looking for process.env.
You are not returning anything to the client in your route. You should use res.end or another method to respond to the client. Check the http module documentation or the Express one.
The workers can share tcp connections. The master can not.
Some code that works:
// A single worker is forked by the master; the worker builds an Express
// app whose one route comes from the `path` value passed through the
// worker's environment, then listens on port 3000.
const cluster = require('cluster');
const express = require('express');

if (cluster.isMaster) {
  cluster.fork({ path: 'hello' });
} else {
  // This part lives in a required file in the real project.
  const web = express();
  web.get(`/${process.env.path}`, (req, res) => {
    res.end("hello world!");
  });
  web.listen(3000);
}
If you want to use more than one worker, just fork more in the if(cluster.isMaster) condition.
Related
I am trying to make a simple server with socket.io and express and connect to it through a website.
When I followed a tutorial on socket.io with localhost, everything worked fine, but when I put the server on a vserver and tried to connect to it, I got this error:
Failed to load resource: net::ERR_SSL_PROTOCOL_ERROR
as well as:
GET https://54.53.0.254:47185/socket.io/?EIO=4&transport=polling&t=O09jjrs net::ERR_SSL_PROTOCOL_ERROR
here is my server code:
// Express app whose underlying http server (the return value of
// app.listen) is handed to socket.io, so both share port 47185.
const express = require('express');
const app = express();
const server = app.listen(47185);
const socket = require('socket.io');
const io = socket(server)
console.log('server running on port 47185');
// Register the handler that runs once per newly connected client.
io.sockets.on('connection', newConnection);
function newConnection(socket) {
console.log('new connection: ' + socket.id);
// Relay 'input' events from one client to all of the others.
socket.on('input', inputLog)
function inputLog(data) {
// broadcast.emit sends to every connected socket EXCEPT the sender.
socket.broadcast.emit('input', data);
console.log(data);
}
}
and here is my client code (this is all that relates to socket.io, the rest is just for the website)
// Client-side connection options.
// NOTE(review): rejectUnauthorized only has an effect in the Node.js
// client; browsers enforce their own TLS rules and ignore it.
var options = {
rejectUnauthorized:false
}
var socket;
// NOTE(review): no protocol (http:// or https://) is given here, so the
// environment picks one — a page served over https will attempt an SSL
// handshake, which matches the ERR_SSL_PROTOCOL_ERROR above. Also this
// IP (89.58.0.199) differs from the one in the error log (54.53.0.254);
// verify which server is intended.
socket = io.connect('89.58.0.199:47185', options);
// Display any input broadcast by other clients.
socket.on('input', foreignInput)
function foreignInput(data) {
terminal_animate('\n' + data)
}
I have tried many different fixes and googled everything I can think of, and I'm just not sure what the problem is.
can anyone help me out with this issue? thanks in advance.
In the documentation, according to the Client Initialization part, in node.js you should provide the protocol when connecting to the server.
// From the socket.io "Client Initialization" docs — in Node.js the
// protocol (https:// or wss://) must be spelled out explicitly:
// the following forms are similar
const socket = io("https://server-domain.com");
const socket = io("wss://server-domain.com");
const socket = io("server-domain.com"); // only in the browser when the page is served over https (will not work in Node.js)
The first two example shows the secure https/wss as protocol, for that you need to serve the required files from the server, example in the documentation.
With http/ws as protocol it should work, but the communication will not be secure.
The Server Initialization / With Express shows an example to call .listen on the return value of createServer from the http module, with the app given as a parameter.
const express = require("express");
const { createServer } = require("http");
const { Server } = require("socket.io");
const app = express();
// Create the http server yourself, wrapping the Express app, so the same
// server instance can be shared with socket.io below.
const httpServer = createServer(app);
const io = new Server(httpServer, { /* options */ });
io.on("connection", (socket) => {
// ...
});
// Listen on the shared server — NOT app.listen(3000), which (per the
// caution below) creates a separate HTTP server that socket.io is not
// attached to.
httpServer.listen(3000);
With a caution that says:
Using app.listen(3000) will not work here, as it creates a new HTTP server.
Currently I am introducing worker_threads to a game I am developing. The idea is to have each game room in a separate thread, so that they can be processed in parallel (yeah, I know that will only really happen if there aren't more threads than cores).
However, I cannot get socket.io to work with worker_threads. So far I have tried the following:
1. Create the socket.io server within the server:
server.js
// Main thread: serves the static files and spawns one worker per game room.
const { Worker, isMainThread } = require("worker_threads");
const express = require("express");
const app = express();

app.use(express.static(__dirname));

// POST /newGame?room_code=XYZ — spawn a dedicated worker for the room.
app.post("/newGame", (request, response) => {
  let roomCode = request.query.room_code;
  const worker = new Worker("./server-worker.js", {
    workerData: {
      roomCode: roomCode
    },
  });
  // FIX: the original handler never replied, so the client's POST hung
  // until it timed out. Acknowledge the room creation explicitly.
  response.status(201).send({ room_code: roomCode });
});

app.listen(8080, () => {
  console.info("Server started on http://localhost:8080");
});
server-worker.js
// Worker thread: attempts to run its own socket.io server for one room.
const { parentPort, workerData, threadId } = require("worker_threads");
const server = require("http").createServer();
const io = require("socket.io")(server);

// NOTE(review): the main thread already listens on 8080 (server.js above),
// so binding 8080 again here cannot work for more than one process/room.
server.listen(8080, () => {
  console.info("Server socket.io on port 8080");
});

io.on("connection", (socket) => {
  // FIX: the destructured binding above is `threadId`; the original
  // interpolated `threadID`, which throws a ReferenceError on the first
  // connection.
  console.log(`Client with ID ${socket.id} connected to thread with ID ${threadId}`);
});
This results in the browser logging this:
GET http://localhost:8080/socket.io/?EIO=3&transport=polling&t= 404 (Not Found)
Also even if I got Express to forward the polling request to the worker I am guessing that I would not be able to open more than one room since the port would already be in use.
2. Create socket.io instance in main thread and pass it on
server.js
const { Worker, isMainThread } = require("worker_threads");
const express = require("express");
const app = express();
app.use(express.static(__dirname));
const server = require("http").createServer(app);
const io = require("socket.io")(server);
app.post("/newGame", (request, response) => {
let roomCode = request.query.room_code;
const worker = new Worker("./server-worker.js", {
workerData: {
roomCode: roomCode,
io: io
},
});
});
However, this does not work since I get a DataCloneError because worker_threads can only send native objects to workers.
Questions:
Is there an easier way to accomplish what I am doing? I.e.: am I just using the wrong frameworks?
Is there a way to get socket.io working with threads? I already saw a documentation on using it with node.js clusters using a Redis instance but that seems overkill for the problem at hand since I do not have multiple processes/ nodes.
Node's worker threads are not suitable for your purpose. They're for doing long-running computations, not handling network connections.
Also, it probably does not make sense to assign your rooms to particular server resources. Node is good at concurrently handling multiple app contexts and so is socket.io.
To scale up your app to use more processor cores, node offers clustering. To use a cluster you'll need to sort out session stickiness.
I have this code (copied from GitHub) which creates four (the number of CPU cores) child instances of the master. I have used sticky sessions so my socket connections reach the right backend worker, and the Redis adapter as a message broker. But I can't understand how it is working.
Can someone please explain, line by line, what is happening?
// NOTE(review): these comma-chained assignments have no leading `var`
// in this paste, so as written they create implicit globals — a leading
// declaration keyword was probably lost when the code was copied.
http = require('http'),
express = require('express'),
socketIO = require('socket.io'),
cluster = require('cluster'),
port = process.env.PORT || 3000;
socket_redis=require('socket.io-redis')
var app = express(), io;
server = http.Server(app);
app.set('view engine','ejs');
// Every worker registers the same route; which worker serves a given
// request is decided by the cluster layer (the worker id is logged to
// make that visible).
app.get('/', function (req, res) {
console.log('send message by worker: ' + cluster.worker.id);
res.render('abhi');
});
io = socketIO(server);
// Redis adapter: lets socket.io events emitted in one worker reach
// sockets connected to other workers, via a local Redis on port 6379.
io.adapter(socket_redis({host:'localhost',port:'6379'}))
io.on("connection",s=>{
s.on("new",data=>{
console.log("hey you")
})
})
// Add your socket.IO connection logic here
// NOTE(review): `sticky` is used below but never required anywhere in
// this snippet — a line like `sticky = require('sticky-session')` is
// missing. sticky's job is to route all of one client's connections to
// the same worker, which socket.io's polling handshake needs.
if(!sticky.listen(server,port))
{
// This branch logs for the master process (the snippet's own log lines
// distinguish master vs. child below).
server.once('listening', function() {
console.log('Server started on port '+port);
});
if (cluster.isMaster) {
console.log('Master server started on port '+port);
}
}
else {
console.log('- Child server started on port '+port+' case worker id='+cluster.worker.id);
}
It's hard to explain sticky sessions based on the code you provided. But here is a short explanation: socket.io keeps each connected socket as state in the process that accepted it. So when you send a packet/request to a socket.io server, that packet/request has to reach the process/server where you made your handshake, or else it will most likely fail.
If you are looking for a sticky session and clustering I'd advise you to check out socket.io-redis, it makes things quite easy and smooth.
And to run your nodejs process many times to utilize your entire cpu, you can either use Docker and summon containers for each thread/cpu depending on your setup(which I did for a while ago and they seemed quite good) or you can use nodejs' cluster library.
I have two Express servers running on different ports. The first server (web server) provides a website and the second server (data server) emits data every 100 ms using socket.io.
When I open the website in the browser the data appears as expected. But when I then open the website in a second browser, all the emitted data in the first browser slows remarkably down until the second browser is done loading the site.
I should probably mention that the application is running on a Raspberry Pi. The website loading time is not so critical, but the data emission is.
Is there some way to run the node.js servers so they won't be affected by the load on each other?
Right now the web server is in the main app.js file that is run by node and the data server is a module required by the web server. Something like this:
app.js:
var express = require('express');
// The data server (the socket.io emitter) is pulled in as a module, so
// it runs in the SAME node process — and the same event loop — as this
// web server. That shared event loop is why load on one affects the other.
var data = require('data-server.js');
var app = express();
// Web server setup and handling (mostly Express defaults)
app.listen(3000);
module.exports = app;
data-server.js:
// Separate Express app on its own port (3001) for the socket.io data
// feed — note it still shares the event loop of whichever process
// require()s this file.
var app = require('express')();
var server = require('http').Server(app).listen(3001)
var io = require('socket.io')(server);
// Socket.io code that emits data every 100 ms
module.exports = io;
Update
Turns out it was my database query that was holding on to node so it couldn't emit because the emission function was waiting for the database to be free.
You should use the same port and the same node process for both your regular HTTP traffic and your websocket traffic. It would minimise the performance impact on your raspberry pi. Example :
var app = require('express')();
var http = require('http').Server(app);
// Attach socket.io to the same http server — one port serves both the
// regular HTTP traffic and the websocket/polling traffic.
var io = require('socket.io')(http);

app.get('/', function(req, res){
  // FIX: res.sendfile() is deprecated; res.sendFile() requires an
  // absolute path (the old form resolved against process.cwd()).
  res.sendFile(__dirname + '/index.html');
});

io.on('connection', function(socket){
  console.log('a user connected');
});

// Listen on the shared server (not app.listen), so socket.io stays
// attached to the server that is actually bound.
http.listen(3000, function(){
  console.log('listening on *:3000');
});
Method 1: Infinite-Servers.js : Start 1000 servers in one run
// Start one Express server per port from 3000 to 4000 (1001 servers).
const express = require('express')
// FIX: the original used `new Array(10)` even though indices 3000..4000
// are assigned below — a plain empty array reflects the sparse usage.
var app = []
for (let i = 3000; i <= 4000; i++) {
  app[i] = express()
  // Each server just reports which port it answered on.
  app[i].get('/', (req, res) => res.send({ 'port': i }))
  app[i].listen(i, () => console.log('port', i, "started"))
}
Method 2: MultiServers.js : Start 2 servers:
// Two independent Express servers in one process, on ports 3001 and 3002.
const express = require('express')
// FIX: the originals were assigned without any declaration keyword,
// creating implicit globals (and a ReferenceError in strict mode).
const server = express()
const server2 = express()

// Each server answers '/' with its own identifying payload.
server.get('/', (req, res) => res.send({ "server": "3001", "name": "aakash4dev", "data": "some data 1" }))
server2.get('/', (req, res) => res.send({ "server": "3002", "name": "aakash4dev", "data": "some data 2" }))
// NOTE(review): '/somethig' looks like a typo for '/something', but it is
// a public route path, so it is kept as-is.
server2.get('/somethig', (req, res) => res.send({ "server": "3002", "name": "aakash4dev", "data": "some data 2" }))

server.listen(3001)
server2.listen(3002)
Method 3: SeparateInfiniteServers.js
If you want to keep all files separately, e.g. port1.js port2.js port3.js .... port1000.js, all in different files and start all servers on a single run, then use module.exports (official docs: https://nodejs.org/api/modules.html#moduleexports ) along with the first InfiniteServers.js codes above. codes will look like this:
// Load port1.js .. port3.js and run one Express server per module.
const express = require('express')
const cors = require('cors')

var portsData = new Array(3)
var app = new Array(3)

for (let i = 1; i <= 3; i++) {
  // Each portN.js exposes its handler logic through module.exports.
  portsData[i] = require('./port' + i)
  app[i] = express()
  // cors() avoids the browser's "No 'Access-Control-Allow-Origin'" error.
  app[i].use(cors())
  app[i].get('/', (req, res) => {
    let req_requestedData = { "name": "aakash4dev" }
    // FIX: the original always called require('./port1.js') here, so all
    // three servers used port1's logic; use the module loaded for THIS
    // port instead.
    res.send(portsData[i].fun1({ req_requestedData, "our-Response": "Im server" }))
  })
  // NOTE(review): this listens on ports 1..3, which are privileged on
  // unix — the earlier examples suggest 3000 + i was probably intended.
  app[i].listen(i, () => console.log('port', i, "started"))
}
Note: import the cors module and use app[i].use(cors()) on each array entry (or app.use(cors()) on a normal server) to avoid the CORS policy error "No 'Access-Control-Allow-Origin' header is present".
I try to use JMeter running benchmark on pure node.js and express server.
And the result is :
node.js = 600 RPS, express = 1200 RPS
while running benchmark, node.js server always use 1 cpu, but express server uses all.
Does this mean that the Express server uses cluster by default?
UPDATE : benchmark code
node.js
// Plain-node benchmark target: a bare http server with no middleware.
const http = require('http');

const handler = (req, res) => {
  res.write('I heard you !');
  res.end();
};

http.createServer(handler).listen(8000);
Express (3.8)
// Express 3.x benchmark target (app.configure exists only in Express 3;
// it was removed in Express 4).
var express = require('express');
var app = express();
// ----------- middleware -------------
app.configure(function(){
// NOTE(review): unlike the bare-http benchmark, every request here also
// passes through body-parsing and cookie-parsing middleware.
app.use(express.bodyParser());
app.use(express.cookieParser());
app.use('/public',express.static(__dirname+'/files'));
});
// ----------- route-------------
app.get('/', function(req, res){
res.send('I heard you!')
});
app.listen(8000);
Express.js does not use cluster.
The reason you see more than a single core busy is very likely that node.js with Express more efficiently offloads operations to other workers through async code.
You can use cluster with an express, but as long as you are not having scaling issues that is just unnecessary hassle.
Below is an example code (source):
// Include the cluster module
// Include the cluster module
const cluster = require('cluster');

if (cluster.isMaster) {
  // Master process: spawn one worker per CPU core.
  const cpuCount = require('os').cpus().length;
  for (let i = 0; i < cpuCount; i += 1) {
    cluster.fork();
  }
} else {
  // Worker process: run an Express server; all workers share port 3000.
  const express = require('express');
  const app = express();

  // Add a basic route – index page
  app.get('/', function (req, res) {
    res.send('Hello World!');
  });

  // Bind to a port
  app.listen(3000);
  console.log('Application running!');
}