My Node.js application is running on localhost:8080
These are the server configuration files:
var express = require('express');
var http = require('http');
var app = express();
var WS = require('ws');
var config = require('./config/main');
var Client = require('./client');

// HTTP: serve the built client bundle as static files.
var server = http.createServer(app);
app.use(express.static(__dirname + '/../client/build/'));
server.listen(config.httpPort, function() {
  console.info('HTTP listening on *:' + config.httpPort);
});

// WebSocket: attach to the same HTTP server so both share one port.
var ws = new WS.Server({server: server});
console.info('Websocket server created');
ws.on('connection', function(ws) {
  // NOTE: the handler's `ws` parameter (the client socket) shadows the
  // outer `ws` server variable.
  var client = new Client(ws);
  // BUG FIX: log message typo "conection" -> "connection".
  console.log('New connection:', client.id);
});
and
module.exports = {
/* HTTP PORT */
httpPort: process.env.PORT || 8080,
/* WebSocket PORT */
wsPort: process.env.PORT || 8081
};
So I am trying to run it using the Nginx's reverse proxy:
location /myapp {
    proxy_pass http://localhost:8080/;
    # BUG FIX: WebSocket upgrade requires HTTP/1.1 between nginx and the
    # backend; without proxy_http_version the Upgrade handshake is sent
    # over HTTP/1.0 and silently fails.
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection "upgrade";
    # Preserve the original Host header for the backend.
    proxy_set_header Host $host;
}
Now, when I link to localhost/myapp the page appears normally, with all the static files loaded, but it seems like there is no WebSocket connection. PS: I am using Nginx V 1.11.7
Is there something wrong in the configuration or did I miss something? Thank you
Changing the URL that the client uses for the WebSocket connection solved this.
So in my client side I changed this line:
new WebSocket(location.origin.replace(/^http/, "ws"));
to this line:
new WebSocket("ws://localhost/myapp");
Now it's working fine!
Related
I have used socket.io for live updates inside a Node.js application. When I run it on localhost it works fine, and I receive all the events.
But as soon as I deploy it to the production server, my socket does not connect and no events are called. I have deployed the Node.js application on an Ubuntu server, using nginx as a reverse proxy. We have also integrated SSL into our domain.
This is my app.js
// Express app with Socket.IO attached to the same HTTP server (port 3000).
const express = require('express');
const cors = require('cors');
const app = express();
app.use(cors());
// NOTE(review): `rootRouter` is never defined/required in this snippet —
// presumably required above; verify, otherwise this line throws.
app.use('/api', rootRouter);
//socket setup
var http = require('http').createServer(app);
var io = require('socket.io')(http);
io.on('connection', (socket) => {
console.log('we have new connection');
// Client announces its user id; join a room named after that id so the
// server can target events at a single user.
socket.on('initiateConnection', async data => {
const { userId } = data;
socket.userId = userId;
socket.join(userId);
io.sockets.in(userId).emit('InitiatedConnection', userId);
console.log('User connected with id - ', data.userId);
});
socket.on("disconnect", async (reason) => {
if (socket.userId) {
const userId = socket.userId;
socket.leave(socket.userId);
console.log("user disconnected with id - " + userId);
}
});
// Relay a new-order event into the room of the user it belongs to.
socket.on('newOrderCreated', async data => {
console.log('newOrderCreated called', data)
io.sockets.in(data.userId).emit('OrderReceived', data);
})
});
const port = process.env.PORT || 3000;
http.listen(port, function () {
console.log(`listening on *:${port}`);
});
module.exports = app;
And For testing purpose I was trying to call socket from one of router file. That is I have initialised one client from my orders controller. Code for same is as follows
/controllers/ordersController.js
const io = require('socket.io-client');
//const SOCKET_ENDPOINT = 'http://localhost:3000/' // endpoint for localhost
const SOCKET_ENDPOINT = 'https://example.com/' // endpoint for live server
const socket = io(`${SOCKET_ENDPOINT}`);
socket.on('OrderReceived', (data) => {
console.log('New Request order received', data);
})
module.exports = {
findNearbyOutlet: async(req, res) => {
console.log('socket', socket)
let userId = req?.loggedInUserId ? req?.loggedInUserId : 0;
//emit this event to initiate connection with server
socket.emit('initiateConnection', {
userId
});
//emit this event to broadcast message about new order
socket.emit('newOrderCreated', {
orderId: 1,
userId: userId,
orderDate: '2022-05-02',
userName: 'Some user',
address: 'Atos society, C/1102, Jaipur, India-411110'
totalAmount: 100
})
}
}
For your information We have integrated SSL with our domain. Following is the nginx configuration for our site
# HTTPS vhost: terminates TLS and proxies everything (HTTP requests and
# WebSocket upgrades) to the node app on port 3000.
server {
listen 443 ssl;
ssl_certificate /var/www/ssl/example.com/bundle.crt;
ssl_certificate_key /var/www/ssl/example.com/HSSL-6256604eeb4e6.key;
server_name example.com www.example.com;
location / {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
proxy_pass http://localhost:3000;
# HTTP/1.1 plus the Upgrade/Connection headers are required for WebSocket.
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_cache_bypass $http_upgrade;
client_max_body_size 100M;
}
}
# Redirect all plain-HTTP traffic to HTTPS.
server {
listen 80;
server_name example.com www.example.com;
return 301 https://$server_name$request_uri;
}
So this is everything I have done for deployment. Everything else works fine; the only problem is that socket.io events are not triggering — even the socket connection itself does not work. Everything works on localhost but not on the live server.
I have tried lot of stuff but no clue why socket events are not working. Please help me in this socket setup on live server. Let me know what am I missing.
Thanks in advance.
I am trying to get an app that uses socket.io v.3.1.1 to work on production.
It works well on development using webpack devServer for the client on 3000 and nodemon for the server on 4000.
But when I put it on the production server the client complains with:
Cross-Origin Request Blocked: The Same Origin Policy disallows reading the remote resource at http://localhost:5003/socket.io/?EIO=4&transport=polling&t=NUmy2Us.
Server
import express from 'express'
import { createServer } from 'http'
import { Server } from 'socket.io'

const app = express()
const prod = process.env.NODE_ENV === 'production'
// BUG FIX: `||` binds tighter than `?:`, so the original expression
// `process.env.PORT || prod ? 5003 : 4000` parsed as
// `(process.env.PORT || prod) ? 5003 : 4000` and ignored PORT entirely.
const port = process.env.PORT || (prod ? 5003 : 4000)
const httpServer = createServer(app)

// Allow any origin for the Socket.IO handshake (tighten for production).
const io = new Server(httpServer, {
  cors: {
    origin: '*',
    methods: ['GET', 'POST']
  }
})

// Track live sockets so they can be removed again on disconnect.
const connections = []
io.on('connection', (socket) => {
  connections.push(socket)
  console.log(`Socket id ${socket.id} connected`)
  socket.on('disconnect', () => {
    connections.splice(connections.indexOf(socket), 1)
  })
})

httpServer.listen(port, () => console.log(`App listening on port ${port}.`))
....
Client
...
import { io } from 'socket.io-client'
// Pick the backend port by environment: webpack devServer talks to 4000 in
// development, the production server listens on 5003.
const port = process.env.NODE_ENV === 'development' ? '4000' : '5003'
// NOTE(review): `localhost` only resolves to the server when the browser
// runs on the same machine — in production the site origin should be used.
const socket = io(`http://localhost:${port}`)
This set up does work on development but when I put it on production on port 5003, it throws the CORS.
On the nginx server blocks I got
# Static front end behind HTTP basic auth.
location /thisapp/ {
auth_basic $authentication;
auth_basic_user_file /var/www/.htpasswd;
try_files $uri $uri/ =404;
}
# And other proxies for express routing
# NOTE(review): there is no location for /socket.io/, so Socket.IO
# handshake requests never reach the node backend — presumably the cause
# of the connection failure described above; verify.
location /api/process {
proxy_pass http://localhost:5003/api/process;
}
location /api/process/download {
proxy_pass http://localhost:5003/api/process/download;
}
I know the app is listening on 5003 on the server.
Pm2 log App
App listening on port 5003.
When I look at the network on the web sockets tab
On Dev I get this:
And on Production this:
The production server runs on https with let's encrypt but this has never been an issue for other apps I have run, I wonder if socket.io needs me to do something about it thou.
I tried multiple combinations of different approaches but I always get this:
I just ran into this last week - though not with Socket.io - so hopefully I can help.
Before the answer, a link to point you towards some reading on what's going on: https://developer.mozilla.org/en-US/docs/Web/Security/Same-origin_policy#how_to_allow_cross-origin_access
If your NGINX has access to use more_set_headers then try adding this inside your location block:
more_set_headers 'Access-Control-Allow-Origin: *';
If that works, you can next try paring that back further to:
more_set_headers 'Access-Control-Allow-Origin: http://localhost:5003';
If you don't have access to more_set_headers you can use add_headers instead, but it's confusingly named. It doesn't only add headers; it will also remove any headers applied by blocks further up the hierarchy, like in your server block. more_set_headers will not remove those headers and is truly additive.
The syntax differs a bit. If you're forced to use add_headers try:
add_header Access-Control-Allow-Origin *;
Or more restrictive:
add_header Access-Control-Allow-Origin http://localhost:5003;
Finally, if you need to support multiple origins you can do it like this to have NGINX automatically return a header that is compatible with the origin making the request.
Outside your server block:
# Map the request's Origin header to the value echoed back in
# Access-Control-Allow-Origin. Origins not in the whitelist map to an empty
# string, and nginx omits an add_header whose value is empty.
map $http_origin $allow_origin {
~^(http://localhost:5003|http://example.com)$ $http_origin;
}
Inside your location block:
add_header Access-Control-Allow-Origin $allow_origin;
I'm not 100% sure about the syntax for using map together with more_set_headers but this should get you 95% of the way there.
In the end after a lot of back and forth it turned out not to have anything to do with the headers.
I think my problem was twofold.
On Dev I am using webpack devServer for the front end on port 3000 and nodemon for the backend on 4000, so I was not using Nginx or
Pm2, and it worked just fine.
Therefore on production I did not have any block for socket.io and Pm2 was running in cluster mode with two instances, the moment I changed it to a single instance on Pm2 and added the Nginx location block for socket.io, it started to work with this:
Server
import express from 'express'
import { createServer } from 'http'
import { Server } from 'socket.io'

const app = express()
const prod = process.env.NODE_ENV === 'production'
// BUG FIX: parenthesize the ternary — `||` binds tighter than `?:`, so the
// original `process.env.PORT || prod ? 5003 : 4000` ignored PORT.
const port = process.env.PORT || (prod ? 5003 : 4000)
const httpServer = createServer(app)
const io = new Server(httpServer)
// Or, to make it also work on Dev, use this instead of the line above.
// BUG FIX: only ONE `const io` declaration may be active (declaring it
// twice is a SyntaxError), and the original misspelled `httpServer` as
// `httpSever`:
// const io = new Server(httpServer, { cors: true })

// Track live sockets so they can be removed again on disconnect.
const connections = []
io.on('connection', (socket) => {
  connections.push(socket)
  console.log(`Socket id ${socket.id} connected`)
  socket.on('disconnect', () => {
    connections.splice(connections.indexOf(socket), 1)
  })
})

httpServer.listen(port, () => console.log(`App listening on port ${port}.`))
Nginx
# Proxy the Socket.IO endpoint to the node backend, with the HTTP/1.1
# Upgrade/Connection headers required for the WebSocket transport.
location /socket.io/ {
proxy_pass http://localhost:5003/socket.io/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
}
Client
import { io } from 'socket.io-client'

// Production: the page is served from the same origin as the Socket.IO
// server (nginx proxies /socket.io/), so a bare io() connects back to it.
const socket = io()
// Or, to make it also work on Dev, use this instead of the line above
// (only ONE `const socket` declaration may be active).
// BUG FIX: the original alternative `dev ? io('http:localhost:4000') ? io()`
// used `?` where `:` belongs and was missing `//` in the URL:
// const dev = process.env.NODE_ENV === 'development'
// const socket = dev ? io('http://localhost:4000') : io()
My website is running via HTTPS at a public hoster and connects to a node server that is running on a Raspberry PI.
There is also a piece of hardware (lets call it decoder) in the same network with the PI which sends a data stream via TCP. The purpose of the PI is to read that stream and send it via WebSocket to the browser. So the goal is to output that stream on my website.
Now I'm running into a mixed-content problem and have no idea how to solve it.
What I have done so far is to install an nginx webserver on the PI and have installed a Letsencrypt certificate. Both are running fine (tested via normal https:// call in a webbrowser).
The WebSocket connection without SSL works fine as well and I get the data, but with SSL it won't work. I guess the problem is that the decoder is not able to handle SSL.
So how can I "send", "convert", "tunnel" or "proxy" the non-SSL data stream to a HTTPS server?
Update
#Jake Holzinger: you are absolutely right. The given information was not enough. Sorry! I try to clarify:
nginx is without any further modification. So it is the configuration which comes from the installation
the website (Angular) does a let connection = new WebSocket('wss://domain:port');
The node server looks like follows:
// Raspberry PI bridge: reads a TCP stream from the decoder and republishes
// it to browsers over WebSocket; an SSL-terminating proxy makes it
// reachable via wss:// from an https page.
const net = require('net');
const fs = require('fs');
const https = require('https');
const date = require('date-and-time');
const config = require('./server-config.json');
const httpProxy = require('http-proxy');
// SSL SERVER
// Wrap the plain WS server (port 9030) with an SSL-terminating proxy on
// port 9031 so clients can connect with wss://host:9031.
try {
const privateKey = fs.readFileSync('/etc/letsencrypt/live/' + config.DNSROUTER + '/privkey.pem', 'utf8');
const certificate = fs.readFileSync('/etc/letsencrypt/live/' + config.DNSROUTER + '/cert.pem', 'utf8');
const ca = fs.readFileSync('/etc/letsencrypt/live/' + config.DNSROUTER + '/chain.pem', 'utf8');
// NOTE(review): `options` (including the CA chain) is built but never
// used — the proxy below only receives key and cert; verify intent.
const options = {
key: privateKey,
cert: certificate,
ca: ca
};
let proxy = httpProxy.createServer({
target: 'ws://localhost:9030',
ssl: {
key: privateKey,
cert: certificate
}
}).listen(9031);
}
catch (e) {
// Best-effort: without certificates the proxy is skipped and only the
// plain ws:// endpoint on 9030 remains available.
console.log("LETSENCRYPT certificates not found! No SSL!");
console.log(e)
}
/**
 * server — plain WebSocket endpoint on port 9030; keeps the list of
 * connected browsers that the decoder stream is broadcast to.
 */
// BUG FIX: the handlers below use CLIENTS, but the original declared an
// unused `connections` object instead, so the first connection threw a
// ReferenceError. Declare CLIENTS as the (reassignable) client list.
let CLIENTS = [];
let WebSocketServer = require('ws').Server;
// start WS via HTTP
const wss1 = new WebSocketServer({port: 9030});
wss1.on('connection', function(ws) {
CLIENTS.push(ws);
console.log('connection via HTTP');
ws.on('close', function () {
console.log('close HTTP!');
// Drop the closed socket from the broadcast list.
CLIENTS = CLIENTS.filter(item => item != ws);
})
})
/**
 * client — maintains a TCP connection to the decoder and forwards every
 * data chunk to the connected WebSocket clients; reconnects after 1 s on
 * error and immediately after a clean remote close.
 * NOTE(review): `now()` and `sendAll()` are not defined in this snippet —
 * presumably helpers defined elsewhere in the file; verify.
 */
let connect = function() {
console.log(now(), 'Starting server...');
const socket = net.createConnection({ port: config.PORT, host: config.HOST }, () => {
console.log('Connected to server!');
})
socket.on('connect', () => {
console.log(now(), 'Connected to server!');
// Forward each raw chunk from the decoder to every WebSocket client.
socket.on('data', data => {
sendAll(data);
});
});
socket.on('error', data => {
console.log(now(), "Connnection refused:", data.errno,data.code,"(IP:", data.address + ":" + data.port + ")");
// Retry after a short delay; remove listeners so the dead socket can be
// garbage-collected and events don't fire twice after reconnect.
setTimeout(() => {
socket.removeAllListeners();
console.log(now(),"Reconnecting...");
connect();
}, 1000);
});
socket.on('end', () => {
console.log(now(), 'Disconnected from server');
console.log(now(), "Reconnecting...");
socket.removeAllListeners();
connect();
});
}
connect();
I hope that this will get a better impression. Thx for your help!
Now I solved this issue in a different way.
Instead of creating a proxy server node implementation, I've have created the reverse-proxy on nginx webserver level to proxy all HTTPS -> HTTP calls to the PI. The code below is working fine for me now.
sudo nano /etc/nginx/sites-available/default
and change the content like this:
# SSL-terminating reverse proxy on the PI: browsers connect to
# wss://DOMAIN_DNS:9031 and nginx forwards to the plain ws server on 9030.
server {
listen 9031 ssl;
ssl_certificate /etc/letsencrypt/live/DOMAIN_DNS/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/DOMAIN_DNS/privkey.pem;
location / {
proxy_pass http://127.0.0.1:9030;
# WebSocket upgrade needs HTTP/1.1 plus the Upgrade/Connection headers.
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
# Keep long-lived, mostly idle WebSocket connections open (24 h).
proxy_read_timeout 86400;
}
}
If I go to mysite.com/db/read/getall, I get a blank page which says:
"Cannot GET /db/read/getall"
If I kill the app.js file in PM2, heading to the same url (mysite.com/db/read/getall) gives me the error:
502 bad gateway - nginx ubuntu x.xx
This makes me think the node app is running fine but there's something wrong with the routes or setup. I was hoping this stack answer would fix it but I tried the suggestion and got the '502 bad gateway' error after adding the "app.use('/db', router);" statement to my app.
Any ideas?
NGINX default file setup:
server {
    listen 80;
    server_name xxx.xx.xx.xxx;

    # BUG FIX: the express app defines routes like /read/getall, but the
    # original config forwarded the full path /db/read/getall (proxy_pass
    # without a URI part passes the request URI unchanged), producing
    # "Cannot GET /db/read/getall". Matching trailing slashes on both the
    # location and proxy_pass makes nginx strip the /db/ prefix.
    location /db/ {
        proxy_pass http://localhost:3005/;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}
Node.js / Express app.js:
// Express + MongoDB read API on port 3005 (proxied by nginx under /db).
var express = require('express');
// For encoded URL - receiving a stringifyd JSON sent via POST URL
var bodyParser = require('body-parser');
var http = require('http');
// NOTE(review): socket.io is handed the `http` *module* here; it is then
// re-bound to the real server two lines below via io.listen(server).
var io = require('socket.io')(http);
var app = express();
var server = http.createServer(app).listen(3005, 'localhost');
io = io.listen(server);
var cors = require('cors');
var path = require("path");
var MongoClient = require('mongodb').MongoClient
, assert = require('assert');
var url = 'mongodb://localhost:27017/db1';
// GET /read/getall/ — return every document in clientLogs as JSON.
// NOTE(review): assert.equal on a connection/query error crashes the
// process instead of answering with a 500, and db.close() is skipped on
// the error path — consider explicit error handling.
app.get('/read/getall/', function (req, res) {
MongoClient.connect(url, function (err, db) {
assert.equal(null, err);
var collection = db.collection('clientLogs');
collection.find({}).toArray(function (err, docs) {
assert.equal(err, null);
res.setHeader('Content-Type', 'application/json');
res.send(JSON.stringify(docs) /*, null, '\t'*/);
db.close();
});
})
});
I'm having a problem configuring nginx and node to support socket.io over SSL.
My nginx config:
# Map the client's Upgrade header to a Connection header value: WebSocket
# requests get "upgrade", plain requests close the upstream connection.
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
server {
listen 80;
listen 443 ssl;
listen [::]:80;
listen [::]:443 ssl;
access_log /var/log/nginx/livetest.log;
server_name live-test.dev www.live-test.dev;
ssl_certificate /etc/nginx/ssl/nginx.crt;
ssl_certificate_key /etc/nginx/ssl/nginx.key;
ssl_session_cache shared:SSL:50m;
ssl_session_timeout 5m;
# Redirect requests that arrived without SSL to HTTPS.
if ($ssl_protocol = "") {
rewrite ^ https://$host$request_uri? permanent;
}
location / {
# NOTE(review): proxying with https:// requires the upstream node app
# to speak TLS itself; since nginx already terminates SSL here, plain
# http:// to the upstream is the usual setup — likely the cause of the
# 502 "upstream prematurely closed" errors; verify.
proxy_pass https://live_test;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
}
live_test is upstream for node.js running on port 6020. When testing in chrome it does stop on polling with (failed) status. When using wscat:
wscat --connect wss://live-test.dev
I receive:
error: Error: self signed certificate
I'm wondering what may be wrong here? Here is my node.js app:
var express = require('express');
var cookie = require('cookie');
var app = express();
var http = require('http').Server(app);
var socketIo = require('socket.io');
var redis = require('redis');
// NOTE(review): `redisClient = client =` leaks `client` as an implicit
// global; kept for compatibility with code elsewhere that may read it.
var redisClient = client = redis.createClient();

// BUG FIX: `io` was used below but never created — attach socket.io to
// the HTTP server explicitly.
var io = socketIo(http);

io.on('connection', function(socket){
  // Parse the session cookies sent with the Socket.IO handshake.
  var cookies = cookie.parse(socket.handshake.headers.cookie);
  console.log(cookies);
});

http.listen(6020, function(){
  console.log('listening on 6020');
});
I have a feeling I'm missing something in my node.js app. I thought that since nginx handle SSL node.js does not have to anymore, but perhaps I'm mistaken.
And yes, I'm using self-signed certificate for SSL. Will node.js / socket.io work with self-signed cert?
#UPDATE
Following some reading I changed my node.js app:
var express = require('express');
var cookie = require('cookie');
var fs = require('fs');
var app = express();

// BUG FIX: https.Server takes (options, requestListener) — the original
// passed the app first and the TLS options second, so the key/cert were
// silently ignored and the server had no usable TLS configuration.
var https = require('https').Server({
  key: fs.readFileSync('/etc/nginx/ssl/nginx.key'),
  cert: fs.readFileSync('/etc/nginx/ssl/nginx.crt'),
}, app);

var socketIo = require('socket.io');
var redis = require('redis');
// NOTE(review): `redisClient = client =` leaks `client` as an implicit
// global; kept for compatibility with code elsewhere that may read it.
var redisClient = client = redis.createClient();

// Attach socket.io to the TLS server.
var io = new socketIo(https);

io.on('connection', function(socket){
  // Parse the session cookies sent with the Socket.IO handshake.
  var cookies = cookie.parse(socket.handshake.headers.cookie);
  console.log(cookies);
});

https.listen(6020, function(){
  console.log('listening on 6020');
});
#UPDATE2
Following the comment by abcdn I did try wscat with -n flag, now getting error:
error: Error: unexpected server response (502)
while nginx error.log contains:
2017/03/07 13:44:10 [error] 10556#10556: *140 upstream prematurely closed connection while reading response header from upstream
#UPDATE 3
After further reading, I turned my app.js back to http.
// Final working setup: HTTPS server with socket.io, broadcasting Redis
// pub/sub messages to all connected clients as "<channel>:<event>".
var fs = require( 'fs' );
var app = require('express')();
var https = require('https');
// NOTE(review): socket.io is handed the `https` *module* here; it is
// re-bound to the real server below via io.listen(httpsServer).
var io = require('socket.io')(https);
var HTTPSOptions = {
cert: fs.readFileSync('path to ssl certificate file'),
key: fs.readFileSync('path to ssl key file'),
requestCert: false,
rejectUnauthorized: false,
};
// NOTE(review): rejectUnauthorized:false accepts the self-signed cert but
// disables certificate validation — do not use with untrusted peers.
HTTPSOptions.agent = new https.Agent(HTTPSOptions);
var httpsServer = https.createServer(HTTPSOptions, app);
io = io.listen(httpsServer, {
log: false
});
//io.sockets.on('connection', function (sock) {
// console.log("CONNECTED");
//});
var Redis = require('ioredis');
var redis = new Redis();
// Subscribe to every channel; each published JSON message is fanned out
// to the browsers with the event name "<channel>:<message.event>".
redis.psubscribe('*', function () {});
redis.on('pmessage', function (subscribed, channel, message) {
console.log("channel: " + channel);
console.log("message: " + message);
message = JSON.parse(message);
io.emit(channel + ':' + message.event, message.data);
});
httpsServer.listen(6001, function(){
console.log('Listening on Port 6001');
});
I spent 3 days to find a solution for this and this is my final version that works perfectly. I hope it helps anyone that got stuck like me :)