Electron security (is my code enough?)

I know about reZach's answer about Electron security, and I know about his secure Electron template. I tried to use it, but it's just too much for me right now: I don't know anything about webpack, my React knowledge is incomplete, and the overall complexity of the template is more than I can handle at the moment. So I wanted to ask: maybe my code is already good enough to be secure and safe? I made a trading app, so I can't give it to people without being sure it is safe to use. I used just vanilla JS, Express, localtunnel for receiving webhooks, and several modules. In short, here's the code:
main.js
const { app, BrowserWindow, ipcMain } = require('electron');
const fs = require('fs');
const path = require('path');
const Store = require('electron-store');
const store = new Store();
const CryptoJS = require('crypto-js');
const axios = require('axios');
const WebSocket = require('ws');
const express = require('express');
const bodyParser = require('body-parser');
const localtunnel = require('localtunnel');
(async () => {
const tunnel = await localtunnel({ port: 3000, subdomain: 'mysubdomain' });
// the assigned public url for your tunnel
// i.e. https://abcdefgjhij.localtunnel.me
console.log(tunnel.url);
tunnel.on('close', () => {
// tunnels are closed
console.log('connection closed');
});
})();
const server = express();
const PORT = 3000;
let webhookMsg;
server.use(bodyParser.text());
server.listen(PORT, () => console.log(`Server running on port ${PORT}`));
server.get('/', function (req, res) {
res.send('Server is ready!');
});
server.post('/', (req, res) => {
webhookMsg = req.body;
res.sendStatus(200);
});
let win;
async function createWindow() {
// Create the browser window.
win = new BrowserWindow({
width: 500,
height: 550,
titleBarStyle: 'hidden',
webPreferences: {
nodeIntegration: false, // is default value after Electron v5
contextIsolation: true, // protect against prototype pollution
enableRemoteModule: false, // turn off remote
preload: path.join(app.getAppPath(), 'preload.js'), // use a preload script
backgroundThrottling: false,
},
});
// Load app
win.loadFile(path.join(__dirname, './index.html'));
win.setAlwaysOnTop(true, 'screen');
// win.removeMenu();
// rest of code..
}
app.on('ready', createWindow);
// Next comes my app's main logic: receiving messages, sending API requests to the exchange platform, and passing the answers back to the front-end script.
// For example:
ipcMain.on('loadPrefs', () => {
const allPrefs = store.get('prefs');
win.webContents.send('prefs', allPrefs);
});
ipcMain.on('giveBalance', async e => {
const [apiKey, secret] = fs.readFileSync('api.txt').toString('UTF8').replace(/\r/g, '').split('\n');
const timestamp = Date.now().toString();
const params = {
api_key: apiKey,
timestamp: timestamp,
};
let orderedParams = '';
Object.keys(params)
.sort()
.forEach(function (key) {
orderedParams += key + '=' + params[key] + '&';
});
orderedParams = orderedParams.substring(0, orderedParams.length - 1);
var hmac = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, secret);
hmac.update(orderedParams);
const sign = hmac.finalize().toString(CryptoJS.enc.Hex);
try {
const res = await axios.get(`https://api.bybit.com/v2/private/wallet/balance?&api_key=${apiKey}&sign=${sign}&timestamp=${timestamp}`);
// console.log(res.data);
const responseObj = res.data.result.USDT.equity;
win.webContents.send('balance', { responseObj });
} catch (error) {
// console.log(error);
}
});
preload.js
const { contextBridge, ipcRenderer } = require('electron');
// Expose protected methods that allow the renderer process to use
// the ipcRenderer without exposing the entire object
contextBridge.exposeInMainWorld('api', {
send: (channel, data) => {
ipcRenderer.removeAllListeners(channel);
let validChannels = [
// myChannelsList
];
if (validChannels.includes(channel)) {
ipcRenderer.send(channel, data);
}
},
receive: (channel, func) => {
ipcRenderer.removeAllListeners(channel);
let validChannels = [
// myResponseChannelsList
];
if (validChannels.includes(channel)) {
// Deliberately strip event as it includes `sender`
ipcRenderer.on(channel, (event, ...args) => func(...args));
}
},
});
In short, I just copied reZach's example from that answer and added my own JS logic to it.
Could you please tell me whether it is secure and safe to use? I just worry that somebody could get the user's API keys, for example, and take their money away :) Thank you
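One concrete hardening idea this code suggests: main.js currently reads the API key and secret from a plaintext api.txt file on disk. If the app runs on Electron 15 or newer, the built-in safeStorage module can keep those credentials encrypted at rest instead. The sketch below is only an illustration of that idea, not part of the original app; it reuses the electron-store instance from main.js, and the 'credentials' key plus the saveApiCredentials/loadApiCredentials helpers are made up for the example.
// Illustrative only: encrypt the exchange credentials with Electron's safeStorage
// (Electron >= 15) and keep the ciphertext in electron-store instead of api.txt.
const { safeStorage } = require('electron');

function saveApiCredentials(apiKey, secret) {
  if (!safeStorage.isEncryptionAvailable()) {
    throw new Error('OS-level encryption is not available on this machine');
  }
  // encryptString returns a Buffer; store it as base64 text
  const encrypted = safeStorage.encryptString(JSON.stringify({ apiKey, secret }));
  store.set('credentials', encrypted.toString('base64'));
}

function loadApiCredentials() {
  const stored = store.get('credentials');
  if (!stored) return null;
  const plain = safeStorage.decryptString(Buffer.from(stored, 'base64'));
  return JSON.parse(plain); // { apiKey, secret }
}
This only protects the keys on disk; it does not change anything about the IPC whitelisting or contextIsolation settings shown above.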

Related

Wait for an answer from Electron synchronously

I'm trying to make a desktop Bybit trading app, and I can't figure out how to make the code wait for the response from the main script. I need to wait for a response carrying the needed info, for example the wallet balance. Instead the code runs asynchronously and the variable I need to render is undefined. I found the article about the vulnerability of nodeIntegration: true from the author of the Electron security answer, and I did everything as he said, but now I can't "pause" the code until the data is received and rendered. Here's the code:
main.js
const { app, BrowserWindow, ipcMain, Menu } = require('electron');
const path = require('path');
const CryptoJS = require('crypto-js');
const axios = require('axios');
let win;
async function createWindow() {
// Create the browser window.
win = new BrowserWindow({
width: 385,
height: 200,
titleBarStyle: 'hidden',
webPreferences: {
nodeIntegration: false, // is default value after Electron v5
contextIsolation: true, // protect against prototype pollution
enableRemoteModule: false, // turn off remote
preload: path.join(app.getAppPath(), 'preload.js'), // use a preload script
backgroundThrottling: false,
},
});
// Load app
win.loadFile(path.join(__dirname, './index.html'));
win.setAlwaysOnTop(true, 'screen');
// win.removeMenu();
// rest of code..
}
app.on('ready', createWindow);
ipcMain.on('giveBalance', async e => {
const apiKey = '';
const secret = '';
const timestamp = Date.now().toString();
const params = {
api_key: apiKey,
timestamp: timestamp,
};
let orderedParams = '';
Object.keys(params)
.sort()
.forEach(function (key) {
orderedParams += key + '=' + params[key] + '&';
});
orderedParams = orderedParams.substring(0, orderedParams.length - 1);
var hmac = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, secret);
hmac.update(orderedParams);
const sign = hmac.finalize().toString(CryptoJS.enc.Hex);
const res = await axios.get(`https://api.bybit.com/v2/private/wallet/balance?&api_key=${apiKey}&sign=${sign}&timestamp=${timestamp}`);
let responseObj = res.data.result.USDT.equity.toFixed(2);
console.log(res.data.result.USDT.equity.toFixed(2));
win.webContents.send('balance', { responseObj });
});
preload.js
const { contextBridge, ipcRenderer } = require('electron');
// Expose protected methods that allow the renderer process to use
// the ipcRenderer without exposing the entire object
contextBridge.exposeInMainWorld('api', {
send: (channel, data) => {
// whitelist channels
let validChannels = ['giveBalance', 'givePosition'];
if (validChannels.includes(channel)) {
ipcRenderer.send(channel, data);
}
},
receive: (channel, func) => {
let validChannels = ['balance', 'position'];
if (validChannels.includes(channel)) {
// Deliberately strip event as it includes `sender`
ipcRenderer.on(channel, (event, ...args) => func(...args));
}
},
});
script.js
const getBalance = () => {
window.api.send('giveBalance');
const res = window.api.receive('balance', data => {
console.log('hi');
return data.responseObj;
});
const myBalance = '$' + res;
document.querySelector('.myBalance').textContent = `My Balance: ${myBalance}`;
console.log('not in time');
};
getBalance();
I need to get the balance in script.js and stop the code until the res variable has received the data, but all of the code gets executed right away and only then does the message arrive. Thank you.
If I change that part of preload.js, replacing .on with .sendSync:
receive: (channel, func) => {
let validChannels = ['balance', 'position'];
if (validChannels.includes(channel)) {
// Deliberately strip event as it includes `sender`
ipcRenderer.sendSync(channel, (event, ...args) => func(...args));
}
},
then I get
Uncaught Error: An object could not be cloned.
I just needed to put the rest of the logic inside the receive-message callback.
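For completeness, the pattern many Electron apps use for this request/response flow is ipcMain.handle with ipcRenderer.invoke (available since Electron 7): the renderer gets a promise it can await instead of trying to block with sendSync. Below is a minimal sketch adapted to the channel name above, not the asker's original code; fetchBalanceFromExchange is a placeholder for the signing/axios logic in main.js.
// main.js — answer the request and return the value directly
ipcMain.handle('giveBalance', async () => {
  const responseObj = await fetchBalanceFromExchange(); // placeholder for the Bybit signing + axios call
  return { responseObj };
});

// preload.js — expose an invoke-based helper instead of the send/receive pair
contextBridge.exposeInMainWorld('api', {
  getBalance: () => ipcRenderer.invoke('giveBalance'),
});

// script.js — await the result, then render it
const getBalance = async () => {
  const { responseObj } = await window.api.getBalance();
  document.querySelector('.myBalance').textContent = `My Balance: $${responseObj}`;
};
getBalance();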

I got the problem on the guildMemberAdd event on Discord.js v13.8.0

I'm having a problem with the guildMemberAdd event: when the event fires, I get the error shown in the console log.
Here's my code (index.js):
const { Client, Intents, Collection } = require('discord.js');
const express = require('express');
const app = express();
const fs = require('fs');
const client = new Client({ intents: 32767 });
app.get('/', (req, res) => {
res.send('Hello Express app!');
});
app.listen(3000, () => {
console.log('server started');
});
//--------------------Discord Bot Code below -----------
client.commands = new Collection();
client.cooldowns = new Collection();
['eventsHandler', 'commandsHandler'].forEach(handler => {
require(`./Handlers/${handler}`)(client);
})
client.login(process.env['TOKEN']); //Login With Discord Token
And guildMemberAdd.js
module.exports = {
  name: "guildMemberAdd",
  execute(member) {
    try {
      console.log(member.guild.name); // <--- The problem's here
    } catch (ex) {
      console.error(ex);
    }
  },
};
Please help me. :((
Here's my event handler
const { readdirSync } = require('fs')
module.exports = (client) => {
const eventFolders = readdirSync(`./Events`)
for (const folder of eventFolders) {
const eventFiles = readdirSync(`./Events/${folder}`).filter(files => files.endsWith(".js"))
for (const file of eventFiles) {
const event = require(`../Events/${folder}/${file}`)
if (event.once) {
client.once(event.name, (...args) => event.execute(...args, client))
} else {
client.on(event.name, (...args) => event.execute(...args, client))
}
}
}
}
This is not how you listen for the guildMemberAdd event. It is an event emitted on the client, so you need to do this:
client.on('guildMemberAdd', async (member) => {
// Your code here
});
Read the docs
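If you want to keep the per-file handler structure from the question instead, note that the loader above calls event.execute(...args, client), so the member arrives as the first argument and the client is appended last. A minimal sketch of such a file (the Events/guild folder path is just an example):
// Events/guild/guildMemberAdd.js — illustrative, matching the loader shown above
module.exports = {
  name: 'guildMemberAdd',
  execute(member, client) {
    // member is the GuildMember that just joined; client is appended by the loader
    console.log(`${member.user.tag} joined ${member.guild.name}`);
  },
};
Also keep in mind that guildMemberAdd only fires when the GUILD_MEMBERS privileged intent is enabled for the bot in the Discord Developer Portal.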

App Engine can't find default credentials to connect to Firestore in Google Cloud

I have a NextJS TypeScript app running on Google App Engine. It fetches data from Firestore and everything works fine. In order to improve the speed of the app, I'm experimenting with a new data-fetching infrastructure in which the server listens to Firestore collections and writes all the data to JSON files in the tmp folder whenever changes are made in Firestore. This way all the data is up to date and available to App Engine all the time. Locally this works like a charm.
There are some obvious things I need to improve, but the next step for me is to run a dev project in GCP and see if my memory usage is ok and if it works as quickly as I hope etc. But the problem is that when I change my NextJS infra to include a custom server, the connection between App Engine and Firestore vanishes.
The problem I'm seeing on GCP logs is:
Error: Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.
at GoogleAuth.getApplicationDefaultAsync (/workspace/node_modules/google-auth-library/build/src/auth/googleauth.js:180:19)
at processTicksAndRejections (node:internal/process/task_queues:96:5)
at runNextTicks (node:internal/process/task_queues:65:3)
at listOnTimeout (node:internal/timers:526:9)
at processTimers (node:internal/timers:500:7)
at async GoogleAuth.getClient (/workspace/node_modules/google-auth-library/build/src/auth/googleauth.js:558:17)
at async GrpcClient._getCredentials (/workspace/node_modules/google-gax/build/src/grpc.js:145:24)
at async GrpcClient.createStub (/workspace/node_modules/google-gax/build/src/grpc.js:308:23)
The actual error message in the client is "502 Bad Gateway – nginx".
Earlier I had a basic NextJS app with frontend pages and backend API routes. The routes connect to Firestore and serve that data to the correct users, etc. The main difference is that I've added a custom server that initiates the listeners:
import { Firestore } from '@google-cloud/firestore';
import express, { Request, Response } from 'express';
import next from 'next';
import fs from 'fs';
import os from 'os';
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();
const port = process.env.PORT || 3000;
let firestoreListeners: { [collectionId: string]: () => void } = {};
const unsubscribeAllListeners = () => {
for (const collectionId of Object.keys(firestoreListeners)) {
console.log('unsubscribing from', collectionId);
firestoreListeners[collectionId]();
}
firestoreListeners = {};
};
const skippedCollections = ['analytics', 'pageRevisions', 'newsItemRevisions'];
app.prepare().then(() => {
const server = express();
unsubscribeAllListeners();
const firestoreSettings = {} as FirebaseFirestore.Settings;
if (process.env.GCP_KEYFILE_NAME) {
firestoreSettings.keyFilename = process.env.GCP_KEYFILE_NAME;
}
const firestoreData: {
[collectionId: string]: {
[id: string]: any;
};
} = {};
const firestore = new Firestore(firestoreSettings);
firestore.listCollections().then((collections) => {
for (const collection of collections) {
if (
!firestoreListeners[collection.id] &&
!skippedCollections.includes(collection.id)
) {
console.log('listening to', collection.id);
firestoreData[collection.id] = {};
const listener = firestore
.collection(collection.id)
.onSnapshot((snapshot) => {
firestoreData[collection.id] = {};
for (const doc of snapshot.docs) {
firestoreData[collection.id][doc.id] = {
_id: doc.id,
...doc.data(),
};
}
if (!fs.existsSync(os.tmpdir() + '/data')) {
fs.mkdirSync(os.tmpdir() + '/data');
}
fs.writeFileSync(
os.tmpdir() + `/data/${collection.id}.json`,
JSON.stringify(firestoreData[collection.id])
);
console.log(
'updated',
collection.id,
'with',
snapshot.docs.length,
'docs'
);
});
firestoreListeners[collection.id] = listener;
}
}
});
server.all('*', (req: Request, res: Response) => {
return handle(req, res);
});
server.listen(port, (err?: any) => {
if (err) throw err;
console.log(
`> Ready on localhost:${port} - env ${process.env.NODE_ENV}`
);
});
server.on('close', function () {
unsubscribeAllListeners();
});
process.on('beforeExit', () => {
unsubscribeAllListeners();
});
});
The build and deploy scripts are fine; everything works if I take the listener logic out of the equation and just deploy the custom server.
What's the problem? Is it some nginx issue, or do I have something else wrong?
The problem apparently is that I cannot initiate my Firestore connection before listen, or even in the listen callback. I have to do it a bit later (to give GAE a chance to authenticate for Firestore?).
When I moved the listener initialization into the catch-all request handler, it worked. Below is a solution that helped with the problem. I don't feel it's that beautiful, but it gets the job done.
import { Firestore } from '@google-cloud/firestore';
import express, { Request, Response } from 'express';
import next from 'next';
import fs from 'fs';
import os from 'os';
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();
const port = process.env.PORT || 3000;
let firestoreListeners: { [collectionId: string]: () => void } = {};
const unsubscribeAllListeners = () => {
for (const collectionId of Object.keys(firestoreListeners)) {
console.log('unsubscribing from', collectionId);
firestoreListeners[collectionId]();
}
firestoreListeners = {};
};
const skippedCollections = ['analytics', 'pageRevisions', 'newsItemRevisions'];
export const firestoreData: {
[collectionId: string]: {
[id: string]: any;
};
} = {};
let listenersInitiated = false;
const initiateListeners = () => {
if (listenersInitiated) {
return;
}
const firestoreSettings = {} as FirebaseFirestore.Settings;
if (process.env.GCP_KEYFILE_NAME) {
firestoreSettings.keyFilename = process.env.GCP_KEYFILE_NAME;
}
const firestore = new Firestore(firestoreSettings);
firestore.listCollections().then((collections) => {
for (const collection of collections) {
if (
!firestoreListeners[collection.id] &&
!skippedCollections.includes(collection.id)
) {
console.log('listening to', collection.id);
firestoreData[collection.id] = {};
const listener = firestore
.collection(collection.id)
.onSnapshot((snapshot) => {
firestoreData[collection.id] = {};
for (const doc of snapshot.docs) {
firestoreData[collection.id][doc.id] = {
_id: doc.id,
...doc.data(),
};
}
if (!fs.existsSync(os.tmpdir() + '/data')) {
fs.mkdirSync(os.tmpdir() + '/data');
}
fs.writeFileSync(
os.tmpdir() + `/data/${collection.id}.json`,
JSON.stringify(firestoreData[collection.id])
);
console.log(
'updated',
collection.id,
'with',
snapshot.docs.length,
'docs'
);
});
firestoreListeners[collection.id] = listener;
}
}
});
listenersInitiated = true;
};
app.prepare().then(() => {
const server = express();
unsubscribeAllListeners();
server.all('*', (req: Request, res: Response) => {
initiateListeners();
return handle(req, res);
});
server.listen(port, (err?: any) => {
if (err) throw err;
console.log(
`> Ready on localhost:${port} - env ${process.env.NODE_ENV}`
);
});
server.on('close', function () {
console.log('Closing');
unsubscribeAllListeners();
});
process.on('beforeExit', () => {
console.log('Closing');
unsubscribeAllListeners();
});
});
According to my initial tests this works very nicely on GAE. With the app.yaml settings configured correctly, it provides good speed at a low cost.
This doesn't really handle listeners failing if a server instance lives for a long time, and it might also initiate too many listeners, but the initial results of my tests are promising!
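To show how the cached snapshots are then consumed, here is a small illustration (not part of the original code) of a route on the same custom Express server that reads one of the JSON files the listener writes to os.tmpdir(); the /api/cached path is just an example, and fs and os are already imported at the top of the file above.
// Illustrative route: serve a collection from the tmp snapshot written by the listener above
import path from 'path';

server.get('/api/cached/:collectionId', (req, res) => {
  const file = path.join(os.tmpdir(), 'data', `${req.params.collectionId}.json`);
  if (!fs.existsSync(file)) {
    // The listener has not written this collection yet (or it is in skippedCollections)
    return res.status(404).json({ error: 'no cached data' });
  }
  res.json(JSON.parse(fs.readFileSync(file, 'utf8')));
});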

NodeJS WebRTC app using DataChannel isn't working in production server

I created a simple peer-to-peer app using NodeJS and WebRTC for something like a one-to-many livestreaming application.
So far it works on my localhost, but when I deployed the app to a production VM server on Google Cloud Platform, I can't create a DataChannel using peer.createDataChannel(). Or at least that is the issue as I see it, because nothing throws any errors.
server.js
const port = process.env.PORT || 80;
const express = require('express');
const bodyParser = require('body-parser');
const webrtc = require('wrtc');
const app = express();
const status = {
offline: 'offline',
online: 'online',
streaming: 'streaming'
};
let hostStream;
let hostChannel;
let channelData = {
status: status.offline,
message: null
};
app.use(express.static('public'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.post('/broadcast', async ({ body }, res) => {
try {
let peer = new webrtc.RTCPeerConnection({
iceServers: [
{
urls: "stun:stun.stunprotocol.org"
}
]
});
peer.ontrack = (e) => handleTrackEvent(e, peer);
peer.ondatachannel = (e) => handleHostDataChannelEvent(e);
let desc = new webrtc.RTCSessionDescription(body.sdp);
await peer.setRemoteDescription(desc);
let answer = await peer.createAnswer();
await peer.setLocalDescription(answer);
let payload = {
sdp: peer.localDescription,
status: channelData.status
};
res.json(payload);
} catch (e) {
console.log(e);
}
});
function handleTrackEvent(e, peer) {
hostStream = e.streams[0];
}
function handleHostDataChannelEvent(e) {
let channel = e.channel;
channel.onopen = function(event) {
channelData.message = '[ SERVER ]: Peer-to-peer data channel has been created.';
channel.send(JSON.stringify(channelData));
channelData.message = null;
}
channel.onmessage = function(event) {
console.log(event.data);
}
hostChannel = channel;
}
app.listen(port, () => console.log('[ SERVER ]: Started'));
streamer.js
function createPeer() {
let peer = new RTCPeerConnection({
iceServers: [
{
urls: "stun:stun.stunprotocol.org"
}
]
});
let channel = peer.createDataChannel('host-server');
channel.onopen = function(event) {
channel.send('Host: Data Channel Opened');
}
channel.onmessage = function(event) {
let data = JSON.parse(event.data);
if('status' in data) {
$('body').removeClass().addClass(data.status);
}
if('message' in data && data.message != null) {
$.toast({
heading: 'Data Channel',
text: data.message,
showHideTransition: 'slide',
icon: 'info',
position: 'top-center',
stack: false
})
}
}
peer.onnegotiationneeded = () => handleNegotiationNeededEvent(peer);
return peer;
}
On my localhost, when the host (streamer.js) starts streaming media, the server outputs Host: Data Channel Opened on the console, and in the host's browser I see the toast with the message Server: Peer-to-peer data channel has been created. However, when I try the application on my production server, the server doesn't log that to the console and the host's browser doesn't show the toast saying the data channel has been created.
There are no errors in either the browser console or the server console, so I don't really know where the problem is.
I do not see any gathering of ICE candidates in your code, so it is no surprise that your peers cannot establish a connection with each other. Here is a working sample of what your code should look like.
streamer.js:
async function createPeer(configuration) {
const localCandidates = [];
// Step 1. Create new RTCPeerConnection
const peer = new RTCPeerConnection(configuration);
peer.onconnectionstatechange = (event) => {
console.log('Connection state:', peer.connectionState);
};
peer.onsignalingstatechange = (event) => {
console.log('Signaling state:', peer.signalingState);
};
peer.oniceconnectionstatechange = (event) => {
console.log('ICE connection state:', peer.iceConnectionState);
};
peer.onicegatheringstatechange = (event) => {
console.log('ICE gathering state:', peer.iceGatheringState);
};
// Step 5. Gathering local ICE candidates
peer.onicecandidate = async (event) => {
if (event.candidate) {
localCandidates.push(event.candidate);
return;
}
// Step 6. Send Offer and client candidates to server
const response = await fetch('/broadcast', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
offer: offer,
candidates: localCandidates,
}),
});
const {answer, candidates} = await response.json();
// Step 7. Set remote description with Answer from server
await peer.setRemoteDescription(answer);
// Step 8. Add ICE candidates from server
for (let candidate of candidates) {
await peer.addIceCandidate(candidate);
}
};
// Step 2. Create new Data channel
const dataChannel = peer.createDataChannel('host-server');
dataChannel.onopen = (event) => {
dataChannel.send('Hello from client!');
};
dataChannel.onclose = (event) => {
console.log('Data channel closed');
};
dataChannel.onmessage = (event) => {
console.log('Data channel message:', event.data);
};
// Step 3. Create Offer
const offer = await peer.createOffer();
// Step 4. Set local description with Offer from step 3
await peer.setLocalDescription(offer);
return peer;
}
const configuration = {
iceServers: [
{
urls: 'stun:global.stun.twilio.com:3478?transport=udp',
},
],
};
// Add turn server to `configuration.iceServers` if needed.
// See more at https://www.twilio.com/docs/stun-turn
createPeer(configuration);
server.js:
const express = require('express');
const bodyParser = require('body-parser');
const webrtc = require('wrtc');
const port = process.env.PORT || 80;
const configuration = {
iceServers: [
{
urls: 'stun:global.stun.twilio.com:3478?transport=udp',
},
],
};
// Add turn server to `configuration.iceServers` if needed.
const app = express();
app.use(express.static('public'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: true}));
app.post('/broadcast', async (req, res) => {
const {offer, candidates} = req.body;
const localCandidates = [];
let dataChannel;
// Step 1. Create new RTCPeerConnection
const peer = new webrtc.RTCPeerConnection(configuration);
peer.ondatachannel = (event) => {
dataChannel = event.channel;
dataChannel.onopen = (event) => {
dataChannel.send('Hello from server!');
};
dataChannel.onclose = (event) => {
console.log('Data channel closed');
};
dataChannel.onmessage = (event) => {
console.log('Data channel message:', event.data);
};
};
peer.onconnectionstatechange = (event) => {
console.log('Connection state:', peer.connectionState);
};
peer.onsignalingstatechange = (event) => {
console.log('Signaling state:', peer.signalingState);
};
peer.oniceconnectionstatechange = (event) => {
console.log('ICE connection state:', peer.iceConnectionState);
};
peer.onicegatheringstatechange = (event) => {
console.log('ICE gathering state:', peer.iceGatheringState);
};
peer.onicecandidate = (event) => {
// Step 6. Gathering local ICE candidates
if (event.candidate) {
localCandidates.push(event.candidate);
return;
}
// Step 7. Response with Answer and server candidates
let payload = {
answer: peer.localDescription,
candidates: localCandidates,
};
res.json(payload);
};
// Step 2. Set remote description with Offer from client
await peer.setRemoteDescription(offer);
// Step 3. Create Answer
let answer = await peer.createAnswer();
// Step 4. Set local description with Answer from step 3
await peer.setLocalDescription(answer);
// Step 5. Add ICE candidates from client
for (let candidate of candidates) {
await peer.addIceCandidate(candidate);
}
});
app.listen(port, () => console.log('Server started on port ' + port));
I found that your STUN server was not fully functional, so I replaced it with another one from Twilio. I also added event handlers that make it easy to track the state of the WebRTC session. You would do well to learn more about the WebRTC connection flow.
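As a follow-up to the "Add turn server to configuration.iceServers if needed" comments above: a TURN entry has the same shape as the STUN one plus credentials. The URL and credentials below are placeholders; a real deployment would use the values issued by its TURN provider (for example Twilio's Network Traversal Service or a self-hosted coturn).
// Example shape of a TURN entry in configuration.iceServers (placeholder values)
const configuration = {
  iceServers: [
    { urls: 'stun:global.stun.twilio.com:3478?transport=udp' },
    {
      urls: 'turn:turn.example.com:3478?transport=udp', // placeholder TURN server
      username: 'TURN_USERNAME',                        // issued by your TURN provider
      credential: 'TURN_PASSWORD',
    },
  ],
};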

NodeJS + WS access currently running WS server instance

I have implemented a simple REST API using NodeJS, ExpressJS and routing-controllers. I have also implemented a basic WebSocket server, using ws, running alongside the REST API.
const app = express();
app.use(bodyParser.json({limit: "50mb"}));
app.use(bodyParser.urlencoded({limit: "50mb", extended: true}));
useExpressServer(app, {
controllers: [
UserController
]
});
const server = app.listen(21443, (err: Error) => {
console.log("listening on port 21443");
});
const wss = new WebSocket.Server({server});
wss.on("connection", (ws: WebSocket) => {
ws.on("message", (message: string) => {
console.log("received: %s", message);
ws.send(`Hello, you sent -> ${message}`);
});
ws.send("Hi there, I am a WebSocket server");
});
My question is: how do I get access to the currently running WS instance so that I am able to send or broadcast from my controller methods? I have a number of POST methods that run long processes and so return an HTTP 200 to the client; I then would like to either send or broadcast to all connected WS clients.
What is the correct way to access the WebSocket.Server instance from within my controller classes?
You can create the websocket earlier and pass the instance around:
const notifier = new NotifierService();
notifier.connect(http.createServer(app));
app.get("/somethingHappened", () => {
notifier.broadcast("new notification!!");
});
app.use(routes(notifier))
Full code:
app.js
Pass the websocket to the other routes:
const express = require("express");
const http = require("http");
const NotifierService = require("../server/NotifierService.js");
const routes = require("./routes");
const app = express();
const server = http.createServer(app);
const notifier = new NotifierService();
notifier.connect(server);
app.get("/somethingHappened", () => {
notifier.broadcast("new notification!!");
});
// to demonstrate how the notifier instance can be
// passed around to different routes
app.use(routes(notifier));
server
.listen(4000)
.on("listening", () =>
console.log("info", `HTTP server listening on port 4000`)
);
NotifierService.js class that handles the websocket
const url = require("url");
const { Server } = require("ws");
class NotifierService {
constructor() {
this.connections = new Map();
}
connect(server) {
this.server = new Server({ noServer: true });
this.interval = setInterval(this.checkAll.bind(this), 10000);
this.server.on("close", this.close.bind(this));
this.server.on("connection", this.add.bind(this));
server.on("upgrade", (request, socket, head) => {
console.log("ws upgrade");
const id = url.parse(request.url, true).query.storeId;
if (id) {
this.server.handleUpgrade(request, socket, head, (ws) =>
this.server.emit("connection", id, ws)
);
} else {
socket.destroy();
}
});
}
add(id, socket) {
console.log("ws add");
socket.isAlive = true;
socket.on("pong", () => (socket.isAlive = true));
socket.on("close", this.remove.bind(this, id));
this.connections.set(id, socket);
}
send(id, message) {
console.log("ws sending message");
const connection = this.connections.get(id);
connection.send(JSON.stringify(message));
}
broadcast(message) {
console.log("ws broadcast");
this.connections.forEach((connection) =>
connection.send(JSON.stringify(message))
);
}
isAlive(id) {
return !!this.connections.get(id);
}
checkAll() {
this.connections.forEach((connection) => {
if (!connection.isAlive) {
return connection.terminate();
}
connection.isAlive = false;
connection.ping("");
});
}
remove(id) {
this.connections.delete(id);
}
close() {
clearInterval(this.interval);
}
}
module.exports = NotifierService;
routes.js
const express = require("express");
const router = express.Router();
module.exports = (webSocketNotifier) => {
router.post("/newPurchase/:id", (req, res, next) => {
webSocketNotifier.send(req.params.id, "purchase made");
res.status(200).send();
});
return router;
};
The list of connected clients is stored inside the wss object. You can retrieve and loop through them like this:
wss.clients.forEach((client) => {
if (client.userId === current_user_id && client.readyState === WebSocket.OPEN) {
// this is the socket of your current user
}
})
Now you need to somehow identify your client. You can do it by assigning some id to this client on connection:
wss.on('connection', async (ws, req) => {
// req.url is the url that user connected with
// use a query parameter on connection, or an authorization token by which you can identify the user
// so your connection url will look like
// http://example.com/socket?token=your_token
ws.userId = your_user_identifier
....
})
To broadcast use:
wss.clients.forEach((client) => {
if (client.readyState === WebSocket.OPEN) {
client.send(data);
}
});
If your controller and socket code are in different files (and I am sure they will be), you will have to export the wss object from your socket file and import it in your controller.
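A minimal sketch of that export/import split; the socket.js and controller.js file names and the init/getWss/notifyAll helpers are just examples, not part of the answer above.
// socket.js — create the WebSocket server once and export access to it (illustrative)
const WebSocket = require('ws');

let wss;

function init(server) {
  wss = new WebSocket.Server({ server }); // attach to the existing HTTP server
  wss.on('connection', (ws, req) => {
    // identify the user here, e.g. from a token in req.url
    ws.userId = null;
  });
  return wss;
}

module.exports = { init, getWss: () => wss };

// controller.js — import the shared instance and broadcast from a handler
const WebSocket = require('ws');
const { getWss } = require('./socket');

function notifyAll(data) {
  getWss().clients.forEach((client) => {
    if (client.readyState === WebSocket.OPEN) {
      client.send(JSON.stringify(data));
    }
  });
}

module.exports = { notifyAll };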
