Unit test cases for a WebSocket server using the "ws" library with Jest - Node.js

I am trying to figure out how to write unit test cases for a WebSocket server that uses the ws library.
I did look at jest-websocket-mock, but I think that is for browser-based APIs, and I want to test the server itself using Jest.
Basic Code:
Server.js
import { createServer } from 'https';
import { WebSocketServer } from 'ws';
import { readFileSync } from 'fs';
const server = createServer({
cert: readFileSync(config.certs.sslCertPath),
key: readFileSync(config.certs.sslKeyPath),
});
const wss = new WebSocketServer({ noServer: true });
server.on('upgrade', (request, socket, head) => {
const origin = request && request.headers && request.headers.origin;
const corsRegex = <regex>;
if (origin && origin.match(corsRegex) != null) {
wss.handleUpgrade(request, socket, head, (ws) => {
wss.emit('connection', ws, request);
});
} else {
socket.destroy();
}
});
wss.on('connection', (ws, req) => {
ws.on('message', (message) => {
try {
console.log(message);
} catch (error) {
console.log(error);
}
});
ws.on('close', () => {
console.log('close');
});
ws.on('error', (error) => {
console.log(error);
});
});
Can someone please help me with how I can test the original server?

You need to create some kind of dependency injection mechanism here.
Let's, for example, move all the socket initialization logic into a separate function:
function initSocketEvents(wss) {
wss.on('connection', (ws, req) => {
ws.on('message', (message) => {
try {
console.log(message);
} catch (error) {
console.log(error);
}
});
ws.on('close', () => {
console.log('close');
});
ws.on('error', (error) => {
console.log(error);
});
});
return wss;
}
Now, at server initialization, just call the function from that other file:
...
const {initSocketEvents} = require("./socket-handler")
const wss = new WebSocketServer({ noServer: true });
initSocketEvents(wss);
...
Everything stays the same, except that it is now much easier to test.
In the test file:
const {initSocketEvents} = require("./socket-handler")
const { assert } = require('console');
const { EventEmitter } = require('events');
class MyTestWebSocket extends EventEmitter { }
const mockWSS = new MyTestWebSocket()
initSocketEvents(mockWSS)
mockWSS.emit('connection', mockWSS)
assert(mockWSS.listenerCount('connection')===1)
assert(mockWSS.listenerCount('message')===1)
assert(mockWSS.listenerCount('close')===1)
assert(mockWSS.listenerCount('error')===1)
Now it should be straightforward to extract each listener's logic into functions that are injected into initSocketEvents, and then assert the desired behavior.
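For a Jest-flavored version of the same idea, a minimal sketch could look like the one below. It only assumes the initSocketEvents function above; the separate mock socket emitter is an addition of mine so the per-connection listeners can be counted on their own object rather than on the server mock.
// socket-handler.test.js - a minimal Jest sketch of the approach above
const { EventEmitter } = require('events');
const { initSocketEvents } = require('./socket-handler');
describe('initSocketEvents', () => {
  test('registers message/close/error listeners on every new connection', () => {
    const mockWss = new EventEmitter();    // stands in for the WebSocketServer
    const mockSocket = new EventEmitter(); // stands in for a single client socket
    initSocketEvents(mockWss);
    mockWss.emit('connection', mockSocket, {});
    expect(mockWss.listenerCount('connection')).toBe(1);
    expect(mockSocket.listenerCount('message')).toBe(1);
    expect(mockSocket.listenerCount('close')).toBe(1);
    expect(mockSocket.listenerCount('error')).toBe(1);
  });
  test('logs incoming messages', () => {
    const logSpy = jest.spyOn(console, 'log').mockImplementation(() => {});
    const mockWss = new EventEmitter();
    const mockSocket = new EventEmitter();
    initSocketEvents(mockWss);
    mockWss.emit('connection', mockSocket, {});
    mockSocket.emit('message', 'hello');
    expect(logSpy).toHaveBeenCalledWith('hello');
    logSpy.mockRestore();
  });
});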

Related

How to setup routes for websocket event actions in express.js?

I'm trying to create a router for every websocket event action.
Let's say the user is in the chat room; I want the URL to be ws://localhost:3001/chat, but for another event, say vote, it should be something like ws://localhost:3001/vote.
I found this example: const ws = new ws.Server({server:httpServer, path:"/chat"}), but this is a generic one and doesn't apply to different event actions.
Here is my structure; hopefully it will make more sense.
As you can see, there are two event actions, chat and vote, and my goal is to create a route for each action type.
Socket.ts
import { IncomingMessage } from 'http';
import internal from 'stream';
import { WebSocketServer } from 'ws';
const wss = new WebSocketServer({ noServer: true });
wss.on('connection', (ws, request) => {
...
ws.on('message', (data: string) => {
try {
switch (action) {
case 'chat':
wss.clients.forEach((client) => {
if (client.readyState === WebSocket.OPEN) {
client.send(JSON.stringify(message));
}
});
break;
case 'vote':
wss.clients.forEach((client) => {
if (client.readyState === WebSocket.OPEN) {
client.send(JSON.stringify(voteMessage));
}
});
break;
default: {
throw new Error('Unknown message type.');
}
}
} catch (error) {
ws.send(`Error: ${error.message}`);
}
});
});
const socketUpgrade = (
request: IncomingMessage,
socket: internal.Duplex,
head: Buffer
) =>
wss.handleUpgrade(request, socket, head, (ws) =>
wss.emit('connection', ws, request)
);
export { socketUpgrade };
app.ts
import express from 'express';
import { socketUpgrade } from './Socket';
const app = express();
const port = 3001;
const server = app.listen(port, () => {
console.log(`listening on port ${port}`);
});
server.on('upgrade', socketUpgrade);
Any help will be appreciated
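For context, one common pattern for this (a sketch only, not taken from the post above) is to create a separate noServer WebSocketServer per path and dispatch in the HTTP upgrade handler based on request.url:
// sketch: one WebSocketServer per path, dispatched in the upgrade handler
const express = require('express');
const { WebSocketServer } = require('ws');
const app = express();
const server = app.listen(3001);
const chatWss = new WebSocketServer({ noServer: true });
const voteWss = new WebSocketServer({ noServer: true });
chatWss.on('connection', (ws) => ws.on('message', (data) => { /* handle chat */ }));
voteWss.on('connection', (ws) => ws.on('message', (data) => { /* handle vote */ }));
server.on('upgrade', (request, socket, head) => {
  const { pathname } = new URL(request.url, `http://${request.headers.host}`);
  const wss = pathname === '/chat' ? chatWss : pathname === '/vote' ? voteWss : null;
  if (!wss) {
    socket.destroy();
    return;
  }
  wss.handleUpgrade(request, socket, head, (ws) => wss.emit('connection', ws, request));
});
That way each action gets its own URL and its own connection handler, instead of a single message handler switching on an action field.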

Jest websocket test is throwing an error: thrown: "Exceeded timeout of 5000 ms for a test"

I am using the ws library to create a websocket server.
I am getting this error when running the test.
src/server/server.ts
import { Server } from 'http';
import WebSocket from 'ws';
import { sessionHandler } from './sessionHandler';
export const websocketServer = new WebSocket.Server({
noServer: true,
clientTracking: false,
path: '/ws',
});
const WEBSOCKET_CHECK_IDLE_CONNECTION_FREQUENCY =
+process.env.WEBSOCKET_CHECK_IDLE_CONNECTION_FREQUENCY;
function checkIfConnectionIsAlive(ws: WebSocket) {
if (ws.isAlive === false) {
clearTimeout(ws.timer);
return ws.terminate();
}
ws.isAlive = false;
ws.ping();
ws.timer = setTimeout(
checkIfConnectionIsAlive,
WEBSOCKET_CHECK_IDLE_CONNECTION_FREQUENCY,
ws,
);
}
function heartbeat() {
this.isAlive = true;
}
export function upgradeToWebSocketConnection(server: Server) {
server.on('upgrade', (request, socket, head) => {
sessionHandler(request, {}, () => {
websocketServer.handleUpgrade(request, socket, head, (ws) => {
websocketServer.emit('connection', ws, request);
});
});
});
}
websocketServer.on('connection', (ws, request) => {
ws.isAlive = true;
ws.on('pong', heartbeat);
ws.send('hello');
checkIfConnectionIsAlive(ws);
});
websocketServer.on('close', () => {
console.debug('Shutting down websocket server');
});
I wrote a simple test to check if I get that hello message on connecting to the server
src/tests/websocket.test.ts
import http, { Server } from 'http';
import {
upgradeToWebSocketConnection,
websocketServer,
} from 'server/websocket';
import WebSocket from 'ws';
function startServer(port: number): Promise<Server> {
const server = http.createServer();
return new Promise((resolve) => {
server.listen(port, () => resolve(server));
});
}
function waitForSocketState(socket: WebSocket, state: number): Promise<void> {
return new Promise(function (resolve) {
setTimeout(function () {
if (socket.readyState === state) {
resolve();
} else {
waitForSocketState(socket, state).then(resolve);
}
});
});
}
let server: Server;
describe('Websocket server tests', () => {
beforeEach(async () => {
server = await startServer(8000);
upgradeToWebSocketConnection(server);
});
afterEach(() => {
websocketServer.emit('close');
server.close();
});
test('Server sends hello to the client on connect', async () => {
const client = new WebSocket(`ws://localhost:8000/ws`);
let message: WebSocket.Data;
client.on('message', (data) => {
message = data.toString();
client.close();
});
await waitForSocketState(client, client.OPEN);
await waitForSocketState(client, client.CLOSED);
expect(message).toBe('hello');
});
});
Can someone kindly tell me how to fix this error and make the test succeed? My guess is that the problem is in the setTimeout section.
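For what it's worth, one way to rule out the polling helper that the question suspects (a sketch only, not a confirmed fix for the timeout) is to resolve on the client socket's own events instead of looping with setTimeout:
// sketch: resolve on the socket's own open/close events instead of polling
function waitForSocketState(socket: WebSocket, state: number): Promise<void> {
  return new Promise((resolve) => {
    if (socket.readyState === state) {
      resolve();
    } else if (state === socket.OPEN) {
      socket.once('open', () => resolve());
    } else {
      socket.once('close', () => resolve());
    }
  });
}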

How to stream json data from one nodejs server to another and process it at the receiver at runtime?

What I'm basically trying to achieve is to get all items of a MongoDB collection on a Node.js server, stream these items (in JSON format) over REST to another Node.js server, and pipe the incoming read stream into stream-json in order to persist the parsed objects in another MongoDB afterwards.
(I need to use streams because my items can be deeply nested objects, which would consume a lot of memory. Additionally, I'm unable to access the first MongoDB from the second server directly due to strict network segmentation.)
The code I have so far is actually already working for smaller amounts of data, but one collection holds about 1.2 GB, and there the processing on the receiving side keeps failing.
Here's the code of the sending server:
export const streamData = async (res: Response) => {
try {
res.type('json');
const amountOfItems = await MyModel.count();
if (JSON.stringify(amountOfItems) !== '0'){
const cursor = MyModel.find().cursor();
let first = true;
cursor.on('error', (err) => {
logger.error(err);
});
cursor.on('data', (doc) => {
if (first) {
// open json array
res.write('[');
first = false;
} else {
// add the delimiter before every object that isn't the first
res.write(',');
}
// add json object
res.write(`${JSON.stringify(doc)}`);
});
cursor.on('end', () => {
// close json array
res.write(']');
res.end();
logger.info('REST-API-Call to fetchAllItems: Streamed all items to the receiver.');
});
} else {
res.write('[]');
res.end();
logger.info('REST-API-Call to fetchAllItems: Streamed an empty response to the receiver.');
}
} catch (err) {
logger.error(err);
return [];
}
};
And that's the receiving side:
import { MyModel } from '../models/my-model';
import axios from 'axios';
import { logger } from '../services/logger';
import StreamArray from 'stream-json';
import { streamArray } from 'stream-json/streamers/StreamArray';
import { pipeline } from 'stream';
const persistItems = async (items:Item[], ip: string) => {
try {
await MyModel.bulkWrite(items.map(item => {
return {
updateOne: {
filter: { 'itemId': item.itemId },
update: item,
upsert: true,
},
};
}));
logger.info(`${ip}: Successfully upserted items to mongoDB`);
} catch (err) {
logger.error(`${ip}: Upserting items to mongoDB failed due to the following error: ${err}`);
}
};
const getAndPersistDataStream = async (ip: string) => {
try {
const axiosStream = await axios(`http://${ip}:${process.env.PORT}/api/items`, { responseType: 'stream' });
const jsonStream = StreamArray.parser({ jsonStreaming : true });
let items : Item[] = [];
const stream = pipeline(axiosStream.data, jsonStream, streamArray(),
(error) => {
if ( error ){
logger.error(`Error: ${error}`);
} else {
logger.info('Pipeline successful');
}
},
);
stream.on('data', (i: any) => {
items.push(<Item> i.value);
// wait until the array contains 500 objects, then bulkWrite them to the database
if (items.length === 500) {
persistItems(items, ip);
items = [];
}
});
stream.on('end', () => {
// bulkwrite the last items to the mongodb
persistItems(items, ip);
});
stream.on('error', (err: any) => {
logger.error(err);
});
await new Promise(fulfill => stream.on('finish', fulfill));
} catch (err) {
if (err) {
logger.error(err);
}
}
}
As I said, the problem only occurs with the bigger collection holding about 1.2 GB of data.
It seems to occur a few seconds after the sending server closes the stream.
This is the error message I get at the receiving server:
ERROR: Premature close
err: {
"type": "NodeError",
"message": "Premature close",
"stack":
Error [ERR_STREAM_PREMATURE_CLOSE]: Premature close
at IncomingMessage.onclose (internal/streams/end-of-stream.js:75:15)
at IncomingMessage.emit (events.js:314:20)
at Socket.socketCloseListener (_http_client.js:384:11)
at Socket.emit (events.js:326:22)
at TCP.<anonymous> (net.js:676:12)
"code": "ERR_STREAM_PREMATURE_CLOSE"
}
Can I somehow prevent the read stream from closing too early?
The only workaround I can imagine right now is to save the stream to a local file first, then create a new read stream from that file, process/persist the data, and delete the file afterwards, although I would prefer not to do that. Additionally, I'm not quite sure whether that will work or whether the premature-close issue will remain when saving a large dataset to a file.
Edit: Well, as I guessed, this approach results in the same error.
Is there a better approach I'm not aware of?
Thanks in advance!
Found a solution using a combination of:
WebSockets with the stream API, and websocket-express to trigger the streaming over WebSockets via routes.
Backend
app.ts
import router from './router/router';
import WebSocketExpress from 'websocket-express';
const app = new WebSocketExpress();
const port = `${process.env.APPLICATION_PORT}`;
app.use(router);
app.listen(port, () => {
console.log(`App listening on port ${port}!`);
});
router.ts
import { Router } from 'websocket-express';
import streamData from './streamData';
const router = new Router();
router.ws('/my/api/path', streamData);
export default router;
streamData.ts (did some refactoring to the above version)
import { MyModel } from '../models/my-model';
import { createWebSocketStream } from 'ws';
export const streamData = async (res: Response) => {
const ws = await res.accept();
try {
const duplex = createWebSocketStream(ws, { encoding: 'utf8' });
duplex.write('[');
let prevDoc: any = null;
// ignore _id since it's going to be upserted into another database
const cursor = MyModel.find({}, { _id: 0 } ).cursor();
cursor.on('data', (doc) => {
if (prevDoc) {
duplex.write(`${JSON.stringify(prevDoc)},`);
}
prevDoc = doc;
});
cursor.on('end', () => {
if (prevDoc) {
duplex.write(`${JSON.stringify(prevDoc)}`);
}
duplex.end(']');
});
cursor.on('error', (err) => {
ws.close();
});
duplex.on('error', (err) => {
ws.close();
cursor.close();
});
} catch (err) {
ws.close();
}
};
Client (or the receiving server)
import { MyModel } from '../models/my-model';
import StreamArray from 'stream-json';
import { streamArray } from 'stream-json/streamers/StreamArray';
import { pipeline } from 'stream';
import WebSocket, { createWebSocketStream } from 'ws';
export const getAndPersistDataStream = async (ip: string) => {
try {
const ws = new WebSocket(`ws://${ip}:${process.env.PORT}/my/api/path`);
try {
const duplex = createWebSocketStream(ws, { encoding: 'utf8' });
const jsonStream = StreamArray.parser({ jsonStreaming: true });
let items: Items[] = [];
const stream = pipeline(duplex, jsonStream, streamArray(), error => {
if (error) {
ws.close();
}
});
stream.on('data', (i: any) => {
items.push(<Items>i.value);
if (items.length === 500) {
persistItems(items, ip);
items = [];
}
});
stream.on('end', () => {
persistItems(items, ip);
ws.close();
});
stream.on('error', (err: any) => {
ws.close();
});
await new Promise(fulfill => stream.on('finish', fulfill));
} catch (err) {
ws.close();
}
} catch (err) {
}
};
(I removed a lot of error-logging code, which is why the catch block is empty.)

How to use node-redis client in node typescript

I have a Node TypeScript project where I have created a TS file for the Redis connection, which is below.
import { createClient } from 'redis';
import { promisify } from 'util';
import Logger from 'utils/logger';
const { REDIS_URL = 'redis://localhost:6379' } = process.env;
const options = {
legacyMode: true,
url: REDIS_URL,
}
const client = createClient(options);
// client.connect();
client.on('connect', () => {
Logger.info("Connected to Redis");
});
client.on('error', err => {
Logger.error('redis error: ' + err);
init();
});
client.on('ready', err => {
Logger.info("redis is ready");
});
client.on('end', err => {
Logger.info("redis connection is ended");
});
//reconnecting
client.on('reconnecting', err => {
Logger.info("redis connection is reconnecting");
});
const init = async () => {
await client.connect();
}
export { init,client };
Then I am importing it and calling it in index.ts:
import { init } from 'dataSource/redis';
(async () => {
await init();
})();
app.listen(PORT,() => {
// console.log(`server is running on PORT ${PORT}`)
Logger.info(`Server Started in port : ${PORT}!`);
})
Then I am trying to use the client in my controller file:
import { client as redisClient } from 'datasource/redis';
redisClient.setEx("Key",Number(process.env.REDIS_EXPIRE_TIME),"VALUE");
but I am getting this error
Error: The client is closed
uncomment the "client.connect();" on line 13.
This should make it work
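Put differently, the v4 client returned by createClient() does not connect on its own. A minimal sketch of just the relevant lines (reusing the names from the question, not a full file):
// dataSource/redis.ts - sketch of the relevant lines only
const client = createClient(options);
// the client never connects by itself; without this call (or an awaited init()
// that completes before the controller runs), setEx fails with "The client is closed"
client.connect();
export { init, client };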

Acknowledge RabbitMQ message after socket IO event received from React browser

I have a Node server which consumes messages from a RabbitMQ queue and forwards them to a React frontend as a socket.io event. In the frontend, I have a button click which sends a socket.io event back to the Node server.
Currently, the Node server only logs the receipt of the socket.io event. In addition to logging, I would like to send a message ack to the RabbitMQ server upon receipt of the socket.io event.
The logging is working fine, but I've been struggling with the message acknowledgement part.
My node server looks like this:
server.js
const io = require('./socket');
const amqp = require('amqplib/callback_api');
const CONFIG = require('./config.json');
amqp.connect(`amqp://${CONFIG.host}`, (err, connection) => {
if (err) {
throw err;
}
connection.createChannel((err, channel) => {
if (err) {
throw err;
}
const queue = CONFIG.queueName;
channel.assertQueue(queue, {
durable: true
});
console.log(` [*] Waiting for messages in ${queue}.`);
channel.consume(queue, function(msg) {
console.log(' [x] Request received from RabbitMQ: %s', msg.content.toString());
io.client.emit('sendReview', msg.content.toString());
}, {
noAck: false
});
})
});
socket.js
const io = require('socket.io')();
const port = process.env.PORT || 8000;
module.exports = {
client: io.on('connection', (client) => {
console.log(' [*] New client connected with ID: ' + client.id);
client.on('reportReview', (msg) => {console.log(` [x] Response received from browser: ${msg}`)});
client.on('disconnect', () => console.log(` [*] User ${client.id} disconnected.`));
})
};
io.listen(port);
console.log(`Listening on port ${port}`);
My frontend looks like this:
App.js
import React, { Component } from "react";
import * as API from './api';
export default class App extends Component {
constructor(props, context) {
super(props, context);
this.state = {
data: ["Whoops - no reviews available"],
};
this.updateReview = this.updateReview.bind(this);
this.onMessageReceived = this.onMessageReceived.bind(this);
this.handleClick = this.handleClick.bind(this);
}
handleClick() {
API.reportClick(this.state.data[0]);
this.updateReview()
}
updateReview() {
const newArray = this.state.data.slice(1);
if (newArray.length === 0) {
this.setState({data: ["Whoops - no reviews available"]})
} else {
this.setState({data: newArray})
}
}
onMessageReceived(msg) {
console.log(`Request for review received: ${msg}`);
const updatedData = this.state.data.concat(msg);
this.setState({data: updatedData});
if (this.state.data[0] === "Whoops - no reviews available") {
this.updateReview()
}
}
componentDidMount() {
API.subscribe(this.onMessageReceived)
}
render() {
return (
<div className="App">
<p className="App-intro">
Click to confirm review #: {this.state.data[0]}
</p>
<button onClick={this.handleClick}>Click</button>
</div>
);
}
}
Api.js
import clientSocket from 'socket.io-client';
const socket = clientSocket('http://localhost:8000');
function subscribe(onMessageReceived) {
socket.on('sendReview', onMessageReceived);
}
function reportClick(msg) {
socket.emit('reportReview', msg);
}
export { reportClick, subscribe };
As far as I understand, in order to send a message ack I would have to call channel.ack(msg); somewhere on the Node server. However, I am not sure how to pass the channel object to the io module. I have also tried putting the socket.io code in server.js so that I would have access to the channel object, but I have not been able to get that to work either; I have not managed to get the amqp connection and the socket.io connection to work together other than with my current approach of a separate io module.
Any help would be very much appreciated.
I ended up getting it to work by having the socket code in server.js like this:
const io = require('socket.io')();
function socketIOHandler(callback) {
io.on('connection', (socket) => {
socket.on('error', function(err) {
console.log(err.stack);
});
callback(socket);
});
}
var amqpConn = null;
// start amqp connection to rabbit mq
function start() {
amqp.connect(`amqp://${CONFIG.host}`, (err, connection) => {
if (err) {
throw err;
}
amqpConn = connection;
// start consume worker when connected
startWorker();
});
}
function startWorker() {
socketIOHandler((socket) => {
amqpConn.createChannel((error, channel) => {
... <---- all the bits as before
socket.on('msgSent', (msg) => {
channel.ack(msg);
});
})
});
io.listen(port);
}
start();
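For completeness, a sketch of how the elided consume logic and the ack can fit together (this reuses CONFIG, io, port, and amqpConn from the snippets above, closes over the delivered RabbitMQ message so channel.ack receives it, and assumes the browser confirms via the reportReview event from Api.js; it is not the poster's exact code):
function startWorker() {
  socketIOHandler((socket) => {
    amqpConn.createChannel((error, channel) => {
      if (error) {
        throw error;
      }
      const queue = CONFIG.queueName;
      channel.assertQueue(queue, { durable: true });
      channel.consume(queue, (msg) => {
        // forward the RabbitMQ message to the browser
        socket.emit('sendReview', msg.content.toString());
        // ack the original delivery once the browser confirms it
        socket.once('reportReview', () => {
          channel.ack(msg);
        });
      }, { noAck: false });
    });
  });
  io.listen(port);
}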
