I've set up a simple server-sent events endpoint for testing, which sends data periodically once a client connects. When the client reloads the page or moves to another page, the browser closes the connection and stops receiving the events.
app.get("/stream", (req, res) =>{
res.writeHead(200, {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive'
});
const data = JSON.parse(fs.readFileSync("./data/group_list.json"))
let i = 0;
const intervalId = setInterval(function() {
++i;
console.log('iteration', i);
if ( i === 5 ) {
clearInterval(intervalId);
res.end();
return 1
}
res.write('data:' + JSON.stringify(data) + '\n\n');
res.flush();
}, 3000);
})
Here's my event handler in React.
handleSSE = () => {
  console.log('here');
  const url = "/test/stream"; // url can be your server url
  if ('EventSource' in window) {
    let source = new EventSource(url);
    source.onopen = (e) => console.log('cnx successful', e);
    source.onmessage = (e) => console.log('data-', JSON.parse(e.data));
    source.onerror = (e) => {
      console.log('cnx failed', e);
      source.close();
    };
  }
}
The SSE only stops emitting data once "i" reaches a certain number. I'd like the server to detect when the client closes the connection and stop emitting then; what I specifically need is a way for the setInterval() on my server to stop when the client disconnects.
You need to listen for the window's beforeunload event inside the handleSSE function so the EventSource is closed before the page unloads.
To do that, add an event listener for beforeunload inside handleSSE.
Your final handleSSE function will be:
handleSSE = () => {
  console.log('here');
  const url = "/test/stream"; // url can be your server url
  if ('EventSource' in window) {
    let source = new EventSource(url);
    source.onopen = (e) => console.log('cnx successful', e);
    source.onmessage = (e) => console.log('data-', JSON.parse(e.data));
    source.onerror = (e) => {
      console.log('cnx failed', e);
      source.close();
    };
    window.addEventListener("beforeunload", () => {
      if (source.readyState !== EventSource.CLOSED) {
        source.close();
      }
    });
  }
}
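Note that closing the EventSource only tells the browser to drop the connection; the server still has to notice the disconnect to stop its timer, which is what the question asked for. In Node/Express, the request emits close when the underlying socket goes away, so a minimal sketch of the server side (the payload object here is just a placeholder) could clear the interval there:
app.get("/stream", (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive'
  });
  const intervalId = setInterval(() => {
    res.write('data: ' + JSON.stringify({ tick: Date.now() }) + '\n\n');
  }, 3000);
  // 'close' fires on reload, navigation, or tab close
  req.on('close', () => {
    clearInterval(intervalId);
    res.end();
  });
})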
I'm trying to use the following example of server-sent events. It seems like the server is emitting the data, but the message event never fires and no data is recorded in the event stream (checked through the developer tools).
Angular code (service):
getAlertsEvent(fieldIds: Guid[]): Observable<responseModel.LoanAlert> {
  return new Observable(obs => {
    fieldIds.forEach(fieldId => {
      const source = new EventSource(`http://localhost:3000/loan/alerts/${fieldId}`);
      alert('Successfully creates the EventSource; I see result 200 in the Network tab but with 0 events');
      source.onmessage = (ev): void => {
        this.zone.run(() => {
          alert('This alert will not happen');
          obs.next(ev.data);
        });
      };
      source.onerror = (err): void => this.zone.run(() => obs.error(err));
      // Also tried this way with no luck:
      // source.addEventListener('message', (event) => {
      //   obs.next(event.data);
      // });
    });
  });
}
Component:
this.loansService.getAlertsEvent(this.fields.map(field => field.fieldId)).subscribe(alert => {
  console.log(alert);
});
Node.js code:
const express = require('express');
const parser = require('body-parser');
const app = express();
const EventEmitter = require('events');
const Stream = new EventEmitter();
app.unsubscribe(parser.json());
app.use(
parser.urlencoded({
extended: true,
})
);
app.get('/loan/alerts/:fieldId', function(req, res) {
res.writeHead(200, {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Access-Control-Allow-Origin': "*",
Conection: 'keep-alive'
});
Stream.on(req.params.fieldId, function(event, data) {
res.write(JSON.stringify(data));
});
});
setInterval(function() {
  const item = {
    formId: 51415,
    alertId: 'asdfasdfasdf',
    messageStatus: 'NEW',
    messageType: 'ACTION_MESSAGE_FROM_SERVER',
    actionType: 'NAVIGATION',
    message: 'New Message!',
    archiverId: '12345',
    documentCode: 3,
    from: 'Internal Server Message',
    messageTimestamp: new Date().toJSON(),
    markedAsRead: false,
  };
  Stream.emit('aed29580-09fd-e411-b8e1-e61f13cf5d4b', 'message', item);
}, 5000);

app.listen(3000);
console.log('Express E2e Mock server is running');
When manually going to http://localhost:3000/loan/alerts/aed29580-09fd-e411-b8e1-e61f13cf5d4b I see the messages printed to the screen, so I guess this is either an Angular issue or a missing security header.
Thanks!
I just realized, thanks to this answer, that events must be formatted in a specific way. I changed the value of res.write accordingly:
Stream.on(req.params.fieldId, function(event, data) {
  res.write('event: ' + String(event) + '\n' + 'data: ' + JSON.stringify(data) + '\n\n');
});
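For reference, with that change each event arrives on the wire as a block of event:/data: lines terminated by a blank line (the \n\n); without that blank line the browser buffers indefinitely and onmessage never fires. One frame for the item above would look roughly like this (payload abbreviated by me):
event: message
data: {"formId":51415,"message":"New Message!","markedAsRead":false}

Since the event name here is message, the default onmessage handler fires; any other name would require source.addEventListener('<name>', ...) on the client instead.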
When I POST data to my app, I write it to a file and share it with the other instances via PUT. I want to return from the POST with the status of each stream (the file write and the PUTs).
putResults is an array on the enclosing class, meant to hold the results of each request.
How do I collect the responses? I could return an array of Promises from createWriteStreams, but then how could I req.pipe to them? Can you stream to a Promise?
post(req, res, next) {
  let listeners = this.createWriteStreams();
  let c = listeners.length;
  for (let i = 0; i < c; i++) {
    req.pipe(listeners[i]);
  }
  /* What do I put here to return after all requests finish? */
}
put(req, res, next) {
  var fstream = fs.createWriteStream(this.path);
  req.pipe(fstream);
  req.on('end', () => {
    fstream.close();
    res.status(201).send("OK");
  });
  req.on('error', (err) => {
    res.status(500).send(err);
  });
}
createWriteStreams() {
  let listeners = [];
  // We always want to save to a file
  listeners.push(fs.createWriteStream(this.path).on('close', () => {
    this.putResults.push({ host: hutil.myHost, status: 201 });
  }));
  // If there are other servers in the current env, send to them, too!
  let otherGuys = hutil.otherServers();
  if (otherGuys.length > 0) {
    for (let i = 0; i < otherGuys.length; i++) {
      let opts = {
        hostname: hutil.fq(otherGuys[i]),
        port: this.port,
        path: this.endpoint,
        method: 'PUT',
      };
      let req = https.request(opts, res => {
        this.putResults.push({ host: opts.hostname, status: res.statusCode });
      });
      req.on('error', (e) => {
        this.putResults.push({ host: opts.hostname, status: e });
      });
      listeners.push(req);
    }
  }
  return listeners;
}
Well, in case anyone ever has this question, the key point of knowledge is that an input stream can be passed around as if it had multiple spigots on it: Node lets you attach several Writable destinations to one Readable, and opening one doesn't prematurely spray data all over the other places you're placing the hose.
So, since you cannot stream to a Promise, you still stream to the streams, and you can take your time setting them up, as long as the pipes are attached before data starts flowing. Here's my solution: pass the request to the streams, wrapped in the promises.
function post(req, res, next) {
  let promises = this.streamPromises(req);
  Promise.allSettled(promises).then((results) => {
    // Unwrap the allSettled containers - fulfilled entries carry .value, rejected ones .reason.
    // Great design, jeez. :-\
    let noContainer = results.map(item => item.value).filter(i => i != undefined);
    noContainer.push(...results.map(item => item.reason).filter(i => i != undefined));
    res.status(200).json(noContainer);
  }).catch(err => {
    log.warn(`POST request for ${this.filename} failed, at least in part: ${err}`);
    res.status(200).json({ host: hutil.myHost, status: err });
  });
}
function put(req, res, next) {
  var fstream = fs.createWriteStream(this.fqFile);
  req.pipe(fstream);
  req.on('end', () => {
    fstream.close();
    log.info(`${req.transID} Saved data to ${this.fqFile} sent by ${req.referer}`);
    res.status(201).send("OK");
  });
  req.on('error', (err) => {
    log.warn(`${req.transID} Error receiving/saving PUT file ${this.fqFile} sent by ${req.referer}`);
    res.status(500).send(err);
  });
}
function streamPromises(postReq) {
  let listeners = [];
  listeners.push(this.streamLocalFrom(postReq)); // add the file first
  // Send to other servers in the environment
  let otherGuys = hosts.peerServers();
  if (otherGuys.length > 0) {
    for (let i = 0; i < otherGuys.length; i++) {
      let opts = {
        hostname: hosts.fq(otherGuys[i]),
        thatHost: otherGuys[i], // tucked this into the object to avoid parsing the fq hostname
        port: appconfig.port, // we are all listening here
        path: this.endpoint,
        method: 'PUT',
        timeout: 1000,
        ca: fs.readFileSync(appconfig.authorityFile)
      };
      let p = new Promise((resolve, reject) => {
        let req = https.request(opts, res => {
          log.info(`${this.filename}: Response from ${opts.hostname}:${opts.port}: ${res.statusCode}`);
          // let hn = opts.hostname.match(/(.*?)\./)[1] || opts.hostname;
          resolve({ host: opts.thatHost, status: res.statusCode });
        });
        req.on('error', (e) => {
          log.warn(`Error piping ${this.filename} to ${opts.hostname}:${opts.port}: ${e}`);
          reject({ host: opts.thatHost, status: e });
        });
        postReq.pipe(req);
      });
      listeners.push(p);
    }
  }
  return listeners;
}
function streamLocalFrom(postReq) {
  return new Promise((resolve, reject) => {
    let fileError = false;
    let fstream = fs.createWriteStream(this.fqFile);
    fstream.on('close', () => {
      if (!fileError) {
        log.info(`Saved data to file at ${this.fqFile}`);
        resolve({ host: hutil.myHost, status: 201 });
      }
    });
    fstream.on('error', (err) => {
      log.warn(`Could not save ${this.fqFile} because ${err}`);
      fileError = true;
      reject({ host: hutil.myHost, status: 500 });
      fstream.close();
    });
    postReq.pipe(fstream);
  });
}
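As a side note on the unwrapping in post() above: Promise.allSettled never rejects; it resolves with one container object per input promise, and the shape differs between fulfilled and rejected entries, which is why both .value and .reason have to be collected. A quick standalone illustration:
Promise.allSettled([
  Promise.resolve({ host: 'web1', status: 201 }),
  Promise.reject({ host: 'web2', status: 'ECONNREFUSED' })
]).then(results => {
  console.log(results);
  // [ { status: 'fulfilled', value: { host: 'web1', status: 201 } },
  //   { status: 'rejected',  reason: { host: 'web2', status: 'ECONNREFUSED' } } ]
});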
I'm programming a method that uses web3.js to transfer a token through a smart contract.
When you launch the transfer, you get the txHash back immediately; to get all the other values associated with the transfer, you have to subscribe to an event and wait for it to fire.
I have to return those values to the customer, so I subscribe to the transfer event and wait for it to broadcast before returning the data.
The problem is that this might take a long time (anywhere from 10 seconds to hours) and it sometimes gives me a timeout, so the frontend team suggested that they provide me a webhook endpoint and I forward the event information to it when it happens.
So I have to split the process in two:
1. Do the transfer and return the txHash, then kick off a separate process (2) that listens for the event.
2. Subscribe to the event and, when it fires, forward it to the webhook provided.
My code right now looks something like this:
function transfer(req, res, next) {
  try {
    contractRouter.transfer(from, to, tokenId).then(function(result) {
      transferToWebhook(whHostname, whPath, result);
      next();
    }).fail(function(err) {
      return res.status(500).json({ status: 'error', name: err.name, message: err.message });
    });
  } catch (ex) {
    return res.status(500).json({ status: 'error', name: ex.name, message: ex.message });
  }
}
and the function that transfers to webhook looks like this:
function transferToWebhook(whHostname, whPath, txHash) {
  contractRouter.getTransferEvent(txHash).then(function(result) {
    var postData = JSON.stringify(result);
    var options = {
      hostname: whHostname,
      port: 80, // note: https.request over port 80 will fail the TLS handshake; use the http module here, or port 443 for TLS
      path: whPath,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      }
    };
    var req = https.request(options, (res) => {
      console.log(`STATUS: ${res.statusCode}`);
      console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
      res.setEncoding('utf8');
      res.on('data', (chunk) => {
        console.log(`BODY: ${chunk}`);
      });
      res.on('end', () => {
        console.log('No more data in response.');
      });
    });
    req.on('error', (e) => {
      console.log(`problem with request: ${e.message}`);
    });
    req.write(postData);
    req.end();
  });
}
The function that subscribes to the transfer event looks like this:
function getTransferEvent(txHash) {
  var deferred = q.defer();
  ethereumHandler.setContract(config.get(cABIAddress), cAddress).then(function(abiContract) {
    abiContract.getPastEvents('Transfer', { filter: { transactionHash: txHash } })
      .then(function(event) {
        var returnValues = {
          from: event.returnValues.from,
          to: event.returnValues.to,
          tokenId: event.returnValues.tokenId
        };
        deferred.resolve(returnValues);
      });
  });
  // the promise must be returned from getTransferEvent itself, not from inside
  // the .then callback, or callers will receive undefined
  return deferred.promise;
}
The code for the last function works if I put it straight into the transfer function, but it's not called if I try to call it through the transferToWebhook function.
How can I launch the transferToWebhook function after having answered the first request?
You can spawn a child process using the spawn() method from the child_process module, then listen for its output (sp.stdout.on('data')) and use a promise to consume the returned data. I'm not sure it will solve your case, since your function is a member of contractRouter (contractRouter.getTransferEvent(txHash)), but you should be able to adapt it in some way. See an example of what I mean.
In your file.js:
const { spawn } = require('child_process')

function transfertToWebHook(data) {
  getTransfertEvent(data)
    .then((result) => {
      const dts = JSON.parse(result)
      console.log('the res: ', dts)
      // send the result back
    })
    .catch(e => console.log('handle the err: ', e))
  console.log('carry on with the mother process')
}

function getTransfertEvent(data) {
  return new Promise((resolve, reject) => {
    const sp = spawn(process.execPath, ['childProcess.js'])
    // pass the arguments, here it would be the txHash
    sp.stdin.write(data)
    sp.stdin.end()
    sp.stdout.on('data', (d) => {
      // once the data has been processed, you get it back here
      resolve(d.toString())
    })
    sp.stdout.on('error', (e) => {
      reject(e)
    })
    console.log('run whatever else needs to happen on the mother process')
  })
}

transfertToWebHook('test')
Create another file named childProcess.js.
Use a Transform stream to process the process.stdin data, then return it through process.stdout:
const { Transform, pipeline } = require('stream')

const createT = () => {
  return new Transform({
    transform(chunk, enc, next) {
      // here, run the code of your getTransfertEvent() function
      // simulate some async processing
      setTimeout(() => {
        // chunk is the passed argument
        // !! chunk is a Buffer, so decode it as utf8 using toString()
        // upper-case it to simulate some change
        const dt = chunk.toString().toUpperCase()
        // return an object, as that's what your function returns
        const processedData = {
          name: dt,
          from: "from datas",
          to: "to datas",
          tokenId: "the token"
        }
        const dataString = JSON.stringify(processedData)
        next(null, dataString)
      }, 1000)
    }
  })
}

pipeline(process.stdin, createT(), process.stdout, e => console.log(e))
Run the code: node file.js
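To adapt the boilerplate to the question, the child could read the txHash from stdin, run the web3 subscription there, and write the event values back as JSON. A rough sketch, assuming getTransferEvent from the question is exported from a module (the require path is hypothetical):
// childProcess.js (sketch)
const { getTransferEvent } = require('./contractRouter') // hypothetical path

let input = ''
process.stdin.on('data', chunk => { input += chunk })
process.stdin.on('end', () => {
  const txHash = input.trim()
  getTransferEvent(txHash)
    .then(values => process.stdout.write(JSON.stringify(values)))
    .catch(err => {
      process.stderr.write(String(err))
      process.exit(1)
    })
})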
I'm trying to periodically notify all clients about changes in sports matches, for which I use an EventEmitter in Node. The notification works fine; the issue I have is that if the client closes or refreshes the browser window, I should remove the listener on the server and shrink the pool of listeners, otherwise I could easily bump into memory-leak warnings.
Code:
const EventEmitter = require('events');
const Stream = new EventEmitter();

exports.getScheduleUpdates = async (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  Stream.on('push', (data) => {
    let isAlive = res.write(`data: ${JSON.stringify(data)}\n\n`);
    if (!isAlive) {
      console.log("Should terminate");
    }
    console.log(isAlive);
  });
}

setInterval(() => {
  Stream.emit('push', { match: 'sending some data' });
}, 10000);
When I run it locally and refresh the browser, say, 2 times, the event fires for 3 clients, 2 of which are closed, and res.write returns false for them.
I got this far with the help of Google, but I can't figure out how to manage the listener pool for the event and remove listeners once the isAlive flag is false. I'm also not 100% sure that relying on the return value of res.write is correct; it signals backpressure (a full internal buffer) rather than a closed connection as such.
Thank you
In the end, I found this tutorial:
https://alligator.io/nodejs/server-sent-events-build-realtime-app/
Which ended up working well for my case.
let connectedClients = [];

exports.getScheduleUpdates = async (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  const clientId = Date.now();
  const newClient = {
    id: clientId,
    res
  };
  connectedClients.push(newClient);
  req.on('close', () => {
    connectedClients = connectedClients.filter(c => c.id !== clientId);
  });
}

function sendScheduleUpdates(changesToSend) {
  connectedClients.forEach(client => {
    client.res.write(`data: ${JSON.stringify(changesToSend)}\n\n`);
  });
}

setInterval(() => {
  sendScheduleUpdates([{ match: 'sending some data' }]);
}, 10000);
Cleanup of old, unused connections is done in the req.on('close', () => {...}) part.
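If you'd rather keep the EventEmitter approach from the original code, the same close hook works there too: register a named listener per request and detach it on disconnect, which is exactly what keeps the listener pool from growing. A minimal sketch along those lines, reusing Stream from the question:
exports.getScheduleUpdates = async (req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  const onPush = (data) => {
    res.write(`data: ${JSON.stringify(data)}\n\n`);
  };
  Stream.on('push', onPush);
  // detach this client's listener when the connection goes away
  req.on('close', () => {
    Stream.removeListener('push', onPush);
    res.end();
  });
}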
I'm trying to set up a server to respond to socket.io clients using Node.js, Express and socket.io. I want to write tests that probe the server and make sure it handles the events correctly and sends appropriate responses to the client.
I tried writing some automated tests using Jest, but I couldn't figure out how to actually emit events to the server and have it respond.
Unit testing Node.js and WebSockets (Socket.io)
I checked out the above post but it didn't work for me...
Check out this boilerplate solution, which is based on promises and good practice.
You can test your server's entire set of IO events with it, no sweat.
You just need to copy a boilerplate test and add your own code as needed.
Check out the repo on GitHub for the full source code.
https://github.com/PatMan10/testing_socketIO_server
const io = require("socket.io-client");
const ev = require("../utils/events");
const logger = require("../utils/logger");
// initSocket returns a promise
// success: resolve a new socket object
// fail: reject a error
const initSocket = () => {
return new Promise((resolve, reject) => {
// create socket for communication
const socket = io("localhost:5000", {
"reconnection delay": 0,
"reopen delay": 0,
"force new connection": true
});
// define event handler for sucessfull connection
socket.on(ev.CONNECT, () => {
logger.info("connected");
resolve(socket);
});
// if connection takes longer than 5 seconds throw error
setTimeout(() => {
reject(new Error("Failed to connect wihtin 5 seconds."));
}, 5000);
});
};
// destroySocket returns a promise
// success: resolves true
// no connection: resolves false
const destroySocket = socket => {
  return new Promise((resolve, reject) => {
    // check if socket is connected
    if (socket.connected) {
      // disconnect socket
      logger.info("disconnecting...");
      socket.disconnect();
      resolve(true);
    } else {
      // not connected
      logger.info("no connection to break...");
      resolve(false);
    }
  });
};
describe("test suit: Echo & Bello", () => {
test("test: ECHO", async () => {
try {
// create socket for communication
const socketClient = await initSocket();
// create new promise for server response
const serverResponse = new Promise((resolve, reject) => {
// define a handler for the test event
socketClient.on(ev.res_ECHO, data4Client => {
//process data received from server
const { message } = data4Client;
logger.info("Server says: " + message);
// destroy socket after server responds
destroySocket(socketClient);
// return data for testing
resolve(data4Client);
});
// if response takes longer than 5 seconds throw error
setTimeout(() => {
reject(new Error("Failed to get reponse, connection timed out..."));
}, 5000);
});
// define data 4 server
const data4Server = { message: "CLIENT ECHO" };
// emit event with data to server
logger.info("Emitting ECHO event");
socketClient.emit(ev.com_ECHO, data4Server);
// wait for server to respond
const { status, message } = await serverResponse;
expect(status).toBe(200);
expect(message).toBe("SERVER ECHO");
} catch (error) {
logger.error(error);
}
});
test("test BELLO", async () => {
try {
const socketClient = await initSocket();
const serverResponse = new Promise((resolve, reject) => {
socketClient.on(ev.res_BELLO, data4Client => {
const { message } = data4Client;
logger.info("Server says: " + message);
destroySocket(socketClient);
resolve(data4Client);
});
setTimeout(() => {
reject(new Error("Failed to get reponse, connection timed out..."));
}, 5000);
});
const data4Server = { message: "CLIENT BELLO" };
logger.info("Emitting BELLO event");
socketClient.emit(ev.com_BELLO, data4Server);
const { status, message } = await serverResponse;
expect(status).toBe(200);
expect(message).toBe("SERVER BELLO");
} catch (error) {
logger.error(error);
}
});
});
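For context, these tests assume a server on localhost:5000 that answers the com_ECHO/com_BELLO commands with res_ECHO/res_BELLO events carrying a status and a message. The real implementation is in the linked repo; purely as an assumed sketch (string literals stand in for the ev constants, and the socket.io v4 API is assumed), such a server might look like:
const http = require("http");
const { Server } = require("socket.io"); // socket.io v4 style; the repo may differ

const httpServer = http.createServer();
const ioServer = new Server(httpServer);

ioServer.on("connection", socket => {
  // reply to each command with the response event the tests listen for
  socket.on("com_ECHO", () => {
    socket.emit("res_ECHO", { status: 200, message: "SERVER ECHO" });
  });
  socket.on("com_BELLO", () => {
    socket.emit("res_BELLO", { status: 200, message: "SERVER BELLO" });
  });
});

httpServer.listen(5000);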