How to stream data over socket.io to client - node.js

I have socket.io sending a basic object from server to client. This bit works fine.
Now I want to send a stream from server to client using event-stream (specifically, the results of a blockchain query), but I am getting unexpected results in the browser console.
var io = require('socket.io')(server);
var dsteem = require('dsteem')
var es = require('event-stream')
var util = require('util')
var client = new dsteem.Client('https://api.steemit.com')
var stream = client.blockchain.getBlockStream()
/* This sends results to stdout, fine:
io.on('connection', function(socket){
  stream.pipe(es.map(function(block, callback) {
    callback(null, util.inspect(block) + '\n')
  })).pipe(process.stdout);
  // And this sends a simple object to the client
  socket.emit('blockchainOps', {"Foo!":"Doo!"} );
});
*/
// Putting both together sends strange connection data to client
io.on('connection', function(socket){
  socket.emit('blockchainOps', function() {
    stream.pipe(es.map(function(block, callback) {
      callback(null, util.inspect(block) + '\n');
    }))
  })
});
What I get in the client console appears to be some kind of TCP socket function:
ƒ (){if(!n){n=!0;var r=a(arguments);u("sending ack %j",r),e.packet({type:i.ACK,id:t,data:r})}}
Can anyone help me understand what's going on and what I'm doing wrong?
== EDIT UPDATE ==
As suggested in comments, I've tried socket.io-stream to augment event-stream.
var es = require('event-stream')
var util = require('util')
var ss = require('socket.io-stream');
var stream = ss.createStream();
io.on('connection', function(socket){
  ss(socket).emit('blockchainOps', stream, function(){
    client.blockchain.getBlockStream()
      .pipe(es.map(function(block, callback) {
        callback(null, util.inspect(block) + '\n')
      }))
      .pipe(process.stdout)
  }());
});
This time I get a socket object returned in the browser console which does not seem to be the stream data I was hoping for.

If anyone is looking for a working socket.io stream example:
// server side
const { pipeline } = require('stream')
const server = require('http').Server().listen(8080)
const io = require('socket.io')(server)
const ss = require('socket.io-stream')
io.on('connection', (socket) => ss(socket).on('stream', (stream) => {
  pipeline(stream, process.stdout, (err) => err && console.log(err))
}));
// client side
const { pipeline } = require('stream')
const ss = require('socket.io-stream')
const client = require('socket.io-client')
const socket = client.connect('http://localhost:8080')
socket.on('connect', () => {
  const stream = ss.createStream()
  ss(socket).emit('stream', stream)
  pipeline(process.stdin, stream, (err) => err && console.log(err))
});
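To adapt that pattern to the original question (pushing blockchain ops from the server down to the browser), the stream just flows the other way. This is a sketch only, reusing the io and ss handles from the example above and the dsteem client from the question; note that a fresh stream is created per connection instead of sharing one across sockets:

// server side: open a new socket.io-stream per connection and pipe the block stream into it
const es = require('event-stream')
const util = require('util')
const dsteem = require('dsteem')
const dsteemClient = new dsteem.Client('https://api.steemit.com')
io.on('connection', (socket) => {
  const stream = ss.createStream()
  ss(socket).emit('blockchainOps', stream)
  dsteemClient.blockchain.getBlockStream()
    .pipe(es.map((block, callback) => callback(null, util.inspect(block) + '\n')))
    .pipe(stream)
})
// client side (browser): consume the text chunks as they arrive
ss(socket).on('blockchainOps', (stream) => {
  stream.on('data', (chunk) => console.log(chunk.toString()))
})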

You're using socket.emit wrong: you're passing the ACK callback to the client instead of your stream. Have a look at the socket.emit signature: socket.emit(eventName[, ...args][, ack]).
You probably want something like
socket.emit('blockchainOps', client.blockchain.getBlockStream());
However, I don't think plain socket.io supports passing a Stream like that. To pipe a stream down to the client you could use socket.io-stream. It would look like this:
var ss = require('socket.io-stream');
var stream = ss.createStream();
ss(socket).emit('blockchainOps', stream);
client.blockchain.getBlockStream().pipe(stream);
EDIT:
On the client, you should be able to read your stream like this:
<script src="socket.io/socket.io.js"></script>
<script src="socket.io-stream.js"></script>
...
ss(socket).on('blockchainOps', function(stream) {
  var binaryString = "";
  stream.on('data', function(data) {
    for (var i = 0; i < data.length; i++) {
      binaryString += String.fromCharCode(data[i]);
    }
  });
  stream.on('end', function(data) {
    console.log(binaryString);
    binaryString = "";
  });
});
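If the payload is UTF-8 text, as in the blockchainOps example, a shorter variant is to let the browser's TextDecoder do the decoding; a sketch, assuming a reasonably modern browser:

ss(socket).on('blockchainOps', function(stream) {
  var decoder = new TextDecoder();
  var text = "";
  stream.on('data', function(chunk) {
    // { stream: true } keeps multi-byte characters intact across chunk boundaries
    text += decoder.decode(chunk, { stream: true });
  });
  stream.on('end', function() {
    console.log(text);
  });
});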

Related

How to disconnect a socket after streaming data?

I am making use of "socket.io-client" and "socket.io-stream" to make a request and then stream some data. I have the following code that handles this logic.
Client Server Logic
router.get('/writeData', function(req, res) {
  var io = req.app.get('socketio');
  var nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
  var nameNodeData = {};
  async.waterfall([
    checkForDataNodes,
    readFileFromS3
  ], function(err, result) {
    if (err !== null) {
      res.json(err);
    } else {
      res.json("Finished Writing to DN's");
    }
  });
  function checkForDataNodes(cb) {
    nameNodeSocket.on('nameNodeData', function(data) {
      nameNodeData = data;
      console.log(nameNodeData);
      cb(null, nameNodeData);
    });
    if (nameNodeData.numDataNodes === 0) {
      cb("No datanodes found");
    }
  }
  function readFileFromS3(nameNodeData, cb) {
    for (var i in nameNodeData['blockToDataNodes']) {
      var IP = nameNodeData['blockToDataNodes'][i]['ipValue'];
      var dataNodeSocket = io.connect('http://' + IP + ":5000");
      var ss = require("socket.io-stream");
      var stream = ss.createStream();
      var byteStartRange = nameNodeData['blockToDataNodes'][i]['byteStart'];
      var byteStopRange = nameNodeData['blockToDataNodes'][i]['byteStop'];
      paramsWithRange['Range'] = "bytes=" + byteStartRange.toString() + "-" + byteStopRange.toString();
      //var file = require('fs').createWriteStream('testFile' + i + '.txt');
      var getFileName = nameNodeData['blockToDataNodes'][i]['key'].split('/');
      var fileData = {
        'mainFile': paramsWithRange['Key'].split('/')[1],
        'blockName': getFileName[1]
      };
      ss(dataNodeSocket).emit('sendData', stream, fileData);
      s3.getObject(paramsWithRange).createReadStream().pipe(stream);
      //dataNodeSocket.disconnect();
    }
    cb(null);
  }
});
Server Logic (that gets the data)
var dataNodeIO = require('socket.io')(server);
var ss = require("socket.io-stream");
dataNodeIO.on('connection', function(socket) {
  console.log("Successfully connected!");
  ss(socket).on('sendData', function(stream, data) {
    var IP = data['ipValue'];
    var blockName = data['blockName'];
    var mainFile = data['mainFile'];
    dataNode.makeDir(mainFile);
    dataNode.addToReport(mainFile, blockName);
    stream.pipe(fs.createWriteStream(mainFile + '/' + blockName));
  });
});
How can I properly disconnect the connections in the function readFileFromS3? I have noticed that calling dataNodeSocket.disconnect() at the end does not work, as I cannot verify the data was received on the second server. But if I comment it out, I can see the data being streamed to the second server.
My objective is to close the connections on the client-server side.
It appears that the main problem with closing the socket is that you weren't waiting for the stream to be done writing before trying to close the socket. So, because the writing is all asynchronous and finishes sometime later, you were trying to close the socket before the data had been written.
Also, because you were putting asynchronous operations inside a for loop, you were running all your operations in parallel, which may not be what you want: it makes error handling more difficult and increases server load.
Here's the code I would suggest. It does the following:
1. Creates a function streamFileFromS3() that streams a single file and returns a promise that notifies you when it's done.
2. Uses await in a for loop with streamFileFromS3() to serialize the operations. You don't have to serialize them, but then you would have to change your error handling to figure out what to do if one errors while the others are already running, and you'd have to be more careful about concurrency issues.
3. Uses try/catch to catch any errors from streamFileFromS3().
4. Adds error handling on the stream.
5. Changes all occurrences of data['propertyName'] to data.propertyName. The only time you need brackets is when the property name contains a character that is not allowed in a JavaScript identifier, or when the property name is in a variable. Otherwise, dot notation is preferred.
6. Adds socket.io connection error handling for both socket.io connections.
7. Sets the returned status to 500 when there's an error processing the request.
So, here's the code for that:
const ss = require("socket.io-stream");
router.get('/writeData', function(req, res) {
  const io = req.app.get('socketio');

  function streamFileFromS3(ip, data) {
    return new Promise((resolve, reject) => {
      const dataNodeSocket = io.connect(`http://${ip}:5000`);
      dataNodeSocket.on('connect_error', reject);
      dataNodeSocket.on('connect_timeout', () => {
        reject(new Error(`timeout connecting to http://${ip}:5000`));
      });
      dataNodeSocket.on('connect', () => {
        // dataNodeSocket connected now
        const stream = ss.createStream().on('error', reject);
        paramsWithRange.Range = `bytes=${data.byteStart}-${data.byteStop}`;
        const filename = data.key.split('/')[1];
        const fileData = {
          'mainFile': paramsWithRange.Key.split('/')[1],
          'blockName': filename
        };
        ss(dataNodeSocket).emit('sendData', stream, fileData);
        // get S3 data and pipe it to the socket.io stream
        s3.getObject(paramsWithRange).createReadStream().on('error', reject).pipe(stream);
        stream.on('close', () => {
          dataNodeSocket.disconnect();
          resolve();
        });
      });
    });
  }

  function connectError(msg) {
    res.status(500).send(`Error connecting to ${NAMENODE_ADDRESS}`);
  }

  const nameNodeSocket = io.connect(NAMENODE_ADDRESS, { reconnect: true });
  nameNodeSocket.on('connect_error', connectError).on('connect_timeout', connectError);
  nameNodeSocket.on('nameNodeData', async (nameNodeData) => {
    try {
      for (let item of nameNodeData.blockToDataNodes) {
        await streamFileFromS3(item.ipValue, item);
      }
      res.json("Finished Writing to DN's");
    } catch (e) {
      res.status(500).json(e);
    }
  });
});
Other notes:
I don't know what paramsWithRange is, as it is not declared here, and when you were doing everything in parallel it was being shared among all the connections, which is asking for a concurrency problem. In my serialized implementation it's probably safe to share, but the way it stands bothers me; it's a concurrency issue waiting to happen.
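If you do go back to running the transfers in parallel, one way to avoid that shared state is to build a private params object per call. A sketch, assuming the other S3 fields come from your existing paramsWithRange:

function streamFileFromS3(ip, data) {
  // copy the shared template instead of mutating it, so parallel calls can't clobber each other
  const params = Object.assign({}, paramsWithRange, {
    Range: `bytes=${data.byteStart}-${data.byteStop}`
  });
  // ...then use s3.getObject(params) exactly as before
}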

The socket.io is emitting to all rooms instead of the specified ones

I'm new to Node and Socket.IO. I'm trying to implement a realtime notification system for a couple of my own apps. So, using Node, Express and Socket.IO, the code is given below:
Server Side Code:
io.on('connection', function (socket) {
  socket.on('subscribe', function(room) {
    socket.join(room);
  });
  socket.on('unsubscribe', function(room) {
    socket.leave(room);
  });
});
Client Side Code:
var sio = io.connect('http://localhost:9000');
var ch1 = sio.emit('subscribe', 'channel1');
ch1.on('log', function (data) {
  console.log('channel1: ', data);
});
var ch2 = sio.emit('subscribe', 'channel2');
ch2.on('log', function (data) {
  console.log('channel2: ', data);
});
I'm firing/emitting the event from a route (express) for example:
app.get('/', function(req, res) {
  var data1 = {
    channel: 'channel1',
    event: 'log',
    message: 'Hello from channel1...'
  };
  io.to(data1.channel).emit(data1.event, data1);
});
When I hit the route, io.to(data1.channel).emit(data1.event, data1) works, but it sends the data to both rooms/channels. I was expecting to get the data only in ch1, since data1.channel contains 'channel1', so only the following handler should receive the data:
ch1.on('log', function (data) {
  console.log('channel1: ', data);
});
Notice that both channels have the same log event. Am I on the right track? Is it possible at all?
var ch1 = sio.emit('subscribe', 'channel1');
var ch2 = sio.emit('subscribe', 'channel2');
You're subscribing the same socket (sio) to both rooms. Also, since emit() returns the socket itself, ch1 and ch2 are just references to sio, so both 'log' handlers are registered on the same socket and both fire.
If you want to test it properly, you should create a second socket for the second channel:
var sio2 = io.connect('http://localhost:9000');
var ch2 = sio2.emit('subscribe', 'channel2');
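A complete sketch of that test setup follows; note that socket.io-client caches connections to the same URL by default, so { forceNew: true } is needed to get a genuinely separate socket (an assumption worth verifying against your client version):

// first socket, joined to channel1
var sio1 = io.connect('http://localhost:9000');
sio1.emit('subscribe', 'channel1');
sio1.on('log', function (data) {
  console.log('channel1: ', data);
});
// second, independent socket, joined to channel2
var sio2 = io.connect('http://localhost:9000', { forceNew: true });
sio2.emit('subscribe', 'channel2');
sio2.on('log', function (data) {
  console.log('channel2: ', data);
});
// now io.to('channel1').emit('log', data1) on the server reaches only sio1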

Socket.IO server not receiving message from client

I'm playing around with Node, Socket.IO and BDD by creating a chat application. During one of the tests, I get a timeout error stating:
Error: timeout of 2000ms exceeded. Ensure the done() callback is being called in this test.
The affected test is
it('#must be able to receive a message', function(done)
{
  chatterServer.on('chatterMessage', function(data)
  {
    console.log('Incoming message!');
    expect(data).to.have.property('message');
    expect(data.message).to.be('Hello, world!');
    done();
  });
  console.log('Sending message!');
  chatterClient.send('chatterMessage', { message: 'Hello, world!' });
  console.log('Sent!');
});
I found that the cause of this issue is that the chatterMessage event is not being caught by the server, even though I did specify it.
The console's output is:
Sending message!
Sent!
Error: timeout of 2000ms exceeded. Ensure the done() callback is being called in this test.
I'm probably doing something wrong; I'm not too familiar with Node and Socket.IO, so I'm sorry if this question is very obvious.
I searched around Google with the terms 'socket.io server not receiving from client', but nothing I found has helped me solve the issue so far.
I did however try the solution in this question, but that didn't fix it for me.
I'm using Mocha and expect.js.
The complete test is:
var util = require('util');
var Chatter = require('../src/index');
var ChatterServer = Chatter.Server;
var ChatterClient = Chatter.Client;
var express = require('express');
var expect = require('expect.js');
var socketIO = require('socket.io');
var socketIOClient = require('socket.io-client');
var host = 'http://localhost';
var port = 8080;
describe('Chatter', function()
{
  'use strict';
  var chatterServer;
  var chatterClient;
  var server;
  before(function()
  {
    var app = express();
    server = app.listen(port);
  });
  beforeEach(function()
  {
    chatterServer = new ChatterServer(socketIO(server));
    chatterClient = new ChatterClient(socketIOClient, util.format('%s:%s', host, port.toString()));
  });
  ...
  it('#must be able to receive a message', function(done)
  {
    chatterServer.on('chatterMessage', function(data)
    {
      console.log('Incoming message!');
      expect(data).to.have.property('message');
      expect(data.message).to.be('Hello, world!');
      done();
    });
    console.log('Sending message!');
    chatterClient.send('chatterMessage', { message: 'Hello, world!' });
    console.log('Sent!');
  });
});
My Client (ChatterClient) is:
(function()
{
  'use strict';
  function Client(socketIO, url)
  {
    this.socketIO = socketIO(url);
  }
  Client.prototype.send = function(event, data)
  {
    this.socketIO.emit(event, data);
  };
  Client.prototype.on = function(event, callback)
  {
    this.socketIO.on(event, callback);
  };
  if (module !== undefined && module.hasOwnProperty('exports')) {
    module.exports = Client;
  } else {
    window.Chatter = {
      Client: Client,
    };
  }
}());
The Server (ChatterServer) is:
(function()
{
  'use strict';
  function Server(socketIO)
  {
    this.socketIO = socketIO;
    this.connectedUsers = {};
    this.on('connection', (function(user)
    {
      var userID = user.client.id;
      this.connectedUsers[userID] = user;
      user.emit('chatterConnectionAcknowledged', { id: userID });
    }).bind(this));
  }
  Server.prototype.on = function(event, handler)
  {
    this.socketIO.on(event, handler);
  };
  module.exports = Server;
}());
You need to change your code in two places.
First, you need to listen for incoming socket connections on the socketIO object itself; note the this.socketIO.on('connection', ...) line in the code below.
//.. some code
function Server(socketIO)
{
  this.socketIO = socketIO;
  this.connectedUsers = {};
  this.socketIO.on('connection', (function(user)
  {
    var userID = user.client.id;
    this.connectedUsers[userID] = user;
    user.emit('chatterConnectionAcknowledged', { id: userID });
  }).bind(this));
}
//.. some code
Second, when you add new events to listen for on the server, you need to bind them to the connected sockets, since those are what actually receive the events emitted by the socket clients:
Server.prototype.on = function (event, handler) {
  Object.keys(this.connectedUsers).map(function (key) {
    this.connectedUsers[key].on(event, handler);
  }.bind(this));
};
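Note that this version only binds the handler to sockets that are already connected; clients that connect later would miss it. One way to cover both cases (a sketch extending the constructor above) is to record the handlers and replay them on each new connection:

function Server(socketIO)
{
  this.socketIO = socketIO;
  this.connectedUsers = {};
  this.handlers = []; // [event, handler] pairs registered via .on()
  this.socketIO.on('connection', (function(user)
  {
    var userID = user.client.id;
    this.connectedUsers[userID] = user;
    // replay every previously registered event on the new socket
    this.handlers.forEach(function(pair) {
      user.on(pair[0], pair[1]);
    });
    user.emit('chatterConnectionAcknowledged', { id: userID });
  }).bind(this));
}

Server.prototype.on = function (event, handler) {
  this.handlers.push([event, handler]);
  Object.keys(this.connectedUsers).forEach(function (key) {
    this.connectedUsers[key].on(event, handler);
  }.bind(this));
};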

Streaming data in node.js with ws and websocket-stream

I'm trying to stream data from a server to a client using websockets, in particular ws and websocket-stream. However, the stream seems to be closing too early, and I only receive two chunks' worth of data. Here is a simple example.
I have a server:
var WebSocketServer = require('ws').Server;
var websocketStream = require('websocket-stream');
var wss = new WebSocketServer({ port: 8098 });
var fs = require('fs');
var util = require('util');
wss.on('connection', function connect(ws) {
  var stream = websocketStream(ws);
  var rs = fs.createReadStream('foo.big');
  rs.pipe(stream);
});
and a client that connects and streams foo.big to a local file using pipe():
var fs = require('fs');
var util = require('util');
var websocket = require('websocket-stream');
var rs = fs.createWriteStream('big.out');
rs.on('open', function () {
  var ws = websocket('http://localhost:8098');
  ws.on('open', function () {
    ws.pipe(rs);
  }).on('error', function (err) {
    console.log(err);
  }).on('close', function (err) {
    console.log("Closing");
  });
});
Any ideas why I would only be getting a few packets of data? If I remove the call in the client to pipe, and just receive the data in chunks, it seems to receive all the data.
There are a couple of things wrong with the above (as of 2017):
You want to listen for the file stream's close event (writeStream.on('close', ...) in the example below) to know when all the data has been written.
You should not be listening for the websocket-stream open event; you can assume it is already open.
Also, performance: ws is likely not suited for server-to-server communication. Consider lower-level alternatives such as the net module; a sketch follows the example below.
Find a full example here on my GH.
const fs = require('fs')
const websocket = require('websocket-stream')
const writeStream = fs.createWriteStream('big.out')
writeStream.on('open', () => {
  let ws = websocket('http://localhost:8098')
  ws.pipe(writeStream)
  ws.on('error', (err) => {
    if (err) throw err
  }).on('close', (err) => {
    if (err) throw err
    console.log(`Closing ws with: ${fs.statSync('big.out').size} bytes`)
  })
  writeStream.on('close', () => {
    console.log(`Closing file stream with: ${fs.statSync('big.out').size} bytes`)
  })
})
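For comparison, the lower-level net alternative mentioned above could look like this; a sketch only, assuming port 8099 and the same foo.big/big.out files:

// server: stream the file over a raw TCP socket
const net = require('net')
const fsNet = require('fs')
net.createServer((socket) => {
  fsNet.createReadStream('foo.big').pipe(socket)
}).listen(8099)

// client: pipe the TCP connection straight into the output file
const out = fsNet.createWriteStream('big.out')
const conn = net.connect(8099, () => {
  conn.pipe(out)
})
conn.on('close', () => console.log(`done: ${fsNet.statSync('big.out').size} bytes`))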

get a response from cassandra binary protocol using node streams

I'm trying to learn more about streams and Cassandra, so I figured I'd try out the binary protocol. I can't even get it to respond, though.
var net = require('net');
var util = require('util');
var stream = require('stream');
var session;
session = net.connect({ port: 9160 }, function () {
  console.log('connected');
  var header = new Buffer([ 0x01, 0x00, 0x01, 0x01 ]);
  var length = new Buffer(4);
  var body = new Buffer("{'CQL_VERSION':'3.0.0'}", 'utf8');
  length.writeUInt32BE(body.length, 0);
  session.write(header);
  session.write(length);
  session.write(body);
  setTimeout(function () { session.end(); }, 5000);
});
session.on('error', function (err) {
  console.log(err);
});
var client = new (stream.Writable);
client._write = function (chunk, _, next) {
  console.log('response received');
  next();
};
session.pipe(client);
The program runs for 5 seconds, printing "connected" almost immediately, but the writable stream never receives content from the database. Any help would be much appreciated!
I'm working off of this documentation for the binary protocol and this guide to node streams.
Cassandra is running, version 2.0.5.
9160 is the Thrift port. Have you tried your code against port 9042, where the native binary protocol listens?
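Also note that on the native protocol the STARTUP body is not raw text but a [string map]: a 2-byte pair count followed by length-prefixed strings. A sketch of a well-formed STARTUP frame, based on my reading of the protocol v1 spec (worth double-checking against your Cassandra version):

var net = require('net');
// encode a [string]: 2-byte big-endian length followed by utf8 bytes
function encString(s) {
  var b = new Buffer(s, 'utf8');
  var len = new Buffer(2);
  len.writeUInt16BE(b.length, 0);
  return Buffer.concat([len, b]);
}
// body: [string map] with one entry, CQL_VERSION -> 3.0.0
var body = Buffer.concat([
  new Buffer([0x00, 0x01]),
  encString('CQL_VERSION'),
  encString('3.0.0')
]);
var header = new Buffer(8);
header.writeUInt8(0x01, 0);           // version: request, protocol v1
header.writeUInt8(0x00, 1);           // flags: none
header.writeUInt8(0x01, 2);           // stream id
header.writeUInt8(0x01, 3);           // opcode: STARTUP
header.writeUInt32BE(body.length, 4); // body length
var session = net.connect({ port: 9042 }, function () {
  session.write(Buffer.concat([header, body]));
});
session.on('data', function (chunk) {
  // opcode 0x02 (READY) in byte 3 of the response means the handshake succeeded
  console.log('response opcode:', chunk[3]);
});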
