Using the response from bidirectional streaming callback in Node.js gRPC client

I have a gRPC server written in Go that runs on a Unix domain socket, and I'm writing a Node.js client for it. Since grpc-node doesn't support Unix sockets, I had to use @grpc/grpc-js.
The issue I'm facing now is that I need to perform some operations on the response I get back from the server. I currently have this and it works, but is there a cleaner/nicer way of accomplishing it?
const grpc = require('@grpc/grpc-js');
const protoLoader = require('@grpc/proto-loader');

let appEncryptionDef = protoLoader.loadSync(__dirname + '/../../../../protos/appencryption.proto', {
    keepCase: true,
    defaults: true,
    oneofs: true
});
let appEncryptionProto = grpc.loadPackageDefinition(appEncryptionDef);
let client = new appEncryptionProto.asherah.apps.server.AppEncryption(`unix://${socket}`, grpc.credentials.createInsecure());
let call = client.session();
call.on('data', function (sessionResponse) {
    switch (sessionResponse.response) {
        case 'x':
            // process the server's response and send the next request
            let data = parse_server_response(sessionResponse);
            call.write(data);
            call.end();
            break;
        case 'error_response':
            console.log('error received: ' + sessionResponse.error_response.message);
            break;
    }
});
Is there a better way of doing this? I've looked at the grpc-caller library, but using it with @grpc/grpc-js gives me a "Channel's second argument must be a ChannelCredentials" error.
Here's the client I've written so far: https://github.com/godaddy/asherah/blob/servicelayer_node/server/samples/clients/node/appencryption_client.js
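For what it's worth, one cleaner pattern is to wrap each request/response round trip on the duplex stream in a Promise, so the response handling lives in one place. This is a rough sketch only; sendRequest is a made-up helper name, not part of @grpc/grpc-js:

// Hypothetical helper: one round trip on the bidirectional stream as a Promise.
// Assumes the server sends exactly one response per written request.
function sendRequest(call, request) {
    return new Promise((resolve, reject) => {
        call.once('data', (sessionResponse) => {
            // 'response' is the oneof discriminator populated by proto-loader
            if (sessionResponse.response === 'error_response') {
                reject(new Error(sessionResponse.error_response.message));
            } else {
                resolve(sessionResponse);
            }
        });
        call.once('error', reject);
        call.write(request);
    });
}

// Usage: async/await instead of nested event callbacks
async function run(someRequest /* whatever request message your proto defines */) {
    const call = client.session();
    try {
        const response = await sendRequest(call, someRequest);
        console.log('got response:', response);
    } finally {
        call.end();
    }
}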

Related

Exposing websocket data through an HTTP endpoint

I'm currently subscribing to a websocket channel in order to get data that needs to be consumed by clients through a GET endpoint.
Clients don't need to consume the full data stream, just the last message received.
I thought about storing the data in memory or in a database, and using that data to serve said GET requests, but I suspect that's not the right implementation. Any ideas on how it should be done will be appreciated.
EDIT: I'm not asking for code, just for an idea of what pattern I should follow. BTW, I'm using Express.
Websocket code:
const WebSocket = require('ws')
const connection = new WebSocket('wss://ws.bitso.com')

connection.onopen = function () {
    connection.send(JSON.stringify({ action: 'subscribe', book: 'btc_usd', type: 'orders' }))
}

connection.onmessage = function (message) {
    const data = JSON.parse(message.data)
    if (data.type === 'orders' && data.payload) {
        console.log(data)
    }
}
I just stored the data in memory and used that as the return value for my endpoint.
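A minimal sketch of that approach, assuming the Express setup implied by the question (the /orders route and variable names here are made up for illustration):

const express = require('express')
const WebSocket = require('ws')

const app = express()

// keep only the most recent 'orders' message in memory
let lastOrders = null

const connection = new WebSocket('wss://ws.bitso.com')
connection.onopen = function () {
    connection.send(JSON.stringify({ action: 'subscribe', book: 'btc_usd', type: 'orders' }))
}
connection.onmessage = function (message) {
    const data = JSON.parse(message.data)
    if (data.type === 'orders' && data.payload) {
        lastOrders = data
    }
}

// clients get only the last message received, not the full stream
app.get('/orders', function (req, res) {
    if (lastOrders === null) {
        return res.status(503).json({ error: 'no data received yet' })
    }
    res.json(lastOrders)
})

app.listen(3000)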

Tracking WS Clients in Nodejs

I am using ws through an express plugin, express-ws. My back end needs to track clients and send messages only to a specific client or clients. Right now, I am just saving the client socket along with a token. So, when the client connects, the client sends a message like the following:
{"path":"/openUserSocket","token":"<CLIENT_TOKEN_VALUE>"}
This registers a socket in the back end. The socket is managed by a class, wscManager, which saves the socket in a plain object. So, to register a socket, I do this:
// adding web socket support
const expressWs = require('express-ws')(app, null, {
    wsOptions: {
        clientTracking: true
    }
});

let man = new wscManager();

// then in the web socket handler
app.ws('/', (sck, req) => {
    sck.on('message', (msg) => {
        // handle messages (they arrive as strings, so parse first)
        msg = JSON.parse(msg);
        if (msg.path === '/openUserSocket') {
            man.set(msg.token, sck);
            return sck.send(JSON.stringify({ status: 200, message: 'Ok' }));
        } else if (msg.path === '/<SOME_OTHER_PATH>') {
            // use that socket
        }
        // ... other route handling
    });
});
Now, as I showed in the code above, I am trying to save the socket for later use. set just uses the token as a key for saving the socket object in another object. Later, get(token) can be used with that token to retrieve the socket. I also extended the Express response prototype to allow other route handlers to use the wscManager:
app.response._man = man;
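For reference, a minimal sketch of what such a manager might look like; this is a guess at wscManager's shape based on the description above, not the asker's actual class:

class wscManager {
    constructor() {
        // token -> socket; a Map avoids the prototype-key pitfalls of plain objects
        this.sockets = new Map();
    }

    set(token, socket) {
        this.sockets.set(token, socket);
        // drop the entry when the client disconnects
        socket.on('close', () => this.sockets.delete(token));
    }

    get(token) {
        return this.sockets.get(token);
    }

    // send to one specific client by token
    send(token, payload) {
        const socket = this.sockets.get(token);
        if (socket && socket.readyState === 1 /* OPEN */) {
            socket.send(payload);
        }
    }
}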
Now, my question is: is this the right approach to client tracking? In other route handlers, the manager is used like this:
// in route handler
res._man.send(JSON.stringify({ status: <STATUS>, message: <MSG> }));
Thanks for your time and patience.

Terminate EventSource event listener?

I'm trying to work around a problem to do with REST streaming between the Nest API and a service (ST) that does not support streaming.
To get around this, I have built a service on Sails which takes a POST request from ST containing the Nest token, and then triggers an EventSource event listener that sends the data back to ST.
It is heavily based off the Nest rest-streaming example here:
https://github.com/nestlabs/rest-streaming and my code is as follows:
startStream: function(req, res) {
    var nestToken = req.body.nestToken,
        stToken = req.body.stToken,
        endpointURL = req.body.endpointURL,
        source = new EventSource(sails.config.nest.nest_api_url + '?auth=' + nestToken);

    source.addEventListener('put', function(e) {
        var d = JSON.parse(e.data);
        var data = { devices: d.data.devices, structures: d.data.structures },
            config = { headers: { 'Authorization': 'Bearer ' + stToken } };
        sendData(endpointURL, data, config);
    });

    source.addEventListener('open', function(e) {
        console.log("Connection opened");
    });

    source.addEventListener('auth_revoked', function(e) {
        console.log("Auth token revoked");
    });

    source.addEventListener('error', function(e) {
        if (e.readyState == EventSource.CLOSED) {
            console.error('Connection was closed! ', e);
        } else {
            console.error('An unknown error occurred: ', e);
        }
    }, false);
}
};
The problem I foresee, though, is that once a request is received by the node server, it starts the event listener; however, I cannot for the life of me figure out how to kill the event listener.
If I cannot figure out a way to stop it, then every event listener will run indefinitely, which is obviously not suitable.
Has anyone got any suggestions on how to overcome the issue?
Each SSE client connection is a dedicated socket.
If a particular client doesn't want event streaming, don't make the connection. If they start event streaming but want to turn it off, call source.close(); source = null;
If from the server side you want to stop sending the messages, close the socket.
You didn't show the server-side code, but if it is running a dedicated process per SSE client then you just exit the process. If you are maintaining a list of sockets, one per connected client, close the socket. On Node.js you might be running a function on setInterval; to close the connection you call clearInterval() and response.end().
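Applied to the Sails action above, a minimal sketch (the streams map and the stopStream action are made-up names for illustration):

// keep a handle to each client's EventSource so it can be closed later
var streams = {};

module.exports = {
    startStream: function(req, res) {
        var nestToken = req.body.nestToken;
        var source = new EventSource(sails.config.nest.nest_api_url + '?auth=' + nestToken);
        // ... attach the 'put'/'open'/'error' listeners as in the question ...
        streams[nestToken] = source;
        return res.ok();
    },

    stopStream: function(req, res) {
        var source = streams[req.body.nestToken];
        if (source) {
            // close() tears down the connection and stops all event delivery
            source.close();
            delete streams[req.body.nestToken];
        }
        return res.ok();
    }
};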

How to check if ElasticSearch client is connected?

I'm working with elasticsearch-js (Node.js) and everything works just fine as long as ElasticSearch is running. However, I'd like to know that my connection is alive before trying to invoke one of the client's methods. I'm doing things in a bit of a synchronous fashion, but only for the purpose of performance testing (e.g., check that I have an empty index to work in, ingest some data, query the data). Looking at a snippet like this:
var elasticClient = new elasticsearch.Client({
    host: ((options.host || 'localhost') + ':' + (options.port || '9200'))
});

// Note, I already have promise handling implemented, omitting it for brevity though
var promise = elasticClient.indices.delete({ index: "_all" });
// ...
Is there some mechanism I can send in on the client config to fail fast, or some test I can perform on the client to make sure it's open before invoking delete?
Update: 2015-05-22
I'm not sure if this is correct, but perhaps attempting to get client stats is reasonable?
var getStats = elasticClient.nodes.stats();
getStats.then(function (o) {
    console.log(o);
})
.catch(function (e) {
    console.log(e);
    throw e;
});
Via node-debug, I am seeing the promise rejected when ElasticSearch is down / inaccessible with: "Error: No Living connections". When it does connect, o in my then handler seems to have details about connection state. Would this approach be correct or is there a preferred way to check connection viability?
Getting stats can be a heavy call just to ensure your client is connected; you should use ping instead. See the second example at https://github.com/elastic/elasticsearch-js#examples
We are using ping too, right after instantiating the elasticsearch-js client connection on startup.
// example from above link
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({
    host: 'localhost:9200',
    log: 'trace'
});

client.ping({
    // ping usually has a 3000ms timeout
    requestTimeout: Infinity,
    // undocumented params are appended to the query string
    hello: "elasticsearch!"
}, function (error) {
    if (error) {
        console.trace('elasticsearch cluster is down!');
    } else {
        console.log('All is well');
    }
});

How to disable Multiplexing with Socket.io

I am using Socket.io to stream live tweets to my users using Twitter's Streaming API (my implementation is more or less based on this tutorial).
The problem is that every time a connection event is fired by Socket.io, the newly connected client causes every other client connected to the server to stop updating. It would take too long to go through all the hacks I tried, but I played with it enough to believe the problem is caused by Socket.io's multiplexing of connections from multiple clients (enabled by default), a performance optimization that lets multiple clients or connections share the same underlying socket.
In short, I don't think it would be possible for new connections to affect older connections in this manner if not for the connection multiplexing: if a new, independent connection with its own underlying (TCP) socket were created every time a client connected, one connection would know nothing about the others and therefore couldn't affect any other client's state, as is currently happening. This also leads me to believe that simply disabling the multiplexing functionality would be the simplest way around the problem. I am not concerned about scaling, since Node.js already handles all the concurrency I'm likely to need.
I have gone through Socket.io's documentation but could not see where the ability to "demultiplex" the connections is exposed via the API, so if anyone knows how to do this I'd greatly appreciate your response.
My code below is pretty standard and simple. But just to be clear, the issue is that whenever a new client connects to Socket.io, every other client stops receiving new tweets; updates are no longer pushed to the older clients unless I refresh the browser, in which case the newly refreshed client begins updating and receiving fresh tweets again, but the other still-connected clients then stop updating.
Server-side Code:
// Code also uses ntwitter (https://github.com/AvianFlu/ntwitter) as an abstraction over Twitter's Streaming API
io.sockets.on('connection', function (socket) {
    tweet.stream('statuses/filter', { track: 'new orleans' }, function (stream) {
        stream.on('data', function (data) {
            // The following lines simply pre-process data sent from Twitter so junk isn't
            // unnecessarily sent to the client.
            if (data.user) {
                tweets = {
                    text: data.text,
                    image: data.user.profile_image_url,
                    user: data.user.name
                };
                var t = JSON.stringify(tweets);
                console.log(t);
                socket.send(t);
            }
        });
    });
});
Client-side Code:
// Notice the option that I passed in as the second argument. This supposedly forces every
// new client to create a new connection with the server but it either doesn't work or I'm
// implementing it incorrectly. It is the very last configuration option listed in the
// documentation linked to above.
var socket = io.connect('http://' + location.host, { 'force new connection': true });

socket.on('message', function (tweet) {
    var t = JSON.parse(tweet);
    if (t.image) {
        $('.hero-unit').prepend('<div class="media"><a class="pull-left" href="#"><img class="media-object" alt="64x64" style="width: 64px; height: 64px;" src="' + t.image + '"></a><div class="media-body"><h4 class="media-heading">' + t.user + '</h4>' + t.text + '</div></div>');
    }
});
If I am thinking of this incorrectly or if there's something wrong with my code I am definitely open to any suggestions. I'd also be happy to reply with any additional details.
I would try something like this.
Server-side:
io.sockets.on('connection', function (socket) {
    // Other connection-y goodness here.
});

tweet.stream('statuses/filter', { track: 'new orleans' }, function (stream) {
    stream.on('data', function (data) {
        // The following lines simply pre-process data sent from Twitter so junk isn't
        // unnecessarily sent to the client.
        if (data.user) {
            tweets = {
                text: data.text,
                image: data.user.profile_image_url,
                user: data.user.name
            };
            var t = JSON.stringify(tweets);
            console.log(t);
            io.sockets.emit("tweet", t);
        }
    });
});
Client-side:
var socket = io.connect('http://' + location.host, { 'force new connection': true });

socket.on('tweet', function (tweet) {
    var t = JSON.parse(tweet);
    if (t.image) {
        $('.hero-unit').prepend('<div class="media"><a class="pull-left" href="#"><img class="media-object" alt="64x64" style="width: 64px; height: 64px;" src="' + t.image + '"></a><div class="media-body"><h4 class="media-heading">' + t.user + '</h4>' + t.text + '</div></div>');
    }
});
Basically, have the stream from Twitter outside your socket handler, and then on a new tweet emit a message to all connected clients.
