Difference between inbound and outbound sockets in Node.js

I am writing a function that will make sure a node instance is gracefully shut down.
To do that, I make sure I unref() all sockets. This is what I am doing:
function tidyUp(){
  console.log("Closing the server...");
  server.close();

  console.log("Ordering a hotplate shutdown...");
  process.emit( 'hotplateShutdown' );

  // This will give time to server.close() to actually work..
  setTimeout( function() {
    console.log("Calling unref() for all Sockets and for the Server:");
    wtf.dump();

    var handles = Array.prototype.slice.call( process._getActiveHandles() );
    handles.forEach( ( h, i ) => {
      var name = h.constructor.name;
      if( h.unref && typeof h.unref == 'function' && ( name == 'Socket' || name == 'Server' ) ){
        console.log("Unreffing:", i );
        h.unref();
      }
    });

    console.log("After unreffing:");
    wtf.dump();

    setTimeout( function(){
      console.log("This process should soon close");
      console.log("Here is the event queue keeping it alive:");
      wtf.dump( true );
    }, 1000);
  }, 1000 );
};
I am concerned because the server also sends email, and I want to make absolutely sure that any email being sent actually goes out.
Basically:
"Given a Socket object, how do you tell whether it's an INBOUND socket (one receiving connections, as Node's HTTP server would open) or an OUTBOUND socket (one opened by nodemailer to send an email)?"
I want to unref() all inbound sockets and leave the outbound ones in peace until all email has been sent.
Hints?

I ended up keeping track of the inbound sockets opened:
var server = http.createServer(app);

// Keep track of all inbound sockets. They will be
// unref()ed at cleanup time
var inboundSockets = {};
var socketId = 0;

server.on('connection', function( socket ){
  socket.__id = socketId ++;
  inboundSockets[ socket.__id ] = socket;
  socket.on('close', function(){
    delete inboundSockets[ socket.__id ];
  });
});
process.on('uncaughtException', function (err) {
  console.error("Error caught: ");
  console.error(err);
  tidyUp();
});

process.on('SIGTERM', function(){
  console.log("TERMINATING THIS INSTANCE!");
  tidyUp();
});
function tidyUp(){
  console.log("Closing the server...");
  server.close();

  // The next line is important AS IS as it will tell naps that the
  // server is no longer functional
  console.log("THE SERVER HAS STOPPED");

  console.log("Ordering a hotplate shutdown...");
  process.emit( 'hotplateShutdown' );

  // This will give time to server.close() to actually work..
  setTimeout( function() {
    console.log("Calling unref() for inbound sockets:");
    Object.keys( inboundSockets ).forEach( ( __id ) => {
      var s = inboundSockets[ __id ];
      console.log("Unreffing inbound socket:", s.__id );
      s.unref();
    });
    db.unref();

    console.log("After unreffing:");
    wtf.dump();

    setTimeout( function(){
      console.log("This process should soon close");
      console.log("Here is the event queue keeping it alive:");
      wtf.dump( true );
    }, 5000);
  }, 1000 );
};
Note that I assign an ID to each socket at creation time (there doesn't seem to be a clear, universal way to get a unique ID) and then unref them all when the server goes down...
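If you prefer not to patch an __id onto each socket, here is a minimal sketch of the same idea using a WeakSet; the isInbound() and unrefInboundHandles() helpers are illustrative names, not an existing API, and app plus the use of process._getActiveHandles() are taken from the code above:
// Minimal sketch: classify sockets by remembering which ones the server accepted.
// "Inbound" here simply means "seen on our http.Server's 'connection' event";
// anything else (e.g. a socket opened by nodemailer) is treated as outbound.
var http = require('http');
var inbound = new WeakSet();   // holds accepted sockets without preventing GC

var server = http.createServer(app);
server.on('connection', function( socket ){
  inbound.add( socket );       // mark as inbound the moment it is accepted
});

function isInbound( socket ){
  return inbound.has( socket );
}

// At shutdown time, unref only the sockets we know are inbound:
function unrefInboundHandles(){
  process._getActiveHandles().forEach( function( h ){
    if( h.constructor.name === 'Socket' && isInbound( h ) ){
      h.unref();
    }
  });
}
A WeakSet saves you from deleting entries by hand on 'close', although the explicit id map above works just as well.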

Related

Socket.io emitting multiple times to client

Basically we use the socket to call delivery drivers for an app made in Flutter. In the app, the socket runs inside a foreground service; however, when the internet connection is lost, the socket accumulates the emits and ends up crashing the app, causing a memory leak.
I'm out of ideas on how to fix this.
On the server side:
global.ioDeliveryman = require('./socketio.js').init(deliveryManServer, {
  pingInterval: 10,
  pingTimeout: 5,
});

global.ioDeliveryman.on('connect', function (socket) {
  socket.on('USER_CONNECTED', function(data){
    global.connectedDeliverymanUsers[data.id] = { socket_id: socket.conn.id, aceite: data.aceite };
  });

  socket.on('USER_DISCONNECTED', function(data){
    if (global.connectedDeliverymanUsers[data.id]) {
      try {
        global.ioDeliveryman.sockets.connected[global.connectedDeliverymanUsers[data.id].socket_id].disconnect();
        delete global.connectedDeliverymanUsers[data.id];
      } catch (TypeError) {
        console.log("Disconnect of undefined");
      }
    }
  });
});
Client side:
if (!socket.connected) {
  conectar();
  var jsao;
  socket.on(describeEnum(SocketFunction.ALOCAR_ENTREGADOR), (data) async => {
    socket.emit(describeEnum(SocketFunction.PONG), { "id": userId } ),
    Vibration.vibrate(pattern: [1, 1000, 500, 2000]),
    // the var jsao is set here
    NotificationUtils.showNotificationNovaCorrida(jsao),
    await FGS.ForegroundService.sendToPort(jsao),
    // check if the app is in the background and wait ~45 seconds
    Timer(Duration(seconds: 43), () async {
      await FGS.ForegroundService.sendToPort('{"order": {"funcao": "VERIFICAR_BACKGROUND"}}');
      if (isBackground) {
        Timer(Duration(seconds: 2), () {
          enviarAceiteEntregador(false, "${data["order"]["user_id"]}");
        });
      }
      isBackground = true;
    }),
  });
}
Emit inside a function callback on backend
Here we do a ping/pong to see if the user is connected, but this memory leak sometimes happens.
It's done in a loop with a callback to cycle through the available deliverers one by one until one of them accepts.
// PING
try {
  if (alertMap.get(entregador.deliveryman_id.toString())) {
    callback();
    return;
  }
  // emit for deliveryman with socketId
  global.ioDeliveryman.to(socketId).emit('ALOCAR_ENTREGADOR', { order: actualOrder, clientName: clientName });
} catch (TypeError) {
  console.log("Disconnect of undefined");
}

// PONG
pongMap.set(entregador.deliveryman_id.toString(), false);
Everything works until the moment the delivery person loses connection; then the socket starts emitting madly.
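One thing that might help, sketched below rather than a guaranteed fix: mark the repeated ALOCAR_ENTREGADOR pings as volatile so they are dropped instead of buffered whenever the deliveryman's connection is not ready, and skip to the next deliveryman as soon as the target socket is gone. This assumes Socket.IO v3/v4, where io.sockets.sockets is a Map and broadcast operators support the volatile flag; pingDeliveryman and nextDeliveryman are illustrative names, not part of the original code:
// Sketch: drop "allocation" pings instead of queueing them while the deliveryman is offline.
function pingDeliveryman(socketId, actualOrder, clientName, nextDeliveryman) {
  const target = global.ioDeliveryman.sockets.sockets.get(socketId);

  if (!target || !target.connected) {
    // Socket is gone: move on to the next deliveryman instead of piling up emits.
    nextDeliveryman();
    return;
  }

  // volatile: if the underlying transport is not ready, the packet is simply dropped
  // rather than buffered, so nothing accumulates while the connection flaps.
  global.ioDeliveryman.to(socketId).volatile.emit('ALOCAR_ENTREGADOR', {
    order: actualOrder,
    clientName: clientName,
  });
}
The same idea applies on the Flutter side: guard the PONG emit with a connected check so the client does not queue emits of its own while offline.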

How to separate heartbeat to its own thread for RabbitMQ connection

I have a process that uses RabbitMQ and Node.js to do image processing. Because the task is CPU-intensive, I think I have the same issue as the one described here: https://github.com/squaremo/amqp.node/issues/261
I am trying to figure out how to implement the last comment on that issue.
"Yep. NodeJS is single-threaded, and if you use that thread to do something for a long time, nothing else will happen. As #michaelklishin suggests, the known solution for this general problem is using a child process, or the cluster module."
EDIT:
I updated the code below with a sample of how I think I can do this with the amqp-connection-manager module. Right now I use a global variable to hold the actual message so I can ack it later; I am guessing there is a better way to do this (see the sketch after the code).
// Used to be an example for how to keep the connection thread and the working thread separate
// in order to fix the issue of missing heartbeat intervals due to processing on the same thread
const cluster = require('cluster');
var amqp = require('amqp-connection-manager');
var config = require('./config.json');

var working_queue = "Test_Queue";

// DON'T REALLY DO THIS
var rabbit_msg_data;

// ******* CLUSTER SETUP *******
// This will spawn off the number of workers for this process.
if (cluster.isMaster) {
  console.log("Master " + process.pid + " is running");

  var worker = cluster.fork();

  cluster.on('exit', (worker, code, signal) => {
    if (signal) {
      console.log("worker was killed by signal: " + signal);
      console.log(worker);
    } else if (code !== 0) {
      console.log("worker exited with error code: " + code);
      console.log(worker);
    } else {
      console.log("Worker " + worker.process.pid + " exited successfully");
      console.log(worker);
      // Not sure if this works this way or if I need to put this worker into variables
    }
  });

  // testing sending a message back and forth
  // setTimeout(function() {
  //   worker.send("I got a request!");
  // }, 1000);

  // ******** RABBIT MQ CONNECTION **********
  // Create a connection manager to rabbitmq
  var connection = amqp.connect(config.rabbit_connections_arr, { json: true, heartbeatIntervalInSeconds: 2 });
  connection.on('connect', function() {
    console.log('Connected to rabbitmq');
  });
  connection.on('disconnect', function(params) {
    console.log('Disconnected from rabbitmq:', params.err.stack);
  });

  // Set up a channel listening for messages in the queue.
  var channelWrapper_listening = connection.createChannel({
    setup: function(channel) {
      // `channel` here is a regular amqplib `ConfirmChannel`.
      return Promise.all([
        channel.assertQueue(working_queue, { durable: true }),
        channel.prefetch(1),
        channel.consume(working_queue, function(data) {
          rabbit_msg_data = data;
          worker.send(data.content.toString());
        }, { noAck: false }) // manual ack: the master acks once the worker reports back
      ]);
    }
  });

  worker.on('message', function(msg) {
    // console.log("Worker to Master (ack): ", msg.content.toString());
    console.log("Worker to Master (ack): ", msg);
    // console.log(msg.content.toString());
    channelWrapper_listening.ack(rabbit_msg_data);
  });
}
else // All worker processes (MAIN LOGIC)
{
  console.log("Worker " + process.pid + " started");

  process.on('message', function(msg) {
    console.log("Master to Worker (working): ", msg);
    // send msg back when done working on it.
    setTimeout(function() {
      process.send(msg);
    }, 5000);
  });
}
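On the "better way than a global variable" point, one option is to key each in-flight message by its amqplib delivery tag and have the worker echo that tag back, so the master always acks the right message. This is a sketch only, meant to be merged into the master branch above; the pending map and the shape of the object sent to the worker are illustrative choices, not a prescribed API:
// Sketch: replace the rabbit_msg_data global with a map of in-flight messages,
// keyed by the amqplib delivery tag, so more than one message can be pending at once.
var pending = new Map();

// In the channel setup, remember each message before handing the work to the worker:
channel.consume(working_queue, function(data) {
  var tag = data.fields.deliveryTag;
  pending.set(tag, data);
  worker.send({ tag: tag, body: data.content.toString() });
}, { noAck: false });

// In the master, ack by tag when the worker reports completion:
worker.on('message', function(msg) {
  var data = pending.get(msg.tag);
  if (data) {
    channelWrapper_listening.ack(data);
    pending.delete(msg.tag);
  }
});

// In the worker, echo the tag back together with (or instead of) the result:
process.on('message', function(msg) {
  // ...do the image processing on msg.body here...
  process.send({ tag: msg.tag, done: true });
});
With channel.prefetch(1) only one message is in flight at a time anyway, but the map removes the shared global and keeps working if you later raise the prefetch.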

Emit multiple messages socket.io from one open connection

I am new to socket.io, and trying to figure out how to send multiple messages. Here is the scenario I am working on:
function setupServer(server) {
  var socketIO = require("socket.io").listen(server);
  socketIO.sockets.on('connection', function (socket) {
    console.log("client is connected");
    socket.emit('update', { progress: "starting..." });
  });
}
I have to call setupServer(server) from another method, and I am receiving "starting..." on the client side.
But the problem is, I want to send more messages LATER ON. I cannot send an array of messages, because my application builds message strings every 10 milliseconds or so (in a callback function) and I want to send each one as soon as it is created.
Any solution? Is it possible to get the socket object reference to reuse outside this function?
From socket.io docs: http://socket.io/docs/#broadcasting-messages
To broadcast, simply add a broadcast flag to emit and send method calls. Broadcasting means sending a message to everyone else except for the socket that starts it.
function setupServer(server) {
  var socketIO = require("socket.io").listen(server);

  socketIO.on('connection', function (socket) {
    console.log("client is connected");
    socket.emit('update', { progress: "starting..." });
    startBroadcastFromSocket(socket, 3);

    socket.on('disconnect', function(){
      stopBroadcastFromSocket(socket);
    });
  });

  // broadcast message to all sockets
  setInterval(function() {
    broadcastMessageToEveryone(socketIO, 'Hello everyone (FROM SERVER)');
  }, 1000);
}

function broadcastMessageToEveryone(io, body) {
  io.emit('message', { body: body, timestamp: new Date() });
}

function broadcastMessageFromSocket(socket, body) {
  socket.broadcast.emit('message', { body: body, timestamp: new Date() });
}

var socketIntervals = {};

function stopBroadcastFromSocket(socket) {
  if (socketIntervals[socket.id]) {
    clearInterval(socketIntervals[socket.id]);
  }
}

function startBroadcastFromSocket(socket, seconds) {
  socketIntervals[socket.id] = setInterval(function(){
    broadcastMessageFromSocket(socket, 'Hello!');
  }, seconds * 1000);
}
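To address the last part of the question (reusing the reference outside setupServer), one common pattern is to keep the io instance in module scope and export a small helper, so any later callback can emit through the already open connections. A sketch only, assuming a single Socket.IO server; the module name progress.js and the notifyProgress() helper are illustrative, not an established API:
// progress.js -- sketch: keep the Socket.IO instance around and emit from anywhere later.
var socketIO = null;

function setupServer(server) {
  socketIO = require("socket.io").listen(server);
  socketIO.sockets.on('connection', function (socket) {
    console.log("client is connected");
    socket.emit('update', { progress: "starting..." });
  });
}

// Call this from any callback, e.g. every 10 ms while building message strings.
function notifyProgress(message) {
  if (socketIO) {
    socketIO.sockets.emit('update', { progress: message });
  }
}

module.exports = { setupServer: setupServer, notifyProgress: notifyProgress };
If only the client that started the job should see the updates, hold on to the individual socket (or its socket.id, or a room) inside the 'connection' handler instead of broadcasting to socketIO.sockets.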

React and Socket.io: Able to get initial data - but view doesn't update when a new post comes in

Not sure if the issue is how I have my sockets set up, or if I am incorrectly trying to render the data with React.
I can successfully pull in data with my socket, yet the state doesn't update live when new data is posted to the server. My intention is for React's state to automatically render new data, which should always be current because of the socket connection.
Here is my client app that gets messages from the server and renders them:
var Chat = React.createClass({
  getInitialState: function() {
    return {
      messages: null
    };
  },
  componentWillMount: function(){
    var self = this;
    socket.emit('getMessages');
    socket.on('serverMessages', function (data) {
      self.setState({ messages: data });
    });
  },
  render: function() {
    var messages = this.state.messages ? <MessageList messages={this.state.messages}/> : null;
    return (
      <div className="jumbotron">
        { messages }
        <MessageForm submitMessage={this.submitMessage}/>
      </div>
    );
  }
});
Just in case here is my server code that emits data:
io.on('connection', function (socket) {
  socket.on('getMessages', function (data) {
    Message.find(function(err, messages){
      socket.emit('serverMessages', messages);
    });
  });
});
As of right now, you're "just" grabbing data from the server once the component has loaded. To get something a bit more "real time", you'll want to either ping the server regularly with the same emit statement you specified (which defeats the point of using websockets; you could just use long-polling) or have the server push new data to all clients as it becomes available.
You can do EITHER:
A) Client side: "Polling" for information [Not Ideal]
Note: I initially put this in my answer because I saw the OP was "polling" when the controller was loaded. It didn't click for me that this might be because the controller may not be loaded when the websocket connects, so sending data on connect might not work here. My bad.
Replace socket.emit('getMessages') with something that will "poll" the websocket regularly for data:
setInterval(function () {
  socket.emit('getMessages');
}, 10000); /* Request data from the socket every 10 seconds */
OR
B) Server side: Send new data as it becomes available. [Best Way]
Track all clients via a clients array and delete them from it when their session ends.
var clients = [];

io.on('connection', function (socket) {
  clients.push(socket);

  socket.on('end', function () {
    // Could also splice the array below, but it still works.
    delete clients[clients.indexOf(socket)];
  });

  /* Previous logic for server goes here */
});
Run this code when you need to push new messages from the database/data storage:
for (var i in clients) {
  clients[i].emit('serverMessages', /* messages object */);
}
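For instance, the broadcast loop could be triggered right after a new message is saved. This is a sketch only; the Express route, the /messages path, and the Mongoose-style Message model usage are assumptions based on the question, not code the OP posted:
// Sketch: push to every connected client as soon as a new message is stored.
app.post('/messages', function (req, res) {
  var message = new Message({ body: req.body.body });
  message.save(function (err, saved) {
    if (err) { return res.status(500).send(err); }

    // Re-read the full list (or just append `saved`) and broadcast it.
    Message.find(function (err, messages) {
      for (var i in clients) {
        clients[i].emit('serverMessages', messages);
      }
      res.json(saved);
    });
  });
});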
Your server code is only firing upon initial socket connection.
Server:
socket.on('getMessages', function (data) {
  Message.find(function(err, messages){
    socket.emit('serverMessages', messages);
  });
});
Client:
var Chat = React.createClass({
  getInitialState: function() {
    return {
      messages: null
    };
  },
  componentWillMount: function(){
    var self = this;
    socket.emit('getMessages');
    socket.on('serverMessages', function (data) {
      self.setState({ messages: data });
    });
  },
  render: function() {
    var messages = this.state.messages ? <MessageList messages={this.state.messages}/> : null;
    return (
      <div className="jumbotron">
        { messages }
      </div>
    );
  }
});
Based on naming convention, it also appears that your Message.find() is pulling a single message. I would recommend clarifying the labeling to match cardinality.
Try this:
var Chat = React.createClass({
  getInitialState: function() {
    return {
      messages: null
    };
  },
  componentWillMount: function(){
    var self = this;
    socket.emit('getMessages');
    socket.on('serverMessages', function (data) {
      self.setState({ messages: data });
    });
  },
  render: function() {
    var messages = this.state.messages ? <MessageList messages={this.state.messages}/> : null;
    return (
      <div className="jumbotron">
        { messages }
        <MessageForm submitMessage={this.submitMessage}/>
      </div>
    );
  }
});
Could it be that this is due to the componentWillMount lifecycle method? Could you try componentDidMount instead?
It looks like render will see the state update, but according to Facebook it only gets executed once despite the state change.
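A sketch of what that suggestion might look like, assuming the same socket global as in the question; the componentWillUnmount cleanup is an addition of this sketch, not something the original answer showed:
var Chat = React.createClass({
  getInitialState: function() {
    return { messages: null };
  },
  componentDidMount: function() {
    var self = this;
    // Ask for the current messages once, then keep listening for pushes.
    socket.emit('getMessages');
    socket.on('serverMessages', this._onServerMessages = function (data) {
      self.setState({ messages: data });
    });
  },
  componentWillUnmount: function() {
    // Stop listening so an unmounted component never calls setState.
    socket.off('serverMessages', this._onServerMessages);
  },
  render: function() {
    var messages = this.state.messages ? <MessageList messages={this.state.messages}/> : null;
    return (
      <div className="jumbotron">
        { messages }
        <MessageForm submitMessage={this.submitMessage}/>
      </div>
    );
  }
});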

Redis subscribe timeout

How can I set a timer function for a Redis sub, so that when a client connects and no message is received from the publisher within 200ms, the socket emits an event?
This is what I have now:
io.on('connection', function(socket){
  const sub = redis.createClient();
  sub.subscribe("data");

  sub.on("message", function(channel, message) {
    io.emit("data", JSON.parse(message));
  });

  sub.on("error", function(err){
    console.log("Error " + err);
  });

  socket.on("disconnect", function(){
    sub.removeListener('message', function(){
      io.emit("disconnected");
    });
  });
});
I know it's a late answer, but hopefully it can help someone else, or you if you still haven't found one. You can use a timeout that calls a handler function in case the message isn't received (I'm guessing that's what you're asking).
I'm using Redis as well, and I make a call to other databases to check if they have the user with the requested id. To make sure the user doesn't just sit and wait, I added a timeout which I set to 500ms. So it's a race, and whichever comes first wins; I also unsubscribe the Redis client to make sure it's a one-off.
return User.mongoose.findAsync({ _id: params.id })
  .then( results => {
    if (sails.IS.empty(results)) {
      const request_id = Date.now() + '#' + sails.HOST_NAME + '/user/' + params.id

      sails.REDIS_SUB.subscribe(request_id)
      sails.REDIS_SUB.on('message', (channel, message) => {
        if (channel == request_id) {
          const data = JSON.parse(message)
          sails.dlogwarn(params.id + '\t<=\t(' + data.responder + ')')
          sails.REDIS_SUB.unsubscribe(request_id)
          return data.params.results
        }
      })

      sails.REDIS_PUB.publish('/user/find', JSON.stringify({
        request_id: request_id,
        params: {
          _id: params.id
        }
      }))

      setTimeout( () => {
        sails.REDIS_SUB.unsubscribe(request_id)
        return results
      }, Number(process.env.REDIS_MAX_QUERY_TIMEOUT))
    } else return results
  })
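Applied to the original question, the same idea boils down to starting a 200ms timer per connection and clearing it on the first message. A sketch only, reusing the node_redis subscriber and Socket.IO setup from the question; the 'no-data' event name is a placeholder, not an established convention:
io.on('connection', function(socket){
  const sub = redis.createClient();
  sub.subscribe("data");

  // If nothing arrives from the publisher within 200ms, tell this client.
  let timer = setTimeout(function(){
    socket.emit("no-data");          // placeholder event name
  }, 200);

  sub.on("message", function(channel, message) {
    clearTimeout(timer);             // a message made it in time
    socket.emit("data", JSON.parse(message));
  });

  socket.on("disconnect", function(){
    clearTimeout(timer);
    sub.quit();                      // close this client's subscriber connection
  });
});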
