Socket.io Redis Server Broadcast - node.js

I'm missing something really basic here. I have two very standard Express processes with Socket.IO. I'm trying to get a server broadcast on one process to deliver the same message to the other server's clients. I have the following setup:
var server = require('http').createServer(app);
var socketio = require('socket.io')(server, {
  serveClient: (config.env !== 'production'),
  path: '/socket.io-client'
});

require('./config/express')(app);
require('./routes')(app);

server.listen(config.port, config.ip, function () {
  logger.info('Express server listening on %d, in %s mode', config.port, app.get('env'));
});

var redis = require('socket.io-redis');
var adapter = redis({ host: config.redis.uri, port: config.redis.port });
adapter.pubClient.on('error', function () {
  logger.error('Error on pub client');
});
adapter.subClient.on('error', function () {
  logger.error('Error on sub client');
});
socketio.adapter(adapter);

require('./config/socketio')(socketio);
Then my socket io file is:
module.exports = function (socketio) {
  // Listen to other server broadcasts
  socketio.on('socket.io#/#', function (data) {
    logger.debug('received message from other process');
  });

  // Client connection code
  socketio.on('connection', function (socket) {
    socket.address = socket.handshake.address !== null ?
      socket.handshake.address + ':' + socket.handshake.address.port :
      process.env.DOMAIN;
    socket.connectedAt = new Date();

    // Call onDisconnect.
    socket.on('disconnect', function () {
      onDisconnect(socket);
      logger.debug('[%s] DISCONNECTED', socket.address);
    });

    // Call onConnect.
    onConnect(socketio, socket);
    logger.debug('[%s] CONNECTED', socket.address);
  });
};
From a different server I'm just calling a standard emit
socketio.emit('message', object);
In terms of debugging, I load up the redis cli and check the channels that are created:
1) "socket.io#/#"
2) "socket.io-sync#request#/#"
I even run SUBSCRIBE "socket.io#/#" in the cli and can see the message I'm trying to get on the other server displayed via the cli.
This seems so basic, how do I properly subscribe to events on the redis channel 'socket.io#/#'?
EDIT: So I figured out that I receive messages on my other process if I do
socketio.sockets.adapter.subClient.on('message', function (channel, message) {
  logger.warn('GOT STUFF ');
});
However, here I'm just going straight to the Redis sub client. The channel and message are just buffers of unusable data. Isn't this supposed to provide a higher level of abstraction?
EDIT 2: I've got it "mostly" working now. Doing this:
socketio.sockets.adapter.subClient.on('message', function (channel, message) {
  logger.warn('GOT STUFF ');
  socketio.sockets.in(channel).emit(message);
});
Turns out the socket interface will take the channel and message buffers directly. Now to my next issue: instead of doing socketio.emit('message', doc); I want to do socketio.sockets.in(doc._id).emit('message', doc);. Basically I only want it to end up at clients that are part of that room. It doesn't quite seem to be working as is...
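For reference, my understanding is that the adapter is meant to make this transparent without touching subClient at all: if each client joins a room when it connects, a room-scoped emit from either process should only reach sockets in that room, with socket.io-redis doing the relay. A minimal sketch of what I mean (the 'watch-doc' event name is hypothetical, just to show where the room join would happen):

// In the connection handler, on whichever process the client lands on,
// have the client join a room named after the document it cares about.
socketio.on('connection', function (socket) {
  socket.on('watch-doc', function (docId) {
    socket.join(docId);
  });
});

// Then, from any process, emit to that room (doc here is whatever update
// triggered the broadcast). The redis adapter relays the packet, and the
// other process delivers it only to sockets that joined the room.
socketio.sockets.in(doc._id).emit('message', doc);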

Related

Socket.IO duplicated message

I'm trying to build a chat application using Laravel, with a Node.js server, Socket.IO, and Redis. What I have is this:
Client JS:
var socket = io('http://localhost:3005');
var room = '17';

$("#send").click(function () {
  content = $("textarea").val();
  id = $("#id").val();
  $.ajax({
    url: "{{route('send.message')}}",
    method: "POST",
    data: {content, id, room},
    success: function () {
    }
  });
});

socket.on('cacad', function (message) {
  console.log(message); // multiple copies here
});

socket.on('connect', function () {
  console.log("Connected!");
  socket.emit('room', room);
});
Laravel Controller:
public function sendMessage(Request $request){
    event(new EventName($request->all()));

    $message = new Message;
    $message->message = $request->content;

    $redis = LRedis::connection();
    $redis->publish('chat-channel', json_encode($request->all()));

    $message->save();
}
Node Server:
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var redis = require('ioredis');
var myMessage;

var redis_client = redis.createClient();
redis_client.subscribe('chat-channel');

io.on('connection', function (socket) {
  redis_client.on('message', function (channel, message) {
    var myData = JSON.parse(message);
    socket.broadcast.to('17').emit('cacad', 'u i u a a');
  });

  socket.on('room', function (room) {
    socket.join(room);
  });

  socket.on('disconnect', function () {
    console.log("disconnected!");
  });
});

http.listen(3005, function () {
  console.log('Listening on Port 3005');
});
I am trying to broadcast the message u i u a a in room 17. But when I receive it in the console, it shows multiple copies of it, 2x-4x. In the Laravel controller I publish a message using Redis, and I subscribe to it in the Node server. It is received successfully, but the problem is the multiple copies of the message on the client side.
Where am I going wrong? Thank you :)
I'm pretty convinced I found the issue, although I had to read up a bit on the basic API usage because I haven't used the library lately.
Looking at the docs, your issue is explained pretty clearly.
Here you are listening for a new connection:
io.on('connection', function(socket) {
If the client asks to join a specific room, you join them:
socket.on('room', function (room) {
  socket.join(room);
So far it's like the docs:
io.on('connection', function (socket) {
  socket.join('some room');
});
But the issue is with your emit: on every client connection you register another 'message' listener on your Redis client, and each of those listeners then broadcasts to the room through that connected client's socket. So with several clients connected, each published message gets emitted to the room several times.
Instead of that, you can do this:
io.on('connection', function (socket) {
  socket.on('room', function (room) {
    socket.join(room);
  });

  socket.on('disconnect', function () {
    console.log("disconnected!");
  });
});

redis_client.on('message', function (channel, message) {
  var myData = JSON.parse(message);
  io.to('17').emit('cacad', 'u i u a a');
});
I think this happens on the socket.io-client side, not on the server side. When I was using React.js on the client side, I received the same message multiple times.
With the same server, I imported socket.io-client 4.4.1 into a vanilla JS front-end project, and then I didn't get multiple messages... :)
Try using the latest socket.io client versions; I think they have fixed the issue in the latest versions.

Node-amqp and socket.io strange behaviour

I'm currently trying to implement the pub/sub pattern using node-amqp (https://github.com/postwait/node-amqp).
I'm having some problems implementing it.
What I need:
Publish a message from a user
Broadcast it to the other users
Send the message to offline users, who will consume it the next time they connect
What I have currently:
(function () {
  var amqp = require('amqp');
  var connection = amqp.createConnection({ host: 'http://127.0.0.1:5672/' });

  var app = require('express')();
  var server = require('http').Server(app);
  var io = require('socket.io')(server);

  app.get('/', function (req, res) {
    res.sendfile(__dirname + '/index.html');
  });

  server.listen(8888);

  // Wait for connection to become established.
  connection.on('ready', function () {
    var sendMessage = function (queue, msg) {
      connection.publish(queue, JSON.stringify(msg));
    }

    io.sockets.on('connection', function (socket) {
      socket.on('message', function (msg) {
        sendMessage('my-queue', msg);
      });

      connection.queue('my-queue', {autoDelete: false}, function (q) {
        q.bind('#');
        q.subscribe(function (message) {
          socket.broadcast.emit('news', message);
        });
      });
    });
  });
})()
On the index.html page, I connect to the socket server.
I have a button that sends a message.
I open two different browsers on the index page, and both users are connected.
If I send a message to the server, it sends it to the other user.
If I send a second message to the server, it sends the message to the user that sent it.
It alternates: every even-numbered message (because I have two users) goes to the other user, and every odd-numbered message goes back to the user who sent it.
What is this behaviour?
Can you help me correct my code so it implements my needs properly?
NB: I use RabbitMQ with a standard config on a Windows 7 x64 computer.
EDIT: I made a solution where every consumer can get the message:
(function () {
  var amqp = require('amqp');
  var connection = amqp.createConnection({ host: 'http://127.0.0.1:5672/' });

  var app = require('express')();
  var server = require('http').Server(app);
  var io = require('socket.io')(server);

  app.get('/', function (req, res) {
    res.sendfile(__dirname + '/index.html');
  });

  server.listen(8888);

  // Wait for connection to become established.
  connection.on('ready', function () {
    connection.exchange('logs', {type: 'fanout', autoDelete: false}, function (exchange) {
      var sendMessage = function (queue, msg) {
        exchange.publish(queue, JSON.stringify(msg));
      }

      io.sockets.on('connection', function (socket) {
        socket.on('message', function (msg) {
          sendMessage('', msg);
        });

        connection.queue(socket.id, {exclusive: true}, function (q) {
          q.bind('logs', '');
          q.subscribe(function (message) {
            socket.emit('news', message);
          });
        });
      });
    });
  });
})()
My last problem is that I can't manage offline messages now... Any solutions? (The bounty ends tomorrow :-/)
The problem is RabbitMQ will send each message to a single user on purpose. That user acknowledges that it received the message (amqp does this for you automatically) and then the work is done as far as RabbitMQ is concerned, so it deletes the message.
The reason your users take turns receiving a message is that RabbitMQ tries to spread the load of incoming messages evenly over users.
Your question has been answered before here. Check it out for a solution to your problem!
You need to create separate queues for each of the consumers (in your case, users), and route messages to all of them from the exchange. That way, when you publish a message, it will be placed in every user's queue, and each of them will be able to consume it independently.
You have this:
http://www.rabbitmq.com/tutorials/tutorial-two-python.html
And you need this:
http://www.rabbitmq.com/tutorials/tutorial-three-python.html
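To make the offline part more concrete, here is a rough sketch of that idea with node-amqp (the 'login' event and the per-user queue names are made up for illustration, and I haven't verified the durability flags against your setup): give each user a named, non-auto-delete queue bound to the fanout exchange, so messages published while they are offline sit in their queue until they reconnect and subscribe again.

connection.on('ready', function () {
  connection.exchange('logs', { type: 'fanout', autoDelete: false }, function (exchange) {
    io.sockets.on('connection', function (socket) {
      // Hypothetical event where the client identifies itself.
      socket.on('login', function (userId) {
        // One durable queue per user (not per socket): messages published
        // while the user is offline wait here until they come back.
        connection.queue('user-' + userId, { autoDelete: false, durable: true }, function (q) {
          q.bind('logs', '');
          q.subscribe(function (message) {
            socket.emit('news', message);
          });
        });
      });

      socket.on('message', function (msg) {
        exchange.publish('', JSON.stringify(msg));
      });
    });
  });
});

A real version would also unsubscribe the consumer on disconnect so a user doesn't end up with several competing consumers after reconnecting.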

node.js server socket.io-client with SSL

I'm trying to set up two Node.js servers communicating with each other over socket.io. The Node servers use SSL, but I can't get it running. I get no feedback at all; it's close to this:
Node.js socket.io-client connect_failed / connect_error event
This does not work. No response:
var clientio = require('socket.io-client');

console.log('Trying stuff ...');

// the channel does not exist
var socket = clientio.connect('http://localhost:4000/news');

// I expect this event to be triggered
socket.on('connect_failed', function () {
  console.log('Connection Failed');
});
socket.on('connect', function () {
  console.log('Connected');
});
socket.on('disconnect', function () {
  console.log('Disconnected');
});
but if I try:
// Bind to the news namespace, also get the underlying socket
var ns_news = clientio.connect('https://localhost:9000');
var socket = ns_news.socket;

// Global events are bound against socket
socket.on('connect_failed', function () {
  console.log('Connection Failed');
});
socket.on('connect', function () {
  console.log('Connected');
});
socket.on('disconnect', function () {
  console.log('Disconnected');
});

// Your events are bound against your namespace(s)
ns_news.on('myevent', function () {
  // Custom event code here
});
I can see that ns_news has no socket property, so I get:
TypeError: Cannot call method 'on' of undefined
So how do I connect these two servers, with feedback on whether the connection was successful or not?
And my next question would be: how can these two servers authenticate to each other?
Meaning: server A says to server B:
- hey, give me that secret string
And server B checks the certificate of server A, and if it's OK:
- here's the string
How do I do that with Node?
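I can't say this is the canonical way, but a minimal sketch of the direction I'd try with a recent socket.io/socket.io-client (file names are placeholders, and I'm assuming socket.io-client forwards the TLS options key/cert/ca to the underlying Node request, which its documentation describes for self-signed certificates): terminate TLS with https.createServer, require a client certificate there, and have server A connect presenting its own certificate. The two halves below would live in separate processes.

// Server B: only clients presenting a certificate signed by our trusted CA get in.
var fs = require('fs');
var app = require('express')();
var server = require('https').createServer({
  key: fs.readFileSync('serverB-key.pem'),
  cert: fs.readFileSync('serverB-cert.pem'),
  ca: [fs.readFileSync('serverA-cert.pem')], // trust server A's certificate
  requestCert: true,
  rejectUnauthorized: true
}, app);
var io = require('socket.io')(server);

io.on('connection', function (socket) {
  // By the time we get here, the TLS handshake has already verified server A's certificate.
  socket.emit('secret', 'here is the string');
});
server.listen(9000);

// Server A: connect as a client, presenting its certificate.
var fs = require('fs');
var clientio = require('socket.io-client');
var socket = clientio.connect('https://localhost:9000', {
  key: fs.readFileSync('serverA-key.pem'),
  cert: fs.readFileSync('serverA-cert.pem'),
  ca: fs.readFileSync('serverB-cert.pem'),
  rejectUnauthorized: true
});
socket.on('connect', function () { console.log('Connected'); });
socket.on('connect_error', function (err) { console.log('Connection failed', err); });
socket.on('secret', function (s) { console.log('Got the secret:', s); });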

How do I receive push notifications with socket.IO-client?

My goal is to get push notifications from a website (it provides a Socket.IO interface). In order to do that I use the socket.io-client library. I took the code from some socket.io examples. Here it is:
var io = require('socket.io-client');

socket = io.connect('https://cryptonit.net', {
  port: 8080
});

socket.on('connect', function () {
  console.log('connected');
});
socket.on('message', function (msg) {
  console.log(msg);
});
socket.on('disconnect', function () {
  console.log('disconnected');
});
socket.on('error', function (e) {
  console.log('System', e ? e : 'A unknown error occurred');
});
But I don't get any messages, nor do I see that the connection was established. What could be the source of the problem? How can I test this code?
Actually, all the examples I've seen connect to localhost, not to a real website.
I needed to change
socket = io.connect('https://cryptonit.net', {
  port: 8080
});
to
socket = io.connect('https://cryptonit.net:8080');
to get it working.
I have no idea why the first one doesn't work.

Node JS Server to Server Connection

Is it possible to connect to a NodeJS Server from another server? Two NodeJS servers communicating with each other?
// Server Code
var io = require('socket.io').listen(8090);

io.sockets.on('connection', function (socket) {
  io.sockets.emit('this', { will: 'be received by everyone' });

  socket.on('private message', function (from, msg) {
    console.log('I received a private message by ', from, ' saying ', msg);
  });

  socket.on('disconnect', function () {
    io.sockets.emit('user disconnected');
  });
});

// Client Code in Server Code. Connecting to another server.
io.connect("http://192.168.0.104:8091"); // Connect to another server from this one.

// ETC...
Here's a simple example that creates a server and a client that connects to that server. Remember that what you send has to be a buffer (strings are automatically converted to buffers). The client and server work independently of each other, so they can be put in the same app or on totally different computers.
Server (server.js):
const net = require("net");

// Create a simple server
var server = net.createServer(function (conn) {
  console.log("Server: Client connected");

  // If connection is closed
  conn.on("end", function () {
    console.log('Server: Client disconnected');
    // Close the server
    server.close();
    // End the process
    process.exit(0);
  });

  // Handle data from client
  conn.on("data", function (data) {
    data = JSON.parse(data);
    console.log("Response from client: %s", data.response);
  });

  // Let's respond with a hello message
  conn.write(
    JSON.stringify(
      { response: "Hey there client!" }
    )
  );
});

// Listen for connections
server.listen(61337, "localhost", function () {
  console.log("Server: Listening");
});
Client (client.js):
const net = require("net");

// Create a socket (client) that connects to the server
var socket = new net.Socket();
socket.connect(61337, "localhost", function () {
  console.log("Client: Connected to server");
});

// Let's handle the data we get from the server
socket.on("data", function (data) {
  data = JSON.parse(data);
  console.log("Response from server: %s", data.response);

  // Respond back
  socket.write(JSON.stringify({ response: "Hey there server!" }));

  // Close the connection
  socket.end();
});
The conn and socket objects both implement the Stream interface.
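Because they are streams, you can also use the usual stream methods on them; for example (just an illustration, not part of the protocol above):

// Dump everything the client sends straight to the server's stdout.
conn.pipe(process.stdout);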
Check out substack's dnode. It automatically maps literal objects from the first environment to the second one, so you get a kind of RPC out of the box. And it works in the browser too...
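Roughly the README example, from memory (so treat the exact API as an assumption): you expose an object of functions on one side, pipe dnode over any stream (a plain net connection here), and call those functions from the other side with callbacks that travel across the wire.

// server.js
var dnode = require('dnode');
var net = require('net');

net.createServer(function (c) {
  var d = dnode({
    // This function becomes callable from the remote side.
    transform: function (s, cb) {
      cb(s.replace(/[aeiou]{2,}/, 'oo').toUpperCase());
    }
  });
  c.pipe(d).pipe(c);
}).listen(5004);

// client.js
var dnode = require('dnode');
var net = require('net');

var d = dnode();
d.on('remote', function (remote) {
  // Call the server's function as if it were local.
  remote.transform('beep', function (s) {
    console.log('beep => ' + s); // prints "beep => BOOP"
    d.end();
  });
});

var c = net.connect(5004);
c.pipe(d).pipe(c);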
