Node-amqp and socket.io strange behaviour - node.js

I'm trying to implement the pub/sub pattern using node-amqp (https://github.com/postwait/node-amqp), and I'm having some trouble getting it right.
What I need:
Publish a message from a user
Broadcast it to the other users
Deliver the message to offline users so they can consume it the next time they connect
What I have currently:
(function () {
    var amqp = require('amqp');
    var connection = amqp.createConnection({ host: 'http://127.0.0.1:5672/' });
    var app = require('express')();
    var server = require('http').Server(app);
    var io = require('socket.io')(server);

    app.get('/', function (req, res) {
        res.sendfile(__dirname + '/index.html');
    });

    server.listen(8888);

    // Wait for the connection to become established.
    connection.on('ready', function () {
        var sendMessage = function (queue, msg) {
            connection.publish(queue, JSON.stringify(msg));
        };

        io.sockets.on('connection', function (socket) {
            socket.on('message', function (msg) {
                sendMessage('my-queue', msg);
            });

            connection.queue('my-queue', { autoDelete: false }, function (q) {
                q.bind('#');
                q.subscribe(function (message) {
                    socket.broadcast.emit('news', message);
                });
            });
        });
    });
})();
On the index.html page, I connect to the socket server.
I have a button that sends a message.
I open the index page in two different browsers, and my two users are both connected.
If I send a message to the server, it sends it to the other user.
If I send a second message to the server, it sends it back to the user who sent it.
It alternates: every even-numbered message (because I have two users) goes to the other user, and every odd-numbered message goes back to the user who sent it.
What is causing this behaviour?
Can you help me correct my code so it does what I need, the right way?
NB: I use RabbitMQ with the standard config on a Windows 7 x64 machine.
EDIT: I came up with a solution where every consumer gets the message:
(function () {
    var amqp = require('amqp');
    var connection = amqp.createConnection({ host: 'http://127.0.0.1:5672/' });
    var app = require('express')();
    var server = require('http').Server(app);
    var io = require('socket.io')(server);

    app.get('/', function (req, res) {
        res.sendfile(__dirname + '/index.html');
    });

    server.listen(8888);

    // Wait for the connection to become established.
    connection.on('ready', function () {
        connection.exchange('logs', { type: 'fanout', autoDelete: false }, function (exchange) {
            var sendMessage = function (queue, msg) {
                exchange.publish(queue, JSON.stringify(msg));
            };

            io.sockets.on('connection', function (socket) {
                socket.on('message', function (msg) {
                    sendMessage('', msg);
                });

                connection.queue(socket.id, { exclusive: true }, function (q) {
                    q.bind('logs', '');
                    q.subscribe(function (message) {
                        socket.emit('news', message);
                    });
                });
            });
        });
    });
})();
My last problem is that I can't handle offline messages now... Any solutions? (bounty ends tomorrow :-/)

The problem is that RabbitMQ deliberately delivers each message to a single consumer. That consumer acknowledges that it received the message (node-amqp does this for you automatically), and as far as RabbitMQ is concerned the work is then done, so it deletes the message.
The reason your users take turns receiving messages is that RabbitMQ tries to spread the load of incoming messages evenly over consumers.
Your question has been answered before here. Check it out for a solution to your problem!

You need to create a separate queue for each consumer (in your case, each user) and route messages to all of them from an exchange. That way, when you publish a message, it is placed in every user's queue, and each user can consume it independently.
You have this:
http://www.rabbitmq.com/tutorials/tutorial-two-python.html
And you need this:
http://www.rabbitmq.com/tutorials/tutorial-three-python.html
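Regarding the remaining offline-message problem: one way to handle it (an untested sketch, not part of either tutorial) is to bind one durable, non-auto-deleted queue per user to the fanout exchange, keyed by a stable user id rather than socket.id. Messages published while a user is offline then simply wait in that user's queue until the next subscribe. Here getUserId(socket) is a hypothetical helper that maps a socket to your application's user id:
connection.on('ready', function () {
    connection.exchange('logs', { type: 'fanout', autoDelete: false }, function (exchange) {
        io.sockets.on('connection', function (socket) {
            // Hypothetical helper: resolve the application-level user id for this socket.
            var userId = getUserId(socket);

            socket.on('message', function (msg) {
                // Fanout exchanges ignore the routing key, so '' is fine here.
                exchange.publish('', JSON.stringify(msg), { deliveryMode: 2 }); // mark persistent
            });

            // Durable queue named after the user: it survives disconnects and restarts,
            // so messages published while this user is offline accumulate here.
            connection.queue('user-' + userId, { durable: true, autoDelete: false }, function (q) {
                q.bind('logs', '');
                q.subscribe(function (message) {
                    socket.emit('news', message);
                });
            });
        });
    });
});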

Related

How to create multiple Nodejs socket io server-client?

I am new to Node.js and Socket.io, and this is the first time I am creating a chat application, so pardon me if I am asking a silly question.
On my website I have two kinds of chat services: one is a live debate, which is a kind of chat room, and the other is private messaging, like Facebook Messenger.
I have created both. In the private messenger I check the conversation id before showing a message, and it works correctly. The live debate also works correctly.
But there is an issue: any message sent in the private messenger is also displayed in the live debate window. So I changed the Server.js file for the messenger and changed its listening port (the live debate now listens on port 3000 and the messenger on 8050), but the live debate still receives the messenger's messages.
Am I doing this the wrong way? Is there another way to run two chat applications?
I am using this server code:
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var redis = require('redis');

server.listen(3000);

io.on('connection', function (socket) {
    console.log("client connected");
    var redisClient = redis.createClient();
    redisClient.subscribe('message');
    redisClient.on("message", function (channel, data) {
        console.log("mew message add in queue " + data + " channel");
        socket.emit(channel, data);
    });
    socket.on('disconnect', function () {
        redisClient.quit();
    });
});
Using a namespace:
server.js
var nsp = io.of('/debate');

nsp.on('connection', function (socket) {
    console.log("client connected " + socket.id);
    var redisClient = redis.createClient();
    redisClient.subscribe('message');
    var redisClient1 = redis.createClient();
    redisClient1.subscribe('debate');
    redisClient.on("message", function (channel, data) {
        console.log("mew message add in queue " + data + " channel");
        nsp.emit(channel, data);
    });
    socket.on('disconnect', function () {
        redisClient.quit();
    });
});
Client code:
var socket = io.connect('http://localhost:3000/debate');

socket.on('message', function (data) {
    data = jQuery.parseJSON(data);
    console.log(data.user);
    $("#messages").append("<strong>" + data.user + ":</strong><p>" + data.message + "</p>");
    $('#messages').animate({
        scrollTop: $('#messages').get(0).scrollHeight
    }, 200);
});
Socket.io supports the use of different namespaces. You should use that feature instead of creating two separate servers. You can then emit to a specific namespace. For more information, see the documentation: https://socket.io/docs/rooms-and-namespaces/
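As a rough sketch of that suggestion (not the asker's code): one server, two namespaces, each fed by its own Redis channel. The channel names 'debate' and 'private' are placeholders:
// One socket.io server, two namespaces, each wired to its own Redis channel.
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var redis = require('redis');

server.listen(3000);

var debate = io.of('/debate');        // live debate namespace
var messenger = io.of('/messenger');  // private messaging namespace

var sub = redis.createClient();
sub.subscribe('debate');   // placeholder channel for debate messages
sub.subscribe('private');  // placeholder channel for private messages

sub.on('message', function (channel, data) {
    if (channel === 'debate') {
        debate.emit('message', data);     // only /debate clients receive this
    } else if (channel === 'private') {
        messenger.emit('message', data);  // only /messenger clients receive this
    }
});
On the client, each page would connect to its own namespace, e.g. io('http://localhost:3000/debate') for the debate page and io('http://localhost:3000/messenger') for the messenger page.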
It's not too difficult. I wrote a chat app, as I think everyone does when they start with Node.js, and Socket.io has rooms, which are quite easy to use.
io.on('connection', function (client) {
    client.on('room_connection', function (id) {
        client.join(id); // User joins room.
    });

    client.on('message', function (data) {
        io.to(data.room).emit('message', { message: data.message, client: client.conn.id });
    });
});
This is pretty much all you need. It works for PMs, since you simply restrict who is allowed to join that room, and for normal chat rooms, which multiple users can join.
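A possible client-side counterpart to the room-based server above, using the 'room_connection' and 'message' events from that snippet (the room id 'conversation-42' is just a placeholder):
var socket = io('http://localhost:3000');

// Join the room for this conversation (the id comes from your application).
socket.emit('room_connection', 'conversation-42');

// Send a message into that room.
socket.emit('message', { room: 'conversation-42', message: 'Hello!' });

// Receive messages broadcast to the room.
socket.on('message', function (data) {
    console.log(data.client + ': ' + data.message);
});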

Having problems sending private message in a socket.io room using redis

I'm trying to build a secure, private, Facebook-like messaging system using Laravel, Redis pub/sub and Socket.io. All the console.log calls on the Node server report that everything went through, and everything works fine until I try to emit the message to the client. When I do, nothing at all happens.
Although I don't think my Laravel controller method is significant here, here it is anyway. You can skip it if it's not necessary.
Laravel Message Controller:
public function store($id, Request $request)
{
    // Stores message in $message
    // Connects Redis
    $redis = Redis::connection();
    $data = [
        'message' => $request->input('message'),
        'user' => Auth::user()->name,
        'room' => $message->conversation_id
    ];
    $redis->publish('message', json_encode($data));
    return Redis::get('message');
    response()->json([]);
}
The data being published to Redis, as JSON:
{message: 'some message', user: 'john doe', room: 1}
Node Server
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var redis = require('redis');

server.listen(3000);

io.on('connection', function (socket) {
    console.log("client connected");
    var redisClient = redis.createClient();
    redisClient.subscribe('message');
    redisClient.on("message", function (channel, data) {
        data = JSON.parse(data);
        socket.join(data.room, function () {
            console.log('Joined room ' + data.room);
        });
        console.log("new message add in queue " + data.room + " channel");
        socket.to(data.room).emit(channel, data);
    });
    socket.on('disconnect', function () {
        redisClient.quit();
    });
});
Client Side JS
var socket = io.connect('http://localhost:3000');

socket.on('message', function (data) {
    // Just console the message and user right now.
    console.log(data.message + " " + data.user);
});
I don't know if my problem is that I'm not specifying the room on the client. If so, is there any way of emitting the data to the client without specifying the room in the client-side code? Why can't I emit the message to the client?

Socket.io Redis Server Broadcast

I'm missing something really basic here. I have two very standard Express processes with Socket.io. I'm trying to get a server broadcast on one process to deliver the same message to the other server's clients. I have the following setup:
var server = require('http').createServer(app);
var socketio = require('socket.io')(server, {
    serveClient: (config.env !== 'production'),
    path: '/socket.io-client'
});

require('./config/express')(app);
require('./routes')(app);

server.listen(config.port, config.ip, function () {
    logger.info('Express server listening on %d, in %s mode', config.port, app.get('env'));
});

var redis = require('socket.io-redis');
var adapter = redis({ host: config.redis.uri, port: config.redis.port });

adapter.pubClient.on('error', function () {
    logger.error('Error on pub client');
});
adapter.subClient.on('error', function () {
    logger.error('Error on sub client');
});

socketio.adapter(adapter);

require('./config/socketio')(socketio);
Then my socket io file is:
module.exports = function (socketio) {
    // Listen to other server broadcasts
    socketio.on('socket.io#/#', function (data) {
        logger.debug('received message from other process');
    });

    // Client connection code
    socketio.on('connection', function (socket) {
        socket.address = socket.handshake.address !== null ?
            socket.handshake.address + ':' + socket.handshake.address.port :
            process.env.DOMAIN;
        socket.connectedAt = new Date();

        // Call onDisconnect.
        socket.on('disconnect', function () {
            onDisconnect(socket);
            logger.debug('[%s] DISCONNECTED', socket.address);
        });

        // Call onConnect.
        onConnect(socketio, socket);
        logger.debug('[%s] CONNECTED', socket.address);
    });
};
From a different server I'm just calling a standard emit
socketio.emit('message', object);
In terms of debugging, I load up the redis cli and check the channels that are created:
1) "socket.io#/#"
2) "socket.io-sync#request#/#"
I even ran SUBSCRIBE "socket.io#/#" in the CLI and can see the message I'm trying to receive on the other server displayed there.
This seems so basic; how do I properly subscribe to events on the Redis channel 'socket.io#/#'?
EDIT: So I figured out that I receive messages on my other process if I do
socketio.sockets.adapter.subClient.on('message', function (channel, message) {
    logger.warn('GOT STUFF ');
});
However, here I'm just going straight to the Redis sub client. The channel and message are just buffers of unusable data. Isn't this supposed to provide a higher level of abstraction?
EDIT 2: I've got it "mostly" working now. Doing this:
socketio.sockets.adapter.subClient.on('message', function (channel, message) {
    logger.warn('GOT STUFF ');
    socketio.sockets.in(channel).emit(message);
});
Turns out the socket interface will take the channel and message buffers directly. Now to my next issue: instead of doing socketio.emit('message', doc); I want to do socketio.sockets.in(doc._id).emit('message', doc);. Basically, I only want it to end up at clients that are part of that room. It doesn't quite seem to be working as is...
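For what it's worth, the socket.io-redis adapter is designed to handle this cross-process propagation for you: if both processes attach the same adapter, a room-scoped emit on one process should reach matching clients connected to the other, without touching subClient directly. A minimal sketch (the 'watch' event name is made up; doc._id is used as the room name, as above):
// Both processes attach the same Redis adapter.
var redisAdapter = require('socket.io-redis');
socketio.adapter(redisAdapter({ host: config.redis.uri, port: config.redis.port }));

// Each client joins the room for the document it cares about.
socketio.on('connection', function (socket) {
    socket.on('watch', function (docId) {
        socket.join(docId);
    });
});

// Emitting from either process then reaches the room's members on both processes.
socketio.to(doc._id).emit('message', doc);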

How to update the website && the mobile from another process in node js?

I have a process:
var fetcher = new Fetcher({ url: 'http://www.example.com' });

setInterval(function () {
    fetcher.getImageUrls().then(saveToDb, handleError, notifyProgress);
}, 10000);
I also have a website, and I want to use socket.io to push updates about the progress (using the notifyProgress method). I don't know how to sync the website with the process.
Note that I can't use in-process events, because the process can live anywhere (I can also duplicate the process with more instances).
How can I do that?
Thanks
You want to use Redis to create a pub/sub system.
You publish your updates to a channel:
// notifyProgress.js
var Redis = require('redis'),
    redis = Redis.createClient(6379, '127.0.0.1');

module.exports.publish = function (req, res, next) {
    // Publish to the mySpecialChannel channel.
    // ('data' would come from the request, e.g. req.body.)
    redis.publish('mySpecialChannel', data);
};
Your different servers/clusters subscribe to the channel:
// socket server
var Redis = require('redis'),
    redis = Redis.createClient(6379, '127.0.0.1'),
    express = require('express'),
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io')(server);

redis.on('subscribe', function (channel, count) {
    console.log('server subscribed to %s', channel);
});

redis.on('message', function (channel, message) {
    if (channel === 'mySpecialChannel') {
        // Send info to your clients.
        io.emit('update', message);
    }
});

redis.subscribe('mySpecialChannel');
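To complete the picture, the browser side just listens for the 'update' event emitted above (a sketch; the host, port and handling are placeholders):
// Browser-side: receive the progress updates pushed by the socket server above.
var socket = io('http://localhost:3000'); // placeholder host/port

socket.on('update', function (message) {
    console.log('progress update:', message);
    // e.g. update a progress bar in the DOM here
});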

Express + Socket.IO + RabbitMQ (node-amqp)

I'm having a hard time putting these three together, probably because I'm not properly getting the concept of routing with Express.
I have a RabbitMQ queue with event updates. We can recognize these events by their id, and on a given event's page I only want the updates corresponding to that id.
Queue: 1316, 1539, 3486, 3479, 1316, 3890, 3479, ... -> fed from the DB indefinitely.
www.example.com/event/1316 -> gets only the messages with id 1316 from the queue
www.example.com/event/3479 -> gets only the messages with id 3479 from the queue
My code works well when I load the first event, but when I load a second one in a different window, it gets messages from both events, and if I load a third one, you guessed it, it gets messages from all three ids.
app.js
var express = require('express')
  , http = require('http');

var app = express();
var server = http.createServer(app);
var io = require('socket.io').listen(server, { log: false });

require('./io')(io);

var amqp = require('amqp');
var rabbitMQ = amqp.createConnection({ host: 'localhost' });

rabbitMQ.on('ready', function () {
    console.log('Connected to RabbitMQ');
    io.sockets.on('connection', function (socket) {
        console.log('Socket connected: ' + socket.id);
        rabbitMQ.queue('offer', { autoDelete: false, durable: false, exclusive: false }, function (q) {
            q.bind('#'); // Catch all messages
            q.subscribe(function (message) {
                obj = JSON.parse(message.data.toString());
                //socket.broadcast.to(obj.id).emit('message', obj);
                io.sockets.in(obj.id).emit('message', obj);
            });
        });
    });
});

var routes = require('./routes')
  , event = require('./routes/event');

app.get('/', routes.index);
app.get('/event/:id', event.index);

server.listen(app.get('port'), function () {
    console.log("Express server listening on port " + app.get('port'));
});
io.js
var socketio = function (io) {
    if (!io) return socketio._io;
    socketio._io = io;
};
module.exports = socketio;
routes/event.js
var io = require('../io')();

exports.index = function (req, res) {
    io.sockets.on('connection', function (socket) {
        socket.join(req.params.id);
    });
    res.render('event', { title: 'Event' });
};
Thanks!
You are receiving them all because you join rooms but never leave them. If you look at the Socket.IO Rooms page on the wiki, at the bottom it provides io.sockets.manager.roomClients[socket.id] as a way to get a list of rooms the socket has joined (which I suspect will include all three if you've visited all three links).
You might want to try going through this list of rooms and leaving any that aren't the current room, and see if that solves the problem.
Edit
OK, so there are two reasons/solutions here. I just tested my theory, and it is valid: you receive messages for each room you've joined, and will continue to do so until you leave them. So here are the options:
1. Leave all other rooms when the client joins a room
io.sockets.on('connection', function (socket) {
    var room = req.params.id;
    var roomKeys = Object.keys(io.sockets.manager.roomClients[socket.id]);
    roomKeys.forEach(function (key) {
        if (key === '' || key === '/' + room) return;
        socket.leave(key.slice(1));
    });
    socket.join(room);
});
As said, I tested this. It works.
2. Don't send a message event, send a {room name} event
Instead of emitting a 'message' event, you could emit a '{room name}' event. Instead of your q.subscribe() callback containing io.sockets.in(obj.id).emit('message', obj);, you would just do socket.emit(obj.id, obj); and have the JavaScript client listen only for that page's event type (based on the URL path).
I also tested this. It also works. It's also simpler (I think) because it only requires the .emit() in your q.subscribe() callback, which means you skip the 'room management' stuff.
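A rough sketch of option 2, pieced together from the snippets above (eventId on the client is a hypothetical variable parsed from the URL path):
// Server: inside the existing q.subscribe() callback (per connected socket),
// emit an event named after the id instead of a generic 'message' event.
q.subscribe(function (message) {
    var obj = JSON.parse(message.data.toString());
    socket.emit(obj.id, obj); // each client only listens for its own event id
});

// Client (e.g. on /event/1316): eventId is parsed from the URL path.
var socket = io.connect();
socket.on(eventId, function (obj) {
    console.log('Update for event ' + eventId, obj);
});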
After trying and failing, I understood what I was doing wrong: using io.sockets.on('connection') inside the route handler was duplicating the event handler. So in the end, the simplest way of thinking about it is the right one.
app.js
var room = '';

var roomHandler = function (req, res, next) {
    if (req.path.match('event')) {
        room = req.params.id;
    }
    next(); // Passing the request to the next handler in the stack.
};

io.sockets.on('connection', function (socket) {
    socket.join(room);
});

rabbitMQ.on('ready', function () {
    rabbitMQ.queue('offer', { autoDelete: false, durable: false, exclusive: false }, function (q) {
        q.bind('#'); // Catch all messages
        q.subscribe(function (message) {
            obj = JSON.parse(message.data.toString());
            io.sockets.in(obj.id).emit('message', obj);
        });
    });
});
app.get('/', routes.index);
app.get('/event/:id', roomHandler, event.index);
