firing socket.emit() on http request - node.js

Right now I have the following code:
sIo.sockets.on('connection', function(socket){
    socket.emit('hello', 'world');
});
I would like to be able to emit this when somebody opens a page from my routes, like this:
// app.js
app.get('/send', routes.index);

// routes.js
exports.index = function(req, res){
    socket.emit('hello', 'world');
};
How can I achieve this? Thanks in advance

To send a socket message to all connected sockets, you can just call io.sockets.emit instead of socket.emit. There are a few ways to send messages using socket.io which I'll outline below.
// Send the message to all connected clients
io.sockets.emit('message', data);
// Send the message to only this client
socket.emit('message', data);
// Send the messages to all clients, except this one.
socket.broadcast.emit('message', data);
There is also a concept of rooms which you can use to segment your clients.
// Have a client join a room.
socket.join('room')
// Send a message to all users in a particular room
io.sockets.in('room').emit('message', data);
All of that covers how to send messages, but it sounds like what you're really asking is how to access the socket and/or io objects from inside a separate file. One option is to pass those dependencies to that file. Your require line will end up looking something like this.
var routes = require('./routes')(io);
Where io is the socket.io object created from .listen. To handle that in your route file you'll have to change how you're defining your exports.
module.exports = function(io) {
    return {
        index: function(req, res) {
            io.sockets.emit('hello', 'world');
            res.send('hello world');
        }
    };
};
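If you take this approach, the app.js side of the wiring might look roughly like the following. This is only a sketch, assuming the Express/socket.io setup implied by the question; the file layout and port are illustrative.

// app.js (illustrative sketch)
var express = require('express'),
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server);

// Pass the io instance into the route module at require time.
var routes = require('./routes')(io);

app.get('/send', routes.index);

server.listen(3000);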
A cleaner implementation would have your routes expose events that your socket code can bind to. The following is untested, but should be very close.
var util = require("util"),
    events = require("events");

function MyRoute() {
    events.EventEmitter.call(this);
}

util.inherits(MyRoute, events.EventEmitter);

MyRoute.prototype.index = function(req, res) {
    this.emit('index');
    res.send('hello world');
};

module.exports = new MyRoute();
And then in your app.js file, when you're binding Express routes and socket.io:
app.get('/send', routes.index);

routes.on('index', function() {
    io.sockets.emit('hello', 'world');
});
There are many other ways to accomplish this, but the best one depends on what you're trying to do. As I alluded to before, broadcasting to everyone is going to be far simpler than sending a message to one particular user.

Related

Sending socket message when post request is received

I would like to emit a socket.io message to all connected clients when any client sends a POST request. How can I keep the socket open so that my HTTP request handlers can access the connected socket?
I was able to get the following to work, but if a client is not connected the POST route doesn't work.
io.on('connection', function(socket){
    console.log('socket connected');
    app.post('/api/guests', function(req, res){
        socket.emit('newguest', {hello: 'world'});
    });
});
I also tried saving the socket to a higher scope and even a global but that didn't seem to work either.
Thanks in advance!
You don't have to call socket.emit inside the on('connection', ...) handler.
Try something like this:
app.post('/api/news', user.can('access private page'), function(req, res, next) {
    io.sockets.emit("nuova:news", data);
});
In my case I pass the "io" variable from the "server.js" file (or app.js) like this:
require('./app/myRoute')(app, user, io);
And receive it in the controller like this:
module.exports = function(app, user, io) {
    ...
}
"io" is declared like this:
var wsServer = require('http').createServer(app);
var io = require('socket.io').listen(wsServer);
I hope this additional info is useful to you...
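Putting the pieces of this answer together, a minimal end-to-end sketch could look like the following. The file names are illustrative, and the user.can middleware from the answer is left out to keep the sketch self-contained.

// server.js (illustrative sketch)
var express = require('express'),
    app = express(),
    wsServer = require('http').createServer(app),
    io = require('socket.io').listen(wsServer);

require('./app/myRoute')(app, io);

wsServer.listen(3000);

// app/myRoute.js (illustrative sketch)
module.exports = function(app, io) {
    app.post('/api/guests', function(req, res) {
        // Broadcast to every connected client; works even if nobody is connected yet.
        io.sockets.emit('newguest', {hello: 'world'});
        res.end();
    });
};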

Can't connect to socket after emit

I am working on a project that involves real-time temperatures and have a device sending temps via GET requests that are routed through the server and emitted to the socket. Then I want the server to connect to the original socket and emit the data to a new one that is being read by my client.
Here is my app.js
var express = require('express'),
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server);

server.listen(8080);

app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

app.route('/:temp')
    .get(function (req, res){
        var temp = req.params.temp;
        res.end(temp);
        io.sockets.on('connection', function(socket){
            socket.emit('send temp', temp);
        });
    });

io.sockets.on('connection', function(socket){
    socket.on('send temp', function(data){
        console.log('connected to send temp'); // this never shows in the console
        io.sockets.emit('new temp', data);
    });
});
The route code in app.js works fine. When I hit localhost:3000/test and change the client to connect to 'send temp' (rather than 'new temp'), 'test' is output.
Here is the relevant section of my client
var socket = io.connect();
var $temp = $('#temp');
socket.on('new temp', function(data){
    $temp.html("Temp: " + "<br/>" + data);
});
I am running node version 4.1.2, socket 1.3.7 and express 4.10.8.
I am wondering why I cannot connect to the original socket a second time. Or that may not even be my problem. I have studied the many 'chat' tutorials and searched for others trying to do what I want to do, without any success.
Ultimately what I am trying to have happen is have a client hit /:temp over and over with a real-time reading and then have other clients get that data in real-time.
This is all still a little new to me so any help is appreciated.
Your code example registers a message handler on the server for the 'send temp' message. The client registers a message handler for the 'new temp' message.
The two (client and server) are then sitting there in a stalemate waiting for someone to first send a message, but (per the code you've disclosed) nobody ever does.
I don't really understand what the intent is for your code, but I see several issues.
First off, you do not want to install a listener for the connection event inside this code:
app.route('/:temp')
    .get(function (req, res){
        var temp = req.params.temp;
        res.end(temp);
        io.sockets.on('connection', function(socket){
            socket.emit('send temp', temp);
        });
    });
Why would you only start listening for a connection event when a particular route handler is hit? And why add yet another event handler every time that route is hit? This code is just completely wrong. I don't know what you thought you were trying to achieve with it, but it's not the way to do things.
Second off, this code is waiting for the client to send a 'send temp' message and when it gets one, it attempts to broadcast that to all clients. But, the part of your client you disclose never sends a 'send temp' message.
io.sockets.on('connection', function(socket){
    socket.on('send temp', function(data){
        console.log('connected to send temp'); // this never shows in the console
        io.sockets.emit('new temp', data);
    });
});
Third off, please describe exactly what you're trying to accomplish in words so we can better know what code to recommend in order to do that.
EDIT
Now that you've described the actual problem here:
Ultimately what I am trying to have happen is have a client hit /:temp
over and over with a real-time reading and then have other clients get
that data in real-time.
It is a little easier to recommend a solution:
On the server:
var express = require('express'),
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server);

server.listen(8080);

app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

app.get('/:temp', function (req, res) {
    var temp = req.params.temp;
    res.end(temp);
    // send this temperature to all connected clients
    io.emit('new temp', temp);
});
On the client:
var socket = io.connect();
var $temp = $('#temp');
socket.on('new temp', function(data){
    $temp.html("Temp: " + "<br/>" + data);
});
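To feed the /:temp route with readings over and over, any plain HTTP client will do. Here is a small sketch of a simulated device; the interval and the generated values are made up purely for illustration.

// push-temps.js (illustrative sketch of the sending device)
var http = require('http');

// Send a fake reading to the server every two seconds.
setInterval(function () {
    var temp = (20 + Math.random() * 5).toFixed(1);
    http.get('http://localhost:8080/' + temp, function (res) {
        res.resume(); // drain the response so the connection is released
    });
}, 2000);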

Node-amqp and socket.io strange behaviour

I'm currently trying to implement the pub/sub pattern using node-amqp (https://github.com/postwait/node-amqp).
I'm having some problems implementing it.
What I need:
Publish a message from a user
Broadcast it to the other users
Send the message to offline users, who will consume it the next time they connect
What I have currently:
(function () {
    var amqp = require('amqp');
    var connection = amqp.createConnection({ host: 'http://127.0.0.1:5672/' });
    var app = require('express')();
    var server = require('http').Server(app);
    var io = require('socket.io')(server);

    app.get('/', function (req, res) {
        res.sendfile(__dirname + '/index.html');
    });

    server.listen(8888);

    // Wait for the connection to become established.
    connection.on('ready', function () {
        var sendMessage = function (queue, msg) {
            connection.publish(queue, JSON.stringify(msg));
        };
        io.sockets.on('connection', function (socket) {
            socket.on('message', function (msg) {
                sendMessage('my-queue', msg);
            });
            connection.queue('my-queue', {autoDelete: false}, function (q) {
                q.bind('#');
                q.subscribe(function (message) {
                    socket.broadcast.emit('news', message);
                });
            });
        });
    });
})();
On the index.html page, I connect to the socket server.
I have a button that sends a message.
I open two different browsers on the index page, and both my users are connected.
If I send a message to the server, it sends it to the other user.
If I send a second message to the server, it sends the message back to the user that sent it.
It alternates: every even-numbered message (because I have two users) goes to the other user, and every odd-numbered message goes back to the user who sent it.
What is causing this behaviour?
Can you help me correct my code so it meets my requirements properly?
NB: I use RabbitMQ with the standard config on a Windows 7 x64 machine.
EDIT: I came up with a solution where every consumer gets the message:
(function () {
    var amqp = require('amqp');
    var connection = amqp.createConnection({ host: 'http://127.0.0.1:5672/' });
    var app = require('express')();
    var server = require('http').Server(app);
    var io = require('socket.io')(server);

    app.get('/', function (req, res) {
        res.sendfile(__dirname + '/index.html');
    });

    server.listen(8888);

    // Wait for the connection to become established.
    connection.on('ready', function () {
        connection.exchange('logs', {type: 'fanout', autoDelete: false}, function (exchange) {
            var sendMessage = function (queue, msg) {
                exchange.publish(queue, JSON.stringify(msg));
            };
            io.sockets.on('connection', function (socket) {
                socket.on('message', function (msg) {
                    sendMessage('', msg);
                });
                connection.queue(socket.id, {exclusive: true}, function (q) {
                    q.bind('logs', '');
                    q.subscribe(function (message) {
                        socket.emit('news', message);
                    });
                });
            });
        });
    });
})();
My last problem is that I can't manage offline messages now... Any solutions? (Bounty ends tomorrow :-/)
The problem is RabbitMQ will send each message to a single user on purpose. That user acknowledges that it received the message (amqp does this for you automatically) and then the work is done as far as RabbitMQ is concerned, so it deletes the message.
The reason your users take turns receiving a message is that RabbitMQ tries to spread the load of incoming messages evenly over consumers.
Your question has been answered before here. Check it out for a solution to your problem!
You need to create separate queues for each of the consumers (in your case, users), and route messages to all of them from an exchange. That way, when you publish a message, it will be placed in all users' queues, from which each of them will be able to consume it independently.
You have this:
http://www.rabbitmq.com/tutorials/tutorial-two-python.html
And you need this:
http://www.rabbitmq.com/tutorials/tutorial-three-python.html
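As for the remaining offline-message requirement, which the answers above don't cover: one common approach is to give each user a named, durable (non-exclusive) queue bound to the fanout exchange, so messages keep piling up in that queue while the user is disconnected. The sketch below is untested, the way the user is identified is purely illustrative, and it only relies on node-amqp calls documented by the library (connection.queue, q.bind, q.subscribe, q.unsubscribe).

io.sockets.on('connection', function (socket) {
    var userId = socket.handshake.query.userId; // however you identify the user

    // A named, durable queue is not deleted when the consumer disconnects,
    // so messages published while this user is offline wait here.
    connection.queue('user.' + userId, {durable: true, autoDelete: false}, function (q) {
        q.bind('logs', '');
        var consumerTag;
        q.subscribe(function (message) {
            socket.emit('news', message);
        }).addCallback(function (ok) {
            consumerTag = ok.consumerTag;
        });

        // Stop consuming when the socket goes away; the queue itself remains.
        socket.on('disconnect', function () {
            if (consumerTag) q.unsubscribe(consumerTag);
        });
    });
});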

How to emit event in socket.io based on client?

I am working on a realtime data visualization application using node.js, express and socket.io.
Requirement:
I have to emit events based on the client's request.
For example: if a user enters the URL http://localhost:8080/pages, socket.io should emit the topic pages to that client, and if another user requests http://localhost:8080/locations, the socket should emit locations to that particular user.
Code
var server = app.listen("8080");
var socket = require('socket.io');
var io = socket.listen(server);
var config = {};

io.on('connection', function (socket) {
    config.socket = io.sockets.socket(socket.id);
    socket.on('disconnect', function () {
        console.log('socket.io is disconnected');
    });
});

app.get('/*', function(req, res) {
    var url = req.url;
    var eventName = url.substring('/'.length);
    // pages and locations
    config.socket.volatile.emit(eventName, result);
});
Client Code:
// No problem in the client code. It's working correctly.
Sample code as follows
socket.on('pages', function (result) {
    console.log(result);
});
Problem:
It is emitting pages and locations to both clients.
Any suggestions to overcome this problem?
I couldn't quite follow your approach here, but since you said you're rendering different pages, that means you can serve different client code, so what about doing it like this:
Server Side:
var server = app.listen("8080");
var socket = require('socket.io');
var io = socket.listen(server);
var config = {};

app.get('/pages', function(req, res) {
    res.render('pages.html');
});

app.get('/locations', function(req, res) {
    res.render('locations.html');
});

io.on('connection', function (socket) {
    socket.on('pagesEvent', function(data){
        socket.volatile.emit('pages', {your: 'data'});
    });
    socket.on('locationsEvent', function(data){
        socket.volatile.emit('locations', {your: 'data'});
    });
});
On Client side:
pages.html:
socket.on('connect', function(){
    socket.emit('pagesEvent', {});
});

socket.on('pages', function(data){
    // do stuff here
});
locations.html:
socket.on('connect', function(){
    socket.emit('locationsEvent', {});
});

socket.on('locations', function(data){
    // do stuff here
});
You are doing it wrong; WebSockets are supposed to work the same way in both directions: the client emits an event to the server, and the server emits back to the client/subscribers.
The way you are doing things looks like a way of implementing an API, but for some reason you are trying to implement it with WebSockets instead of XHR.

Working with Routes in express js and socket.io and maybe node in general

I am trying to write a multi-channel application in socket.io. The channel you are in should be defined by the URL you are on. If I do the joining part in app.js with hard-coded values, everything works. As soon as I change it so that the route for route.page does the joining, I get an error that sockets is not available in that context. What would be the correct way to dynamically join the channel?
/app.js
var io = socketio.listen(app);
require('./io')(io);

io.sockets.on('connection', function (socket) {
    socket.on('debug', function (message) {
        socket.get('channel', function (err, name) {
            socket.in(name).broadcast.emit('debug', message);
        });
    });
});
/io.js
var socketio = function (io) {
    if (!io) return socketio._io;
    socketio._io = io;
};

module.exports = socketio;
/routes/index.js
var io = require('../io')();

exports.page = function(req, res){
    var channel = req.params.id;
    res.render('page', { title: 'PAGE', channel: channel });
    io.sockets.on('connection', function (socket) {
        socket.join(channel);
        socket.set('channel', channel);
    });
};
The easiest way I've found to do multiple channels is off of different URLs.
For example I have the client do the following:
io.connect('/game/1')
io.connect('/system')
and on the server I have
io.of('/game/1').on('connect', function(socket) {...})
io.of('/system').on('connect', function(socket) {...})
It looks like I'm connecting twice here, but socket.io is smart enough to use a single websocket for this connection (at least it says so in the how-to-use).
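To get the dynamic per-URL channel the question asks for without registering connection handlers inside the route, another option (a sketch, not taken from the answer above; it uses the rooms API described earlier instead of namespaces) is to render the channel into the page and let the client ask to join it:

// app.js (illustrative sketch): one connection handler, joining rooms on demand
io.sockets.on('connection', function (socket) {
    socket.on('join', function (channel) {
        socket.join(channel);
    });
    socket.on('debug', function (data) {
        socket.broadcast.to(data.channel).emit('debug', data.message);
    });
});

// routes/index.js (illustrative sketch): the route only renders, no socket work
exports.page = function (req, res) {
    res.render('page', { title: 'PAGE', channel: req.params.id });
};

// client (in the rendered page): join the channel this page was rendered with,
// e.g. read it from the template and emit it once connected
// var socket = io.connect();
// socket.emit('join', channel);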
