How can I set a timer function for a Redis sub, so that when a client connects and no message is received from the publisher within 200ms, the socket emits an event?
This is what i have now:
io.on('connection', function(socket){
    const sub = redis.createClient();
    sub.subscribe("data");
    sub.on("message", function(channel, message) {
        io.emit("data", JSON.parse(message));
    });
    sub.on("error", function(err){
        console.log("Error " + err);
    });
    socket.on("disconnect", function(){
        sub.removeListener('message', function(){
            io.emit("disconnected");
        });
    });
});
I know it's a late question, but hopefully this answer can help someone else, or you if you still haven't found one. You can use a timeout function that calls a handler in case the message isn't received (I'm guessing that's what you're asking).
I'm using Redis as well, and I make a call to other databases to check if they have the user with the requested id. To make sure the user doesn't sit and wait, I added a timeout function set to 500ms. So it's a race, and whichever comes first wins; I also unsubscribe the Redis client to make sure it's a one-off.
return User.mongoose.findAsync({ _id: params.id })
    .then(results => {
        if (sails.IS.empty(results)) {
            const request_id = Date.now() + '#' + sails.HOST_NAME + '/user/' + params.id
            sails.REDIS_SUB.subscribe(request_id)
            sails.REDIS_SUB.on('message', (channel, message) => {
                if (channel == request_id) {
                    const data = JSON.parse(message)
                    sails.dlogwarn(params.id + '\t<=\t(' + data.responder + ')')
                    sails.REDIS_SUB.unsubscribe(request_id)
                    return data.params.results
                }
            })
            sails.REDIS_PUB.publish('/user/find', JSON.stringify({
                request_id: request_id,
                params: {
                    _id: params.id
                }
            }))
            setTimeout(() => {
                sails.REDIS_SUB.unsubscribe(request_id)
                return results
            }, Number(process.env.REDIS_MAX_QUERY_TIMEOUT))
        } else return results
    })
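Applied back to the original question, here is a minimal sketch (assuming the callback-style node_redis API from the question; the 'timeout' event name is my own placeholder, and the 200ms figure comes from the question). It also emits to the connecting socket rather than to everyone:

io.on('connection', function (socket) {
    const sub = redis.createClient();
    let received = false;

    // if no message arrives within 200ms, notify this client
    const timer = setTimeout(function () {
        if (!received) socket.emit('timeout', 'no data received within 200ms');
    }, 200);

    sub.subscribe('data');
    sub.on('message', function (channel, message) {
        received = true;
        clearTimeout(timer);
        socket.emit('data', JSON.parse(message));
    });

    socket.on('disconnect', function () {
        // clean up the per-connection subscriber and timer
        clearTimeout(timer);
        sub.quit();
    });
});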
I'm brand new to socket.io and am trying to create an app similar to Slido: users can send in messages and then view all messages being submitted in real time. I'm using Node.js, Express, socket.io, and Redis on the back end, and React with socket.io-client on the front end.
At the moment, the live messages page/feed only updates (in real time) the first time a message is sent in; after that the emit event appears to stop working, and the list of messages only updates when you refresh the page and it pulls the message history from Redis.
Does anyone know why this may be happening? I've checked that the versions of socket.io for the server and client are the same.
Thank you!
server-side socket setup:
io.on("connect", (socket) => {
initialiseUser(socket);
socket.on("dm", (message) => {
dm(socket, message, io);
});
io.emit("hello", "hello world");
socket.on("disconnecting", () => onDisconnect(socket));
});
// dm logic sits in separate file
module.exports.dm = async (socket, message, io) => {
    message.from = socket.user.userid;
    const messageString = [message.from, message.content].join(".");
    await redisClient.lpush(`prayers:messages`, messageString);
    io.emit("dm", message);
};
client-side setup:
const useSocketSetup = (setMessages, messages) => {
    const { setUser } = useContext(AccountContext);
    useEffect(() => {
        socket.connect();
        socket.on("hello", (content) => {
            console.log("hello world", content);
        });
        socket.on("messages", (redisMessages) => {
            setMessages(redisMessages);
        });
        socket.on("dm", (message) => {
            setMessages((prevMessages) => [message, ...prevMessages]);
            console.log("NEW MESSAGE", message);
        });
        socket.on("connect_error", () => {
            console.log("Socket cannot connect");
            setUser({ loggedIn: false });
        });
        return () => {
            socket.off("connect_error");
            socket.off("messages");
            socket.off("dm");
        };
    }, [setUser, setMessages, messages]);
};

export default useSocketSetup;
The console.log inside socket.on("dm", ...) is only being logged on the first dm event.
This is the form setup for submitting a message:
const { setMessages } = useContext(MessagesContext);
useSocketSetup(setMessages);
return (
    <>
        <Formik
            initialValues={{ message: "" }}
            validationSchema={Yup.object({ message: Yup.string().min(1).max(255) })}
            onSubmit={(values, actions) => {
                const message = { from: null, content: values.message };
                socket.emit("dm", message);
                setMessages((prevMessages) => [message, ...prevMessages]);
                console.log(JSON.stringify(message));
                actions.resetForm();
                navigate("/prayers");
            }}
        >
A component then accesses the messages from context and maps over them to display each one.
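One thing worth checking (an observation from the code shown, not a confirmed diagnosis): the effect's cleanup removes "connect_error", "messages", and "dm", but never the "hello" listener, and because messages sits in the dependency array, the effect tears down and re-registers handlers whenever it re-runs. A sketch of a cleanup that runs once per mount and detaches exactly the handler reference it attached:

useEffect(() => {
    socket.connect();
    // keep a reference so the same function can be removed later
    const onDm = (message) => setMessages((prev) => [message, ...prev]);
    socket.on("dm", onDm);
    return () => {
        socket.off("dm", onDm);
    };
}, [setMessages]); // no `messages` dependency, so this runs once per mount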
I am trying my hand at Node.js/TypeScript for the first time and having a bit of trouble making a consumer for a RabbitMQ queue.
Code:
let amqp = require('amqp');
let connection = amqp.createConnection({url: "amqp://" + RABBITMQ_USER + ":" + RABBITMQ_PASSWORD + "@" + RABBITMQ_HOST + ":" + RABBITMQ_PORT + RABBITMQ_VHOST});

connection.on('ready', function() {
    connection.exchange(RABBITMQ_WORKER_EXCHANGE, function (exchange) {
        connection.queue(RABBITMQ_QUEUE, function (queue) {
            queue.bind(exchange, function() {
                queue.publish(function (message) {
                    console.log('subscribed to queue');
                    let encoded_payload = unescape(message.data);
                    let payload = JSON.parse(encoded_payload);
                    console.log('Received a message:');
                    console.log(payload);
                })
            })
        })
    })
})
It seems to connect to the amqp server and throws no errors but it just sits there and doesn't consume anything. Is there a step I am missing?
Any help would be greatly appreciated,
Thank you.
Here is my solution, which is working, based off of RabbitMQ's JavaScript tutorial:
https://www.rabbitmq.com/tutorials/tutorial-three-javascript.html
It's probably not up to TypeScript standards; feel free to correct me if there's a better way.
#!/usr/bin/env node
require('dotenv').config();
import amqp = require('amqplib/callback_api');
import db = require('./database');

amqp.connect({
    protocol: process.env.RABBITMQ_PROTOCOL,
    hostname: process.env.RABBITMQ_HOST,
    port: process.env.RABBITMQ_PORT,
    username: process.env.RABBITMQ_USER,
    password: process.env.RABBITMQ_PASSWORD,
    vhost: process.env.RABBITMQ_VHOST
}, function(err, conn) {
    if (err) throw err;
    conn.createChannel(function (err, ch) {
        if (err) throw err;
        // set exchange that is being used
        ch.assertExchange(process.env.RABBITMQ_WORKER_EXCHANGE, 'direct', {durable: true});
        // set queue that is being used
        ch.assertQueue(process.env.RABBITMQ_QUEUE, {durable: true}, function (err, q) {
            console.log(" [*] Waiting for messages in %s. To exit press CTRL+C", q.queue);
            // bind the queue to the exchange
            ch.bindQueue(q.queue, process.env.RABBITMQ_WORKER_EXCHANGE, '');
            // consume from the queue, one message at a time
            ch.consume(q.queue, function (msg) {
                console.log("Message received: %s", msg.content.toString());
                // save message to db
                db.store(msg.content.toString()).then(function() {
                    // acknowledge receipt of message to amqp
                    // (note: the second argument, allUpTo = true, acks every
                    // unacknowledged message up to and including this one)
                    console.log("Acknowledging message");
                    ch.ack(msg, true);
                });
            }, {noAck: false});
        });
    });
});
An alternative, using the amqp-ts library:

import * as Amqp from "amqp-ts";

var connection = new Amqp.Connection("amqp://localhost");
var exchange = connection.declareExchange("ExchangeName");
var queue = connection.declareQueue("QueueName");
queue.bind(exchange);
queue.activateConsumer((message) => {
    console.log("Message received: " + message.getContent());
});

// it is possible that the following message is not received because
// it can be sent before the queue, binding or consumer exist
var msg = new Amqp.Message("Test");
exchange.send(msg);

connection.completeConfiguration().then(() => {
    // the following message will be received because
    // everything you defined earlier for this connection now exists
    var msg2 = new Amqp.Message("Test2");
    exchange.send(msg2);
});
I am new to socket.io and trying to figure out how to send multiple messages. Here is the scenario I am working on:
function setupServer(server) {
    var socketIO = require("socket.io").listen(server);
    socketIO.sockets.on('connection', function (socket) {
        console.log("client is connected");
        socket.emit('update', { progress: "starting..." })
    });
}
I have to call setupServer(server) from another method, and I am receiving "starting..." on the client side.
But the problem is, I want to send more/multiple messages LATER ON. I can't send an array of messages, because my application builds message strings roughly every 10 milliseconds (in a callback function), and I want to send each one as soon as it is created.
Any solution? Is it possible to get the socket object reference to reuse outside this function?
From the socket.io docs: http://socket.io/docs/#broadcasting-messages
To broadcast, simply add a broadcast flag to emit and send method calls. Broadcasting means sending a message to everyone else except for the socket that starts it.
function setupServer(server) {
    var socketIO = require("socket.io").listen(server);
    socketIO.on('connection', function (socket) {
        console.log("client is connected");
        socket.emit('update', { progress: "starting..." });
        startBroadcastFromSocket(socket, 3);
        socket.on('disconnect', function() {
            stopBroadcastFromSocket(socket);
        });
    });
    // broadcast a message to all sockets every second
    setInterval(function() {
        broadcastMessageToEveryone(socketIO, 'Hello everyone (FROM SERVER)');
    }, 1000);
}

// send to every connected client
function broadcastMessageToEveryone(io, body) {
    io.emit('message', {body: body, timestamp: new Date()});
}

// send to every client except the one behind this socket
function broadcastMessageFromSocket(socket, body) {
    socket.broadcast.emit('message', {body: body, timestamp: new Date()});
}

var socketIntervals = {};

function stopBroadcastFromSocket(socket) {
    if (socketIntervals[socket.id]) {
        clearInterval(socketIntervals[socket.id]);
        delete socketIntervals[socket.id];
    }
}

function startBroadcastFromSocket(socket, seconds) {
    socketIntervals[socket.id] = setInterval(function() {
        broadcastMessageFromSocket(socket, 'Hello!');
    }, seconds * 1000);
}
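To answer the "reuse outside this function" part of the question: a common pattern (a sketch under the same setup as above, not the only way) is to keep a module-level reference to the io instance and emit from wherever the messages are produced:

var socketIO = null;

function setupServer(server) {
    socketIO = require("socket.io").listen(server);
    socketIO.on('connection', function (socket) {
        socket.emit('update', { progress: "starting..." });
    });
}

// call this from the callback that builds a message string
// (e.g. the one firing every ~10ms) to push it out immediately
function pushUpdate(progress) {
    if (socketIO) {
        socketIO.emit('update', { progress: progress });
    }
}

Note that socketIO.emit sends to every connected client; store a reference to an individual socket instead if the updates are meant for a single client.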
I'm using sockjs with the standard configuration.
var ws = sockjs.createServer();
ws.on('connection', function(conn) {
    conn.on('data', function(message) {
        wsParser.parse(conn, message)
    });
    conn.on('close', function() {
    });
});

var server = http.createServer(app);
ws.installHandlers(server, {prefix: '/ws'});
server.listen(config.server.port, config.server.host);
The wsParser.parse function works like this:
function(conn, message) {
    (...)
    switch(message.action) {
        case "titleAutocomplete":
            titleAutocomplete(conn, message.data);
            break;
        (...) // a lot more of these
    }
}
Each method called in the switch sends a message back to the client.
var titleAutocomplete = function(conn, data) {
    redis.hgetall("titles:" + data.query, function(err, titles) {
        if(err) ERR(err);
        if(titles) {
            var response = JSON.stringify({"action": "titleAutocomplete", "data": {"titles": titles}});
            conn.write(response);
        }
    })
};
Now my problem is that I'd like to write tests for my code (better late than never, I guess) and I have no idea how to do it. I started writing normal HTTP tests with mocha + supertest, but I just don't know how to handle websockets.
I'd like to have only one websocket connection to reuse through all tests; I'm binding the websocket connection to the user session after the first message, and I want to test that persistence as well.
How do I make use of the ws client's onmessage event and utilize it in my tests? How can the tests tell received messages apart and know which one they are supposed to wait for?
A colleague at work asked if it really needed to be a client connection, or whether it would be possible to just mock one up. It turned out that was the way to go. I wrote a little helper, wsMock.js:
var wsParser = require("../wsParser.js");

exports.createConnectionMock = function(id) {
    return {
        id: id,
        cb: null,
        // wsParser responds via conn.write, so the mock forwards
        // each response to the callback stored by the test
        write: function(message) {
            this.cb(message);
        },
        send: function(action, data, cb) {
            this.cb = cb;
            var obj = {
                action: action,
                data: data
            }
            var message = JSON.stringify(obj);
            wsParser.parse(this, message);
        },
        sendRaw: function(message, cb) {
            this.cb = cb;
            wsParser.parse(this, message);
        }
    }
}
Now in my mocha tests I just do:
var wsMock = require("./wsMock.js");
ws = wsMock.createConnectionMock("12345-67890-abcde-fghi-jklmn-opqrs-tuvwxyz");
(...)
describe('Websocket server', function () {
    it('should set sessionId variable after handshake', function (done) {
        ws.send('handshake', {token: data.token}, function (res) {
            var msg = JSON.parse(res);
            msg.action.should.equal('handshake');
            msg.data.should.be.empty;
            ws.should.have.property('sessionId');
            ws.should.not.have.property('session');
            done();
        })
    })
    it('should not return error when making request after handshake', function (done) {
        ws.send('titleAutocomplete', {query: "ter"}, function (res) {
            var msg = JSON.parse(res);
            msg.action.should.equal('titleAutocomplete');
            msg.data.should.be.an.Object;
            ws.should.have.property('session');
            done();
        })
    })
})
It works like a charm and persists connection state and variables between requests. Since the parser only ever calls conn.write on the connection object, the stub needs nothing more than that.
I have a node.js application subscribed to a channel, listening for an event. When it hears that event, it publishes another event to a new channel. I have been using MubSub for my publish/subscribe. My issue is that no matter what I do, every time the page is reloaded there are two listeners and two publishes, then 3, 4, 5, 6, etc.
I don't understand why they don't get disconnected.
if (curURL == '/handwash') {
    db2.handwash.find({
        "event": "handwash"
    }).sort({
        "message.time": -1
    }).limit(10).forEach(function(error, x) {
        if (error || !x) {
            console.log("error getting items");
        } else {
            socket.emit('archive', x.message);
        }
    });
    channel2.subscribe('dealer out', function(message) {
        console.log("new dealer event " + message);
        db.events.find({
            "event": "wash"
        }).sort({
            "message.time": -1
        }).limit(1).forEach(function(error, x) {
            if (error || !x) {
                console.log("error getting items");
            } else {
                console.log("found last handwash..." + x.message.type + "....publishing");
                var message = ({
                    'tableid': 123,
                    'time': Date(),
                    'type': x.message.type,
                })
                channel.publish('handwash', message);
                socket.emit('message', message);
            }
        });
        //socket.emit('message', message);
    });
    socket.on("disconnect", function(s) {
        console.log("Disconnected from global handler");
    });
}
The problem was in how I was using MubSub for subscribing and publishing messages. I fixed it by storing the subscription in a variable and then closing it with unsubscribe():
var subscription = channelEvents.subscribe('pocket', function(message) {});
subscription.unsubscribe();
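Expanded into the connection lifecycle, a minimal sketch (assuming the same channelEvents MubSub channel as above; the emitted event name is illustrative):

io.on('connection', function (socket) {
    // one subscription per socket, tied to that socket's lifetime
    var subscription = channelEvents.subscribe('pocket', function (message) {
        socket.emit('message', message);
    });
    socket.on('disconnect', function () {
        // without this, a fresh handler piles up on every page reload
        subscription.unsubscribe();
    });
});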