Socket.io and Redis Adapter same rooms and different servers - node.js

We are building a scalable real-time collaborative text editor. The client (frontend) is written in React.js, and the server (backend) uses WebSockets via socket.io.
The client opens a connection to the WebSocket server and joins a room named after the documentID in the URL, so every event fired inside a document is shared across all clients that have the same document open, since they all join the room with the same documentID.
client code
useEffect(() => {
  if (socket == null || quill == null) return

  // Emit save-document with the full editor contents every SAVE_INTERVAL_MS
  const interval = setInterval(() => {
    socket.emit("save-document", quill.getContents())
  }, SAVE_INTERVAL_MS)

  return () => {
    clearInterval(interval)
  }
}, [socket, quill])

useEffect(() => {
  if (socket == null || quill == null) return

  // Load the document once, then enable editing
  socket.once("load-document", document => {
    quill.setContents(document)
    quill.enable()
  })

  socket.emit("get-document", documentId)
}, [socket, quill, documentId])

useEffect(() => {
  if (socket == null || quill == null) return

  // Apply remote changes broadcast by the server
  const handler = delta => {
    quill.updateContents(delta)
  }
  socket.on("receive-changes", handler)

  return () => {
    socket.off("receive-changes", handler)
  }
}, [socket, quill])
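The client also forwards local edits to the server as "send-changes" events (the server listens for those below). That hook isn't shown in the snippet above; a minimal sketch of it, assuming the standard Quill "text-change" event, would look like this:

// Minimal sketch (assumed, not in the original question): forward local
// Quill edits to the server as "send-changes"
useEffect(() => {
  if (socket == null || quill == null) return

  const handler = (delta, oldDelta, source) => {
    // Only forward edits made by the local user, not changes applied
    // programmatically via quill.updateContents()
    if (source !== "user") return
    socket.emit("send-changes", delta)
  }
  quill.on("text-change", handler)

  return () => {
    quill.off("text-change", handler)
  }
}, [socket, quill])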
server code
socket.on('get-document', async (documentID) => {
  const document = await lookUpDocument(documentID);

  // Join the room named after the document, then send its current contents
  socket.join(documentID);
  socket.emit("load-document", document.data);

  // Re-broadcast edits to every other client in the same room
  socket.on("send-changes", (delta) => {
    socket.broadcast.to(documentID).emit("receive-changes", delta)
  })

  // Persist the latest contents
  socket.on("save-document", async (data) => {
    await Document.findByIdAndUpdate(documentID, { data })
  })
})
This works perfectly as long as all clients are on the same server, but we want to run several WebSocket servers that also communicate with each other. For example:
Client1 connected to WS1.
Client2 and Client3 connected to WS2.
All three clients open the same document, so every event emitted by a client must be broadcast to all the other clients, even when they are connected to different WebSocket servers.
After some research, we found that a Publish/Subscribe architecture lets the servers publish and subscribe to each other's events, and that Socket.io provides a Redis adapter that does exactly this. So we modified our server code to look like this:
// Assumed imports (not shown in the original snippet)
const { Server } = require("socket.io");
const { createClient } = require("redis");
const { createAdapter } = require("@socket.io/redis-adapter");

const io = new Server(process.env.PORT, {
  cors: {
    origin: process.env.CLIENT_URL,
    methods: ["GET", "POST"]
  }
})

const pubClient = createClient({ host: 'https://<ngrokurl>.eu.ngrok.io' });
const subClient = pubClient.duplicate();

pubClient.on('ready', () => {
  console.log('Publisher connected to redis and ready to use')
})
subClient.on('ready', () => {
  console.log('Subscriber connected to redis and ready to use')
})
pubClient.on('error', (err) => console.log('Publisher Client Error', err));
subClient.on('error', (err) => console.log('Subscriber Client Error', err));

Promise.all([pubClient.connect(), subClient.connect()]).then(() => {
  // Connect the Socket.io server to Redis using the Redis adapter
  io.adapter(createAdapter(pubClient, subClient));
});
const defaultValue = ""
var allClients = [];
io.on("connection", (socket) => {
allClients.push(socket)
var username = socket.handshake.query.username
console.log(`A client is connected! ${username} - Number of sockets is: ${allClients.length}`)
//Event listener for client's socket disconnect
//Event that listens to any
socket.on('disconnect', function (reason) {
console.log(`${username} got disconnected due to ${reason}`)
var i = allClients.indexOf(socket);
allClients.splice(i, 1);
console.log(`Number of sockets now is: ${allClients.length}`)
})
socket.on('get-document', async (documentID) => {
const document = await lookUpDocument(documentID);
socket.join(documentID);
socket.emit("load-document", document.data);
socket.on("send-changes", (delta) => {
socket.broadcast.to(documentID).emit("receive-changes", delta)
})
socket.on("save-document", async (data) => {
await Document.findByIdAndUpdate(documentID, { data })
})
})
})
Yet the servers still don't broadcast events to each other when a document changes. How can I make each server publish and subscribe to events so that they reach all clients that have joined the same room (based on the documentID)?
Thank you so much in advance
Edit 1: Added the Redis URL we connect to, which is an ngrok endpoint that tunnels to localhost:6379 on a virtual machine.
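For reference, node-redis v4 (the client version implied by the promise-based connect() above) takes its connection target as a url string or a socket object; a top-level host key is not one of its options. The Redis protocol is also not HTTP, so the tunnel would have to expose a raw TCP endpoint rather than an https:// URL. A minimal sketch with placeholder addresses (not the original values):

// Sketch only, placeholder ngrok TCP address: node-redis v4 expects a
// redis:// URL or a socket object, not a top-level "host" key.
const pubClient = createClient({
  url: "redis://0.tcp.eu.ngrok.io:12345"
});
// Equivalent form using the socket option:
// const pubClient = createClient({ socket: { host: "0.tcp.eu.ngrok.io", port: 12345 } });
const subClient = pubClient.duplicate();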

Related

How to set socket attribute globally using socket.io in nodejs from within a socket event handler?

I'm currently working on a portfolio project and running into a problem: I'm trying to set an attribute on the socket object from within one socket event handler so that it can be accessed when handling other socket events.
These are the events I'm handling: a login event and a disconnect event.
io.on("connect", socket => {
console.log(`User joined: `, socket.id)
socket.on("login", (data) => handleUserLogin(socket, data))
socket.on("disconnect", () => handleDisconnect(socket))
})
When the user logs on, the client emits a login event whose data is a JSON object containing the user details and a company ID. I'm trying to save this companyId globally; it is supposed to determine which list to append to or read from.
const handleUserLogin = async (socket, data) => {
  const { companyId, user } = data;
  socket.join([`${socket.id}`, `${companyId}`]);
  socket.companyId = companyId;
  try {
    const newOnlineUser = await redisClient.hset(`${companyId}:users:online`, `${socket.id}`, JSON.stringify(user))
    if (newOnlineUser) {
      const onlineUsers = await redisClient.hgetall(`${companyId}:users:online`)
      socket.to(companyId).emit("user_status_change", { onlineUsers })
    }
  } catch (error) {
    socket.to(`${socket.id}`).emit("error", { error })
  }
};
When the socket disconnects, I want to remove the user from my Redis list, which means I need this companyId attribute. But socket.companyId is null when I try to access it.
const handleDisconnect = async (socket) => {
  console.log(`Disconnect: ${socket.companyId}`)
  if (socket?.companyId) {
    console.log('Disconnect event for user', socket.id, 'of company', socket.companyId, 'occurred.')
    try {
      const offlineUser = await redisClient.hdel(`${socket.companyId}:users:online`, `${socket.id}`)
      if (offlineUser) {
        const onlineUsers = await redisClient.hgetall(`${socket.companyId}:users:online`)
        socket.to(socket.companyId).emit("user_status_change", { onlineUsers })
      }
    } catch (error) {
      console.log(error)
    }
  }
}
Would love to know how to deal with this, or at least find a way to set an attribute on the socket instance from within one event handler that can also be accessed when handling other events.
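For what it's worth, setting a property directly on the socket (as above) is one option, and Socket.IO also exposes a socket.data object intended exactly for per-socket state that later handlers, including the disconnect handler, can read. A minimal sketch of that pattern, shaped like the handlers above (not the original code):

// Sketch: stash per-socket state on socket.data at login, read it at disconnect
io.on("connect", socket => {
  socket.on("login", ({ companyId, user }) => {
    // Any later handler registered on this same socket sees socket.data
    socket.data.companyId = companyId;
    socket.join(`${companyId}`);
  });

  socket.on("disconnect", async () => {
    const companyId = socket.data.companyId;
    if (!companyId) return;
    await redisClient.hdel(`${companyId}:users:online`, socket.id);
  });
});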

Socket.io emit event only works once/first time

I'm brand new to socket.io and am trying to create an app similar to Slido: users can send in messages and then view all submitted messages in real time. I'm using Node.js, Express, socket.io, and Redis on the back end, and React with socket.io-client on the front end.
At the moment, the live messages page/feed only updates in real time the first time a message is sent in; after that, the emit event appears to stop working, and the list of messages only updates when you refresh the page and it pulls the message history from Redis.
Does anyone know why this may be happening? I've checked that the socket.io versions on the server and client are the same.
Thank you!
server-side socket setup:
io.on("connect", (socket) => {
initialiseUser(socket);
socket.on("dm", (message) => {
dm(socket, message, io);
});
io.emit("hello", "hello world");
socket.on("disconnecting", () => onDisconnect(socket));
});
// dm logic sits in a separate file
module.exports.dm = async (socket, message, io) => {
  message.from = socket.user.userid;
  const messageString = [message.from, message.content].join(".");
  await redisClient.lpush(`prayers:messages`, messageString);
  io.emit("dm", message);
};
client-side setup:
const useSocketSetup = (setMessages, messages) => {
  const { setUser } = useContext(AccountContext);
  useEffect(() => {
    socket.connect();
    socket.on("hello", (content) => {
      console.log("hello world", content);
    });
    socket.on("messages", (redisMessages) => {
      setMessages(redisMessages);
    });
    socket.on("dm", (message) => {
      setMessages((prevMessages) => [message, ...prevMessages]);
      console.log("NEW MESSAGE", message);
    });
    socket.on("connect_error", () => {
      console.log("Socket cannot connect");
      setUser({ loggedIn: false });
    });
    return () => {
      socket.off("connect_error");
      socket.off("messages");
      socket.off("dm");
    };
  }, [setUser, setMessages, messages]);
};
export default useSocketSetup;
The console.log inside socket.on("dm", ...) is only logged for the first dm event.
This is the form setup for submitting a message:
const { setMessages } = useContext(MessagesContext);
useSocketSetup(setMessages);
return (
  <>
    <Formik
      initialValues={{ message: "" }}
      validationSchema={Yup.object({ message: Yup.string().min(1).max(255) })}
      onSubmit={(values, actions) => {
        const message = { from: null, content: values.message };
        socket.emit("dm", message);
        setMessages((prevMessages) => [message, ...prevMessages]);
        console.log(JSON.stringify(message));
        actions.resetForm();
        navigate("/prayers");
      }}
    >
There is then a component accessing the messages from context and mapping through them to display.
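One thing worth checking in this kind of setup is how listeners are registered and removed. A common pattern is to register each handler once per mounted component and remove exactly that handler in the effect cleanup, so unmounting one component (for example after navigate("/prayers")) does not strip listeners registered by another. A minimal sketch, assuming the same module-scoped socket as above (not the original hook):

// Sketch: register a named handler and remove only that handler on cleanup
useEffect(() => {
  const onDm = (message) => {
    setMessages((prevMessages) => [message, ...prevMessages]);
  };
  socket.on("dm", onDm);
  return () => {
    socket.off("dm", onDm); // removes only this listener, not every "dm" listener
  };
}, [setMessages]);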

Running a continuous function in Node.js Server after User Logs In

I am building an IoT app incorporating MQTT. My challenge is this:
The list of topics a user has subscribed to is saved in a MongoDB collection. Once the user logs into the app, this list of topics is available. What I want is a function that starts running once the user has logged in, listens for any message on the subscribed topics, and updates the device state, which is also saved in the MongoDB collection.
I have access to the MQTT client at the start of the server:
mongoose.connect(URI).then(res => {
  console.log("Connected to DB")
  const server = app.listen(3000, function () {
    const client = mqtt.connect('mqtt://test.mosquitto.org')
    client.on('connect', () => {
      console.log('Connected')
    })
  })
  const io = require('./socket').init(server)
})
.catch(err => console.log(err))
UPDATE:
With this code, I can receive messages if I hard-code the topic:
mongoose.connect(URI).then(res => {
  console.log("Connected to DB")
  const server = app.listen(3000, function () {
    const client = mqtt.connect('mqtt://test.mosquitto.org')
    // console.log("MQTT CLIENT : ", client)
    client.on('connect', () => {
      console.log('Connected')
    })
    client.subscribe(`62d7d71d65c27a/devices/62dd2208c4b`, () => {
      console.log("Subscribe to topic 62d7d71d65c27a/devices/62dd2208c4b")
    })
    client.on('message', (topic, payload) => {
      console.log(payload.toString())
    })
    const io = require('./socket').init(server)
  })
}).catch(err => console.log(err))
But the challenge remains: how do I get the topic list for the logged-in user and then keep this function running continuously, listening for incoming messages on the topics that user has subscribed to?
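One way to approach this (a sketch, not the original code): after a successful login, look the user's topics up in MongoDB and pass them to client.subscribe(), and keep a single client.on('message') handler registered at startup that updates device state. The Topic and Device model names, the subscribeUserTopics helper, and the login hook below are assumptions; only the mqtt and mongoose calls themselves are real APIs.

// Sketch with hypothetical mongoose models Topic ({ userId, topic }) and
// Device ({ topic, state }); `client` is the mqtt client created above.
const subscribeUserTopics = async (client, userId) => {
  const topics = await Topic.find({ userId }).distinct('topic')
  if (topics.length === 0) return

  client.subscribe(topics, (err) => {
    if (err) return console.log('Subscribe failed', err)
    console.log(`Subscribed to ${topics.length} topics for user ${userId}`)
  })
}

// Registered once at startup; fires for every message on any subscribed topic
client.on('message', async (topic, payload) => {
  await Device.findOneAndUpdate({ topic }, { state: payload.toString() })
})

// Called from the login route/controller after the user is authenticated:
// await subscribeUserTopics(client, user._id)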

How to listen to socketIO private message in React client?

I have a SocketIO instance in an Express app that listens to requests from a React client. A user can send private messages to a specific person. The server receives the private message and should dispatch it back to both sender and recipient using the io.to(socketId).emit(content) method.
How do I listen for this event in React and update the messages array? To make this easier, I have created a connectedUsers object whose keys are MongoDB user._id values and whose values are the unique socket IDs generated by SocketIO. This way, I can easily address messages to specific people from the client. Once sent, the messages are stored in a MongoDB database.
Here is the back-end. The point of interest is io.on("privateMessage")
const connectedUsers = {};
const socketManager = (io) => {
  io.on("identifyUser", (user) => {
    if (!([user.id] in connectedUsers)) {
      connectedUsers[user.id] = io.id;
    }
  });
  io.on("privateMessage", (data) => {
    io.to(connectedUsers[data.recipientId]).emit(data.message);
    io.to(connectedUsers[data.senderId]).emit(data.message);
  });
  io.on("disconnect", () => console.log("user disconnected!"));
};
Here is the listening function in React. Everything works but the "privateMessage" part.
async function getUser(socketId) {
  try {
    const res = await ax.get(`${serverUrl}/login`);
    const socket = io(serverUrl);
    socketId.current = socket;
    socket.on("connect", () => {
      socket.emit("identifyUser", { id: res.data._id });
      socket.on("privateMessage", (data) =>
        console.log("private message received!", data)
      );
    });
  } catch (err) {
    throw new Error(err);
  }
}
Thanks for your help!
I think you need to put the socket.on("privateMessage") part outside the socket.on("connect") scope.
React must register all events at the beginning.
The backend side must be responsible for the authorization.
For the client there is a connection event, not connect. The subscription to the privateMessage event should be outside the connection callback.
This code should work. Hope this helps
import io from 'socket.io-client'

async function getUser(socketId) {
  try {
    const res = await ax.get(`${serverUrl}/login`);
    const socket = io(serverUrl);
    socketId.current = socket;
    socket.on("connection", () => {
      socket.emit("identifyUser", { id: res.data._id });
    });
    socket.on("privateMessage", (data) =>
      console.log("private message received!", data)
    );
  } catch (err) {
    throw new Error(err);
  }
}
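Worth noting too: for socket.on("privateMessage") to fire on the client, the server has to emit under that event name and register its handlers per connected socket. A minimal server-side sketch of that shape (not the original code, which registers handlers on io directly):

// Sketch: handlers registered per socket inside the connection event, and
// messages emitted under the "privateMessage" name the client listens for
const connectedUsers = {};

io.on("connection", (socket) => {
  socket.on("identifyUser", (user) => {
    connectedUsers[user.id] = socket.id; // map user id -> socket id
  });

  socket.on("privateMessage", (data) => {
    io.to(connectedUsers[data.recipientId]).emit("privateMessage", data.message);
    io.to(connectedUsers[data.senderId]).emit("privateMessage", data.message);
  });

  socket.on("disconnect", () => console.log("user disconnected!"));
});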

I used socket.io in my Node.js app, and my server slows down sometimes, sometimes does not respond, and sometimes automatically reloads

I am building a management app with Node.js, Express.js, MongoDB, and React.js, and I used socket.io for real-time updates. Before adding socket.io, my app worked fine, i.e. it responded to every request I made to my Node.js API. Since adding socket.io it has slowed down: sometimes it does not respond to my queries, and sometimes it automatically reloads and re-sends requests from the client to the server. It does not show any errors. Please help me solve this issue; I have to deploy this system by next week and don't know how to fix it.
I made a separate file, socket.js, containing:
let io;
module.exports = {
  init: httpServer => {
    io = require("socket.io")(httpServer, { wsEngine: "ws" });
    return io;
  },
  getIO: () => {
    if (!io) {
      throw new Error("Socket is not initialized");
    }
    return io;
  }
};
In my Node.js entry point, index.js, I use it like this:
mongoose
  .connect("mongodb://localhost/QuickResponse")
  .then(() => {
    console.log("Connected to MongoDB...");
    const port = process.env.PORT || 5000;
    const server = app.listen(port, () =>
      console.log(`Listening on port ${port}...`)
    );
    const io = require("./socket").init(server);
    io.on("connection", socket => {
      console.log("New client connected");
      socket.on("disconnect", () => {
        console.log("Client is disconnected");
      });
    });
  });
Whenever I want to use this in my other files, I use it like this:
const io = require("../socket");
const complaint = await Complaint.findById(req.params.id);
if (!complaint)
return res.status(404).send("Complaint with given ID was not found.");
const admin = await Admin.findOne().limit(1);
complaint.assignedTo = {
_id: admin._id
};
complaint.assigned = false;
await complaint.save();
io.getIO().emit("complaints", {
action: "drop",
complaint: complaint
});
res.status(200).send("You have successfully dropped responsibility");
On the frontend I have used socket.io-client, like this:
import openSocket from "socket.io-client";

const socket = openSocket("http://localhost:5000", { reconnection: true });

socket.on("complaints", data => {
  if (data.action === "new complaint") {
    this.createNewComplaint(data.complaint);
    toast.info(
      `New Complaint has been registered with title "${data.complaint.title}"`
    );
  }
});
I don't want my server to slow down or stop responding to my queries.
