After playing around a bit I found out the hard way that Socket.IO changes the socket ID on every reconnection. That said, I am looking for a way for my Node.js server to keep track of changes to the socket ID. Socket.IO has a built-in store which keeps track of socket IDs, remote addresses, etc. How reliable is that store, and can I quickly query a socket ID by a user ID that I stored in that db?
I also found some docs saying that one can set one's own socket ID, which has to be unique, but does this allow me to avoid getting a new socket ID on refresh, or is that still not possible?
Instead of a socket ID, is there a way to send a message to a client based on a user ID?
If neither of the above works, I guess I will be back to the drawing board and will store the session info in a DB table and update the data there.
Many questions, but hopefully someone can share some insight with me.
Here is the Socket.IO helper class I use:
const io = require('socket.io');

class ioWrapper {
  constructor(app, { middlewares, updateSession, clearSession } = {}) {
    this.io = io(app);
    this.connected = {};
    if (middlewares && middlewares.length) {
      for (const middleware of middlewares) {
        this.io.use(middleware);
      }
    }
    this.io.on('connect', client => {
      // pass a callback to the constructor to update your session
      if (updateSession) {
        updateSession(client.id);
        console.log('Updated: ' + client.id);
      }
      client.jwt = client.id;
      this.connected[client.id] = client;
      console.log('Connected: ' + client.id); // + this.io.socket.request.connection.remoteAddress
      client.on('disconnect', () => {
        if (clearSession) {
          clearSession(client.id);
        }
        if (this.connected[client.id]) {
          delete this.connected[client.id];
        }
        console.log('Disconnect: ' + client.id);
      });
      client.on('error', e => console.log(e));
      client.on('connect_error', e => console.log('connect_error', e));
    });
    this.ioEmit = this.ioEmit.bind(this);
    this.ioInject = this.ioInject.bind(this);
  }

  // emit an event ([eventName, ...args]) to a single socket, if it is still connected
  ioEmit(socketId, event) {
    try {
      if (this.connected[socketId]) {
        this.connected[socketId].emit(...event);
      }
    } catch (e) {
      console.log(e);
    }
  }

  // attach a handler ([eventName, listener]) to a single socket, if it is still connected
  ioInject(socketId, handler) {
    try {
      if (this.connected[socketId]) {
        this.connected[socketId].on(...handler);
      }
    } catch (e) {
      console.log(e);
    }
  }
}

// the constructor expects a single options object, so pass middlewares and
// session callbacks through as one argument
module.exports = (app, options) => new ioWrapper(app, options);
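To make the third question above concrete, this is the kind of thing I mean by sending to a client by user ID instead of socket ID: join each socket to a room keyed by the user ID and emit to that room, so the changing socket ID stops mattering. This is only a sketch of the idea, not code from my app; getUserId stands in for however the user ID would come out of my auth middleware.

// sketch only: rooms keyed by user ID survive reconnections,
// so I would never have to track the changing socket ID myself
function attachUserRooms(ioServer, getUserId) {
  ioServer.on('connect', client => {
    const userId = getUserId(client); // hypothetical: e.g. read from the decoded session/JWT
    if (userId) {
      client.join('user:' + userId);
    }
  });
}

function sendToUser(ioServer, userId, event, payload) {
  // emits to every socket currently in that user's room, whatever its socket ID is
  ioServer.to('user:' + userId).emit(event, payload);
}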
Related
I'm currently working on a portfolio project and running into a problem: I am trying to set an attribute on the socket object from within one socket event handler so that it can be accessed when handling other socket events.
These are the events I'm handling: a login event and a disconnect event.
io.on("connect", socket => {
console.log(`User joined: `, socket.id)
socket.on("login", (data) => handleUserLogin(socket, data))
socket.on("disconnect", () => handleDisconnect(socket))
})
When the user logs on, I emit a login event from the client. The login event handler receives a JSON object with the user details and a company ID, both sent from the client. I'm trying to save this companyId globally; it is supposed to help determine which list to append to, collect from, etc.
const handleUserLogin = async (socket, data) => {
  const { companyId, user } = data;
  socket.join([`${socket.id}`, `${companyId}`]);
  socket.companyId = companyId;
  try {
    const newOnlineUser = await redisClient.hset(`${companyId}:users:online`, `${socket.id}`, JSON.stringify(user))
    if (newOnlineUser) {
      const onlineUsers = await redisClient.hgetall(`${companyId}:users:online`)
      socket.to(companyId).emit("user_status_change", { onlineUsers })
    }
  } catch (error) {
    socket.to(`${socket.id}`).emit("error", { error })
  }
};
When the socket disconnects, I want to remove the user from my redis list, which means I'll need this companyId attribute. But a value of null appears when I try to access: socket.companyId.
const handleDisconnect = async (socket) => {
  console.log(`Disconnect: ${socket.companyId}`)
  if (socket?.companyId) {
    console.log('Disconnect event for user', socket.id, 'of company', socket.companyId, 'occurred.')
    try {
      const offlineUser = await redisClient.hdel(`${socket.companyId}:users:online`, `${socket.id}`)
      if (offlineUser) {
        const onlineUsers = await redisClient.hgetall(`${socket.companyId}:users:online`)
        socket.to(socket.companyId).emit("user_status_change", { onlineUsers })
      }
    } catch (error) {
      console.log(error)
    }
  }
}
Would love to know how to deal with this, or at least find a way to set an attribute on the socket instance from within one event handler that can also be accessed when handling other events.
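For completeness, the only fallback I can think of is keeping the per-socket data outside the socket object, in a module-level Map keyed by socket.id. A rough sketch using the same handler names as above (not my actual code):

// sketch: per-socket state kept in a Map instead of as a property on the socket
const socketState = new Map();

const handleUserLogin = (socket, data) => {
  socketState.set(socket.id, { companyId: data.companyId });
  // ...rest of the login handling as above
};

const handleDisconnect = (socket) => {
  const state = socketState.get(socket.id); // socket.id is still available during 'disconnect'
  if (state) {
    console.log('Disconnect for company', state.companyId);
    socketState.delete(socket.id);
  }
};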
I have a SocketIO instance in an Express app that listens to requests from a React client. A user can send private messages to a specific person. The server receives the private message and should dispatch it back to both sender and recipient via the io.to(socketId).emit(content) method.
How do I listen to this event in React and update the message array? To ease the process, I have created a connectedUsers object whose keys are MongoDB's user._id values and whose values are the unique socket IDs generated by Socket.IO. This way, I can easily address messages to specific persons from the client. Once sent, the messages are stored in a MongoDB database.
Here is the back-end. The point of interest is io.on("privateMessage")
const connectedUsers = {};

const socketManager = (io) => {
  io.on("identifyUser", (user) => {
    if (!([user.id] in connectedUsers)) {
      connectedUsers[user.id] = io.id;
    }
  });
  io.on("privateMessage", (data) => {
    io.to(connectedUsers[data.recipientId]).emit(data.message);
    io.to(connectedUsers[data.senderId]).emit(data.message);
  });
  io.on("disconnect", () => console.log("user disconnected!"));
};
Here is the listening function in React. Everything works but the "privateMessage" part.
async function getUser(socketId) {
  try {
    const res = await ax.get(`${serverUrl}/login`);
    const socket = io(serverUrl);
    socketId.current = socket;
    socket.on("connect", () => {
      socket.emit("identifyUser", { id: res.data._id });
      socket.on("privateMessage", (data) =>
        console.log("private message received!", data)
      );
    });
  } catch (err) {
    throw new Error(err);
  }
}
Thanks for your help!
I think you need to put the socket.on("privateMessage") handler outside the socket.on("connect") callback.
React must register all the event listeners when it loads.
The backend side must be responsible for the authorization.
On the client the event is connect (connection is the server-side event), and the subscription to privateMessage should be outside the connect callback so it isn't re-registered on every reconnect.
This code should work. Hope this helps.
import io from 'socket.io-client'

async function getUser(socketId) {
  try {
    const res = await ax.get(`${serverUrl}/login`);
    const socket = io(serverUrl);
    socketId.current = socket;
    socket.on("connect", () => {
      socket.emit("identifyUser", { id: res.data._id });
    });
    socket.on("privateMessage", (data) =>
      console.log("private message received!", data)
    );
  } catch (err) {
    throw new Error(err);
  }
}
Eight out of ten times everything connects fine. That said, I sometimes get a MongoClient must be connected before calling MongoClient.prototype.db error. How should I change my code so it works reliably (100%)?
I tried a code snippet from one of the creators of the Zeit Now platform.
My handler
const { send } = require('micro');
const { handleErrors } = require('../../../lib/errors');
const cors = require('../../../lib/cors')();
const qs = require('micro-query');
const mongo = require('../../../lib/mongo');
const { ObjectId } = require('mongodb');

const handler = async (req, res) => {
  let { limit = 5 } = qs(req);
  limit = parseInt(limit);
  limit = limit > 10 ? 10 : limit;
  const db = await mongo();
  const games = await db
    .collection('games_v3')
    .aggregate([
      {
        $match: {
          removed: { $ne: true }
        }
      },
      { $sample: { size: limit } }
    ])
    .toArray();
  send(res, 200, games);
};

module.exports = handleErrors(cors(handler));
My mongo script that reuses the connection in case the lambda is still warm:
// Based on: https://spectrum.chat/zeit/now/now-2-0-connect-to-database-on-every-function-invocation~e25b9e64-6271-4e15-822a-ddde047fa43d?m=MTU0NDkxODA3NDExMg==
const MongoClient = require('mongodb').MongoClient;

if (!process.env.MONGODB_URI) {
  throw new Error('Missing env MONGODB_URI');
}

let client = null;

module.exports = function getDb(fn) {
  if (client && !client.isConnected) {
    client = null;
    console.log('[mongo] client discard');
  }
  if (client === null) {
    client = new MongoClient(process.env.MONGODB_URI, {
      useNewUrlParser: true
    });
    console.log('[mongo] client init');
  } else if (client.isConnected) {
    console.log('[mongo] client connected, quick return');
    return client.db(process.env.MONGO_DB_NAME);
  }
  return new Promise((resolve, reject) => {
    client.connect(err => {
      if (err) {
        client = null;
        console.error('[mongo] client err', err);
        return reject(err);
      }
      console.log('[mongo] connected');
      resolve(client.db(process.env.MONGO_DB_NAME));
    });
  });
};
I need my handler to be 100% reliable.
if (client && !client.isConnected) {
  client = null;
  console.log('[mongo] client discard');
}
This code can cause problems. Even though you're setting client to null, that client object still exists: it will keep connecting to Mongo, it will not be garbage collected, and its connect callback will still run, but by then client refers to the next client that was created, which is not necessarily connected.
A common pattern for this kind of code is to only ever return a single promise from the getDB call:
let clientP = null;

function getDb() {
  if (clientP) return clientP;
  clientP = new Promise((resolve, reject) => {
    const client = new MongoClient(process.env.MONGODB_URI, {
      useNewUrlParser: true
    });
    client.connect(err => {
      if (err) {
        console.error('[mongo] client err', err);
        return reject(err);
      }
      console.log('[mongo] connected');
      resolve(client.db(process.env.MONGO_DB_NAME));
    });
  });
  return clientP;
}
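One property of this pattern worth noting: every caller awaits the same pending promise, so concurrent invocations during a cold start share one connection attempt instead of racing each other. A small illustration (assuming getDb is exported as above):

(async () => {
  // both calls resolve to the same db handle backed by a single MongoClient
  const [db1, db2] = await Promise.all([getDb(), getDb()]);
  console.log(db1 === db2); // true
})();

If the initial connect fails, clientP stays rejected; resetting it to null in the error branch is a small extension you might want.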
I had the same issue. In my case it was caused by calling getDb() before a previous getDb() call had returned; I believe client.isConnected returns true even though the client is still connecting.
The root cause was a forgotten await before the getDb() call in one location. I tracked down which call by printing a call stack from getDb using:
console.log(new Error().stack);
I don't see the same issue in the sample code in the question, though it could be triggered by another bit of code that isn't shown.
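Concretely, the logging went at the top of getDb, something like this (just my temporary debugging aid, not part of the fix):

module.exports = function getDb() {
  // temporary: print who called getDb, to spot the call site that wasn't awaited
  console.log('[mongo] getDb called from:\n' + new Error().stack);
  // ...rest of getDb unchanged
};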
I have written this article about serverless, Lambda, and DB connections. There are some good concepts in it that could help you find the root cause of your problem, as well as examples and use cases showing how to mitigate connection pool issues.
Just by looking at your code I can tell it is missing this:
context.callbackWaitsForEmptyEventLoop = false;
Serverless: Dynamodb x Mongodb x Aurora serverless
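For reference, in a plain AWS Lambda handler that flag lives on the context argument and is set before any awaits. The sketch below uses the generic Lambda signature, which differs from the micro handler in the question, and reuses the cached getDb helper discussed above:

// generic AWS Lambda handler shape (not the micro handler from the question)
exports.handler = async (event, context) => {
  // return as soon as the response is ready instead of waiting
  // for the shared MongoDB socket to close
  context.callbackWaitsForEmptyEventLoop = false;

  const db = await getDb(); // the cached-connection helper from the answers above
  const games = await db
    .collection('games_v3')
    .aggregate([{ $match: { removed: { $ne: true } } }, { $sample: { size: 5 } }])
    .toArray();
  return { statusCode: 200, body: JSON.stringify(games) };
};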
I am attempting to hook up a small game with WebSockets. I am using socket.io, socket.io-p2p, and socket.io-p2p-server. I want users to be automatically paired up against any connected player who doesn't have a partner. I want users to only be connected in pairs.
So far, simply following the docs, I can only get clients to connect using plain socket.io. When I attempt to use socket.io-p2p and socket.io-p2p-server, I can sometimes get users to connect, and other times I get error messages on the screen like:
"Missing error handler on socket.
TypeError: Cannot read property 'emit' of undefined"
Someone opened an issue for this problem on the repo and never got a response:
https://github.com/tomcartwrightuk/socket.io-p2p-server/issues/5
I don't know if socket.io-p2p-server is broken or if I am just missing something. Furthermore, socket.io-p2p-server has not been touched much since March.
So my main questions are:
Is socket.io-p2p-server still alive?
Is there a better implementation I can use for these abstractions?
Would writing my own logic instead of using socket.io-p2p-server be worth it?
Client-side code:
import P2P from 'socket.io-p2p'
import io from 'socket.io-client'

const socket = io()
const p2pSocket = new P2P(socket, null, function () {
  console.log("my id is: " + p2pSocket.peerId)
})

p2pSocket.on('peer-msg', function (data) {
  console.log(data)
})
Server-side code:
var http = require('http')
var httpServer = http.createServer(requestHandler)
var fs = require('fs')
var io = require('socket.io')(httpServer)
var p2pServerModule = require('socket.io-p2p-server')
var p2p = p2pServerModule.Server
var connectedUsers = p2pServerModule.clients

io.use(p2p)
httpServer.listen(8000, 'localhost');

function serveUpFile(req, res, path) {
  fs.readFile(path.toString(), function (err, data) {
    if (err) {
      res.writeHead(500);
      return res.end('Error loading index.html');
    }
    res.writeHead(200)
    res.end(data)
  })
}

function requestHandler (req, res) {
  if (req.url === '/static/bundle.js') {
    serveUpFile(req, res, './static/bundle.js')
  } else {
    serveUpFile(req, res, './index.html')
  }
}

io.on('connection', function (client) {
  console.log('client connected to the server')
  client.on('peer-msg', function (data) {
    console.log('Message from peer %s', data)
  })
  client.on('disconnect', function () {
    console.log('client disconnected from the server')
  })
})
socket.io-p2p-server will not work with socket.io 1.x as expected.
Change socket.io-p2p-server/index.js:
This part:
if (typeof room === 'object') {
  var connectedClients = socket.adapter.rooms[room.name]
} else {
  var connectedClients = clients
}
To this:
if (typeof room === 'object' && ('name' in room) && (room.name in socket.adapter.rooms)) {
  var connectedClients = {}
  for (var id in socket.adapter.rooms[room.name].sockets) {
    connectedClients[id] = clients[id];
  }
} else {
  var connectedClients = clients;
}
This solution works for me.
I made a simple notification application that subscribes to a Redis channel; when a new message arrives, the application sends it to the user.
client.on("auth", function(sessId, userId){
console.log('AUTH: userId ' + userId)
var userIdFromRedis;
redis1.get('PHPREDIS_SESSION:' + sessId, function (err , reply) {
if (reply){
reply = reply.toString();
var result = reply.match(/s:2:\"id\";i:(\d+);/);
if (result) {
userIdFromRedis = result[1]
console.log('AUTH: userIdFromRedis ' + userIdFromRedis)
} else {
result = reply.match(/s:7:\"guestId\";i:(\d+);/);
if (result) {
var guestIdFromRedis = result[1]
console.log('AUTH: guestIdFromRedis ' + guestIdFromRedis)
}
}
if (userIdFromRedis == userId) {
client.userId = userId;
subscribe.subscribe(channelPrefix + userId);
clients[userId] = client;
client.emit("auth", {"success":true});
console.log('AUTH: result - ok')
} else if (guestIdFromRedis) {
client.guestId = guestIdFromRedis;
subscribe.subscribe(channelPrefix + guestIdFromRedis);
clients[guestIdFromRedis] = client;
client.emit("auth", {"success":true});
console.log('AUTH: result - ok')
} else {
client.disconnect();
console.log('AUTH: result - fail')
}
} else {
client.disconnect();
}
});
})
subscribe.on("message", function(channel, message) {
var userId = Math.round(channel.substr(channelPrefix.length));
if (client.userId == userId || client.guestId == userId) {
console.log('Subscriber: ' + message)
client.send(message);
}
});
client.on("message", function(text){
client.send(text);
})
And in the log file, sometimes at peak hours, I find this warning:
(node) warning: possible EventEmitter memory leak detected. 11
listeners added. Use emitter.setMaxListeners() to increase limit.
The Node.js application's process also uses 90% of the CPU.
Please advise: how can I resolve this problem?
Whenever a client connects to your server, you register another listener on the same EventEmitter. Unfortunately you never remove them, so you hit the default listener limit and the warning fires at 11. Track down the piece of code that adds the listeners (in your snippet, the subscribe.on("message", ...) registration is the likely culprit if it runs once per connection) and guard it so that an already-connected client does not cause additional listeners to be registered.
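A rough sketch of the usual fix, reusing the subscribe, clients, and channelPrefix objects from your own code: register the Redis "message" listener exactly once at module level and look the target socket up inside it, instead of adding a fresh listener for every connection.

// registered once, outside the per-connection code, so the listener count stays at 1
subscribe.on("message", function (channel, message) {
  var userId = Math.round(channel.substr(channelPrefix.length));
  var target = clients[userId]; // the socket stored by your auth handler
  if (target) {
    console.log('Subscriber: ' + message);
    target.send(message);
  }
});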