How to handle concurrent requests with Socket.IO while joining a room - Node.js

I'm trying to create/join a room with a limit of 2 users per room. Everything works fine, but when 2 users make a concurrent create/join request, all 3 users end up in the same room. I have tried:
a simple MongoDB insert/update
using a transaction
now storing the rooms in memory
Here is the code:
const rooms = {};

module.exports = function ({ io, socket }) {
  socket.on("/createJoinPublicRequest", async ({ user }) => {
    let title = `${user._id}.${Date.now()}`;
    let foundRoom = Object.keys(rooms).find((room) => {
      return rooms[room].roomLimit > rooms[room].users.length;
    });
    if (!foundRoom) {
      // create a room if not found
      rooms[title] = {
        title,
        users: [
          {
            _id: user._id,
          },
        ],
        roomLimit: 2,
        roomType: "public",
        status: "open",
      };
      socket.join(title);
    } else {
      // join existing room
      room = rooms[foundRoom];
      room.users.push({ _id: user._id });
      if (room.users.length == room.roomLimit) {
        room.status = "full";
        if (await storeToMongoDB(room)) {
          delete rooms[foundRoom];
        }
      }
      socket.join(room.title);
    }
    await User.findByIdAndUpdate(user, {
      joinedRoom: title,
    });
    return;
  });
};

You should use a mutex lock so the find-or-create logic cannot run for two users at the same time, which is what allows a room to exceed its limit.
For a single instance: https://www.npmjs.com/package/async-mutex
For multiple instances: a Redis-based mutex lock
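Below is a minimal sketch of the single-instance approach with async-mutex. It reuses the in-memory rooms object from the question; the handler body is illustrative and not a drop-in replacement for the original storeToMongoDB/User logic.

const { Mutex } = require("async-mutex");

const rooms = {};
const roomsMutex = new Mutex();

module.exports = function ({ io, socket }) {
  socket.on("/createJoinPublicRequest", async ({ user }) => {
    // runExclusive queues concurrent callers, so only one socket at a time
    // can look for an open room and add itself to it
    await roomsMutex.runExclusive(async () => {
      let title = Object.keys(rooms).find(
        (t) => rooms[t].users.length < rooms[t].roomLimit
      );
      if (!title) {
        // no open room: create one
        title = `${user._id}.${Date.now()}`;
        rooms[title] = {
          title,
          users: [],
          roomLimit: 2,
          roomType: "public",
          status: "open",
        };
      }
      rooms[title].users.push({ _id: user._id });
      if (rooms[title].users.length === rooms[title].roomLimit) {
        rooms[title].status = "full";
      }
      socket.join(title);
    });
  });
};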

Related

How to add an object to an array of objects in Nodejs?

I'm creating a backend for my React web application and I'm trying to subscribe a user to a match. The match is an object that has an array called "players", and when I click the join button the username and profilePicture of the user are dispatched to my backend. The first user's info is saved perfectly, but when a second user subscribes, the first user's info is replaced by the second one's.
This is the function that pushes the data:
const playerJoined = async (req, res) => {
  const torneoId = req.params.id;
  const uid = req.uid;
  const profilePicture = req.profilePicture;
  const username = req.username;
  console.log(req.params);
  try {
    const torneo = await Torneo.findById(torneoId);
    if (!torneo) {
      return res.status(404).json({
        ok: false,
        msg: "Torneo no existe por ese ID",
      });
    }
    const newPlayer = {
      profilePicture: profilePicture,
      username: username,
    };
    const nuevoTorneo = {
      ...req.body,
      players: newPlayer,
    };
    const torneoActualizado = await Torneo.findByIdAndUpdate(
      torneoId,
      nuevoTorneo,
      {
        new: true,
      }
    );
    res.json({
      ok: true,
      torneo: torneoActualizado,
    });
  } catch (error) {
    console.log(error);
    res.status(500).json({
      ok: false,
      msg: "Hable con el administrador",
    });
  }
};
My frontend works well: when I add more users, the array of objects shows all the players like this:
players: (2) [{…}, {…}]
But MongoDB only stores the last user added, like I mentioned before.
I really appreciate any help.
You seem to be replacing the players property instead of pushing into it.
const nuevoTorneo = {
  ...req.body,
  players: newPlayer,
};
When you grab the torneo by id, you should have access to that players property already, so spread that array into your nuevoTorneo as well:
const nuevoTorneo = {
  ...req.body,
  players: [...torneo.players, newPlayer],
};
This happens because you always put your newPlayer into the "players" field of nuevoTorneo and overwrite the same document. I assume you are using Mongoose; you should just modify the "torneo" returned by your query and do something like this:
const torneo = await Torneo.findById(torneoId);
const newPlayer = {
  profilePicture: profilePicture,
  username: username,
};
torneo.players.push(newPlayer);
await torneo.save();
Or simply modify your code as:
const nuevoTorneo = {
  ...req.body,
  players: [...torneo.players, newPlayer],
};
I recommend the first method, let me know if you have any questions.
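If you prefer a single call, MongoDB can also append atomically with $push. A hedged sketch, assuming the same Torneo model and the profilePicture/username fields from the question:

// $push appends to the array on the server, so nothing already stored
// in players is overwritten
const torneoActualizado = await Torneo.findByIdAndUpdate(
  torneoId,
  { $push: { players: { profilePicture, username } } },
  { new: true }
);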

Handling concurrent requests that find and update the same resource in Node.js & MongoDB

I have a function in Node that runs after clicking the checkout button. It checks the availability of the items in the cart and, if an item is available, deducts it from the inventory.
I'm currently testing with two users clicking the checkout button at the same time. Both users have the exact same content in their cart (10 apples each), which gives a total of 20 apples, but there are only 10 apples in inventory.
If there is not enough stock it should return an error to the user, but both orders go through.
NOTE: This works if there is a 1 second delay between the clicks.
What can I do to prevent this?
// Check if items in inventory
const availability = await checkInventory(store, cart, seller);
if (!availability.success) {
  return res.status(400).json({
    success: false,
    type: 'unavailable',
    errors: availability.errors,
  });
}

// Deduct Inventory
const inventory = await deductInventory(store, seller, cart);
if (!inventory) {
  return next(new ErrorResponse('Server Error', 500));
}
checkInventory
exports.checkInventory = asyncHandler(async (store, cart, seller) => {
  let isAvailable = true;
  const unavailableProducts = [];
  const inventory = await Inventory.find({
    $and: [
      {
        store: store,
        user: seller,
      },
    ],
  });
  const products = inventory[0].products;
  cart.forEach((item) => {
    const product = products.find(
      (product) => product._id.toString() === item.productId
    );
    if (!item.hasvariation) {
      if (product.stock < item.qty) {
        isAvailable = false;
        unavailableProducts.push(
          `${item.title} is not available, only ${product.stock} left available`
        );
      }
    }
    if (item.hasvariation) {
      const variation = product.variations.find(
        (variation) => variation._id.toString() === item.variationId
      );
      const option = variation.options.find(
        (option) => option._id.toString() === item.optionId
      );
      if (option.stock < item.qty) {
        isAvailable = false;
        unavailableProducts.push(
          `${item.title} is not available, only ${product.stock} left available`
        );
      }
    }
  });
  return {
    success: isAvailable,
    errors: unavailableProducts,
  };
});
deductInventory
exports.deductInventory = asyncHandler(async (store, seller, cart) => {
  const inventory = await Inventory.findOne({
    $and: [
      {
        store: store,
        user: seller,
      },
    ],
  });
  const products = inventory.products;
  cart.forEach((item) => {
    const product = products.find(
      (product) => product._id.toString() === item.productId
    );
    if (!item.hasvariation) {
      product.stock = product.stock - item.qty;
    }
    if (item.hasvariation) {
      const variation = product.variations.find(
        (variation) => variation._id.toString() === item.variationId
      );
      const option = variation.options.find(
        (option) => option._id.toString() === item.optionId
      );
      option.stock = option.stock - item.qty;
    }
  });
  const saveInventory = await Inventory.findOneAndUpdate(
    {
      $and: [
        {
          store: store,
          user: seller,
        },
      ],
    },
    {
      $set: { products: products },
    },
    { new: true, runValidator: true }
  );
  if (!saveInventory) {
    return {
      success: false,
      errors: ['Server Error'],
    };
  }
  return {
    success: true,
  };
});
The problem is that the 2 checkout calls run at (almost) the same time and your routine is not thread-safe. Both calls read a copy of the inventory data into memory, so both see products.stock = 10. Based on that local copy, each call computes the new amount (stock - qty) in your function and then writes it back as a fixed value with an update query, so both calls set products.stock to 0. That is what causes your concurrency issue.
What you should do is let MongoDB handle the concurrency for you.
There are several ways to handle concurrency, but you could, for example, use $inc to decrease the stock amount directly in Mongo. That way the stock amount in the database can never be wrong.
result = await update({ stock: { $gte: 10 } }, { $inc: { stock: -10 } })
Because the filter is part of the query, the stock can never drop below 0, and you can check the result of the update call to see whether it modified any documents. If it did not (result.nModified == 0), you know the inventory was too low and you can report that back to the user.
https://docs.mongodb.com/manual/reference/operator/update/inc/
https://docs.mongodb.com/manual/reference/method/db.collection.update/#std-label-writeresults-update
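A minimal sketch of that pattern, assuming an Inventory document with a flat numeric stock field, placeholder variables productId and qty, and a recent driver/Mongoose version whose update result exposes modifiedCount (the nested products/variations layout from the question would additionally need arrayFilters, but the idea is the same):

// filter and decrement in one atomic server-side operation
const result = await Inventory.updateOne(
  { _id: productId, stock: { $gte: qty } }, // match only if enough stock is left
  { $inc: { stock: -qty } }                 // decrement atomically
);

if (result.modifiedCount === 0) {
  // no document matched: the stock was too low (or the product is gone)
  return { success: false, errors: ['Item is no longer available'] };
}
return { success: true };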

Only process 500 lines/rows at a time with createReadStream

I have to read a really large CSV file, so I searched Google and learned about createReadStream. I have a program that reads the CSV file data and inserts it into MongoDB.
The process I am following:
Read the data using createReadStream (I think it reads the file line by line).
Store the data in an array.
Insert the data into MongoDB using insertMany.
The problem is that the whole file is first stored in an array and only then inserted into the database.
I think a better approach would be to store only the first 500 lines/rows in an array, insert them into the DB, and then repeat the same steps for the next 500 records.
Is it possible to achieve this?
And is it the right way to do it?
My program:
const test = async () => {
  const stream = fs.createReadStream(workerData)
    .pipe(parse())
    .on('data', async function(csvrow) {
      try {
        stream.pause()
        if (!authorName.includes(csvrow.author)) {
          const author = new Author({ author: csvrow.author })
          authorId = author._id
          authorName.push(author.author)
          authorData.push(author)
        }
        if (!companyName.includes(csvrow.company_name)) {
          const company = new Company({ companyName: csvrow.company_name })
          companyID = company._id
          companyName.push(company.companyName)
          companyData.push(company)
        }
        users = new User({
          name: csvrow.firstname,
          dob: csvrow.dob,
          address: csvrow.address,
          phone: csvrow.phone,
          state: csvrow.state,
          zip: csvrow.zip,
          email: csvrow.email,
          gender: csvrow.gender,
          userType: csvrow.userType
        })
        userData.push(users)
        book = new Book({
          book_number: csvrow.book_number,
          book_name: csvrow.book_name,
          book_desc: csvrow.book_desc,
          user_id: users._id,
          author_id: authorId
        })
        bookData.push(book)
        relationalData.push({
          username: users.name,
          author_id: authorId,
          book_id: book._id,
          company_id: companyID
        })
      } finally {
        stream.resume()
      }
    })
    .on('end', async function() {
      try {
        Author.insertMany(authorData)
        User.insertMany(userData)
        Book.insertMany(bookData)
        Company.insertMany(companyData)
        await Relational.insertMany(relationalData)
        parentPort.postMessage("true")
      } catch (e) {
        console.log(e)
        parentPort.postMessage("false")
      }
    })
}
test()
This program works fine and inserts the data into the DB, but I am looking for something like this:
const stream = fs.createReadStream(workerData)
  .pipe(parse())
  .on('data', async function(csvrow, maxLineToRead: 500) {
    // whole code/logic of inserting data into the DB
  })
Here maxLineToRead is an imaginary parameter.
Basically, I want to process 500 rows at a time, insert them into the DB, and repeat this process until the end of the file.
You can create a higher-scoped array variable where you accumulate rows of data as they arrive on the data event. When you get to 500 rows, fire off your database operation to insert them. If you're not yet at 500 rows, just add the next one to the array and wait for more data events.
Then, in the end event, insert any remaining rows still in the higher-scoped array.
In this way, you insert 500 at a time and then however many are left at the end. Compared with inserting everything at the end, this has the advantage of spreading the database load over the time you spend parsing.
Here's an attempt to implement that type of processing. There are some unknowns (documented with comments) because the description of what you're trying to accomplish in some circumstances is incomplete:
const test = () => {
  return new Promise((resolve, reject) => {
    const accumulatedRows = [];

    async function processRows(rows) {
      // initialize data arrays that we will insert
      const authorData = [],
        companyData = [],
        userData = [],
        bookData = [],
        relationalData = [];

      // this code still has a problem that I don't have enough context
      // to know how to solve
      // If authorName contains csvrow.author, then the variable
      // authorId is not initialized, but is used later in the code
      // This is a problem that needs to be fixed.
      // The same issue occurs for companyID
      for (let csvrow of rows) {
        let authorId, companyID;
        if (!authorName.includes(csvrow.author)) {
          const author = new Author({ author: csvrow.author })
          authorId = author._id
          authorName.push(author.author)
          authorData.push(author)
        }
        if (!companyName.includes(csvrow.company_name)) {
          const company = new Company({ companyName: csvrow.company_name })
          companyID = company._id
          companyName.push(company.companyName)
          companyData.push(company)
        }
        let users = new User({
          name: csvrow.firstname,
          dob: csvrow.dob,
          address: csvrow.address,
          phone: csvrow.phone,
          state: csvrow.state,
          zip: csvrow.zip,
          email: csvrow.email,
          gender: csvrow.gender,
          userType: csvrow.userType
        });
        userData.push(users)
        let book = new Book({
          book_number: csvrow.book_number,
          book_name: csvrow.book_name,
          book_desc: csvrow.book_desc,
          user_id: users._id,
          author_id: authorId
        });
        bookData.push(book)
        relationalData.push({
          username: users.name,
          author_id: authorId,
          book_id: book._id,
          company_id: companyID
        });
      }
      // all local arrays of data are populated now for this batch
      // so add this data to the database
      await Author.insertMany(authorData);
      await User.insertMany(userData);
      await Book.insertMany(bookData);
      await Company.insertMany(companyData);
      await Relational.insertMany(relationalData);
    }

    const batchSize = 500;
    const stream = fs.createReadStream(workerData)
      .pipe(parse())
      .on('data', async function(csvrow) {
        try {
          accumulatedRows.push(csvrow);
          if (accumulatedRows.length >= batchSize) {
            stream.pause();
            await processRows(accumulatedRows);
            // clear out the rows we just processed
            accumulatedRows.length = 0;
            stream.resume();
          }
        } catch (e) {
          // calling destroy(e) will prevent leaking a stream
          // and will trigger the error event to be called with that error
          stream.destroy(e);
        }
      }).on('end', async function() {
        try {
          await processRows(accumulatedRows);
          resolve();
        } catch (e) {
          reject(e);
        }
      }).on('error', (e) => {
        reject(e);
      });
  });
}

test().then(() => {
  parentPort.postMessage("true");
}).catch(err => {
  console.log(err);
  parentPort.postMessage("false");
});

Which approach to choose for messenger architecture?

We are writing an instant messenger for private use in our application. The planned load for the current version is hundreds of users, maybe a thousand or two.
I use Mongo as the database. Messages are stored in a messages collection (linked via chats_id to user_chats and via uid to an external table in the users database). The arrival of a message on the client triggers a flurry of events in response (mark as read). It is clear that I need a queue. What is the best way to build one? Are there ways to prioritize queues?
How many users should a single, idealized Node process be able to handle? How much memory should it use? I'm using pm2 for process management and node-ipc for broadcast.
How should I optimize the requests to the database? Right now there is a quickly written construction like the one illustrated below. This example requests all chats and, for each chat, selects the last message, the number of unread messages, and the last-seen users. In MySQL I would do it with one big query. What is the better way to do this in Mongo? My Mongo skills are still intermediate.
The architecture itself: an action arrives from the client and goes to a handler that understands what to do with it and who needs to be notified about it. Usually the handler notifies the corresponding manager, which writes to the database and, if necessary, notifies the other processes.
const loadUserChatsStatistic = async (chatIds, userId) => {
  const startExecTime = new Date();
  const userChats = await db()
    .collection("user_chats")
    .aggregate([
      { $match: { uid: userId, chats_id: { $in: chatIds.map(ObjectID) } } },
      {
        $lookup: {
          from: "chats",
          localField: "chats_id",
          foreignField: "_id",
          as: "chat"
        }
      },
      { $unwind: "$chat" },
      {
        $lookup: {
          from: "user_last_seen",
          localField: "chats_id",
          foreignField: "chats_id",
          as: "last_seens"
        }
      }
    ])
    .toArray();
  const chats = await Promise.all(
    userChats.map(async userChat => {
      const usersCount = await db()
        .collection("user_chats")
        .find({ chats_id: userChat.chats_id })
        .count();
      let unreadCountQuery = {};
      if (userChat.last_read) {
        unreadCountQuery = { _id: { $gt: userChat.last_read } };
      } else {
        unreadCountQuery = { _id: { $gt: userChat._id } };
      }
      const unreadCount = await db()
        .collection("messages")
        .find({ ...unreadCountQuery, chats_id: userChat.chats_id })
        .count();
      const message =
        (await db()
          .collection("messages")
          .findOne({ chats_id: userChat.chats_id }, { sort: { _id: -1 } })) ||
        {};
      return { ...userChat, usersCount, unreadCount, message };
    })
  );
  console.log(
    "Execution time for loadUserChatsStatistic",
    new Date() - startExecTime
  );
  return chats.sort((a, b) => {
    if (!a.message._id) {
      return true;
    }
    if (!b.message._id) {
      return false;
    }
    return (
      new Date(ObjectID(a.message._id).getTimestamp()) <
      new Date(ObjectID(b.message._id).getTimestamp())
    );
  });
};
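One way to cut down the per-chat round trips is to fold the last-message and unread-count lookups into the same aggregation. The following is a hedged sketch, assuming MongoDB 3.6+ (pipeline-style $lookup) and the user_chats/messages collections named above; the stages and field names are illustrative, not a tested replacement for the function.

const userChats = await db()
  .collection("user_chats")
  .aggregate([
    { $match: { uid: userId, chats_id: { $in: chatIds.map(ObjectID) } } },
    // last message per chat
    {
      $lookup: {
        from: "messages",
        let: { chatId: "$chats_id" },
        pipeline: [
          { $match: { $expr: { $eq: ["$chats_id", "$$chatId"] } } },
          { $sort: { _id: -1 } },
          { $limit: 1 }
        ],
        as: "lastMessage"
      }
    },
    { $unwind: { path: "$lastMessage", preserveNullAndEmptyArrays: true } },
    // unread messages per chat, counted on the server
    {
      $lookup: {
        from: "messages",
        let: {
          chatId: "$chats_id",
          lastRead: { $ifNull: ["$last_read", "$_id"] }
        },
        pipeline: [
          {
            $match: {
              $expr: {
                $and: [
                  { $eq: ["$chats_id", "$$chatId"] },
                  { $gt: ["$_id", "$$lastRead"] }
                ]
              }
            }
          },
          { $count: "n" }
        ],
        as: "unread"
      }
    },
    {
      $addFields: {
        unreadCount: { $ifNull: [{ $arrayElemAt: ["$unread.n", 0] }, 0] }
      }
    }
  ])
  .toArray();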

How do you query data with Bookshelf with a lot of relations?

So, I have the following:
var Device = services.bookshelf.Model.extend({
  tableName: 'device',
  calendar: function() {
    return this.belongsToMany(Calendar, 'calendar_device', 'deviceId', 'calendarId');
  }
});

var Calendar = module.exports = services.bookshelf.Model.extend({
  tableName: 'calendar',
  device: function() {
    return this.belongsToMany(Device, 'calendar_device', 'calendarId', 'deviceId');
  },
  schedule: function() {
    return this.hasMany(Schedule);
  }
});

var Schedule = module.exports = services.bookshelf.Model.extend({
  tableName: 'schedule',
  calendar: function() {
    return this.belongsTo(Calendar);
  },
  channel: function() {
    return this.hasMany(Channel);
  }
});

var Channel = module.exports = services.bookshelf.Model.extend({
  tableName: 'channel',
  schedule: function() {
    return this.belongsTo(Schedule);
  },
  screenItem: function() {
    return this.hasMany(ScreenItem);
  }
});
And the relations keep going...
How am I supposed to make a query to get, for example, the channels of a device? I don't know if I'm missing something, but I haven't found much information on this...
Device.where('id', id).fetch({ withRelated: ['calendar.schedule.channel'] }).then(function(devices) {
  ...
})
What I do is fetch the Device with 'id' = id, specifying the relations of the model that I want returned (the ones declared in the model definitions in my question).
So:
{withRelated: ['calendar']} returns a Device with its Calendars
{withRelated: ['calendar.schedule']} returns a Device with its Calendars and their Schedules
{withRelated: ['calendar.schedule.channel']} returns a Device with its Calendars, their Schedules, and their Channels
and so on, if there are more related models you want to get.
The response is something like:
{
  id: 1,
  ...,
  calendars: [
    {
      id: 1,
      ...,
      schedules: [
        ...
      ]
    }
  ]
}
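A hedged usage sketch of walking the eagerly loaded relations to collect the device's channels; it assumes the models above, and the relation keys follow the names declared on each model (calendar, schedule, channel), which may differ from your serialized output:

Device.where('id', id)
  .fetch({ withRelated: ['calendar.schedule.channel'] })
  .then((device) => {
    const channels = [];
    // related() returns the loaded relation; for hasMany/belongsToMany it is a collection
    device.related('calendar').each((calendar) => {
      calendar.related('schedule').each((schedule) => {
        schedule.related('channel').each((channel) => channels.push(channel.toJSON()));
      });
    });
    console.log(channels);
  });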
