Let's say I have some sort of game. I have a buyItem function like this:
buyItem: function (req, res) {
    // query the users balance
    // deduct user balance
    // buy the item
}
If I spam that route, later requests read the balance before the deduction (the 2nd query) has completed, so the user's balance is still positive and the purchase goes through again.
What I have tried:
buyItem: function (req, res) {
    if (req.session.user.busy) return false;
    req.session.user.busy = true;
    // query the users balance
    // deduct user balance
    // buy the item
}
The problem is that req.session.user.busy will be undefined for the first ~5 requests, so that doesn't work either.
How do we handle such situations? I'm using the Sails.JS framework if that is important.
Update 2
Sails 1.0 now has full transaction support, via the .getDatastore() method. Example:
// Get a reference to the default datastore, and start a transaction.
await sails.getDatastore().transaction(async (db, proceed) => {
    // Now that we have a connection instance in `db`, pass it to Waterline
    // methods using `.usingConnection()` to make them part of the transaction:
    await BankAccount.update({ balance: 5000 }).usingConnection(db);
    // If an error is thrown, the transaction will be rolled back.
    // Or, you can catch errors yourself and call `proceed(err)`.
    // To commit the transaction, call `proceed()`.
    return proceed();
    // You can also return a result with `proceed(null, result)`.
});
Update
As several commenters have noted, the code below doesn't work when connection pooling is enabled. At the time this was originally posted, not all of the adapters used pooling by default, but at this point it should be assumed that they do, so each individual method call (.query(), .findOne(), etc.) could run on a different connection and therefore operate outside of the transaction. The next major version of Waterline will have transaction support, but until then, the only way to ensure that your queries are transactional is to use the raw database driver package (e.g. pg or mysql).
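For illustration only, here is a rough sketch of that approach using the raw mysql package. The pool settings and the names handleError, itemCost, and userId are placeholders, not part of the original answer; the point is that every query runs on the same pooled connection, so the BEGIN/COMMIT pair actually covers all of them:

var mysql = require('mysql');
var pool = mysql.createPool({ /* connection settings */ });

pool.getConnection(function(err, connection) {
    if (err) { return handleError(err); }
    // Start the transaction on this specific connection.
    connection.beginTransaction(function(err) {
        if (err) { connection.release(); return handleError(err); }
        connection.query(
            'UPDATE user SET balance = balance - ? WHERE id = ?',
            [itemCost, userId],
            function(err) {
                if (err) {
                    // Roll back, then return the connection to the pool.
                    return connection.rollback(function() {
                        connection.release();
                        handleError(err);
                    });
                }
                connection.commit(function(err) {
                    if (err) {
                        return connection.rollback(function() {
                            connection.release();
                            handleError(err);
                        });
                    }
                    connection.release();
                });
            });
    });
});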
It sounds like what you need is a transaction. Sails doesn't support transactions at the framework level yet (it's on the roadmap) but if you're using a database that supports them (like Postgres or MySQL), you can use the .query() method of your model to access the underlying adapter and run native commands. Here's an example:
buyItem: function(req, res) {
    // Note: a try/catch can't intercept errors thrown from inside async
    // callbacks, so errors are funneled through this rollback helper instead.
    function rollback(e) {
        User.query("ROLLBACK", function(err) {
            // The rollback failed--catastrophic error!
            if (err) { return res.serverError(err); }
            // Return the error that resulted in the rollback
            return res.serverError(e);
        });
    }
    // Start the transaction
    User.query("BEGIN", function(err) {
        if (err) { return res.serverError(err); }
        // Find the user
        User.findOne(req.param("userId")).exec(function(err, user) {
            if (err) { return rollback(err); }
            // Update the user balance
            user.balance = user.balance - req.param("itemCost");
            // Save the user
            user.save(function(err) {
                if (err) { return rollback(err); }
                // Commit the transaction
                User.query("COMMIT", function(err) {
                    if (err) { return rollback(err); }
                    // Display the updated user
                    res.json(user);
                });
            });
        });
    });
}
I haven't tested this out, but as long as you're not using multiple instances or clusters, you should just be able to store the status in memory. Because Node is single-threaded, there shouldn't be any problems with atomicity.
var inProgress = {};
function buyItem(req, res) {
if (inProgress[req.session.user.id]) {
// send error response
return;
}
inProgress[req.session.user.id] = true;
// or whatever the function is..
req.session.user.subtractBalance(10.00, function(err, success) {
delete inProgress[req.session.user.id];
// send success response
});
}
Related
I'm currently running a stack that consists of Express and MongoClient, with Mocha and Chai for testing. I'm working on writing test cases for my endpoint and am getting a random error that pops up from time to time. Below is a snippet of one of the suites I'm writing:
describe('Recipes with populated database', () => {
    before((done) => {
        var recipe1 = {"search_name": "mikes_mac_and_cheese", "text_friendly_name": "Mikes Mac and Cheese","ingredients": [{"name": "elbow_noodles","text_friendly_name": "elbow noodles","quantity": 12,"measurement": "oz"},{"name": "cheddar_cheese","text_friendly_name": "cheddar cheese","quantity": 6,"measurement": "oz"},{"name": "gouda_cheese","text_friendly_name": "gouda cheese","quantity": 6,"measurement": "oz"},{"name": "milk","text_friendly_name": "milk","quantity": 2,"measurement": "oz"}],"steps": ["Bring water to a boil","Cook noodels until al dente.","Add the milk and cheeses and melt down.","Stir constantly to ensure even coating and serve."],"course": ["dinner","lunch","side"],"prep_time": {"minutes": 15,"hours": 0},"cook_time":{"minutes": 25,"hours": 1},"cuisine": "italian","submitted_by": "User1","searchable": true};
        db.collectionExists('recipes').then((exists) => {
            if (exists) {
                db.getDb().dropCollection('recipes', (err, results) => {
                    if (err) {
                        throw err;
                    }
                });
            }
            db.getDb().createCollection('recipes', (err, results) => {
                if (err) {
                    throw err;
                }
            });
            db.getDb().collection('recipes').insertOne(recipe1, (err, result) => {
                done();
            });
        });
    });
The collectionExists() method simply takes in a name and returns a promise that resolves to a true/false value. I've already done some debugging and it is working just fine. Where I am getting a problem is when I hit the section of the code where I call createCollection: I get an error that the collection already exists, which leads to my tests failing. This appears to happen about every third time I run my tests.
The purpose of all this is to ensure that my database collection called recipes is completely empty before I start testing so I'm not stuck with old data or in an uncontrolled environment.
You have a race condition between .createCollection and .insertOne. In other words, they start at the same time and run in parallel, and there is no way to tell which will finish first.
The way .insert works in MongoDB is that if the collection is missing when you try to insert, the collection gets created. So if .insertOne executes first, the collection is created, and that is why you get the already exists error from the later attempt to createCollection.
Due to the async nature of DB calls, you have to place each subsequent call inside the callback of the previous one. This way there is no parallel execution:
before((done) => {
    var recipe1 = {/**/};
    db.collectionExists('recipes')
        .then((exists) => {
            if (exists) {
                // Drop the collection if it exists.
                db.getDb().dropCollection('recipes', (err) => {
                    if (err) {
                        // If there's an error we want to pass it up to before.
                        done(err);
                        return;
                    }
                    // Just insert a first document into the now-missing collection.
                    // It's going to be created.
                    // Notice the done callback.
                    db.getDb().collection('recipes').insertOne(recipe1, done);
                });
                // Return here so we don't also run the insert below in parallel
                // (that would call done twice).
                return;
            }
            // If there was no such collection - simply insert the first doc to create it.
            // Note that I'm passing before's done callback inside.
            db.getDb().collection('recipes').insertOne(recipe1, done);
        })
        // We don't want to lose an error from this promise either.
        .catch(err => done(err));
});
But actually, there is no need to drop and re-create the collection each time you run the tests. You can simply .remove all the documents in the before block (newer driver versions call this .deleteMany). So probably the right solution would be:
before((done) => {
    var recipe1 = {/**/};
    const recipes = db.getDb().collection('recipes');
    // Simply wipe out the data
    recipes.remove({}, err => {
        if (err) {
            done(err);
            return;
        }
        recipes.insertOne(recipe1, done);
    });
});
I need to fetch two different MongoDB collections (db.stats and db.tables) for the same request req.
Now, in the code below, I am nesting the queries within the callback function.
router.post('/', (req, res) => {
    let season = String(req.body.year);
    let resultData, resultTable;
    db.stats.findOne({Year: season}, function (err, data) {
        if (data) {
            resultData = getResult(data);
            db.tables.findOne({Year: season}, function (err, data) {
                if (data) {
                    resultTable = getTable(data);
                    res.render('index.html', {
                        data: {
                            result: resultData,
                            message: "Working"
                        }
                    });
                } else {
                    console.log("Error in Tables");
                }
            });
        } else {
            console.log("Error in Stats");
        }
    });
});
This code works, but there are a few things that don't seem right. So my question is:
How do I avoid this nested structure? It not only looks ugly, but the client side is also unresponsive while these requests are being processed, and that is bad.
What you have right now is known as callback hell in JavaScript. This is where Promises come in handy.
Here's what you can do:
router.post('/', (req, res) => {
    let season = String(req.body.year);
    var queries = [
        db.stats.findOne({ Year: season }),
        db.tables.findOne({ Year: season })
    ];
    Promise.all(queries)
        .then(results => {
            if (!results[0]) {
                console.log("Error in Stats");
                return; // bad response. a better way is to return status 500 here
            } else if (!results[1]) {
                console.log("Error in Tables");
                return; // bad response. a better way is to return status 500 here
            }
            let resultData = getResult(results[0]);
            let resultTable = getTable(results[1]);
            res.render('index.html', {
                data: {
                    result: resultData,
                    message: "Working"
                }
            });
        })
        .catch(err => {
            console.log("Error in getting queries", err);
            // bad response. a better way is to return status 500 here
        });
});
It looks like you are using Mongoose as your ODM to access your mongo database. When you don't pass a function as the second parameter, the value returned by the function call (e.g. db.stats.findOne({ Year: season })) will be a Promise. We put all of these pending Promises in an array and call Promise.all to resolve them. By using Promise.all, you wait until all of your database queries have executed before moving on to render your index.html view. The results of your database calls are stored in the results array in the same order as your queries array.
Also, I would recommend doing something like res.status(500).send("A descriptive error message here") whenever there is an error on the server side, in addition to the console.log calls.
The above will solve your nested structure problem, but the latter problem will still be there (i.e. the client side being unresponsive while processing these requests). To solve this, you first need to identify your bottleneck: which function calls are taking up most of the time? Since you are using findOne, I do not think the queries themselves are the bottleneck, unless the connection between your server and the database has latency issues.
I am going to assume that the POST request is not done through AJAX, since you have res.render in it, so this problem shouldn't be caused by any client-side code. I suspect that getResult or getTable (or both) takes a significant amount of time, given that it makes the client side unresponsive. What's the size of the data when you query your database? If it is so large that it takes a significant amount of time to process, I would recommend changing the way the request is made: use AJAX on the front end to make a POST request to the back end, which then returns the response as a JSON object. That way, the page in the browser does not need to reload, and you get a better user experience.
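As a rough sketch of that suggestion (not part of the original answer, and assuming resultData is computed as in the code above), the route could return JSON and the front end could fetch it without a reload:

// Server: respond with JSON instead of rendering a view.
router.post('/', (req, res) => {
    // ...run the queries and build resultData as above...
    res.json({ result: resultData, message: "Working" });
});

// Front end: POST via fetch and update the DOM without reloading.
fetch('/', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ year: 2017 })
})
    .then(response => response.json())
    .then(data => {
        // update the page with data.result here
    });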
The MongoDB driver returns a promise if you don't pass a callback, so you can use async/await:
router.post('/', async (req, res) => {
    let season = String(req.body.year);
    let resultData, resultTable;
    try {
        const [data1, data2] = await Promise.all([
            db.stats.findOne({Year: season}),
            db.tables.findOne({Year: season})
        ]);
        if (data1 && data2) {
            resultData = getResult(data1);
            resultTable = getTable(data2);
            return res.render('index.html', {
                data: {
                    result: resultData,
                    message: "Working"
                }
            });
        }
        console.log("Error: stats or tables not found");
        res.send('error');
    } catch (err) {
        console.log("Error", err);
        res.send('error');
    }
});
I would like to know if it's possible to run a series of SQL statements and have them all committed in a single transaction.
The scenario I am looking at is where an array has a series of values that I wish to insert into a table, not individually but as a unit.
I was looking at the following item, which provides a framework for transactions in node using pg. The individual queries appear to be nested within one another, so I am unsure how this would work with an array containing a variable number of elements.
https://github.com/brianc/node-postgres/wiki/Transactions
var pg = require('pg');

var rollback = function(client, done) {
    client.query('ROLLBACK', function(err) {
        //if there was a problem rolling back the query
        //something is seriously messed up. Return the error
        //to the done function to close & remove this client from
        //the pool. If you leave a client in the pool with an unaborted
        //transaction weird, hard to diagnose problems might happen.
        return done(err);
    });
};

pg.connect(function(err, client, done) {
    if (err) throw err;
    client.query('BEGIN', function(err) {
        if (err) return rollback(client, done);
        //as long as we do not call the `done` callback we can do
        //whatever we want...the client is ours until we call `done`
        //on the flip side, if you do call `done` before either COMMIT or ROLLBACK
        //what you are doing is returning a client back to the pool while it
        //is in the middle of a transaction.
        //Returning a client while it's in the middle of a transaction
        //will lead to weird & hard to diagnose errors.
        process.nextTick(function() {
            var text = 'UPDATE account SET money = money + $1 WHERE id = $2';
            client.query(text, [100, 1], function(err) {
                if (err) return rollback(client, done);
                client.query(text, [-100, 2], function(err) {
                    if (err) return rollback(client, done);
                    client.query('COMMIT', done);
                });
            });
        });
    });
});
My array logic is:
banking.forEach(function(batch) {
    client.query(text, [batch.amount, batch.id], function(err, result) {
        // ...
    });
});
pg-promise offers very flexible support for transactions. See Transactions.
It also supports partial nested transactions, aka savepoints.
The library manages transactions automatically, which is what should be used these days, because too many things can go wrong if you try to organize a transaction manually, as in your example.
See a related question: Optional INSERT statement in a transaction
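For the array case from the question, a minimal sketch with pg-promise might look like this (assuming db is a pg-promise database object, banking is the array from the question, and the UPDATE statement is illustrative):

db.tx(t => {
    // Build one query per array element; all of them run inside a single
    // transaction, which commits on success and rolls back on any failure.
    const queries = banking.map(batch =>
        t.none('UPDATE account SET money = money + $1 WHERE id = $2',
            [batch.amount, batch.id]));
    return t.batch(queries); // resolves when every query has settled
})
    .then(() => {
        // success: the transaction was committed
    })
    .catch(error => {
        // failure: the transaction was rolled back
    });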
Here's a simple TypeScript solution that avoids pg-promise:
import { PoolClient } from "pg"
import { pool } from "../database"

// Runs the callback inside a transaction, committing on success and rolling
// back on failure. The client is always released back to the pool.
const tx = async (callback: (client: PoolClient) => Promise<void>) => {
    const client = await pool.connect();
    try {
        await client.query('BEGIN')
        try {
            await callback(client)
            await client.query('COMMIT')
        } catch (e) {
            await client.query('ROLLBACK')
            // Rethrow so the caller knows the transaction failed.
            throw e
        }
    } finally {
        client.release()
    }
}

export { tx }
Usage:
...
let result;
await tx(async client => {
    const { rows } = await client.query<{ cnt: string }>(
        'SELECT COUNT(*) AS cnt FROM users WHERE username = $1', [username]);
    result = parseInt(rows[0].cnt) > 0;
});
return result;
I'm relatively new to the loopback game. How can I get observers to work?
For example, I want something to observe whenever user information is changed or a user is created.
Thanks
//this observer will be activated whenever the user is edited or created
User.observe('after save', function(ctx, next) {
    var theUserObject = ctx.instance;
    if (ctx.isNewInstance) {
        anotherModel.create(theUserObject.name, theUserObject.ID);
    } else {
        anotherModel.update(theUserObject.name, theUserObject.ID);
    }
    next();
});
Is this the correct use of ctx? Where should this code sit? Within User.js?
Just to put this in an answer (see comments above):
In general what you are doing is mostly correct. You want to put operation hooks in the common/models/my-model.js file(s), but that context object (ctx) will change depending on the hook (read the linked documentation above).
In your case, to create a new model instance, you need to access the app off of the current model and then execute create(), and be sure to call next() from within the callback of the create call:
//this observer will be activated whenever the user is edited or created
User.observe('after save', function(ctx, next) {
    var theUserObject = ctx.instance;
    if (ctx.isNewInstance) {
        User.app.models.anotherModel.create({name: theUserObject.name, id: theUserObject.ID}, function(err, newInstance) {
            next(err);
        });
    } else {
        User.app.models.anotherModel.findOne({ where: { /* some criteria */ } }, function(err, instance) {
            if (err) { return next(err); }
            // Don't leave the hook hanging if nothing matched.
            if (!instance) { return next(); }
            instance.updateAttributes({name: theUserObject.name, id: theUserObject.ID}, function(err) {
                next(err);
            });
        });
    }
});
I'm using Sequelize and PostgreSQL in my node.js application. I have two tables with one-to-one relationship - Customers and Users.
There is foreign key UserId in the Customers table. So, I firstly insert into the Users table and then insert into the Customers with the last inserted UserId. Here is my controller code:
var self = this;
async.each(data, function(row, callback) {
    var userData = {
        name: row.name,
        /** **/
    };
    // self.User is User model
    self.User.create(userData).then(function(user) {
        console.log("[User.create] succeeded");
        /** **/
        // self.Model is Customer model
        self.Model.create(cusData).then(function(customer) {
            console.log("[Customer.create] succeeded");
            /** **/
        }).catch(function(err) {
            throw err;
        }); // EOL self.Model.create
    }).catch(function(err) {
        throw err;
    }); // EOL self.User.create
    callback();
}, function(err) {
    if (err) {
        throw err;
    }
});
I'm using async.each() to loop over the array of 2 records synchronously. When I inserted the two records, the console output was:
[User.create] succeeded
[User.create] succeeded
[Customer.create] succeeded
[Customer.create] succeeded
What I expected is:
[User.create] succeeded
[Customer.create] succeeded
[User.create] succeeded
[Customer.create] succeeded
I think it could be a problem of synchronous flow in asynchronous programming. What am I doing wrong? I think I'm using the callbacks correctly.
You're initiating the two inserts asynchronously, so you have no guarantee on the order of the follow-up queries. For all you know, depending on locks, you could get either the current or your expected result.
To force the order, move the iteration forward from within the inner callback:
self.Model.create(cusData).then(function(customer) {
    console.log("[Customer.create] succeeded");
    /** move the iterator forward here, using e.g. recursion **/
})
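For instance, here is a minimal sketch reusing the names from the question: switching to async.eachSeries and moving the iterator's callback into the final .then forces each row to finish completely before the next one starts (the cusData fields are placeholders):

async.eachSeries(data, function(row, callback) {
    var userData = { name: row.name /* ... */ };
    self.User.create(userData).then(function(user) {
        console.log("[User.create] succeeded");
        var cusData = { UserId: user.id /* ... */ };
        return self.Model.create(cusData);
    }).then(function(customer) {
        console.log("[Customer.create] succeeded");
        // Only now move the iteration forward.
        callback();
    }).catch(callback);
}, function(err) {
    if (err) { throw err; }
});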