Transaction over two Tables with Postgres in Sails.js - node.js

I am trying to implement a transaction in Node.js (Sails.js) with PostgreSQL and the Sails Waterline ORM.
I tried to adapt this answer. That answer deals with only one table; in my problem I have 2 tables which have to be locked until the transaction is over.
What this code should do:
find the User with :id
if this User.coins > 0:
update User.coins = 0
create a new record in the table MyTransaction
But the snippet doesn't lock both tables for the duration of the transaction:
try {
  // Start the transaction
  sails.models.user.query("BEGIN", function (err) {
    if (err) {
      throw new Error(err);
    }
    // Find the user
    sails.models.user.findOne(currentUser.id).exec(function (err, user) {
      if (err) {
        throw new Error(err);
      }
      if (user.coins > 0) {
        var params = {
          user_id: req.session.passport.user,
          publicAddress: bitcoin_address,
          amount: user.coins,
          wohnAdresse: wohnAdresse
        };
        // Update the user balance
        user.coins = 0;
        // Save the user
        user.save(function (err) {
          if (err) {
            throw new Error(err);
          }
          sails.models.myTransaction.create(params).exec(function (err, transaction) {
            if (err) {
              payout = {success: false};
              payout.transactionError = err;
              console.log("ROLLBACK! ROLLBACK!");
              return res.serverError(err);
            }
            // Commit the transaction
            sails.models.user.query("COMMIT", function (err) {
              if (err) {
                throw new Error(err);
              }
              payout = {success: true};
              console.log("PAYOUT PAYOUT", payout);
              console.log("PAYOUT PAYOUT", transaction);
              return res.json(payout);
            });
          });
        });
      } // END of if user.coins > 0
    });
  });
}
// If there are any problems, roll back the transaction
catch (e) {
  User.query("ROLLBACK", function (err) {
    // The rollback failed--Catastrophic error!
    if (err) {
      return res.serverError(err);
    }
    // Return the error that resulted in the rollback
    return res.serverError(e);
  });
}
So when I run a loop (5 times) from the frontend console, it creates 5 records in the myTransaction table. How can I do this properly?
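For reference, the pattern that is usually needed here is to run every statement of the transaction on one and the same connection and to lock the user row with SELECT ... FOR UPDATE, so that concurrent requests queue up on that row instead of all reading the old balance. Each Waterline call above may grab a different pooled connection, in which case the BEGIN/COMMIT never apply to the queries in between. Below is a minimal sketch using node-postgres (pg) directly; the pool setup and the table names "user" and mytransaction with their columns are assumptions for illustration, not the question's actual schema:

const { Pool } = require('pg');
const pool = new Pool(); // assumption: connection settings come from the environment

async function payoutCoins(userId) {
  const client = await pool.connect(); // one client for the whole transaction
  try {
    await client.query('BEGIN');
    // Lock the row: a second concurrent payout blocks here until COMMIT/ROLLBACK
    const { rows } = await client.query(
      'SELECT coins FROM "user" WHERE id = $1 FOR UPDATE', [userId]);
    if (rows.length === 0 || rows[0].coins <= 0) {
      await client.query('ROLLBACK');
      return { success: false };
    }
    await client.query('UPDATE "user" SET coins = 0 WHERE id = $1', [userId]);
    await client.query(
      'INSERT INTO mytransaction (user_id, amount) VALUES ($1, $2)',
      [userId, rows[0].coins]);
    await client.query('COMMIT');
    return { success: true };
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    client.release();
  }
}

With the row locked like this, running the frontend loop five times should only ever produce one mytransaction record, because the later requests see coins = 0.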

Related

Query redshift cluster using NodeJs asynchronously

My task is to copy a few Redshift tables from one cluster to a new cluster.
For this I am writing a script in Node.js.
I am using the aws-sdk RedshiftData API to fetch the data.
I have two separate queries which I want to run in parallel. Following is my code:
class syncRedShiftNodes {
  constructor(){ ... }
  readDataOne(){
    let newSqlQuery = `select * from ${this.tableName} limit 10`;
    const params = {
      ClusterIdentifier: clusterIdentifier,
      Sql: newSqlQuery,
      Database: database,
      DbUser: dbUser
    };
    return new Promise((resolve, reject)=>{
      return awsRedshift.executeStatement(params, function(err, res){
        if (err) console.log(err, err.stack); // an error occurred
        else{
          return awsRedshift.getStatementResult({Id:res.Id}, function(error, data){
            if (error) console.log(error, error.stack); // an error occurred
            else return data;
          });
        }
      });
    });
  }
  readDataTwo(){ ...//identical to above function except the query }
  main(){
    return Promise.all([this.readDataOne(), this.readDataTwo()])
      .spread((data1, data2)=>{
        console.log("promise resolved!!");
        return true;
      });
  }
}
The problem is that my code never reaches the "promise resolved" log. If I put a log in the callback of the Redshift getStatementResult, that is printed correctly, but execution never reaches the Promise.all().then statement, and I am not able to understand why.
Another question I had in mind: is it good practice to use such a pattern inside a class?
You didn't resolve or reject your promise inside the class.
Example below
class syncRedShiftNodes {
  constructor() {}

  readDataOne() {
    let newSqlQuery = `select * from ${this.tableName} limit 10`;
    const params = {
      ClusterIdentifier: clusterIdentifier,
      Sql: newSqlQuery,
      Database: database,
      DbUser: dbUser,
    };
    return new Promise((resolve, reject) => {
      awsRedshift.executeStatement(params, function (err, res) {
        if (err) {
          console.log(err, err.stack);
          reject(err);
        } else {
          awsRedshift.getStatementResult(
            { Id: res.Id },
            function (error, data) {
              if (error) {
                console.log(error, error.stack);
                reject(error);
              } else {
                resolve(data);
              }
            }
          );
        }
      });
    });
  }

  readDataTwo() {}

  async main() {
    try {
      const result = await Promise.all([
        this.readDataOne(),
        this.readDataTwo(),
      ]);
      return result;
    } catch (err) {
      console.log(err);
    }
  }
}
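As a side note, if the project is on the AWS SDK for JavaScript v2, the manual Promise wrapper can usually be dropped, because every SDK request exposes a .promise() method. A sketch under that assumption, reusing the same awsRedshift client and parameters as above:

async readDataOne() {
  const newSqlQuery = `select * from ${this.tableName} limit 10`;
  const params = {
    ClusterIdentifier: clusterIdentifier,
    Sql: newSqlQuery,
    Database: database,
    DbUser: dbUser,
  };
  // .promise() turns the SDK request into a native Promise (AWS SDK v2)
  const res = await awsRedshift.executeStatement(params).promise();
  return awsRedshift.getStatementResult({ Id: res.Id }).promise();
}

Keep in mind that the Redshift Data API also runs statements asynchronously on the server side, so in practice you may need to poll describeStatement until the statement has finished before getStatementResult returns any rows.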

Asynchronous programming sequencing problems in node.js

I have picked up a project where, when the Node.js program starts for the first time, no database exists yet. The program should create the database tables if they don't already exist.
However, in the sample program below, the data is not inserted if the database did not exist on first run because the select statement fails.
The output of the code below is:
$ node dbtest.js
finished initialise_database
program ended.
select err: { Error: SQLITE_ERROR: no such table: mytable errno: 1, code: 'SQLITE_ERROR' }
successfully created mytable table
database closed.
As you can see from the logging, the code assumes synchronous execution.
I assume that what is happening is that the Node.js runtime uses different threads to schedule the database functions, so they run in parallel.
I need the CREATE TABLE command to complete before proceeding. How would I achieve this?
Is there some standard way to achieve such a thing in Node.js?
code below:
// npm install sqlite3 - to install sqlite3
const sqlite3 = require('sqlite3').verbose();

let db = initialise_database();
check_and_update(db); // Calling this function upon starting the server.
close_database(db);
console.log('program ended.');

function initialise_database() {
  // Establishing a database connection.
  let db = new sqlite3.Database('database1.db', (err) => {
    if (err) {
      return console.error(err.message);
    }
  });
  // new db always succeeds even if no file exists - if empty file, have to generate table structure
  db.run("CREATE TABLE IF NOT exists 'mytable' ('num1' INTEGER, 'num2' INTEGER, 'text1' TEXT);", function(err) {
    if (err) {
      console.log("Create table error: ", err);
    }
    console.log("successfully created mytable table");
  });
  console.log("finished initialise_database");
  return db;
}

function check_and_update(db) {
  db.all("SELECT * FROM mytable", function(err, data) {
    if (err) {
      console.log("select err: ", err);
    } else {
      db.run('INSERT INTO mytable (num1, num2, text1) VALUES (?, ?, ?)', [1, 2, 'hi guys!!!'], function(err) {
        if (err)
          console.log("insert err: ", err);
      });
    }
  });
}

function close_database(db) {
  db.close((err) => {
    if (err) {
      return console.error(err.message);
    }
    console.log('database closed.');
  });
}
Database requests are asynchronous; you have to deal with them in an asynchronous way.
Which can be:
Callback
Promise
Async/await
Otherwise you will try to perform a request on a database that is not yet initialized.
Here is an example using a Promise.
const sqlite3 = require('sqlite3').verbose();

let dbPtr = false;

initialise_database()
  .then((db) => {
    dbPtr = db;
    return check_and_update(dbPtr);
  })
  .then(() => {
    close_database(dbPtr);
    // All is done
    console.log('program ended.');
  })
  .catch((err) => {
    // Deal with the error
  });

function initialise_database() {
  return new Promise((resolve, reject) => {
    // Establishing a database connection.
    const db = new sqlite3.Database('database1.db', (err) => {
      if (err) {
        console.error(err.message);
        return reject(err);
      }
      db.run('...', function(err) {
        if (err) {
          console.log("Create table error: ", err);
          return reject(err);
        }
        console.log("successfully created mytable table");
        return resolve(db);
      });
    });
  });
}

function check_and_update(db) {
  return new Promise((resolve, reject) => {
    db.all('...', function(err, data) {
      if (err) {
        console.log("select err: ", err);
        return reject(err);
      }
      db.run('...', [1, 2, 'hi guys!!!'], function(err) {
        if (err) {
          console.log("insert err: ", err);
          return reject(err);
        }
        return resolve();
      });
    });
  });
}

function close_database(db) {
  db.close();
}
#EDIT
Looking at the documentation, it seems that db.close() does not take a callback parameter, so I've modified the snippet.
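For completeness, the same flow reads a little flatter with async/await on top of the promisified helpers above; a sketch that reuses initialise_database, check_and_update and close_database from this answer unchanged:

async function main() {
  try {
    const db = await initialise_database(); // resolves only after CREATE TABLE has finished
    await check_and_update(db);             // SELECT, then INSERT
    close_database(db);
    console.log('program ended.');
  } catch (err) {
    // Deal with the error
    console.error(err);
  }
}

main();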

Is it possible to have nested queries in Postgres pg module

Here is my code that tries to update a record in the db.
But if the record is not there then I want to insert it.
Is it OK to call client.query again? Or what's the best way to do it?
const {Pool} = require('pg');
const pool = new Pool(POSTGRES_CONFIG);

pool.connect((err, client, release) => {
  if (err) {
    return console.error('Error acquiring client', err.stack)
  }
  ………
  client.query(query, queryValues, (err, result) => {
    release();
    if (result.rowCount <= 0) {
      //**** CAN I CALL IT AGAIN WITH OTHER PARAMETERS TO INSERT? ****
      client.query(....... => {
        release();
        if (err) {
          if (err.code === POSTGRES_ERRORS.UNIQUE_VIOLATION) {
            return console.error('KEY ALREADY EXISTS');
          } else {
            return console.error('query error', err);
          }
        }
      }
    }
  });
});
It is perfectly OK as long as you call release after you're done with the client. From the docs:
You must call the releaseCallback or client.release (which points to
the releaseCallback) when you are finished with a client.
So, you could do this:
client.query(query, queryValues, (err, result) => {
  // don't release just yet
  if (result.rowCount <= 0) {
    //**** CAN I CALL IT AGAIN WITH OTHER PARAMETERS TO INSERT? ****
    client.query(....... => {
      release(); // now you're done with the client so you can release it
      if (err) {
        if (err.code === POSTGRES_ERRORS.UNIQUE_VIOLATION) {
          return console.error('KEY ALREADY EXISTS');
        } else {
          return console.error('query error', err);
        }
      }
    }
  }
});
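Depending on the schema, it may also be worth collapsing this update-then-insert round trip into a single statement with Postgres's INSERT ... ON CONFLICT (available since 9.5), which avoids the race between the two queries entirely. A hypothetical sketch, since the real table and columns are not shown in the question:

// Upsert in one query: insert the row, or update it if the key already exists
client.query(
  `INSERT INTO my_table (id, value)
   VALUES ($1, $2)
   ON CONFLICT (id) DO UPDATE SET value = EXCLUDED.value`,
  [someId, someValue],
  (err, result) => {
    release(); // done with the client either way
    if (err) {
      return console.error('query error', err);
    }
    console.log('rows affected:', result.rowCount);
  }
);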

Resolve not working in loop Node.js

Hi, I have a problem running a loop and getting the returned data using Promises.
I have a getStudentMarks method that fetches a student's marks from the database, subject-wise.
getStudentMarks: function(studentId, studentStandard) {
  console.log("getStudentMarks invoked...");
  return new Promise(function(resolve, reject) {
    r.table('student_subjects').filter({
      "studentId": studentId,
      "studentStandard": studentStandard
    }).pluck("subjectId", "subjectName").run(connection, function(err, cursor) {
      if (err) {
        throw err;
        reject(err);
      } else {
        cursor.toArray(function(err, result) {
          if (err) {
            throw err
          } else {
            console.log(result.length);
            if (result.length > 0) {
              studentSubjectArray = result;
              var studentMarksSubjectWiseArray = [];
              studentSubjectArray.forEach(function(elementPhoto) {
                r.table('student_marks').filter({
                  "studentId": studentId,
                  "subjectId": studentSubjectArray.subjectId
                }).run(connection, function(err, cursor) {
                  if (err) {
                    throw err;
                    reject(err);
                  } else {
                    cursor.toArray(function(err, result_marks) {
                      var studnetMarksDataObject = {
                        subjectId: studentSubjectArray.subjectId,
                        subjectName: studentSubjectArray.subjectName,
                        marks: result.marks
                      };
                      studentMarksSubjectWiseArray.push(studnetMarksDataObject);
                    });
                  }
                });
              });
              resolve(studentMarksSubjectWiseArray);
            }
          }
        });
      }
    });
  });
}
I'm invoking the method with:
app.post('/getStudentMarks', function(req, res) {
  ubm.getStudentMarks(req.body.studentId, req.body.studentStandard)
    .then((data) => {
      console.log('return data: ' + data);
    })
    .catch((err) => {
      console.log(err);
    });
});
When I run the code it works absolutely fine and there is no error: I get all the student marks objects in the studentMarksSubjectWiseArray array. But the problem is that the resolve gets executed even before the studentSubjectArray loop has completed, so I get a blank array as the return value. How do I solve this problem? I understand that I'm not doing the Promises right; I'm new to Promises, so I'm not able to figure out the right way.
That happens because inside your studentSubjectArray.forEach statement you perform a set of asynchronous operations (r.table(...).filter(...).run()) and push their results into the array. However, those actions finish after you call resolve(), so studentMarksSubjectWiseArray is still empty at that point. In this case you have to use the Promise.all() method.
let promisesArray = [];
studentSubjectArray.forEach((elementPhoto) => {
  let singlePromise = new Promise((resolve, reject) => {
    // here perform the asynchronous operation, e.g. r.table(...).filter(...).run(),
    // and in the end resolve with the single result (studentMarksDataObject)
    r.table('student_marks').filter({
      "studentId": studentId,
      "subjectId": elementPhoto.subjectId // use the current element, not the whole array
    }).run(connection, function(err, cursor) {
      if (err) {
        return reject(err); // reject instead of throwing inside the async callback
      } else {
        cursor.toArray(function(err, result_marks) {
          var studentMarksDataObject = {
            subjectId: elementPhoto.subjectId,
            subjectName: elementPhoto.subjectName,
            marks: result_marks
          };
          resolve(studentMarksDataObject);
        });
      }
    });
  });
  promisesArray.push(singlePromise);
});

Promise.all(promisesArray).then((result) => {
  // here the result would be an array of results from the previously performed set of asynchronous operations
});
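A slightly tidier variant of the same idea is to build the array with map() instead of forEach() plus push(), and hand the combined result back to the outer getStudentMarks promise from the question; a sketch, with the per-subject query body elided since it is identical to the one above:

const promisesArray = studentSubjectArray.map((elementPhoto) => {
  return new Promise((resolve, reject) => {
    // same r.table('student_marks')...run() call as above,
    // resolving with the marks object for this elementPhoto
  });
});

Promise.all(promisesArray)
  .then((studentMarksSubjectWiseArray) => resolve(studentMarksSubjectWiseArray))
  .catch(reject); // propagate the first failure to the outer promise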

How to commit a transaction after a loop over asynchronous updates

I'm using node-mysql-queues to handle database transactions in my application.
for (lineitem in lineitems) {
  transaction.query("SELECT n from inventory WHERE productId = ?", [lineitem], function (err, rows) {
    if (err)
      transaction.rollback();
    var newN = rows[0].n - lineitems[lineitem].quantity;
    if (newN >= 0) {
      transaction.query("UPDATE inventory SET n = ? WHERE productId = ?", [newN, lineitem], function (err) {
        if (err) {
          transaction.rollback();
          console.log(err);
        }
        // here I want to commit if all updates were successful!!!
      });
    }
  })
}
As you can see in the code, I don't know how to handle the commit part. If it were synchronous it would be easy, but I don't know how to solve this asynchronously.
Thanks & Regards
This is easy with something like the async module.
async.each(lineitems, performQuery, function(err) {
  if (err) {
    transaction.rollback();
    console.log(err);
    return;
  }
  transaction.commit();
});

function performQuery(lineitem, callback) {
  transaction.query("SELECT n from inventory WHERE productId = ?", [lineitem], function (err, rows) {
    if (err) return callback(err);
    var newN = rows[0].n - lineitems[lineitem].quantity;
    if (newN >= 0) {
      transaction.query("UPDATE inventory SET n = ? WHERE productId = ?", [newN, lineitem], function (err) {
        if (err) return callback(err);
        callback();
      });
    } else {
      // the callback must be invoked in this branch too, otherwise async.each never finishes;
      // treat insufficient stock as an error so the transaction gets rolled back
      callback(new Error("Not enough stock for product " + lineitem));
    }
  });
}
I found a solution to my problem. Since I had problems doing a SELECT and then an UPDATE depending on the result of the SELECT, I implemented something like a conditional update.
See my code:
mysql.getTransaction(function (err, transaction) {
  // For each item in the cart, call the performUpdate method.
  // If an error occurs, roll back the whole transaction.
  async.each(lineitems, performUpdate, function (err) {
    if (err) {
      transaction.rollback();
      res.json(err.message);
      return;
    }
    // Since we are going to call another callback, we need to pause the transaction, else it would be committed automatically
    transaction.pause();
    // If the updates were successful, create an Order in MongoDB
    orderController.createMongoOrder(lineitems, req.session.cart.total, req.session.passport.user, function (err) {
      if (err) {
        // If there is a problem with Mongo, cancel the transaction
        transaction.resume();
        transaction.rollback();
        res.json(err.message);
      } else {
        // Else commit the transaction and empty the cart
        transaction.resume();
        transaction.commit();
        req.session.cart = {
          products: {},
          count: 0,
          total: 0
        };
        res.json("Order accepted!");
      }
    })
  });

  function performUpdate(lineitem, callback) {
    // This query can be seen as a conditional update. If the number of articles in stock is not sufficient, there will be no affectedRows in the returned info message
    transaction.query("UPDATE inventory SET n = n - ? WHERE productId = ? AND n >= ?", [lineitem.quantity, lineitem.id, lineitem.quantity], function (err, info) {
      if (err) {
        return callback(err);
      } else {
        // If for any item there is no affectedRow, the update failed. This should make the whole transaction roll back, so we return an error to the callback
        if (info.affectedRows != 1) {
          return callback(new Error("Article: " + lineitem.productObject.name + " out of stock!"))
        }
        return callback(null, info);
      }
    }).execute()
  }
})
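For readers on a newer stack, the same conditional-update-inside-a-transaction idea can also be expressed with async/await. The following is a rough sketch using the mysql2/promise driver instead of node-mysql-queues, with the pool configuration and the shape of lineitems (an array of { id, quantity }) assumed:

const mysql = require('mysql2/promise');
const pool = mysql.createPool({ /* connection settings assumed */ });

async function checkout(lineitems) {
  const conn = await pool.getConnection();
  try {
    await conn.beginTransaction();
    for (const item of lineitems) {
      // Conditional update: only touches the row if enough stock is left
      const [result] = await conn.query(
        'UPDATE inventory SET n = n - ? WHERE productId = ? AND n >= ?',
        [item.quantity, item.id, item.quantity]);
      if (result.affectedRows !== 1) {
        throw new Error('Article ' + item.id + ' out of stock!');
      }
    }
    await conn.commit();
  } catch (err) {
    await conn.rollback();
    throw err; // caller decides how to report the failure
  } finally {
    conn.release();
  }
}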
