I am trying to create nested transactions in NodeJs using MSSQL NPM. I am using following code block
// Wire the mssql driver through the project's SqlManager wrapper.
var sqlModule = require("mssql");
var sqlManagerClass = require("./sqlManager.js");
var sql = new sqlManagerClass(sqlModule);

// Connection settings; requestTimeout is 120s to cover slow stored procedures.
var dbConfig = {
user: "dbo",
password: "***********",
server: "127.0.0.1",
database: "dbname",
requestTimeout: 120000,
};
sql.setConfig(dbConfig);
// Outer transaction: intended to guard the whole operation.
sql.beginTransaction(function (transaction) {
if (transaction == null) {
// NOTE(review): `callback` is not defined in this snippet — presumably the
// enclosing function's callback; confirm against the full module.
callback("Unable to Start Transaction", null);
return;
}
// "Nested" transaction: SQL Server has no real nested transactions, so this
// inner begin/commit pair only adjusts @@TRANCOUNT (see the answer below).
sql.beginTransaction(function (newTransaction) {
if (newTransaction == null) {
callback("Unable to Start newTransaction", null);
return;
}
// Run the insert under the inner transaction handle (passed as the last
// argument to execute).
sql.execute(
"dbname.dbo.insert_dest",
[],
[],
function (err, results) {
console.log(results);
// Commit the inner transaction first...
newTransaction.commit(function (err) {
if (err) {
console.log(err);
}
sql.execute("dbname.dbo.select_dest", [], [], function (
err,
results2
) {
if (err) {
console.log(err);
return;
}
console.log(results2);
// ...then roll back the outer transaction, expecting it to undo the
// committed insert as well.
transaction.rollback(function (rollBackErr) {
if (rollBackErr) {
console.log(rollBackErr);
}
// Re-select to observe whether the rollback actually undid the insert.
sql.execute("dbname.dbo.select_dest", [], [], function (
err,
results2
) {
if (err) {
console.log(err);
return;
}
console.log(results2);
console.log('end')
});
});
});
});
},
newTransaction
);
});
});
Here is the code that creates the transaction:
// Begin a SQL transaction, or reuse one already in flight.
//
// callback(transaction) receives the live transaction object, or null when
// the transaction could not be started. The optional `transaction` argument
// lets nested calls share an existing transaction instead of opening a new
// one.
this.beginTransaction = function(callback, transaction) {
// If the optional transaction argument is passed in, we are already working
// with a transaction: just hand it back.
if (typeof transaction !== "undefined") {
callback(transaction);
return;
}
self.connectSql(function(err) {
// NOTE(review): the connect error is not checked here — presumably
// Transaction/begin will fail on a dead connection; confirm.
var transaction = new self.sql.Transaction(self.connection);
transaction.begin(function(err) {
if (err) {
self.log("SQLManager - Error Beginning Transaction " + err);
callback(null);
// Without this return, the callback below would fire a second time,
// handing the caller a transaction that never actually began.
return;
}
// Callback with the transaction handle.
callback(transaction);
});
});
}
I have to create 2 transactions so that I can start a transaction and perform a set of operations on different stored procedures; if anything goes wrong, I can revert to my original state using the first transaction.
In short I am trying to achieve something like following SQL code
BEGIN TRAN T1;
BEGIN TRAN M2
INSERT INTO dbo.dest
(
Code,
Text,
Type
)
VALUES
( 'l', -- Code - varchar(50)
'Love', -- Text - varchar(50)
2 -- Type - int
)
COMMIT TRAN M2
// I am calling another Sp which is similar to this
SELECT * FROM dbo.dest
//validation the code
//if error
ROLLBACK TRAN T1
My question is how can I create nested transaction so that I can revert back entire transaction if any error occur after the first transaction complete. Thank you.
First read this
A SQL Server DBA myth a day: (26/30) nested transactions are real
To see if SQL Server "nested transactions" or savepoints are really going to help you.
Then you'll probably have to handle this all yourself by issuing the transaction control statements using sql.execute() eg:
sql.execute("begin transaction");
//something that works
sql.execute("save transaction MyTran");
//something that fails
sql.execute("rollback transaction MyTran");
sql.execute("commit transaction");
how can I create nested transaction so that I can revert back entire transaction if any error occur after the first transaction complete
What you have will do that. Nested transactions are not real, so calling
begin tran
...
begin tran --<-- ##trancount += 1, but nothing else really happens
...
commit tran --<--nothing is really commited yet. ##trancount -= 1
...
rollback tran --<-- all work is rolled back
Related
Seems like a super basic task, but I just cannot get this to work (not very experienced with mongo or nodeJS).
I have an array of records. I need to check the DB to see if any records with a matching name already exist and if they do grab that record so I can update it.
Right now I am trying this
function hit_the_db(db, record_name, site_id) {
return new Promise((resolve, reject) => {
// BUG (explained in the answer below): toArray() is given a callback, so
// the matching documents arrive asynchronously via `result`; the value
// assigned to `record` here is NOT the array of records.
var record = db.collection('' + site_id + '_campaigns').find({name: record_name}).toArray(function(err, result) {
if (err) {
console.log('...error => ' + err.message);
reject(err);
} else {
console.log('...promise resolved...');
resolve(result);
}
});
// This line runs before the query completes, and `record` is not the
// result array, so `record.length` is not the number of matches here.
console.log('...second layer of select successful, returning data for ' + record.length + ' records...');
// Returning from a Promise executor has no effect; the promise is settled
// only by the resolve/reject calls above.
return record;
});
}
This query works in another part of the app so I tried to just copy it over, but I am not getting any records returned even though I know there should be with the data I am sending over.
site_id is just a string that would look like ksdlfnsdlfu893hdsvSFJSDgfsdk. The record_name is also just a string that could really be anything but it is previously filtered so no spaces or special characters, most are something along these lines this-is-the-name.
With the names coming through there should be at least one found record for each, but I am getting nothing returned. I just cannot wrap my head around using mongo for these basic tasks, if anyone can help it would be greatly appreciated.
I am just using nodeJS and connecting to mongoDB, there is no express or mongoose or anything like that.
The problem here is that you are mixing callback and promises for async code handling. When you call:
var record = db.collection('' + site_id + '_campaigns').find({name: record_name}).toArray(function(err, result) {
You are passing in a callback function, which will receive the resulting array of mongo records in a parameter called result, but then assigning the immediate returned value to a variable called 'record', which is not going to contain anything.
Here is a cleaned up version of your function.
// Look up every campaign document whose name matches `record_name` and hand
// the resulting array (plus any driver error) to the supplied callback.
function hit_the_db(db, site_id, record_name, callback) {
  const campaigns = db.collection(site_id + 'test_campaigns');
  campaigns.find({ name: record_name }).toArray(function(err, docs) {
    if (err) {
      console.log('err:', err);
    }
    // Forward both the error and the matched documents to the caller.
    callback(err, docs);
  });
}
Here is optional code for testing the above function.
// This is called to generate test data
function insert_test_records_callback(db, site_id, record_name, insert_count, callback) {
const testRecords = [];
for (let i = 0; i < insert_count; ++i) {
testRecords.push({name: record_name, val: i});
}
db.collection(site_id + 'test_campaigns').insertMany(testRecords, function(err, result) {
return callback(err);
});
}
// This cleans up by deleting all test records.
function delete_test_records_callback(db, site_id, record_name, callback) {
db.collection(site_id + 'test_campaigns').deleteMany({name: record_name}, function(err, result) {
return callback(err);
});
}
// Test function to insert, query, clean up test records.
// End-to-end exercise: seed 3 test records, query them back through
// hit_the_db, then delete them again. Each step starts only once the
// previous asynchronous step has called back.
function test_callback(db) {
  const site_id = 'ksdlfnsdlfu893hdsvSFJSDgfsdk';
  const test_record_name = 'test_record_callback';
  insert_test_records_callback(db, site_id, test_record_name, 3, function(insertErr) {
    // Execution reaches here only after insertion has completed.
    if (insertErr) {
      console.log(insertErr);
      return;
    }
    hit_the_db(db, site_id, test_record_name, function(queryErr, records) {
      console.log('hit_the_db - err:', queryErr);
      console.log('hit_the_db - records:', records);
      // Clean up the seeded documents regardless of the query outcome.
      delete_test_records_callback(db, site_id, test_record_name, function() {
        console.log('cleaned up test records.');
      });
    });
  });
}
Output:
hit_the_db - err: null
hit_the_db - records: [ { _id: 5efe09084d078f4b7952dea8,
name: 'test_record_callback',
val: 0 },
{ _id: 5efe09084d078f4b7952dea9,
name: 'test_record_callback',
val: 1 },
{ _id: 5efe09084d078f4b7952deaa,
name: 'test_record_callback',
val: 2 } ]
cleaned up test records.
I would like to know if it's possible to run a series of SQL statements and have them all committed in a single transaction.
The scenario I am looking at is where an array has a series of values that I wish to insert into a table, not individually but as a unit.
I was looking at the following item which provides a framework for transactions in node using pg. The individual transactions appear to be nested within one another so I am unsure of how this would work with an array containing a variable number of elements.
https://github.com/brianc/node-postgres/wiki/Transactions
var pg = require('pg');
// Issue ROLLBACK on the checked-out client, then hand the client back to the
// pool via `done`. Passing a rollback error to `done` tells pg to destroy the
// client rather than pool it — a client stuck mid-transaction would cause
// weird, hard-to-diagnose problems for the next borrower.
var rollback = function(client, done) {
  client.query('ROLLBACK', function(err) {
    return done(err);
  });
};
pg.connect(function(err, client, done) {
if(err) throw err;
// Open the transaction explicitly; every statement until COMMIT/ROLLBACK
// must run on this same checked-out client.
client.query('BEGIN', function(err) {
if(err) return rollback(client, done);
//as long as we do not call the `done` callback we can do
//whatever we want...the client is ours until we call `done`
//on the flip side, if you do call `done` before either COMMIT or ROLLBACK
//what you are doing is returning a client back to the pool while it
//is in the middle of a transaction.
//Returning a client while its in the middle of a transaction
//will lead to weird & hard to diagnose errors.
process.nextTick(function() {
// NOTE(review): INSERT ... VALUES ... WHERE is not valid SQL; the original
// wiki example presumably intended an UPDATE of account.money — verify
// before reusing this statement.
var text = 'INSERT INTO account(money) VALUES($1) WHERE id = $2';
client.query(text, [100, 1], function(err) {
if(err) return rollback(client, done);
// Second statement of the same transaction, chained in the callback so it
// only runs after the first one succeeded.
client.query(text, [-100, 2], function(err) {
if(err) return rollback(client, done);
// COMMIT, then release the client back to the pool via `done`.
client.query('COMMIT', done);
});
});
});
});
});
My array logic is:
// Queue one INSERT per batch entry on the shared client. The original
// snippet was syntactically incomplete (unclosed parentheses and an empty,
// unterminated callback); this closes it properly.
banking.forEach(function(batch) {
  client.query(text, [batch.amount, batch.id], function(err, result) {
    // TODO: handle `err` here (e.g. rollback) before issuing COMMIT.
  });
});
pg-promise offers a very flexible support for transactions. See Transactions.
It also supports partial nested transactions, aka savepoints.
The library implements transactions automatically, which is what should be used these days, because too many things can go wrong, if you try organizing a transaction manually as you do in your example.
See a related question: Optional INSERT statement in a transaction
Here's a simple TypeScript solution to avoid pg-promise
import { PoolClient } from "pg"
import { pool } from "../database"
// Run `callback` inside a single database transaction on one pooled client.
// BEGIN/COMMIT wrap the callback; any failure triggers ROLLBACK. The client
// is always released back to the pool, even on error.
const tx = async (callback: (client: PoolClient) => void) => {
  const client = await pool.connect();
  try {
    await client.query('BEGIN')
    try {
      await callback(client)
      await client.query('COMMIT')
    } catch (e) {
      await client.query('ROLLBACK')
      // Re-throw so callers learn the transaction failed; the original
      // swallowed the error here, making tx() appear to succeed.
      throw e
    }
  } finally {
    client.release()
  }
}
export { tx }
Usage:
...
let result;
await tx(async client => {
const { rows } = await client.query<{ cnt: string }>('SELECT COUNT(*) AS cnt FROM users WHERE username = $1', [username]);
result = parseInt(rows[0].cnt) > 0;
});
return result;
I want to upsert document in the following way
// Fire one upsert per event. NOTE(review): each update() call is
// asynchronous, but this loop does not wait for any of them to complete —
// see the answer below for a version that sequences the calls and signals
// completion.
for (var i = 0; i < req.body.app_events.length; i++ ) {
console.log(req.body.app_events[i].event_key);
//delete upsertData._id;
Appusers.update({app_key: req.body.app_key, e_key:req.body.app_events[i].event_key}, {
$set : {
app_key:req.body.app_key,
e_key: req.body.app_events[i].event_key,
e_name: req.body.app_events[i].event_name
}}, { upsert: true}, function(err, data){
if(err) return console.log(err);
console.log(data);
});
}
It is creating a single document with _id only. I want to insert the document if it does not exist; otherwise, update it on the basis of e_key and app_key.
You really should not be calling asynchronous functions inside a synchronous loop. What you need is something that respects the callback on completion of the loop cycle and will alert when the update is complete. This makes incrementing counters externally safe.
Use something like async.whilst for this:
var i = 0;
async.whilst(
  // Loop test: keep iterating while unprocessed app_events remain.
  function() { return i < req.body.app_events.length; },
  // Loop body: upsert one event, then signal async.whilst via `callback`.
  function(callback) {
    console.log(req.body.app_events[i].event_key);
    Appusers.findOneAndUpdate(
      { app_key: req.body.app_key, e_key: req.body.app_events[i].event_key },
      {
        $set : {
          app_key: req.body.app_key,
          e_key: req.body.app_events[i].event_key,
          e_name: req.body.app_events[i].event_name
        }
      },
      { upsert: true },
      function(err, data) {
        // Bail out on error. Without this `return`, execution fell through
        // and invoked `callback` a second time, corrupting async.whilst.
        if (err) return callback(err);
        console.log(data);
        i++;
        callback();
      }
    );
  },
  // Final handler: reached when the test fails or an iteration errored.
  function(err) {
    if (err) {
      console.log(err);
    }
    // else: all updates completed — continue the request from here.
  }
);
Now the loop is wrapped with a "callback" which is called in itself within the callback to the update method. Also if you expect a "document" back then you should be using .findOneAndUpdate() as .update() just modifies the content and returns the number affected.
When the loop is complete or when an error is passed to the callback, then handling is moved to the last function block, where you complete your call or call other callbacks as required.
Better than above. Dig into the native driver methods for Bulk operations. You need to be careful that you have an open connection to the database already established. If unsure about this, then try to always wrap application logic in:
// `mongoose.connection` is a Connection object, not a function — calling it
// as one throws. Wait for its 'open' event before running app logic:
mongoose.connection.once('open', function() {
  // app logic here
});
Which makes sure the connections have been made. The mongoose methods themselves "hide" this away, but the native driver methods have no knowledge.
But this is the fastest possible listing to update the data:
var i = 0;
// Ordered bulk op: batches up to 1000 upserts into one server round trip.
var bulk = Appusers.collection.initializeOrderedBulkOp();
async.whilst(
  function() { return i < req.body.app_events.length; },
  function(callback) {
    console.log(req.body.app_events[i].event_key);
    bulk.find(
      { app_key: req.body.app_key, e_key: req.body.app_events[i].event_key }
    ).upsert().updateOne({
      $set : {
        app_key: req.body.app_key,
        e_key: req.body.app_events[i].event_key,
        e_name: req.body.app_events[i].event_name
      }
    });
    i++;
    if ( i % 1000 == 0) {
      // Flush a full batch, then start a fresh bulk builder.
      bulk.execute(function(err,response) {
        // `return` prevents the double-callback the original had here.
        if (err) return callback(err);
        console.log(response);
        bulk = Appusers.collection.initializeOrderedBulkOp();
        callback();
      })
    } else {
      callback();
    }
  },
  function(err) {
    if (err) {
      console.log(err);
    } else if ( i % 1000 != 0 ) {
      // Flush the final partial batch.
      bulk.execute(function(err,response) {
        if (err) console.log(err)
        console.log(response);
        // done
      });
    }
    // else: event count was an exact multiple of 1000 — nothing left to
    // flush. (The original `else` followed by only a comment was a
    // SyntaxError.)
  }
);
The Bulk methods build up "batches" of results ( in this case 1000 at a time ) and send all to the server in one request with one response ( per batch ). This is a lot more efficient than contacting the database once per every write.
async.forEach(vsr.vehicles, function(vsr_vehicle, callback){
pjCustom.vehicleJson(vsr_vehicle, function(vehicleInitialize){
Vehicle.find({ where: { vehicleID: (vsr_vehicle.vehicleID).toString().trim() } }).success(function(vehicleFound){
if(vehicleFound){
//Code Logic is working fine.
}else{
// NOTE(review): `vehicleBuild` is used here before the `var` below assigns
// it, so save() cannot be operating on the freshly built vehicle.
vehicleBuild.save().success(function(vehicleNew){ // To create new vehicle of updated vsr
var vehicleBuild = Vehicle.build(vehicleInitialize)
pj.log("Update vehicle ............................")
temp.push(vehicleNew.vehicleID)
})
}
})
})
// BUG (fixed in the answer below): calling back here, outside the async
// work, tells async.forEach this item is done before the DB calls finish.
callback()
},function(){
res.send(204)
})
//vehicleJSON
exports.vehicleJson = function(vsr_vehicle, callback){
pjCustom.getVehicle(vsr_vehicle, function(status, vehicleId){
if (status == true) {
vsr_vehicle.vehicleID = vehicleId
callback(
{ 'vehicleID':vsr_vehicle.vehicleID).toString().trim(),'vsr_id':vsr_vehicle.vsr_id})
}
})
}
//getvehicle
// Resolve a vehicle ID: reuse the one on the record if present, otherwise
// derive a new one from the most recent vehicle of the same vsr.
exports.getVehicle = function(vsr_vehicle, callback){
// If the vehicle already carries an ID, hand it straight back.
if(vsr_vehicle.vehicleID !== undefined){
callback(true, vsr_vehicle.vehicleID)
}else{
// Fetch the latest vehicle for this vsr (highest id wins)...
Vehicle.find({ where: { 'vsr_id': vsr_vehicle.vsr_id },
attributes: ['id', 'vehicleID'],'order': 'id DESC', 'limit': '1'
}).success(function(vehicles){
// ...and bump the numeric third segment of its "AA-BB-NN" style ID.
var temp = (vehicles.vehicleID).split("-")
var newvehicleId = temp[0]+"-"+temp[1]+"-"+(parseInt(temp[2])+1)
callback(true, newvehicleId)
})
// NOTE(review): no .error() handler — callback is never invoked if the
// query fails; and two concurrent calls can derive the same new ID.
}
}
Explanation:
While inserting a record from vsr_vehicle, I need to check whether the vehicleID is present; if so, it is fetched, and if not, a new ID is created.
Consider this code is for updating a vehicle as well as inserting another "two" new vehicles. how to manage async process. of insertion of new vehicles.
It is not waiting for completion of the first iteration before calling vehicleJson again, so it generates the same vehicleID for both new vehicles. Please suggest how to complete this challenge.
My Code is clearly written here.
Before reading further, please copy the code and paste it into any JS editor; you will likely understand it better than from my explanation.
Your callback call in series.forEach is at the incorrect place. Here is the correction:
async.forEach(vsr.vehicles, function(vsr_vehicle, callback){
pjCustom.vehicleJson(vsr_vehicle, function(vehicleInitialize){
Vehicle.find({ where: { vehicleID: (vsr_vehicle.vehicleID).toString().trim() } }).success(function(vehicleFound){
if(vehicleFound){
callback(); // <--- call here
}else{
// NOTE(review): as in the question, `vehicleBuild` is referenced before
// the `var` below assigns it; only the callback placement is fixed here.
vehicleBuild.save().success(function(vehicleNew){ // To create new vehicle of updated vsr
var vehicleBuild = Vehicle.build(vehicleInitialize);
pj.log("Update vehicle ............................");
temp.push(vehicleNew.vehicleID);
callback(); // <--- call here
});
}
});
});
// callback(); // <--- Don't call here
},function(){
res.send(204);
});
BTW, for good practice, use semicolon (";") at the end of javascript statements
One of the advantages of NodeJS is its async and non-blocking I/O, which in my case is great on the one hand, but breaks my neck every day on the other hand.
I consider myself a NodeJS / Async novice and I often end up having such code:
function(req, res) {
// Validate and sanitize the lobby form input (express-validator).
req.assert("name", "Lobbyname is required").notEmpty();
req.assert("name", "Lobbyname length should be between 4 and 64 characters").len(4, 64);
req.assert("game", "Game not found").isInt();
req.sanitize("game").toInt();
var userId = req.user.id;
var errors = req.validationErrors();
var pg_errors = [];
var games = null;
if (errors) {
console.log(errors);
// Validation failed: reload the games list so the form can be re-rendered
// with the validation errors shown.
client.query("SELECT * FROM games", function(err, result) {
if (!err) {
games = result.rows;
res.render("lobby/create", {
title: "Create a new lobby",
games: games,
errors: errors.toString()
});
}
else {
res.send("error");
}
});
}
else {
errors = null;
// Is the user already a member of some lobby?
client.query("SELECT COUNT(*) as in_lobbies FROM users u RIGHT JOIN lobby_userlist ul ON ul.user_id = u.id WHERE u.id = $1", [userId], function(err, result) {
if (!err) {
console.log(result.rows[0]);
if (result.rows[0].in_lobbies < 1) {
// Is the user already hosting a lobby?
client.query("SELECT COUNT(*) as hosting_lobbies FROM lobbies WHERE owner = $1", [userId], function(err, result) {
if (!err) {
if (result.rows[0].hosting_lobbies < 1) {
client.query("INSERT INTO lobbies(name, game, owner) VALUES($1, $2, $3)", [req.param("name"), req.param("game"), userId], function(err, result) {
if (!err) {
res.redirect("/lobby");
}
else {
pg_errors.push(err);
console.log(err);
}
});
}
else {
// NOTE(review): this path sets `errors` but never sends a response.
errors = "You can only host one lobby at a time";
}
}
else {
pg_errors.push(err);
client.query("SELECT * FROM games", function(err, result) {
if (!err) {
games = result.rows;
res.render("lobby/create", {
title: "Create a new lobby",
games: games,
errors: errors
});
}
else {
pg_errors.push(err);
}
});
}
});
}
else {
pg_errors.push(err);
}
}
});
// NOTE(review): everything below runs synchronously, before any of the
// query callbacks above have fired, so pg_errors is always empty here —
// this is the async-ordering problem the question describes.
console.log("pg_errors");
console.log(pg_errors);
console.log("pg_errors _end");
if (pg_errors.length < 1) {
console.log("no errors");
}
else {
console.log(pg_errors);
res.send("error service operation failed");
}
}
}
This an example I have written using the following npm packages:
pg (native)
express
express-validator (middleware of node-validator)
passport (auth middleware)
Checking whether the input given by the user is valid or not is the least problem, I have this checks where I assert the variables and give back a rendered version of the page printing out the errors to the user.
BUT if we pass the validation errors in the first place we assume the "lobby" is ready to be inserted into the database, before I want to ensure that the user has no other lobby open and is not member of another lobby.
Well now I end up putting one query into another and theoretically I would have to put my view render function (res.render()) into every query callback if the query encounters an error or returns a result which inidicates that the user is not allowed to create a lobby.
I don't want that and it doesn't seem very practicable.
I tried removing the render logic and every other logic from the query callbacks and instead let the query callbacks set error arrays or variables which would indicate a success or a failure and below my query code I would check if(errors) renderPageWithErrors.
This lead to strange errors due to the async behaviour of nodejs in which case res.redirect() was called after res.render() and stuff like that.
I had to move my res.render back into the query callbacks.
Is there a proper way of doing this?
You might want to look into an async library such as https://github.com/caolan/async. It helps structure async code so that it doesn't turn into a mess like this. There are different methods depending on your requirements from simple series and parallel execution to things like waterfall to auto which does dependency tracking.
async.auto({
get_data: function(callback){
// async code to get some data
},
make_folder: function(callback){
// async code to create a directory to store a file in
// this is run at the same time as getting the data
},
write_file: ['get_data', 'make_folder', function(callback){
// once there is some data and the directory exists,
// write the data to a file in the directory
callback(null, filename);
}],
email_link: ['write_file', function(callback, results){
// once the file is written let's email a link to it...
// results.write_file contains the filename returned by write_file.
// NOTE(review): the (callback, results) argument order matches async 1.x;
// async 2.x+ task functions receive (results, callback) — verify the
// installed version.
}]
}, function(err) {
// everything is done or an error occurred
});
The other nice thing it does is consolidate all errors into a single callback. That way you only have to handle errors in one place instead of them sprinkled throughout your code.
You might want to check for https://github.com/0ctave/node-sync library as well. It's a syntax sugar for nodejs Fibers, a way to write asynchronous code in a traditional way without breaking nodejs event loop model. There are a lot of discussions about pros and cons of using Fibers, but I prefer code readability and ease of development over potential small resource usage increase.
I don't know all of your code logic, but function above can look something like this:
function(req, res) {
Sync(function() {
req.assert("name", "Lobbyname is required").notEmpty();
req.assert("name", "Lobbyname length should be between 4 and 64 characters").len(4, 64);
req.assert("game", "Game not found").isInt();
req.sanitize("game").toInt();
var userId = req.user.id;
var errors = req.validationErrors();
var pg_errors = [];
var games = null;
if (errors) {
console.log(errors);
var games = client.query.sync(client, "SELECT * FROM games").rows;
games = result;
res.render("lobby/create", {
title: "Create a new lobby",
games: games,
errors: errors.toString()
});
}
else {
errors = null;
var result = client.query.sync(client, "SELECT COUNT(*) as in_lobbies FROM users u RIGHT JOIN lobby_userlist ul ON ul.user_id = u.id WHERE u.id = $1", [userId]);
console.log(result.rows[0]);
if (result.rows[0].in_lobbies < 1) {
var result = client.query.sync(client, "SELECT COUNT(*) as hosting_lobbies FROM lobbies WHERE owner = $1", [userId]);
if (result.rows[0].hosting_lobbies < 1) {
var res = client.query.sync(clien, "INSERT INTO lobbies(name, game, owner) VALUES($1, $2, $3)", [req.param("name"), req.param("game"), userId]);
res.redirect("/lobby");
}
else {
errors = "You can only host one lobby at a time";
}
}
else {
var games = client.query.sync(client, "SELECT * FROM games").rows;
res.render("lobby/create", {
title: "Create a new lobby",
games: games,
errors: errors
});
};
}
}, function(err) {
if(err) {
// do your error handling here
}
});
}