Node JS and node-firebird multiple queries in one transaction - node.js

I have tried now in two days without any good result.
// Insert one invoice row inside the transaction; RETURNING ID asks Firebird to
// hand the generated key back in `result`.
transaction.query("INSERT INTO INVOICE (INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) RETURNING ID",array, function(err, result) {
if (err){
// On failure: report to the client, undo the transaction, release the connection.
res.end(err.toString());
transaction.rollback();
db.detach();
return result;
}
})
And when I do a new query, it only starts a new transaction, and I cannot roll back or commit. I only get this message:
invalid transaction handle (expecting explicit transaction start)
How do i keep the first transaction every time i do a new query?
The db.transaction call does seem to start a transaction, but queries run through it only start new ones.
// NOTE(review): `await async function(...)` is not a valid pattern here — `await`
// on a function expression just yields the function itself; node-firebird invokes
// the callback style. (ISOLATION_READ_COMMITED is node-firebird's own spelling.)
db.transaction(Firebird.ISOLATION_READ_COMMITED, await async function(err,transaction) {
});
i want it like this:
// Desired usage: two inserts plus a commit, all against the SAME transaction handle.
transaction.query("INSERT INTO INVOICE (INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) RETURNING ID",array, function(err, result) {
if (err){
res.end(err.toString());
transaction.rollback();
db.detach();
return result;
}
})
// Second insert, reusing the same transaction.
transaction.query("INSERT INTO INVOICE (INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) RETURNING ID",array, function(err, result) {
if (err){
res.end(err.toString());
transaction.rollback();
db.detach();
return result;
}
})
// Finally commit; roll back on commit failure, otherwise release the connection.
transaction.commit(function(err) {
if (err)
transaction.rollback();
else
db.detach();
});
With the same transaction.
I tried and tried, but without success.
I see that for mySQL it is easy, but not in node-firebird.
I looked at the test index on GitHub. It has transactions and multiple inserts, but it does not show how to use the same transaction outside that block of code, so it was not useful — it does not work when iterating over an array of items and doing the inserts.
It's only for node-firebird that i can't find this info.
I tried to search for examples, but they don't exist.

I got it to work.
I use promise and await. I use this in a loop for iterating through all new row items.
One problem with this was something strange.
When the insert SQL used RETURNING ID, it crashed on every second row. I spent a whole day figuring this out.
The RETURNING clause in the SQL doesn't work well with several inserts in the same transaction, but it works fine without a transaction.
A bug in node-firebird or Firebird engine.
I had to do it in another way. I first do a manual select from a procedure that results in a new highest row id + 1. And then i just do rowID++ each time i insert.
This is my solution:
// Insert a single invoice row (keyed by the module-level rowID counter) inside
// the shared transaction. Resolves true on success; on failure it rolls the
// transaction back, detaches from the database and rejects with the error text.
var addItems = async function (i, params, ID) {
  return new Promise((resolve, reject) => {
    // Prepend the manually managed row id to the column values.
    const values = [rowID].concat(params);
    transaction.query(
      "INSERT INTO INVOICE (ID, INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?) ",
      values,
      (err, result) => {
        if (!err) {
          resolve(true);
          return;
        }
        transaction.rollback();
        db.detach();
        reject(err.toString());
      }
    );
  });
}
And i call it with this, in the loop:
// Per-row driver: bump rowID only after a successful insert.
var result=await addItems(i,params,invoice.head[0].INVOICE_NO_TMP)
if (result==true){
rowID++;
}else{
transaction.rollback();
reject(result.toString());
break;
// NOTE(review): unreachable — the `break` above exits the loop first.
return false;
}
And after the loop i do:
// After the loop: commit everything, or roll back and detach on failure.
transaction.commit(function(err) {
if (err){
reject(err.toString());
transaction.rollback();
db.detach();
return false;
}
// Success: just release the connection; commit already persisted the rows.
db.detach();
});
And, in the end, i finish with:
// Kick off the whole insert/update run and send its outcome to the client.
var result=await updateFromList();
res.end(result);
The updateFromList function is that function that start the insert or update of all rows.
It was not easy to create this when I didn't realize that RETURNING ID was crashing it all. It took a lot of testing to figure out that this was what was crashing everything.
It works well now.
It was important to be able to use transaction, because without it, the data is very unstable.

Related

Model.findOne() function not working when passed a variable as the value

I am new to node.js.
I am trying to create function, where a randomly generated String is queried to check if it exists or not. If it already exists, the String is randomly generated till it is unique.
let validID = false;
console.log(temp); //temp is the randomly generated String.
// BUG (explained in the answer below): findOne is asynchronous — its callback
// can only run once the call stack empties, but this while-loop never yields,
// so validID can never become true and the loop spins forever.
while(!validID){
Website.findOne({shortcut: temp},function(err,docs){
if(docs==null){
validID = true;
console.log("The shortcut for the url is" + temp);
}else{
console.log("FOUND");
temp = generateId();
}
});
}
When run, the code is stuck in an infinite while loop.
I tried to see whether the code works with a String value ( not a variable ) passed in as the query inside findOne(). It worked. I am assuming that the fact that temp is a variable is causing the problem. Can variables be passed in as a value in a query? If so, what is the correct way?
Website.findOne operates asynchronously, i.e. the callback-function you passed to it, will be run once the results from the mongodb are fetched. However, node will not be able to actually process this callback, since your callstack never gets emptied due to your while-loop. If you're interested, you can find out more about this here.
One way to solve this is to wrap your Mongo-DB call in a promise, wait for it to resolve, then return if the ID is unique and continue by calling it recursively otherwise (note that this can be highly simplified by using async/await but for understanding how this works using promised are beneficial imo):
// Promisified wrapper around Website.findOne so callers can await the lookup.
// Resolves with the matching document (or null when the shortcut is unused);
// rejects with the underlying error on failure.
function findIdPromise(temp) {
  return new Promise((resolve, reject) => {
    Website.findOne({ shortcut: temp }, (err, docs) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(docs);
    });
  });
}
// Recursively probe shortcuts until a free one is found.
// BUG FIX: the original called findIdPromise() with no argument, so the lookup
// never actually checked the candidate shortcut; pass `shortcut` through so the
// uniqueness test is real.
function getNextIsUniqueIdPromise(shortcut) {
  return findIdPromise(shortcut)
    .then(docs => {
      if (docs == null) {
        return shortcut; // unused — safe to hand out
      }
      // Taken — generate a fresh candidate and try again.
      return getNextIsUniqueIdPromise(generateId());
    });
}
// call it initially with
getNextIsUniqueIdPromise(firstShortcutToCheck)
  .then(shortcut => {
    console.log("The shortcut for the url is" + shortcut); // fixed: stray ':' was a syntax error
  })
  .catch(err => {
    console.log("an error occured", err); // fixed: same stray ':' here
  });

nodejs express knex master detail record update

I have master(Workbook) & Child(WorkBookDataset). I am trying to update master and at the same time (insert or update or delete) child records. Everything works fine except it is not returning the updated child records. I know I am doing something wrong while specifying the 'then', and as it is always asyc operation, the result is already returned before child updates complete.
// Update the master (workbook) row, then insert/update/delete its child
// dataset rows — intended to happen inside one knex transaction.
var Promise = require('bluebird');
return knex.transaction(function(trx) {
return knex('workbook').where('workbookid',workbook.workbookid).andWhere('userid', workbook.userid)
.update(workbook)
.then(function(updatedrecords) {
return Promise.map(datasets, function(dataset) {
if(dataset.workbookdatasetid && dataset.workbookdatasetid == -1){
//remove
return knex('workbookdataset').where('workbookid',workbook.workbookid).andWhere('datasetid', dataset.datasetid)
.delete();
} else {
dataset.workbookid = workbook.workbookid;
// NOTE(review): missing `return` before this chain — Promise.map receives
// undefined here, so the outer promise can settle before this child
// update/insert finishes (this is the bug the answer below points out).
knex('workbookdataset').where('workbookid',workbook.workbookid).andWhere('datasetid', dataset.datasetid)
.then(function(alreadyds) {
if(alreadyds.length == 1){
//update
return knex('workbookdataset').where('workbookid',workbook.workbookid).andWhere('datasetid', dataset.datasetid)
.update(dataset)
}else{
//insert
if(dataset.workbookdatasetid){
delete dataset.workbookdatasetid;
}
return knex('workbookdataset')
.insert(dataset)
}
})
}
});
})
})
// NOTE(review): none of the inner queries call .transacting(trx), so they run
// on separate connections outside the transaction (also noted in the answer).
.then(function(updatedrecords) {
return getWorkBook(workbook.userid, workbook.workbookid); //this returns updated workbook information, but not updated workbookdataset information
});
I tried putting then() to Promise, but still same. Any help/pointer would be a great help! Thanks in advance!
You need a return on the line where you have knex('workbookdataset').where('workbookid'
Have a look at a .returning method from knex documentation. It allows you to return inserted/updated/deleted records from a database.
Also, I'd recommend you to check your promise chain to be sure you are returning correct values in correct places.
P.S. This is not related to question but if you are using transactions add to your queries .transacting(trx) to actually run them in one transaction instead of separately.
// Answer's example: .returning('*') yields the updated rows themselves, and
// .transacting(trx) makes the query actually run inside the transaction `trx`.
knex('workbook')
.where('workbookid',workbook.workbookid)
.andWhere('userid', workbook.userid)
.update(workbook)
.returning('*')
.transacting(trx)
.then(data => {
console.log(data) // => [{id: 1, workbookid: 1, userid: 981,...}, ...]
})

Query data from postgresql functions not working in app, but works when testing manually

I met an annoyance when trying to use simple function to query data for my web app. The idea is to use one function to list the contents of one table; the other function to use the user-selected record_id in this table to query the detailed contents data in another table.
When running, the app ran the two functions without any error while no data got. Checked the console and found the second function query results is null (I console.log the input for the second function, found they got and using the correct query keys). Since I am sure database has the data for query.
I tried:
use psql command line to query data using the same query keys, I have the results without problem;
I run a node command line, and try to run the two functions by providing the query keys, it also gave me the correct results.
So the functions should work. Now my question is why put them in the app and let them run by themselves, they did not get the query results?
I am using pg = require("pg"); and const pool = new pg.Pool(config) for database connection;
Your sharing of your experience will be very appreciated.
(UPDATE)The functions are like below:
// Fetch every row from basicdb.items and hand the row array to `callback`.
// Errors are logged and swallowed — on failure the callback is never invoked.
function listItemDB(callback) {
  pool.connect((connectErr, client, done) => {
    if (connectErr) {
      console.error('error fetching client from pool', connectErr);
      return;
    }
    client.query(`SELECT * FROM basicdb.items`, (queryErr, result) => {
      // Release the client back to the pool (or destroy it on error) first.
      done(queryErr);
      if (queryErr) {
        console.error('error running query', queryErr);
        return;
      }
      callback(result.rows);
    });
  });
}
The above function is only trying to get "item1" and "dataset1" for future use and pass them to below function args. It does its job perfectly.
// Look up the dataset rows matching (itemName, datasetName) and pass the
// filename of the last match to `callback`. Errors are logged and swallowed —
// on failure the callback is never invoked.
function getFileName(itemName, datasetName, callback) {
  console.log(itemName, datasetName);
  pool.connect((connectErr, client, done) => {
    if (connectErr) {
      console.error('error fetching client from pool', connectErr);
      return;
    }
    const sql =
      "SELECT * FROM basicdb.dataset INNER JOIN basicdb.items " +
      "ON basicdb.dataset.item_id = basicdb.items.item_id " +
      "WHERE (basicdb.items.item_name = ($1)) " +
      "AND (basicdb.dataset.datasetname = ($2))";
    client.query(sql, [itemName, datasetName], (queryErr, result) => {
      done(); // release the client back to the pool in every case
      if (queryErr) {
        console.error('error running query', queryErr);
        return;
      }
      // Last row wins when several rows match.
      const records = result.rows;
      callback(records[records.length - 1].filename);
    });
  });
}
This above function is trying to get the filename so the main app can use it. The code to call the above function in my main app.js is like below:
db.getFileName("item1","dataset1",function(fileName) {
  //do something with the fileName....
}); // fixed: the original snippet left the callback and the call unclosed
("db" is the module name which include the functions.)
I finally found the problem, which is a low-level mistake and has nothing to do with the database and the queries.
The item names came from the dropdown list in the app and were fed to the function args with one " " (space) attached to the end of the name (I don't know why), which never equals ("!=") the record in the database :-(, so the app never got query results. But for the function test, I hardcoded the item name, which exactly matches ("==") the record in the database. Since it is a " ", even when I console.log(itemName), I did not notice the space at the end.
It turns out to be 'A mistake of space'.

Sequelize and WITH HOLD cursors

I'm trying to implement cursor pagination with postgresql and sequelize, I have tried with success creating a cursor WITH HOLD on a transaction and fetching data from it.
sequelize.transaction(function (t) {
  // DECLARE ... WITH HOLD keeps the cursor alive after the transaction commits,
  // but only within the same database session/connection.
  const query = "DECLARE my_cur CURSOR WITH HOLD FOR SELECT foo, bar FROM x " +
    "WHERE time=\'2016-09-16\'::date;"
  return sequelize.query(query, {transaction: t}).spread(function (results,metadata) {
    console.log("Cursor created!")
  });
}).then(function (result) {
  console.log(result)
  console.log("Transaction commited")
  // Fetch outside the transaction: only works if this query happens to run on
  // the same pooled connection that declared the cursor.
  sequelize.query("FETCH NEXT FROM my_cur").spread(function (results,metadata) {
    console.log("Fetching from previously created cursor:")
    console.log(results)
    console.log(metadata)
  }).catch(function(err){
    console.log("Failed to fetch from cursor")
    console.log(err)
  });
}).catch(function (err) {
  console.log("Failed to create a cursor") // fixed: was unreachable after `throw err`
  throw err
});
If I try fetching from the cursor in a different session I get:
FETCH NEXT FROM my_cur;
ERROR: <<my_cur>> doesn't exist.
Cursors even if declared WITH HOLD are destroyed after a session closes and aren't shared between sessions, my question is, how does sequelize handle sessions with postgreSQL, will I be able to FETCH from this cursor on a separate API call?
// Page handler: each event fetches the next `pageSize` rows from the held cursor.
socket.on("fetch_next_page", function() { // fixed: `function()){` was a syntax error
  var cursor = socket.session.cursor
  var pageSize = 10
  // fixed: `"FETCH +"pageSize"+ FROM my_cur"` was broken string concatenation
  sequelize.query("FETCH " + pageSize + " FROM my_cur").spread(function (results,metadata) {
    console.log("Fetching from previously created cursor:")
    console.log(results)
    socket.emit("page",results)
    console.log(metadata)
  }).catch(function(err){
    console.log("Failed to fetch from cursor")
    console.log(err)
  });
}) // fixed: handler and socket.on call are now properly closed
The easiest way to execute a sequence of queries within the same session is via a task or transaction as implemented within pg-promise.
See Tasks and Transactions.
You would either promise-chain your queries, if they have a dependency between them, or execute them as a batch, if they don't.
And if you need to iterate N times inside the session, like calling FETCH till certain condition, you can also make use of method sequence.

Node+MongoDB: coll.find().toArray(cb) works for collection A, but never fires cb for collection B?

UPDATE: I've narrowed this down to what appears to be a different issue, and as such have asked a separate question here.
=======
I have a mongoDB instance running on localhost with two collections, "mydocs" (which has ~12,000 documents in it) and "mydoctypes" (which has only 7 documents in it).
I have a standalone NodeJS script which gets a connection to the database and then fires off the following:
// Load the entire (small, 7-doc) collection into memory and report its size.
myDb.collection('mydoctypes').find().toArray(function(err, results) {
console.log("Got results.");
if (err) {
console.log("err: " + err);
} else {
console.log("Got doctypes: " + results.length);
}
});
The output of that script is:
Got results.
Got doctypes: 7
If I modify the same script to access the 'mydocs' collection instead:
// Same pattern against the ~12,000-doc collection — per the update above, the
// callback never fires here, apparently because toArray() buffers everything.
myDb.collection('mydocs').find().toArray(function(err, results) {
console.log("Got results.");
if (err) {
console.log("err: " + err);
} else {
console.log("Got docs: " + results.length);
}
});
I get no output at all. The callback, apparently, never gets fired.
== UPDATE ==
So it looks like the problem was likely too many documents causing toArray() to run out of RAM.
Now, I'm using .each() to iterate, but having a different issue: each() is only running through the first batch (whatever I set batchSize to), and never loading any more documents. The code is this:
// Iterate the cursor in batches of 50; `item` is null once the cursor is done.
myDb.collection('mydocs').find().batchSize(50).each(function(err, item) {
  if (item != null) {
    process.stdout.write(".");
  }
}); // fixed: the original snippet never closed the each(...) call
Indeed as seen in the comments default mongodb driver for nodejs is returning a cursor, default cursor when triggered have a ~101 documents or around 1 MB batch size, you can modify this number using the batchSize function. But in order to iterate your collection you should stream it as following:
// Stream the collection with a cursor instead of toArray(), so the full result
// set never has to fit in memory at once.
MongoClient.connect('mongodb://localhost:27017/mydb', function(err, db) {
var cursor = db.collection('mycollection').find();
cursor.forEach(
// Per-document callback, invoked once for each doc the cursor yields.
function(doc) {
console.log(doc);
},
// End callback: fires on error, or with err == null when iteration finishes.
function(err) {
if (err) {
console.error(err);
} else {
//cursor has exausted, no more docs to iterate exit
return db.close();
}
});
});
The forEach method applied to the cursor is not the default JavaScript one from Arrays; it takes two callbacks: cb(doc), which is invoked for each document, and cb(err), which catches an error or fires when the cursor is exhausted.
You can use projection to lower the amount of data cursor.project({title: 1, name: 1}) and this will significantly reduce the amount of ram consumed.

Resources