Sequelize and WITH HOLD cursors - node.js

I'm trying to implement cursor pagination with PostgreSQL and Sequelize. I have successfully created a WITH HOLD cursor inside a transaction and fetched data from it:
sequelize.transaction(function (t) {
    const query = "DECLARE my_cur CURSOR WITH HOLD FOR SELECT foo, bar FROM x " +
                  "WHERE time='2016-09-16'::date;"
    return sequelize.query(query, {transaction: t}).spread(function (results, metadata) {
        console.log("Cursor created!")
    });
}).then(function (result) {
    console.log(result)
    console.log("Transaction committed")
    sequelize.query("FETCH NEXT FROM my_cur").spread(function (results, metadata) {
        console.log("Fetching from previously created cursor:")
        console.log(results)
        console.log(metadata)
    }).catch(function (err) {
        console.log("Failed to fetch from cursor")
        console.log(err)
    });
}).catch(function (err) {
    console.log("Failed to create a cursor")
    throw err
});
If I try fetching from the cursor in a different session I get:
FETCH NEXT FROM my_cur;
ERROR: cursor "my_cur" does not exist
Cursors, even if declared WITH HOLD, are destroyed when the session closes and aren't shared between sessions. My question is: how does Sequelize handle sessions with PostgreSQL, and will I be able to FETCH from this cursor on a separate API call?
socket.on("fetch_next_page", function()){
var cursor = socket.session.cursor
var pageSize = 10
sequelize.query("FETCH +"pageSize"+ FROM my_cur").spread(function (results,metadata) {
console.log("Fetching from previously created cursor:")
console.log(results)
socket.emit("page",results)
console.log(metadata)
}).catch(function(err){
console.log("Failed to fetch from cursor")
console.log(err)
});
}

The easiest way to execute a sequence of queries within the same session is via a task or transaction as implemented within pg-promise.
See Tasks and Transactions.
You would either promise-chain your queries, if they have a dependency between them, or execute them as a batch, if they don't.
And if you need to iterate N times inside the session, such as calling FETCH until a certain condition is met, you can also make use of the sequence method.
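A minimal sketch of that approach, assuming a pg-promise database object db and the same table x as in the question (the connection string and page size of 10 are placeholders); inside the transaction no WITH HOLD is needed, because every query runs on the same connection:
const pgp = require('pg-promise')();
const db = pgp('postgres://user:password@localhost:5432/mydb'); // placeholder connection string

db.tx(function (t) {
    // all queries in this callback share one connection/session
    return t.none("DECLARE my_cur CURSOR FOR SELECT foo, bar FROM x " +
                  "WHERE time='2016-09-16'::date")
        .then(function () {
            return t.any('FETCH 10 FROM my_cur');   // first page
        })
        .then(function (firstPage) {
            console.log(firstPage);
            return t.any('FETCH 10 FROM my_cur');   // next page from the same cursor
        });
    // for an open-ended number of FETCH calls, t.sequence can drive the loop instead
}).then(function (lastPage) {
    console.log(lastPage);
}).catch(function (err) {
    console.log(err);
});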

Related

Node JS and node-firebird multiple queries in one transaction

I have now tried for two days without any good result.
transaction.query("INSERT INTO INVOICE (INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) RETURNING ID",array, function(err, result) {
if (err){
res.end(err.toString());
transaction.rollback();
db.detach();
return result;
}
})
And when I do a new query, it only starts a new transaction, and I cannot rollback or commit. I only get this message:
invalid transaction handle (expecting explicit transaction start)
How do I keep the first transaction every time I do a new query?
db.transaction seems to start a transaction, but when I run queries inside it, each query only starts a new one.
db.transaction(Firebird.ISOLATION_READ_COMMITED, await async function(err,transaction) {
});
I want it like this:
transaction.query("INSERT INTO INVOICE (INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) RETURNING ID",array, function(err, result) {
if (err){
res.end(err.toString());
transaction.rollback();
db.detach();
return result;
}
})
transaction.query("INSERT INTO INVOICE (INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) RETURNING ID",array, function(err, result) {
if (err){
res.end(err.toString());
transaction.rollback();
db.detach();
return result;
}
})
transaction.commit(function(err) {
if (err)
transaction.rollback();
else
db.detach();
});
With the same transaction.
I have tried and tried, without success.
I see that for MySQL it is easy, but not with node-firebird.
I looked at the test index on GitHub. It has transactions and multiple inserts, but it does not show how to use the same transaction outside that block of code, so it is not useful here; it also does not work when looping through an array of items and doing the inserts.
It's only for node-firebird that I can't find this info.
I tried to search for examples, but they don't exist.
I got it to work.
I use promises and await, inside a loop that iterates through all the new row items.
One problem with this was something strange.
When the insert SQL uses RETURNING ID, it crashes on every second row. I spent a whole day figuring this out.
The RETURNING clause does not work well with several inserts in the same transaction, although it works fine without a transaction.
This looks like a bug in either node-firebird or the Firebird engine.
I had to do it another way: I first do a manual select from a procedure that returns the current highest row id + 1, and then I just do rowID++ for each insert (a hypothetical sketch of that step is below).
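The answer does not show how rowID is seeded; a hypothetical version of that step, using a plain MAX(ID) query on the INVOICE table instead of the author's procedure, could look like this:
// Hypothetical seeding of rowID with a plain MAX(ID) query
// (the author uses a stored procedure for this instead).
function getNextRowId() {
    return new Promise((resolve, reject) => {
        db.query('SELECT MAX(ID) + 1 AS NEXT_ID FROM INVOICE', function (err, result) {
            if (err) return reject(err);
            resolve(result[0].NEXT_ID || 1); // start at 1 for an empty table
        });
    });
}
// before the insert loop:
// rowID = await getNextRowId();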
This is my solution:
var addItems = async function (i, params, ID) {
    return new Promise((resolve, reject) => { // important for waiting for the result
        var params2 = [];
        params2.push(rowID);
        params2 = params2.concat(params);
        transaction.query("INSERT INTO INVOICE (ID, INVOICE_NO,INVOICE_NO_TMP,UNIT_TYPE,UNIT_TYPE_TEXT,TAX_PERCENT,QTY,SALE_PRICE,SERIALNUMBER,DISCOUNT,PARTNO,PARTNO_ID,DESCRIPTION) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?) ", params2, function(err, result) {
            if (err) {
                transaction.rollback();
                db.detach();
                reject(err.toString());
                return false;
            }
            resolve(true);
        });
    });
}
And I call it like this, inside the loop:
var result = await addItems(i, params, invoice.head[0].INVOICE_NO_TMP)
if (result == true) {
    rowID++;
} else {
    transaction.rollback();
    reject(result.toString());
    break;
}
And after the loop I do:
transaction.commit(function(err) {
    if (err) {
        reject(err.toString());
        transaction.rollback();
        db.detach();
        return false;
    }
    db.detach();
});
And, in the end, I finish with:
var result=await updateFromList();
res.end(result);
The updateFromList function is the function that starts the insert or update of all the rows.
It was not easy to build this before I realized that RETURNING ID was crashing everything; it took a lot of testing to track that down.
It works well now.
It was important to be able to use a transaction, because without one the data is very unstable.
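For reference, a condensed sketch of the pattern described above (promisified inserts that reuse one transaction, followed by a single commit), assuming node-firebird's callback API; the table and columns are placeholders and RETURNING is avoided as noted:
// One transaction reused for every insert, committed once at the end.
const Firebird = require('node-firebird');

function insertRows(db, rows) {
    return new Promise((resolve, reject) => {
        db.transaction(Firebird.ISOLATION_READ_COMMITED, (err, transaction) => {
            if (err) return reject(err);
            const insertNext = (index) => {
                if (index >= rows.length) {
                    // every row was inserted on this transaction: commit once
                    return transaction.commit((commitErr) => {
                        if (commitErr) return transaction.rollback(() => reject(commitErr));
                        resolve(rows.length);
                    });
                }
                transaction.query(
                    'INSERT INTO INVOICE (ID, PARTNO, DESCRIPTION) VALUES (?,?,?)',
                    rows[index],
                    (insErr) => {
                        if (insErr) {
                            // the first failure rolls back the whole batch
                            return transaction.rollback(() => reject(insErr));
                        }
                        insertNext(index + 1); // same transaction object, next row
                    }
                );
            };
            insertNext(0);
        });
    });
}
The caller can then await insertRows(db, rows) and detach the connection afterwards.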

ArangoDB Transaction doesn't rollback on error

UPDATE: SOLUTION FOUND. ARANGODB CLUSTER DOES NOT SUPPORT TRANSACTIONS. IT IS ONLY SUPPORTED ON SINGLE INSTANCES.
I am trying to use the transactions function of the arangoJS library. The function I will present is just a dummy function that inserts two records and then tries to get a document that doesn't exist. Getting the nonexistent document generates an error, and the transaction should roll back. Indeed, the error is generated after trying to get the document that doesn't exist. However, the database does not roll back, and the two inserted documents remain in the database. Does anyone know how to solve this?
"updateCustomer" : function (options, cb) {
const action = String(function (params) {
// This code will be executed inside ArangoDB!
const db = require('#arangodb').db;
const aql = require('#arangodb').aql;
const customer = db._collection('customer');
try{
//insert two documents
db._query(aql`INSERT ${params.user} INTO ${customer} Return NEW`);
db._query(aql`INSERT ${params.customer} INTO ${customer} Return NEW`);
//Get a document that doesn't exist
customer.document('does-not-exist');
}catch(e){
throw new Error("Everything is bad");
}
});
let opts = {
collections : {
read : ["customer"],
write : ["customer"]
},
action : action,
params : {user: options, customer: options},
lockTimeout : 5
};
Arango.transaction(opts,(err, result) => {
console.log("err: " + err);
console.log("result: " + JSON.stringify(result));
return cb(err, result);
});
}
"transaction" : function (options, cb) {
utils.dbConnect().transaction(options.collections, options.action, options.params, options.lockTimeout, cb);
}
UPDATE: I tried this transaction on a single-instance ArangoDB and it worked. However, it did not work on a cluster. Is there no support for transactions on ArangoDB clusters?
Single-document operations are atomic in ArangoDB clusters. Multi-document operations are not, as of now. We are currently working on ACID guarantees for multi-document operations.

Query data from postgresql functions not working in app, but works when testing manually

I ran into an annoyance when trying to use simple functions to query data for my web app. The idea is to use one function to list the contents of one table, and a second function to take the user-selected record_id from that table and query the detailed contents in another table.
When running, the app executed the two functions without any error, but no data came back. I checked the console and found the second function's query result is null (I console.log the inputs to the second function and they appear to be the correct query keys), and I am sure the database has the data being queried.
I tried:
using the psql command line to query the data with the same query keys; I get the results without any problem;
running the two functions from a node command line, providing the query keys myself; this also gave me the correct results.
So the functions should work. Now my question is: why, when they are put in the app and run by themselves, do they not get the query results?
I am using pg = require("pg"); and const pool = new pg.Pool(config) for the database connection.
Any shared experience will be much appreciated.
(UPDATE) The functions are shown below:
function listItemDB(callback) {
    pool.connect(function(err, client, done) {
        if (err) {
            return console.error('error fetching client from pool', err);
        }
        // use the client for executing the query
        client.query(`SELECT * FROM basicdb.items`,
            function(err, result) {
                // call `done(err)` to release the client back to the pool (or destroy it if there is an error)
                done(err);
                if (err) {
                    return console.error('error running query', err);
                }
                // console.log(result.rows);
                callback(result.rows);
            });
    });
}
The above function only fetches "item1" and "dataset1" for later use and passes them as arguments to the function below. It does its job perfectly.
function getFileName(itemName, datasetName, callback) {
    let fileName;
    console.log(itemName, datasetName);
    pool.connect(function(err, client, done) {
        if (err) {
            return console.error('error fetching client from pool', err);
        }
        client.query("SELECT * " +
            "FROM basicdb.dataset " +
            "INNER JOIN basicdb.items " +
            "ON basicdb.dataset.item_id = basicdb.items.item_id " +
            "WHERE (basicdb.items.item_name = ($1)) " +
            "AND (basicdb.dataset.datasetname = ($2))", [itemName, datasetName],
            function (err, result) {
                done();
                if (err) {
                    return console.error('error running query', err);
                }
                let records = result.rows;
                fileName = records[records.length - 1].filename;
                callback(fileName);
            });
    });
}
The function above retrieves the filename so the main app can use it. The code that calls it in my main app.js looks like this:
db.getFileName("item1","dataset1",function(fileName) {
//do something with the fileName....}
("db" is the module name which include the functions.)
I finally found the problem, which is a low-level mistake that has nothing to do with the database or the queries.
The item names coming from the dropdown list in the app, which get fed into the function arguments, have a single space attached to the end of the name (I don't know why), which never equals the record in the database, so the app never gets a query result. In the manual tests I hardcoded the item name, which does equal the record in the database. Since it is just a space, even when I console.log(itemName) I did not notice it at the end.
It turns out to be a mistake of a single space.
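A small guard against this kind of issue, as a sketch (getFileName is the function from the question; the wrapper name is made up): trim the user-supplied keys before they reach the query, and log them with delimiters so stray whitespace becomes visible.
// Hypothetical wrapper: trim dropdown-supplied keys before querying.
function getFileNameTrimmed(itemName, datasetName, callback) {
    // logging with delimiters makes a trailing space visible, e.g. "[item1 ]"
    console.log("[" + itemName + "] [" + datasetName + "]");
    getFileName(String(itemName).trim(), String(datasetName).trim(), callback);
}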

How to insert data to db2 using node js (ibm_db)

Hi, can anyone give an example of how to use an insert statement in Node.js? I am able to use a select query, but for an insert query I get the result as []; no error can be seen, but the values are not added to the table. I am using DB2, ibm_db, Express, Node.js and AngularJS.
I wrote a blog entry on using DB2 and node.js on Bluemix a while ago. It includes code for an INSERT statement.
As part of the insert:
first prepare the statement,
then bind the values to be inserted, and
finally execute the statement.
Here is the relevant code snippet, the full context is in the blog:
exports.insertIP = function(ibmdb, connString, ipinfo) {
    console.log("insertIP called", ipinfo);
    ibmdb.open(connString, function(err, conn) {
        if (err) {
            res.send("error occurred " + err.message);
        }
        else {
            // prepare the SQL statement
            conn.prepare("INSERT INTO IP.VISITORS(vtime,ip,country_code,country,region_code,region,city,zip,latitude,longitude,metro,area) VALUES (current timestamp,?,?,?,?,?,?,?,?,?,?,?)", function(err, stmt) {
                if (err) {
                    // could not prepare for some reason
                    console.log(err);
                    return conn.closeSync();
                }
                // bind and execute the statement asynchronously
                stmt.execute([ipinfo["ip"], ipinfo["country_code"], ipinfo["country_name"], ipinfo["region_code"], ipinfo["region_name"], ipinfo["city"], ipinfo["zipcode"], ipinfo["latitude"], ipinfo["longitude"], ipinfo["metro_code"], ipinfo["area_code"]], function (err, result) {
                    console.log(err);
                    // close the connection to the database
                    conn.close(function() {
                        console.log("Connection Closed");
                    });
                });
            });
        }
    });
};
I would suggest and recommend (as one of the members of node-ibm_db) following the node-ibm_db GitHub repository (https://github.com/ibmdb/node-ibm_db); we have updated the README document as well as the list of APIs for particular tasks.
For your query above you can use the ".prepare(sql, callback)" or ".prepareSync(sql)" API (async or sync call, as per your requirements); below is a code snippet and the URL for the API documentation.
var ibmdb = require("ibm_db"),
    cn = "DATABASE=dbname;HOSTNAME=hostname;PORT=port;PROTOCOL=TCPIP;UID=dbuser;PWD=xxx";

ibmdb.open(cn, function(err, conn) {
    conn.prepare("insert into hits (col1, col2) VALUES (?, ?)",
        function (err, stmt) {
            if (err) {
                // could not prepare for some reason
                console.log(err);
                return conn.closeSync();
            }
            // bind and execute the statement asynchronously
            stmt.execute(['something', 42], function (err, result) {
                if (err) console.log(err);
                else result.closeSync();
                // close the connection
                conn.close(function(err) {});
            });
        });
});
API documentation(Github URL) : https://github.com/ibmdb/node-ibm_db#-8-preparesql-callback
Try installing jt400 using the command below:
npm install node-jt400 --save
Then use the code below to insert data into the table foo.
Follow the link https://www.npmjs.com/package/node-jt400 for details.
pool
    .insertAndGetId('INSERT INTO foo (bar, baz) VALUES(?,?)', [2, 'b'])
    .then(id => {
        console.log('Inserted new row with id ' + id);
    });

I'm trying to add elements to an array in a nested query in Mongoose using Node.js

I'm getting an empty array at the end of async.waterfall and I'm not sure why; this is what my code looks like:
exports.GetJobs = function(req, res) {
    var Jobs = []; ///// Jobs is a global variable
    async.waterfall([
        function(next) {
            // get my alert
            UserAlertDB.find({User: req.user.id}, function(err, AlertResult) {
                next(null, AlertResult);
            })
        },
        function(AlertResult, next) {
            // You might get an error if you have not created an alert, so AlertResult[0].Words will not exist
            if (AlertResult) { // if AlertResult is not null then query by alert
                JobDB.find({title: new RegExp(AlertResult[0].Words, 'i')}, function (err, JobResults) {
                    if (err) console.log(err);
                    // If the job matches the requirements for the alert then push it to the list
                    JobResults.forEach(function(job) {
                        JobOffer.find({JobID: job._id, JobOfferOwnerID: req.user.id}, function(err, Offers) {
                            if (err) console.log("Error Inside Querying Jobs Result for Alert " + err);
                            if (Offers.length == 0) {
                                console.log("Jobs are : " + JSON.stringify(Jobs)) // when I print the Jobs array here it shows that a job is getting pushed into the array
                                Jobs.push(job);
                            }
                        })
                    })
                    next(err, Jobs) // But Jobs here is empty
                })
            } else {
                next("There is an error", null)
            }
        }
    ], function(err, Jobs) {
        console.log(JSON.stringify(Jobs)); ////// Getting empty Jobs here
        if (err) console.log("Error Inside Get Jobs Match Alert Data in Server : " + err);
        res.json(Jobs); ////// Jobs here is empty
    });
}
So, as you can see, when I try to send the Jobs array at the end with res.json(Jobs), Jobs is empty, although I have pushed those jobs into the Jobs array.
The problem in your code is that JobResults.forEach is synchronous and you are calling the asynchronous JobOffer.find inside the forEach loop. Therefore, your program doesn't wait for the asynchronous operations to finish and calls next(err, Jobs) immediately. Instead of forEach, use async.each and call next(err, Jobs) only when async.each has finished (see the sketch after the snippet below). I would also advise making sure you check the err value in every callback; for example, here you are passing null even though there might be an error:
// get my alert
UserAlertDB.find({User: req.user.id}, function(err, AlertResult) {
    next(err, AlertResult);
})
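A sketch of the second waterfall step rewritten with async.each, assuming the same models (JobDB, JobOffer) and variables (req, Jobs) as in the question:
function(AlertResult, next) {
    if (!AlertResult || !AlertResult.length) {
        return next("There is an error", null);
    }
    JobDB.find({title: new RegExp(AlertResult[0].Words, 'i')}, function (err, JobResults) {
        if (err) return next(err);
        async.each(JobResults, function (job, done) {
            JobOffer.find({JobID: job._id, JobOfferOwnerID: req.user.id}, function (err, Offers) {
                if (err) return done(err);
                if (Offers.length === 0) {
                    Jobs.push(job);
                }
                done();
            });
        }, function (err) {
            // runs only after every JobOffer.find has completed
            next(err, Jobs);
        });
    });
}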
Hope it helps, let me know if you need any other help with your code.
