Is it possible to have nested queries in Postgres pg module - node.js

Here is my code that tries to update a record in the database. If the record is not there, I want to insert it instead.
Is it OK to call client.query again, or what's the best way to do this?
const { Pool } = require('pg');
const pool = new Pool(POSTGRES_CONFIG);

pool.connect((err, client, release) => {
  if (err) {
    return console.error('Error acquiring client', err.stack);
  }
  ………
  client.query(query, queryValues, (err, result) => {
    release();
    if (result.rowCount <= 0) {
      //**** CAN I CALL IT AGAIN WITH OTHER PARAMETERS TO INSERT? ****
      client.query(....... => {
        release();
        if (err) {
          if (err.code === POSTGRES_ERRORS.UNIQUE_VIOLATION) {
            return console.error('KEY ALREADY EXISTS');
          } else {
            return console.error('query error', err);
          }
        }
      });
    }
  });
});

It is perfectly OK as long as you call release after you're done with the client. From the docs:
You must call the releaseCallback or client.release (which points to
the releaseCallback) when you are finished with a client.
So, you could do this:
client.query(query, queryValues, (err, result) => {
  // don't release just yet
  if (result.rowCount <= 0) {
    //**** CAN I CALL IT AGAIN WITH OTHER PARAMETERS TO INSERT? ****
    client.query(....... => {
      release(); // now you're done with the client so you can release it
      if (err) {
        if (err.code === POSTGRES_ERRORS.UNIQUE_VIOLATION) {
          return console.error('KEY ALREADY EXISTS');
        } else {
          return console.error('query error', err);
        }
      }
    });
  }
});
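For completeness, here is a minimal sketch of the same update-or-insert flow using the pool's promise interface with async/await and try/finally. The SQL text and the table/column names are placeholders, not the queries elided above; PostgreSQL's INSERT ... ON CONFLICT can also collapse the two statements into one.

// A sketch only: run inside an async function; table/column names are made up.
const client = await pool.connect();
try {
  const updated = await client.query(
    'UPDATE mytable SET value = $2 WHERE key = $1', [key, value]);
  if (updated.rowCount === 0) {
    await client.query(
      'INSERT INTO mytable (key, value) VALUES ($1, $2)', [key, value]);
  }
} catch (err) {
  if (err.code === '23505') { // unique_violation
    console.error('KEY ALREADY EXISTS');
  } else {
    console.error('query error', err);
  }
} finally {
  client.release(); // release exactly once, after all queries on this client are done
}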

Related

Query redshift cluster using NodeJs asynchronously

My task is to copy a few Redshift tables from one cluster to a new cluster.
For this I am writing a script in Node.js.
I am using the aws-sdk RedshiftData API to fetch the data.
I have two separate queries which I want to run in parallel. Following is my code:
class syncRedShiftNodes {
  constructor() { ... }

  readDataOne() {
    let newSqlQuery = `select * from ${this.tableName} limit 10`;
    const params = {
      ClusterIdentifier: clusterIdentifier,
      Sql: newSqlQuery,
      Database: database,
      DbUser: dbUser
    };
    return new Promise((resolve, reject) => {
      return awsRedshift.executeStatement(params, function (err, res) {
        if (err) console.log(err, err.stack); // an error occurred
        else {
          return awsRedshift.getStatementResult({ Id: res.Id }, function (error, data) {
            if (error) console.log(error, error.stack); // an error occurred
            else return data;
          });
        }
      });
    });
  }

  readDataTwo() { ... } // identical to the function above except for the query

  main() {
    return Promise.all([this.readDataOne(), this.readDataTwo()])
      .spread((data1, data2) => {
        console.log("promise resolved!!");
        return true;
      });
  }
}
The problem is that my code never reaches the "promise resolved" log. If I put a log inside the callback of the Redshift getStatementResult, it is printed correctly, but execution never reaches the Promise.all().then statement, and I don't understand why.
Another question I had in mind: is it good practice to use such a pattern inside a class?
You didn't resolve or reject your promise inside the class.
Example below
class syncRedShiftNodes {
  constructor() {}

  readDataOne() {
    let newSqlQuery = `select * from ${this.tableName} limit 10`;
    const params = {
      ClusterIdentifier: clusterIdentifier,
      Sql: newSqlQuery,
      Database: database,
      DbUser: dbUser,
    };
    return new Promise((resolve, reject) => {
      awsRedshift.executeStatement(params, function (err, res) {
        if (err) {
          console.log(err, err.stack);
          reject(err);
        } else {
          awsRedshift.getStatementResult(
            { Id: res.Id },
            function (error, data) {
              if (error) {
                console.log(error, error.stack);
                reject(error);
              } else {
                resolve(data);
              }
            }
          );
        }
      });
    });
  }

  readDataTwo() {}

  async main() {
    try {
      const result = await Promise.all([
        this.readDataOne(),
        this.readDataTwo(),
      ]);
      return result;
    } catch (err) {
      console.log(err);
    }
  }
}
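A side note: assuming aws-sdk v2, where each service call returns a request object with a .promise() method, the manual Promise wrapper can be avoided entirely. A sketch of readDataOne in that style, using the same params as above:

  // Sketch only: relies on aws-sdk v2's request.promise(); params as in the code above.
  async readDataOne() {
    const params = {
      ClusterIdentifier: clusterIdentifier,
      Sql: `select * from ${this.tableName} limit 10`,
      Database: database,
      DbUser: dbUser,
    };
    const res = await awsRedshift.executeStatement(params).promise();
    return awsRedshift.getStatementResult({ Id: res.Id }).promise();
  }

Either way, Promise.all only settles once both of its promises settle, which is why the original version, which never calls resolve or reject, hangs forever.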

Wait for function to complete in Nodejs

I am trying to call a function inside my POST API. I have multiple queries and want to wait for the function to complete before executing the next ones. The issue is that the function isn't completing its execution before the rest of the API runs. How can I wait for the function to complete its execution? I have searched but couldn't find anything convenient and related to my code.
Here's the code:
Node.js
function verifyEmail(mailToUpper) {
  var emailResult;
  db2.open(mydbConnection, (err, conn) => {
    if (!err) {
      console.log("Connected Successfully");
    } else {
      console.log("Error occurred while connecting to the database " + err.message);
    }
    conn.query(checkEmailQuery, [mailToUpper], (err, results) => {
      if (!err) {
        if (results.length > 0) {
          // res.write("Email already exists");
          emailResult = 0;
        } else {
          emailResult = 1;
        }
      }
      conn.close((err) => {
        if (!err) {
          console.log("Connection closed with the database");
        } else {
          console.log("Error occurred while trying to close the connection with the database " + err.message);
        }
      });
    });
  });
  return emailResult;
}

router.post('/api/postData', (req, res) => {
  // some stuff
  var waitingForResult;
  setTimeout(() => {
    waitingForResult = verifyEmail(mailToUpper);
  }, 2000);
  console.log(waitingForResult); // throwing an error of undefined
  if (waitingForResult === 1) { // not executing this
    // my other queries
  } else { // always executes this
    res.send("Email already exists");
  }
});
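The fix below wraps the database work in a Promise so the route handler can await it: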
function verifyEmail(mailToUpper) {
  return new Promise((resolve, reject) => {
    db2.open(mydbConnection, (err, conn) => {
      if (!err) {
        console.log("Connected Successfully");
      } else {
        console.log("Error occurred while connecting to the database " + err.message);
        return reject(err); // settle the promise so the caller isn't left hanging
      }
      conn.query(checkEmailQuery, [mailToUpper], (err, results) => {
        if (!err) {
          if (results.length > 0) {
            // res.write("Email already exists");
            resolve(0);
          } else {
            resolve(1);
          }
        } else {
          reject(err); // settle the promise on query errors as well
        }
        conn.close((err) => {
          if (!err) {
            console.log("Connection closed with the database");
          } else {
            console.log("Error occurred while trying to close the connection with the database " + err.message);
          }
        });
      });
    });
  });
}
router.post('/api/postData', async (req, res) => {
  const waitingForResult = await verifyEmail(mailToUpper);
  if (waitingForResult === 1) {
    // my other queries
  } else {
    res.send("Email already exists");
  }
});
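Note that once verifyEmail can reject (for example on a connection or query error), the await call should sit inside a try/catch so the route can send an error response; Express 4 does not catch rejections from async handlers on its own.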

Asynchronous programming sequencing problems in node.js

I have picked up a project where, when the Node.js program starts for the first time, no database exists yet. The program should create the database tables if they don't already exist.
However, in the sample program below, the data is not inserted if the database did not exist on the first run, because the SELECT statement fails.
The output of the code below is:
$ node dbtest.js
finished initialise_database
program ended.
select err: { Error: SQLITE_ERROR: no such table: mytable errno: 1, code: 'SQLITE_ERROR' }
successfully created mytable table
database closed.
As you can see from the logging, the code assumes synchronous execution.
I assume that what is happening is that the Node.js runtime uses different threads to schedule the database functions so they run in parallel.
I need the CREATE TABLE command to complete before proceeding. How would I achieve this?
Is there some standard way to achieve this in Node.js?
code below:
// npm install sqlite3 - to install sqlite3
const sqlite3 = require('sqlite3').verbose();

let db = initialise_database();
check_and_update(db); // Calling this function upon starting the server.
close_database(db);
console.log('program ended.');

function initialise_database() {
  // Establishing a database connection.
  let db = new sqlite3.Database('database1.db', (err) => {
    if (err) {
      return console.error(err.message);
    }
  });
  // new db always succeeds even if no file exists - if empty file, have to generate table structure
  db.run("CREATE TABLE IF NOT EXISTS 'mytable' ('num1' INTEGER, 'num2' INTEGER, 'text1' TEXT);", function (err) {
    if (err) {
      console.log("Create table error: ", err);
    }
    console.log("successfully created mytable table");
  });
  console.log("finished initialise_database");
  return db;
}

function check_and_update(db) {
  db.all("SELECT * FROM mytable", function (err, data) {
    if (err) {
      console.log("select err: ", err);
    } else {
      db.run('INSERT INTO mytable (num1, num2, text1) VALUES (?, ?, ?)', [1, 2, 'hi guys!!!'], function (err) {
        if (err)
          console.log("insert err: ", err);
      });
    }
  });
}

function close_database(db) {
  db.close((err) => {
    if (err) {
      return console.error(err.message);
    }
    console.log('database closed.');
  });
}
Database requests are asynchronous, so you have to deal with them in an asynchronous way, which can be:
Callback
Promise
Async/await
Otherwise you will try to perform a request on a database that has not been initialized yet.
Here is an example using Promise.
const sqlite3 = require('sqlite3').verbose();

let dbPtr = false;

initialise_database()
  .then((db) => {
    dbPtr = db;
    return check_and_update(dbPtr);
  })
  .then(() => {
    close_database(dbPtr);
    // All is done
    console.log('program ended.');
  })
  .catch((err) => {
    // Deal with the error
  });

function initialise_database() {
  return new Promise((resolve, reject) => {
    // Establishing a database connection.
    const db = new sqlite3.Database('database1.db', (err) => {
      if (err) {
        console.error(err.message);
        return reject(err);
      }
      db.run('...', function (err) {
        if (err) {
          console.log("Create table error: ", err);
          return reject(err);
        }
        console.log("successfully created mytable table");
        return resolve(db);
      });
    });
  });
}

function check_and_update(db) {
  return new Promise((resolve, reject) => {
    db.all('...', function (err, data) {
      if (err) {
        console.log("select err: ", err);
        return reject(err);
      }
      db.run('...', [1, 2, 'hi guys!!!'], function (err) {
        if (err) {
          console.log("insert err: ", err);
          return reject(err);
        }
        return resolve();
      });
    });
  });
}

function close_database(db) {
  db.close();
}
#EDIT
Looking at the documentation, it seems that db.close() does not take a callback parameter, so I've modified the snippet.
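The list above also mentions async/await as an option; a minimal sketch of driving the same promise-returning functions with it, assuming the definitions given in the answer:

// Sketch only: uses the promise-returning functions defined above.
async function main() {
  try {
    const db = await initialise_database(); // the table exists once this resolves
    await check_and_update(db);
    close_database(db);
    console.log('program ended.');
  } catch (err) {
    console.error(err);
  }
}

main();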

Convert to using promises in DB wrapper module function with Bluebird

I can see callback hell beginning to form in my code, so I decided to start using promises, but I can't wrap my head around implementing them. For example, I have this function:
DB.prototype = {
  findUser: function (username) {
    this._pool.getConnection(function (err, connection) {
      if (err) { return callback(true, false); }
      connection.query('SELECT password_hash, password_salt FROM users WHERE email = ? AND admin = 1', username,
        function (err, rows) {
          connection.release();
          if (rows.length === 1) { callback(false, rows[0]); }
          else { callback(false, false); }
        });
      connection.on('error', function () { callback(true, false); });
    });
  }
};
How would I adapt this to use promises instead of callbacks? And how would I use the adapted db.findUser()?
EDIT:
I got something working. It looks like this:
DB.prototype = {
  getConnection: function () {
    return this._pool.getConnectionAsync();
  }
};
And the usage:
Promise.using(db.getConnection(), function (connection) {
  return connection.queryAsync("SELECT password_hash, password_salt FROM users WHERE email = ? AND admin = 1", "exampleUser")
    .then(function (rows) {
      connection.release();
      console.log("is there a row?", rows.length === 1, rows);
      // do something with results
    });
}).catch(function (err) {
  // This is only run if an error is thrown
  console.log("error is", err);
});
Is this a good implementation or could something be improved?
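For reference, here is a sketch of where the *Async methods used above would come from, assuming the node "mysql" driver and Bluebird's promisifyAll; neither is shown in the original post, so treat this as an assumption rather than the actual setup:

// Assumes the "mysql" package and Bluebird.
const Promise = require('bluebird');
const mysql = require('mysql');

// promisifyAll adds getConnectionAsync, queryAsync, etc. to the prototypes.
Promise.promisifyAll(require('mysql/lib/Connection').prototype);
Promise.promisifyAll(require('mysql/lib/Pool').prototype);

// One possible improvement: return a Bluebird disposer so Promise.using
// releases the connection even when the query rejects.
DB.prototype.getConnection = function () {
  return this._pool.getConnectionAsync().disposer(function (connection) {
    connection.release();
  });
};

With a disposer in place, Promise.using releases the connection even if queryAsync rejects, so the explicit connection.release() inside the .then callback is no longer needed.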

Mongoose .findOne not working as an internal function call

With this as a URL:
'api/support-tag/name/myTagName'
This function works properly:
getByName: function (req, res) {
  model.Shared_SupportTag.findOne({ name: req.params.name }).exec(function (err, results) {
    if (err) {
      return res.status(400).send({
        message: errMsg.Util_ErrorMsg.getErrorMessage(err)
      });
    }
    res.send(results);
  });
}
But when I try to call a similar function from within the node server:
supportDoc.category = GetById(item.category);

function GetById(name) {
  model.Shared_SupportTag.findOne({ name: name }).exec(function (err, result) {
    if (err) {
      console.log(err);
    } else {
      console.log(result);
    }
  });
}
The function does not execute, nor does the error catch; IntelliSense shows:
err = ReferenceError: err is not defined
result = ReferenceError: result is not defined
All I am trying to accomplish is a function call from within the server, not via a URL.
Any solution here? Thanks in advance.
In the case of the findOne() method, the positive response (sans error) will either hold a Mongoose document or null.
If the same query had been sent using find(), the result would have been an empty array.
function GetById(name) {
  model.Shared_SupportTag.findOne({ name: name }).exec(function (err, result) {
    if (err) {
      console.log(err);
    } else {
      if (result) console.log(result); // Check whether the object exists.
      else console.log('Not found!');
    }
  });
}
Solved:
model.Shared_SupportDoc.find({}).exec(function (err, collection) {
  var supportDocs = require('../../data/_seed/support/supportDocs.json');
  if (collection.length === 0) {
    supportDocs.forEach(function (item) {
      var supportDoc = new model.Shared_SupportDoc();
      supportDoc.title = item.title;
      supportDoc.created = item.date;
      supportDoc.icon = item.icon;
      supportDoc.likeCount = item.likeCount || 7;

      // requires a callback - i.e. asynchronous
      GetByName(item.category, function (tagId) {
        supportDoc.categoryId = tagId;

        // must put save in the callback
        supportDoc.save(function (err) {
          if (err) {
            console.log(supportDoc.categoryId);
            console.log('Error: ' + err);
          } else {
            console.log('Support Doc Seed Complete');
          }
        });
      });
    });
  }
});

function GetByName(name, next) {
  model.Shared_SupportTag.findOne({ name: name }).exec(function (err, result) {
    if (!result) {
      console.log('Not Found');
      next();
    } else {
      console.log(result._id);
      next(result._id);
    }
  });
}
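As a side note, here is a sketch of the same lookup written with promises, assuming a Mongoose version where exec() without a callback returns a promise; the helper name is made up for illustration and does not appear in the original post.

// Hypothetical helper, not from the original post.
async function getTagIdByName(name) {
  const tag = await model.Shared_SupportTag.findOne({ name: name }).exec();
  return tag ? tag._id : null; // null when no matching tag exists
}

// Usage inside the seeding loop would then be:
// supportDoc.categoryId = await getTagIdByName(item.category);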
