How to (properly) chain multiple sequential MSSQL queries in Node

I'm writing a simple nodejs CLI tool while learning promises (to avoid callback hell), and every tutorial/stackoverflow example I've found only shows how to make a single call. My use case is as follows:
1. Connect to the Database (this I can do)
2. Perform a SQL select statement (also got this part)
3. Do some stuff with the results
4. Repeat steps 2 & 3 a few more times
I'm collecting the MSSQL user name and password at runtime (the server 'localhost' and database name 'testdb' are hard-coded), so when the app starts I can't just jump straight into the MSSQL connection.
I can get this working via callbacks, but right now I have about 50 queries, so you can imagine the ugliness. The full code below does get me the first query, and I strongly suspect I'm not passing the "pool" object on to the next "then", but when I try
.then((pool,result) => {
//next command
})
It still doesn't recognize pool
Here is the code (index.js):
const mssql = require('mssql');
const qry1 = "select fieldA from tblA";
const qry2 = "select fieldB from tblB";
const qry3 = "select fieldC from tblC";
var dbuser = '';
var dbpass = '';
var config = {}
function init() {
log('Beginning Audit');
collectDbInfo(); //The reason I don't just include it all here
}
function collectDbInfo() {
//code is irrelevant to the problem, this is where I'm collecting the database credentials
}
function start() {
config = {
user: dbuser,
password: dbpass,
server: 'localhost',
database: 'testdb'
}
mssql.connect(config)
.then(pool => {
//FIRST query
return pool.request().query(qry1)
})
.then(result => {
processQryA(result);
//SECOND query
return pool.request().query(qry2)
})
.then(result => {
processQryB(result);
//THIRD query
return pool.request().query(qry3)
})
.then(result => {
processQryC(result);
})
mssql.on('error', err => {
log('SQL Error:', err)
mssql.close();
process.exit(0);
})
}
function processQryA(data) {
console.log(data.recordset[0].fieldA)
}
function processQryB(data) {
console.log(data.recordset[0].fieldB)
}
function processQryC(data) {
console.log(data.recordset[0].fieldC)
}
init();
I fully appreciate I may be approaching this all wrong, so any advice or especially examples would be greatly appreciated.

If the queries are absolutely sequential in nature, you can achieve that with async/await:
async function start(){
config = {
user: dbuser,
password: dbpass,
server: 'localhost',
database: 'testdb'
}
try {
const pool = await mssql.connect(config);
const res1 = await pool.request().query(qry1);
processQryA(res1);
const res2 = await pool.request().query(qry2);
processQryB(res2);
const res3 = await pool.request().query(qry3);
processQryC(res3);
const res4 = await pool.request().query(qry4);
processQryD(res4);
/*..And so on with rest of sequential queries*/
/*Any of them resulting in error will be caught in (catch)*/
} catch (error) {
console.error("Error in start()::", error);
}
}
Also: I would probably keep the pool-acquisition logic separate from the query execution, so errors and validation can be handled cleanly in one place.
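With ~50 queries, the query/handler pairs can also be driven from an array so you don't repeat the same await boilerplate per query. A minimal sketch (the steps array and runAll are illustrative names; it reuses the processQryX helpers and config from the question):

const mssql = require('mssql');

// Illustrative: pair each query with the function that consumes its result,
// so query #50 is one more array entry instead of another await block.
const steps = [
  { qry: 'select fieldA from tblA', handle: processQryA },
  { qry: 'select fieldB from tblB', handle: processQryB },
  { qry: 'select fieldC from tblC', handle: processQryC },
];

async function runAll(config) {
  const pool = await mssql.connect(config); // connect once, reuse the pool
  try {
    for (const step of steps) {
      const result = await pool.request().query(step.qry); // strictly sequential
      step.handle(result);
    }
  } finally {
    mssql.close(); // release the pool even if a query threw
  }
}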

Related

"cannot insert multiple commands into a prepared statement" How do I return cursor results from a PostgreSQL Function in KOA service

I am using a PostgreSQL FUNCTION to return result sets, and I am having trouble getting my Node.js/KOA API to call it properly with parameters. I can get it to work without parameters, but the parameters turn it into a prepared statement, and prepared statements don't allow multiple commands.
Here is the database object and configuration that I use:
const { Pool } = require('pg');
const config = require('../configuration');
exports.pool = this.pool;
exports.start = async function() {
const host = config.get("PGHOST");
const user = config.get("PGUSER");
const port = config.get("PGPORT");
const password = config.get("PGPASSWORD");
const database = config.get("PGDATABASE");
this.pool = new Pool({ user, host, database, password, port });
console.log('Postgres database pool started.');
};
exports.close = async function() {
await this.pool.end();
};
exports.query = async function(query, data) {
let rs = await this.pool.query(query, data);
return rs;
};
Here is my KOA service (it uses the pg.Pool node module, and where my issue is, i think...):
let getFilteredDevelopers = async (developerId, firstName, lastName) => {
let query = {
text: `
BEGIN;
SELECT ks_get_filtered_developers($1, $2, $3);
FETCH ALL IN "ks_developers_cursor";
COMMIT;
`,
values: [ developerId, firstName, lastName ]
};
try {
let result = await database.query(query);
return result[2].rows;
} catch (error) {
return `Failed to fetch developers.`;
}
};
This approach works fine without the parameters. But when I add the parameters to the koa pg call, it throws the error: 'cannot insert multiple commands into a prepared statement'.
The following is my function:
CREATE OR REPLACE FUNCTION ks_get_filtered_developers (
p_developer_id NUMERIC,
p_first_name TEXT,
p_last_name TEXT
) RETURNS refcursor AS
$$
DECLARE
v_query TEXT = '';
v_where_clause TEXT = '';
v_developers_cursor refcursor = 'ks_developers_cursor';
BEGIN
IF (p_developer_id IS NOT NULL) THEN
v_where_clause = v_where_clause || FORMAT(' AND d.developer_id = %s ', p_developer_id);
END IF;
v_query = '
SELECT d.developer_id AS id, d.*
FROM ks_developers d
WHERE 1=1
' || v_where_clause || '
ORDER BY d.developer_id
';
OPEN v_developers_cursor FOR
EXECUTE v_query;
RETURN v_developers_cursor;
END;
$$
LANGUAGE plpgsql;
How can I implement this in the appropriate way? What am I missing or misunderstanding?
The problem was my lack of understanding of how to use the pg node module. Basically, I was trying to jam everything into one query because I thought I had to. I had forgotten that the "helper" query I created in the database module was narrow, and I confused it with the pg pool's query function. I was shooting myself in the foot.
This is the article that showed me how multiple statements should be done with a transaction:
https://node-postgres.com/features/transactions
So as a quick fix for this question, I exposed the pool from the database module, ran the queries on a client checked out from the pool, and then released the connection. This allows multiple prepared statements and actions.
Here is what I modified my koa to be:
let getFilteredDevelopers = async (developerId, firstName, lastName) => {
const client = await database.pool.connect();
try {
await client.query('BEGIN');
const selectQuery = `SELECT ks_get_filtered_developers($1, $2, $3)`;
await client.query(selectQuery, [ developerId, firstName, lastName ]);
const fetchQuery = `FETCH ALL IN "ks_developers_cursor"`;
const result = await client.query(fetchQuery);
await client.query('COMMIT');
return result.rows;
} catch (error) {
await client.query('ROLLBACK');
return `Failed to fetch developers.`;
} finally {
client.release();
}
};
I will be refactoring my code to handle this better, but I wanted to answer the question of why this code doesn't work, and what I was misunderstanding.

How do I manage concurrent connections with postgres and node-postgres?

I have read a lot of posts here about the "too many connections" issue.
I am struggling to loop over a modest amount of CSV data (10,000 rows) and upload it to ElephantSQL (psql).
I am on the free plan for ElephantSQL. And while I can upgrade to get more concurrent connections, the issue is that I am not sure how to manage connections.
Here is my code:
First I create the individual URLs to pass to axios in extractToRaw (which writes to the raw_data table in psql):
readCSV(async (list) => {
const apiURLList = await list.map((item) => {
return `apiDomain=${domain}&api_key=${apiKey}`;
});
for (const url of apiURLList) {
await extractToRaw(url);
}
});
Then:
const extractToRaw = async (url) => {
let records = [];
try {
await axios({
method: "get",
url: url,
params: {
//things here
},
}).then((data) => {
const contactRecord = data.data.data;
const emailData = data.data.data.emails;
const metaData = data.data.meta;
//
if (metaData.results === 0) {
try {
console.log(`no emails for ${contactRecord.domain}`);
upload_no_email(contactRecord.domain);
} catch (err) {
console.log("name: ", err.name, "message: ", err.message);
}
} else
for (const record of emailData) {
console.log(`Writing ${record.value} record...`);
records.push({
firstname: record.first_name,
lastname: record.last_name,
position: record.position,
seniority: record.seniority,
email: record.value,
website: record.value.split("#")[1],
confidence: record.confidence,
});
console.log(records);
}
//upload to table
uploadToRaw(records);
});
} catch (err) {
console.log(err);
}
};
Finally - upload to PSQL
const uploadToRaw = (records) => {
console.log(`uploading from ${records[0].website}`);
for (const record of records) {
const valuesArr = [
record.firstname,
record.lastname,
record.position,
record.seniority,
record.email,
record.website,
record.confidence,
];
pool.query(
`
INSERT INTO raw_data(firstname, lastname, position, seniority, email, website, confidence) VALUES($1, $2, $3, $4, $5, $6, $7)`,
valuesArr
);
}
};
Without fail, I will get a "too many connections" error.
Am I using pool.query wrong?
UPDATE: added the node-postgres initialisation script
const { Pool, Client } = require("pg");
const connectionString =
"string here";
const pool = new Pool({
connectionString,
});
const client = new Client({
connectionString,
});
The call to create a new Pool takes a parameter called max, which sets the maximum number of connections the pool will open.
Care must be taken that it aligns with the number of connections available on the DB server.
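For example, a minimal sketch (the values 4 and 30000 are illustrative; pick a max below your plan's connection limit):

const { Pool } = require("pg");

const pool = new Pool({
  connectionString,
  max: 4, // hard cap on concurrent connections this pool will open
  idleTimeoutMillis: 30000, // close idle clients so they don't hold server slots
});

With a capped pool, calls to pool.query beyond the cap queue inside the pool instead of opening new connections, which is usually what you want on a small plan.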

Best practice running queries in Node.js with MongoDB driver 3.6?

The official documentation of the Node.js Driver version 3.6 contains the following example for the .find() method:
const { MongoClient } = require("mongodb");
// Replace the uri string with your MongoDB deployment's connection string.
const uri = "mongodb+srv://<user>:<password>#<cluster-url>?w=majority";
const client = new MongoClient(uri);
async function run() {
try {
await client.connect();
const database = client.db("sample_mflix");
const collection = database.collection("movies");
// query for movies that have a runtime less than 15 minutes
const query = { runtime: { $lt: 15 } };
const options = {
// sort returned documents in ascending order by title (A->Z)
sort: { title: 1 },
// Include only the `title` and `imdb` fields in each returned document
projection: { _id: 0, title: 1, imdb: 1 },
};
const cursor = collection.find(query, options);
// print a message if no documents were found
if ((await cursor.count()) === 0) {
console.log("No documents found!");
}
await cursor.forEach(console.dir);
} finally {
await client.close();
}
}
To me this somewhat implies that I would have to create a new connection for each DB request I make.
Is this correct? If not, then what is the best practice to keep the connection alive for various routes?
You can use mongoose to set up a connection with your database.
mongoose.connect('mongodb://localhost:27017/myapp', {useNewUrlParser: true});
Then you need to define the models you will use to communicate with your DB in your routes.
const MyModel = mongoose.model('Test', new Schema({ name: String }));
MyModel.findOne(function(error, result) { /* ... */ });
https://mongoosejs.com/docs/connections.html
It's 2022 and I stumbled upon your post because I've been running into the same issue. All the tutorials and guides I've found so far have setups that require reconnecting in order to do anything with the database.
I found a solution from someone on GitHub that creates a class to create, save, and check whether a client connection exists, so it only recreates the client connection if one doesn't already exist.
const MongoClient = require('mongodb').MongoClient
class MDB {
static async getClient() {
if (this.client) {
return this.client
}
this.client = await MongoClient.connect(this.url);
return this.client
}
}
MDB.url='<your_connection_url>'
app.get('/yourroute', async (req, res) => {
try {
const client = await MDB.getClient()
const db = client.db('your_db')
const collection = db.collection('your_collection');
const results = await collection.find({}).toArray();
res.json(results)
} catch (error) {
console.log('error:', error);
}
})

pool.request is not a function

I would like to set up my prepared statements with the mssql module. I created a query file for all user-related requests.
const db = require('../databaseManager.js');
const dataTypes = require('mssql').TYPES; // dataTypes was not defined in the original snippet
module.exports = {
getUserByName: async username => db(async pool => await pool.request()
.input('username', dataTypes.VarChar, username)
.query(`SELECT
*
FROM
person
WHERE
username = @username;`))
};
This approach allows me to require this query file and access the database by executing the query that is needed
const userQueries = require('../database/queries/users.js');
const userQueryResult = await userQueries.getUserByName(username); // call this somewhere in an async function
My database manager handles the database connection and executes the query
const sql = require('mssql');
const config = require('../config/database.js');
const pool = new sql.ConnectionPool(config).connect();
module.exports = async request => {
try {
const result = await request(pool);
return {
result: result.recordset,
err: null
};
} catch (err) {
return {
result: null,
err
}
}
};
When I run the code I get the following error
UnhandledPromiseRejectionWarning: TypeError: pool.request is not a
function
Does someone know what is wrong with the code?
I think this happens because the pool is not initialized yet... but I used async/await to handle this...
Here is how I made your code work (I did some drastic simplifications):
const sql = require("mssql");
const { TYPES } = require("mssql");
const CONN = "";
(async () => {
const pool = new sql.ConnectionPool(CONN);
const poolConnect = pool.connect();
const getUserByName = async username => {
await poolConnect;
try {
const result = await pool.request()
.input("username", TYPES.VarChar, username)
.query(`SELECT
*
FROM
person
WHERE
username = @username;`);
return {
result: result.recordset,
err: null
};
} catch (err) {
return {
result: null,
err
};
}
};
console.log(await getUserByName("Timur"));
})();
In short, first read this.
You probably smiled when you saw that the PR was created just two months before your question and still isn't reflected in the docs here.
Basically, instead of:
const pool = new sql.ConnectionPool(config).connect();
you do this:
const pool = new sql.ConnectionPool(config);
const poolConnection = pool.connect();
//later, when you need the connection you make the Promise resolve
await poolConnection;

Retrieving example for jshs2

I have working code to fire a query on Hive using the node module "jdbc". As an alternative I was trying "jshs2"; I am able to connect to Hive and fire a query, but I'm still stuck on retrieving the result set. Can anyone who has used the "jshs2" module put up an example?
Thanks for any help.
I just started a project where I have to connect to Hive from node too. I was able to run a query on the database and iterate through the result set using the following demo code:
const {
Configuration,
HiveConnection,
IDLContainer,
} = require('jshs2');
const options = {
auth: 'NOSASL',
host: 'myServer',
port: myPort,
};
const hiveConfig = new Configuration(options);
const idl = new IDLContainer();
async function main() {
await idl.initialize(hiveConfig);
const connection = await new HiveConnection(hiveConfig, idl);
const cursor = await connection.connect();
const res = await cursor.execute('SELECT * FROM orders LIMIT 10');
if (res.hasResultSet) {
const fetchResult = await cursor.fetchBlock();
fetchResult.rows.forEach((row) => {
console.log(row);
});
}
cursor.close();
connection.close();
}
main().then(() => {
console.log('Finished.');
});
I'm using node v8.x, so I can use modern JS features like destructuring and async/await. If your node version is older, you must work with promise chains.
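For reference, a rough promise-chain equivalent of the demo above (an untested sketch against the same jshs2 API):

let connection;
let cursor;
idl.initialize(hiveConfig)
  .then(() => {
    connection = new HiveConnection(hiveConfig, idl);
    return connection.connect(); // resolves with a cursor
  })
  .then((c) => {
    cursor = c;
    return cursor.execute('SELECT * FROM orders LIMIT 10');
  })
  .then((res) => (res.hasResultSet ? cursor.fetchBlock() : { rows: [] }))
  .then((fetchResult) => {
    fetchResult.rows.forEach((row) => console.log(row));
    cursor.close();
    connection.close();
    console.log('Finished.');
  });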
