this.parent.acquire is not a function on prepared statements - node.js

I use the mssql (https://www.npmjs.com/package/mssql) module for my database. Normally I use postgres databases which lead to pg (https://www.npmjs.com/package/pg).
I want to setup prepared statements for the mssql database. When using the pg module it's quite easy.
This is how I do it with pg:
I setup my databaseManager
const { Pool } = require('pg');
const db = require('../config/database.js');
const pool = new Pool(db);
/**
 * Builds the uniform envelope every caller receives: exactly one of
 * `result` / `err` is expected to be non-null.
 * @param {*} result - query rows on success, or null
 * @param {*} err - the error on failure, or null
 * @returns {{result: *, err: *}}
 */
const queryResponse = (result, err) => ({ result, err });
module.exports = async (text, values) => {
try {
const result = await pool.query(text, values);
return queryResponse(result.rows, null);
} catch (err) {
return queryResponse(null, err);
}
};
and whenever I want to query the database I can call this module and pass in my statement and values. An example for todo apps would be
todos.js (query file)
const db = require('../databaseManager.js');
module.exports = {
getAllTodosFromUser: values => {
const text = `
SELECT
id,
name,
is_completed AS "isCompleted"
FROM
todo
WHERE
owner_id = $1;
`;
return db(text, values);
}
};
I wanted to create an mssql equivalent. From the docs I see that the module differs from the pg module.
I changed my databaseManager to
const sql = require('mssql');
const config = require('../config/database.js');
// NOTE(review): connect() returns a Promise, so `pool` here is a pending
// Promise<ConnectionPool>, not a pool. This is the root cause of the
// "this.parent.acquire is not a function" error this question is about.
const pool = new sql.ConnectionPool(config).connect();
// Generic prepared-statement runner: the caller supplies a queryBuilder
// callback that declares inputs and then invokes the processor with the
// SQL text and its parameter values.
module.exports = async (queryBuilder) => {
try {
// BUG (the question's subject): `pool` is still an unresolved Promise at
// this point, so PreparedStatement stores a Promise as its parent
// connection. (`await new ...` awaits the constructed statement object,
// not the pool.)
const preparedStatement = await new sql.PreparedStatement(pool);
return queryBuilder(sql, preparedStatement, async (query, values) => {
// This is the line that throws "this.parent.acquire is not a function",
// because the parent pool was never awaited.
await preparedStatement.prepare(query);
const result = await preparedStatement.execute(values);
await preparedStatement.unprepare();
return {
// NOTE(review): mssql results expose `recordset`, not `rows` —
// presumably this should be result.recordset; verify against the docs.
result: result.rows,
err: null
};
});
} catch (err) {
return {
result: null,
err
}
}
};
and my query file would pass in the required parameters for the preparedStatement object
const db = require('../databaseManager.js');
module.exports = {
getUserByName: username => db((dataTypes, statementConfigurator, processor) => {
statementConfigurator.input('username', dataTypes.VarChar);
const query = `
SELECT
*
FROM
person
WHERE
username = #username;
`;
return processor(query, { username });
})
};
I was hoping that this approach would return the desired result but I get the error
this.parent.acquire is not a function
and don't know if my code is wrong. If it is, how can I setup my prepared statements correctly?
Edit:
I just found out that the error comes from this line of code
await preparedStatement.prepare(query);
but I think I took it correctly from the docs
https://tediousjs.github.io/node-mssql/#prepared-statement

I thought this question deserved a little more explanation than the answer the OP gave. The solution is no different from what the OP already answered.
The issue remains the same: the pool has not yet resolved from its pending Promise state, so it simply has to be awaited.
module.exports = async queryBuilder => {
try {
await pool; // Waiting for pool resolve from promise pending state.
const preparedStatement = await new sql.PreparedStatement(pool);
// ..
} catch (err) {
// ..
}
};
When you try to build a prepared statement, you pass the pool as an argument to its constructor. In its constructor is the below line
this.parent = parent || globalConnection
After which when you prepare the statement the flow leads to this line which would cause the issue since at that time this.parent's value was still a promise which was yet to be resolved.
this.parent.acquire(this, (err, connection, config) => {

Related

Node.js Value from Firebase not updating array in time for function call (even with Async Await)

I'm sure this issue stems from my lack of async/await knowledge or of general best practices. However, I cannot figure out what I am doing wrong. I'm looking to pull a value from my Firestore database, include it in an array of "global variables", and then call it whenever I need it in other modules. However, when I check the array pulled in the test function, it always returns a promise. The code below is an example of the last thing I tried.
index.js
const vars = require('./variables');
test();
async function test() {
// BUG: property access binds tighter than `await`, so this reads
// ['lovePizza'] off the *pending Promise* (undefined) and then awaits
// undefined. It needs parentheses: (await vars.global())['lovePizza'].
console.log('Expect database value', await vars.global()['lovePizza']);
}
variables.js (this is working, array updates)
const db = require('./database');
// "Let" in the original post is a transcription error — JavaScript's
// keyword is lowercase `let` (capitalized it is a SyntaxError).
let globalVars = {
  lovePiza: '', // NOTE(review): key is misspelled ("lovePiza" vs the "lovePizza" used below)
};
const variables = {
  // Lazily populates globalVars['lovePizza'] from the database and
  // returns the whole globalVars object.
  async global() {
    // BUG (the question's subject): db.getLovePizza() is async and is
    // not awaited here, so a pending Promise (not the value) is stored.
    globalVars['lovePizza'] = (globalVars['lovePizza'] === '') ? db.getLovePizza() : globalVars['lovePizza'];
    return globalVars;
  }
};
module.exports = variables;
database.js (this is working, value gets pulled from db)
// NOTE(review): `db` is not defined in this snippet; presumably the
// Firestore handle (e.g. admin.firestore()) is initialized in omitted
// code — verify against the real file.
const database = {
// Reads accounts/user1 and returns its 'lovePizza' field; resolves to
// undefined (after logging) when the document does not exist.
async getLovePizza() {
const usersRef = db.collection('accounts').doc('user1');
const doc = await usersRef.get();
if (!doc.exists) {
console.log('No such document!');
} else {
return doc.data()['lovePizza'];
}
}
}
module.exports = database;
Terminal Response:
Expect database value undefined
I saw a problem in your variables.js; I hope the problem is solved with these changes — use await before db.getLovePizza().
use try/catch when you use async/await
const db = require('./database');
// "Let" in the post is a typo — the keyword is lowercase `let`.
let globalVars = {
  lovePiza: '', // (misspelled key kept as-is from the question's code)
};
// The bare `async global() { ... }` shown in the answer is method syntax
// and is invalid standalone; it needs its enclosing object, mirroring
// the question's variables.js.
const variables = {
  // Lazily fetches and caches the 'lovePizza' value, awaiting the db call.
  async global() {
    if (globalVars['lovePizza'] === '') {
      try {
        let result = await db.getLovePizza(); // use await
        console.log(result);
        globalVars['lovePizza'] = result;
      } catch (error) {
        console.log(error);
        // do something for error handling, like throw
      }
    }
    // (no else branch needed: a non-empty cached value is returned as-is)
    return globalVars;
  }
};
module.exports = variables;
index.js
// `await` is only legal inside an async function, and `test() { ... }`
// is method syntax, invalid as a standalone declaration — both fixed.
async function test() {
  let result = await vars.global();
  console.log('Expect database value', result); // added to question
  console.log('Expect database value', result['lovePizza']); // added to question
}

await not working for insert command in nodejs

I have an array of addons and I want to insert them into the db table.
var addons = [sample,sample,.....]
return new Promise((resolve,reject) => {
addons.foEach(async addon => {
// first check if the items is in db
const response = await Kinex.where({}).from('table_name');
if(response.length == 0){
// insert new record
const insertResp = kinex('table_name').insert(addon)
addon.system_id = insertResp[0];
}else{
addon.system_id = response[0].id;
}
})
})
What I expected is to have unique record in the database, but the above code produced duplicate record in the database. Please help to find out the issue with the code.
The problem is running an async function inside a loop. As mentioned by @Felix, forEach doesn't know about async functions and doesn't wait for your where query to return. If you want to do things in an async manner inside loops, you can do it with for..of loops. Also make sure to always use try/catch blocks while using async/await. Below is the code in your case:
const addons = [sample,sample,.....];
return new Promise(async (resolve, reject) => {
try {
for (let addon of addons) {
// first check if the items is in db
const response = await Kinex.where({}).from('table_name');
if (response.length) {
const insertResp = await kinex('table_name').insert(addon)
addon.system_id = insertResp[0];
} else addon.system_id = response[0].id;
resolve(); // resolve with whatever you wants to return
}
} catch (e) {
reject(e)
}
});
You can read more on for..of with async/await here.
As pointed out by @Sándor, here's the code using Promise.all:
var addons = [sample, sample, .....]
return Promise.all(addons.map(async addon => {
// Do your async stuff here
// first check if the items is in db
const response = await Kinex.where({}).from('table_name');
if (response.length == 0) {
// insert new record
const insertResp = kinex('table_name').insert(addon)
addon.system_id = insertResp[0];
} else {
addon.system_id = response[0].id;
}
}))

How to await a streaming sql query in node.js

I need to call out to a function that runs a sql query, with row level functionality, and await the entire process before continuing.
Function code:
const sql = require('mssql')
exports.doit = ()=>{
const pool1 = new sql.ConnectionPool(dbConfig);
const pool1Connect = pool1.connect();
pool1.on('error', err => {
console.error('error occurred on pool')
})
await pool1Connect
try {
const request = pool1.request();
request.stream = true;
request.query('select * from dbo.user');
request.on('row', async orow => {
console.log('outer row');
const innerPool = new sql.ConnectionPool(dbConfig);
const innerConnection = innerPool.connect();
innerPool.on('error', err => {
console.error('error occurred on pool')
});
const iConnection = await innerConnection;
connections.push(iConnection);
const innerRequest = innerPool.request();
innerRequest.stream = true;
var iquery = 'select * from dbo.order where userId='+ orow.userId
innerRequest.query(iquery);
innerRequest.on('row', async irow => {
console.log(`User: ${orow.userId} Order: ${irow.orderId}`);
});
innerRequest.on('done', async () => {
console.log('inner done');
iConnection.close();
});
});
request.on('done', async () => {
console.log('outer done');
})
} catch (err) {
console.error('SQL error', err);
}
sql.on('error', err => {
// ... error handler
})
}
Then call the above function like this:
var doit = require('./testmeHandler.js').doit;
doit()
.then(()=>{
console.log("I AM DONE");
});
OR
await doit();
console.log('I AM DONE');
You get the idea...
But what is really happening is, the function gets called, then 'I AM DONE' and then the results of all the sql calls.
Can someone help me get 'I AM DONE' at the bottom? Still getting used to the async/await and promises.
Thanks
After quite a bit of time trying to get this to work synchronously from the caller I gave up and re-wrote the method to use the regular query (not streaming) and implemented my own paging/throttling as to control memory usage. It works great now!
I am using a connection pool to allow for sub queries and other processes to occur async within a batch of results.
I will post the updated code.
Somehow I believe you have jumbled it all up a bit.
Use this
exports.doit = async ()=>
{
const request = new sql.Request(conn)
let records = await request.query('select * from dbo.user')
records.forEach(async r=>{
try{
// do something
const inner = new sql.Request(conn)
let recordInner = await request.query(innerQuery)
recordInner.forEach(async r=>{//do some stuff})
inner.close()
}
catch(err){
//do something with the error
}
records.close()
})
}
The execution:
// `async execute() { ... }` is object/class method syntax and is invalid
// as a standalone statement; a function declaration is needed here.
async function execute() {
  const result = await doit();
  return result;
}
execute();
Though I have no idea why you are using two connections at all. Just try writing a more precise query using JOIN or a WHERE subquery. You can achieve all of this in a single query instead of using nested connections. SQL, though a bit old, really is quite powerful.
select * from dbo.order WHERE userId IN (SELECT userId FROM dbo.user)
Makes more sense to me. But, whatever floats your boat.
More on sub-queries: https://www.dofactory.com/sql/subquery

Retrieving values from database using node and mssql

Here's a simple script i made:
const Nightmare = require('nightmare');
const sql = require('mssql');
// `itens` is the *Promise* returned by the async getRecords(), never the
// item list itself — this is why getPrices logs "Promise { undefined }".
const itens = getRecords();
async function getRecords(){
// BUG (the question's subject): itensList is assigned inside an async
// callback, and the `return` below belongs to the connect() callback,
// not to getRecords — so getRecords itself resolves to undefined before
// the query has completed.
let itensList = [];
const cfg = {
//config here
};
sql.connect(cfg, function(err){
if(err) console.log(err);
let request = new sql.Request();
request.query("<query here>", (err, result) => {
if(err) console.log(err);
itensList = result;
});
// returns from the connect callback only; getRecords ignores this value
return itensList;
});
}
async function getPrices(){
try{
// logs the pending promise stored in `itens` (see note above)
console.log(itens)
}catch(e){
console.log(e);
}
}
getPrices();
Everything works, but when the getPrices() function gets called, here's what's being logged:
Promise { undefined }
What am i missing here?
request.query is being called, but itensList is being returned before it can be assigned.
Basically, the order of what is happening is:
request.query is called, and starts running the query.
Since request.query is asynchronous, we move on to the next task - returning itensList.
request.query finishes running, and assigns itensList the expected value after it has already been returned.
To get your desired functionality, I would recommend using callbacks (which node-mssql supports). Alternatively, you could use the await keyword. For instance:
var queryText = 'SELECT 1 AS Value';
var queryResults = await connection.query(queryText);

Sharing DB Connection across AWS Lambda function calls

So I'm following the example here https://www.mongodb.com/blog/post/optimizing-aws-lambda-performance-with-mongodb-atlas-and-nodejs, to optimize my lambda functions.
I've tried two approaches and tested them locally using serverless-offline and both don't seem to work.
First Approach
// endpoint file
import {connectToDatabase} from "lib/dbUtils.js";
let cachedDb = null;
export function post(event, context, callback) {
let response;
context.callbackWaitsForEmptyEventLoop = false;
connectToDatabase()
.then(//do other stuff
// lib/dbUtils.js
// Returns a cached mongoose connection when one exists and reports
// itself connected; otherwise creates (and caches) a new one.
// NOTE(review): `cachedDb` is declared in the *endpoint* file in this
// post — split across modules as shown, this function would throw a
// ReferenceError; presumably both live in one scope in the real code.
export async function connectToDatabase() {
if (cachedDb && cachedDb.serverConfig.isConnected()) {
console.log(" using cached db instance");
return cachedDb;
}
// NOTE(review): createConnection is given a callback *and* awaited;
// whether the resolved value exposes serverConfig.isConnected() is not
// shown here — verify against the mongoose docs.
cachedDb = await mongoose.createConnection(
process.env.DB_URL,
async err => {
if (err) {
throw err;
}
}
);
return cachedDb;
}
Second Approach
global.cachedDb = null;
export function post(event, context, callback) {
let response;
context.callbackWaitsForEmptyEventLoop = false;
connectToDatabase()
.then(connection => createUser(event.body, connection))
// lib/dbUtils.js
// Same caching scheme as the first approach, but the connection is
// stashed on the Node `global` object instead of a module-level variable.
export async function connectToDatabase() {
// Reuse the warm connection if this container already created one.
// eslint-disable-next-line
if (global.cachedDb && global.cachedDb.serverConfig.isConnected()) {
// eslint-disable-next-line
console.log(" using cached db instance");
// eslint-disable-next-line
return global.cachedDb;
}
// Cold start: open a fresh connection and cache it globally so later
// invocations in the same container can find it.
// eslint-disable-next-line
global.cachedDb = await mongoose.createConnection(
process.env.DB_URL,
async err => {
if (err) {
throw err;
}
}
);
// eslint-disable-next-line
return global.cachedDb;
}
In both cases the using cached db instance console log does not run.
Why does this not work? Is this because of serverless-offline?
The answer is simple: serverless-offline doesn't simulate the full AWS. Use the AWS console to make a real Lambda.
The MongoDB Atlas guide is OK, but it's also worth checking the official AWS Lambda documentation describing the context option in each lambda:
callbackWaitsForEmptyEventLoop – Set to false to send the response right away when the callback executes, instead of waiting for the Node.js event loop to be empty. If false, any outstanding events will continue to run during the next invocation.
It's possible to run your code on a real Lambda and see using cached db instance on the console. Since MongoDB's JavaScript code is fairly poor, I've written out my own version below:
var MongoClient = require("mongodb").MongoClient
let db = null
var log = console.log.bind(console)
var print = function(object) {
return JSON.stringify(object, null, 2)
}
// Use your own credentials (and better yet, put them in environment variables)
const password = `notactuallyapassword`
const uri = `mongodb+srv://lambdauser:${password}#fakedomain.mongodb.net/test?retryWrites=true`
exports.handler = function(event, context, callback) {
log(`Calling MongoDB Atlas from AWS Lambda with event: ${print(event)}`)
var document = JSON.parse(JSON.stringify(event))
const databaseName = "myDatabase",
collectionName = "documents"
// See https://www.mongodb.com/blog/post/optimizing-aws-lambda-performance-with-mongodb-atlas-and-nodejs
// and https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-context.html#nodejs-prog-model-context-properties
context.callbackWaitsForEmptyEventLoop = false
return createDoc(databaseName, collectionName, document)
}
async function createDoc(databaseName, collectionName, document) {
var isConnected = db && db.serverConfig.isConnected()
if (isConnected) {
log(`Already connected to database, warm start!`)
} else {
log(`Connecting to database (cold start)`)
var client = await MongoClient.connect(uri)
db = client.db(databaseName)
}
var result = await db.collection(collectionName).insertOne(document)
log(`just created an entry into the ${collectionName} collection with id: ${result.insertedId}`)
// Don't close the connection thanks to context.callbackWaitsForEmptyEventLoop = false - this will re-use the connection on the next called (if it can re-use the same Lambda container)
return result
}
Use the Test button to run the lambda above twice in the AWS Lambda console.
The first time you run it you'll see Connecting to database (cold start)
The second time you'll see Already connected to database, warm start!
See the log output section in screenshot below:

Resources