failed to connect to xxxxx:xxxx in 150000ms node mssql - node-mssql

I have a Node.js job that uses node-cron to run some logic every hour.
It connects to a SQL Server database using the node mssql package.
Connection file code:
const sql = require('mssql');

// obj is the mssql connection config (server, credentials, etc.)
const conn = (function () {
  try {
    return new sql.ConnectionPool(obj);
  } catch (err) {
    throw err;
  }
}());

const pool = (function () {
  try {
    return conn.connect();
  } catch (err) {
    throw err;
  }
}());

const myConn = async function getConn() {
  try {
    return await pool;
  } catch (err) {
    throw err;
  }
};

module.exports = {
  myConn
};
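For context, the obj passed to ConnectionPool above is the node-mssql connection config; a minimal sketch of what it might contain (the server, database and credential values here are placeholders, not taken from the question):
// Hypothetical config -- adjust values to your environment.
const obj = {
  user: 'jobUser',
  password: 'secret',
  server: 'xxxxx',              // host from the error message
  database: 'JobsDb',
  connectionTimeout: 15000,     // ms to wait for a connection (default 15000)
  requestTimeout: 15000,        // ms to wait for a request to complete
  pool: {
    max: 10,
    min: 0,
    idleTimeoutMillis: 30000
  },
  options: {
    encrypt: true,
    trustServerCertificate: false
  }
};
The default connectionTimeout is 15000 ms, which is the figure that shows up in the error message below.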
Code to call a stored procedure:
const sql = require('mssql');
const connectionFile = require('./connection'); // path assumed: the file above

async function callProc(procedureName, inputList, callback) {
  try {
    const pool = await connectionFile.myConn();
    const request = new sql.Request(pool);
    if (inputList) {
      for (const param of inputList) {
        request.input(param.name, param.type, param.value);
      }
    }
    const result = await request.execute(procedureName);
    callback(null, result);
  } catch (err) {
    callback(err, null);
  }
}
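A hedged example of how the hourly cron handler might invoke callProc (the procedure name and parameter are hypothetical; sql.Int is a standard node-mssql type):
// Hypothetical hourly task -- procedure name and parameter are made up.
callProc('usp_ProcessQueue', [
  { name: 'BatchSize', type: sql.Int, value: 100 }
], (err, result) => {
  if (err) {
    console.error('Stored procedure failed:', err);
    return;
  }
  console.log('Rows returned:', result.recordset.length);
});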
Intermittently my job fails with the error "failed to connect to xxxxx:xxxx in 15000ms".
Any suggestions?

Related

mongodb method without function not working in nodejs

I don't know much JavaScript and am making a Node.js app. My MongoDB query only works when it ends with a method that takes a callback, like .toArray.
Here's the database.js file:
const { MongoClient } = require('mongodb');

const uri = "mongodb+srv://name:pass@clusterurl/metro4?retryWrites=true&w=majority";
// all fields are correctly filled
const client = new MongoClient(uri);

try {
  // Connect to the MongoDB cluster
  client.connect(err => {
    if (err) throw err;
    let db = client.db('metro4');

    db.collection('Station').find().toArray(function (err, result) {
      if (err) throw err;
      console.log(result);
    });

    let a = db.collection('Station').findOne({ '_id': 4 });
    if (a) {
      console.log(a);
    } else {
      console.log("No a\n");
    }

    module.exports = db;
  });
} catch (e) {
  console.error(e);
} finally {
  client.close();
}
When I run the app, db.collection('Station').find().toArray runs fine and outputs the result, but the second query with findOne doesn't work.
Any help is appreciated.
The findOne method returns a Promise. You should handle its result in a callback function:
db.collection('Station').findOne({ _id: 4 }, function (err, a) {
  if (err) {
    console.log(err);
  } else if (a) {
    console.log(a);
  } else {
    console.log('No a\n');
  }
});
Or using async/await:
client.connect(async (err) => {
  ...
  let a = await db.collection('Station').findOne({ _id: 4 });
  ...
});
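A slightly fuller sketch of that async flow, assuming the same client, metro4 database and Station collection from the question (the error handling is illustrative):
async function run() {
  try {
    await client.connect();
    const db = client.db('metro4');

    const stations = await db.collection('Station').find().toArray();
    console.log(stations);

    const a = await db.collection('Station').findOne({ _id: 4 });
    console.log(a ? a : 'No a');
  } finally {
    await client.close();
  }
}

run().catch(console.error);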
EDIT
To handle the import/export problem you should put the database connection operations in separate async functions.
You may use the connection function to return the database instance:
const { MongoClient } = require('mongodb');

const uri = "mongodb+srv://name:pass@clusterurl/metro4?retryWrites=true&w=majority";
// all fields are correctly filled
const client = new MongoClient(uri);

const connectDB = async () => {
  try {
    // Connect to the MongoDB cluster
    await client.connect();
    return client.db('metro4');
  } catch (e) {
    throw e;
  }
};

const disconnectDB = () => {
  client.close();
};

module.exports = { connectDB, disconnectDB };
Then use these functions to handle your database related operations:
const { connectDB, disconnectDB } = require('../database');

const getStations = async () => {
  const db = await connectDB();
  if (!db) return;
  try {
    const data = await db.collection('Station').find().toArray();
    return data;
  } catch (err) {
    throw err;
  } finally {
    disconnectDB();
  }
};

const getStation = async (id) => {
  const db = await connectDB();
  if (!db) return;
  try {
    const data = await db.collection('Station').findOne({ _id: id });
    return data;
  } catch (err) {
    throw err;
  } finally {
    disconnectDB();
  }
};
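A hedged example of calling one of these helpers from elsewhere in the app (the id value is illustrative, and it assumes the file above also exports the helpers):
const { getStation } = require('./stations'); // path and export assumed

(async () => {
  const station = await getStation(4); // 4 is just an example id
  console.log(station || 'Station 4 not found');
})().catch(console.error);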

Nodejs Multiple pools created on refresh

I have several DBs for which I am using connection pools in Node.js. Every time I refresh the page I think the pools are created again: I refresh the page 3 times and the promises resolve 3 times. I have removed several databases here just to make it a little easier to read.
And if I un-comment the connection close line my app crashes. I can't seem to figure out why.
const config = require("../config/config");
const oracledb = require("oracledb");

var crm1connPromise = new Promise((resolve, reject) => {
  oracledb.createPool({
    user: config.crm1.user,
    password: config.crm1.password,
    connectString: config.crm1.connectString,
    poolAlias: config.crm1.poolAlias,
    poolMin: 0,
    poolMax: 10,
    poolTimeout: 300
  }, (error, pool) => {
    if (error) {
      reject(error);
    }
    resolve("CRM1 Promise resolved");
  });
});

var query2connPromise = new Promise((resolve, reject) => {
  oracledb.createPool({
    user: config.query2.user,
    password: config.query2.password,
    connectString: config.query2.connectString,
    poolAlias: config.query2.poolAlias,
    poolMin: 0,
    poolMax: 10,
    poolTimeout: 300
  }, (error, pool) => {
    if (error) {
      reject(error);
    }
    resolve("QUERY2 Promise resolved --------");
  });
});
var promiseArray = [crm1connPromise, crm2connPromise, crm3connPromise, crm4connPromise, csfp1connPromise, csfp2connPromise, csfp3connPromise, csfp4connPromise, cact1connPromise, cact2connPromise, cact3connPromise, cact4connPromise, cospconnPromise, cchnconnPromise, bbaseconnPromise, bcdrconnPromise, vcdbconnPromise, crptconnPromise, query2connPromise];
function getDBConnection(dbname) {
  return new Promise((resolve, reject) => {
    try {
      Promise.all(promiseArray).then((message) => {
        console.log(message);
        const pool = oracledb.getPool(dbname);
        pool.getConnection((err, connection) => {
          if (err) {
            reject(err);
            console.log(err);
          }
          resolve(connection);
        });
      });
    } catch (error) {
      reject(error);
    }
  });
}
module.exports.query = function (dbname, sql, bind = []) {
  return new Promise((resolve, reject) => {
    var conn;
    try {
      getDBConnection(dbname).then((connection) => {
        connection.execute(sql, bind, (err, result) => {
          if (err) {
            reject(err);
          }
          resolve(result);
        });
        //connection.close(0);
      });
    } catch (error) {
      reject(error);
    }
  });
};
You can use a singleton; look up the 'Singleton pattern' for examples.
Like this:
dataBaseManager.js:
'use strict'

var Singleton = (function () {
  var instance;

  function createInstance() {
    var object = new dataBaseManager();
    return object;
  }

  return {
    getInstance: function () {
      if (!instance) {
        instance = createInstance();
      }
      return instance;
    }
  };
})();

function dataBaseManager() {
  this.connected = false;
  this.client = null;
  this.dataBase = null;

  // public methods
  this.connect = function () {
    try {
      your_database.connect({}, (err, client) => {
        if (err) {
          this.connected = false;
          this.client = null;
          this.dataBase = null;
          return;
        }
        this.connected = true;
        this.client = client;
        this.dataBase = client.db();
      });
    } catch (error) {
    }
  };

  this.disconnect = function () {
    try {
      if (this.client) {
        this.client.close();
        this.connected = false;
        this.client = null;
        this.dataBase = null;
      }
    } catch (error) {
    }
  };
}

module.exports = Singleton;
repository.js:
const dataBaseManager = require("./dataBaseManager").getInstance();

your_get_sample_data_from_data_base_func = function (callback) {
  dataBaseManager.dataBase
    .find({})
    .toArray(function (err, result) {
      if (err) {
        return callback(err, null);
      }
      callback(null, result);
    });
};
index.js:
const dataBaseManager = require("./dataBaseManager").getInstance();

function connect() {
  dataBaseManager.connect();
}

function disconnect() {
  dataBaseManager.disconnect();
}
Look at the node-oracledb example webappawait.js which starts the pool outside the web listener code path.
async function init() {
  try {
    await oracledb.createPool({
      user: dbConfig.user,
      password: dbConfig.password,
      connectString: dbConfig.connectString
    });

    const server = http.createServer();
    server.on('error', (err) => {
      console.log('HTTP server problem: ' + err);
    });
    server.on('request', (request, response) => {
      handleRequest(request, response);
    });
    await server.listen(httpPort);
    console.log("Server is running at http://localhost:" + httpPort);
  } catch (err) {
    console.error("init() error: " + err.message);
  }
}
async function handleRequest(request, response) {
  . . .
}
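The body of handleRequest is elided above; as a hedged illustration (the SQL text and response formatting are assumptions), a handler in that style checks a connection out of the default pool per request and releases it afterwards:
// Hypothetical handler body -- query and response shape are made up.
async function handleRequest(request, response) {
  let connection;
  try {
    connection = await oracledb.getConnection(); // uses the default pool created in init()
    const result = await connection.execute(`SELECT sysdate FROM dual`);
    response.writeHead(200, { 'Content-Type': 'application/json' });
    response.end(JSON.stringify(result.rows));
  } catch (err) {
    response.writeHead(500);
    response.end(err.message);
  } finally {
    if (connection) {
      await connection.close(); // returns the connection to the pool
    }
  }
}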

Using a lambda function in Node.js I want to connect to two different database engines (one is SQL Server and the other is Postgres)

I want to connect to two different database engines (i.e. Postgres and MSSQL) using Node.js in a Lambda script.
// Assumed to be defined above the handler: const sql = require('mssql'); and a config object for SQL Server.
exports.handler = (event, context, callback) => {
  try {
    sql.connect(config, (err) => {
      if (err) {
        console.log(err);
        callback(err);
      } else {
        const req = new sql.Request();
        req.query("select top 10 * from queue", async (error, result) => {
          if (error) {
            console.log(error);
            callback(error);
          } else {
            const { Client } = require('pg');
            const client = new Client();
            await client.connect();
            var res = await client.query("select * from nq");
            var notificationData = res.rows;
            console.log(notificationData);
            // console.log(params);
            sql.close();
            callback(null, result.recordset);
          }
        });
      }
    });

    sql.on('error', (err) => {
      console.log(err);
      callback(err);
    });
  } catch (e) {
    console.log(e);
    console.error(e);
    sql.close();
  }
};
I want to retrieve data from the SQL Server database and, based on the ID, retrieve similar data from the Postgres database in the same Lambda function.
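A hedged sketch of the same flow written with async/await (the table names come from the snippet above; the id column, the join between the two result sets, and the Postgres connection settings are assumptions):
const sql = require('mssql');
const { Client } = require('pg');

exports.handler = async (event) => {
  // config for SQL Server is assumed to be defined elsewhere;
  // the pg Client reads PGHOST/PGUSER/PGPASSWORD/PGDATABASE from the environment.
  const pool = await sql.connect(config);
  const pgClient = new Client();
  try {
    const queueRows = await pool.request().query('select top 10 * from queue');

    await pgClient.connect();
    // Hypothetical: fetch matching rows from Postgres by the ids returned from SQL Server.
    const ids = queueRows.recordset.map(row => row.id);
    const nqRows = await pgClient.query('select * from nq where id = any($1)', [ids]);

    return { queue: queueRows.recordset, nq: nqRows.rows };
  } finally {
    await pgClient.end();
    await sql.close();
  }
};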

Nodejs module export, wait for function to finish

I am developing an app using Node.js.
In my export_db.js I export the connection and a query function.
When I call the function, inside export_db.js the output really is a string filled with information, but in my main.js the output is undefined, as if the function hadn't finished and the code continued to run before the result came in.
How can I force it to wait?
File export_db.js
var con = mysql.createConnection({
  ....
});

con.connect(function (err) { if (err) throw err; });

con.CustomQuery = function (SQLQuery) {
  // ..DO stuff
  var stringoutput = "";
  con.query(SQLQuery, function (err, result, fields) {
    if (err) throw err;
    var arr = result;
    // Do stuff: transform result into a string
    stringoutput = result; // as a string
  });
  return stringoutput; // returned before the query callback has run
};

module.exports = con;
File import_db.js
var db = require('../db/export_db_connection');
//DO Stuff
Queryresult = db.CustomQuery(SQLQuery);
bot.reply(Queryresult) // Send the String to the bot to get a response message
//DO Stuffs
Since your code is fundamentally asynchronous in nature (you have to wait for the connection to be ready), it might be better to change your _db.js to export a factory function which returns a promise that resolves with an instance of the connection when it is available.
// _db.js
function connect() {
  return new Promise(function (resolve, reject) {
    var con = mysql.createConnection({
      //....
    });

    con.CustomQuery = function (SQLQuery) {
      return new Promise(function (resolve, reject) {
        con.query(SQLQuery, function (err, result, fields) {
          if (err) return reject(err);
          // var str = convert result to string here
          resolve(str);
        });
      });
    };

    con.connect(function (err) {
      if (err) return reject(err);
      resolve(con);
    });
  });
}

let conn = null;

function getConnection() {
  if (!conn) {
    conn = connect();
  }
  return conn;
}

module.exports = getConnection;
And then, when you want to use the connection:
var getConnection = require('/path/to/_db.js');

getConnection()
  .then(function (conn) {
    return conn.CustomQuery(sqlQuery);
  })
  .then(function (str) {
    // Query result is available here
    console.log(str);
  });
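With async/await the same usage reads as follows (a sketch; sqlQuery is whatever query string you pass in):
var getConnection = require('/path/to/_db.js');

(async function main() {
  try {
    const conn = await getConnection();
    const str = await conn.CustomQuery(sqlQuery);
    console.log(str); // query result is available here
  } catch (err) {
    console.error(err);
  }
})();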
You can also do this without Promises using callbacks
// _db.js
function connect(cb) {
  var con = mysql.createConnection({
    //....
  });

  con.CustomQuery = function (SQLQuery) {
    //..DO Stuff
    // return stringoutput="";
  };

  con.connect(function (err) {
    if (err) return cb(err);
    cb(null, con);
  });
}

let conn = null;

function getConnection(cb) {
  if (!conn) {
    return connect(function (err, con) {
      if (err) return cb(err);
      conn = con;
      cb(null, conn);
    });
  }
  cb(null, conn);
}

module.exports = getConnection;
And then, when you want to use the connection:
var getConnection = require('/path/to/_db.js');

getConnection(function (err, conn) {
  if (err) {
    // handle errors
  }
  QueryResult = conn.CustomQuery(SQLQuery);
});
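Note that in the callback variant CustomQuery itself still runs an asynchronous query, so it also needs to take a callback rather than return a value directly. A hedged sketch of how the stub inside connect() could be filled in, and how the caller would then use it:
con.CustomQuery = function (SQLQuery, cb) {
  con.query(SQLQuery, function (err, result, fields) {
    if (err) return cb(err);
    cb(null, JSON.stringify(result)); // convert the result rows to a string here
  });
};

// Usage inside the getConnection callback:
conn.CustomQuery(SQLQuery, function (err, str) {
  if (err) return console.error(err);
  bot.reply(str);
});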

Catching Uncaught Error in my async await function

I am trying to use the async/await keywords with Redis and Node.js. I can catch simple errors but I can't catch exceptions coming from the getKeys function. The following mocha test fails, and my catch block is also not called. I am using Node.js v9.3.0, bluebird v3.5.1 and redis 2.8.0.
const redis = require("redis");
const bluebird = require("bluebird");
const assert = require('assert');

bluebird.promisifyAll(redis.RedisClient.prototype);

class RedisManager {
  constructor(host) {
    this.client = redis.createClient({ 'host': host });
  }

  async getKeys(key) {
    let result = await this.client.hgetallAsync(key);
    return result;
  }

  async simple(key) {
    throw Error('Simple Error: ' + key);
  }
}
describe('Redis Manager Test:', function () {
  it('catches simple errors', function (done) {
    let manager = new RedisManager('BOGUSHOST');
    let key = 'testKey';
    manager.simple(key)
      .then(function (value) {
        console.log('Value: ' + value);
        done(value, null);
      })
      .catch(function (err) {
        if (err.message === 'Simple Error: ' + key) {
          done(null);
        } else {
          done(err);
        }
      });
  });

  it('catches Redis errors', function (done) {
    let manager = new RedisManager('BOGUSHOST');
    manager.getKeys('Some')
      .then(function (value) {
        console.log('Value: ' + value);
        done(value, null);
      })
      .catch(function (err) {
        console.log('Caught Error: ' + err);
        done(err, null);
      });
  });
});
You should use a try/catch block around the await to handle unhandled error rejections when using async/await:
async getKeys(key) {
  try {
    let result = await this.client.hgetallAsync(key);
    return result;
  } catch (err) {
    return err;
  }
}
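Equivalently, the caller can wrap the await itself in try/catch; a minimal sketch using the RedisManager from the question:
async function readKeys(manager, key) {
  try {
    return await manager.getKeys(key);
  } catch (err) {
    // Connection failures and command errors from the promisified client land here.
    console.error('Redis error:', err.message);
    return null;
  }
}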
