Multiple database connections in a single connection pool using Node.js

I am trying to create a database connection pool in Node.js using the generic-pool package. With a single database the pool works fine, but I want to use more than one database in a single pool. That is where I am facing an issue: generic-pool does not create a pool for both databases. Below is my code. I am using trireme-jdbc for the JDBC connections and generic-pool for connection pooling.
var Pool = require('C:/JCI/trireme-jdbc/node_modules/generic-pool').Pool;
var jdbc = require('C:/Program Files/nodejs/node_modules/trireme-jdbc');
var configOpenedge = require('C:/Program Files/nodejs/node_modules/trireme-jdbc/testconf/config-openedge.js');
var configPostgre = require('C:/Program Files/nodejs/node_modules/trireme-jdbc/testconf/config-postgre.js');
var pool = new Pool({
    name : 'Anil-JCI',
    create : function(callback) {
        var connOpenedge = new jdbc.Database({
            url : configOpenedge.url,
            properties : configOpenedge.properties,
        });
        var connPostgre = new jdbc.Database({
            url : configPostgre.url,
            properties : configPostgre.properties,
            /*
             * minConnections : 1, maxConnections : 2000, idleTimeout : 60
             */
        });
        callback(null, connOpenedge);
    },
    destroy : function(client) {
        client.end();
    },
    max : 10,
    // optional. if you set this, make sure to drain() (see step 3)
    min : 2,
    // specifies how long a resource can stay idle in pool before being removed
    idleTimeoutMillis : 30,
    // if true, logs via console.log - can also be a function
    log : true
});
console.log("connection created");
pool.acquire(function(err, clientOpenedge, clientPostgre) {
    if (err) {
        throw err;
    }
    else {
        clientOpenedge.execute(
            'select * from "po" where "po-num" = ? and "vend-num" = ? ',
            [ 4322452, 4301170 ], function(err, result, rows) {
                // pool.release(client);
                console.log(err);
                rows.forEach(function(row) {
                    console.log(row);
                });
                console.log("Openedge Data Printed...");
            });
        clientPostgre.execute("select * from employees ",
            [ ], function(err, result) {
                // pool.release(client);
                console.log("we are in postgre");
                console.log(result);
                console.log("Postgre Data Printed...");
            });
    }
});
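For what it's worth, generic-pool's create callback hands back exactly one resource per call, and acquire() passes that single resource to its callback, so one pool cannot hand out two separate clients. A minimal sketch of one possible workaround, assuming the two trireme-jdbc connections can simply be bundled into a single plain object (the openedge/postgre property names are illustrative, and destroy mirrors the end() call used above):

var pool = new Pool({
    name : 'Anil-JCI',
    create : function(callback) {
        // Create both connections and hand them back as one pooled resource.
        var resource = {
            openedge : new jdbc.Database({
                url : configOpenedge.url,
                properties : configOpenedge.properties
            }),
            postgre : new jdbc.Database({
                url : configPostgre.url,
                properties : configPostgre.properties
            })
        };
        callback(null, resource);
    },
    destroy : function(resource) {
        // Close both underlying connections when the resource is destroyed.
        resource.openedge.end();
        resource.postgre.end();
    },
    max : 10,
    min : 2,
    idleTimeoutMillis : 30,
    log : true
});

pool.acquire(function(err, client) {
    if (err) throw err;
    // client.openedge and client.postgre are the two database handles.
    client.openedge.execute('select * from "po" where "po-num" = ? and "vend-num" = ? ',
        [ 4322452, 4301170 ], function(err, result, rows) {
            console.log(err || rows);
            pool.release(client); // return the bundled resource to the pool
        });
});

An alternative is simply to create two pools, one per database, which keeps sizing and idle-timeout behaviour independent for each backend.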

Related

NodeJS Redis (ioredis) hgetall throwing undefined error

I am building a NodeJS app that connects to a Redis cluster using the ioredis module.
Overview of the issue:
A lookup query for a key that doesn't exist in the cache returns an empty object as expected
A lookup query for a key that DOES exist in the cache throws an error, but the error is undefined.
I created a controller to create and manage the connection:
const redis = require('ioredis');
const consoleLogger = require('../logger/logger.js').console;
const redisCtrl = {};
redisCtrl.defaultPrefix = 'lookup:';
// Local variable to store the full time connection to Redis for lookups
let _redisClient;
// Redis connection config
redisCtrl.redisConnect = {
port: process.env.REDIS_PORT || 6379,
host: process.env.REDIS_HOST,
password: process.env.REDIS_PASSWORD
};
// Redis config
redisCtrl.redisConfig = {
dnsLookup: (address, callback) => callback(null, address),
redisOptions: {
tls: process.env.REDIS_SSL === 'false' ? false : true,
password: process.env.REDIS_PASSWORD,
maxRetriesPerRequest: 1
}
};
// Retrieve the redis connection
redisCtrl.redisClient = async () => {
if (!_redisClient) {
_redisClient = await redisCtrl.getNewConnect();
}
return _redisClient;
}
redisCtrl.getNewConnect = async () => {
let makeConnection;
if (process.env.REDIS_CLUSTER === 'true') {
makeConnection = new redis.Cluster([redisCtrl.redisConnect], redisCtrl.redisConfig);
} else {
makeConnection = new redis(redisCtrl.redisConnect);
}
makeConnection.on("connect", function (err) {
if (!err) {
consoleLogger.info("REDIS connected");
} else {
consoleLogger.info("REDIS connection error");
consoleLogger.error(JSON.stringify(err));
}
});
makeConnection.on("error", function (error) {
consoleLogger.info("REDIS error");
consoleLogger.error(JSON.stringify(error));
throw new Error(error);
});
return makeConnection;
}
redisCtrl.closeInstance = (cb) => {
if (_redisClient) {
_redisClient.quit(cb);
}
}
module.exports = redisCtrl;
This works to establish the connection.
However, when attempting to get a result, an empty error is thrown from the hgetall method.
/**
 * Lookup asset by assetId in Redis cache
 * Return asset data object
 * @param {str} assetId
 */
assetsCtrl.lookupByAssetId = async (assetId) => {
// Prepend default cache prefix to lookup value
const lookupKey = `${redisPrefix || `lookup:`}${assetId}`;
let cachedAsset;
try {
cachedAsset = await assetsCtrl.redisClient.hgetall(lookupKey);
} catch (e) {
consoleLogger.error(`Lookup by assetId failed. Lookup key: ${lookupKey}`);
consoleLogger.error(e);
throw new Error(e);
}
return cachedAsset;
}
The error is thrown, but it is undefined: the "catch" block around the redisClient.hgetall(lookupKey) call is entered, yet the error itself carries no detail.
error: Lookup by assetId failed. Lookup key: lookup:test123456789
error: undefined {"command":{"name":"hget","args":["lookup:test123456789"]}}
Questions: How can I troubleshoot this issue? How can I see the details of the error that is being thrown?
As stated in the comments above, hgetall() was failing because the data type stored at the lookup key was not a hash.
Changing the call to get() fixed the issue.
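For troubleshooting cases like this, Redis reports the data type stored at any key, so one way to see why hgetall() is rejected is to check the key's type first and pick the matching read command. Also note that JSON.stringify on an Error object produces {} because message and stack are non-enumerable, which is part of why the logged error looks empty; logging e.message and e.stack shows the detail. A rough sketch, reusing redisCtrl and consoleLogger from the controller above (checkLookupKey is an illustrative helper name):

const checkLookupKey = async (lookupKey) => {
    const client = await redisCtrl.redisClient();

    // TYPE reports what is actually stored: 'string', 'hash', 'none' (missing key), etc.
    const kind = await client.type(lookupKey);
    consoleLogger.info(`Key ${lookupKey} holds type: ${kind}`);

    try {
        // Read with the command that matches the stored type.
        return kind === 'hash'
            ? await client.hgetall(lookupKey)   // object of field/value pairs
            : await client.get(lookupKey);      // plain string (or null if missing)
    } catch (e) {
        // Log message and stack explicitly; JSON.stringify(e) would print {}.
        consoleLogger.error(e.message);
        consoleLogger.error(e.stack);
        throw e;
    }
};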

How to capture ftp-client connect attempt errors

I use the npm package ftp-client to download files from an ftp server. I cannot determine how to capture possible errors when attempting to connect to the remote server. Documentation on the ftp-client package is very sparse.
Can anyone help?
My code is below. As it stands, any connection error (e.g. an invalid password) just crashes the application.
const ftpClient = require('ftp-client');
require('dotenv').config();
const dtf_login = process.env.dtf_login;
const dtf_psw = process.env.dtf_psw;
const dtf_host = process.env.dtf_host;
function doDirDownload(remoteDir, localDir, callback){
    var ftpOptions = {logging : 'none', overwrite : 'all'};
    var ftpConfig = {host : dtf_host,
                     port : 21,
                     user : dtf_login,
                     password : dtf_psw};
    var c = new ftpClient(ftpConfig, ftpOptions);
    c.connect((err) => {
        if(err){
            callback(err);
        } else {
            c.download(remoteDir, localDir,
                {overwrite: 'all'}, (result) => {
                    callback(result);
                });
        }
    });
}
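One way to surface connection failures, if ftp-client itself never reports them, is to use the lower-level ftp package (which ftp-client builds on) for the connection step and listen for its 'error' and 'ready' events; an invalid password then arrives as an error event instead of an uncaught exception. A sketch of that idea, under the assumption that switching packages for the connect phase is acceptable (connectWithErrorHandling is an illustrative helper name):

const Ftp = require('ftp'); // the lower-level package that ftp-client wraps

function connectWithErrorHandling(config, callback) {
    const conn = new Ftp();

    // Fired for connection problems: unreachable host, invalid credentials, timeouts.
    conn.on('error', (err) => callback(err));

    // Fired once the connection is established and authenticated.
    conn.on('ready', () => callback(null, conn));

    conn.connect(config); // { host, port, user, password }
}

connectWithErrorHandling({ host: dtf_host, port: 21, user: dtf_login, password: dtf_psw },
    (err, conn) => {
        if (err) {
            console.error('FTP connection failed:', err.message);
            return;
        }
        // ...list/download files with conn.list(), conn.get(), then close with conn.end()
        conn.end();
    });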

Update variable value in module

I'm a real newbie in Node.js, so please bear with me.
I'm trying to use an external file as a module so I can use it in other files. The project is bigger than this, but let's say my module is:
var mysql = require('mysql'); // required by mysql.createConnection below

var Tools = module.exports = {
result_arr: [],
object_data: {
times : [],
temps1 : [],
temps2 : [],
temps3 : [],
temps4 : [],
levels : [],
flows : []
},
getLastNRows: function (whereIsData, DB_info, table, NRows) {
if (whereIsData == "MySQL") {
function setValue (value) {
Tools.result_arr = value;
}
function dataArray2Object (array_data) {
Tools.object_data.times = array_data.map(row => row.timestamp);
Tools.object_data.temps1 = array_data.map(row => row.temp1);
Tools.object_data.temps2 = array_data.map(row => row.temp2);
Tools.object_data.temps3 = array_data.map(row => row.temp3);
Tools.object_data.temps4 = array_data.map(row => row.temp4);
Tools.object_data.levels = array_data.map(row => row.level_ice_bank);
Tools.object_data.flows = array_data.map(row => row.flow);
}
var queryString = "SELECT timestamp, temp1, temp2, temp3, temp4, level_ice_bank, flow FROM " +
table + " ORDER BY id DESC LIMIT " + NRows + ";";
var connnection = mysql.createConnection(DB_info);
connnection.connect(function(err) {
console.log("connected");
if (err) throw err;
});
connnection.query(queryString, function (err, rows) {
console.log("queried");
if (err) throw err;
setValue(rows);
dataArray2Object(Tools.result_arr);
console.log(Tools.result_arr);
console.log(Tools.object_data);
});
}
else {
console.log("Function only accepts data stored in MySQL.\n(u still have to improve...)");
return;
}
}
};
The variable object_data is supposed to be used in a main file. This way, whenever I call getLastNRows, I expect object_data to be updated by the operations in getLastNRows. The main file would be:
var tools = require('./tools');
var where2save = "MySQL";
var info_db = {
host : "127.0.0.1",
user : "root",
password: "xxxx",
database: "mydb",
port : 3306
};
var table = "tempdata";
var NRows = 4;
tools.getLastNRows(where2save, info_db, table, NRows);
console.log(tools.object_data);
What is observed is that, in fact, tools.object_data is not updated by getLastNRows in the main file, although console.log(Tools.object_data); from the tools.js (module) file logs the updated values. So my question is:
How can I make getLastNRows update tools.object_data (which is empty when created) in the main file?
Is getLastNRows asynchronous? Because it seems to me that that is the cause of the problem.
It is: main calls getLastNRows, which calls connection.query; the query runs asynchronously, and execution immediately continues to the console.log in main, where tools.object_data has not yet been updated.
Try:
getLastNRows: function (whereIsData, DB_info, table, NRows, cb) {
// ...
connnection.query(queryString, function (err, rows) {
console.log("queried");
if (err) throw err;
setValue(rows);
dataArray2Object(Tools.result_arr);
console.log(Tools.result_arr);
console.log(Tools.object_data);
cb()
});
// ...
}
// in main
tools.getLastNRows(where2save, info_db, table, NRows, function() {
console.log(tools.object_data);
});
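The same fix can also be expressed with a Promise instead of a callback, which keeps the call site flat. This is just an alternative sketch of the answer above, not the original module's code:

// In tools.js: resolve once the query has populated object_data.
getLastNRows: function (whereIsData, DB_info, table, NRows) {
    return new Promise(function (resolve, reject) {
        // ... same connection setup and queryString as above ...
        connnection.query(queryString, function (err, rows) {
            if (err) return reject(err);
            setValue(rows);
            dataArray2Object(Tools.result_arr);
            resolve(Tools.object_data);
        });
    });
}

// In main: the data is only available after the promise resolves.
tools.getLastNRows(where2save, info_db, table, NRows)
    .then(function (data) { console.log(data); })
    .catch(function (err) { console.error(err); });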

Pool getConnection undefined

The problem is that when I try to use the pool connection via a required module, I get the following error:
TypeError: sourCon.sourceDBConnection.getConnection is not a function
but if I use a normal connection it's fine.
The code is as follows:
File: ./db/source_db.js
var mysql = require('mysql'); // needed for mysql.createPool below

module.exports =
{
    sourceDBConnection: function () {
        return mysql.createPool({
            connectionLimit : 10,
            host            : 'localhost',
            user            : 'root',
            password        : '',
            database        : 'testdb'
        });
    }
};
File: app.js
app.get('/report-url', function (req, res) {
corporateModel.DetailReport(dateTime(),request);
})
File: ./models/corporate.js
var sourCon = require("../common_config/source_db");
module.exports =
{
    DetailReport: function (dateTime, request, Q) {
        var whereTripQueryAppend = "";
        sourCon.sourceDBConnection.getConnection(function(err, finConnection) {
            /* Get the last inserted ID */
            finConnection.query("SELECT student_id FROM studen LIMIT 1", function (error, lastResults, fields) {
                /* some process here */
            });
        });
    }
};
Do you know why it works for the normal connection but not for the pool connection? Also, if I define the connection in the same file it works fine, as follows:
File: ./models/corporate.js
var mysql = require('mysql');
var sourceDBConnection = mysql.createPool({
    connectionLimit : 10,
    host            : 'localhost',
    user            : 'root',
    password        : '',
    database        : 'testdb'
});
var sourCon = require("../common_config/source_db");
module.exports =
{
    DetailReport: function (dateTime, request, Q) {
        var whereTripQueryAppend = "";
        sourceDBConnection.getConnection(function(err, finConnection) {
            /* Get the last inserted ID */
            finConnection.query("SELECT student_id FROM studen LIMIT 1", function (error, lastResults, fields) {
                /* some process here */
            });
        });
    }
};
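For what it's worth, the error almost certainly comes from source_db.js exporting sourceDBConnection as a function: the function object has no getConnection method, only the pool it returns does. So either call it, sourCon.sourceDBConnection().getConnection(...), which creates a new pool on every call, or export the pool instance itself so it is created once. A sketch of the second option:

// File: ./db/source_db.js - export the pool itself, created once at require time
var mysql = require('mysql');

module.exports =
{
    sourceDBConnection: mysql.createPool({
        connectionLimit : 10,
        host            : 'localhost',
        user            : 'root',
        password        : '',
        database        : 'testdb'
    })
};

// File: ./models/corporate.js - the exported pool now has getConnection()
var sourCon = require("../common_config/source_db");

sourCon.sourceDBConnection.getConnection(function(err, finConnection) {
    if (err) throw err;
    finConnection.query("SELECT student_id FROM studen LIMIT 1", function (error, lastResults, fields) {
        finConnection.release(); // always return the connection to the pool
        /* some process here */
    });
});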

Mongoose Connection is not Restored when DB becomes accessible again

The problem is Mongoose loses its connection in the following scenario:
Node App is started, all seems fine (DB accessible, etc.)
Mongo server process (version 2.6.10) running locally is stopped, then started after 15 seconds, all seems fine (DB accessible, etc.)
Mongo process is stopped.
a request to Express is made, trying to go to DB (using Mongoose) - inaccessible as expected.
Mongo process is started.
The same request is made, DB is inaccessible even though Mongo is up.
If I restart the Node JS app, all works fine.
Why does a failing query (using Mongoose version 4.4.5) to Mongo prevent the connection from being restored when the DB process is up again?
Should we implement a retry mechanism to try and restore the connection until DB becomes accessible?
I've tried the configuration mentioned here but it didn't work.
Any help would be appreciated.
Below is sample code of our connection helper:
'use strict';
const _ = require('lodash');
const mongoose = require('mongoose');
const config = require('../config');
const logger = require('../logger');
const _instance = Symbol('instance');
const _enforcer = Symbol('enforcer');
const _members = Symbol('members');
/**
 * Singleton implementation of ConnectionHelper module
 * This module is responsible for reusing common db connections in the application
 * @type {ConnectionsHelper}
 */
module.exports = class ConnectionsHelper {
constructor(enforcer) {
if (enforcer !== _enforcer) {
throw new Error('invalid singleton instantiation');
}
initConnectionFromConfig.call(this);
}
/**
 * The single instance
 * @returns {*}
 */
static get instance() {
if (!this[_instance]) {
this[_instance] = new ConnectionsHelper(_enforcer);
}
return this[_instance];
}
/**
 * method retrieves connection by its name
 * @param connectionKey
 * @returns {*}
 */
getConnection(connectionKey) {
return this[_members][connectionKey];
}
/**
 * method disconnects all underlying connections
 * @returns {void|MongooseThenable}
 */
closeConnections() {
return mongoose.disconnect();
}
};
function initConnectionFromConfig() {
this[_members] = {};
const dbsConnections = config.get('dbsConnections');
_.forEach(dbsConnections, (connection, connectionName) => {
const protocol = connection.protocol;
const repSetPath = connection.mongoPath.join(',');
const options = connection.options;
options.server = {auto_reconnect: true, socketOptions: {keepAlive: 1, connectTimeoutMS: 30000 }};
options.replset = {socketOptions: {keepAlive: 1, connectTimeoutMS: 30000 }};
this[_members][connectionName] = mongoose.createConnection(protocol + repSetPath, options);
addConnectionEvents.call(this, connectionName);
});
}
function initConnectionIfNeeded() {
this[_members] = {};
const dbsConnections = config.get('dbsConnections');
_.forEach(dbsConnections, (connection, connectionName) => {
const protocol = connection.protocol;
const repSetPath = connection.mongoPath.join(',');
const options = connection.options;
//options.server = {auto_reconnect: true, socketOptions: {keepAlive: 1, connectTimeoutMS: 30000 }};
//options.replset = {socketOptions: {keepAlive: 1, connectTimeoutMS: 30000 }};
this[_members][connectionName] = mongoose.createConnection(protocol + repSetPath, options);
addConnectionEvents.call(this, connectionName);
});
}
function addConnectionEvents(connectionName) {
const connection = this[_members][connectionName];
connection.on('connected', () => {
logger.debug(`Mongoose connection open`);
});
connection.on('error', (err) => {
logger.debug(`Mongoose connection error: ${err}`);
});
connection.on('exception', (err) => {
logger.debug(`Mongoose connection exception: ${err}`);
});
connection.on('disconnected', () => {
logger.debug(`Mongoose connection disconnected`);
});
connection.on('reconnected', () => {
logger.debug(`Mongoose connection reconnected`);
});
connection.on('open', () => {
logger.debug(`Mongoose connection is open`);
});
// If the Node process ends, close the Mongoose connection
process.on('SIGINT', () => {
connection.close(() => {
logger.debug(`Mongoose default connection disconnected through app termination`);
process.exit(0);
});
});
}
I had the same problem. What I did was to reconnect mongoose in the disconnected handler:
connection.on('disconnected', () => {
logger.debug(`Mongoose connection disconnected`);
mongoose.connect(path);
});
I also use setTimeout() so that it retries every 3 seconds:
mongoose.connection.on('disconnected', function () {
console.log('Mongoose default connection disconnected');
console.log("Trying to reconnect");
function connect(){
mongoose.connect(dbPath, function(err){
if(err) console.log("Error while trying to reconnect to MongoDB");
});
}
setTimeout(connect, 3000);
});
And I also use ForeverJS.
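If the reconnect-on-'disconnected' approach above is used, it can also help to guard against scheduling several overlapping reconnect attempts when multiple disconnect events fire in a row. A small variation of the snippet above (dbPath and the 3-second delay are taken from it; the reconnecting flag is an addition):

var reconnecting = false;

mongoose.connection.on('disconnected', function () {
    console.log('Mongoose default connection disconnected');
    if (reconnecting) return; // a reconnect attempt is already scheduled or in flight
    reconnecting = true;
    setTimeout(function () {
        mongoose.connect(dbPath, function (err) {
            reconnecting = false;
            if (err) console.log('Error while trying to reconnect to MongoDB');
        });
    }, 3000);
});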
