How to handle connection problems in node-mongodb-native - Node.js

How do I stop queries from buffering and instead throw error when connection doesn't exist between application and database?
I'm using node-mongodb-native driver.
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://someremotedb:27017/test';
var db = null,
    xcollection = null;
As suggested in this answer I am opening a connection and keeping it for later requests.
function connect() {
    MongoClient.connect(url, function(err, _db) {
        db = _db;
        xcollection = db.collection('xcollection');
        db.on('close', function() {
            console.log("connection close mongoDB");
            connection_retry();
        });
        db.on('error', function(err) {
            console.log("connection err mongoDB ", err);
            connection_retry();
            db.close();
        });
    });
}
and I use it like this.
module.exports.xcollection_find = function (_x, _cb) {
    try {
        xcollection.findOne({x: _x}, { _id: 0 }, function(err, doc) {
            if (err) return _cb(err, null);
            if (doc === null) return _cb(null, {success: false, data: null});
            return _cb(null, {success: true, data: doc});
        });
    }
    catch(e) {
        return _cb(e, null);
    }
}
I call connect() and then everything works as expected.
Except when I interrupt the internet connection to the DB (i.e. disconnect the internet on my PC while the app is running), I don't see any errors; all requests time out with this message:
[MongoError: server c99.kahana.mongohq.com:27017 received an error {"name":"MongoError","message":"read ETIMEDOUT"}]
But it takes around 5 to 10 minutes before the timeout occurs.
If the close or error event were emitted, my reconnect function would fire, which is:
function connection_retry() {
    console.log("connection retry mongoDB");
    setTimeout(function() {
        connect();
    }, 500);
}
But it never does.
If the network connection is restored before the timeout occurs (i.e. within 5-10 minutes), the queries are executed and the results are received.
How do I detect that the connection is down in the xcollection_find method?
Why are the on close or on error callbacks not executed?
update:
var options = {
    db: {
        bufferMaxEntries: 2
    },
    server: {
        socketOptions: {
            keepAlive: true,
            connectTimeoutMS: 2000
        },
        auto_reconnect: true
    }
}
MongoClient.connect(url, options, function(err, _db) {
Setting bufferMaxEntries to 2 still doesn't solve the problem; the requests are still buffered and executed on reconnect.

You do MongoClient.connect once when your app boots up and reuse
the db object. It's not a singleton connection pool; each .connect
creates a new connection pool. So open it once and reuse it across all
requests.
https://groups.google.com/forum/#!msg/node-mongodb-native/mSGnnuG8C1o/Hiaqvdu1bWoJ
Node.js is single threaded; you should not open and close the connection on every request.
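If the goal is to fail fast instead of buffering, the driver can be told not to queue operations while disconnected. A minimal sketch, assuming the 2.x-era driver and the options format already used in the question (bufferMaxEntries: 0 disables buffering, so queries made while the connection is down error immediately rather than waiting for a reconnect):
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://someremotedb:27017/test';

var options = {
    // 0 = do not buffer operations; fail them immediately while disconnected
    db: { bufferMaxEntries: 0 },
    server: {
        socketOptions: { keepAlive: true, connectTimeoutMS: 2000 },
        auto_reconnect: true
    }
};

MongoClient.connect(url, options, function(err, db) {
    if (err) return console.error("initial connect failed", err);
    // reuse db and its collections across all requests, as above
    var xcollection = db.collection('xcollection');
});
With this in place, xcollection_find receives an error in its callback as soon as the connection is lost, instead of the query sitting in the buffer until the socket times out.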

Related

How to deal with connection lost to DB in NodeJS Express App

I am developing an express application which connects to a MongoDB database. It is possible that my express server loses the connection to the database, but how should my express server react to this scenario? I prepared the following code where I check for a lost connection and then kill the app.
const registerCustomer = async (req, res) => {
    let validRegistrationCode;
    try {
        validRegistrationCode = await CustomerRegistrationCode.findOne({ code: req.body.code });
    } catch (error) {
        if (error instanceof MongoNetworkError || error instanceof MongooseServerSelectionError) {
            console.error('Mongo | Mongoose Network Error occurred');
        }
        process.exit(1);
    }
    return res.json({ obj: validRegistrationCode });
}
For every access to the DB, I have to check for a connection error and I think this makes the code ugly. How can this be improved?
Connection failures to MongoDB usually don't happen, but they can occur when the network is lost or when your connection path or syntax is incorrect. What you should do is check only once, when you set up mongoose to connect to your database; every time you save your code, nodemon will restart the app and you can see whether the connection succeeded.
// connect model with database
const mongoose = require('mongoose');

async function connect() {
    try {
        await mongoose.connect('mongodb://localhost:27017/collections_clothes', {
            // useNewUrlParser: true,
            // useUnifiedTopology: true,
            // useCreateIndex: true,
        });
        console.log("Access Database Success!");
    } catch (error) {
        console.log("Access FAIL!");
    }
}

module.exports = { connect };
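An alternative to wrapping every query in try/catch is to handle connection loss centrally on the shared mongoose connection object. The event names below are part of mongoose's public API; what you do in the handlers (log, alert, exit) is up to the application. A rough sketch:
const mongoose = require('mongoose');

// Listen once on the shared connection instead of checking in every route handler.
mongoose.connection.on('error', err => {
    console.error('MongoDB connection error:', err);
});

mongoose.connection.on('disconnected', () => {
    console.warn('MongoDB disconnected; mongoose will attempt to reconnect');
});

mongoose.connection.on('reconnected', () => {
    console.log('MongoDB reconnected');
});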

MongoClient doesn't trigger callback in case of error

MongoClient.connect(this._config.connectionString, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
}, (err, db) => {
    if (err) {
        console.log(`error on connect : ${err}`);
        return console.dir(err);
    }
    console.log('We are connected');
    this._client = db;
});
This piece helps me connect to MongoDB running locally, and works fine when the DB server is up and running. But if I stop the DB server and then try to execute this, it doesn't call the callback. Is there a way I can get a callback when the DB is down or not reachable?
The code below also doesn't get a callback in case of future DB errors:
this._client.on('error', args => this.onError(args));
Because of this issue of not getting callbacks on severe errors, my server goes down without showing any error.
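With the unified topology, the connect callback is only invoked once server selection gives up, which defaults to 30 seconds, so it can look as if no callback ever fires. One option (a sketch, assuming a driver version that supports serverSelectionTimeoutMS) is to shorten that window so the error callback arrives quickly when the DB is unreachable:
const { MongoClient } = require('mongodb');

MongoClient.connect('mongodb://localhost:27017', {
    useNewUrlParser: true,
    useUnifiedTopology: true,
    // Give up server selection after 5s so the callback fires with an error
    // instead of appearing to hang when the DB is down or not reachable.
    serverSelectionTimeoutMS: 5000,
}, (err, client) => {
    if (err) return console.error('connect failed:', err.message);
    console.log('We are connected');
});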

How to avoid a broken connection with ORACLEDB? Nodejs

I have this database connection. Inside the function where the comment is located, there is a data update cycle for a REST API. The data is updated, but while the data in the Oracle database is being updated, the connection may fail, and after that all subsequent updates get undefined. How can I connect to the database properly so that there are no failures?
oracledb.getConnection(
    {
        user: db.user,
        password: db.password,
        connectString: db.connectString
    },
    connExecute
);

function connExecute(err, connection) {
    if (err) {
        console.error(err.message);
        return;
    }
    sql = `SELECT * FROM db.test`;
    connection.execute(sql, {}, { outFormat: oracledb.OBJECT },
        function (err, db) {
            if (err) {
                console.error(err.message);
                connRelease(connection);
                return;
            }
            // data update loop
            connRelease(connection);
        });
}

function connRelease(connection) {
    connection.close(
        function (err) {
            if (err) {
                console.error(err.message);
            }
        });
}
You should be using a connection pool. Connection pools have built-in logic to detect connections with issues and create new connections transparently. See this series on creating a REST API for more details: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Keep in mind that issues can still happen, so you have to handle errors as needed for your application.
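A rough sketch of the pooled approach with node-oracledb, reusing the db config object from the question (the pool sizes are placeholder values; createPool, getConnection, execute and close are the library's standard API):
const oracledb = require('oracledb');

async function init() {
    // The pool transparently replaces dead connections with new ones.
    await oracledb.createPool({
        user: db.user,
        password: db.password,
        connectString: db.connectString,
        poolMin: 1,
        poolMax: 4,
    });
}

async function getRows() {
    let connection;
    try {
        // Borrow a connection from the default pool for each request.
        connection = await oracledb.getConnection();
        const result = await connection.execute(
            `SELECT * FROM db.test`, {}, { outFormat: oracledb.OBJECT }
        );
        return result.rows;
    } finally {
        // Return the connection to the pool rather than destroying it.
        if (connection) await connection.close();
    }
}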
Mostly you add a listener on the connection object and, on disconnection or failure, create the connection again. With minor changes you can adopt this approach: use listeners to check whether the connection is available and, if not, connect again. There can be several reasons for a connection to close, so it is better to handle exceptions, check whether you are still connected, and reconnect in case of error.
Or you can try this npm package, which will do the reconnection for you:
https://www.npmjs.com/package/oracledb-autoreconnect
Ping me if you need clarification.
var dbConfig = {
    host: '----',
    user: '----',
    password: '----',
    database: '----',
    port: ----
};
var connection;

function handleDisconnect() {
    // Recreate the connection, since the old one cannot be reused.
    connection = <obj>.getConnection(dbConfig);

    connection.connect(function onConnect(err) {
        // The server is either down or restarting (takes a while sometimes).
        if (err) {
            console.log('error when connecting to db:', err);
            // We introduce a delay before attempting to reconnect, to avoid a
            // hot loop, and to allow our node script to process asynchronous
            // requests in the meantime.
            // If you're also serving http, display a 503 error.
            setTimeout(handleDisconnect, 10000);
        }
    });

    connection.on('error', function onError(err) {
        console.log('db error', err);
        if (err.code == 'PROTOCOL_CONNECTION_LOST') {
            // Connection lost due to either a server restart, or a connection
            // idle timeout (the wait_timeout server variable configures this).
            handleDisconnect();
        } else {
            throw err;
        }
    });
}

handleDisconnect();

In spite of socket-pouch connection being established, sync is not functioning

The project in question is using 11 PouchDBs. To ensure syncing, all 11 DBs were instantiated (with sync) when the Angular 5 application was loaded/bootstrapped. Since sync did not function (due to the limitations set by browsers) we moved towards socket-pouch as a solution. We disabled sync on all DBs and incorporated socket-pouch sync to only one DB. The socketPouchServer runs on localhost:5000 & the CouchDB is hosted on DigitalOcean.
On running the system,
the following logs are observed in the browser. As you can see, an "aborting" error is being logged.
https://user-images.githubusercontent.com/26055473/38247973-8eef489a-3764-11e8-8411-6b5b20436d19.png
The code for the same is:
import PouchDB from 'pouchdb';
import PouchDBFind from 'pouchdb-find';
import SocketPouchClient from 'socket-pouch/client';

PouchDB.plugin(PouchDBFind);
PouchDB.adapter('socket', SocketPouchClient);
PouchDB.debug.enable('pouchdb:socket:*');

this.dailyMovementDB = new PouchDB(
    `${username}_${environment.REQUIRED_DB_VERSION_NUMBER}_daily-movement`,
    { auto_compaction: true }
);
this.dailyMovementDBRemote = new PouchDB({
    adapter: 'socket',
    name: `${username}_${environment.REQUIRED_DB_VERSION_NUMBER}_daily-movement`,
    url: `${environment.REMOTE_COUCH_DB_BASE_URL}`
});
var syncHandler = this.dailyMovementDB.replicate.to(this.dailyMovementDBRemote)
    .on('change', function (change) {
        // yo, something changed!
        console.log('yo, something changed', change);
        instance.autosaveMessageService.syncingProcessEndedSubject.next(false);
    }).on('paused', function (info) {
        // replication was paused, usually because of a lost connection
        console.log('replication was paused, usually because of a lost connection', info);
        instance.autosaveMessageService.syncingProcessEndedSubject.next(true);
    }).on('active', function (info) {
        // replication was resumed
        console.log('replication was resumed', info);
        instance.autosaveMessageService.syncingProcessStartedSubject.next();
    }).on('denied', function (info) {
        // a document failed to replicate (e.g. due to permissions)
        console.log('denied', info);
        instance.autosaveMessageService.syncingProcessEndedSubject.next(false);
    }).on('complete', function (info) {
        // handle complete
        console.log('handle complete', info);
        instance.autosaveMessageService.syncingProcessEndedSubject.next(false);
    }).on('error', function (err) {
        // totally unhandled error (shouldn't happen)
        console.log(err);
        // instance.createDailyMovementPouchDBs(username);
    });
And the following logs appear on localhost:5000 (socketPouchServer)
https://user-images.githubusercontent.com/26055473/38248011-ac49b9ca-3764-11e8-8501-74576c8c1e1f.png
The following is the code for the socketPouchServer:
var socketPouchServer = require('socket-pouch/server');
const PouchDB = require('pouchdb');

socketPouchServer.listen(5000, {
    remoteUrl: 'http://remoteurl:5984',
}, () => {
    console.log('Hi');
});
Please guide how to resolve this issue.

How do I ignore redis if it is not available?

I want my application (let's say a simple node file for now) to work as-is even if redis is not available. I'm not able to do it the correct way. This is what I've tried.
var redis = require('redis');
var redisClient = null;

var getRedisClient = function() {
    if (redisClient) {
        return redisClient;
    }
    try {
        redisClient = redis.createClient({connect_timeout: 5000, max_attempts: 1});
        redisClient.on("error", function(err) {
            console.error("Error connecting to redis", err);
            redisClient = null;
        });
        return redisClient;
    } catch(ex) {
        console.log("error initialising redis client " + ex);
        return null;
    }
};

try {
    var client = getRedisClient();
    console.log("done!");
} catch (ex) {
    console.log("Exception");
}
However, with this code my application exits if redis is not available (it shouldn't, because I've not called process.exit() anywhere).
How can I solve this?
Checking for Successful Connection on Start
Using a promise, you could guarantee that at least initially, you were able to connect to redis without error within a specified time period:
const redis = require('redis');
const Promise = require('bluebird');

function getRedisClient(timeoutMs) {
    return new Promise((resolve, reject) => {
        const redisClient = redis.createClient();
        const timer = setTimeout(() => reject('timeout'), timeoutMs);
        redisClient.on("ready", () => {
            clearTimeout(timer);
            resolve(redisClient);
        });
        redisClient.on("error", (err) => {
            clearTimeout(timer);
            reject(err);
        });
    });
};

const redisReadyTimeoutMs = 10000;

getRedisClient(redisReadyTimeoutMs)
    .then(redisClient => {
        // the client has connected to redis successfully
        return doSomethingUseful();
    }, error => {
        console.log("Unable to connect to redis", error);
    });
You Need Proper Error Handling
The redis client being non-null does NOT guarantee using it won't throw an error.
You could experience infrastructure misfortune, e.g. a crashed redis process, out of memory, or the network being down.
A bug in your code could cause an error, e.g. invalid or missing arguments to a redis command.
You should be handling redis client errors as a matter of course.
DON'T null the Redis Client on Error
It won't give you much but it will force you to check for null every time you try and use it.
The redis client also has inbuilt reconnect and retry mechanisms that you'll miss out on if you null it after the first error. See the redis package docs, look for retry_strategy.
DO wrap your redis client code with try...catch, or use .catch in your promise chain.
DO Make use of a retry_strategy.
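For illustration, a retry_strategy sketch in the style of the node_redis v2/v3 API (the thresholds below are arbitrary example values):
const redis = require('redis');

const redisClient = redis.createClient({
    retry_strategy: function (options) {
        if (options.error && options.error.code === 'ECONNREFUSED') {
            console.error('Redis refused the connection');
        }
        if (options.total_retry_time > 60 * 1000) {
            // Give up after a minute of retrying; pending commands get this error.
            return new Error('Retry time exhausted');
        }
        // Back off a little more on each attempt, capped at 3 seconds.
        return Math.min(options.attempt * 100, 3000);
    }
});

// Still handle errors so a refused connection doesn't crash the process.
redisClient.on('error', err => console.error('Redis error', err));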
