MongoClient throws MongoError: server instance pool was destroyed - node.js

I saw these posts on SO describing this error. Most of them were caused by JavaScript being asynchronous and mongoClient.close() being called outside of the callback. That's not my case, but I don't know what else could be the reason.
const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, {
    useNewUrlParser: true
});

module.exports = class Mongo {
    insertOne(article) {
        mongoClient.connect((err, client) => {
            const db = client.db('grabber');
            db.collection("zr").insertOne(article, (err, res) => {
                if (err) throw err;
                mongoClient.close();
            });
        });
    };
}

I observed that you call mongoClient.connect() in the insertOne() method, and also call mongoClient.close() within that same method, with mongoClient being a global variable.
My hunch is that either:
There's another method that calls mongoClient that was closed by this method, or
You called insertOne(article) twice
I can confirm that the second reason is the most likely one. Here's the code I tried:
const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, {
    useNewUrlParser: true
});

class Mongo {
    insertOne(article) {
        mongoClient.connect((err, client) => {
            const db = client.db('grabber');
            db.collection("zr").insertOne(article, (err, res) => {
                if (err) throw err;
                mongoClient.close();
            });
        });
    };
};

x = new Mongo()
setTimeout(function() { x.insertOne({'a': 1}); }, 1000);
setTimeout(function() { x.insertOne({'a': 2}); }, 2000);
The two setTimeout calls were there to ensure that the two insertOne() calls happen one after another. Result:
MongoError: server instance pool was destroyed
The way your code is currently structured, the node driver creates a new connection pool every time insertOne() is called. This is not optimal, and prevents the node driver from utilizing connection pooling.
Instead of calling mongoClient.connect() inside insertOne(), call it globally outside of class Mongo. Pass the global connection object (the returned object from mongoClient.connect()) instead of the mongoClient object itself to your insertOne() method.
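For illustration, here is a minimal sketch of that structure (same database and collection names as in the question; error handling omitted). It is a small variation on the advice above: instead of passing the connection into insertOne(), it keeps the connection promise at module level. In recent 3.x drivers, connect() without a callback returns a promise that resolves to the connected client:

const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, { useNewUrlParser: true });

// Connect once; every insert reuses the same connection pool.
const clientReady = mongoClient.connect();

class Mongo {
    insertOne(article) {
        return clientReady.then(client =>
            client.db('grabber').collection('zr').insertOne(article)
        );
    }
}

module.exports = new Mongo();
// Call mongoClient.close() only when the whole application shuts down.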

Related

Socket already opened issue when using Redis cache along with Node.js

I am facing, "Error: Socket already opened issue" when I am using Redis along with my node.js project.
I am trying to cache database results into Redis cache.. When Redis key is not empty, I will pick records from Redis Key. When its empty, I will pick from DB and set to Redis Key.
Here is my code:
const { response } = require('express');
var express = require('express');
var mysql = require('mysql');
const redis = require('redis');
const client = redis.createClient();

function GetLatestPosts() {
    return new Promise(async function(resolve, reject) {
        await client.connect();
        const value = await client.get('indexitems');
        if (value != null) {
            await client.disconnect();
            resolve(JSON.parse(value));
        }
        else {
            var PostsList;
            // mysqldb is assumed to be a mysql connection pool created elsewhere (not shown in this snippet)
            mysqldb.getConnection(function (err, connection) {
                var sql = "CALL PRC_GetPostsList()";
                connection.query(sql, async function (err, data, fields) {
                    if (err) throw err;
                    PostsList = data[0];
                    await client.set('indexitems', JSON.stringify(PostsList));
                    await client.expire('indexitems', 86400);
                    await client.disconnect();
                    resolve(PostsList);
                });
            });
        }
    })
}
I am facing "Error: Socket already opened issue" randomly. Some times it works without any issue. Some times it shows Error: Socket already opened.
Please help me to resolve this issue. Thanks.
Here is my complete error:
Error: Socket already opened
RedisSocket.connect (/home/ubuntu/Projects/Site/Web/node_modules/@node-redis/client/dist/lib/client/socket.js:48:19)
Commander.connect (/home/ubuntu/Projects/Site/Web/node_modules/@node-redis/client/dist/lib/client/index.js:156:70)
/home/ubuntu/Projects/Site/Web/routes/index.js:224:22
new Promise (<anonymous>)
GetPostItems (/home/ubuntu/Projects/Site/Web/routes/index.js:223:12)
/home/ubuntu/Projects/Site/Web/routes/index.js:23:29
Layer.handle [as handle_request] (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/layer.js:95:5)
next (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/route.js:137:13)
Route.dispatch (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/route.js:112:3)
Layer.handle [as handle_request] (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/layer.js:95:5)
The problem occurs because client.connect() is called while the redis client is already connected.
Whenever client.get('indexitems') returns a value, the connection is correctly closed by await client.disconnect();
However, if there is no value, an asynchronous call to MySQL is made, and the disconnection only happens in the callback of that request.
Because this MySQL call happens asynchronously, the function GetLatestPosts may be executed again before the redis connection is closed, so client.connect() is called a second time, provoking the error.
Solution
The connection to the redis client should be opened only once when the server starts, and kept open.
This reduces the overhead of opening a new connection at each request and then closing it.
The adapted code might then look like this:
const { response } = require('express');
var express = require('express');
var mysql = require('mysql');
const redis = require('redis');
const client = redis.createClient();

async function start() {
    await client.connect();

    function GetLatestPosts() {
        return new Promise(async function(resolve, reject) {
            const value = await client.get('indexitems');
            if (value != null) {
                resolve(JSON.parse(value));
            }
            else {
                var PostsList;
                mysqldb.getConnection(function (err, connection) {
                    var sql = "CALL PRC_GetPostsList()";
                    connection.query(sql, async function (err, data, fields) {
                        if (err) throw err;
                        PostsList = data[0];
                        await client.set('indexitems', JSON.stringify(PostsList));
                        await client.expire('indexitems', 86400);
                        resolve(PostsList);
                    });
                });
            }
        })
    }
}
start()
Extra remarks:
A graceful shutdown of the server might also be implemented, in order to close the connections to the DB clients in a clean way (see the sketch after these remarks).
The same goes for the mysql connection: it can be opened only once at server startup.
You might prefer to call client.quit() rather than client.disconnect(), in order to ensure that all pending commands get executed, as documented.
The ioredis package (npm install ioredis) works well for serverless applications, as an alternative to the native package.
https://www.npmjs.com/package/ioredis
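Following up on the graceful-shutdown remark, here is a minimal sketch. It assumes the redis client from the snippets above, and that mysqldb is the mysql pool referenced (but not shown) in the question:

process.on('SIGTERM', async () => {
    try {
        await client.quit(); // flushes pending redis commands, then closes the socket
        mysqldb.end(() => {  // mysqldb: assumed mysql pool, not defined in the snippets above
            process.exit(0);
        });
    } catch (err) {
        console.error('Error during shutdown', err);
        process.exit(1);
    }
});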

difference between call back function and normal function using node js

I implemented a callback function in node.js, but I have a doubt about callback functions. I tried two functions in node.js: one callback function and one normal function. Both give the same result when I run them. Can anyone explain my code?
callback_function.js
const MongoClient = require('mongodb').MongoClient;
var ObjectId = require('mongodb').ObjectID
// Connection URL
var db = " "

MongoClient.connect('mongodb://localhost:27017', (err, client) => {
    // Client returned
    db = client.db('olc_prod_db');

    gener(function(id)
    {
        db.collection('Ecommerce').find({ _id: new ObjectId(id) }, function(err, result)
        {
            console.log("hello")
        })
    })

    function gener(callback)
    {
        db.collection('Ecommerce').find({}).toArray(function(err, result)
        {
            console.log("hai")
        })
        callback("5ccac2fd247af0218cfca5dd")
    }
});
normal_function.js
const MongoClient = require('mongodb').MongoClient;
var ObjectId = require('mongodb').ObjectID
// Connection URL
var db = " "

MongoClient.connect('mongodb://localhost:27017', (err, client) => {
    // Client returned
    db = client.db('olc_prod_db');

    gener()

    function data()
    {
        console.log("hello")
    }

    function gener()
    {
        db.collection('Ecommerce').find({}).toArray(function(err, result)
        {
            console.log("hai")
        })
        data()
    }
});
Both show the same result: hello and hai.
If you are calling the same function, the result is the same.
That's not a proper callback.
Callback is an asynchronous equivalent for a function. A callback
function is called at the completion of a given task. Node makes heavy
use of callbacks. All the APIs of Node are written in such a way that
they support callbacks.
In your case you are executing things synchronously.
You are only calling a function, using its reference as a parameter of another function.
Example1
function gener(callback)
{
    console.log("hai")
    callback("5ccac2fd247af0218cfca5dd")
}

gener(function(id)
{
    console.log("hello")
})
Example2
gener()

function data()
{
    console.log("hello")
}

function gener()
{
    console.log("hai")
    data()
}
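To see the difference, hand the callback to something genuinely asynchronous. A small sketch (plain Node, no database needed):

// Synchronous: gener calls the callback immediately, so "hello" prints right after "hai".
function generSync(callback) {
    console.log("hai");
    callback("5ccac2fd247af0218cfca5dd");
}

// Asynchronous: the callback runs later, once the timer (or an I/O operation, like a MongoDB query) completes.
function generAsync(callback) {
    console.log("hai");
    setTimeout(() => callback("5ccac2fd247af0218cfca5dd"), 1000);
}

generSync(function(id) { console.log("hello (sync)"); });
generAsync(function(id) { console.log("hello (async, about one second later)"); });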

How to use async/await with mongoose

In node.js I had code like the following:
mongoose.connect(dbURI, dbOptions)
.then(() => {
console.log("ok");
},
err => {
console.log('error: '+ err)
}
);
Now I want to do it with async/await syntax. So I could start with var mcResult = await mongoose.connect(dbURI, dbOptions); AFAIK it will wait for the operation until it ends with some result (much like calling the C function read() or fread() in synchronous mode).
But what should I write then? What gets returned to the mcResult variable, and how do I check for an error or success? Basically I want a similar snippet, but written with proper async/await syntax.
Also I wonder, because I have auto reconnect among dbOptions:
dbOptions: {
    autoReconnect: true,
    reconnectTries: 999999999,
    reconnectInterval: 3000
}
Would it "stuck" on await forever, in case if database connection is unavailble? I hope you can give me a clue on what would happen and how that would work.
Basically I want a similar snippet, but written with proper async/await syntax.
(async () => {
    try {
        await mongoose.connect(dbURI, dbOptions)
    } catch (err) {
        console.log('error: ' + err)
    }
})()
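As for what lands in mcResult: in current Mongoose versions, mongoose.connect() returns a promise that resolves to the Mongoose instance itself, so "success" is simply the await completing without throwing, and failure surfaces as the error caught above. A small sketch (dbURI and dbOptions as in the question):

const mongoose = require('mongoose');

async function connect() {
    try {
        // Resolves to the mongoose instance; a connection failure rejects and is caught below.
        const mcResult = await mongoose.connect(dbURI, dbOptions);
        console.log('ok, readyState =', mcResult.connection.readyState); // 1 means connected
    } catch (err) {
        console.log('error: ' + err);
    }
}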
Please try this. The code below covers the basics of DB connectivity and a query:
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
let url = 'mongodb://localhost:27017/test';

const usersSchema = new Schema({
    any: {}
}, {
    strict: false
});

const Users = mongoose.model('users', usersSchema, 'users');
/** We've created a schema, as in mongoose you need schemas for your collections to do operations on them */

const dbConnect = async () => {
    let db = null;
    try {
        /** In real-time code you'll split the DB connection (into another file) away from DB calls */
        await mongoose.connect(url, { useNewUrlParser: true }); // await makes the process wait until this step is done or errors out.
        db = mongoose.connection;
        let dbResp = await Users.find({}).lean(); /** Gets all documents out of the users collection.
            Using .lean() to convert MongoDB documents to raw JS objects for further access. */
        db.close(); // Needs to close the connection. In general you don't close & re-create often, but it's needed for test scripts - you might use connection pooling in real-time code.
        return dbResp;
    } catch (err) {
        (db) && db.close(); /** Needs to close the connection -
            only if mongoose.connect() succeeded & something failed after it, as the db connection is established by then. */
        console.log('Error at dbConnect ::', err)
        throw err;
    }
}

dbConnect().then(res => console.log('Printing at callee ::', res)).catch(err => console.log('Err at Call ::', err));
As we're talking about async/await, a few things I wanted to mention: await definitely needs its enclosing function to be declared as async, otherwise it throws an error. And it's recommended to wrap async/await code inside a try/catch block.
const connectDb = async () => {
    await mongoose.connect(dbUri, dbOptions).then(
        () => {
            console.info(`Connected to database`)
        },
        error => {
            console.error(`Connection error: ${error.stack}`)
            process.exit(1)
        }
    )
}
connectDb().catch(error => console.error(error))
Let's assume the use of then() is prohibited; you can resort to this...
const connectDb = async () => {
    try {
        await mongoose.connect(dbConfig.url, dbConfigOptions)
        console.info(`Connected to database on Worker process: ${process.pid}`)
    } catch (error) {
        console.error(`Connection error: ${error.stack} on Worker process: ${process.pid}`)
        process.exit(1)
    }
}
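Regarding the worry about the await getting "stuck" forever when the database is unreachable: with recent Mongoose/driver versions, connection attempts are bounded by a server selection timeout, so the await eventually rejects instead of hanging. The timeout can be tuned; a sketch (option name from the current driver, the 5-second value is just for illustration, dbURI as in the question):

const mongoose = require('mongoose');

(async () => {
    try {
        // Reject after ~5 seconds if no server is reachable, instead of waiting for the default timeout.
        await mongoose.connect(dbURI, { serverSelectionTimeoutMS: 5000 });
        console.log('ok');
    } catch (err) {
        console.error('Could not reach the database: ' + err.message);
    }
})();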

Why does it return undefined?

So I have two files, server.js and db.js
Now, here is the code that I am having an issue with:
server.js :
var DB = require('./db')

app.get("/test", (req, res) => {
    console.log(DB.getPostAll())
})
db.js :
MongoClient.connect(uri, { useNewUrlParser: true })
    .then(function (db) {
        console.log("Connected")
        var dbo = db.db('test')
        module.exports.getPostAll = function getPostAll() {
            return (
                dbo.collection('posts').find({}).toArray(function (err, res) {
                    if (err) throw err;
                    else return res
                })
            )
        }
    })
    .catch(function (err) {
    })
Sorry for the silly question, but can anyone say what I am doing wrong here?
I am trying to use two returns. Before this I tried to use a variable in place of the returns in db.js's getPostAll, but it also returned undefined.
There are plenty of bad practices in the code, I'll try to cover some.
Your db.js file triggers an asynchronous action the moment it is required.
You are not returning the Promise from the db.js file, hence your server.js file starts executing code without knowing if the connection to the DB was fulfilled/pending or rejected
You are exporting a method after a function is executed, this leads to many odd and unexpected side effects. It is best to define all your exports at the top level of the file.
The simplest way to solve your issue is:
server.js :
const connectDB = require('./db')

connectDB().then((db) => {
    app.get("/test", (req, res) => {
        console.log(db.getPostAll())
    })
    app.listen(...); // lift the server ONLY when the db is connected
});
db.js :
module.exports = function connectDB() {
    return MongoClient.connect(uri, { useNewUrlParser: true })
        .then(function (db) {
            console.log("Connected")
            var dbo = db.db('test')
            return {
                getPostAll() {
                    return dbo.collection('posts').find({}).toArray()
                }
            }
        })
}
In the db.js file, I am exporting a function that returns a promise, this way I can tell when the connection is complete (by having the promise resolved), it returns an object with all the db methods you need (getPostAll).
In the server.js file, I am waiting for the async connection to be established before I lift the app; this way I know I have my application in a ready state when it is served, and I have the db methods readily available for my app.
You are getting undefined because your promise has not resolved. Try using an async function and await your db operation to finish. See working with async functions. You can also try mongoose object modeling as your MongoDB client.
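For illustration, here is an async/await version of the route, reusing the connectDB() from the answer above (a sketch; app is the express app from server.js, the port is just an example):

const connectDB = require('./db')

connectDB().then((db) => {
    app.get("/test", async (req, res) => {
        // Await the query so we log and send the actual documents, not a pending promise.
        const posts = await db.getPostAll();
        console.log(posts);
        res.json(posts);
    });
    app.listen(3000); // lift the server only when the db is connected
});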

Native mongodb connection for node.js with promises

I am running a node.js express app on IIS with iisnode and I am having the following problem.
I have a file for the mongo repository:
var MongoClient = require('mongodb').MongoClient;
var url = MY_URL;
var db = null;

MongoClient.connect(url, function(err, database) {
    db = database
})

var repository = {};
repository.getAll = getAll;
module.exports = repository;

function getAll(collectionName)
{
    return new Promise(
        function(resolve, reject){
            db.collection(collectionName).find().toArray(function(err, res) {
                if (err) reject(err);
                resolve(res);
            });
        });
}
Sometimes when I try to access the getAll function from an HTTP GET request I get an error:
"Cannot read property 'collection' of null"
It happens because db is null.
But when I call it again it always returns the values.
Is it possible that the getAll function is being invoked before the connection is set up?
Or maybe the first call wakes the connection up in some way?
It is hard for me to work on the problem since I can't reproduce it in a controlled manner. It usually happens after not using the app for a while (it seems that sometimes restarting IIS also triggers it).
MongoClient.connect returns a promise. So does getAll.
To ensure db always exists by the time you try to find something there, you need to chain the promises. With a few other bits fixed, it could be:
const MongoClient = require('mongodb').MongoClient;
const url = MY_URL;

const dbConnected = MongoClient.connect(url); // it is a promise, db is not connected yet

function getAll(collectionName)
{
    return dbConnected.then(db => // the connection promise resolved, we are good to use db
        db.collection(collectionName).find().toArray() // it returns promise, no need to wrap it with another one
    );
}

module.exports = {getAll};
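A usage sketch from the Express side (the file name, route path and collection name are made up for illustration; getAll resolves to the array of documents):

const repository = require('./repository');

app.get('/items', function (req, res) {
    repository.getAll('items')
        .then(items => res.json(items))
        .catch(err => res.status(500).send(err.message));
});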
