Native mongodb connection for node.js with promises - node.js

I am running a node.js Express app on IIS with iisnode and I am having the following problem.
I have a file for the Mongo repository:
var MongoClient = require('mongodb').MongoClient;
var url = MY_URL;
var db = null;

MongoClient.connect(url, function(err, database) {
    db = database;
});

var repository = {};
repository.getAll = getAll;
module.exports = repository;

function getAll(collectionName) {
    return new Promise(function(resolve, reject) {
        db.collection(collectionName).find().toArray(function(err, res) {
            if (err) reject(err);
            resolve(res);
        });
    });
}
Sometimes when I try to access the getAll function from an HTTP GET request I get the error
"Cannot read property 'collection' of null"
It happens because db is null.
But when I call it again it always returns the values.
Is it possible that the getAll function is being called before the connection is set up?
Or maybe the first call wakes the connection up in some way?
It is hard for me to work on the problem since I can't reproduce it in a controlled manner. It usually happens after not using the app for a while (it seems that restarting IIS sometimes also triggers it).

MongoClient.connect returns a promise, and so does getAll.
To ensure db always exists by the time you try to find something there, you need to chain the promises. With a few other bits fixed it could be:
const MongoClient = require('mongodb').MongoClient;
const url = MY_URL;
const dbConnected = MongoClient.connect(url); // it is a promise, the db is not connected yet

function getAll(collectionName) {
    return dbConnected.then(db => // the connection promise has resolved, we are good to use db
        db.collection(collectionName).find().toArray() // toArray() already returns a promise, no need to wrap it in another one
    );
}
module.exports = {getAll};
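As a usage sketch (the Express route, port, and collection name here are illustrative assumptions, not part of the original answer), a route handler can simply chain on the promise that getAll returns:

const express = require('express');
const repository = require('./repository'); // assumed path to the module above

const app = express();

// Hypothetical route; 'items' is just an example collection name
app.get('/items', function(req, res) {
    repository.getAll('items')
        .then(items => res.json(items))
        .catch(err => res.status(500).send(err.message));
});

app.listen(3000);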

Related

With node.js and mysql, how do you promisify pool.getConnection and use async/await when doing a transaction?

I've found examples of how to promisify a query for connection = mysql.createConnection(config); and examples of how to promisify pool.query for pool = mysql.createPool(config);. The issue is that all examples relating to pools either use a transaction without promisify/async/await, or they use pool.query, which doesn't work with transactions since each use gets a new connection.
Basically, I'm expecting code something like this...
const mysql = require('mysql');
const util = require('util');
const pool = mysql.createPool(config);
pool.getConnection = util.promisify(pool.getConnection).bind(pool);
const connection = await pool.getConnection();
let sql = SOME SQL;
let sql2 = MORE SQL;
await connection.beginTransaction();
const results = await query(sql);
const results2 = await query(sql2);
await connection.commit();
await connection.release();
But the only working example I can seem to find is this...
pool.getConnection(function(err, connection) {
    if (err) throw err; // not connected!

    // Use the connection
    connection.query('SELECT something FROM sometable', function (error, results, fields) {
        // When done with the connection, release it.
        connection.release();

        // Handle error after the release.
        if (error) throw error;

        // Don't use the connection here, it has been returned to the pool.
    });
});
It's like you are required to mix both formats. I normally figure this stuff out on my own, but I'm at a complete loss here and can't find any docs on this either. At this point I'm assuming I'm just supposed to mix the formats, or abandon this and find something that uses mysql and does what I want/expect.
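One possible sketch of the expected pattern (this is not an accepted answer from the thread; the helper names and the assumption that config is defined elsewhere are mine): promisify getConnection on the pool, then promisify query and the transaction methods on the connection it hands back, so the whole transaction runs on a single connection.

const mysql = require('mysql');
const util = require('util');

const pool = mysql.createPool(config); // `config` is assumed to be defined elsewhere

// Hypothetical helper: returns a connection whose transaction-related methods are promisified
async function getConnection() {
    const getConn = util.promisify(pool.getConnection).bind(pool);
    const connection = await getConn();
    connection.query = util.promisify(connection.query).bind(connection);
    connection.beginTransaction = util.promisify(connection.beginTransaction).bind(connection);
    connection.commit = util.promisify(connection.commit).bind(connection);
    connection.rollback = util.promisify(connection.rollback).bind(connection);
    return connection;
}

// Hypothetical usage: two statements in one transaction on the same connection
async function runTransaction(sql, sql2) {
    const connection = await getConnection();
    try {
        await connection.beginTransaction();
        const results = await connection.query(sql);
        const results2 = await connection.query(sql2);
        await connection.commit();
        return [results, results2];
    } catch (err) {
        await connection.rollback();
        throw err;
    } finally {
        connection.release(); // release() is synchronous in the mysql driver
    }
}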

Socket already opened issue when using Redis cache along with Node.js

I am facing an "Error: Socket already opened" issue when I am using Redis in my node.js project.
I am trying to cache database results in a Redis cache. When the Redis key is not empty, I pick the records from the Redis key. When it is empty, I pick them from the DB and set the Redis key.
Here is my code:
const { response } = require('express');
var express = require('express');
var mysql = require('mysql');
const redis = require('redis');
const client = redis.createClient();

function GetLatestPosts() {
    return new Promise(async function(resolve, reject) {
        await client.connect();
        const value = await client.get('indexitems');
        if (value != null) {
            await client.disconnect();
            resolve(JSON.parse(value));
        }
        else {
            var PostsList;
            mysqldb.getConnection(function (err, connection) {
                var sql = "CALL PRC_GetPostsList()";
                connection.query(sql, async function (err, data, fields) {
                    if (err) throw err;
                    PostsList = data[0];
                    await client.set('indexitems', JSON.stringify(PostsList));
                    await client.expire('indexitems', 86400);
                    await client.disconnect();
                    resolve(PostsList);
                });
            });
        }
    })
}
I am facing the "Error: Socket already opened" issue randomly. Sometimes it works without any issue, and sometimes it shows Error: Socket already opened.
Please help me resolve this issue. Thanks.
Here is my complete error:
Error: Socket already opened
    at RedisSocket.connect (/home/ubuntu/Projects/Site/Web/node_modules/@node-redis/client/dist/lib/client/socket.js:48:19)
    at Commander.connect (/home/ubuntu/Projects/Site/Web/node_modules/@node-redis/client/dist/lib/client/index.js:156:70)
    at /home/ubuntu/Projects/Site/Web/routes/index.js:224:22
    at new Promise (<anonymous>)
    at GetPostItems (/home/ubuntu/Projects/Site/Web/routes/index.js:223:12)
    at /home/ubuntu/Projects/Site/Web/routes/index.js:23:29
    at Layer.handle [as handle_request] (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/layer.js:95:5)
    at next (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/route.js:137:13)
    at Route.dispatch (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/route.js:112:3)
    at Layer.handle [as handle_request] (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/layer.js:95:5)
The problem occurs because client.connect() is called while the redis client is already connected.
Whenever client.get('indexitems') returns a value, the connection is correctly closed by await client.disconnect();.
However, if there is no value, an asynchronous call to MySQL is made, and the disconnection only happens in the callback of that request.
As this MySQL call happens asynchronously, the function GetLatestPosts may be executed again before the redis connection is closed, and client.connect() is called a second time, provoking the error.
Solution
The connection to the redis client can be opened only once when the server starts, and kept open.
This also reduces the overhead of opening a new connection at each request and then closing it.
The adapted code might then look like this:
const { response } = require('express');
var express = require('express');
var mysql = require('mysql');
const redis = require('redis');
const client = redis.createClient();

async function start() {
    await client.connect();

    function GetLatestPosts() {
        return new Promise(async function(resolve, reject) {
            const value = await client.get('indexitems');
            if (value != null) {
                resolve(JSON.parse(value));
            }
            else {
                var PostsList;
                mysqldb.getConnection(function (err, connection) {
                    var sql = "CALL PRC_GetPostsList()";
                    connection.query(sql, async function (err, data, fields) {
                        if (err) throw err;
                        PostsList = data[0];
                        await client.set('indexitems', JSON.stringify(PostsList));
                        await client.expire('indexitems', 86400);
                        resolve(PostsList);
                    });
                });
            }
        })
    }
}
start()
Extra remarks:
A graceful shutdown of the server might also be implemented, in order to close the connections to the DB clients in a clean way (a sketch follows below).
The same goes for the mysql connection: it can be opened just once at server startup.
You might prefer to call client.quit() rather than client.disconnect(), in order to ensure that all pending commands get executed, as documented.
The ioredis package (npm install ioredis) also works well for serverless applications, as an alternative to the native package:
https://www.npmjs.com/package/ioredis
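As a rough sketch of that graceful-shutdown remark (the shared redis client comes from the code above; the mysql pool variable and the signal handling are assumptions, not part of the original answer):

// Hypothetical shutdown handler: close the shared clients before the process exits.
// `client` is the redis client opened in start(); `pool` stands for a mysql pool
// assumed to be created once at startup.
async function shutdown() {
    try {
        await client.quit(); // flush pending redis commands, then close the socket
        pool.end(function (err) { // drain the mysql connection pool
            process.exit(err ? 1 : 0);
        });
    } catch (err) {
        process.exit(1);
    }
}

process.on('SIGTERM', shutdown);
process.on('SIGINT', shutdown);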

How and when should I connect to database with serverless?

I am new to aws lambda and serverless.
Serverless provides me one file called handler.js.
How can I use the mongo database in this file?
In a regular nodejs application I would connect to the mongo database and, once the connection is good, continue with the application and launch the server. Something like this:
(async () => {
    await mongoose.connect(...);
    const app = import('./app');
    app.listen(....);
})();
But when I use serverless, the application is already launched. When should the connection to the db be made?
Create a separate mongoconnection.js to maintain the mongo db connection and import it in handler.js
mongoconnection.js
"use strict";
// Import dependency.
const { MongoClient } = require('mongodb');
// Connection string to the database
const uri = process.env.MONGODB_URI;
// Validate that the database connection string has been configured.
if (!uri) {
throw new Error(
'The MONGODB_URI environment variable must be configured with the connection string ' +
'to the database.'
);
}
// Cached connection promise
let cachedPromise = null;
// Function for connecting to MongoDB, returning a new or cached database connection
module.exports.connectToDatabase = async function connectToDatabase() {
if (!cachedPromise) {
// If no connection promise is cached, create a new one. We cache the promise instead
// of the connection itself to prevent race conditions where connect is called more than
// once. The promise will resolve only once.
// Node.js driver docs can be found at http://mongodb.github.io/node-mongodb-native/.
cachedPromise =
MongoClient.connect(uri, { useNewUrlParser: true, useUnifiedTopology: true });
}
// await on the promise. This resolves only once.
const client = await cachedPromise;
return client;
}
handler.js
// Import dependency.
const { connectToDatabase } = require('./mongoconnection');

// Handler
module.exports.handler = async function(event, context) {
    // Get a MongoClient.
    const client = await connectToDatabase();

    // Use the connection to return the name of the connected database.
    return client.db().databaseName;
}
Please refer to the link below for more details:
https://docs.atlas.mongodb.com/best-practices-connecting-to-aws-lambda/

Access mongodb during meteor startup possible?

Is there a way to access mongodb during Meteor.startup()?
I need to insert/update a document in a collection during Meteor.startup().
I tried:
// https://www.npmjs.com/package/mongodb
const MongoClient = require('mongodb').MongoClient;
// await may be missing, but when I use await I get a reserved keyword error
const mongodb = MongoClient.connect(process.env.MONGO_URL)
mongodb.collection('collection').insertOne(objectData)
// Error
// TypeError: mongodb.collection is not a function
// at Meteor.startup (server/migrations.js:1212:23)
and
const col = Mongo.Collection('collection');
// Error
// TypeError: this._maybeSetUpReplication is not a function
// at Object.Collection [as _CollectionConstructor] (packages/mongo/collection.js:109:8)
Anyone got a solution?
The reason you are getting the error is not that you are accessing mongodb in the startup method; it's that the MongoClient.connect method is asynchronous. Consequently, you can only access your mongodb collections after the connect method resolves. You should try something like this instead:
const MongoClient = require('mongodb').MongoClient;

MongoClient.connect(process.env.MONGO_URL, null, (err, client) => {
    const db = client.db();
    // You can now access your collections
    const collection = db.collection('collection');
    // NOTE: insertOne is also asynchronous
    collection.insertOne(objectData, (err, result) => {
        // Process the result as you wish
        // You should close the client when done with everything you need it for
        client.close();
    });
})
For more explanation on connecting via MongoClient, check here.
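Since the question mentions wanting to use await, an equivalent sketch with async/await (assuming, as in the question, that objectData is defined elsewhere) could look like this:

const MongoClient = require('mongodb').MongoClient;

Meteor.startup(async () => {
    // Without a callback, MongoClient.connect returns a promise
    const client = await MongoClient.connect(process.env.MONGO_URL);
    try {
        const db = client.db();
        // insertOne also returns a promise when no callback is given
        await db.collection('collection').insertOne(objectData);
    } finally {
        await client.close();
    }
});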

MongoClient throw MongoError: server instance pool was destroyed

I saw these posts on SO describing this error. Most of them were due to the fact that JavaScript is async and mongoClient.close() was called outside of the callback. That's not my case, but I don't know what else could be the reason.
const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, {
    useNewUrlParser: true
});

module.exports = class Mongo {
    insertOne(article) {
        mongoClient.connect((err, client) => {
            const db = client.db('grabber');
            db.collection("zr").insertOne(article, (err, res) => {
                if (err) throw err;
                mongoClient.close();
            });
        });
    };
}
I observed that you call mongoClient.connect() in the insertOne() method, and also call mongoClient.close() within that method, with mongoClient as a global variable.
My hunch is that either:
There's another method that uses the mongoClient that was closed by this method, or
You called insertOne(article) twice.
I can confirm that the second reason is the most likely one. Here's the code I tried:
const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, {
    useNewUrlParser: true
});

class Mongo {
    insertOne(article) {
        mongoClient.connect((err, client) => {
            const db = client.db('grabber');
            db.collection("zr").insertOne(article, (err, res) => {
                if (err) throw err;
                mongoClient.close();
            });
        });
    };
};

x = new Mongo()
setTimeout(function() { x.insertOne({'a': 1}); }, 1000);
setTimeout(function() { x.insertOne({'a': 2}); }, 2000);
The two setTimeout calls are there to ensure that the two insertOne() calls run one after the other. Result:
MongoError: server instance pool was destroyed
The way your code is currently structured, the node driver creates a new connection pool every time insertOne() is called. This is not optimal, and it prevents the node driver from utilizing connection pooling.
Instead of calling mongoClient.connect() inside insertOne(), call it globally, outside of class Mongo. Pass the global connection object (the object returned from mongoClient.connect()) to your insertOne() method instead of the mongoClient object itself.
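One way to apply that advice (the exact shape below is a sketch of my own, not the answerer's code) is to connect once, globally, and hand the resulting db object to insertOne():

const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";

class Mongo {
    // The connected db handle is passed in, so this class never opens or closes the pool itself
    insertOne(db, article) {
        return db.collection("zr").insertOne(article);
    }
}

// Connect once, globally, and reuse the same client (and its connection pool) everywhere
MongoClient.connect(url, { useNewUrlParser: true }, (err, client) => {
    if (err) throw err;
    const db = client.db('grabber');
    const x = new Mongo();

    x.insertOne(db, { a: 1 })
        .then(() => x.insertOne(db, { a: 2 }))
        .then(() => client.close()); // close only when the app is completely done with the database
});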
