Socket already opened issue when using Redis cache along with Node.js - node.js

I am facing, "Error: Socket already opened issue" when I am using Redis along with my node.js project.
I am trying to cache database results into Redis cache.. When Redis key is not empty, I will pick records from Redis Key. When its empty, I will pick from DB and set to Redis Key.
Here is my code:
const { response } = require('express');
var express = require('express');
var mysql = require('mysql');
const redis = require('redis');
const client = redis.createClient();

function GetLatestPosts() {
    return new Promise(async function (resolve, reject) {
        await client.connect();
        const value = await client.get('indexitems');
        if (value != null) {
            await client.disconnect();
            resolve(JSON.parse(value));
        }
        else {
            var PostsList;
            mysqldb.getConnection(function (err, connection) {
                var sql = "CALL PRC_GetPostsList()";
                connection.query(sql, async function (err, data, fields) {
                    if (err) throw err;
                    PostsList = data[0];
                    await client.set('indexitems', JSON.stringify(PostsList));
                    await client.expire('indexitems', 86400);
                    await client.disconnect();
                    resolve(PostsList);
                });
            });
        }
    })
}
I am facing "Error: Socket already opened issue" randomly. Some times it works without any issue. Some times it shows Error: Socket already opened.
Please help me to resolve this issue. Thanks.
Here is my complete error:
Error: Socket already opened
RedisSocket.connect (/home/ubuntu/Projects/Site/Web/node_modules/@node-redis/client/dist/lib/client/socket.js:48:19)
Commander.connect (/home/ubuntu/Projects/Site/Web/node_modules/@node-redis/client/dist/lib/client/index.js:156:70)
/home/ubuntu/Projects/Site/Web/routes/index.js:224:22
new Promise (<anonymous>)
GetPostItems (/home/ubuntu/Projects/Site/Web/routes/index.js:223:12)
/home/ubuntu/Projects/Site/Web/routes/index.js:23:29
Layer.handle [as handle_request] (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/layer.js:95:5)
next (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/route.js:137:13)
Route.dispatch (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/route.js:112:3)
Layer.handle [as handle_request] (/home/ubuntu/Projects/Site/Web/node_modules/express/lib/router/layer.js:95:5)

The problem occurs because client.connect() is called while the Redis client is already connected.
Whenever client.get('indexitems') returns a value, the connection is correctly closed by await client.disconnect();.
However, if there is no value, an asynchronous call to MySQL is made, and the disconnection only happens in the callback of that request.
Because this MySQL call happens asynchronously, GetLatestPosts may be executed again before the Redis connection is closed, so client.connect() is called a second time, which triggers the error.
Solution
The connection to the Redis client can be opened only once when the server starts and kept open.
This also avoids the overhead of opening a new connection on each request and then closing it.
The adapted code might then look like this:
const { response } = require('express');
var express = require('express');
var mysql = require('mysql');
const redis = require('redis');
const client = redis.createClient();

async function start() {
    await client.connect();

    function GetLatestPosts() {
        return new Promise(async function (resolve, reject) {
            const value = await client.get('indexitems');
            if (value != null) {
                resolve(JSON.parse(value));
            }
            else {
                var PostsList;
                mysqldb.getConnection(function (err, connection) {
                    var sql = "CALL PRC_GetPostsList()";
                    connection.query(sql, async function (err, data, fields) {
                        if (err) throw err;
                        PostsList = data[0];
                        await client.set('indexitems', JSON.stringify(PostsList));
                        await client.expire('indexitems', 86400);
                        resolve(PostsList);
                    });
                });
            }
        })
    }
}
start()
Extra remarks:
A graceful shutdown of the server might also be implemented, in order to close the connections to the DB clients in a clean way (see the sketch below).
The same goes for the MySQL connection: it can be opened only once at server startup.
You might prefer to call client.quit() rather than client.disconnect(), in order to ensure that all pending commands get executed, as documented.
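A minimal sketch of such a graceful shutdown, assuming the same client as above and that mysqldb is the MySQL pool used in the question (both names are assumptions, not code from the original post):

async function shutdown() {
    try {
        await client.quit(); // waits for pending Redis commands, then closes the socket
        await new Promise((resolve, reject) =>
            mysqldb.end(err => (err ? reject(err) : resolve())) // drain and close the MySQL pool
        );
    } finally {
        process.exit(0);
    }
}

process.on('SIGINT', shutdown);
process.on('SIGTERM', shutdown);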

ioredis (npm install ioredis) works well for serverless applications, as an alternative to the native redis package.
https://www.npmjs.com/package/ioredis
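A minimal sketch of the same caching pattern with ioredis, which connects automatically and queues commands until the connection is ready. The REDIS_URL variable and the loadFromDb callback are assumptions for illustration, not part of the original post:

const Redis = require('ioredis');
const redisClient = new Redis(process.env.REDIS_URL);

async function getCachedPosts(loadFromDb) {
    const cached = await redisClient.get('indexitems');
    if (cached !== null) return JSON.parse(cached);

    const posts = await loadFromDb(); // e.g. the MySQL stored procedure call from the question
    await redisClient.set('indexitems', JSON.stringify(posts), 'EX', 86400); // cache for one day
    return posts;
}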

Related

With node.js and mysql, how do you promisify pool.getConnection and use async/await while doing a transaction?

I've found examples of how to promisify a query for connection = mysql.createConnection(config); and examples of how to promisify pool.query for pool = mysql.createPool(config);. The issue is that all examples relating to pools either use a transaction without promisify/async/await, or they use pool.query, which doesn't work with transactions since each call gets a new connection.
Basically, I'm expecting code something like this...
const mysql = require('mysql');
const util = require('util');
const pool = mysql.createPool(config);
const pool.getConnection = util.promisify(pool.getConnection).bind(pool);
const connection = pool.getConnection;
let sql = SOME SQL;
let sql2 = MORE SQL;
await connection.beginTransaction();
const results = await query(sql);
const results2 = await query(sql2);
await connection.commit();
await connection.release();
but the only working example i can seem to find is this...
pool.getConnection(function (err, connection) {
    if (err) throw err; // not connected!
    // Use the connection
    connection.query('SELECT something FROM sometable', function (error, results, fields) {
        // When done with the connection, release it.
        connection.release();
        // Handle error after the release.
        if (error) throw error;
        // Don't use the connection here, it has been returned to the pool.
    });
});
It's like you are required to mix both formats. I normally figure this stuff out on my own, but I'm at a complete loss here and can't find any docs regarding this either. At this point I'm assuming I'm supposed to either mix the formats or abandon this and find something that uses mysql that does what I want/expect.
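One possible approach, sketched below under the assumption that the plain mysql driver is used: promisify pool.getConnection once, then promisify the per-connection methods so the transaction can be written with async/await. The runTransaction helper and the config object are illustrative, not from the original post:

const mysql = require('mysql');
const util = require('util');

const pool = mysql.createPool(config); // `config` assumed to be defined elsewhere
const getConnection = util.promisify(pool.getConnection).bind(pool);

async function runTransaction(sql, sql2) {
    const connection = await getConnection();
    const query = util.promisify(connection.query).bind(connection);
    const beginTransaction = util.promisify(connection.beginTransaction).bind(connection);
    const commit = util.promisify(connection.commit).bind(connection);
    const rollback = util.promisify(connection.rollback).bind(connection);
    try {
        await beginTransaction();
        const results = await query(sql);
        const results2 = await query(sql2);
        await commit();
        return [results, results2];
    } catch (err) {
        await rollback();
        throw err;
    } finally {
        connection.release(); // always return the connection to the pool
    }
}

Alternatively, the mysql2 package ships a promise wrapper (require('mysql2/promise')) whose pool.getConnection() already returns a promise, which avoids the manual promisify calls.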

How and when should I connect to the database with serverless?

I'm new to AWS Lambda and Serverless.
Serverless provides me one file called handler.js.
How can I use the Mongo database in this file?
In a regular Node.js application I would connect to the Mongo database and then, once the connection is good, continue the application and launch the server, something like this:
(async () => {
    await mongoose.connect(...);
    const app = import('./app');
    app.listen(....);
})();
But when I use Serverless, the application is already launched. When should the connection to the DB be made?
Create a separate mongoconnection.js to maintain the MongoDB connection and import it in handler.js.
mongoconnection.js
"use strict";
// Import dependency.
const { MongoClient } = require('mongodb');
// Connection string to the database
const uri = process.env.MONGODB_URI;
// Validate that the database connection string has been configured.
if (!uri) {
throw new Error(
'The MONGODB_URI environment variable must be configured with the connection string ' +
'to the database.'
);
}
// Cached connection promise
let cachedPromise = null;
// Function for connecting to MongoDB, returning a new or cached database connection
module.exports.connectToDatabase = async function connectToDatabase() {
if (!cachedPromise) {
// If no connection promise is cached, create a new one. We cache the promise instead
// of the connection itself to prevent race conditions where connect is called more than
// once. The promise will resolve only once.
// Node.js driver docs can be found at http://mongodb.github.io/node-mongodb-native/.
cachedPromise =
MongoClient.connect(uri, { useNewUrlParser: true, useUnifiedTopology: true });
}
// await on the promise. This resolves only once.
const client = await cachedPromise;
return client;
}
handler.js
// Import dependency.
const { connectToDatabase } = require('./mongoconnection');

// Handler
module.exports.handler = async function (event, context) {
    // Get a MongoClient.
    const client = await connectToDatabase();

    // Use the connection to return the name of the connected database.
    return client.db().databaseName;
}
Please refer to the link below for more details:
https://docs.atlas.mongodb.com/best-practices-connecting-to-aws-lambda/

Not sure why node is continuously running

I can't figure out why this app keeps running. I've tried using the why-is-node-running package but I'm not perfectly sure how to read the output properly. Here's the first output of it:
There are 30 handle(s) keeping the process running
# TCPWRAP
/node_modules/mongodb/lib/core/connection/connect.js:269 - socket = tls.connect(parseSslOptions(family, options));
/node_modules/mongodb/lib/core/connection/connect.js:29 - makeConnection(family, options, cancellationToken, (err, socket) => {
/node_modules/mongodb/lib/core/sdam/monitor.js:182 - connect(monitor.connectOptions, monitor[kCancellationToken], (err, conn) => {
/node_modules/mongodb/lib/core/sdam/monitor.js:206 - checkServer(monitor, e0 => {
/node_modules/mongodb/lib/core/sdam/monitor.js:92 - monitorServer(this);
My guess is it has something to do with MongoDB not closing properly. Although, when I removed all of the other functions between opening the client and closing it, it opened and closed perfectly.
Adding process.exit() at the end closes the program properly, but I'd like to figure out why it isn't closing on its own.
A summary of the app is that it is getting data from MongoDB, cleaning it, and then writing it into Firestore - so a lot of async actions going on, but I didn't see Firestore-related stuff pop up in the why-is-node-running logs.
const GrabStuffFromDBToCalculate = require("./helpers/GrabStuffFromDBToCalculate");
const SendToFirestore = require("./helpers/SendToFirestore");
const log = require("why-is-node-running");
const { MongoClient } = require("mongodb");
require("dotenv").config();

const main = async () => {
    try {
        const client = await MongoClient.connect(process.env.MONGODB_URI, {
            useNewUrlParser: true,
            useUnifiedTopology: true
        });
        const collection = await client.db("test").collection("testcollection");
        const trip_object = await GrabStuffFromDBToCalculate(collection);
        SendToFirestore(trip_object);
        client.close();
        log(); // "There are 30 handle(s) keeping the process running including node_modules/mongodb/lib/core/connection/connect.js:269 - socket = tls.connect(parseSslOptions(family, options));"
        // process.exit() // this closes everything but I'd rather not have to use this
    } catch (err) {
        console.log(err);
        client.close();
    }
};

const runAsync = async () => {
    await main(); // this exists because I'm usually running multiple main() functions
};

runAsync();
SendToFirestore code:
const firebase = require("firebase");
const firebaseConfig = require("../config");
module.exports = SendToFirestore = trip_object => {
if (!firebase.apps.length) {
firebase.initializeApp(firebaseConfig);
}
const db = firebase.firestore();
db.doc(`hello/${object._id}`).set({
objectid:object._id
});
};
GrabStuffFromDBToCalculate code (way simplified):
module.exports = GrabStuffFromDBToCalculate = async collection => {
    const cursor = await collection
        .aggregate([
            // does a bunch of stuff here
        ])
        .toArray();
    const newObj = cursor[0];
    return newObj;
};
Making my comment into an answer since it led to the missing piece.
Node does not shut down because you have an open Firestore connection. You will have to call terminate to allow the SDK to shut down and release resources:
db.terminate();
This is what allows Node.js to shut itself down automatically.
Also, as I suggested in my comment, use await, as in
await client.close()
before calling log(), so you are sure that the client connection has been closed before you do the logging. client.close() is an asynchronous method, so your original code would log() before that close was complete.
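Putting both suggestions together, main() might look like the sketch below. It reuses MongoClient, the helpers, and log from the question's file, assumes const firebase = require("firebase"); is added next to the other requires, and is not code from the original answer:

const main = async () => {
    const client = await MongoClient.connect(process.env.MONGODB_URI, {
        useNewUrlParser: true,
        useUnifiedTopology: true
    });
    try {
        const collection = client.db("test").collection("testcollection");
        const trip_object = await GrabStuffFromDBToCalculate(collection);
        SendToFirestore(trip_object);
    } finally {
        await client.close(); // close() is async, so await it before logging
        if (firebase.apps.length) {
            await firebase.firestore().terminate(); // releases the Firestore handles keeping Node alive
        }
        log(); // should now report far fewer open handles
    }
};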

MongoClient throw MongoError: server instance pool was destroyed

I saw several posts on SO describing this error. In most of them the reason was that JavaScript is async and mongoClient.close() was called outside of the callback. That's not my case, but I don't know what else the reason could be.
const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, {
    useNewUrlParser: true
});

module.exports = class Mongo {
    insertOne(article) {
        mongoClient.connect((err, client) => {
            const db = client.db('grabber');
            db.collection("zr").insertOne(article, (err, res) => {
                if (err) throw err;
                mongoClient.close();
            });
        });
    };
}
I observed that you call mongoClient.connect() inside the insertOne() method, and also call mongoClient.close() within that method, with mongoClient being a module-level variable.
My hunch is that either:
another method uses mongoClient after it was closed by this method, or
you called insertOne(article) twice.
I can confirm that the second reason is the most likely one. Here's the code I tried:
const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";
const mongoClient = new MongoClient(url, {
    useNewUrlParser: true
});

class Mongo {
    insertOne(article) {
        mongoClient.connect((err, client) => {
            const db = client.db('grabber');
            db.collection("zr").insertOne(article, (err, res) => {
                if (err) throw err;
                mongoClient.close();
            });
        });
    };
};

x = new Mongo()
setTimeout(function () { x.insertOne({ 'a': 1 }); }, 1000);
setTimeout(function () { x.insertOne({ 'a': 2 }); }, 2000);
The two setTimeout calls are there to ensure that the two insertOne() calls happen one after another. Result:
MongoError: server instance pool was destroyed
The way your code is currently structured, the node driver creates a new connection pool every time insertOne() is called. This is not optimal and prevents the node driver from utilizing connection pooling.
Instead of calling mongoClient.connect() inside insertOne(), call it once, globally, outside of class Mongo. Pass the connected object (the one returned by mongoClient.connect()) to your insertOne() method instead of the mongoClient object itself, as sketched below.
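A minimal sketch of that structure: connect once at startup and hand the connected client to the class. The constructor-injection shape is illustrative, not code from the original answer:

const MongoClient = require('mongodb').MongoClient;
const url = "mongodb://localhost:27017/";

class Mongo {
    constructor(client) {
        this.db = client.db('grabber'); // reuse the client's single connection pool
    }
    insertOne(article) {
        return this.db.collection("zr").insertOne(article);
    }
}

MongoClient.connect(url, { useNewUrlParser: true }, (err, client) => {
    if (err) throw err;
    const mongo = new Mongo(client);
    mongo.insertOne({ a: 1 });
    mongo.insertOne({ a: 2 }); // both inserts share the same pool; no "pool was destroyed" error
});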

Native mongodb connection for node.js with promises

I am running a Node.js Express app on IIS with iisnode and I am having the following problem.
I have a file for the Mongo repository:
var MongoClient = require('mongodb').MongoClient;
var url = MY_URL;
var db = null;

MongoClient.connect(url, function (err, database) {
    db = database
})

var repository = {};
repository.getAll = getAll;
module.exports = repository;

function getAll(collectionName) {
    return new Promise(
        function (resolve, reject) {
            db.collection(collectionName).find().toArray(function (err, res) {
                if (err) reject(err);
                resolve(res);
            });
        });
}
Sometimes when I try to access the getAll function from an HTTP GET request I get an error:
"Cannot read property 'collection' of null"
It happens because db is null.
But when I call it again it always returns the values.
Is it possible that the getAll function is being called before the connection is set up?
Or maybe the first call wakes up the connection in some way?
It is hard for me to work on the problem since I can't reproduce it in a controlled manner. It usually happens after not using the app for a while (it seems that sometimes restarting IIS also triggers it).
MongoClient.connect returns a promise, and so does getAll.
To ensure db always exists by the time you try to find something, you need to chain the promises. With a few other bits fixed it could be:
const MongoClient = require('mongodb').MongoClient;
const url = MY_URL;
const dbConnected = MongoClient.connect(url); // it is a promise; the db is not connected yet

function getAll(collectionName) {
    return dbConnected.then(db => // the connection promise resolved, we are good to use db
        db.collection(collectionName).find().toArray() // it returns a promise, no need to wrap it in another one
    );
}

module.exports = { getAll };
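For completeness, a small usage sketch: the repository.js filename and the 'articles' collection name are assumptions for illustration, not from the original answer.

const repository = require('./repository');

repository.getAll('articles')
    .then(docs => console.log(`Fetched ${docs.length} documents`))
    .catch(err => console.error(err));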
