Node Postgres - release clients in a pool before closing pool - node.js

I am trying to gracefully stop my Postgres db on process.on('SIGINT', handler) in my main index.js.
With Node Postgres, I am using a pool with 4 clients. After reading the docs section titled "shut it down" on closing a pool, I understand I need to release the clients first.
Given only the exported pool itself (module.exports.pool = pool;), how do I access the clients in the pool to do this? This is how I initialize my Postgres db, create the pool, and export it to other modules.
const colors = require('colors');
const pg = require('pg');

if (process.env.DOLPHIN_TEST) {
  var dbName = 'test_dolphin_db';
  var maxCon = 1;
} else {
  var dbName = 'dev_dolphin_db';
  var maxCon = 4;
}

const config = {
  user: 'postgres',
  database: dbName,
  host: 'localhost',
  max: maxCon,
  idleTimeoutMillis: 30000
};

const pool = new pg.Pool(config);

function poolConnected(err, client, done, resolve) {
  if (err) {
    return promiseArgs.reject(`error fetching client from pool ${err}`);
  }
  client.query('SELECT $1::int AS number', ['1'], function(err, result) {
    // call `done()` to release the client back to the pool
    done();
    if (err) {
      return console.error('error running query', err);
    }
    console.log(colors.grey(
      `Postgres is online using ${client.database} as user ${client.user}\n` +
      `and is listening on ${client.host}`));
    promiseArgs.resolve();
  });
}

var promiseArgs = { resolve: undefined, reject: undefined };

function initDb() {
  var promise = new Promise(function(resolve, reject) {
    promiseArgs.resolve = resolve;
    promiseArgs.reject = reject;
    pool.connect(poolConnected);
    pool.on('error', function (err, client) {
      // if an error is encountered by a client while it sits idle in the pool
      // the pool itself will emit an error event with both the error and
      // the client which emitted the original error
      // this is a rare occurrence but can happen if there is a network partition
      // between your application and the database, the database restarts, etc.
      // and so you might want to handle it and at least log it out
      console.error('idle client error', err.message, err.stack);
    });
  });
  return promise;
}

module.exports.pool = pool;
module.exports.initDb = initDb;
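For the shutdown part itself, the pg docs' "shut it down" step comes down to calling pool.end(); you don't reach into the individual clients, you just make sure each one has been released with done() (as the query callback above already does) and then end the pool. A minimal sketch of the SIGINT handler in index.js, assuming the module above is saved as db.js and a pg version where pool.end() accepts a callback (newer versions also return a promise):
// index.js - minimal sketch, not the exact app code
const db = require('./db');

process.on('SIGINT', function () {
  // pool.end() disconnects the idle clients and shuts the pool down;
  // any checked-out client must have been released with done() first
  db.pool.end(function () {
    console.log('pg pool has been drained, exiting');
    process.exit(0);
  });
});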

Related

Sequelize and Tedious Breaking Change on version 10.0.0 in NodeJS API

I'm upgrading the npm modules sequelize and tedious to the latest versions (6.6.2 and 11.0.7 respectively). I can get Sequelize upgraded and working with tedious up to version 9.2.3, but the next version is 10 and there is a breaking change that I can't seem to find much documentation on (see link):
tedious breaking change
Where it seems to break is when I am creating my connection pools, on this line: _sqlConnection[p] = new Sequelize(sqlConfig[p]);
const initSqlConnectionPool = async () => {
  try {
    // loop through each defined connection in the sql config file
    for (var p in sqlConfig) {
      if (Object.prototype.hasOwnProperty.call(sqlConfig, p)) {
        // setup SQL server connection
        try {
          // Believe issue is here because after this statement _sqlConnection is empty
          _sqlConnection[p] = new Sequelize(sqlConfig[p]);
        } catch (err) {
          debug('Pooling Error', err);
        }
        // test connection and output status
        await _sqlConnection[p]
          .authenticate()
          .then(() => {
            debug(`Opened SQL connection pool to ${p}`);
          })
          .catch(err => {
            debug(`Unable to form SQL Connection on ${p}`, err);
          });
      }
    }
    // attach to node.js termination events and kill connection pool
    process.once('SIGTERM', await closeSqlConnectionPool).once('SIGINT', await closeSqlConnectionPool);
  } catch (err) {
    console.error(err.message);
  }
};
The sqlConfig is imported and it looks like so...
module.exports = {
  Pool1: {
    host: process.env.SERVER,
    database: process.env.DB,
    username: process.env.USERNAME,
    password: process.env.PASSWORD,
    dialect: 'mssql',   // the sql dialect of the database
    logging: false,     // disable logging or provide a custom logging function; default: console.log
    options: {
      enableArithAbort: true,
      encrypt: true
    },
    pool: {
      max: 10,       // Maximum number of connections in pool
      idle: 10000,   // The maximum time, in milliseconds, that a connection can be idle before being released.
      acquire: 30000 // The maximum time, in milliseconds, that pool will try to get a connection before throwing an error
    }
  }
};
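One thing worth double-checking, purely as an assumption about where the driver options end up: with the mssql dialect, Sequelize forwards driver-level settings to tedious through a nested dialectOptions.options object rather than a top-level options key, so it may be worth trying the config reshaped like the sketch below (same environment variables, only the options moved):
// hypothetical reshaping of the config above - dialectOptions.options is what
// Sequelize hands to the tedious driver for mssql
module.exports = {
  Pool1: {
    host: process.env.SERVER,
    database: process.env.DB,
    username: process.env.USERNAME,
    password: process.env.PASSWORD,
    dialect: 'mssql',
    logging: false,
    dialectOptions: {
      options: {
        enableArithAbort: true,
        encrypt: true
      }
    },
    pool: {
      max: 10,
      idle: 10000,
      acquire: 30000
    }
  }
};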
The error I am getting is TypeError: connection.query is not a function when calling an execute function to run a statement. I've confirmed that the _sqlConnection object is empty, which is what I'm guessing is the problem. Here is the code for the execute function:
const execSQL = async (sql, options, host) => {
  let result;
  let connection;
  let errMessage;
  try {
    // get an SQL server connection
    connection = await getSqlConnection(host);
    result = await connection.query(sql, options);
  } catch (error) {
    console.error(error);
    debug(error);
    errMessage = await utils.removeSpecialChars(error.toString());
    result = [{ status: constants.HTTP_INTERNAL_SERVER_ERROR, message: errMessage }];
  } finally {
    // debug('result', result);
    return new Promise((resolve, reject) => { resolve(result); });
  }
}
Here is the logic of the getSqlConnection function that is also being called, which is where _sqlConnection is supposed to be used:
const getSqlConnection = async (host) => {
  // check if host parameter was provided
  if (host === undefined) {
    // default the host to first configuration in connection object if not provided
    host = Object.keys(_sqlConnection)[0];
  }
  return new Promise((resolve, reject) => {
    // check if the sql connection is active
    if (_sqlConnection[host]) {
      debug(`Connection opened to ${host}`);
      resolve(_sqlConnection[host]);
    } else {
      resolve({ Error: constants.CONNECTION_OPEN_ERROR + ' (SQL)' });
    }
  });
};

MongoError: Topology is closed, please connect despite established database connection

I am writing a web application that uses asynchronous database requests as part of its API. Currently, I have an async Express route that awaits the return values of two async functions. Both functions return booleans and both query the database. One works correctly; the second one does not.
Here is the MongoClient setup:
const MongoClient = require('mongodb').MongoClient;
const uri = config.uri; // Contains custom url for accessing database
const client = new MongoClient(uri, { useUnifiedTopology: true}, { useNewUrlParser: true }, { connectTimeoutMS: 30000 }, { keepAlive: 1});
where config comes from a file imported as
const config = require("./config.js");
and functions properly.
Here is the express setup:
app.post("/signup", async function(request, response) {
log("POST request at /signup");
log("BEFORE UNIQUE USER");
const isUniqueUser = await validateUniqueUser(request.body.email, request.body.password);
log(isUniqueUser);
const status = {
status: null
};
if (isUniqueUser) {
log("AFTER UNIQUE USER");
let userCreated = await createPracticeProfile(request.body.email, request.body.password);
log("user created: " + userCreated);
if (userCreated) {
status.status = "user_created";
}
response.json(status);
} else {
response.json(status);
}
console.log("********************************end");
});
The console outputs:
BEFORE UNIQUE USER
true (which it should be)
AFTER UNIQUE USER
MongoError: Topology is closed.
user created: undefined
***...***end
Here is the function for validating that a user is unique:
/* VALIDATE_UNIQUE_USER
   USE: ensure user does not have existing profile
   PARAMS: email (string), password (string)
   RETURN: isUniqueUser (bool)
*/
async function validateUniqueUser(email, password) {
  // connect to database
  const database = await client.connect().catch(err => {
    log("ERROR while connecting to database at: validateUniqueUser");
    console.log(err);
    client.close();
  });
  // database connection failed
  if (!database) {
    return false;
  }
  // connection successful => find user
  let user;
  try {
    user = await database.db("guitar-practice-suite").collection("users").findOne({email: email});
  } catch(err) {
    log("ERROR while finding user in database at: validateUniqueUser");
    console.log(err);
    client.close();
    return false;
  } finally {
    client.close();
    // user not found (unique)
    if (user === null || user === undefined) {
      return true;
    }
    return false;
  }
}
Here is the function for inserting the user into the collections:
/* CREATE_PRACTICE_PROFILE
   USE: insert a practice profile into the database
   PARAMS: email (string), password (string)
   RETURN: userCreated (bool)
*/
async function createPracticeProfile(email, password) {
  // hash password
  let hashedPassword;
  try {
    hashedPassword = await new Promise((resolve, reject) => {
      bcrypt.hash(password, null, null, function(err, hash) {
        if (err) {
          reject(err);
        }
        resolve(hash);
      });
    });
  } catch(err) {
    log("ERROR while hashing password at: createPracticeProfile");
    console.log(err);
    return false;
  }
  // connect to database
  const database = await client.connect().catch(err => {
    log("ERROR while connecting to database at: validateUniqueUser");
    console.log(err);
    client.close();
  });
  // database connection failed
  if (!database) {
    return false;
  }
  // database connection successful => insert user into database
  let insertUserToUsers;
  let insertUserToExercises;
  let insertUserToCustomExercises;
  try {
    insertUserToUsers = await database.db("guitar-practice-suite").collection("users").insertOne({email: email, password: hashedPassword});
    insertUserToExercises = await database.db("guitar-practice-suite").collection("exercises").insertOne({email: email});
    insertUserToCustomExercises = await database.db("guitar-practice-suite").collection("custom-exercises").insertOne({email: email, exercises: []});
  } catch(err) {
    log("ERROR while inserting user into database at: createPracticeProfile");
    console.log(err);
    client.close();
    return false;
  } finally {
    client.close();
    return insertUserToUsers && insertUserToExercises && insertUserToCustomExercises;
  }
}
I've found the solution to the problem, but I'm not sure I understand the reasoning.
The culprit was the client.close() in the finally block of the validateUniqueUser function. It was closing the connection before createPracticeProfile had finished inserting the user.
When that line is taken out, the function works.
The issue is that the client variable needs to be instantiated again:
const client = new MongoClient(uri, { useUnifiedTopology: true}, { useNewUrlParser: true }, { connectTimeoutMS: 30000 }, { keepAlive: 1});
Try putting this at the start of createPracticeProfile, validateUniqueUser and the other functions, as sketched below.
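A minimal sketch of that suggestion, assuming the same uri from the setup above, so that a close() in one function can no longer break the client used in another:
async function validateUniqueUser(email, password) {
  // fresh client per call, so another function's client.close() cannot close this topology
  const client = new MongoClient(uri, { useUnifiedTopology: true });
  try {
    await client.connect();
    const user = await client.db("guitar-practice-suite")
      .collection("users")
      .findOne({ email: email });
    return user === null || user === undefined; // unique if not found
  } catch (err) {
    console.log(err);
    return false;
  } finally {
    await client.close();
  }
}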
I was getting the error
MongoError: Topology is closed
because of an authentication problem:
MongoError: Authentication failed
In my case, the problem was with the password of my database. My password contained only numerical digits.
I changed the password to all characters and both errors were resolved.
Configure your client connection like the example below:
var MongoClient = require('mongodb').MongoClient;
var Server = require('mongodb').Server;
var mongoClient = new MongoClient(new Server('localhost', 27017));
mongoClient.open(function(err, mongoClient) {
  var db1 = mongoClient.db("mydb");
  mongoClient.close();
});
In my case - connecting to AtlasDB using the MongoClient - I had to whitelist the IP i was accessing the cluster from
I think your MongoDB service is stopped. To start it:
Task Manager -> Services -> MongoDB -> Right-click -> Start
My code had been working fine for a long time and hadn't thrown this error before: MongoError: Topology is closed.
But because my laptop had been on for a long time and I was simultaneously developing other projects on it while the main one was running in the terminal, Mongo most likely failed to close one of the connections to the database and opened another in parallel, creating some kind of collision.
In my case, a simple restart of the computer helped, and the error did not occur again.

How to define default settings for each connection in a connection pool with Knex.js

I need to set default session configuration, like TIME_ZONE, NLS_DATE_LANGUAGE and others, for each connection in a connection pool for an Oracle database. The Knex.js documentation provides the snippet shown below, but for PostgreSQL:
var knex = require('knex')({
  client: 'pg',
  connection: {host: ''},
  pool: {
    afterCreate: function (conn, done) {
      // in this example we use pg driver's connection API
      conn.query('SET timezone="UTC";', function (err) {
        if (err) {
          // first query failed, return error and don't try to make next query
          done(err, conn);
        } else {
          // do the second query...
          conn.query('SELECT set_limit(0.01);', function (err) {
            // if err is not falsy, connection is discarded from pool
            // if connection acquire was triggered by a query the error is passed to query promise
            done(err, conn);
          });
        }
      });
    }
  }
});
The oracledb way of doing that is to define a callback for the sessionCallback attribute:
async function init() {
  try {
    await oracledb.createPool({
      user: 'USER',
      password: 'PWD',
      connectString: 'HOST:1521/SERVICE',
      sessionCallback: initSession // HERE MY DEFAULT SETTINGS
    });
  } catch (e) {
    console.error(e);
  }
}

async function initSession(connection, requestedTag, cb) {
  try {
    await connection.execute(`
      ALTER SESSION SET
        TIME_ZONE='UTC'
        NLS_LANGUAGE = 'BRAZILIAN PORTUGUESE'
        NLS_TERRITORY = 'BRAZIL'
        NLS_CURRENCY = 'R$'
        NLS_ISO_CURRENCY = 'BRAZIL'
        NLS_NUMERIC_CHARACTERS = ',.'
        NLS_CALENDAR = 'GREGORIAN'
        NLS_DATE_FORMAT = 'DD/MM/YYYY'
        NLS_DATE_LANGUAGE = 'BRAZILIAN PORTUGUESE'
        NLS_SORT = 'WEST_EUROPEAN'
        NLS_TIMESTAMP_FORMAT = 'DD/MM/YYYY HH24:MI:SS'
        NLS_DUAL_CURRENCY = 'R$'
    `);
  } catch (e) {
    console.error(e);
    cb(e);
  } finally {
    if (connection) {
      await connection.close();
    }
  }
}
I would like to translate the snippet above into a Knex.js implementation. How can I do that?
Does your Knex use Oracle DB's connection pool? https://github.com/tgriesser/knex/issues/2665 shows it as a future project.
Try the pg snippet you posted, but using the ALTER SESSION from your other example.
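Building on that suggestion, here is a rough, untested sketch of the pg snippet translated to the oracledb client, assuming Knex passes the afterCreate hook a raw node-oracledb connection whose execute() accepts a callback; the full ALTER SESSION statement from the question can be dropped in place of the shortened one here:
var knex = require('knex')({
  client: 'oracledb',
  connection: {
    user: 'USER',
    password: 'PWD',
    connectString: 'HOST:1521/SERVICE'
  },
  pool: {
    afterCreate: function (conn, done) {
      // run the session setup once per pooled connection; passing an error to
      // done() discards the connection from the pool
      conn.execute(
        "ALTER SESSION SET TIME_ZONE='UTC' NLS_DATE_FORMAT='DD/MM/YYYY'",
        [],
        function (err) {
          done(err, conn);
        }
      );
    }
  }
});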

pg.query not calling callback function

I'm using pg for node to connect to my Postgres server.
Here is what I'm doing:
const Pool = require('pg').Pool;

var config = {
  user: "mouser",
  password: "my password",
  host: "somethingsomething.rds.amazonaws.com",
  port: 5432,
  database: "mydb",
  ssl: true
}

const db = new Pool(config);

console.log("hello world")

let queryString = `CREATE TABLE IF NOT EXISTS favoritememes (id serial PRIMARY KEY, image_url varchar(255), date_favorited TIMESTAMP DEFAULT CURRENT_TIMESTAMP);`

db.query(queryString, function(err, result) {
  console.log("Created table");
  if (err) console.log("error", err);
});
Currently, I see "hello world" printed to my console, but it never prints "created table" and I also never see an error.
I know that my credentials are correct because I can connect to the server with SQLWorkbench with those same credentials.
I think the issue is that you're not waiting for the pool to finish being created.
Additionally, I suggest using:
Promises for your db functions; it will make you feel better about javascript ;D.
Parameterized query functions. They help keep the code clean and also save you from dealing with variable types / heavy escaping.
(BTW, I would put this in another file and export the parameterizedPromise via exports.parameterizedPromise = parameterizedPromise.)
Here is a sample of how to use a parameterized query promise:
/*
parameterizedPromise('SELECT * FROM foodtable f WHERE f.flavor = $1 AND f.meal = $2;', ['SPICY', 'BREAKFAST'])
  .then(function(result){
    console.log(result.rows);
  })
  .catch(function(error){
    console.warn(error);
  });
*/
Here is a redacted version of what I had used a while back.
const pg = require('pg');

const DBConfig = {
  host: 'xxxxxx',
  port: 'xxxxxx',
  user: 'xxxxxx',
  password: 'xxxxxx',
  database: 'xxxxxx',
  max: 10, // max number of clients in the pool
  ssl: true,
  idleTimeoutMillis: 30000 // how long a client is allowed to remain idle before being closed
};

const postgresPool = new pg.Pool(DBConfig, function(){
  console.log('connected to postgres database');
  // Need to wait for the pool to finish connecting before you can fire off your query.
  paramaterizedPromise(`CREATE TABLE IF NOT EXISTS favoritememes (id serial PRIMARY KEY, image_url varchar(255), date_favorited TIMESTAMP DEFAULT CURRENT_TIMESTAMP);`, [])
    .then(function(result){
      console.log(result);
    })
    .catch(function(error){
      console.warn(error);
    });
});
postgresPool.on('error', function (err, client) {
  // if an error is encountered by a client while it sits idle in the pool
  // the pool itself will emit an error event with both the error and
  // the client which emitted the original error
  // this is a rare occurrence but can happen if there is a network partition
  // between your application and the database, the database restarts, etc.
  // and so you might want to handle it and at least log it out
  console.error('idle client error', err.message, err.stack);
});
const paramaterizedPromise = function(query, params) {
  return new Promise( function(resolve, reject){
    postgresPool.connect(function(err, client, done) {
      if(err) {
        reject('Error Fetching Client From Pool', err);
      }
      client.query(query, params, function(err, result) {
        // call `done()` to release the client back to the pool
        done(err);
        if (err){
          reject(err);
        } else {
          resolve(result);
        }
      });
    });
  });
}
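As a side note, recent pg releases also let you skip the manual connect/done bookkeeping for one-off queries, because pool.query() checks out a client, runs the query, releases the client for you, and returns a promise when no callback is given. A small sketch, assuming a reasonably current pg version and the same shape of config object as above:
const { Pool } = require('pg');
const pool = new Pool(config); // same config object as above

// pool.query acquires and releases the client internally
pool.query('SELECT * FROM foodtable f WHERE f.flavor = $1;', ['SPICY'])
  .then(function (result) {
    console.log(result.rows);
  })
  .catch(function (error) {
    console.warn(error);
  });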

Cannot enqueue Handshake after invoking quit

I have implemented the following code:
module.exports = {
  getDataFromUserGps: function(callback) {
    connection.connect();
    connection.query("SELECT * FROM usergps",
      function(err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
    connection.end();
  },
  loginUser: function(login, pass, callback) {
    connection.connect();
    connection.query(
      "SELECT id FROM users WHERE login = ? AND pass = ?",
      [login, pass],
      function(err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
    connection.end();
  },
  getUserDetails: function(userid, callback) {
    connection.connect();
    connection.query(
      "SELECT * FROM userProfilDetails LEFT JOIN tags ON userProfilDetails.userId = tags.userId WHERE userProfilDetails.userid = ?",
      [userid],
      function(err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
    connection.end();
  },
  addTags: function(userId, tags) {
    connection.connect();
    connection.query(
      "INSERT INTO tag (userId, tag) VALUES (?, ?)",
      [userId, tags],
      function(err, results, fields) {
        if (err) throw err;
      }
    );
    connection.end();
  }
}
Everything works great the first time only. If I try to "use" the query a second time, I get the following error:
Cannot enqueue Handshake after invoking quit
I have tried not to .end() connections but it didn't help.
How can I fix this issue?
If you are using the node-mysql module, just remove the .connect and .end calls. I just solved the problem myself. Apparently they pushed unnecessary code in their last iteration that is also bugged. You don't need to connect if you have already run the createConnection call; see the sketch below.
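A minimal sketch of what that looks like for one of the exported functions above, assuming connection was already created elsewhere with mysql.createConnection:
module.exports = {
  getDataFromUserGps: function (callback) {
    // no connection.connect() / connection.end() here; the connection created
    // by mysql.createConnection is simply reused on every call
    connection.query("SELECT * FROM usergps",
      function (err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
  }
  // ...the other exported functions follow the same pattern
};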
According to:
Fixing Node Mysql "Error: Cannot enqueue Handshake after invoking quit.":
http://codetheory.in/fixing-node-mysql-error-cannot-enqueue-handshake-after-invoking-quit/
TL;DR You need to establish a new connection by calling the createConnection method after every disconnection.
and
Note: If you're serving web requests, then you shouldn't be ending connections on every request. Just create a connection on server
startup and use the connection/client object to query all the time.
You can listen on the error event to handle server disconnection and
for reconnecting purposes. Full code
here.
From:
Readme.md - Server disconnects:
https://github.com/felixge/node-mysql#server-disconnects
It says:
Server disconnects
You may lose the connection to a MySQL server due to network problems,
the server timing you out, or the server crashing. All of these events
are considered fatal errors, and will have the err.code =
'PROTOCOL_CONNECTION_LOST'. See the Error
Handling section for more information.
The best way to handle such unexpected disconnects is shown below:
function handleDisconnect(connection) {
  connection.on('error', function(err) {
    if (!err.fatal) {
      return;
    }
    if (err.code !== 'PROTOCOL_CONNECTION_LOST') {
      throw err;
    }
    console.log('Re-connecting lost connection: ' + err.stack);
    connection = mysql.createConnection(connection.config);
    handleDisconnect(connection);
    connection.connect();
  });
}

handleDisconnect(connection);
As you can see in the example above, re-connecting a connection is
done by establishing a new connection. Once terminated, an existing
connection object cannot be re-connected by design.
With Pool, disconnected connections will be removed from the pool
freeing up space for a new connection to be created on the next
getConnection call.
I have tweaked the function such that every time a connection is needed, an initializer function adds the handlers automatically:
function initializeConnection(config) {
  function addDisconnectHandler(connection) {
    connection.on("error", function (error) {
      if (error instanceof Error) {
        if (error.code === "PROTOCOL_CONNECTION_LOST") {
          console.error(error.stack);
          console.log("Lost connection. Reconnecting...");
          initializeConnection(connection.config);
        } else if (error.fatal) {
          throw error;
        }
      }
    });
  }
  var connection = mysql.createConnection(config);
  // Add handlers.
  addDisconnectHandler(connection);
  connection.connect();
  return connection;
}
Initializing a connection:
var connection = initializeConnection({
  host: "localhost",
  user: "user",
  password: "password"
});
Minor suggestion: this may not apply to everyone, but I did run into a minor issue relating to scope. If the OP feels this edit was unnecessary, he/she can choose to remove it. For me, I had to change a line in initializeConnection from var connection = mysql.createConnection(config); to simply
connection = mysql.createConnection(config);
The reason is that if connection is a global variable in your program, the original code created a new local connection variable when handling the error signal. My Node.js code kept using the same global connection variable to run queries on, so the new connection was lost in the local scope of the initializeConnection method. The modification ensures that the global connection variable is reset. This may be relevant if you're experiencing an issue known as
Cannot enqueue Query after fatal error
after performing a query once the connection was lost and then successfully re-established. This may have been a typo by the OP, but I just wanted to clarify.
I had the same problem and Google led me here. I agree with #Ata that it's not right to just remove end(). After further Googling, I think using pooling is a better way.
node-mysql doc about pooling
It's like this:
var mysql = require('mysql');
var pool = mysql.createPool(...);

pool.getConnection(function(err, connection) {
  connection.query('bla bla', function(err, rows) {
    connection.release();
  });
});
Do not connect() and end() inside the function. This will cause problems on repeated calls to the function. Make the connection only
var connection = mysql.createConnection({
  host: 'localhost',
  user: 'node',
  password: 'node',
  database: 'node_project'
})

connection.connect(function(err) {
  if (err) throw err
});
once and reuse that connection.
Inside the function
function insertData(name, id) {
  connection.query('INSERT INTO members (name, id) VALUES (?, ?)', [name, id], function(err, result) {
    if (err) throw err
  });
}
AWS Lambda functions
Use mysql.createPool() with connection.destroy()
This way, new invocations use the established pool, but don't keep the function running. Even though you don't get the full benefit of pooling (each new invocation uses a new connection instead of reusing an existing one), it makes it so that a second invocation can establish a new connection without the previous one having to be closed first.
Regarding connection.end()
This can cause a subsequent invocation to throw an error. The invocation will still retry later and work, but with a delay.
Regarding mysql.createPool() with connection.release()
The Lambda function will keep running until the scheduled timeout, as there is still an open connection.
Code example
const mysql = require('mysql');

const pool = mysql.createPool({
  connectionLimit: 100,
  host: process.env.DATABASE_HOST,
  user: process.env.DATABASE_USER,
  password: process.env.DATABASE_PASSWORD,
});

exports.handler = (event) => {
  pool.getConnection((error, connection) => {
    if (error) throw error;
    connection.query(`
      INSERT INTO table_name (event) VALUES ('${event}')
    `, function(error, results, fields) {
      if (error) throw error;
      connection.destroy();
    });
  });
};
I think this issue is similar to mine:
Connect to MySQL
End MySQL service (should not quit node script)
Start MySQL service, Node reconnects to MySQL
Query the DB -> FAIL (Cannot enqueue Query after fatal error.)
I solved this issue by recreating a new connection with the use of promises (q).
mysql-con.js
'use strict';
var config = require('./../config.js');
var colors = require('colors');
var mysql = require('mysql');
var q = require('q');

var MySQLConnection = {};

MySQLConnection.connect = function(){
  var d = q.defer();
  MySQLConnection.connection = mysql.createConnection({
    host     : 'localhost',
    user     : 'root',
    password : 'password',
    database : 'database'
  });
  MySQLConnection.connection.connect(function (err) {
    if(err) {
      console.log('Not connected '.red, err.toString().red, ' RETRYING...'.blue);
      d.reject();
    } else {
      console.log('Connected to Mysql. Exporting..'.blue);
      d.resolve(MySQLConnection.connection);
    }
  });
  return d.promise;
};

module.exports = MySQLConnection;
mysqlAPI.js
var colors = require('colors');
var mysqlCon = require('./mysql-con.js');

mysqlCon.connect().then(function(con){
  console.log('connected!');
  mysql = con;
  mysql.on('error', function (err, result) {
    console.log('error occurred. Reconneting...'.purple);
    mysqlAPI.reconnect();
  });
  mysql.query('SELECT 1 + 1 AS solution', function (err, results) {
    if(err) console.log('err',err);
    console.log('Works bro ',results);
  });
});

mysqlAPI.reconnect = function(){
  mysqlCon.connect().then(function(con){
    console.log("connected. getting new reference");
    mysql = con;
    mysql.on('error', function (err, result) {
      mysqlAPI.reconnect();
    });
  }, function (error) {
    console.log("try again");
    setTimeout(mysqlAPI.reconnect, 2000);
  });
};
I hope this helps.
In place of connection.connect(); use:
if (!connection._connectCalled) {
  connection.connect();
}
If connect() has already been called, connection._connectCalled is true, so connection.connect() will not be executed again.
Note: don't use connection.end();
SOLUTION to prevent this error (for AWS Lambda):
In order to exit the Node.js event loop, you must end the connection and then reconnect. Add the following code to invoke the callback:
connection.end( function(err) {
  if (err) {console.log("Error ending the connection:",err);}
  // reconnect in order to prevent the "Cannot enqueue Handshake after invoking quit"
  connection = mysql.createConnection({
    host     : 'rds.host',
    port     : 3306,
    user     : 'user',
    password : 'password',
    database : 'target database'
  });
  callback(null, {
    statusCode: 200,
    body: response,
  });
});
If you're working in a Lambda, I found that ending the handler with context.done() got the Lambda to finish. Before adding that one line, it would just run and run until it timed out; see the sketch below.
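A minimal sketch of that, assuming the older callback-style handler signature where context.done() marks the invocation as finished (the connection here is just a placeholder for whatever MySQL client the function already uses):
exports.handler = function (event, context) {
  connection.query('SELECT 1 + 1 AS solution', function (err, rows) {
    if (err) console.error(err);
    // without this line the invocation keeps running until it times out
    context.done(err, rows);
  });
};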
You can use
debug: false,
Example:
// mysql connection
var dbcon1 = mysql.createConnection({
  host: "localhost",
  user: "root",
  password: "",
  database: "node5",
  debug: false,
});
A little digging showed that I wasn't closing the connection at all.
So I added this code before opening the connection and after I was done with the database manipulation:
connection.end()

connection = mysql.createConnection(
  // database connection details
)

connection.connect(function (err) {
  if (!err) {
    console.log("Connected!");
    var sql = `Select something from my_heart;`
    connection.query(sql, function (err, result) {
      if (!err) {
        console.log("1 record inserted");
        res.send("Recieved")
      } else {
        console.log(err.sqlMessage)
        res.send("error")
      }
    });
  }
})
Just call connection.connect() once, outside of module.exports. It should connect once when the Node server is initialized, not on every request. You can do it this way:
const connection = sql.createConnection({
  host: "****",
  user: "****",
  password: "*****",
  database: "****"
})

connection.connect((error) => {
  if( error ) throw new Error(error)
})

module.exports = {
  getDataFromUserGps: function(callback) {
    connection.query("SELECT * FROM usergps",
      function(err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
  },
  ****
  ****
  ****
}
