Connection reset while executing multiple queries using Postgres pg and node - node.js

Here is my code, which performs multiple select queries one after another.
const { Pool } = require('pg');
const pool = new Pool(POSTGRES_CONFIG);
var aggregateDomainCount = [], domainCount = {};

pool.connect((err, client, release) => {
    if (err) {
        return console.error('Connection error', err.stack);
    }
    console.log('Connection established');
    client.query(firstQuery, function(err, results) {
        // call `release()` to return the client to the pool
        if (err) {
            return console.error('error running query', err);
        }
        domainCount = { "data1": results.rows[0].count };
        aggregateDomainCount.push(domainCount);
    });
    // ...... (more queries of the same shape)
    client.query(lastQuery, function(err, results) {
        if (err) {
            return console.error('error running query', err);
        }
        domainCount = { "dataN": results.rows[0].count };
        aggregateDomainCount.push(domainCount);
        release();
        response.json(aggregateDomainCount);
    });
});
The issue is that while executing the above queries, the console first prints 'Connection established'; then, partway through the series of queries, control enters pool.connect again, printing 'Connection established' a second time, and the request finally ends in an unknown error on the user interface. If I reduce the number of queries, I get the expected results, but even that is inconsistent.
So far, I have tried
nesting client.query calls one inside the other
nesting pool.connect calls by grouping sets of queries
setting different timeout parameters
using await client.query()
But all of the above resulted in the same issue. Could you please point me in the right direction?
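For reference, here is a minimal sketch of the strictly sequential flow the question is aiming for, using async/await; it assumes POSTGRES_CONFIG, the query strings, and response are defined as in the snippet above:
const { Pool } = require('pg');
const pool = new Pool(POSTGRES_CONFIG);

// Run every query in order on one pooled client, and only release the
// client (and answer the request) after the last query has finished.
async function aggregateCounts(queries) {
    const client = await pool.connect();
    try {
        const aggregateDomainCount = [];
        for (let i = 0; i < queries.length; i++) {
            const results = await client.query(queries[i]);
            aggregateDomainCount.push({ ['data' + (i + 1)]: results.rows[0].count });
        }
        return aggregateDomainCount;
    } finally {
        client.release(); // return the client to the pool even on error
    }
}

// aggregateCounts([firstQuery, /* ... */ lastQuery])
//     .then(data => response.json(data))
//     .catch(err => console.error('error running query', err));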

Related

Postgres connection pooling error troubleshooting

I have an AWS Lambda function written in nodejs that is doing a set of recursive postgres database calls that result in the following error every time on the 81st call:
remaining connection slots are reserved for non-replication superuser connections
I'm assuming I am leaking something at the Postgres level, but I believe I am adhering to the recommended calls for performing a single pooled query as defined at https://node-postgres.com/features/pooling. I've simplified my code as shown below so that I'm only executing the same query every time, and the result is still the same. The function testHarness is what initiates the logic within my Lambda function. The intent here is to execute a query against Postgres and, once it completes, fire off the query again, repeating 500 times for this example. It always fails when the 81st call occurs. The DB_CONNECT environment variable contains the connection information, including a "MAX" value of 3.
function testHarness(cb) {
    _test(0, cb);
}

function _test(pos, cb) {
    console.log(pos);
    _testData(function (err, data) {
        if (err) return cb(err);
        if (pos < 500) {
            _test(pos + 1, cb);
        }
        else {
            return cb(null, 'done');
        }
    });
}

function _testData(cb) {
    const { Pool } = require('pg');
    const pool = new Pool(JSON.parse(process.env.DB_CONNECT));
    const sql = 'SELECT id, url, pub_date, description, title, duration FROM episodes WHERE feed_id = $1 ORDER BY pub_date DESC LIMIT 10';
    pool.query(sql, ['28c65c8d-f96a-4499-a854-187eed7050bd'], (err, result) => {
        if (err) throw err;
        return cb(err, result);
    });
}
So the problem is the Pool objects you create and leak in the _testData function. After using a Pool you have to shut it down; you can find the documentation under the "Shutdown" heading, which says to call:
pool.end()
But the way you are using the Pool does not make sense. It is better to create it in the testHarness function so you can reuse the connections and save the connection overhead, letting your code run faster:
function testHarness(cb) {
    const { Pool } = require('pg');
    const pool = new Pool(JSON.parse(process.env.DB_CONNECT));
    _test(pool, 0, function(err, data) {
        pool.end();
        cb(err, data);
    });
}

function _test(pool, pos, cb) {
    console.log(pos);
    _testData(pool, function (err, data) {
        if (err) return cb(err);
        if (pos < 500) {
            _test(pool, pos + 1, cb); // pass the pool along on each recursive call
        }
        else {
            return cb(null, 'done');
        }
    });
}

function _testData(pool, cb) {
    const sql = 'SELECT id, url, pub_date, description, title, duration FROM episodes WHERE feed_id = $1 ORDER BY pub_date DESC LIMIT 10';
    pool.query(sql, ['28c65c8d-f96a-4499-a854-187eed7050bd'], (err, result) => {
        if (err) throw err;
        return cb(err, result);
    });
}
I am not an AWS user, but I guess it should be like any other Postgres database service; you might need to change it a bit to fit the AWS service.
Also, don't you have the ability to use the async/await pattern? It is a lot easier to comprehend.
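For what it's worth, a hedged sketch of that async/await version, reusing one Pool for all the calls and ending it exactly once at the finish:
const { Pool } = require('pg');

async function testHarness() {
    // One pool for the whole run, instead of one per query.
    const pool = new Pool(JSON.parse(process.env.DB_CONNECT));
    const sql = 'SELECT id, url, pub_date, description, title, duration FROM episodes WHERE feed_id = $1 ORDER BY pub_date DESC LIMIT 10';
    try {
        for (let pos = 0; pos <= 500; pos++) {
            console.log(pos);
            await pool.query(sql, ['28c65c8d-f96a-4499-a854-187eed7050bd']);
        }
        return 'done';
    } finally {
        await pool.end(); // shut the pool down exactly once
    }
}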

nodejs pg transactions without nesting

I would like to know if it's possible to run a series of SQL statements and have them all committed in a single transaction.
The scenario I am looking at is where an array has a series of values that I wish to insert into a table, not individually but as a unit.
I was looking at the following item, which provides a framework for transactions in node using pg. The individual queries appear to be nested within one another, so I am unsure how this would work with an array containing a variable number of elements.
https://github.com/brianc/node-postgres/wiki/Transactions
var pg = require('pg');

var rollback = function(client, done) {
    client.query('ROLLBACK', function(err) {
        //if there was a problem rolling back the query
        //something is seriously messed up. Return the error
        //to the done function to close & remove this client from
        //the pool. If you leave a client in the pool with an unaborted
        //transaction weird, hard to diagnose problems might happen.
        return done(err);
    });
};

pg.connect(function(err, client, done) {
    if (err) throw err;
    client.query('BEGIN', function(err) {
        if (err) return rollback(client, done);
        //as long as we do not call the `done` callback we can do
        //whatever we want...the client is ours until we call `done`
        //on the flip side, if you do call `done` before either COMMIT or ROLLBACK
        //what you are doing is returning a client back to the pool while it
        //is in the middle of a transaction.
        //Returning a client while it's in the middle of a transaction
        //will lead to weird & hard to diagnose errors.
        process.nextTick(function() {
            var text = 'UPDATE account SET money = money + $1 WHERE id = $2';
            client.query(text, [100, 1], function(err) {
                if (err) return rollback(client, done);
                client.query(text, [-100, 2], function(err) {
                    if (err) return rollback(client, done);
                    client.query('COMMIT', done);
                });
            });
        });
    });
});
My array logic is:
banking.forEach(function(batch) {
    client.query(text, [batch.amount, batch.id], function(err, result) { ... });
});
pg-promise offers very flexible support for transactions. See Transactions.
It also supports partial nested transactions, aka savepoints.
The library manages transactions automatically, which is what should be used these days, because too many things can go wrong if you try to organize a transaction manually, as you do in your example.
See a related question: Optional INSERT statement in a transaction
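For instance, the question's array scenario could look roughly like this with pg-promise (db, banking, and the SQL text here are assumptions based on the question, not the asker's actual code):
const pgp = require('pg-promise')();
const db = pgp(process.env.DATABASE_URL); // connection details assumed

function applyBatch(banking) {
    // Everything inside tx() commits together or rolls back together.
    return db.tx(t => {
        const queries = banking.map(batch =>
            t.none('UPDATE account SET money = money + $1 WHERE id = $2',
                [batch.amount, batch.id]));
        return t.batch(queries); // resolves once every query succeeds
    });
}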
Here's a simple TypeScript solution that avoids pg-promise:
import { PoolClient } from "pg"
import { pool } from "../database"

const tx = async (callback: (client: PoolClient) => Promise<void>) => {
    const client = await pool.connect();
    try {
        await client.query('BEGIN')
        try {
            await callback(client)
            await client.query('COMMIT')
        } catch (e) {
            await client.query('ROLLBACK')
            throw e // rethrow so the caller learns the transaction failed
        }
    } finally {
        client.release()
    }
}

export { tx }
Usage:
...
let result;
await tx(async client => {
    const { rows } = await client.query<{ cnt: string }>(
        'SELECT COUNT(*) AS cnt FROM users WHERE username = $1', [username]);
    result = parseInt(rows[0].cnt) > 0;
});
return result;
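The same helper also covers the original array scenario; a sketch, where banking and the SQL text are assumed from the question:
// The whole loop runs inside one transaction: all rows are applied as a
// unit, or none are if any query fails.
const applyBatch = (banking: { amount: number; id: number }[]) =>
    tx(async (client) => {
        for (const batch of banking) {
            await client.query(
                'UPDATE account SET money = money + $1 WHERE id = $2',
                [batch.amount, batch.id]
            );
        }
    });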

node-postgres : query not executing in sequence

I have kept the insert and update code in two different files, and based on a condition
the insert should always execute first and then the update, but somehow the update executes before the insert.
test.js : simplified code
I am using these packages: pg, node-uuid
var pg = require('pg');
var uuid = require('node-uuid').v4;

var id = uuid().toString();
var conString = 'postgres://postgres:pass@127.0.0.1:5432/testdb';

// ------INSERT
pg.connect(conString, function(err, client, done) {
    console.log('Executing Insert query');
    client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'], function(err, result) {
        done();
        if (err) { return console.error('error running query', err); }
        console.log('finished executing Insert query');
    });
});

// ------UPDATE
pg.connect(conString, function(err, client, done) {
    console.log('Executing update query');
    client.query("update testdb set iscancelled = 'yes' where id = $1", [id], function(err, result) {
        done();
        if (err) { return console.error('error running query', err); }
        console.log('finished executing Update query');
    });
});
output
tom#tom:~$node test.js
Executing Insert query
Executing update query
finished executing Update query //WHY UPDATE FINISHES FIRST
finished executing Insert query
Note:
this problem can easily be solved by using async, but my insert code and update code are in different files, and depending on the situation only the update code might execute, so I don't want to use async.
Problem
Even though the insert query starts executing first, why does the update finish first in the output?
Am I missing anything?
Let's solve this question step by step.
You stated you "don't want to use async" libraries.
Solution 1:
If PostgreSQL executes the update faster, the update will return its result before the insert. If you want the update query to start executing only after the insert finishes, then you should set the connection pool capacity to 1.
pg.defaults.poolSize = 1
but you should do this before any pg.connect()
The connect method retrieves a Client from the client pool, or if all pooled clients are busy and the pool is not full, the connect method will create a new client passing its first argument directly to the Client constructor. In either case, your supplied callback will only be called when the Client is ready to issue queries or an error is encountered. The callback will be called once and only once for each invocation of connect.
Conclusion: your queries will execute in sequence. BUT this solution is bad for scaling the app, as there will only ever be one connection serving all users. So while that connection is serving one user, other users will have to wait for a response.
Solution 2:
You also stated "I have kept insert & update code in 2 different files".
It looks like you need to design your code in such a way that you are able to use asynchronous libraries; that solves this problem.
As I already mentioned, the only way to ensure that the update function fires only after the insert function is done is to call it inside the insert function's callback. Those are the basics of asynchronous programming.
pg.connect(conString, function(err, client, done) {
    console.log('Executing Insert query');
    client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'], function(err, result) {
        done();
        if (err) { return console.error('error running query', err); }
        console.log('finished executing Insert query');
        // ------UPDATE
        pg.connect(conString, function(err, client, done) {
            console.log('Executing update query');
            client.query("update testdb set iscancelled = 'yes' where id = $1", [id], function(err, result) {
                done();
                if (err) { return console.error('error running query', err); }
                console.log('finished executing Update query');
            });
        });
    });
});
You are missing the asynchronous nature of pg.connect and also client.query. Calls to these return immediately and deliver their results through a callback, so control passes to the next expression before the query completes; this is the non-blocking nature of Node.js. If you want to guarantee the correct order, call the successive queries inside the success callback:
// ------INSERT (/path/to/insertfile)
var pg = require('pg');
module.exports = pg.connect;

// ------UPDATE (/path/to/updatefile)
var pg = require('pg');
module.exports = pg.connect;

// your calling file
var insert = require('/path/to/insertfile');
var update = require('/path/to/updatefile');
var uuid = require('node-uuid').v4;
var id = uuid().toString();
var conString = 'postgres://postgres:pass@127.0.0.1:5432/testdb';

insert(conString, function (err, client, done) {
    console.log('Executing Insert query');
    client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'], function (err, result) {
        if (err) {
            return console.error('error running query', err);
        }
        console.log('finished executing Insert query');
        update(conString, function (error, client, done) {
            console.log('Executing update query');
            client.query("update testdb set iscancelled = 'yes' where id = $1", [id], function (err, result) {
                if (err) {
                    return console.error('error running query', err);
                }
                console.log('finished executing Update query');
                done();
            });
        });
        done();
    });
});
But this is very prone to callback hell. So consider making all async calls return promises. Take a look at bluebird. If you want an ORM with built-in promise-based calls, you can take a look at sequelize. It might be handy for you.
It has syntax as easy as:
var Model1 = require('/path/to/model1');
var Model2 = require('/path/to/model2');

var insertObj = {
    "someKey": "value"
};

Model1.create(insertObj)
    .then(function (createdObj1) {
        return Model2.findOne({
            where: {
                "filter": "filterValue"
            }
        });
    })
    .then(function (documentToUpdate) {
        return documentToUpdate.update({
            "fieldToUpdate": "value"
        });
    })
    .then(null, function (err) {
        console.log(err);
    });
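For completeness, here is a hedged sketch of the same insert-then-update flow using plain node-postgres with async/await instead of an ORM; the table and conString come from the question:
const { Pool } = require('pg');
const pool = new Pool({ connectionString: conString });

async function insertThenUpdate(id) {
    // Sequencing falls out of `await`: the update cannot start until the
    // insert's promise has resolved.
    await pool.query(
        'insert into testdb (id,data,iscancelled) values ($1,$2,$3)',
        [id, 'hello', 'no']);
    console.log('finished executing Insert query');
    await pool.query("update testdb set iscancelled = 'yes' where id = $1", [id]);
    console.log('finished executing Update query');
}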

MongoDB query resets socket connection to Node.js server

Executing a find query against my MongoDB database seems to reset the connection and make the Node server crash.
My server handles socket events like this:
io.sockets.on('connection', function(socket) {
    MongoClient.connect(url, function(err, db) {
        if (err)
            console.log('Error');
        console.log('Connection established to', url);
        var collection = db.collection('spedizioni');
        socket.on('adminReq', function() {
            handlers.handleAdmin(collection, socket);
        });
    });
});
the handleAdmin function is:
function handleAdmin(collection, socket) {
    console.log('Admin event');
    collection.find(null, function(err, raw) {
        console.log('Find function');
        console.log(raw);
        if (err) {
            socket.emit('err');
            console.log('Error function');
        }
        if (raw) {
            socket.emit('adminRes', raw);
            console.log('Response function');
        } else {
            socket.emit('adminNull');
            console.log('Null function');
        }
    });
}
I want the query to return all items in the database; as per the MongoDB manual, I do that by executing a find query without a parameter.
I tried omitting null or using {} as the first parameter, but nothing changes.
Upon pressing the button that generates the adminReq event, the 'Connection established to' string is printed on the console, and the Firefox console signals that a NEW connection to the socket was established; my client script connects once at document.load.
Below is the node console output after that; as you can see, the query is executed, and looking at the 'raw' output it seems failed attempts were made.
err is null and there is nothing else to output.
Looking at other answers about 'maximum call stack exceeded', it usually seems to be caused by a recursive function, but that's not the case here.
http://pastebin.com/0xv1qcHn
Why is this the output and not the query result? Why is the connection reset?
A very similar function is working fine, and the syntax for returning the whole DB seems correct; it feels like I am missing something very obvious...
Not sure if you can use null, but an empty object should work. You also need to convert your result into an array: find() returns a cursor rather than the documents themselves.
collection.find({}).toArray(function(err, raw) {
    console.log('Find function');
    console.log(raw);
    if (err) {
        socket.emit('err');
        console.log('Error function');
    }
    if (raw) {
        socket.emit('adminRes', raw);
        console.log('Response function');
    } else {
        socket.emit('adminNull');
        console.log('Null function');
    }
});
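With a driver version that supports promises, the same handler can be written without the callback; a sketch, reusing the names from the question:
// find({}) returns a Cursor; toArray() materialises it into an array of
// documents, which is what the socket payload needs.
async function handleAdmin(collection, socket) {
    try {
        const raw = await collection.find({}).toArray();
        if (raw.length) {
            socket.emit('adminRes', raw);
        } else {
            socket.emit('adminNull');
        }
    } catch (err) {
        console.log('Error function', err);
        socket.emit('err');
    }
}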

Cannot enqueue Handshake after invoking quit

I have implemented the following code:
module.exports = {
    getDataFromUserGps: function(callback) {
        connection.connect();
        connection.query("SELECT * FROM usergps",
            function(err, results, fields) {
                if (err) return callback(err, null);
                return callback(null, results);
            }
        );
        connection.end();
    },
    loginUser: function(login, pass, callback) {
        connection.connect();
        connection.query(
            "SELECT id FROM users WHERE login = ? AND pass = ?",
            [login, pass],
            function(err, results, fields) {
                if (err) return callback(err, null);
                return callback(null, results);
            }
        );
        connection.end();
    },
    getUserDetails: function(userid, callback) {
        connection.connect();
        connection.query(
            "SELECT * FROM userProfilDetails LEFT JOIN tags ON userProfilDetails.userId = tags.userId WHERE userProfilDetails.userid = ?",
            [userid],
            function(err, results, fields) {
                if (err) return callback(err, null);
                return callback(null, results);
            }
        );
        connection.end();
    },
    addTags: function(userId, tags) {
        connection.connect();
        connection.query(
            "INSERT INTO tag (userId, tag) VALUES (?, ?)",
            [userId, tags],
            function(err, results, fields) {
                if (err) throw err;
            }
        );
        connection.end();
    }
};
Everything works great, but only the first time. If I want to "use" a query a second time, I get the following error:
Cannot enqueue Handshake after invoking quit
I have tried not to .end() connections but it didn't help.
How can I fix this issue?
If you're using the node-mysql module, just remove .connect and .end. I just solved the problem myself. Apparently they pushed unnecessary code in their last iteration that is also bugged. You don't need to connect if you have already run the createConnection call.
According to:
Fixing Node Mysql "Error: Cannot enqueue Handshake after invoking quit.":
http://codetheory.in/fixing-node-mysql-error-cannot-enqueue-handshake-after-invoking-quit/
TL;DR You need to establish a new connection by calling the createConnection method after every disconnection.
and
Note: If you're serving web requests, then you shouldn't be ending connections on every request. Just create a connection on server
startup and use the connection/client object to query all the time.
You can listen on the error event to handle server disconnection and
for reconnecting purposes. Full code
here.
From:
Readme.md - Server disconnects:
https://github.com/felixge/node-mysql#server-disconnects
It says:
Server disconnects
You may lose the connection to a MySQL server due to network problems,
the server timing you out, or the server crashing. All of these events
are considered fatal errors, and will have the err.code =
'PROTOCOL_CONNECTION_LOST'. See the Error
Handling section for more information.
The best way to handle such unexpected disconnects is shown below:
function handleDisconnect(connection) {
    connection.on('error', function(err) {
        if (!err.fatal) {
            return;
        }
        if (err.code !== 'PROTOCOL_CONNECTION_LOST') {
            throw err;
        }
        console.log('Re-connecting lost connection: ' + err.stack);
        connection = mysql.createConnection(connection.config);
        handleDisconnect(connection);
        connection.connect();
    });
}

handleDisconnect(connection);
As you can see in the example above, re-connecting a connection is
done by establishing a new connection. Once terminated, an existing
connection object cannot be re-connected by design.
With Pool, disconnected connections will be removed from the pool
freeing up space for a new connection to be created on the next
getConnection call.
I have tweaked the function such that every time a connection is needed, an initializer function adds the handlers automatically:
function initializeConnection(config) {
    function addDisconnectHandler(connection) {
        connection.on("error", function (error) {
            if (error instanceof Error) {
                if (error.code === "PROTOCOL_CONNECTION_LOST") {
                    console.error(error.stack);
                    console.log("Lost connection. Reconnecting...");
                    initializeConnection(connection.config);
                } else if (error.fatal) {
                    throw error;
                }
            }
        });
    }

    var connection = mysql.createConnection(config);

    // Add handlers.
    addDisconnectHandler(connection);
    connection.connect();
    return connection;
}
Initializing a connection:
var connection = initializeConnection({
    host: "localhost",
    user: "user",
    password: "password"
});
Minor suggestion: This may not apply to everyone, but I did run into a minor issue relating to scope. If the OP feels this edit was unnecessary, he/she can choose to remove it. For me, I had to change a line in initializeConnection, which was var connection = mysql.createConnection(config); to simply
connection = mysql.createConnection(config);
The reason is that if connection is a global variable in your program, the code above was creating a new local connection variable when handling an error signal. In my Node.js code, I kept using the same global connection variable to run queries on, so the new connection would be lost in the local scope of the initializeConnection method. With this modification, the global connection variable is properly reset. This may be relevant if you're experiencing an issue known as
Cannot enqueue Query after fatal error
after trying to perform a query after losing connection and then successfully reconnecting. This may have been a typo by the OP, but I just wanted to clarify.
I had the same problem and Google led me here. I agree with @Ata that it's not right to just remove end(). After further Googling, I think using pooling is a better way.
node-mysql doc about pooling
It's like this:
var mysql = require('mysql');
var pool = mysql.createPool(...);

pool.getConnection(function(err, connection) {
    connection.query('bla bla', function(err, rows) {
        connection.release();
    });
});
Do not connect() and end() inside the function. This will cause problems on repeated calls to the function. Make the connection only
var connection = mysql.createConnection({
    host: 'localhost',
    user: 'node',
    password: 'node',
    database: 'node_project'
});

connection.connect(function(err) {
    if (err) throw err;
});
once and reuse that connection.
Inside the function
function insertData(name, id) {
    connection.query('INSERT INTO members (name, id) VALUES (?, ?)', [name, id], function(err, result) {
        if (err) throw err;
    });
}
AWS Lambda functions
Use mysql.createPool() with connection.destroy()
This way, new invocations use the established pool but don't keep the function running. Even though you don't get the full benefit of pooling (each invocation creates a new connection instead of reusing an existing one), it means a second invocation can establish a new connection without the previous one having to be closed first.
Regarding connection.end()
This can cause a subsequent invocation to throw an error. The invocation will still retry later and work, but with a delay.
Regarding mysql.createPool() with connection.release()
The Lambda function will keep running until the scheduled timeout, as there is still an open connection.
Code example
const mysql = require('mysql');

const pool = mysql.createPool({
    connectionLimit: 100,
    host: process.env.DATABASE_HOST,
    user: process.env.DATABASE_USER,
    password: process.env.DATABASE_PASSWORD,
});

exports.handler = (event) => {
    pool.getConnection((error, connection) => {
        if (error) throw error;
        // a placeholder avoids the SQL injection risk of interpolating
        // the event into the query string
        connection.query('INSERT INTO table_name (event) VALUES (?)', [JSON.stringify(event)], function(error, results, fields) {
            if (error) throw error;
            connection.destroy();
        });
    });
};
I think this issue is similar to mine:
Connect to MySQL
End MySQL service (should not quit node script)
Start MySQL service, Node reconnects to MySQL
Query the DB -> FAIL (Cannot enqueue Query after fatal error.)
I solved this issue by creating a new connection with the use of promises (q).
mysql-con.js
'use strict';

var config = require('./../config.js');
var colors = require('colors');
var mysql = require('mysql');
var q = require('q');

var MySQLConnection = {};

MySQLConnection.connect = function() {
    var d = q.defer();
    MySQLConnection.connection = mysql.createConnection({
        host     : 'localhost',
        user     : 'root',
        password : 'password',
        database : 'database'
    });
    MySQLConnection.connection.connect(function (err) {
        if (err) {
            console.log('Not connected '.red, err.toString().red, ' RETRYING...'.blue);
            d.reject();
        } else {
            console.log('Connected to Mysql. Exporting..'.blue);
            d.resolve(MySQLConnection.connection);
        }
    });
    return d.promise;
};

module.exports = MySQLConnection;
mysqlAPI.js
var colors = require('colors');
var mysqlCon = require('./mysql-con.js');

var mysqlAPI = {}; // holder for the reconnect helper below
var mysql;         // current connection reference

mysqlCon.connect().then(function(con) {
    console.log('connected!');
    mysql = con;
    mysql.on('error', function (err, result) {
        console.log('error occurred. Reconnecting...'.purple);
        mysqlAPI.reconnect();
    });
    mysql.query('SELECT 1 + 1 AS solution', function (err, results) {
        if (err) console.log('err', err);
        console.log('Works bro ', results);
    });
});

mysqlAPI.reconnect = function() {
    mysqlCon.connect().then(function(con) {
        console.log("connected. getting new reference");
        mysql = con;
        mysql.on('error', function (err, result) {
            mysqlAPI.reconnect();
        });
    }, function (error) {
        console.log("try again");
        setTimeout(mysqlAPI.reconnect, 2000);
    });
};
I hope this helps.
In place of connection.connect(); use:
if (!connection._connectCalled) {
    connection.connect();
}
If connect has already been called, connection._connectCalled is true, and connection.connect() will not execute again.
Note: don't use connection.end();
SOLUTION to prevent this error (for AWS Lambda):
In order to exit the Node.js event loop you must end the connection, and then reconnect. Add the following code to invoke the callback:
connection.end(function(err) {
    if (err) { console.log("Error ending the connection:", err); }

    // reconnect in order to prevent the "Cannot enqueue Handshake after invoking quit"
    connection = mysql.createConnection({
        host     : 'rds.host',
        port     : 3306,
        user     : 'user',
        password : 'password',
        database : 'target database'
    });

    callback(null, {
        statusCode: 200,
        body: response,
    });
});
If you're working with a Lambda, I found that ending the handler with context.done() got the Lambda to finish. Before adding that one line, it would just run and run until it timed out.
You can use
debug: false,
Example:
// mysql connection
var dbcon1 = mysql.createConnection({
    host: "localhost",
    user: "root",
    password: "",
    database: "node5",
    debug: false,
});
A little digging showed that I wasn't closing the connection at all.
So I added this code, ending the old connection before opening up a new one once I was done with the database manipulation:
connection.end();

connection = mysql.createConnection(
    // database connection details
);

connection.connect(function (err) {
    if (!err) {
        console.log("Connected!");
        var sql = `Select something from my_heart;`;
        connection.query(sql, function (err, result) {
            if (!err) {
                console.log("1 record inserted");
                res.send("Received");
            } else {
                console.log(err.sqlMessage);
                res.send("error");
            }
        });
    }
});
Just use connection.connect() once, outside of module.exports. It should connect once when the Node server is initialised, not on every request. You can do it this way:
const connection = sql.createConnection({
    host: "****",
    user: "****",
    password: "*****",
    database: "****"
});

connection.connect((error) => {
    if (error) throw new Error(error);
});

module.exports = {
    getDataFromUserGps: function(callback) {
        connection.query("SELECT * FROM usergps",
            function(err, results, fields) {
                if (err) return callback(err, null);
                return callback(null, results);
            }
        );
    },
    ****
    ****
    ****
}
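Pulling the advice in these answers together, a hedged sketch of the pooled variant: create one pool at startup and let pool.query check a connection out and back in on every call, so there is no connect()/end() pairing to get wrong.
const mysql = require('mysql');

// One pool for the whole process, created at server startup.
const pool = mysql.createPool({
    connectionLimit: 10,
    host: 'localhost',
    user: 'node',
    password: 'node',
    database: 'node_project'
});

module.exports = {
    getDataFromUserGps: function (callback) {
        // pool.query acquires a connection, runs the query, and releases
        // the connection automatically.
        pool.query('SELECT * FROM usergps', function (err, results) {
            if (err) return callback(err, null);
            return callback(null, results);
        });
    }
};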
