I am new to node-postgres (pg) for Node.js. I am trying to figure out the proper way to make queries. Right now I have this:
var client = new pg.Client(connectionString);
client.connect();
And then in each of my http request routes, I have this kind of code:
var query = client.query(sql);
query.on('row', function(row, result) {
result.addRow(row);
});
query.on('end', function(data) {
if (data.rows[0].count === '1') {
return callback();
}
return failedCallback(req, res);
});
Is this the way to do it? Or should I do this in each http route handler:
pg.connect(conString, function(err, client) {
// Use the client to do things here
client.end();
});
Also, do I need to end the client in each http route handler this way?
Matter of choice:
It is a matter of choice. The first approach lets you reuse the declared client in other functions, while in the second the client lives inside your pg.connect callback. I don't think you can say one is proper and the other is not. The question really comes down to: which is better, c.query(sql, function (err, result) { or query.on('end', function(data) {?
Regarding client.end():
If you end the connection in each http route handler, you won't be able to reuse the client unless you call client.connect() again. That does not mean you never have to close it, though. I believe tying client termination to response.send() is reasonable. If you don't close connections to Postgres, you will hit max_connections quite fast. Again, you will probably use connection pooling instead of connecting to pg directly, but that does not mean you should keep creating new connections endlessly.
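For instance, a per-request checkout from pg's built-in pool (the same old pg.connect API used in this thread) might look like the sketch below; app is assumed to be an Express app, and the route and table names are made up:
app.get('/users/count', function (req, res) {
  pg.connect(conString, function (err, client, done) {
    if (err) { return res.status(500).send(err.message); }
    client.query('SELECT count(*) AS count FROM users', function (err, result) {
      done(); // return the client to the pool; do NOT client.end() here
      if (err) { return res.status(500).send(err.message); }
      res.json(result.rows[0]);
    });
  });
});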
Example of both:
//https://github.com/brianc/node-postgres
var pg = require('pg');
var conString = 'postgres://n@10.1.10.199/b';
var sql = 'SELECT substr(extract(epoch from now())::text,10,4) sec, $1::int client,pg_backend_pid() pid,count(*) from pg_stat_activity where usename = current_user group by client';
var client = new pg.Client(conString);
client.connect();
var query = client.query(sql,[1]);
query.on('end', function(data) {
console.log(data.rows);
});
var query = client.query(sql,[1]);
query.on('end', function(data) {
console.log(data.rows);
//client.end();
});
var to = setTimeout( // we start a new client after a small timeout, to see one session open alongside the previous client
function() {
pg.connect(conString, function(err, c) {
// execute a query on our database
c.query('select pg_sleep(0.6);', function (err, result) {
});
c.query(sql,[2], function (err, result) {
if (err) throw err;
console.log(result.rows);
});
c.query(sql,[4], function (err, result) {
console.log(result.rows);
});
//c.end();
});
}
, 500
);
var to = setTimeout(
function() {
var query = client.query(sql,[3]);
query.on('end', function(data) {
console.log(data.rows);
//client.end();
});
}
, 1000
);
Generates:
...js>node 1.js
[ { sec: '2.31', client: 1, pid: 23327, count: '1' } ]
[ { sec: '2.32', client: 1, pid: 23327, count: '1' } ]
[ { sec: '3.29', client: 3, pid: 23327, count: '2' } ]
[ { sec: '3.41', client: 2, pid: 23328, count: '2' } ]
[ { sec: '3.42', client: 4, pid: 23328, count: '2' } ]
As you can see, clients 1 and 3 share the same pid, although they were called in different functions, because the global client variable was reused. The same goes for 2 and 4: called asynchronously inside one pg.connect, they share one pid.
Now, whether you should put your code into c.query(sql, function (err, result) { or query.on('end', function(data) { depends on what you want to achieve.
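For contrast, here are both styles on the same query (a sketch against the same old pg API used above):
// Callback style: pg buffers the rows and hands you the whole result at once.
client.query(sql, [1], function (err, result) {
  if (err) { return console.error(err); }
  console.log(result.rows);
});
// Event style: you see each row as it arrives, then the accumulated result on 'end'.
var q = client.query(sql, [1]);
q.on('row', function (row, result) { result.addRow(row); });
q.on('end', function (result) { console.log(result.rows); });
q.on('error', function (err) { console.error(err); });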
Related
I have a node application that makes a call to mongoDB every 10 seconds, but looking at the output in my terminal, the connections just keep counting up and never seem to close:
My code to hit the db every 10 seconds:
const MongoClient = require("mongodb").MongoClient
setInterval(function(){
MongoClient.connect(uri, (err, client) => {
if (err){
console.log(err);
}
database = client.db(databaseName)
getData(function(data){
if(data.length > 0){
db_response = data;
params["fieldA"] = db_response[0]['fieldA'];
}
})
})
}, 10000)
function getData(callback){
var query = { fieldA: "foo" };
database.collection(CollectionName).find(query).toArray(function(err, result){
if (err){
throw err;
}
callback(result);
})
}
(The vars uri, CollectionName and databaseName are declared earlier.) I guess what I need to do (and haven't yet figured out) is connect to the DB once when the server starts, and then run the getData() function on successful connection. Does that mean the database variable needs to be a global var?
As you correctly identified, you only need to create your db connection once. So rather than wrapping the db connection creation in setInterval, wrap setInterval around the one function you want to repeat, in this case getData.
On your other question: the database variable doesn't need to be global, but you are right that getData needs access to it, so pass it as an argument along with your callback function.
If you want to close your connection, call client.close() inside the MongoClient.connect callback:
const MongoClient = require("mongodb").MongoClient
MongoClient.connect(uri, (err, client) => {
if (err){
console.log(err);
}
const database = client.db(databaseName);
setInterval(function(){
getData(database, function(data){
if(data.length > 0){
db_response = data;
params["fieldA"] = db_response[0]['fieldA'];
}
})
}, 10000)
})
function getData(db, callback){
var query = { fieldA: "foo" };
db.collection(CollectionName).find(query).toArray(function(err, result){
if (err){
throw err;
}
callback(result);
})
}
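If you also want to honor the client.close() advice from above without killing the polling loop, keep the interval handle and shut both down together. A minimal sketch; the SIGINT hook is an assumption about how the script gets stopped:
MongoClient.connect(uri, (err, client) => {
  if (err) return console.log(err);
  const database = client.db(databaseName);
  const timer = setInterval(() => {
    getData(database, (data) => { /* handle data as above */ });
  }, 10000);
  // On shutdown: stop polling first, then release the connection.
  process.on('SIGINT', () => {
    clearInterval(timer);
    client.close();
    process.exit(0);
  });
});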
While my database server is not available and any function of my node-express REST service, like hiExpress, is called, Node.js crashes the server and the console reports:
sql server connection closed
I do not want this to happen: the failure should either reach the error function or at least be caught by the catch block. What can I do to avoid crashing Node.js when the database server is not available? I am using the following code, which works absolutely fine as long as the database server is available.
var sqlServer = require('seriate');
app.get('/hiExpress',function(req, res)
{
var sr = {error:'',message:''};
var sql= 'select * from table1 where id=? and name=?';
var params = {id: 5, name:'sami'};
exeDB(res,sr,sql, params);//sent only 4 parameters (not 6)
});
function exeDB(res, sr, sql, params, callback, multiple) {
try {
var obj = {};
for (p in params) {
if (params.hasOwnProperty(p)) {
obj[p] = {
type: sqlServer.VARCHAR,
val: params[p]
};
}
};
var exeOptions = {
query: sql,
params: obj
};
if (multiple) {
exeOptions.multiple = true;
}
sqlServer.execute(sqlServerConfigObject, exeOptions).then(function (results) {
sr.data = results;
if (callback)
callback(sr);
else
res.json(sr); //produces result when success
}, function (err) {
//sr.message = sql;
console.log(11);
sr.error = err.message;
res.json(sr);
});
}
catch (ex) {
console.log(21);
sr.error = ex.message;
res.json(sr);
}
}
Why I preferred to use seriate
I had not been very comfortable with node-SQL, especially when it came to the multiple-queries option, even without a transaction. Seriate also makes parameterized queries easy to adopt.
You can use transaction without seriate but with async like below
async.series([
function(callback) {db.run('begin transaction', callback)},
function(callback) {db.run( ..., callback)},
function(callback) {db.run( ..., callback)},
function(callback) {db.run( ..., callback)},
function(callback) {db.run('commit transaction', callback)},
], function(err, results){
if (err) {
db.run('rollback transaction');
return console.log(err);
}
// if some queries return rows then results[query-no] contains them
})
The code is quite dirty: passing the req and res params into the DB layer is not a good idea. Try changing exeDB. I'm not sure, but you probably don't attach an error handler to the promise:
function exeDB(res, sr, sql, params, callback, multiple) {
// It will execute with no error, no doubt
var obj = {};
for (p in params) {
if (params.hasOwnProperty(p)) {
obj[p] = {
type: sqlServer.VARCHAR,
val: params[p]
};
}
};
var exeOptions = {
query: sql,
params: obj
};
if (multiple) {
exeOptions.multiple = true;
}
// Potential problem is here.
// Catch is useless because code below is asynchronous.
sqlServer.execute(sqlServerConfigObject, exeOptions).then(function (results) {
sr.data = results;
if (callback)
callback(sr);
else
res.json(sr); //produces result when success
}).catch(function (err) { // !!! You must provide an on-error handler
console.log(err);
});
}
I have the following code in Node.js that uses pg (https://github.com/brianc/node-postgres).
My code to create subscriptions for an employee is as follows:
client.query(
'INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) ' +
'values ($1,$2,$3)', [
datasetArr[0].subscription_guid,
datasetArr[0].employer_guid,
datasetArr[0].employee_guid
],
function(err, result) {
done();
if (err) {
set_response(500, err, res);
logger.error('error running query', err);
return console.error('error running query', err);
}
logger.info('subscription with created');
set_response(201);
});
As you have already noticed, datasetArr is an array. I would like to create mass subscriptions for more than one employee at a time. However, I would not like to loop through the array. Is there a way to do it out of the box with pg?
I did a search for the same question, but found no solution yet.
With the async library it is very simple to run the query several times and do the necessary error handling.
Maybe this code variant helps.
(Inserting 10,000 small JSON objects into an empty database took 6 seconds.)
function insertData(item,callback) {
client.query('INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) ' +
'values ($1,$2,$3)', [
item.subscription_guid,
item.employer_guid,
item.employee_guid
],
function(err,result) {
// return any err to async.each iterator
callback(err);
})
}
async.each(datasetArr,insertData,function(err) {
// Release the client to the pg module
done();
if (err) {
set_response(500, err, res);
logger.error('error running query', err);
return console.error('error running query', err);
}
logger.info('subscription with created');
set_response(201);
})
It looks to me like the best way is to use the PostgreSQL JSON functions:
client.query('INSERT INTO table (columns) ' +
'SELECT m.* FROM json_populate_recordset(null::your_custom_type, $1) AS m',
[JSON.stringify(your_json_object_array)], function(err, result) {
if(err) {
console.log(err);
} else {
console.log(result);
}
});
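The null::your_custom_type argument needs a row type whose fields match the JSON keys. A minimal sketch of that missing piece, reusing the subscriptions example from the question (the type name subscription_row and the uuid columns are assumptions):
// One-off composite type matching the JSON keys (run once, e.g. in a migration):
client.query(
  'CREATE TYPE subscription_row AS ' +
  '(subscription_guid uuid, employer_guid uuid, employee_guid uuid)',
  function (err) {
    if (err) { return console.log(err); } // e.g. the type already exists
    client.query(
      'INSERT INTO subscriptions (subscription_guid, employer_guid, employee_guid) ' +
      'SELECT m.* FROM json_populate_recordset(null::subscription_row, $1) AS m',
      [JSON.stringify(datasetArr)],
      function (err, result) {
        if (err) { console.log(err); } else { console.log(result.rowCount + ' rows inserted'); }
      }
    );
  }
);
If the JSON keys line up with the table's own columns, you can skip the custom type entirely and use the table's row type: null::subscriptions.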
To do a bulk insert into PostgreSQL from Node.js, the better option is the COPY command provided by Postgres, together with pg-copy-streams.
Code snippet from: https://gist.github.com/sairamkrish/477d20980611202f46a2d44648f7b14b
/*
Pseudo code - to serve as a help guide.
*/
const copyFrom = require('pg-copy-streams').from;
const Readable = require('stream').Readable;
const { Pool,Client } = require('pg');
const fs = require('fs');
const path = require('path');
const datasourcesConfigFilePath = path.join(__dirname,'..','..','server','datasources.json');
const datasources = JSON.parse(fs.readFileSync(datasourcesConfigFilePath, 'utf8'));
const pool = new Pool({
user: datasources.PG.user,
host: datasources.PG.host,
database: datasources.PG.database,
password: datasources.PG.password,
port: datasources.PG.port,
});
const bulkInsert = (employees) => {
pool.connect().then(client=>{
let done = () => {
client.release();
}
var stream = client.query(copyFrom('COPY employee (name,age,salary) FROM STDIN'));
var rs = new Readable;
let currentIndex = 0;
rs._read = function () {
if (currentIndex === employees.length) {
rs.push(null);
} else {
let employee = employees[currentIndex];
rs.push(employee.name + '\t' + employee.age + '\t' + employee.salary + '\n');
currentIndex = currentIndex+1;
}
};
let onError = strErr => {
console.error('Something went wrong:', strErr);
done();
};
rs.on('error', onError);
stream.on('error', onError);
stream.on('end',done);
rs.pipe(stream);
});
};
module.exports = { bulkInsert };
Finer details are explained in the gist linked above.
Create your data structure as:
[ [val1,val2],[val1,val2] ...]
Then convert it into a string:
JSON.stringify([['a','b'],['c']]).replace(/\[/g,"(").replace(/\]/g,")").replace(/"/g,'\'').slice(1,-1)
Append it to the query and you are done!
Agreed, it has string-building costs, but it's way cheaper than single inserts.
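To make the trick concrete, here is what that one-liner actually produces (the table and column names are made up):
var rows = [['a', 'b'], ['c', 'd']];
var values = JSON.stringify(rows)
  .replace(/\[/g, '(')
  .replace(/\]/g, ')')
  .replace(/"/g, '\'')
  .slice(1, -1);
// values === "('a','b'),('c','d')"
client.query('INSERT INTO pairs (left_col, right_col) VALUES ' + values, function (err) {
  if (err) { console.log(err); }
});
Note that beyond swapping double quotes for single quotes, nothing here is escaped, so this is only safe for values you fully control.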
Use an ORM, e.g. Objection.
Also, increase the connection pool size based on your db server and the number of active connections you need (see the sketch after the example below).
someMovie
.$relatedQuery('actors')
.insert([
{firstName: 'Jennifer', lastName: 'Lawrence'},
{firstName: 'Bradley', lastName: 'Cooper'}
])
.then(function (actors) {
console.log(actors[0].firstName);
console.log(actors[1].firstName);
});
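On the pool-size point: Objection runs on top of knex, so the pool is configured there. A minimal sketch (the connection string and pool bounds are placeholders to adapt):
const Knex = require('knex');
const { Model } = require('objection');

const knex = Knex({
  client: 'pg',
  connection: 'postgres://user:pass@localhost/mydb', // placeholder
  pool: { min: 2, max: 20 } // size this against your server's max_connections
});

// Point all Objection models at this knex instance.
Model.knex(knex);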
I have implemented the following code:
module.exports = {
getDataFromUserGps: function(callback)
{
connection.connect();
connection.query("SELECT * FROM usergps",
function(err, results, fields) {
if (err) return callback(err, null);
return callback(null, results);
}
);
connection.end();
},
loginUser: function(login, pass, callback)
{
connection.connect();
connection.query(
"SELECT id FROM users WHERE login = ? AND pass = ?",
[login, pass],
function(err, results, fields)
{
if (err) return callback(err, null);
return callback(null, results);
}
);
connection.end();
},
getUserDetails: function(userid, callback)
{
connection.connect();
connection.query(
"SELECT * FROM userProfilDetails LEFT JOIN tags ON userProfilDetails.userId = tags.userId WHERE userProfilDetails.userid = ?",
[userid],
function(err, results, fields)
{
if (err) return callback(err, null);
return callback(null, results);
}
);
connection.end();
},
addTags: function(userId, tags)
{
connection.connect();
connection.query(
"INSERT INTO tag (userId, tag) VALUES (?, ?)",
[userId, tags],
function(err, results, fields)
{
if (err) throw err;
}
)
connection.end();
}
}
Everything works great, but only the first time. If I want to use a query a second time, I get the following error:
Cannot enqueue Handshake after invoking quit
I have tried not calling .end() on connections, but it didn't help.
How can I fix this issue?
If you're using the node-mysql module, just remove the .connect and .end calls. I just solved the problem myself. Apparently they pushed unnecessary code in their last iteration that is also bugged. You don't need to connect if you have already run the createConnection call.
According to:
Fixing Node Mysql "Error: Cannot enqueue Handshake after invoking quit.":
http://codetheory.in/fixing-node-mysql-error-cannot-enqueue-handshake-after-invoking-quit/
TL;DR You need to establish a new connection by calling the createConnection method after every disconnection.
and
Note: If you're serving web requests, then you shouldn't be ending connections on every request. Just create a connection on server
startup and use the connection/client object to query all the time.
You can listen on the error event to handle server disconnection and
for reconnecting purposes. Full code
here.
From:
Readme.md - Server disconnects:
https://github.com/felixge/node-mysql#server-disconnects
It says:
Server disconnects
You may lose the connection to a MySQL server due to network problems,
the server timing you out, or the server crashing. All of these events
are considered fatal errors, and will have the err.code =
'PROTOCOL_CONNECTION_LOST'. See the Error
Handling section for more information.
The best way to handle such unexpected disconnects is shown below:
function handleDisconnect(connection) {
connection.on('error', function(err) {
if (!err.fatal) {
return;
}
if (err.code !== 'PROTOCOL_CONNECTION_LOST') {
throw err;
}
console.log('Re-connecting lost connection: ' + err.stack);
connection = mysql.createConnection(connection.config);
handleDisconnect(connection);
connection.connect();
});
}
handleDisconnect(connection);
As you can see in the example above, re-connecting a connection is
done by establishing a new connection. Once terminated, an existing
connection object cannot be re-connected by design.
With Pool, disconnected connections will be removed from the pool
freeing up space for a new connection to be created on the next
getConnection call.
I have tweaked the function such that every time a connection is needed, an initializer function adds the handlers automatically:
function initializeConnection(config) {
function addDisconnectHandler(connection) {
connection.on("error", function (error) {
if (error instanceof Error) {
if (error.code === "PROTOCOL_CONNECTION_LOST") {
console.error(error.stack);
console.log("Lost connection. Reconnecting...");
initializeConnection(connection.config);
} else if (error.fatal) {
throw error;
}
}
});
}
var connection = mysql.createConnection(config);
// Add handlers.
addDisconnectHandler(connection);
connection.connect();
return connection;
}
Initializing a connection:
var connection = initializeConnection({
host: "localhost",
user: "user",
password: "password"
});
Minor suggestion: this may not apply to everyone, but I did run into a minor issue relating to scope. (If the OP feels this edit is unnecessary, he/she can remove it.) For me, I had to change a line in initializeConnection, which was var connection = mysql.createConnection(config);, to simply
connection = mysql.createConnection(config);
The reason is that if connection is a global variable in your program, the error handler was creating a new local connection variable, while my code kept running queries on the same global connection variable, so the new connection got lost in the local scope of the initializeConnection method. The modification ensures that the global connection variable is reset. This may be relevant if you're experiencing an issue known as
Cannot enqueue Query after fatal error
when trying to perform a query after losing the connection and then successfully reconnecting. This may have been a typo by the OP, but I just wanted to clarify.
I had the same problem and Google led me here. I agree with @Ata that it's not right to just remove end(). After further Googling, I think using pooling is a better way.
node-mysql doc about pooling
It's like this:
var mysql = require('mysql');
var pool = mysql.createPool(...);
pool.getConnection(function(err, connection) {
connection.query( 'bla bla', function(err, rows) {
connection.release();
});
});
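As a shorthand, pool.query() does the getConnection / query / release dance for you:
pool.query('SELECT 1 + 1 AS solution', function (err, rows) {
  if (err) throw err;
  console.log(rows[0].solution); // 2
});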
Do not connect() and end() inside the function. This will cause problems on repeated calls to the function. Make the connection only
var connection = mysql.createConnection({
host: 'localhost',
user: 'node',
password: 'node',
database: 'node_project'
})
connection.connect(function(err) {
if (err) throw err
});
once and reuse that connection.
Inside the function
function insertData(name,id) {
connection.query('INSERT INTO members (name, id) VALUES (?, ?)', [name,id], function(err,result) {
if(err) throw err
});
}
AWS Lambda functions
Use mysql.createPool() with connection.destroy()
This way, new invocations use the established pool, but don't keep the function running. Even though you don't get the full benefit of pooling (each invocation creates a new connection instead of reusing an existing one), it means a second invocation can establish a new connection without the previous one having to be closed first.
Regarding connection.end()
This can cause a subsequent invocation to throw an error. The invocation will still retry later and work, but with a delay.
Regarding mysql.createPool() with connection.release()
The Lambda function will keep running until the scheduled timeout, as there is still an open connection.
Code example
const mysql = require('mysql');
const pool = mysql.createPool({
connectionLimit: 100,
host: process.env.DATABASE_HOST,
user: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
});
exports.handler = (event) => {
pool.getConnection((error, connection) => {
if (error) throw error;
// Use a placeholder instead of interpolating event into the SQL string.
connection.query('INSERT INTO table_name (event) VALUES (?)',
[JSON.stringify(event)],
function(error, results, fields) {
if (error) throw error;
connection.destroy();
});
});
};
I think this issue is similar to mine:
Connect to MySQL
End MySQL service (should not quit node script)
Start MySQL service, Node reconnects to MySQL
Query the DB -> FAIL (Cannot enqueue Query after fatal error.)
I solved this issue by recreating the connection, with the use of promises (q).
mysql-con.js
'use strict';
var config = require('./../config.js');
var colors = require('colors');
var mysql = require('mysql');
var q = require('q');
var MySQLConnection = {};
MySQLConnection.connect = function(){
var d = q.defer();
MySQLConnection.connection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : 'password',
database : 'database'
});
MySQLConnection.connection.connect(function (err) {
if(err) {
console.log('Not connected '.red, err.toString().red, ' RETRYING...'.blue);
d.reject();
} else {
console.log('Connected to Mysql. Exporting..'.blue);
d.resolve(MySQLConnection.connection);
}
});
return d.promise;
};
module.exports = MySQLConnection;
mysqlAPI.js
var colors = require('colors');
var mysqlCon = require('./mysql-con.js');
var mysqlAPI = {}; // holder object so mysqlAPI.reconnect can be defined below
var mysql; // current connection reference
mysqlCon.connect().then(function(con){
console.log('connected!');
mysql = con;
mysql.on('error', function (err, result) {
console.log('error occurred. Reconnecting...'.magenta);
mysqlAPI.reconnect();
});
mysql.query('SELECT 1 + 1 AS solution', function (err, results) {
if(err) console.log('err',err);
console.log('Works bro ',results);
});
});
mysqlAPI.reconnect = function(){
mysqlCon.connect().then(function(con){
console.log("connected. getting new reference");
mysql = con;
mysql.on('error', function (err, result) {
mysqlAPI.reconnect();
});
}, function (error) {
console.log("try again");
setTimeout(mysqlAPI.reconnect, 2000);
});
};
I hope this helps.
In place of connection.connect(); use:
if(!connection._connectCalled )
{
connection.connect();
}
If connect has already been called, then connection._connectCalled is true, and connection.connect() will not run again.
Note: don't use connection.end();
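If you would rather not rely on the underscore-prefixed property, the connection's state field can be checked the same way; it is equally undocumented, so treat this as a sketch rather than a guarantee:
if (connection.state === 'disconnected') {
  connection.connect();
}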
SOLUTION to prevent this error (for AWS Lambda):
In order to exit the Node.js event loop, you must end the connection and then reconnect. Add the following code before invoking the callback:
connection.end( function(err) {
if (err) {console.log("Error ending the connection:",err);}
// reconnect in order to prevent the "Cannot enqueue Handshake after invoking quit" error
connection = mysql.createConnection({
host : 'rds.host',
port : 3306,
user : 'user',
password : 'password',
database : 'target database'
});
callback(null, {
statusCode: 200,
body: response,
});
});
If you're trying to get a Lambda working, I found that ending the handler with context.done() got the Lambda to finish. Before adding that one line, it would just run and run until it timed out.
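Alternatively, instead of context.done(), you can tell Lambda not to wait for the event loop to drain, so an open pool doesn't keep the function running. A sketch reusing the pool from the earlier example:
exports.handler = (event, context, callback) => {
  // Freeze the process as soon as callback fires, even with connections still open.
  context.callbackWaitsForEmptyEventLoop = false;
  pool.query('SELECT 1 + 1 AS solution', (err, rows) => {
    callback(err, err ? null : rows[0].solution);
  });
};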
You can use
debug: false,
Example:
//mysql connection
var dbcon1 = mysql.createConnection({
host: "localhost",
user: "root",
password: "",
database: "node5",
debug: false,
});
A little digging showed that I wasn't closing the connection at all. So I added this code: ending the old connection once I was done with the database manipulation, before opening it up again.
connection.end()
connection = mysql.createConnection(
// database connection details
)
connection.connect(function (err) {
if (!err) {
console.log("Connected!");
var sql = `Select something from my_heart;`
connection.query(sql, function (err, result) {
if (!err) {
console.log("1 record inserted");
res.send("Recieved")
} else {
console.log(err.sqlMessage)
res.send("error")
}
});
}
})
Just call connection.connect() once, outside of module.exports. It should connect once when the Node server is initialised, not on every request. You can do it this way:
const connection = sql.createConnection({
host: "****",
user: "****",
password: "*****",
database: "****"
})
connection.connect((error) => {
if( error ) throw new Error(error)
})
module.exports = {
getDataFromUserGps: function(callback)
{
connection.query("SELECT * FROM usergps",
function(err, results, fields) {
if (err) return callback(err, null);
return callback(null, results);
}
);
},
// ... the remaining methods (loginUser, getUserDetails, addTags) stay the same, minus connect()/end() ...
}
In Node.js, I have (briefly) this script:
var http = require('http');
var XmlStream = require('xml-stream');
var mongo = require('mongodb');
var tims = { ... };
var db = new mongo.Db('tims', new mongo.Server("127.0.0.1", 27017, {}), {w: 1});
db.open(function(e, db) {
var req = http.get({
host: tims.uri,
path: '/xml/'+tims.database+tims.services.database
}).on('response', function(res) {
res.setEncoding('utf8');
cProjects = db.collection("projects");
var xml = new XmlStream(res);
xml.on('updateElement: Tims ProjectID', function(project) {
// console.log(project.$text+' - '+project.$.title);
cProjects.update({project_id: project.$text}, {project_id: project.$text, title: project.$.title}, {upsert:true}, function(err, result) {
console.log('result: '+result);
});
});
xml.on('end', function(data) {
db.close();
});
});
});
I am using a Node.js package called xml-stream that pieces together response chunks from Node to get valid XML before processing. My problem: if I leave out
xml.on('end', function(data) {
db.close();
});
my connection never closes and the console just hangs. The upside is that console.log('result: '+result) writes to the console and I can see that my data was committed successfully. But if I leave the end event in and close the DB once all the XML has been processed, the Node instance terminates before console.log('result: '+result) is written.
I am a newbie to both MongoDB and Node.js, so I'm curious what the best practice is here, or perhaps someone can simply point out what I'm doing wrong.
Thanks for the help.
Looks like the 'end' event is occurring before all the update callbacks have completed. So you need to rework your code a bit to keep track of the number of updates still pending and only call db.close() once both the 'end' event has fired and all the pending updates have completed.
So something like this:
db.open(function(e, db) {
var req = http.get({
host: tims.uri,
path: '/xml/'+tims.database+tims.services.database
}).on('response', function(res) {
res.setEncoding('utf8');
cProjects = db.collection("projects");
var xml = new XmlStream(res);
var end = false;
var pending = 0;
xml.on('updateElement: Tims ProjectID', function(project) {
// console.log(project.$text+' - '+project.$.title);
++pending;
cProjects.update({project_id: project.$text}, {project_id: project.$text, title: project.$.title}, {upsert:true}, function(err, result) {
console.log('result: '+result);
if (--pending === 0 && end) {
db.close();
}
});
});
xml.on('end', function(data) {
end = true;
});
});
});