I am using the following code snippet in a Node.js application to attempt to query a (local) postgres database:
var conString = "postgres://user:password#localhost:5432/mydatabase";
var client = new pg.Client(conString);
client.connect(function(err) {
if(err) {
return console.error('could not connect to postgres', err);
}
client.query("SELECT * FROM users WHERE name = $1 AND cred = $2", [String(req.body.usr), String(req.body.pword)], function(err, result) {
if(err) {
return console.error('error running query', err);
}
if (typeof result.rows[0] === "undefined") {
console.log("No user/password determined in DB for login attempt");
} else {
} //user/password is 'undefined' (NOT found in database)...OR NOT...
client.end();
});
});
I am receiving an error when the query runs. I believe the problem may be the number of parameters in my query call, or some other syntax issue. If that is the case, could anybody be so kind as to tell me how I should change the code to perform the query correctly?
I simply need to take two user-supplied values from a form (req.body.usr and req.body.pword) and compare them against the database table 'users' to determine whether the credentials are correct. I already believe the database connection itself works properly. Any advice is greatly appreciated; thank you in advance.
Change:
client.query("SELECT * FROM users WHERE name = $1 AND cred = $2", [String(req.body.usr), String(req.body.pword)], function(err, result) {...
to:
const query = {
  text: "SELECT * FROM users WHERE name = $1 AND cred = $2",
  values: [String(req.body.usr), String(req.body.pword)]
};

client.query(query, function(err, result) {
Read more: https://node-postgres.com/features/queries
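For reference, here is a rough sketch of the same lookup using the promise form of client.query() (available in newer node-postgres releases); the client and req objects are assumed to be the same ones from the question:

client.query("SELECT * FROM users WHERE name = $1 AND cred = $2", [String(req.body.usr), String(req.body.pword)])
  .then(function(result) {
    if (result.rows.length === 0) {
      console.log("No user/password determined in DB for login attempt");
    }
    return client.end();
  })
  .catch(function(err) {
    console.error('error running query', err);
  });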
I'm using pg for node to connect to my Postgres server.
Here is what I'm doing:
const Pool = require('pg').Pool;

var config = {
  user: "mouser",
  password: "my password",
  host: "somethingsomething.rds.amazonaws.com",
  port: 5432,
  database: "mydb",
  ssl: true
};

const db = new Pool(config);

console.log("hello world");

let queryString = `CREATE TABLE IF NOT EXISTS favoritememes (id serial PRIMARY KEY, image_url varchar(255), date_favorited TIMESTAMP DEFAULT CURRENT_TIMESTAMP);`;

db.query(queryString, function(err, result) {
  console.log("Created table");
  if (err) console.log("error", err);
});
Currently, I see "hello world" printed to my console, but it never prints "created table" and I also never see an error.
I know that my credentials are correct because I can connect to the server with SQLWorkbench with those same credentials.
I think the issue is that you're not waiting for the pool to finish being created.
Additionally, I suggest using promises for your db functions; it will make you feel better about JavaScript ;D.
I also suggest you use parameterized query functions. They help keep the code clean and save you from having to deal with variable types / heavy escaping.
(BTW, I would put this in another file and export the parameterizedPromise via exports.parameterizedPromise = parameterizedPromise.)
Here is a sample of how to use a parameterized query promise:
/*
parameterizedPromise('SELECT * FROM foodtable f WHERE f.flavor = $1 AND f.meal = $2;', ['SPICY', 'BREAKFAST'])
  .then(function(result){
    console.log(result.rows);
  })
  .catch(function(error){
    console.warn(error);
  });
*/
Here is a redacted version of what I had used a while back.
const pg = require('pg');

const DBConfig = {
  host: 'xxxxxx',
  port: 'xxxxxx',
  user: 'xxxxxx',
  password: 'xxxxxx',
  database: 'xxxxxx',
  max: 10, // max number of clients in the pool
  ssl: true,
  idleTimeoutMillis: 30000 // how long a client is allowed to remain idle before being closed
};

const postgresPool = new pg.Pool(DBConfig, function(){
  console.log('connected to postgres database');
  // Need to wait for the pool to finish connecting before you can fire off your query.
  parameterizedPromise(`CREATE TABLE IF NOT EXISTS favoritememes (id serial PRIMARY KEY, image_url varchar(255), date_favorited TIMESTAMP DEFAULT CURRENT_TIMESTAMP);`, [])
    .then(function(result){
      console.log(result);
    })
    .catch(function(error){
      console.warn(error);
    });
});

postgresPool.on('error', function (err, client) {
  // if an error is encountered by a client while it sits idle in the pool
  // the pool itself will emit an error event with both the error and
  // the client which emitted the original error
  // this is a rare occurrence but can happen if there is a network partition
  // between your application and the database, the database restarts, etc.
  // and so you might want to handle it and at least log it out
  console.error('idle client error', err.message, err.stack);
});

const parameterizedPromise = function(query, params) {
  return new Promise( function(resolve, reject){
    postgresPool.connect(function(err, client, done) {
      if (err) {
        return reject(err); // Error Fetching Client From Pool
      }
      client.query(query, params, function(err, result) {
        // call `done()` to release the client back to the pool
        done(err);
        if (err) {
          reject(err);
        } else {
          resolve(result);
        }
      });
    });
  });
};
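As an aside, newer node-postgres releases (7 and up) return a promise directly from pool.query(), so a wrapper like the one above may not be necessary. A rough equivalent, reusing the DBConfig object from above:

const pg = require('pg');
const pool = new pg.Pool(DBConfig); // same config object as above

pool.query('SELECT * FROM foodtable f WHERE f.flavor = $1 AND f.meal = $2;', ['SPICY', 'BREAKFAST'])
  .then(function(result) {
    console.log(result.rows);
  })
  .catch(function(error) {
    console.warn(error);
  });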
I have kept the insert and update code in two different files, and based on a condition the insert should always execute first and then the update. But somehow the update executes first and then the insert.
test.js : simplified code
I am using these packages: pg, uuid
var pg = require('pg');
var uuid = require('node-uuid').v4;

var id = uuid().toString();
var conString = 'postgres://postgres:pass@127.0.0.1:5432/testdb';

// ------INSERT
pg.connect(conString, function(err, client, done) {
  console.log('Executing Insert query');
  client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'], function(err, result) {
    done();
    if (err) { return console.error('error running query', err); }
    console.log('finished executing Insert query');
  });
});

// ------UPDATE
pg.connect(conString, function(err, client, done) {
  console.log('Executing update query');
  client.query("update testdb set iscancelled = 'yes' where id = $1", [id], function(err, result) {
    done();
    if (err) { return console.error('error running query', err); }
    console.log('finished executing Update query');
  });
});
output
tom@tom:~$ node test.js
Executing Insert query
Executing update query
finished executing Update query    // WHY DOES UPDATE FINISH FIRST?
finished executing Insert query
Note:
This problem could easily be solved by using async, but my insert code and update code are in different files, and the update code only runs in some situations, so I don't want to use async.
Problem
Even though the insert query starts executing first, why does the update finish first in the output? Am I missing anything?
Let's solve this question step by step.
You stated you "don't want to use async" libraries.
Solution 1:
If PostgreSQL happens to execute the update faster, the update will return its result before the insert. If you want the update query to start executing only after the insert has finished, then you should set the connection pool capacity to 1:
pg.defaults.poolSize = 1
but you should do this before any pg.connect()
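For example, the placement would look roughly like this (a sketch only; conString is the one from the question):

var pg = require('pg');

// must run before the first pg.connect() so the pool is created with a single client
pg.defaults.poolSize = 1;

pg.connect(conString, function (err, client, done) {
  // with a pool of one, the second pg.connect() callback only fires
  // after this client has been released with done()
});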
The connect method retrieves a Client from the client pool, or if all pooled clients are busy and the pool is not full, the connect method will create a new client passing its first argument directly to the Client constructor. In either case, your supplied callback will only be called when the Client is ready to issue queries or an error is encountered. The callback will be called once and only once for each invocation of connect.
Conclusion: your queries will execute in sequence. BUT this solution is bad for scaling the app, as there will always be only one connection serving all users. So while that one connection is serving one user, other users will have to wait for a response.
Solution 2:
You also stated "i have kept insert & update code in 2 different files".
It looks like you need to design your code in such a way that you are able to use asynchronous libraries; that solves this problem.
As I already mentioned, the only way to ensure that the update function fires only after the insert function is done is to call it inside the insert function's callback. Those are the basics of asynchronous programming.
pg.connect(conString, function(err, client, done) {
  console.log('Executing Insert query');
  client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'], function(err, result) {
    done();
    if (err) { return console.error('error running query', err); }
    console.log('finished executing Insert query');

    // ------UPDATE
    pg.connect(conString, function(err, client, done) {
      console.log('Executing update query');
      client.query("update testdb set iscancelled = 'yes' where id = $1", [id], function(err, result) {
        done();
        if (err) { return console.error('error running query', err); }
        console.log('finished executing Update query');
      });
    });
  });
});
You are missing the asynchronous nature of pg.connect and client.query. These calls take a callback and return immediately, passing control to the next expression before the query has completed; hence the non-blocking nature of Node.js. If you want to assure the correct flow, call the successive query inside the previous one's success callback:
// insertfile.js
// ------INSERT
var pg = require('pg');
module.exports = function (conString, callback) {
  return pg.connect(conString, callback);
};

// updatefile.js
// ------UPDATE
var pg = require('pg');
module.exports = function (conString, callback) {
  return pg.connect(conString, callback);
};

// your calling file
var uuid = require('node-uuid').v4;
var id = uuid().toString();
var conString = 'postgres://postgres:pass@127.0.0.1:5432/testdb';
var insert = require('/path/to/insertfile');
var update = require('/path/to/updatefile');
insert(conString, function (err, client, done) {
  console.log('Executing Insert query');
  client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'], function (err, result) {
    if (err) {
      return console.error('error running query', err);
    }
    console.log('finished executing Insert query');
    update(conString, function (error, client, done) {
      console.log('Executing update query');
      client.query("update testdb set iscancelled = 'yes' where id = $1", [id], function (err, result) {
        if (err) {
          return console.error('error running query', err);
        }
        console.log('finished executing Update query');
        done();
      });
    });
    done();
  });
});
But this is very prone to callback hell. So consider making all async calls return a promise. Take a look at bluebird. If you want an ORM that has built-in promise-based calls, you can take a look at sequelize. It might be handy for you.
It has syntax as easy as:
var Model1 = require('/path/to/model1');
var Model2 = require('/path/to/model2');

var insertObj = {
  "someKey": "value"
};

Model1.create(insertObj)
  .then( function (createdObj1) {
    return Model2.findOne({
      where: {
        "filter": "filterValue"
      }
    });
  })
  .then( function (documentToUpdate) {
    return documentToUpdate.update({
      "fieldToUpdate": "value"
    });
  })
  .then( null, function (err) {
    console.log(err);
  });
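Alternatively, staying with plain pg, here is a rough sketch of the same insert-then-update flow using the promise API (assuming a node-postgres version, 7 or later, where pool.query() returns a promise):

var pg = require('pg');
var uuid = require('node-uuid').v4;

var pool = new pg.Pool({ connectionString: 'postgres://postgres:pass@127.0.0.1:5432/testdb' });
var id = uuid().toString();

pool.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)', [id, 'hello', 'no'])
  .then(function () {
    console.log('finished executing Insert query');
    return pool.query("update testdb set iscancelled = 'yes' where id = $1", [id]);
  })
  .then(function () {
    console.log('finished executing Update query');
    return pool.end();
  })
  .catch(function (err) {
    console.error('error running query', err);
  });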
I am trying to obtain a column (field) value, or multiple column values, but I am not getting anything back. I am pretty new to Node.js, and even debugging this is difficult. Here is my code:
var express = require('express');
var app = express();
var mongodb = require('mongodb');
var MongoClient = mongodb.MongoClient;

MongoClient.connect(url, function (err, db) {
  if (err) {
    console.log('Unable to connect', err);
  } else {
    console.log('Connected');
    var collection = db.collection('users');
    app.get('/db', function (request, response) {
      collection.find({username: request.query.username, password: request.query.password}).toArray(function(err, docs) {
        docs.forEach(function(doc) {
          console.log(doc.toArray());
        });
      });
    });
  }
  db.close();
});
I am basically authenticating: if the username and password stored in the db match the ones in my REST query, then print success, or redirect to a new page, or something similar.
For starters, unless you actually expect more than one "user" to share the same "username" and "password", you are better off using .findOne() for the singular response.
Then what is returned is just a plain JavaScript object, so just reference the properties:
collection.findOne(
  {
    username: request.query.username,
    password: request.query.password
  },
  function(err, doc) {
    if (err) throw err;
    console.log( doc.username ); // Just the username value
  }
);
Of course, if "nothing" is returned and there is no "error", then you didn't match anything with your criteria, so either the "username" or the "password" is wrong.
That's the basic exercise, but really, authentication is a wheel that was built long ago, and you would likely be better off implementing an existing module to handle it.
And get rid of db.close(). The functions here are asynchronous and this will execute before anything else completes. You basically never want to call this in real world applications.
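Tying that into the route from the question, a minimal sketch (same app and collection objects as above; the response handling here is only illustrative):

app.get('/db', function (request, response) {
  collection.findOne(
    { username: request.query.username, password: request.query.password },
    function (err, doc) {
      if (err) return response.status(500).send('Database error');
      if (!doc) return response.status(401).send('Invalid username or password');
      response.send('Welcome ' + doc.username); // or response.redirect('/home');
    }
  );
});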
I have implemented the following code:
module.exports = {
  getDataFromUserGps: function(callback)
  {
    connection.connect();
    connection.query("SELECT * FROM usergps",
      function(err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
    connection.end();
  },
  loginUser: function(login, pass, callback)
  {
    connection.connect();
    connection.query(
      "SELECT id FROM users WHERE login = ? AND pass = ?",
      [login, pass],
      function(err, results, fields)
      {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
    connection.end();
  },
  getUserDetails: function(userid, callback)
  {
    connection.connect();
    connection.query(
      "SELECT * FROM userProfilDetails LEFT JOIN tags ON userProfilDetails.userId = tags.userId WHERE userProfilDetails.userid = ?",
      [userid],
      function(err, results, fields)
      {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
    connection.end();
  },
  addTags: function(userId, tags)
  {
    connection.connect();
    connection.query(
      "INSERT INTO tag (userId, tag) VALUES (?, ?)",
      [userId, tags],
      function(err, results, fields)
      {
        if (err) throw err;
      }
    );
    connection.end();
  }
}
Everything works great, but only the first time. If I want to run a query a second time, I get the following error:
Cannot enqueue Handshake after invoking quit
I have tried not calling .end() on the connections, but it didn't help.
How can I fix this issue?
If you're using the node-mysql module, just remove the .connect and .end calls. I just solved the problem myself. Apparently they pushed unnecessary code in their last iteration that is also bugged. You don't need to call connect if you have already run the createConnection call.
According to:
Fixing Node Mysql "Error: Cannot enqueue Handshake after invoking quit.":
http://codetheory.in/fixing-node-mysql-error-cannot-enqueue-handshake-after-invoking-quit/
TL;DR You need to establish a new connection by calling the createConnection method after every disconnection.
and
Note: If you're serving web requests, then you shouldn't be ending connections on every request. Just create a connection on server startup and use the connection/client object to query all the time. You can listen on the error event to handle server disconnection and for reconnecting purposes. Full code here.
From:
Readme.md - Server disconnects:
https://github.com/felixge/node-mysql#server-disconnects
It says:
Server disconnects
You may lose the connection to a MySQL server due to network problems, the server timing you out, or the server crashing. All of these events are considered fatal errors, and will have err.code = 'PROTOCOL_CONNECTION_LOST'. See the Error Handling section for more information.
The best way to handle such unexpected disconnects is shown below:
function handleDisconnect(connection) {
  connection.on('error', function(err) {
    if (!err.fatal) {
      return;
    }
    if (err.code !== 'PROTOCOL_CONNECTION_LOST') {
      throw err;
    }
    console.log('Re-connecting lost connection: ' + err.stack);
    connection = mysql.createConnection(connection.config);
    handleDisconnect(connection);
    connection.connect();
  });
}
handleDisconnect(connection);
As you can see in the example above, re-connecting a connection is done by establishing a new connection. Once terminated, an existing connection object cannot be re-connected by design.
With Pool, disconnected connections will be removed from the pool, freeing up space for a new connection to be created on the next getConnection call.
I have tweaked the function such that every time a connection is needed, an initializer function adds the handlers automatically:
function initializeConnection(config) {
  function addDisconnectHandler(connection) {
    connection.on("error", function (error) {
      if (error instanceof Error) {
        if (error.code === "PROTOCOL_CONNECTION_LOST") {
          console.error(error.stack);
          console.log("Lost connection. Reconnecting...");
          initializeConnection(connection.config);
        } else if (error.fatal) {
          throw error;
        }
      }
    });
  }

  var connection = mysql.createConnection(config);

  // Add handlers.
  addDisconnectHandler(connection);
  connection.connect();
  return connection;
}
Initializing a connection:
var connection = initializeConnection({
  host: "localhost",
  user: "user",
  password: "password"
});
Minor suggestion: this may not apply to everyone, but I did run into a minor issue relating to scope. If the OP feels this edit was unnecessary, he/she can choose to remove it. For me, I had to change a line in initializeConnection from var connection = mysql.createConnection(config); to simply
connection = mysql.createConnection(config);
The reason is that if connection is a global variable in your program, the old code created a new local connection variable when handling the error signal. But my Node.js code kept using the same global connection variable to run queries on, so the new connection would be lost in the local scope of the initializeConnection method. The modification ensures that the global connection variable is reset. This may be relevant if you're experiencing an issue known as
Cannot enqueue Query after fatal error
after trying to perform a query after losing the connection and then successfully reconnecting. This may have been a typo by the OP, but I just wanted to clarify.
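To illustrate that scope fix, a small sketch (error handling trimmed to the reconnect branch):

var mysql = require("mysql");
var connection; // module/global scope: the rest of the program queries through this

function initializeConnection(config) {
  // note: no `var` here, so the outer `connection` is reassigned rather than shadowed
  connection = mysql.createConnection(config);
  connection.on("error", function (error) {
    if (error.code === "PROTOCOL_CONNECTION_LOST") {
      initializeConnection(connection.config); // reconnect into the same variable
    } else if (error.fatal) {
      throw error;
    }
  });
  connection.connect();
  return connection;
}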
I had the same problem and Google led me here. I agree with @Ata that it's not right to just remove end(). After further Googling, I think using pooling is a better way.
node-mysql doc about pooling
It's like this:
var mysql = require('mysql');
var pool = mysql.createPool(...);

pool.getConnection(function(err, connection) {
  connection.query( 'bla bla', function(err, rows) {
    connection.release();
  });
});
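The mysql module also offers pool.query() as a shorthand that acquires and releases a connection for you. A rough equivalent of the snippet above ('bla bla' kept as the placeholder SQL):

pool.query('bla bla', function(err, rows) {
  if (err) return console.error(err);
  console.log(rows);
});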
Do not connect() and end() inside the function. This will cause problems on repeated calls to the function. Make the connection only once:
var connection = mysql.createConnection({
  host: 'localhost',
  user: 'node',
  password: 'node',
  database: 'node_project'
});

connection.connect(function(err) {
  if (err) throw err;
});
Then reuse that one connection everywhere.
Inside the function:
function insertData(name, id) {
  connection.query('INSERT INTO members (name, id) VALUES (?, ?)', [name, id], function(err, result) {
    if (err) throw err;
  });
}
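Repeated calls then reuse that single connection, for example (made-up values):

insertData('alice', 1);
insertData('bob', 2); // no new handshake, so no "Cannot enqueue Handshake" error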
AWS Lambda functions
Use mysql.createPool() with connection.destroy()
This way, new invocations use the established pool but don't keep the function running. Even though you don't get the full benefit of pooling (each invocation opens a new connection instead of reusing an existing one), it means a second invocation can establish a new connection without the previous one having to be closed first.
Regarding connection.end()
This can cause a subsequent invocation to throw an error. The invocation will still retry later and work, but with a delay.
Regarding mysql.createPool() with connection.release()
The Lambda function will keep running until the scheduled timeout, as there is still an open connection.
Code example
const mysql = require('mysql');

const pool = mysql.createPool({
  connectionLimit: 100,
  host: process.env.DATABASE_HOST,
  user: process.env.DATABASE_USER,
  password: process.env.DATABASE_PASSWORD,
});

exports.handler = (event) => {
  pool.getConnection((error, connection) => {
    if (error) throw error;
    connection.query(`
      INSERT INTO table_name (event) VALUES ('${event}')
    `, function(error, results, fields) {
      if (error) throw error;
      connection.destroy();
    });
  });
};
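As a side note, the interpolated '${event}' above is not escaped by the driver. A hypothetical variant of the connection.query call inside that handler, using a placeholder so the driver does the escaping (the event is stringified here, since Lambda passes an object):

connection.query(
  'INSERT INTO table_name (event) VALUES (?)',
  [JSON.stringify(event)],
  function (error, results, fields) {
    if (error) throw error;
    connection.destroy();
  }
);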
I think this issue is similar to mine:
Connect to MySQL
End MySQL service (should not quit node script)
Start MySQL service, Node reconnects to MySQL
Query the DB -> FAIL (Cannot enqueue Query after fatal error.)
I solved this issue by recreating the connection, with the use of promises (q).
mysql-con.js
'use strict';
var config = require('./../config.js');
var colors = require('colors');
var mysql = require('mysql');
var q = require('q');

var MySQLConnection = {};

MySQLConnection.connect = function(){
  var d = q.defer();
  MySQLConnection.connection = mysql.createConnection({
    host     : 'localhost',
    user     : 'root',
    password : 'password',
    database : 'database'
  });
  MySQLConnection.connection.connect(function (err) {
    if (err) {
      console.log('Not connected '.red, err.toString().red, ' RETRYING...'.blue);
      d.reject();
    } else {
      console.log('Connected to Mysql. Exporting..'.blue);
      d.resolve(MySQLConnection.connection);
    }
  });
  return d.promise;
};

module.exports = MySQLConnection;
mysqlAPI.js
var colors = require('colors');
var mysqlCon = require('./mysql-con.js');

var mysql;
var mysqlAPI = {};

mysqlCon.connect().then(function(con){
  console.log('connected!');
  mysql = con;
  mysql.on('error', function (err, result) {
    console.log('error occurred. Reconnecting...'.purple);
    mysqlAPI.reconnect();
  });
  mysql.query('SELECT 1 + 1 AS solution', function (err, results) {
    if (err) console.log('err', err);
    console.log('Works bro ', results);
  });
});

mysqlAPI.reconnect = function(){
  mysqlCon.connect().then(function(con){
    console.log("connected. getting new reference");
    mysql = con;
    mysql.on('error', function (err, result) {
      mysqlAPI.reconnect();
    });
  }, function (error) {
    console.log("try again");
    setTimeout(mysqlAPI.reconnect, 2000);
  });
};
I hope this helps.
In place of connection.connect(); use:
if (!connection._connectCalled) {
  connection.connect();
}
If connect() has already been called, then connection._connectCalled is true, and connection.connect() will not execute again.
Note: don't use connection.end();
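Keep in mind that _connectCalled is an internal, undocumented property of the mysql Connection object, so this is a best-effort guard. Wrapped up as a helper it might look like:

function connectIfNeeded(connection) {
  // _connectCalled is set by the mysql driver the first time connect() runs
  if (!connection._connectCalled) {
    connection.connect();
  }
}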
SOLUTION (to prevent this error on AWS Lambda):
In order to exit the Node.js event loop you must end the connection and then reconnect. Add the following code to invoke the callback:
connection.end( function(err) {
  if (err) { console.log("Error ending the connection:", err); }

  // reconnect in order to prevent the "Cannot enqueue Handshake after invoking quit" error
  connection = mysql.createConnection({
    host     : 'rds.host',
    port     : 3306,
    user     : 'user',
    password : 'password',
    database : 'target database'
  });

  callback(null, {
    statusCode: 200,
    body: response,
  });
});
If you're running this in a Lambda, I found that ending the handler with context.done() got the Lambda to finish. Before adding that one line, it would just run and run until it timed out.
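For reference, a rough sketch of what that looks like with the callback-style handler signature (the pool is assumed to be configured as in the earlier example; credentials omitted):

const mysql = require('mysql');
const pool = mysql.createPool({ host: process.env.DATABASE_HOST });

exports.handler = function (event, context) {
  pool.query('SELECT 1', function (err) {
    // without this line the function kept running until it timed out
    context.done(err);
  });
};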
You can use
debug: false,
Example:
// mysql connection
var dbcon1 = mysql.createConnection({
  host: "localhost",
  user: "root",
  password: "",
  database: "node5",
  debug: false,
});
A little digging showed that I wasn't closing the connection at all. So I added this code to end the old connection before opening a new one, once I was done with the previous database manipulation:
connection.end();

connection = mysql.createConnection(
  // database connection details
);

connection.connect(function (err) {
  if (!err) {
    console.log("Connected!");
    var sql = `Select something from my_heart;`;
    connection.query(sql, function (err, result) {
      if (!err) {
        console.log("1 record inserted");
        res.send("Recieved");
      } else {
        console.log(err.sqlMessage);
        res.send("error");
      }
    });
  }
});
Just call connection.connect() once, outside of module.exports. It should be called once when the Node server is initialised, not on every request.
You can do it this way:
const connection = sql.createConnection({
  host: "****",
  user: "****",
  password: "*****",
  database: "****"
});

connection.connect((error) => {
  if (error) throw new Error(error);
});

module.exports = {
  getDataFromUserGps: function(callback)
  {
    connection.query("SELECT * FROM usergps",
      function(err, results, fields) {
        if (err) return callback(err, null);
        return callback(null, results);
      }
    );
  },
  ****
  ****
  ****
}
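Hypothetical usage from another file then looks like this (assuming the module above is saved as db.js):

const db = require('./db');

db.getDataFromUserGps(function (err, results) {
  if (err) return console.error(err);
  console.log(results);
});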