configuration.js
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit : 10,
host : 'localhost',
user : 'root',
password : '',
database : 'test'
});
pool.getConnection(function (err, connection) {
  if (err) {
    console.error('error connecting: ' + err.stack);
    return;
  }
  connection.release(); // give the test connection straight back to the pool
});
module.exports = pool;
user.js model file
var async = require("async");
var homePage = function HomePage( user_id ) {
this.user_id = user_id;
this.db = require('../config/configuration');
}
homePage.prototype.getCourse = function( callback ){
var self = this;
self.db.query(' my sql query ', [ self.user_id ], function (error, results, fields) {
self.db.release();
callback(error, results);
});
};
I have exported my db connection pool here, and I require that module (configuration.js) in every model file. I assume that it will load the same db connection every time, based on the Node.js docs:
every call to require('foo') will get exactly the same object returned, if it would resolve to the same file.
As this is the first time I am writing JS and asynchronous code, I am confused about a few things. Will the same configuration file be loaded every time? I want a singleton-like pattern for my db file. Is my approach correct?
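To check my understanding of that quote, here is a tiny standalone illustration of the caching behaviour it describes (the file names are made up for this example):
// counter.js -- a throwaway module used only to illustrate require() caching
module.exports = { count: 0 };
// main.js
var c1 = require('./counter');
var c2 = require('./counter');
c1.count++;
console.log(c2.count); // prints 1 -- both require() calls returned the same cached object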
When I call self.db.release() I assume I am releasing a connection back to the pool, but it throws an error and I am unable to figure out why, since everything else works fine. The error is:
self.db.release() is not a function
If I comment out this line everything works. Is there a way to see what release is returning, as that would help in debugging?
Also, is my approach correct?
Related
I have built an event API in PHP using Cassandra and phpcassa.
Recently, I wrote a Node.js + helenus replacement of the same API. After I finished, I started to benchmark the Node.js code with the ab tool. However, after 1000+ inserts into Cassandra, the connection can be lost and the failover code kicks in. I insert into Cassandra asynchronously as each request comes in.
I instantiate the ConnectionPool object when the Node app starts. However, I call the pool.connect() function on each request.
Does pool.connect() start a new connection, or does it bind to the existing connection held by the ConnectionPool object?
Notes:
With phpcassa I didn't have such problems, probably because I insert into Cassandra synchronously.
I'm using 5 Cassandra nodes.
Yes, pool.connect() creates a new connection to the pool every time it's called, so you usually only have to call it once when the application starts, and then pass that connection to any module or method that needs it.
I usually use a module to do this:
module.exports = function(app, cb) {
var domain = require('domain'),
    d = domain.create(),
    helenus = require('helenus');
d.run(function() {
var pool = new helenus.ConnectionPool({
hosts : ['localhost:9160'],
user : "",
password : "",
keyspace : 'something',
cqlVersion : '3.0.0'
});
pool.connect(function(err, keyspace){
if(err){
cb(err, null);
} else {
cb(null, pool);
}
});
pool.on('error', function(err) {
cb(err, null);
});
});
d.on('error', function(err) {
console.log('error', err.stack);
cb(err, null);
});
}
and call it in my app with something like this (really simplified):
var express = require('express');
var app = express();
var database = require('./db');
database(app, function(err, conn) {
if (err==null) {
// connected, do stuff
conn.cql("SELECT * FROM table WHERE KEY = ?", [what], function(err, result) {
if (err==null) {
// get result
}
});
}
});
I've used this with millions of records, with continuous inserts and a large number of lookups, without any issue. Cassandra performs better for me than any other DB I've tried.
Context
Say I have 2 classes, e.g. a User.js class and a Log.js class, which both access a database in their own methods, and I have installed a MySQL database module, e.g. db-mysql.
Question
How can I make it so the same (one) database instance can be used across both JS files?
Thoughts
The two approaches I can think of at the moment are, I guess, not very memory conscious:
pass db parameter in
function(db){
this.db = db;
}
create an instance of it inside every class
function(){
this.db = require(moduleName);
}
I’m just looking for the best way and need a bit of guidance.
Create a separate file where you connect to the db. You keep that connection in that module's closure, and when you later require that module from any other file, it will use that same connection.
A simple example looks something like this: lib/db.js
var mysql = require('db-mysql');
var settings = { // importing these from ENV is a good pattern
hostname: 'localhost'
, user: 'user'
, pw: '****'
, database: 'base'
}
var db = new mysql.Database(settings).on('error', function(error) {
console.log('ERROR: ' + error);
}).on('ready', function(server) {
console.log('Connected to ' + server.hostname + ' (' + server.version + ')');
}).connect();
module.exports = db;
Then use this in other files:
var db = require('../lib/db');
db.query(...)
You can even abstract some basic queries in the db.js, something like:
function getUsers(filterString, callback) {
db.query().select('id, username, email').from('users').where('username like', filterString)
.execute(callback);
}
module.exports.getUsers = getUsers
Then in other files:
var db = require('../lib/db');
db.getUsers('mike', function(err, rows, cols) {
if(err) throw err;
return [rows, cols];
});
Pass the DB Parameter in.
Also, create a new JS file called DBConfig.js where you can store the credentials for the MySQL DB. Use this JavaScript object to initiate your db. For example:
var db = require('./DBConfig');
Inside your DBConfig.js, you can write
module.exports = {
host:'<host_url>',
username: 'root',
password: '',
database: '<database-name>'
}
In this manner you can use the same config across the JS files.
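As a rough sketch of what "initiate your db" could look like with that config (this assumes the mysql driver; the answer above does not name a specific one):
// sketch only -- assumes the 'mysql' driver; the keys match DBConfig.js above
var mysql = require('mysql');
var config = require('./DBConfig');
var connection = mysql.createConnection({
  host: config.host,
  user: config.username,
  password: config.password,
  database: config.database
});
connection.connect(function (err) {
  if (err) console.error('could not connect: ' + err.stack);
});
module.exports = connection; // requiring this file elsewhere reuses the same connection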
My problem is that I can't retrieve data from my mongodb database... And I don't know why.
I'm probably doing something wrong; here is a little sample which doesn't work.
var Db = require('mongodb').Db,
Server = require('mongodb').Server;
var db = new Db('akemichat', new Server('localhost', 27017), {w:1});
db.open(function (err, p_db) {
db = p_db;
});
db.collection('rooms', function (err, collection) {
if (!err) {
collection.find().toArray(function(err, items) {
items.forEach(function(room) {
console.log('hello'); // Never call...
});
});
} else {
console.log(err);
}
});
Notice that I do have data in my database, as the following shows:
➜ akemichat git:(master) ✗ mongo
MongoDB shell version: 2.4.7
connecting to: test
> use akemichat
switched to db akemichat
> db.rooms.find()
{ "name" : "home", "_id" : ObjectId("527008e850305d1b7d000001") }
Thanks for the help!
Note: the example program never ends, and I don't know why... Maybe because the connection is never closed, but if I call db.close() in the toArray callback it will never run, because that callback never happens.
So many things in Node are asynchronous. Your connection only opens after you have already tried to read from your collection.
You should query the collection after you know for sure that you are connected. Down and dirty:
var Db = require('mongodb').Db,
Server = require('mongodb').Server;
var db = new Db('akemichat', new Server('localhost', 27017), {w:1});
db.open(function (err, p_db) {
db = p_db;
db.collection('rooms', function (err, collection) {
if (!err) {
collection.find().toArray(function(err, items) {
items.forEach(function(room) {
console.log('hello'); // now called for each room
});
});
} else {
console.log(err);
}
});
});
I ran this locally and received back the "hello" message. Also, your script never finishes because the Node process will run until it is closed or crashes. This is by design, which also means that you don't have to keep opening and closing your Mongo connections: you can open a connection when your application starts and close it when your application shuts down.
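For example, one way to follow that advice (a sketch under those assumptions, not part of the code I ran) is to open the connection once at startup and close it on shutdown:
// sketch: open once at startup, reuse the handle everywhere, close on shutdown
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;
var db = new Db('akemichat', new Server('localhost', 27017), {w: 1});
db.open(function (err, openedDb) {
  if (err) throw err;
  db = openedDb;   // from here on, every module can reuse this single connection
  // ... start the rest of the app here ...
});
process.on('SIGINT', function () {
  db.close();      // release the connection when the app is shut down
  process.exit(0);
});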
I am trying to figure out the best way to pass a mysql connection (using node-mysql) between my routes for express.js. I am dynamically adding each route (using a for-each-file loop over the routes directory), meaning I can't just pass the connection to the routes that need it; I either need to pass it to every route or to none at all. I didn't like the idea of passing it to ones that don't need it, so I created a dbConnection.js that the routes can individually import if they need it. The problem is that I don't think I am doing it correctly. As of now, my dbConnection.js contains:
var mysql = require('mysql');
var db = null;
module.exports = function () {
if(!db) {
db = mysql.createConnection({
socketPath: '/tmp/mysql.sock',
user: '*********',
password: '*********',
database: '**********'
});
}
return db;
};
And I am importing it into each route using:
var db = require('../dbConnection.js');
var connection = new db();
But I would like to do it like this:
var connection = require('../dbConnection.js');
When I try it like this, however, I get an error saying connection has no method 'query' when I try to make a query.
I find it more reliable to use node-mysql's pool object. Here's how I set mine up. I use environment variables for the database information; that keeps it out of the repo.
database.js
var mysql = require('mysql');
var pool = mysql.createPool({
host: process.env.MYSQL_HOST,
user: process.env.MYSQL_USER,
password: process.env.MYSQL_PASS,
database: process.env.MYSQL_DB,
connectionLimit: 10,
supportBigNumbers: true
});
// Get records from a city
exports.getRecords = function(city, callback) {
var sql = "SELECT name FROM users WHERE city=?";
// get a connection from the pool
pool.getConnection(function(err, connection) {
if(err) { console.log(err); callback(true); return; }
// make the query
connection.query(sql, [city], function(err, results) {
connection.release();
if(err) { console.log(err); callback(true); return; }
callback(false, results);
});
});
};
Route
var db = require('../database');
exports.GET = function(req, res) {
db.getRecords("San Francisco", function(err, results) {
if(err) { res.send(500, "Server Error"); return; }
// Respond with results as JSON
res.send(results);
});
};
Your solution will work if you use db() instead of new db(); new db() returns a new object rather than the shared db connection:
var db = require('../dbConnection.js');
//var connection = new db();
var connection = db();
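With that one-character change, the connection object returned by db() can be queried directly in a route, for example (a sketch; the query is just a placeholder):
connection.query('SELECT 1', function(err, rows) {
  if (err) { console.log(err); return; }
  // use rows here
});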
I am new to Mocha, and I have only a little experience with Node/Express. My DbProvider module (MongoDB) works perfectly when I access it through my Express app, and now I want to test it. I have read the Mocha site and what tutorials I could find, but I have had real trouble finding a real-world example out there that I could follow (any links much appreciated!).
Here is my unsuccessful attempt to write a test file:
var DbProvider = require('../db').DbProvider;
var assert = require('assert');
var dbProvider = new DbProvider('localhost', 27017, 'mydb');
var util = require('util');
console.log(util.inspect(dbProvider));
describe('DbProvider', function(){
describe('findAllNotes', function(){
it('should return some notes', function(){
dbProvider.findAllNotes({}, function (err, result){
assert(result.length > 0);
});
})
})
})
The output I get is this:
$ mocha
{}
✖ 1 of 1 test failed:
1) DbProvider findAllNotes should return some notes:
TypeError: Cannot call method 'collection' of undefined
at DbProvider.doOperation (/Users/frode/Node/json/db.js:46:11)
at DbProvider.findAllNotes (/Users/frode/Node/json/db.js:56:8)
at Context.<anonymous> (/Users/frode/Node/json/test/test.js:15:18)
(cutting out the rest)
It seems that I am failing to create the dbProvider, even though this works perfectly in my app... How can I make this work? (And perhaps also: is the way I have set it up generally OK?)
Edit: Here is the db.js file:
// Database related
'use strict';
var MongoClient = require('mongodb').MongoClient;
var BSON = require('mongodb').BSONPure;
var ObjectID = require('mongodb').ObjectID;
var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
var Validator = require('validator').Validator
var fieldMaxLength = 1024;
//var util = require('util');
var DbProvider = function(host, port, database) {
var dbUrl = "mongodb://"+host+":"+port+"/"+database;
var self = this;
MongoClient.connect(dbUrl, function(err, db) {
self.db = db;
});
};
// Do some basic validation on the data we get from the client/user
var validateParams = function(params, callback) {
// Let's do a quick general sanity check on the length of all fields
for(var key in params) {
if(params[key].length > fieldMaxLength) callback(new Error('Field ' + key + ' is too long.'));
}
// and then let us check some specific fields more carefully
if (params._id) {
if(checkForHexRegExp.test(params._id)) {
// In case of '_id' we also need to convert it to BSON so that mongodb can use it.
params._id = new BSON.ObjectID(params._id);
} else {
var err = {error: 'Wrong ID format'};
}
}
if(err) callback(err);
}
// Generalized function to operations on the database
// Todo: Generalize even more when user authentication is implemented
DbProvider.prototype.doOperation = function(collection, operation, params, callback) {
validateParams(params, callback);
var operationCallback = function(err, result) {
callback(err, result);
};
this.db.collection(collection, function(err, collection) {
if(operation==='find') {
collection.find().toArray(operationCallback);
} else {
collection[operation](params, operationCallback);
}
});
}
DbProvider.prototype.findAllNotes = function(params, callback) {
this.doOperation('notes', 'find', params, callback);
};
DbProvider.prototype.findNoteById = function(params, callback) {
this.doOperation('notes', 'findOne', params, callback);
};
DbProvider.prototype.saveNote = function(params, callback) {
params.created_at = new Date();
this.doOperation('notes', 'save', params, callback);
};
DbProvider.prototype.deleteNote = function(params, callback) {
this.doOperation('notes', 'remove', params, callback);
};
DbProvider.prototype.findUser = function(params, callback) {
this.doOperation('users', 'findOne', params, callback);
};
exports.DbProvider = DbProvider;
SOLUTION:
After Benjamin told me to handle the async nature of connecting to the MongoDB database, and inspired by his suggestion on how to adapt the code, I split the constructor function DbProvider into two parts. The first part, the constructor DbProvider, now just saves the db parameters into a variable. The second part, a new function DbProvider.connect, does the actual async connection. See below.
var DbProvider = function(host, port, database) {
this.dbUrl = "mongodb://"+host+":"+port+"/"+database;
};
DbProvider.prototype.connect = function(callback) {
var self = this;
MongoClient.connect(this.dbUrl, function(err, db) {
self.db = db;
callback();
});
};
So I can now write a Mocha test like this (async tests also need the done callback, as you can see in the code below):
var assert = require('assert');
var DbProvider = require('../db').DbProvider;
var dbProvider = new DbProvider('localhost', 27017, 'nki');
describe('DbProvider', function(){
describe('findAllNotes', function(){
it('should return some notes', function(done){
dbProvider.connect(function(){
dbProvider.findAllNotes({}, function (err, result){
assert(result.length > 0);
done();
});
});
})
})
})
Note that the actual test ("should return some notes") is nothing to be proud of. What I wanted here was to get set up so that I am able to test something. Now that I finally can do that, I need to write good tests (something along the lines of having a test database, clearing it, test-inserting a document, test-searching for a document, and so on), as sketched below.
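Something like this is what I have in mind (just a sketch; 'nki-test' is a made-up name for a separate test database, and it reuses the saveNote/findAllNotes methods from db.js above):
var assert = require('assert');
var DbProvider = require('../db').DbProvider;
// use a separate, throwaway database for the tests ('nki-test' is a made-up name)
var dbProvider = new DbProvider('localhost', 27017, 'nki-test');
describe('DbProvider (test database)', function(){
  before(function(done){
    dbProvider.connect(done); // async setup: connect before any test runs
  });
  it('finds a note after saving it', function(done){
    dbProvider.saveNote({ text: 'hello from mocha' }, function (err){
      assert.ifError(err);
      dbProvider.findAllNotes({}, function (err, result){
        assert.ifError(err);
        assert(result.length > 0);
        done();
      });
    });
  });
});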
And in my Express app, I used to set up the database like this:
var DbProvider = require('./db').DbProvider;
// Setup db instance
var dbProvider = new DbProvider(
process.env.mongo_host || 'localhost',
process.env.mongo_port || 27017,
process.env.mongo_db || 'nki'
);
Now I do the same, but in addition, I call the new connect-function:
// Connect to db. I use (for now) 1 connection for the lifetime of this app.
// And I do not do anything in the connect callback here (we do in the testing)
dbProvider.connect(function(){});
Benjamin actually pointed out that it may be OK, but not best practice, to have the database set up like this in an Express app. Until I figure out what the best practice really is, I will leave this code as it is. Here are a couple of links regarding the subject that I found (but I have still not concluded how I will solve it myself):
What's the best practice for MongoDB connections on Node.js? and
[node-mongodb-native] MongoDB Best practices for beginner
If you like, you are very welcome to follow/fork/whatever this project on GitHub. My goal is to get it as production ready as I can. The link is
https://github.com/frodefi/node-mongodb-json-server
MongoClient.connect is asynchronous.
From the docs:
callback (function) – this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the initialized db object or null if an error occured.
That means the DbProvider's db property isn't set yet in the test, which is why you're getting undefined.
In here:
MongoClient.connect(dbUrl, function(err, db) {
self.db = db;
});
You're telling it "update self.db after the connection has happened", which is at least one event loop tick after this one (but may be more). In your Mocha code you're executing your .describe and .it methods right after creating your DbProvider instance, which means it has not been initialized yet.
I suggest that you refactor DbProvider to accept a callback instead of being a constructor function. Maybe something along the lines of:
var getDbProvider = function(host, port, database, callback) {
  var dbUrl = "mongodb://" + host + ":" + port + "/" + database;
  MongoClient.connect(dbUrl, function(err, db) {
    callback(err, db); // hand back either the connection error or the connected db
  });
};
This also means moving all the DbProvider methods onto an object (maybe the callback will return a dbProvider object and not just a db?).
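A rough sketch of that idea, reusing the method names from the db.js above (the callback hands back a small provider object instead of a bare db handle):
// sketch: the callback receives a provider object rather than a raw db handle
var getDbProvider = function(host, port, database, callback) {
  var dbUrl = "mongodb://" + host + ":" + port + "/" + database;
  MongoClient.connect(dbUrl, function(err, db) {
    if (err) return callback(err);
    callback(null, {
      findAllNotes: function(params, cb) {
        db.collection('notes', function(err, collection) {
          if (err) return cb(err);
          collection.find().toArray(cb);
        });
      }
      // ...findNoteById, saveNote, etc. would be added the same way
    });
  });
};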
Another bug solved by using Unit Tests :)
This is what I used: https://github.com/arunoda/mocha-mongo
It has a set of testing helpers for MongoDB.