My goal is to have an integration test that tests a Node module which saves values in Redis. Here is the test:
require('coffee-script');
var should = require('should')
  , redis = require('redis')
  , async = require('async')
  , Domain = require('../index');

async.series([
  function (callback) {
    db = redis.createClient();
    callback(null, 'zero');
  },
  function (callback) {
    db.flushdb(function (err, didSucceed) {
      if (err) {
        console.log('error: ' + err.message);
        callback(err, 'one');
      }
    });
    callback(null, 'one');
  },
  function (callback) {
    should.exist(Domain);
    domain = new Domain({'attOne': 1, 'attTwo': 2, 'id': 1});
    domain.should.have.property('save');
    domain.should.have.property('attOne');
    domain.should.have.property('attTwo');
    domain.save.should.be.an.instanceof(Function);
    callback(null, 'two');
  },
  function () {
    db.end();
  }
],
function (err, results) {
  if (err) {
    console.log('error encountered: ' + err.message);
    db.end();
  }
  console.log('All tests passed');
});
With this test, the problem is that the Redis connection is closed before Redis flushes the database (if I remove db.end() entirely, the test hangs but the Redis db is flushed). Apparently async.series is not working as I expect, where every function runs before the next one in serial order, or am I misunderstanding something? How can I ensure this test executes serially so the Redis connection is not closed before the flush? Any recommendations of libraries/frameworks that could help with what I am trying to accomplish here? Thank you.
To end the connection and stop the hang, call client.quit() or client.end() at the proper place.
For example, put this at the end of the code:
//...
setTimeout(function(){ client.quit(); }, 3000);
// 3000 means wait 3 seconds and quit the redis connection
client.unref() is another (experimental) function that can do the job.
Refer to the docs.
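For instance, here is a minimal sketch of what "at the proper place" could look like. It is not the exact code from the question and goes slightly beyond this answer: each series step invokes its callback only after its Redis operation finishes, so quit() in the final step runs after the flush has completed.
// Sketch only: callbacks fire after each Redis operation, then quit() closes the connection.
var redis = require('redis');
var async = require('async');

var db;

async.series([
  function (callback) {
    db = redis.createClient();
    callback(null, 'zero');
  },
  function (callback) {
    db.flushdb(function (err) {
      callback(err, 'one'); // wait for the flush before moving on
    });
  },
  function (callback) {
    // ... run the Domain assertions here ...
    callback(null, 'two');
  },
  function (callback) {
    db.quit(); // the "proper place": after everything that needs the connection
    callback(null, 'three');
  }
], function (err) {
  if (err) {
    console.log('error encountered: ' + err.message);
    return db.end();
  }
  console.log('All tests passed');
});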
Well, the issue is that the callback in the Redis commands (i.e. flushdb, hgetall) doesn't stop the particular async process, which makes it hang. I am not sure whether the 'async' module isn't stopping it properly or the 'redis' module does something that prevents 'async' from closing that process... if you know the answer, please share.
The solution was the 'mocha' test framework. After installing it (i.e. npm install mocha) and running 'mocha test/mytest.js', it exits cleanly without any changes to the above code: not even reformatting it for the Mocha framework, it just runs the test and exits nicely. At the same time, it hangs when run with 'node test/mytest.js'... the mystery of why remains :)
I hope this helps... it's time for Mocha now :)
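For reference, if you did want to restructure it for Mocha, a rough, hypothetical sketch of the same checks might look like this (as noted above, the original code ran unchanged, so this is optional):
// Hypothetical Mocha layout of the same checks; names mirror the original test.
var should = require('should');
var redis = require('redis');
var Domain = require('../index');

describe('Domain', function () {
  var db;

  before(function (done) {
    db = redis.createClient();
    db.flushdb(function (err) {
      done(err); // continue only once the flush has finished
    });
  });

  after(function () {
    db.quit(); // lets the mocha process exit cleanly
  });

  it('exposes save and its attributes', function () {
    should.exist(Domain);
    var domain = new Domain({ attOne: 1, attTwo: 2, id: 1 });
    domain.should.have.property('save');
    domain.should.have.property('attOne');
    domain.should.have.property('attTwo');
    domain.save.should.be.an.instanceof(Function);
  });
});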
Related
I'm working with mongojs and writing tests with Mocha, running coverage with Istanbul. My issue is that I would like to include tests for db errors.
var mongojs = require('mongojs');
var db = mongojs.connect(/* connection string */);
var collection = db.collection('test');
...
rpc.register('calendar.create', function (/* ... */) {
  collection.update({...}, {...}, function (err, data) {
    if (err) {
      // this code should be tested
      return;
    }
    // all is good, this is usually covered
  });
});
The test looks like this:
it("should gracefully fail", function (done) {
/* trigger db error by some means here */
invoke("calendar.create", function (err, data) {
if (err) {
// check that the error is what we expect
return done();
}
done(new Error('No expected error in db command.'));
});
});
There is a fairly complex setup script that sets up the integration testing environment. The current solution is to disconnect the database using db.close() and run the test, which produces the error as wanted. The problem with this solution is that all the other tests after it that require the database connection then fail, as I try to reconnect without success.
Any ideas on how to solve this neatly? Preferably without writing custom errors that might not be raised by the next version of mongojs. Or is there a better way of structuring the tests?
What about mocking the library that deals with Mongo?
For example, assuming db.update is eventually the function that gets called by collection.update, you might want to do something like this:
describe('error handling', function () {
  beforeEach(function () {
    sinon.stub(db, 'update').yields('error');
  });

  afterEach(function () {
    // db.update will just error for the scope of this test
    db.update.restore();
  });

  it('is handled correctly', function () {
    // 1) call your function
    // 2) expect that the error is logged, dealt with or
    //    whatever is appropriate for your domain here
  });
});
I've used Sinon, which is: "Standalone test spies, stubs and mocks for JavaScript. No dependencies, works with any unit testing framework."
Does this make sense?
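As a rough sketch of how this could be wired into the test from the question, you might stub collection.update directly instead of db.update. Here collection and invoke are the names used above; the Error the stub yields, and the assumption that calendar.create passes that error back to its caller, are both hypothetical:
// Sketch: force collection.update to fail only for this describe block.
describe('calendar.create error handling', function () {
  beforeEach(function () {
    // Every update call in this test reports a failure.
    sinon.stub(collection, 'update').yields(new Error('forced db error'));
  });

  afterEach(function () {
    // Other tests get the real, working collection back.
    collection.update.restore();
  });

  it('should gracefully fail', function (done) {
    invoke('calendar.create', function (err, data) {
      if (err) return done(); // the stubbed error reached the caller
      done(new Error('No expected error in db command.'));
    });
  });
});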
I'm doing some integration testing in my Node app and at some point in my code I call the following function:
async.parallel([
  function foo(callback) {
    setTimeout(function () {
      // DO SOMETHING HERE;
      callback(null, result);
    }, 500);
  },
  function bar(callback) {
    // DO SOMETHING HERE;
    callback(null, result);
  }
],
function (err, results) {
  // Process results here and continue
});
This code is part of a larger node app.
Now when I try to test my code with Mocha, the test hangs because the timeout in foo() never fires and therefore the parallel execution never finishes. When I remove the setTimeout, the execution completes just fine.
Here's the test code:
it("test something", function(done) {
request(app)
.post(requestUrl)
.send(testRequest)
.expect(200)
.end(function(err, res){
(res.body.text).should.equal('Hello World');
done();
});
});
What I tried:
Apparently, the clock is disabled during testing, so I tried using Sinon to simulate the passing of time, but to no avail.
How can I solve this?
Well, I had a similar problem today.
I was using var clock = sinon.useFakeTimers() without clock.restore().
To check for that, you can run:
console.log(new Date().getTime());
console.log(new Date().getTime());
If you get the same value twice, it means that you may have changed the clock.
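If fake timers turn out to be the cause, a minimal sketch of the usual pattern is to restore the real clock after each test and advance the fake one explicitly where needed:
// Sketch: fake timers are installed per test and always restored afterwards.
var sinon = require('sinon');

describe('code that relies on setTimeout', function () {
  var clock;

  beforeEach(function () {
    clock = sinon.useFakeTimers();
  });

  afterEach(function () {
    clock.restore(); // give the real clock back to the other tests
  });

  it('fires the delayed callback', function (done) {
    setTimeout(done, 500);
    clock.tick(500); // advance the fake clock so the timeout actually runs
  });
});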
I do the clean-up in an after() hook before any other describe. If all tests pass, the clean-up does its job. But if any test fails, the clean-up code receives an error: [Error: no open connections].
I think the assertion thrown in the MongoDB callback is what causes the connection to close.
That is what confuses me:
First, I think the MongoDB callback is the right place to put some assertions;
Second, the assertions throw an error when they fail, which causes the connection to close;
Finally, the clean-up fails because the connection is closed.
So, what else should I do to make the clean-up do its job even when an assertion fails?
I have made some sample code below:
var mongo = require('mongoskin')
  , should = require('should')
  ;

describe('mongo', function () {
  var db;

  before(function () {
    console.log('before');
    db = mongo.db('devstack.local:27017/test');
  });

  after(function (done) {
    console.log('after');
    db.dropDatabase(function (err) {
      should.not.exist(err); // [Error: no open connections]
      db.close(done);
    });
  });

  describe('close', function () {
    it('should count!=0', function (done) {
      db.collection('empty').count(function (err, count) {
        count.should.not.equal(0); // use an empty collection to make sure this fails
        done();
      });
    });
  });
});
Here's a hypothesis: the connection never happens.
When I run your test suite with:
db = mongo.db('nonexistent:3333/test')
instead of the address you have, I can completely reproduce your error. Note that:
count.should.not.equal(0); fails because count is undefined, not because any of the assertion machinery defined by the should module is invoked.
If I transform the test so that it checks err:
it('should count!=0', function (done) {
  db.collection('empty').count(function (err, count) {
    should.not.exist(err); // <<< This is where it fails now!
    count.should.not.equal(0); // use an empty collection to make sure this fails
    done();
  });
});
Then the test fails at should.not.exist(err) and err is:
[Error: failed to connect to [nonexistent:3333]]
A couple of thoughts:
Always check err in your callbacks.
In the before callback which establishes the database connection, perform at least one operation which is guaranteed to fail if the connection is not established. You'd want an operation which is as inexpensive to perform as possible. I don't know Mongo very well but this seems to do the trick:
before(function (done) {
  db = mongo.db(<put address here>, {safe: true});
  db.open(function (err) {
    should.not.exist(err);
    done();
  });
});
This way Mocha will detect the failure right away.
I am using the connection pool in the pg Node.js module.
As I understand it, when you use a connection pool, your connections are created for you and stored; when you need one, some method is invoked and you get an already instantiated connection.
Now, about pg. Here is some simple code:
pg.connect(conString, function (err, client, done) {
  if (err) {
    callback(err);
  } else {
    client.query('SELECT * FROM users', function (err, result) {
      done();
      if (err) return callback(err);
      callback(null, result.rows);
    });
  }
});
When I call the pg.connect method, I assume that I get an already instantiated connection from the pool, and when I call done I return my connection to the pool.
Now I want to debug my application; I might forget to call done or do it in an inefficient way.
How can I print the number of busy connections? Or is there another way to debug this? Thank you in advance.
I had a problem where a request to my route hung for 30 seconds, and I suppose it is because I didn't return my connection to the pool.
From the pg module documentation pages:
var pg = require('pg');

pg.connect(function (err, client, done) {
  var pool = pg.pools.getOrCreate();
  console.log(pool.getPoolSize());           //1
  console.log(pool.availableObjectsCount()); //0
  done();
  console.log(pool.getPoolSize());           //1
  console.log(pool.availableObjectsCount()); //1
});
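Building on that snippet, here is a small sketch of a helper that prints the busy count. It relies only on the pool methods quoted from the docs above (they belong to an older pg release, so check them against your version), and conString is assumed to be your connection string:
// "Busy" here simply means checked-out connections:
// total pool size minus the ones currently sitting idle.
var pg = require('pg');

function logPoolUsage(label) {
  var pool = pg.pools.getOrCreate();
  var busy = pool.getPoolSize() - pool.availableObjectsCount();
  console.log(label + ': ' + busy + ' busy of ' + pool.getPoolSize() + ' total');
}

pg.connect(conString, function (err, client, done) {
  if (err) return console.error(err);
  logPoolUsage('before query');
  client.query('SELECT 1', function (err) {
    done(); // forgetting this call is what leaks connections from the pool
    logPoolUsage('after done');
  });
});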
Hope this helps...
I have a one-shot Node script that makes some changes to a MongoDB database on MongoLab. However, once it finishes, it never exits the event loop (I always have to ctrl+C it), no matter how much db.close() and db.logout() calling I do.
What's strange is, if I start a local running instance of mongod and connect to that, the script finishes fine, but the remote connection just never ends.
Here is a short version of my script that still has the issue (taking the URL to the server on the command line). What's going on?
var mongodb = require("mongodb");
function onSuccess(cb){
return function(err) {
if (err) {
console.error(err)
} else {
cb.apply(this,Array.prototype.slice.call(arguments,1))
}
}
}
console.log("Connecting to "+process.argv[2]+' ...');
mongodb.MongoClient.connect(process.argv[2],onSuccess(function(db){
console.log("Connected.");
db.logout(onSuccess(function(logoutResult){
db.close(onSuccess(function(closeResult){
console.log("All finished. Can has prompt return nao?")
}));
}));
}));
I just tried the code with driver versions 1.2.7/1.2.8 and the newest 1.2.9 against MongoLab, and it works correctly. So it is more likely a weird combination of driver/OS/Node versions that's causing this. I suggest upgrading your Node and driver to the latest versions and trying again.
I suspect it has to do with the way you have defined your closures, but I cannot quite put my finger on it.
For what it's worth, below is the approach that I use, and it does close the connection as expected:
MongoClient.connect(dbUrl, function (err, db) {
  if (err) return callback(err);
  var collection = db.collection(dbCollection);
  collection.find().toArray(function (err, items) {
    db.close();
    if (err) return callback(err);
    callback(null, items);
  });
});
You can find a full example here: https://github.com/hectorcorrea/mongoDbSample