I'm new to testing in node.js and I would like to mock the return of a specific function call in a process that looks like the following.
doSomething(function(err, res){
callAnotherOne(res, function(err, result){
getDataFromDB(result, function(err, docs){
//some logic over the docs here
})
})
})
The function I want to mock is getDataFromDB(), and specifically the documents (from MongoDB) that it returns.
How could I do something like this with mocha?
Part of the code, stripped of the logic in between, is the following:
filterTweets(item, input, function(err, item) {
//Some filtering and logging here
db.getTwitterReplies(item, function(err, result) {
if(err) {
return callback('Failed to retrieve tweet replies');
}
//Do some work here on the item using the result (tweet replies)
/***** Here I want to test that the result is the expected ****/
db.storeTweets(item, function (err, result){
//error checks, logging
callback();
});
});
});
Based on the number of Twitter replies (the "getTwitterReplies" call), I modify my object accordingly (I didn't include that code). I want to see whether, for different reply results, my object is constructed as expected.
P.S. After some searching I also looked into sinon.js, and I managed to mock the return of a callback (by writing some test code outside my project), but not the return of a callback of a nested function call.
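For what it's worth, this is roughly the kind of stub I had working outside the project; it's only a sketch, and the db object and document shape are placeholders, not my real code:
var sinon = require('sinon');

// Placeholder object standing in for whatever owns getDataFromDB.
var db = {
    getDataFromDB: function (input, callback) {
        // the real MongoDB call would happen here
    }
};

// Make getDataFromDB call its callback with (null, fakeDocs) instead of hitting the DB.
var fakeDocs = [{ _id: 1, text: 'canned document' }];
sinon.stub(db, 'getDataFromDB').yields(null, fakeDocs);

db.getDataFromDB('whatever', function (err, docs) {
    // docs is fakeDocs here; no database involved
});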
Here's how I would approach this category of problem:
First create a "config.js" that wraps the dependencies that you'd like to inject. This will become your container.
var db = {
doSomeDbWork : function(callback){
callback("db data");
}
};
module.exports = {
db: db
};
From there, you can call config dependencies like so:
var config = require('./index/config');
// inside an Express route handler, where res is in scope
config.db.doSomeDbWork(function(data){
res.render('index', { title: 'Express' , data:data});
});
And in your tests, inject a mock/spy easily:
var config = require('../routes/index/config');
config.db = {
doSomeDbWork : function(callback){
callback("fake db data");
}
};
var indexRouter = require('../routes/index');
indexRouter.get('/');
Because the require call refers to the same config module exports, the changes made to the config in the spec will be reflected wherever it is imported via require().
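For instance, a spec along these lines exercises the idea; the file paths, the use of sinon, and the assertion style are assumptions layered on the example above, not a fixed recipe:
var assert = require('assert');
var sinon = require('sinon');
var config = require('../routes/index/config');

describe('GET /', function () {
    it('receives the fake db data', function (done) {
        // swap the real wrapper for a stub that yields canned data
        config.db = {
            doSomeDbWork: sinon.stub().yields('fake db data')
        };

        // the router require()s the same config object, so it now talks to the stub
        var indexRouter = require('../routes/index');

        // exercising the route is app-specific (supertest is a common choice);
        // the essential point is that whatever runs ends up calling the stub
        config.db.doSomeDbWork(function (data) {
            assert.strictEqual(data, 'fake db data');
            sinon.assert.calledOnce(config.db.doSomeDbWork);
            done();
        });
    });
});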
I am a Node.js beginner and I ran into some callback behaviour that I don't understand. I am using an Express router to write Mongoose objects to MongoDB using a POST request. In the body of the request I am passing a nested JSON structure with two fields: jobDetails and examples. The data in jobDetails is used to create a Job Mongoose object, and the data in examples is used to create several Example Mongoose objects. The Job and Example objects are linked in that a Job contains a list of Example objects in one of its fields.
I tried to implement this with callbacks as follows. Basically I save the Job object to Mongo first, then iterate over the examples, each time creating an Example object, linking it to the job via its .job field, and saving the Example object to Mongo. Then, in the callback of the Example object's save function, I update the Job object with the new Example object and save the updated version to Mongo.
router.post('/jobs', function (req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function (err, job) {
console.log(err);
});
//save examples
for(i=0; i<examples.length;i++){
var eg = new Example({content: examples[i]});
eg.job=job;
eg.save(function (err, eg){
job.examples.push(eg);
job.save(function(err, job){
console.log(err);
});
console.log(err);
});
}
});
This did not behave as I expected. Specifically, double the number of examples were actually saved to Mongo, with several duplicates and some missing. I understand that callbacks are asynchronous, but that still doesn't explain to me why twice as many examples would be saved, with some duplicated and some missing.
I eventually got it to work correctly without using callbacks at all in the following way.
router.post('/jobs', function (req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function (err, job) {
console.log(err);
});
//save examples
for(i=0; i<examples.length;i++){
var eg = new Example({content: examples[i]});
eg.job=job;
eg.save(function (err, eg){
console.log(err);
});
job.examples.push(eg);
job.save(function(err,job){
console.log(err);
});
}
});
And I'm not sure if this is the optimal solution either. But I would like to know why my original approach resulted in the unintended behaviour.
This should work:
router.post('/jobs', function(req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function(err, result) {
if (!err) {
//save examples
for (var i = 0; i < examples.length; i++) {
var eg = new Example({
content: examples[i]
});
eg.job = job;
eg.save(function(err, eg) {
job.examples.push(eg);
job.save(function(err, job) {
if (!err)
job.examples = [];
});
console.log(err);
});
}
}
});
});
I would suggest using a library like async to perform such save operations in a step-by-step manner. This approach gives more readable code and better results:
var async = require('async');
router.post('/jobs', function(req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
var savedExamples = [];
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
async.eachSeries(examples, function iteratee(example, callback) {
var eg = new Example({
content: example
});
eg.job = job;
eg.save(function(err, savedEg) {
if(!err) {
savedExamples.push(savedEg);
}
callback(err)
});
}, function(err) {
if(err) {
//handle errors
}
job.examples = savedExamples;
job.save(function(err,job) {
if(err) {
//handle errors
}
//success callback
});
});
});
Using this approach you only have to call the save function for the job once, after all the other operations have completed. If an error is triggered at any point, the whole flow stops. For more information on the async library, refer to its documentation.
I'm making a CRUD List App using Mongo/Express and having callback trouble!
I have a routes file with my HTTP methods and a services file that pulls in my mongoose model, Item.
Because this is from a class I'm taking, I already have a GET and a POST that run correctly, but I'm having a total headache trying to get my DELETE working.
Routes.js
router.post('/items', function(req, res) {
Item.save(req.body.name, function(item) {
res.status(201).json(item);
}, function(err) {
res.status(400).json(err);
});
});
router.delete('/items/:id'), function(req, res) {
var ref = req.params.id;
Item.remove(ref, function(err, item){
res.json(item);
})
};
Services.js
exports.save = function(name, callback, errback) {
Item.create({ name: name }, function(err, item) {
if (err) {
errback(err);
return;
}
callback(item);
});
};
exports.remove = function(id, callback){
Item.findByIdAndRemove(id, callback())
};
Obviously my delete/remove is still pretty CRUD... I included the POST for reference. Help would be amazing, as my attempts at translating existing examples into my architecture have been total failures!
Your immediate problem is that you execute the callback instead of passing it into the Mongoose method in the remove function:
exports.remove = function(id, callback){
Item.findByIdAndRemove(id, callback())
};
should be, so that mongoose can call it:
exports.remove = function(id, callback){
Item.findByIdAndRemove(id, callback)
};
I wonder why you feel it necessary to wrap the Mongoose methods findByIdAndRemove and create with your own and to put them in a service? The new methods don't seem to add any value.
If you want to do things before or after deleting the model, it would be more idiomatic to trap the Mongoose model life-cycle events using middleware: http://mongoosejs.com/docs/middleware.html.
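A rough sketch of what that looks like (the schema and the logging are placeholders); note that document middleware like this fires on doc.remove(), not on query helpers such as findByIdAndRemove:
var mongoose = require('mongoose');

var itemSchema = new mongoose.Schema({ name: String });

// runs before a document is removed via item.remove()
itemSchema.pre('remove', function (next) {
    console.log('about to remove item %s', this._id);
    next();
});

// runs after the removal has completed
itemSchema.post('remove', function (doc) {
    console.log('removed item %s', doc._id);
});

var Item = mongoose.model('Item', itemSchema);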
I have the following code to fetch some data from the DB (Mongo).
function getAllUsers(){
var UsersPromise = Q.defer();
UserSchema.find({}, function(err, data){
if(err){
UsersPromise.reject(err);
}else {
UsersPromise.resolve(data);
}
});
return UsersPromise.promise;
}
Then I modify each of these models. I add certain fields to the model depending on the type of user. (This is working correctly).
function buildUsers(users){
// my code iterates over users and adds
// properties as required.
// Working fine.
return users; // updated users.
}
Now I want to save these updated models back to Mongo, and this is where it's making me pull my hair out.
function saveUsers(users){
// here, the users are received correctly. But the following line to save the users fails.
var SaveUsersPromise = Q.defer();
UserSchema.save(users, function(err, data){
if(err){
SaveUsersPromise.reject(err);
} else {
SaveUsersPromise.resolve(data);
}
});
return SaveUsersPromise.promise;
}
Lastly I call these functions like:
DB.connect()
.then(getAllUsers)
.then(buildUsers)
.then(saveUsers)
.catch(errorHandler);
Everything works correctly until I call UserSchema.save. What could be the problem?
PS: I am using mongoose.
TIA.
UserSchema.save accepts a single instance; you have to loop through the users and save each one. Mongoose doesn't have bulk inserts implemented yet (see issue #723).
Here's a simple implementation using async.eachSeries:
function saveUsers(users){
var async = require('async'); // <== npm install async --save
var SaveUsersPromise = Q.defer();
async.eachSeries(users, function(user, done){
UserSchema.save(user, done);
// or, if user is a Mongoose document object:
// user.save(done);
}, function(err){
if(err){
SaveUsersPromise.reject(err);
} else {
SaveUsersPromise.resolve();
}
});
return SaveUsersPromise.promise;
}
I am trying to develop an API with Node.js which accepts an object containing multiple queries to MongoDB and answers with an object containing the different results (in fact JSON).
I use Express and my code is:
var nb_query=0;
var results;
//api
app.get("/api/:p",api);
function api(req, res) {
var jsonq=decodeURIComponent(req.params.p);
//console.log(jsonq);
var queries=JSON.parse(jsonq);
nb_query=Object.keys(queries).length;
results={};
for(var nq in queries) { // for each query
do_find_query(nq,queries[nq], function() {
//todo : managing head
res.end(JSON.stringify(results));
}
);
}
} // end of api function
function do_find_query (name_query,query,callback) {
var collection=fdb.collection(query.collection);
collection.find(query.find,query.fields,query.options).toArray(function(err,docs) {
if(err) throw err;
results[name_query]=docs;
nb_query--;
if(nb_query==0)
callback();
}
);
}
As you see, I use global vars to store the results and the counter nb_query. And I ask myself whether this is a problem or not (right now it isn't, because I am alone on the server, but what about when there are thousands of billions of us? :-) ).
As I understand Node, there is only one thread, and Node will run a started job to completion unless it hits an I/O call; in that case it queues the I/O together with its callback and starts handling a new request.
If this is correct, I think Node could be handling two or more different calls to my API (each needing Mongo calls) at the same time, and so could store different values in the global vars, which are shared (there's only one thread).
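To make the worry concrete, here is a tiny artificial sketch (not my real code, the timings are made up) of two overlapping requests trampling the shared variables:
var results = {};
var nb_query = 0;

// stands in for one call to my api() function
function fakeApi(name, delayMs, done) {
    nb_query = 1;      // the second "request" resets the counter of the first
    results = {};      // ...and wipes its partial results
    setTimeout(function () {   // stands in for the Mongo round-trip
        results[name] = 'docs for ' + name;
        nb_query--;
        if (nb_query === 0) done(results);
    }, delayMs);
}

fakeApi('A', 50, function (r) { console.log('A sees', r); });
fakeApi('B', 10, function (r) { console.log('B sees', r); });
// Prints only "B sees { B: ... }": request A's callback never fires because
// B clobbered nb_query, and both requests read the same shared results object.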
If this is right, I would also like to know the best way to change it.
My idea is to declare results and nb_query inside the api function and pass them to do_find_query, but nb_query isn't an object, so it doesn't get updated correctly.
I know I can put nb_query in an object to pass it 'by reference', but first I want to know whether it is necessary, and whether that is a good way or there is a better one.
Thanks for your help!
Doom.
------------------------------------------------------------------------------
EDIT:
I have changed my code and it seems to work without global vars and without the async library (which, for me, would be using a hammer to swat a fly):
//api
app.get("/api/:p",api);
function api(req, res) {
var jsonq=decodeURIComponent(req.params.p);
//console.log(jsonq);
var queries=JSON.parse(jsonq);
var query_names=Object.keys(queries);
var results={};
var query_left=query_names.length;
query_names.map( function(query_name) {
var query=queries[query_name];
var collection=fdb.collection(query.collection);
collection.find(query.find,query.fields,query.options).toArray(function(err,docs) {
if(err) throw err; //todo : handle errors in a better way
results[query_name]=docs;
if(--query_left==0)
res.json(results);
}
);
}
);
}
But I still do not know whether this is necessary or not. (I think so, but I am new to Node, so...)
Thanks to mscdex, as his answer made me aware of res.json() and helped me understand the variable scoping.
Instead of using globals, try this (uses the async module):
var async = require('async');
// ...
app.get('/api/:p', api);
function api(req, res) {
var jsonq = decodeURIComponent(req.params.p),
queries = JSON.parse(jsonq),
keys = Object.keys(queries),
results = {};
async.each(keys, function(name, cb) {
var query = queries[name],
collection = fdb.collection(query.collection);
collection.find(query.find, query.fields, query.options)
.toArray(function(err, docs) {
if (err) return cb(err);
results[name] = docs;
cb();
}
);
}, function(err) {
if (err) throw err; // TODO: handle better
res.json(results);
});
} // end of api function
For example, I have the code below:
var db = require('./_mongo.js');
module.exports = {
check: function (cb) {
var content = {};
content.collection = 'counters';
content.query = {_id: 'ping'};
content.columns = {};
db.read(content, function(err, result){
if (err) {
cb(-1);
}
else {
cb(0);
}
});
}
};
How do I write a unit test for the 'check' function, without actually accessing the database, while at the same time checking that the correct 'content' variable is being passed to the read method?
You can mock an entire module with a mock framework, like sinon.js:
var db = sinon.mock(require('./_mongo.js'));
I would not recommend mocking the database access directly; it could require you to code all possible responses.
It would be best to hide the database access behind an abstracted service layer and mock that layer.
For example, you can create a database access layer in this way:
// da.js: the data-access layer (the file name is just an example)
var db = require('./_mongo.js');
module.exports = {
//this is a mockable method
getCounter: function (id, callback) {
var content = {};
content.collection = 'counters';
content.query = {_id: id};
content.columns = {};
db.read(content, callback);
}
};
//and then using it (assuming the layer above is saved as da.js)
var da = require('./da');
module.exports = {
check: function (cb) {
//access the actual method or the mock
da.getCounter('ping', function(err, result){
if (err) {
cb(-1);
}
else {
cb(0);
}
});
}
};
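A spec can then swap out that layer and assert on what check() passes along. This is only a sketch; the file names ('./da', './checker') and the mocha/sinon usage are assumptions, not part of the original code:
var assert = require('assert');
var sinon = require('sinon');
var da = require('./da');           // the data-access layer shown above
var checker = require('./checker'); // the module exporting check()

describe('check()', function () {
    afterEach(function () {
        da.getCounter.restore();
    });

    it('calls back with 0 and asks for the right counter', function (done) {
        // stub the service layer so no database is touched
        sinon.stub(da, 'getCounter').yields(null, { _id: 'ping' });

        checker.check(function (code) {
            assert.strictEqual(code, 0);
            // verify what check() passed down to the layer
            sinon.assert.calledWith(da.getCounter, 'ping');
            done();
        });
    });

    it('calls back with -1 when the lookup fails', function (done) {
        sinon.stub(da, 'getCounter').yields(new Error('boom'));

        checker.check(function (code) {
            assert.strictEqual(code, -1);
            done();
        });
    });
});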
test-studio provides mechanisms for stubbing out module dependencies. It also supports things like executing individual tests or groups of tests, and stepping node-inspector into individual tests.
Read more about it here.