I have the following code to fetch some data from the db (Mongo).
function getAllUsers() {
    var UsersPromise = Q.defer();
    UserSchema.find({}, function (err, data) {
        if (err) {
            UsersPromise.reject(err);
        } else {
            UsersPromise.resolve(data);
        }
    });
    return UsersPromise.promise;
}
Then I modify each of these models. I add certain fields to the model depending on the type of user. (This is working correctly).
function buildUsers(users) {
    // my code iterates over users and adds
    // properties as required.
    // Working fine.
    return users; // updated users.
}
Now I want to save these updated models back to Mongo, and this is where I'm pulling my hair out.
function saveUsers(users) {
    // here, the users are received correctly. But the following line to save the users fails.
    var SaveUsersPromise = Q.defer();
    UserSchema.save(users, function (err, data) {
        if (err) {
            SaveUsersPromise.reject(err);
        } else {
            SaveUsersPromise.resolve(data);
        }
    });
    return SaveUsersPromise.promise;
}
Lastly, I call these functions like so:
DB.connect()
    .then(getAllUsers)
    .then(buildUsers)
    .then(saveUsers)
    .catch(errorHandler);
Everything works correctly until I call UserSchema.save. What could be the problem?
PS: I am using mongoose.
TIA.
UserSchema.save accepts a single instance; you have to loop through users and save each one. Mongoose doesn't have bulk inserts implemented yet (see issue #723).
Here's a simple implementation using async.eachSeries:
function saveUsers(users) {
    var async = require('async'); // <== npm install async --save
    var SaveUsersPromise = Q.defer();
    async.eachSeries(users, function (user, done) {
        UserSchema.save(user, done);
        // or
        user.save(done); // if user is a Mongoose document object
    }, function (err) {
        if (err) {
            SaveUsersPromise.reject(err);
        } else {
            SaveUsersPromise.resolve();
        }
    });
    return SaveUsersPromise.promise;
}
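For completeness, here is a minimal alternative sketch that avoids the async dependency and saves in parallel, by wrapping each document's callback-style save in a Q promise. It assumes each element of users is a Mongoose document with a .save() method:

function saveUsers(users) {
    // Q.ninvoke turns user.save(callback) into a promise;
    // Q.all resolves once every save has finished (or rejects on the first error).
    return Q.all(users.map(function (user) {
        return Q.ninvoke(user, 'save');
    }));
}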
Related
I am a nodejs beginner and I ran into some callback behaviour that I don't understand. I am using an Express router to write Mongoose objects to MongoDB using a POST request. In the body of the request I am passing a nested JSON structure with two fields - jobDetails and examples. The data in jobDetails is used to create a Job Mongoose object and the data in examples is used to create several Example Mongoose objects. The Job and Example objects are linked in that a Job contains a list of Example objects in one of its fields.
The way I tried to implement this was with callbacks, in the following way. Basically, I save the Job object to mongo first, then iterate over the examples - each time creating an Example object, linking it to the job via the .job field, and saving the Example object to mongo. Then, in the callback to the Example object's save function, I update the Job object with the new Example object and save the updated version to mongo.
router.post('/jobs', function (req, res, next) {
    var job = new Job(req.body.jobDetails);
    var examples = req.body.examples;
    console.log("JOB DETAILS");
    console.log(req.body.jobDetails);
    console.log("EXAMPLES");
    console.log(req.body.examples);
    //save job
    job.save(function (err, job) {
        console.log(err);
    });
    //save examples
    for (i = 0; i < examples.length; i++) {
        var eg = new Example({content: examples[i]});
        eg.job = job;
        eg.save(function (err, eg) {
            job.examples.push(eg);
            job.save(function (err, job) {
                console.log(err);
            });
            console.log(err);
        });
    }
});
This did not perform as I expected. Specifically, double the number of examples were actually saved to mongo, with several duplicates and some missing. I understand that callbacks are asynchronous, but to me that still doesn't explain why double the number of examples would be saved, with some duplicated and some missing.
I eventually got it to work correctly without using callbacks at all in the following way.
router.post('/jobs', function (req, res, next) {
    var job = new Job(req.body.jobDetails);
    var examples = req.body.examples;
    console.log("JOB DETAILS");
    console.log(req.body.jobDetails);
    console.log("EXAMPLES");
    console.log(req.body.examples);
    //save job
    job.save(function (err, job) {
        console.log(err);
    });
    //save examples
    for (i = 0; i < examples.length; i++) {
        var eg = new Example({content: examples[i]});
        eg.job = job;
        eg.save(function (err, eg) {
            console.log(err);
        });
        job.examples.push(eg);
        job.save(function (err, job) {
            console.log(err);
        });
    }
});
And I'm not sure this is the optimal solution either, but I would like to know why my original approach resulted in the unintended behaviour.
This should work:
router.post('/jobs', function (req, res, next) {
    var job = new Job(req.body.jobDetails);
    var examples = req.body.examples;
    console.log("JOB DETAILS");
    console.log(req.body.jobDetails);
    console.log("EXAMPLES");
    console.log(req.body.examples);
    //save job
    job.save(function (err, result) {
        if (!err) {
            //save examples
            for (i = 0; i < examples.length; i++) {
                var eg = new Example({
                    content: examples[i]
                });
                eg.job = job;
                eg.save(function (err, eg) {
                    job.examples.push(eg);
                    job.save(function (err, job) {
                        if (!err)
                            job.examples = [];
                    });
                    console.log(err);
                });
            }
        }
    });
});
I would suggest you use a library like async to perform such save operations in a step-by-step manner. This approach gives better readability of code and better results:
var async = require('async');

router.post('/jobs', function (req, res, next) {
    var job = new Job(req.body.jobDetails);
    var examples = req.body.examples;
    var savedExamples = [];
    console.log("JOB DETAILS");
    console.log(req.body.jobDetails);
    console.log("EXAMPLES");
    console.log(req.body.examples);

    async.eachSeries(examples, function iteratee(example, callback) {
        var eg = new Example({
            content: example
        });
        eg.job = job;
        eg.save(function (err, savedEg) {
            if (!err) {
                savedExamples.push(savedEg);
            }
            callback(err);
        });
    }, function (err) {
        if (err) {
            //handle errors
        }
        job.examples = savedExamples;
        job.save(function (err, job) {
            if (err) {
                //handle errors
            }
            //success callback
        });
    });
});
Using this approach, you only have to call the save function for job once, after all other operations have completed. If an error occurs at any point, the whole flow stops. For more info on the async library, refer to its documentation.
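If your Mongoose version provides Model.insertMany (4.4 or later), a hedged sketch of the same flow with a single bulk insert could look like this; the names mirror the code above, and the response handling is illustrative:

router.post('/jobs', function (req, res, next) {
    var job = new Job(req.body.jobDetails);
    // build plain objects referencing the job, then insert them in one call
    var docs = req.body.examples.map(function (content) {
        return { content: content, job: job._id };
    });
    Example.insertMany(docs, function (err, savedExamples) {
        if (err) return next(err);
        job.examples = savedExamples;
        job.save(function (err, savedJob) {
            if (err) return next(err);
            res.json(savedJob); // illustrative response
        });
    });
});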
I'm new to testing in Node.js and I would like to mock the return value of a specific function call in a process that looks like the following.
doSomething(function (err, res) {
    callAnotherOne(res, function (err, result) {
        getDataFromDB(result, function (err, docs) {
            //some logic over the docs here
        })
    })
})
The function that I want to mock is the getDataFromDB() and specifically the documents (using MongoDB) that it returns.
How could I do something like this with mocha?
Part of the code, stripped of the logic in between, is the following:
filterTweets(item, input, function (err, item) {
    //Some filtering and logging here
    db.getTwitterReplies(item, function (err, result) {
        if (err) {
            return callback('Failed to retrieve tweet replies');
        }
        //Do some work here on the item using the result (tweet replies)
        /***** Here I want to test that the result is the expected ****/
        db.storeTweets(item, function (err, result) {
            //error checks, logging
            callback();
        });
    });
});
Based on the number of Twitter replies (the "getTwitterReplies" call), I will modify my object accordingly (that code is not included). I want to see whether, for different reply results, my object is constructed as expected.
p.s. I also looked into sinon.js and, after some searching, I managed to mock the return of a callback (by writing some test code outside my project), but not the return of a callback of a nested function call.
Here's how I would approach this category of problem:
First create a "config.js" that wraps the dependencies that you'd like to inject. This will become your container.
var db = {
    doSomeDbWork: function (callback) {
        callback("db data");
    }
};

module.exports = {
    db: db
};
From there, you can call config dependencies like so:
var config = require('./index/config');

config.db.doSomeDbWork(function (data) {
    res.render('index', { title: 'Express', data: data });
});
And in your tests, inject a mock/spy easily:
var config = require('../routes/index/config');

config.db = {
    doSomeDbWork: function (callback) {
        callback("fake db data");
    }
};

var indexRouter = require('../routes/index');
indexRouter.get('/');
Because the require call refers to the same config module exports, the changes made to the config in the spec will be reflected wherever it is imported via require().
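Since the question mentions sinon.js, here is a minimal sketch of stubbing the injected dependency with sinon inside a mocha test; the file paths and names are the illustrative ones from above:

var sinon = require('sinon');
var config = require('../routes/index/config');

describe('index route', function () {
    beforeEach(function () {
        // yields() makes the stub invoke its callback argument with the given value
        sinon.stub(config.db, 'doSomeDbWork').yields('fake db data');
    });
    afterEach(function () {
        config.db.doSomeDbWork.restore(); // put the real dependency back
    });
    it('uses the fake db data', function () {
        config.db.doSomeDbWork(function (data) {
            // assert on `data` here; it will be 'fake db data'
        });
    });
});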
I'm building a basic blog in Node.js / Express using MongoDB with the Mongoose ODM.
I have a pre 'save' hook that I'd like to use to auto-generate a blog/idea slug for me. This works well, except for the part where I want to query for other existing posts with the same slug before continuing.
However, it appears that this does not have access to .find or .findOne(), so I keep getting an error.
What's the best way to approach this?
IdeaSchema.pre('save', function (next) {
    var idea = this;

    function generate_slug(text) {
        return text.toLowerCase().replace(/[^\w ]+/g, '').replace(/ +/g, '-').trim();
    }

    idea.slug = generate_slug(idea.title);

    // this has no method 'find'
    this.findOne({slug: idea.slug}, function (err, doc) {
        console.log(err);
        console.log(doc);
    });
    //console.log(idea);
    next();
});
Unfortunately, it's not documented very well (there is no mention of it in the Document.js API docs), but documents have access to their models through the constructor field. I use it all the time for logging things from plugins, since it tells me which model they're attached to.
module.exports = function readonly(schema, options) {
    schema.pre('save', function (next) {
        console.log(this.constructor.modelName + " is running the pre-save hook.");
        // some other code here ...
        next();
    });
};
For your situation, you should be able to do:
IdeaSchema.pre('save', function (next) {
    var idea = this;

    function generate_slug(text) {
        return text.toLowerCase().replace(/[^\w ]+/g, '').replace(/ +/g, '-').trim();
    }

    idea.slug = generate_slug(idea.title);

    // this now works
    this.constructor.findOne({slug: idea.slug}, function (err, doc) {
        console.log(err);
        console.log(doc);
        next(err, doc);
    });
    //console.log(idea);
});
In this you have the document, not the model; the method findOne is not present on the document.
If you need the model, you can always retrieve it as shown here. But a cleverer approach is to just assign the model to a variable at the point of creation.
Then use this variable anywhere you need it. If it is in another file, use module.exports and require to make it available anywhere else in your project.
Something like this:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

mongoose.connect('mongodb://localhost/dbname', function (err) {
    // if we failed to connect, abort
    if (err) throw err;

    var IdeaSchema = Schema({
        ...
    });

    IdeaSchema.pre('save', function (next) {
        var idea = this;

        function generate_slug(text) {
            return text.toLowerCase().replace(/[^\w ]+/g, '').replace(/ +/g, '-').trim();
        }

        idea.slug = generate_slug(idea.title);

        // the IdeaModel variable is in scope here by the time the hook runs
        IdeaModel.findOne({slug: idea.slug}, function (err, doc) {
            console.log(err);
            console.log(doc);
            next(err);
        });
    });

    // note: hooks must be registered before the model is compiled
    var IdeaModel = mongoose.model('Idea', IdeaSchema);

    // we connected ok
})
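A minimal alternative sketch: instead of keeping the model in a variable, look it up by its registered name inside the hook. This assumes the model was registered as 'Idea', and adds an _id guard (an assumption beyond the original code) so a document does not match itself:

IdeaSchema.pre('save', function (next) {
    var Idea = mongoose.model('Idea'); // lookup by registered name
    Idea.findOne({ slug: this.slug, _id: { $ne: this._id } }, function (err, existing) {
        // `existing` is non-null when another document already uses this slug
        next(err);
    });
});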
I'm developing an application and need to add many items at a time.
How can I do that with node.js?
This is the npm module for parse.com, but there is no method like:
insertAll("Foo", [objs...], ...)
I don't want to insert a single object at a time.
Write a convenience function that interfaces between your application and parse.com. You will only have to write the iteration code once (or debug mine).
var async = require('async');
var Parse = require('node-parse-api').Parse;

var APP_ID = "";
var MASTER_KEY = "";
var parseApp = new Parse(APP_ID, MASTER_KEY);

// note: `class` is a reserved word in JavaScript, so the parameter is named className
function insertAll(className, objs, callback) {
    // an iterator function(obj, done) that inserts one object into the
    // given class and calls done() upon completion.
    var insertOne = function (obj, done) {
        parseApp.insert(className, obj, function (err, response) {
            if (err) { return done(err); }
            // maybe do other stuff here before calling done?
            var res = JSON.parse(response);
            if (!res.objectId) { return done('No object id'); }
            done(null, res.objectId);
        });
    };

    // async.map calls insertOne with each obj in objs. the callback is executed
    // once every iterator function has called back `done(null, data)` or any one
    // has called back `done(err)`. use async.mapLimit if throttling is needed
    async.map(objs, insertOne, function (err, objectIds) {
        if (err) { return callback(err); }
        // no errors
        callback(null, objectIds);
    });
}
// Once you've written this and made the function accessible to your other code,
// you only need this outer interface.
insertAll('Foo', [{a: 'b'}, {a: 'd'}], function (err, ids) {
    if (err) {
        console.log('Error inserting all the Foos');
        console.log(err);
    } else {
        console.log('Success!');
    }
});
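If parse.com throttles you, the mapLimit variant mentioned in the comments caps the number of in-flight inserts. The limit of 5 below is illustrative; the arguments are otherwise identical to async.map:

async.mapLimit(objs, 5, insertOne, function (err, objectIds) {
    if (err) { return callback(err); }
    callback(null, objectIds);
});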
I have a dashboard view (dashboard.jade) that will display two panels with different information; all that info should be retrieved from a database and then sent to the view.
Let's say I have a route file (document.js) with two actions defined:
exports.getAllDocuments = function (req, res) {
    doc = db.model('documents', docSchema);
    doc.find({}, function (err, documents) {
        if (!err) {
            // handle success
        } else {
            throw err;
        }
    });
};

exports.getLatestDocumentTags = function (req, res) {
    tags = db.model('tags', tagSchema);
    tags.find({}, function (err, docs) {
        if (!err) {
            // handle success
        } else {
            throw err;
        }
    });
};
These functions would only serve the purpose of retrieving data from the database.
Now I would like to send that data to the dashboard view from my dashboard.js route file, under the exports.index function where I render my dashboard view.
The problem is, since the db calls are async, I wouldn't have access to the data before I call the view.
I guess I could have an action that simply did all my db calls and, through callbacks, delivered all the data at once to the view, but that would make my data retrieval actions non-reusable.
I'm really confused about how to tackle this problem correctly; probably I'm getting this async thing all wrong. Can someone give me some hints on how to do this properly?
Here's something to pique your interest.
//Check out the async.js library
var async = require('async');

//Set up your models once at program startup, not on each request
//Ideally these would be in separate modules as well
var Doc = db.model('documents', docSchema);
var Tags = db.model('tags', tagSchema);

function index(req, res, next) {
    async.parallel({ //Run every function in this object in parallel
        //async.apply(fn, {}) does the equivalent of fn({}, callback) here;
        //bind keeps `this` pointing at the model when find is called detached
        allDocs: async.apply(Doc.find.bind(Doc), {}), //gets all documents
        latestDocs: async.apply(Tags.find.bind(Tags), {})
    }, function (error, results) { //This function gets called when all parallel jobs are done
        //results will be like {
        //  allDocs: [doc1, doc2],
        //  latestDocs: [doc3, doc4]
        //}
        res.render('index', results);
    });
}

exports.index = index;
Try some more tutorials. If you haven't had the "aha" moment about how async programming works in Node, keep going through guided, hand-held tutorials before trying to write brand-new programs without guidance.
//Check out the async.js library and the mongoose models
var mongoOp = require("./models/mongo");
var async = require('async');

router.get("/", function (req, res, next) {
    var locals = {};
    var userId = req.params.userId;

    async.parallel([
        //Load user data
        function (callback) {
            mongoOp.User.find({}, function (err, user) {
                if (err) return callback(err);
                locals.user = user;
                callback();
            });
        },
        //Load posts data
        function (callback) {
            mongoOp.Post.find({}, function (err, posts) {
                if (err) return callback(err);
                locals.posts = posts;
                callback();
            });
        }
    ], function (err) { //This function gets called after the two tasks have called their "task callbacks"
        if (err) return next(err); //If an error occurred, we let express handle it by calling the `next` function
        //Here `locals` will be an object with `user` and `posts` keys
        //Example: `locals = {user: ..., posts: [...]}`
        res.render('index.ejs', {userdata: locals.user, postdata: locals.posts});
    });
});
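For comparison, here is a hedged sketch of the same route using the promises that Mongoose queries expose via .exec(). It assumes Mongoose 4 or later, configured to use a then-able promise (e.g. mongoose.Promise = global.Promise):

router.get("/", function (req, res, next) {
    // run both queries concurrently and wait for both to resolve
    Promise.all([
        mongoOp.User.find({}).exec(),
        mongoOp.Post.find({}).exec()
    ]).then(function (results) {
        res.render('index.ejs', {userdata: results[0], postdata: results[1]});
    }).catch(next); // let express handle any query error
});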