I have a problem.
I want to iterate over a result set and, for each document, find one related result.
This looks like this:
router.get('/', function(req, res) {
var floors = [];
var rooms = [];
req.db.collection('floors').find().sort({_id: 1}).forEach(function(floor) {
floors.push(floor);
});
req.db.collection('rooms').find().sort({_id: 1}).forEach(function(room) {
req.db.collection('floors').findOne({_id: new ObjectID(room.floorId)}, function(error, floor) {
room.floor = floor;
rooms.push(room);
});
});
res.render('rooms', { floors: floors, rooms: rooms });
});
The problem is that the page is rendered before the iteration is complete.
I tried to use async and promises, but I didn't get it to work.
Basically you have to wait until all your queries are done before rendering the result. Unfortunately you aren't using promises, so this gets a bit messy.
It appears that you are using the native driver, and according to the docs, forEach takes a second callback that is called once the iteration is complete:
http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#forEach
router.get('/', function(req, res, next) {
var floors = [];
var rooms = [];
function done(err){
if(err) {
return next(err);
}
res.render('rooms', { floors: floors, rooms: rooms });
}
function getRooms(err){
if(err){
return next(err);
}
req.db.collection('rooms').find().sort({_id: 1}).forEach(function(room) {
// you already have all the floors, no need to hit the db again
// compare as strings since _id is an ObjectID
room.floor = floors.find(floor => String(floor._id) === String(room.floorId));
rooms.push(room);
}, done);
}
req.db.collection('floors').find().sort({_id: 1}).forEach(function(floor) {
floors.push(floor);
}, getRooms);
});
Note that this request will get quite heavy as your database grows.
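For reference, here is a rough sketch of the same logic using promises instead of the forEach callbacks, assuming the 2.x native driver where toArray() returns a promise when called without a callback:
router.get('/', function(req, res, next) {
    Promise.all([
        req.db.collection('floors').find().sort({_id: 1}).toArray(),
        req.db.collection('rooms').find().sort({_id: 1}).toArray()
    ]).then(function(results) {
        var floors = results[0];
        var rooms = results[1];
        // attach each room's floor from the in-memory list, comparing ids as strings
        rooms.forEach(function(room) {
            room.floor = floors.find(function(floor) {
                return String(floor._id) === String(room.floorId);
            });
        });
        res.render('rooms', { floors: floors, rooms: rooms });
    }).catch(next);
});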
I am in the process of learning Node, Express and Mongoose and creating a web application. Sometimes, on one page, I need to display data from two or more of my collections. Although it works just fine, right now I use a bunch of nested if statements, and the code has become very messy.
Example:
app.get("/jobs/:id/edit", function(req, res){
Job.findById(req.params.id, function(err, foundJob){
if (err){
console.log(err)
} else {
User.find({}, function(err, users){
if(err){
console.log(err);
} else {
Client.find({}, function(err, clients){
if(err) {
console.log(err);
} else {
let start_date = foundJob.start_date;
let end_date = foundJob.end_date;
start_date = moment(start_date).format("MM-DD-YYYY");
end_date = moment(end_date).format("MM-DD-YYYY");
// Redirect
res.render("edit_job", {job: foundJob, users: users, clients: clients, start_date, end_date});
}
});
}
});
}
});
});
This example is for a page that displays information from just three collections. Is there a better way to write this kind of code? I feel like using a table of collection names and using a for loop might work, but I am unsure how I would write that.
As an update, I tried the following logic, but it did not work:
app.get("/", function(req, res){
let collections = [Client, User, Ticket, Job];
let endCollections = [];
for (let i = 0; i < collections.length; i++){
collections[i].find({}, function(err, foundCollection){
if (err) {
console.log(err);
} else {
endCollections[i] = foundCollection;
}
})
}
res.render("dashboard", {clients: endCollections[0]});
No matter what I do, endCollections[i] remains undefined even though I have it set to be foundCollection, which is not undefined.
Thanks.
In the for-loop you're executing an asynchronous block of code (collections[i].find()), so JavaScript will not wait for it to finish before running the next block of code, which is the render. That's why you get an empty array.
You need to use async/await to make JavaScript wait until the asynchronous code has finished before doing the rest.
Just add async to the route handler so you can use await inside it.
Something like this:
app.get("/", async function(req, res){ // <== note the async keyword here
let collections = [Client, User, Ticket, Job];
let endCollections = [];
for (let i = 0; i < collections.length; i++){
await collections[i].find({}, function(err, foundCollection){ // <== note the await keyword here
if (err) {
console.log(err);
} else {
endCollections[i] = foundCollection;
}
})
}
res.render("dashboard", {clients: endCollections[0]});
hope it helps
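If the collections don't depend on each other, you could also run the queries in parallel instead of awaiting them one by one. A minimal sketch, assuming Mongoose models where find() without a callback returns a thenable query:
app.get("/", async function(req, res){
    let collections = [Client, User, Ticket, Job];
    try {
        // fire all queries at once and wait for every one of them to finish
        let endCollections = await Promise.all(collections.map(c => c.find({})));
        res.render("dashboard", {clients: endCollections[0]});
    } catch (err) {
        console.log(err);
        res.status(500).send("Database error");
    }
});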
I am a Node.js beginner and I ran into some callback behaviour that I don't understand. I am using an Express router to write Mongoose objects to MongoDB using a POST request. In the body of the request I am passing a nested JSON structure with two fields - jobDetails and examples. The data in jobDetails is used to create a Job Mongoose object and the data in examples is used to create several Example Mongoose objects. The Job and Example objects are linked in that a Job contains a list of Example objects in one of its fields.
The way I tried to implement this was with callbacks in the following way. Basically I save the Job object to mongo first, then iterate over the examples - each time creating an Example object and linking it to the job via the .job field and also saving the Example object to mongo. Then in the callback to the Example object save function I updated the Job object with the new Example object and saved the updated version to mongo.
router.post('/jobs', function (req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function (err, job) {
console.log(err);
});
//save examples
for(i=0; i<examples.length;i++){
var eg = new Example({content: examples[i]});
eg.job=job;
eg.save(function (err, eg){
job.examples.push(eg);
job.save(function(err, job){
console.log(err);
});
console.log(err);
});
}
});
This did not perform as I would expect. Specifically, double the number of examples were actually saved to mongo with several duplicates and some missing. I understand that callbacks are asynchronous but to me that still doesn't explain why double the number of examples would be saved and some would be duplicated and some would be missing.
I eventually got it to work correctly without using callbacks at all in the following way.
router.post('/jobs', function (req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function (err, job) {
console.log(err);
});
//save examples
for(i=0; i<examples.length;i++){
var eg = new Example({content: examples[i]});
eg.job=job;
eg.save(function (err, eg){
console.log(err);
});
job.examples.push(eg);
job.save(function(err,job){
console.log(err);
});
}
});
And I'm not sure if this is the optimal solution either. But I would like to know why my original approach resulted in the unintended behaviour.
This should work:
router.post('/jobs', function(req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
//save job
job.save(function(err, result) {
if (!err) {
//save examples
for (var i = 0; i < examples.length; i++) {
var eg = new Example({
content: examples[i]
});
eg.job = job;
eg.save(function(err, eg) {
if (err) return console.log(err);
job.examples.push(eg);
job.save(function(err, job) {
if (err) console.log(err);
});
});
}
}
});
});
I would suggest you use a library like async to perform such save operations in a step-by-step manner. This approach gives more readable code and more reliable results.
var async = require('async');
router.post('/jobs', function(req, res, next) {
var job = new Job(req.body.jobDetails);
var examples = req.body.examples;
var savedExamples = [];
console.log("JOB DETAILS");
console.log(req.body.jobDetails);
console.log("EXAMPLES");
console.log(req.body.examples);
async.eachSeries(examples, function iteratee(example, callback) {
var eg = new Example({
content: example
});
eg.job = job;
eg.save(function(err, savedEg) {
if(!err) {
savedExamples.push(savedEg);
}
callback(err)
});
}, function(err) {
if(err) {
//handle errors
}
job.examples = savedExamples;
job.save(function(err,job) {
if(err) {
//handle errors
}
//success callback
});
});
});
Using this approach, you only call the save function for the job once, after all the other operations have completed. If an error occurs at any point the whole flow stops. For more info on the async library, see its documentation.
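For comparison, here is a rough equivalent of the same flow using async/await, assuming a Mongoose version where save() returns a promise when called without a callback (the final res.json is just a placeholder response):
router.post('/jobs', async function(req, res, next) {
    try {
        var job = new Job(req.body.jobDetails);
        // save each example in turn, linking it to the job
        for (var content of req.body.examples) {
            var savedEg = await new Example({ content: content, job: job }).save();
            job.examples.push(savedEg);
        }
        // save the job once, after all examples have been saved
        await job.save();
        res.json(job);
    } catch (err) {
        next(err);
    }
});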
I use Node.js and Express for my server.
I need a way to fetch the number of models in a collection and send it to Backbone.
The Express side is rather straightforward:
db.books.count(function (err, booksNr) {
if (err) return;
console.log(booksNr)
res.json(booksNr);
});
But how do I send this over to Backbone?
My first thought was to bind the booksNr to a collection, but I can't make it work.
Here is how I fetch a collection in Backbone:
books.fetch(
{
data: {
'fetchType' : 'list',
'currentPage': 1,
'perPage': 3
},
success: function(books) {
console.log(books);
var books = books.models;
var template = _.template(listBooksTpl, {
books : books,
});
setTimeout(function () {
that.$el.html(template);
$(that.preLoader).hide();
that.$el.show('slow');
}, 500);
},
error: function() {
console.log("BookList error!");
}
}
);
Here is how I send the collection data to Backbone:
exports.books.paginated = function (req, res) {
var currentPage = parseInt(req.query.currentPage - 1),
perPage = parseInt(req.query.perPage),
skip = parseInt(currentPage * perPage);
db.books.find({}).limit(perPage).skip(skip, function (err, books) {
if (err) return;
res.send(books);
});
};
Any ideas?
It is quite simple: you need an API for retrieving books. In Express.js:
app.get('/api/books', function(req, res){
var query = createQueryFromReq(req);
BookModel.find(query).exec(function(err, books) {
res.send(books);
});
});
Then you need a Books Backbone.Collection on the client side. Notice that it does the parsing automatically; you don't need to add your own success parser:
var Book = Backbone.Model.extend({});
var Books = Backbone.Collection.extend({
model:Book,
url: "/api/books",
});
// Some main.js
var myBooks = new Books();
myBooks.fetch({
data: {/* your data */},
reset: true // Trigger reset in backbone 1.0+
});
Now you should listen for the "reset" event fired on myBooks and act upon it.
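For example, a view could re-render whenever the collection is reset (listBooksTpl is the template from the question; the view itself is just an illustration):
var BooksView = Backbone.View.extend({
    initialize: function() {
        // re-render whenever a fetch with reset: true repopulates the collection
        this.listenTo(this.collection, "reset", this.render);
    },
    render: function() {
        this.$el.html(_.template(listBooksTpl, { books: this.collection.models }));
        return this;
    }
});
var booksView = new BooksView({ collection: myBooks });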
After Comments:
The need is to be able to paginate from the server; I would recommend using https://github.com/backbone-paginator/backbone.paginator
If you want to roll your own solution, you could use this approach. On the server side, supply the extra information:
app.get('/api/books', function(req, res){
var limit = req.query.limit;
var offset = req.query.offset;
var bookCount = getBookCount(); // how many books we have
BookModel.find(query).skip(offset).limit(limit).exec(function(err, books) {
res.send({
limit: limit,
offset: offset,
total: bookCount,
books:books,
});
});
});
On the Client side you should use the parse function on Books:
var Books = Backbone.Collection.extend({
model:Book,
url: "/api/books",
parse: function(response){
this.total = response.total;
this.offset = (this.offset || 0) + response.books.length; // I would test this line :)
return response.books; // only using the books part of the response to create the models
}
});
Now you need to keep track of your limit and offset, and on the first fetch give it an offset of zero:
myBooks.fetch({
data: {offset:0, limit:10},
});
Another alternative is to make this collection part of a paginator model (but you can just use backbone.paginator for this).
Upon more research, here is how I did it...
Is this a good way to go?
Is it good practice to fetch a model without an ID?
In Backbone, I instantiate a model without any id:
// Random model - get total number of books.
book = new Book();
// Get the total number of books.
book.fetch({
data : { 'fetchType' : 'booksNr' },
success : function (booksNr) {
that.booksNr = booksNr.get('booksNr');
},
error : function () {
console.log("Error! Failed to get the total number of books.");
}
});
In express, I look for the query string: fetchType and do the following:
exports.books.paginated = function (req, res) {
var fetchType = req.query.fetchType;
if (fetchType == 'booksNr') {
db.books.count(function (err, booksNr) {
if (err) return;
res.json({ 'booksNr' : booksNr });
});
}
};
I have a dashboard view (dashboard.jade) that will display two panels with different information; all that info should be retrieved from a database and then sent to the view.
Let's say I have a route file (document.js) with two actions defined:
exports.getAllDocuments = function(req, res){
doc = db.model('documents', docSchema);
doc.find({}, function(err, documents) {
if (!err) {
// handle success
}
else {
throw err;
}
});
};
exports.getLatestDocumentTags = function(req, res){
tags = db.model('tags', tagSchema);
tags.find({}, function(err, docs) {
if (!err) {
// handle success
}
else {
throw err;
}
});
};
These functions would only serve the purpose of retrieving data from the database.
Now I would like to send that data to the dashboard view from my dashboard.js route file, under the exports.index function where I render my dashboard view.
The problem is that, since the db calls are async, I wouldn't have access to the data before I call the view.
I guess I could have an action that did all my db calls and delivered all the data to the view at once through callbacks, but that would make my data retrieval actions not reusable.
I'm really confused about how to tackle this problem correctly; probably I'm getting this async thing all wrong. Can someone give me some hints on how to do this properly?
Here's something to pique your interest.
//Check out the async.js library
var async = require('async');
//Set up your models once at program startup, not on each request
//Ideally these would be in separate modules as well
var Doc = db.model('documents', docSchema);
var Tags = db.model('tags', tagSchema);
function index(req, res, next) {
    async.parallel({ //Run every function in this object in parallel
        //gets all documents. async.apply will do the equivalent of Doc.find({}, callback) here
        //(bind keeps `this` pointing at the model)
        allDocs: async.apply(Doc.find.bind(Doc), {}),
        latestDocs: async.apply(Tags.find.bind(Tags), {})
    }, function (error, results) { //This function gets called when all parallel jobs are done
        if (error) return next(error);
        //results will be like {
        //  allDocs: [doc1, doc2],
        //  latestDocs: [doc3, doc4]
        //}
        res.render('index', results);
    });
}
exports.index = index;
Try some more tutorials. If you haven't had the "a ha" moment about how async programming works in node, keep going through guided, hand-held tutorials before trying to write brand new programs without guidance.
//Check out the async.js library and mangoose model
var mongoOp = require("./models/mongo");
var async = require('async');
router.get("/",function(req,res){
var locals = {};
var userId = req.params.userId;
async.parallel([
//Load user Data
function(callback) {
mongoOp.User.find({},function(err,user){
if (err) return callback(err);
locals.user = user;
callback();
});
},
//Load posts Data
function(callback) {
mongoOp.Post.find({},function(err,posts){
if (err) return callback(err);
locals.posts = posts;
callback();
});
}
], function(err) { //This function gets called after the two tasks have called their "task callbacks"
if (err) return next(err); //If an error occurred, we let express handle it by calling the `next` function
//Here `locals` will be an object with `user` and `posts` keys
//Example: `locals = {user: ..., posts: [...]}`
res.render('index.ejs', {userdata: locals.user,postdata: locals.posts})
});
});
Given the async nature of Mongoose (or Sequelize, or Redis) queries, what do you do when you have multiple queries to make before rendering the view?
For instance, you have a user_id in a session, and want to retrieve some info about that particular user via findOne. But you also want to display a list of recently logged in users.
exports.index = function (req, res) {
var current_user = null
Player.find({last_logged_in : today()}).exec(function(err, players) {
if (err) return res.render('500');
if (req.session.user_id) {
Player.findOne({_id : req.session.user_id}).exec(function(err, player) {
if (err) return;
if (player) {
current_user = player
}
})
}
// here, current_user isn't populated until the callback fires
res.render('game/index', { title: 'Battle!',
players: players,
game_is_full: (players.length >= 6),
current_user: current_user
});
});
};
So res.render is in the first query callback, fine. But what about waiting on the response from findOne to see if we know this user? It is only called conditionally, so I can't put render inside the inner callback, unless I duplicate it for either condition. Not pretty.
I can think of some workarounds -
make it really async and use AJAX on the client side to get the current user's profile. But this seems like more work than it's worth.
use Q and promises to wait on the resolution of the findOne query before rendering. But in a way, this would be like forcing blocking to make the response wait on my operation. Doesn't seem right.
use a middleware function to get the current user info. This seems cleaner, makes the query reusable. However I'm not sure how to go about it or if it would still manifest the same problem.
Of course, in a more extreme case, if you have a dozen queries to make, things might get ugly. So, what is the usual pattern given this type of requirement?
Yep, this is a particularly annoying case in async code. What you can do is to put the code you'd have to duplicate into a local function to keep it DRY:
exports.index = function (req, res) {
var current_user = null
Player.find({last_logged_in : today()}).exec(function(err, players) {
if (err) return res.render('500');
function render() {
res.render('game/index', { title: 'Battle!',
players: players,
game_is_full: (players.length >= 6),
current_user: current_user
});
}
if (req.session.user_id) {
Player.findOne({_id : req.session.user_id}).exec(function(err, player) {
if (err) return;
if (player) {
current_user = player
}
render();
})
} else {
render();
}
});
};
However, looking at what you're doing here, you'll probably need to look up the current player information in multiple request handlers, so in that case you're better off using middleware.
Something like:
exports.loadUser = function (req, res, next) {
if (req.session.user_id) {
Player.findOne({_id : req.session.user_id}).exec(function(err, player) {
if (err) return next(err);
if (player) {
req.player = player
}
next();
})
} else {
next();
}
}
Then you'd configure your routes to call loadUser wherever you need req.player populated and the route handler can just pull the player details right from there.
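For example, assuming both handlers are exported from a routes/game.js module (the path and route are just illustrative), you could mount the middleware on the routes that need it:
var game = require('./routes/game');
// run loadUser before the handler so req.player is already populated in index
app.get('/game', game.loadUser, game.index);
Inside index you can then read req.player instead of running the findOne query again.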
router.get("/",function(req,res){
var locals = {};
var userId = req.params.userId;
async.parallel([
//Load user Data
function(callback) {
mongoOp.User.find({},function(err,user){
if (err) return callback(err);
locals.user = user;
callback();
});
},
//Load posts Data
function(callback) {
mongoOp.Post.find({},function(err,posts){
if (err) return callback(err);
locals.posts = posts;
callback();
});
}
], function(err) { //This function gets called after the two tasks have called their "task callbacks"
if (err) return next(err); //If an error occurred, we let express handle it by calling the `next` function
//Here `locals` will be an object with `user` and `posts` keys
//Example: `locals = {user: ..., posts: [...]}`
res.render('index.ejs', {userdata: locals.user,postdata: locals.posts})
});
Nowadays you can use app.param in ExpressJS to easily establish middleware that loads needed data based on the name of parameters in the request URL.
http://expressjs.com/4x/api.html#app.param
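For instance, a minimal sketch of loading a player whenever a route path contains a :user_id parameter (the parameter, route, and template names are just examples):
// runs once for any matched route whose path contains :user_id
app.param('user_id', function (req, res, next, id) {
    Player.findOne({ _id: id }).exec(function (err, player) {
        if (err) return next(err);
        req.player = player;
        next();
    });
});

app.get('/players/:user_id', function (req, res) {
    // req.player was populated by the app.param handler above
    res.render('player', { player: req.player });
});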