Using mongoose middleware to add async virtuals - node.js

In a node.js / Mongoose project, I have a schema which contains references to external image files.
var PageSchema = new Schema({
title: String
, media: {
digest: String
, name: String
}
});
Those files have additional properties which are stored in the file itself: url, width, height, exif fields, etc. Those fields need to be populated before the model is sent to res.render().
For some fields, things are synchronous and a virtual just does the job:
PageSchema.virtual('media.url').get(function () {
return appPaths.fileUrl(this.media);
});
However, the width/height and exif fields require async calls. I thought of using middleware to populate them, but this does not seem to work:
PageSchema.post('init', function(next) {
var media = this.media;
var fileName = filedb.absoluteFilePath(media);
im.identify(fileName, function(err, features) {
if (err) {
media.width = 0;
media.height = 0;
} else {
media.width = features.width;
media.height = features.height;
}
next();
});
});
What am I doing wrong? Is there a common design pattern for solving this kind of problem? (Other than duplicating this information in the database itself?)

The real problem here is that mongoose currently seems to have a wonky implementation of post callbacks. While pre('init',function(next){ ... }); works as you expect, post('init',function(next){ ... }); does not actually get passed a next function. In fact, the post init callback does not receive any arguments whatsoever when it is called.
As such, I usually write a wrapper for my query callbacks to make a sort of DIY middleware:
var setAsyncVirtuals = function(callback){
  return function(err, docs){
    if (err) return callback(err);
    var i, done;
    i = done = docs.length;              // counter of outstanding identify() calls
    if (i > 0) {
      while (i--) {
        (function(i){                    // bind i to hold value for async call
          var filename = getFilename();  // resolve the file path for docs[i].media here
          im.identify(filename, function(err, features) {
            if (err) {
              docs[i].media.width = 0;
              docs[i].media.height = 0;
            } else {
              docs[i].media.width = features.width;
              docs[i].media.height = features.height;
            }
            done--;
            if (done <= 0) callback(null, docs);  // last identify() call finished
          });
        })(i);
      }
    } else {
      callback(null, docs);
    }
  };
};
then
Page.find({}, setAsyncVirtuals(function(err,docs){
res.send(docs); // these have media.width & media.height assigned
}));
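For comparison (not from the original answer), the same counter logic can be written with the async library's each helper; a rough sketch, reusing the im.identify and filedb helpers from the question:
var async = require('async');

var setAsyncVirtuals = function(callback){
  return function(err, docs){
    if (err) return callback(err);
    async.each(docs, function(doc, done){
      im.identify(filedb.absoluteFilePath(doc.media), function(err, features){
        doc.media.width  = err ? 0 : features.width;
        doc.media.height = err ? 0 : features.height;
        done();   // per-file errors are swallowed here, as in the original
      });
    }, function(err){
      callback(err, docs);
    });
  };
};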

Related

Assign keystonejs callback function data to array

I'm new to node.js and currently working on a project using the KeystoneJS CMS and MongoDB. I'm stuck getting data that involves multiple collections: because of the callback functions, I couldn't return an array with the relational data. My code is similar to this sample code.
var getAgenda = function(id, callback){
callback = callback || function(){};
if(id){
AgendaDay.model.find({summit:id}).exec(function (err, results3) {
var arr_agenda = [];
var arr_agenda_item = [];
for(var key3 in results3){
AgendaItem.model.find({agendaDay:results3[key3]._id}).exec(function (err, results2){
for(var key2 in results2){
arr_agenda_item.push(
{
item_id: results2[key2]._id,
item_name: results2[key2].name,
from_time: results2[key2].time_from,
to_time: results2[key2].time_to,
desc: results2[key2].description,
featured: results2[key2].featured,
}
);
}
arr_agenda.push(
{
name: results3[key3].name,
date: results3[key3].date,
description: results3[key3].description,
item_list:arr_agenda_item
}
);
return callback(arr_agenda);
});
}
});
}
}
exports.list = function (req, res) {
var mainarray = [];
Summit.model.find().exec(function (err, resultssummit) {
if (err) return res.json({ err: err });
if (!resultssummit) return res.json('not found');
Guest.model.find().exec(function (err, resultsguset) {
for(var key in resultssummit){
var agen_arr = [];
for(var i=0; i<resultssummit[key].guests.length; i++){
var sumid = resultssummit[key]._id;
//this is the function im trying get data and assign to mainarray
getAgenda(sumid, function(arr_agenda){
agen_arr = arr_agenda;
});
mainarray.push(
{
id: resultssummit[key]._id,
name: resultssummit[key].name,
agenda_data: agen_arr,
}
);
}
res.json({
summit: mainarray,
});
}
});
});
}
If anyone can help me out, that would be really great :)
You need to restructure this whole thing. You should not be running MongoDB queries inside a for loop and expecting their output at the end of the loop. Also, your response is sent inside a for loop; that won't work.
I'll tell you how to do it, but I cannot refactor all of that code for you.
Instead of putting MongoDB queries in a for loop, convert them into a single query: collect the _ids into one array and fire a single query with $in.
AgendaItem.model.find({agendaDay:{$in:ARRAY_OF_IDS}})
You need to do the same thing for AgendaDay.model.find({summit:id}) as well.
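A rough sketch of that restructuring (an illustration only, using an error-first callback and the same Keystone models): fetch the days once, fetch all of their items in a single $in query, then group the items back onto their days in memory:
AgendaDay.model.find({ summit: id }).exec(function (err, days) {
  if (err) return callback(err);
  var dayIds = days.map(function (d) { return d._id; });
  AgendaItem.model.find({ agendaDay: { $in: dayIds } }).exec(function (err, items) {
    if (err) return callback(err);
    var arr_agenda = days.map(function (day) {
      return {
        name: day.name,
        date: day.date,
        description: day.description,
        item_list: items.filter(function (item) {
          return String(item.agendaDay) === String(day._id);  // match items back to their day
        })
      };
    });
    callback(null, arr_agenda);   // one callback, after both queries finish
  });
});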

Retrieve data from MongoDB and save it to global object in Node.js and Express.js

I'm trying to get data from a MongoDB collection and then save it to a global object. Later I need to pass it into an HTML template.
Here is my code:
When a user logs onto his profile, we need to get his projects, and here we call the findprojects() function:
usrRouter.route('/profile')
.all(function (req,res,next) {
if(!req.user){
res.redirect('/');
}
next();
})
.get(function (req,res,userObj) {
// var proj = findprojects();
userObj = req.user;
var pro = {};
pro = findprojects(userObj);
res.render('index',{name:userObj.username, email:userObj.email});
//res.sendFile('profile.html',{root:path.join(__dirname,'../public'),},{name:userObj.username});
});
Here is the findprojects() function code:
var findprojects = function(obj) {
var usern = obj.username;
mongodb.connect(url,function(err, db){
if(err) throw err;
var collection = db.collection('projects');
//console.log(usern);
collection.find({'pusername':usern});
cursor =db.collection('projects').find({ 'pusername': usern }).toArray(function(err,items){
//console.log(items);
var i;
for(i=0; i<items.length;){
userProjects.createdBy = items[i].pusername;
userProjects.proName = items[i].projectName;
userProjects.proType = items[i].projectType;
userProjects.proDesc = items[i].projectDesc;
//return userProjects;
i = i+1;
}
});
console.log(userProjects);
});
};
I have declared a global object at the top like:
userProjects = {
createdBy:'',
proName:'',
proType:'',
proDesc:''
};
But when I console.log the userProjects object after calling the findprojects() function, it displays empty values.
Why don't you use Mongoose to model your data?
It's more intuitive, and you don't need to declare the global object or do the mapping in that for loop.
Also, your approach is a bit wrong: when you iterate, aren't you overwriting?
Say you have two documents where pusername is abdul.
In that case you lose the first object, which gets overwritten by the second one.
I see that you commented out a return statement, but even that won't work properly.
From a design point of view your approach is not efficient.
In Mongoose you can do:
var userProjectSchema = new mongoose.Schema({
  createdBy: { type: String }
  , proName: String
  , proType: String
  , proDesc: String
});

// Compile the schema into a model; queries run on the model, not the schema.
var UserProject = mongoose.model('UserProject', userProjectSchema);

// Find a single document by username.
UserProject.findOne({ pusername: 'abdul' }, function(err, resDoc) {
  if (err) return console.error(err);
  // do your html stuff here
});

// Find all documents.
UserProject.find(function(err, results) {
  if (err) return console.error(err);
  // do your html stuff here
});
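Since the query is asynchronous, the route handler then has to wait for the query callback before rendering; a minimal sketch of how that could be wired up (using the UserProject model from the snippet above, and assuming the template simply receives the documents as a projects variable):
usrRouter.route('/profile')
  .get(function (req, res) {
    UserProject.find({ pusername: req.user.username }, function (err, projects) {
      if (err) return res.status(500).send(err);
      res.render('index', {
        name: req.user.username,
        email: req.user.email,
        projects: projects     // hand the documents straight to the template
      });
    });
  });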

How to handle callbacks in a for loop(Node.JS)

I am trying to write code with NodeJS where I grab data from an external API and then populate it in MongoDB using Mongoose. In between, I check whether that particular report already exists in Mongo or not. Below is my code.
router.route('/report') // the REST api address
.post(function(req, res) // calling a POST
{
console.log('calling report API');
var object = "report/" + reportID; // related to the API
var parameters = '&limit=100' // related to the API
var url = link + object + apiKey + parameters; // related to the API
var data = "";
https.get(url, function callback(response)
{
response.setEncoding("utf8");
response.on("data", function(chunk)
{
data += chunk.toString() + "";
});
response.on("end", function()
{
var jsonData = JSON.parse(data);
var array = jsonData['results']; // data is return in array of objects. accessing only a particular array
var length = array.length;
console.log(length);
for (var i = 0; i < length; i++)
{
var report = new Report(array.pop()); // Report is the schema model defined.
console.log('^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^');
console.log(i);
console.log('*****************************');
console.log(report);
console.log('*****************************');
// console.log(report['id']);
/*report.save(function(err)
{
if(err)
res.send(err);
});*/
Report.find({id:report['id']}).count(function(err, count) // checks if the id of that specific data already exists in Mongo
{
console.log(count);
console.log('*****************************');
if (count == 0) // if the count = 0, meaning not exist, then only save
{
report.save(function(err)
{
console.log('saved');
if(err)
res.send(err);
});
}
});
};
res.json({
message: 'Grabbed Report'
});
});
response.on("error", console.error);
});
})
My problem is that since NodeJS callbacks run asynchronously, they are not called sequentially. My end result is something like this:
Calling report API
console.log(length) = 100
^^^^^^^^^^^^^^^^^^^^^^^^
console.log(i) = starts with 0
*******************************
console.log(report) = the data which will be stored inside Mongo
*******************************
number 3 - 7 repeats 100 times as the length is equals to 100
console.log(count) = either 0 or 1
number 9 repeats 100 times
console.log('saved')
number 11 repeats 100 times
Lastly, only the last out of 100 data is stored into Mongo
What I need is some technique or method to handle these callbacks so that they execute one after the other, following the loop sequentially.
I have looked into async methods, promises, recursive functions and a couple of others, none of which I could really understand well enough to solve this problem. I really hope someone can shed some light on this matter.
Feel free also to correct me if I did any mistakes in the way I'm asking the question. This is my first question posted in StackOverflow.
This problem is termed "callback hell".
There are lots of other approaches you'll find, like using Promise and async libraries.
I'm more excited about the native async/await that ES7 will bring, which you can actually start using today with the transpiler library Babel.
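For reference, here is a rough async/await sketch of the same flow (an illustration only, not the original code: fetchReportJson is a hypothetical helper that wraps the https.get call in a Promise and resolves with the parsed JSON body, and it assumes a Mongoose version whose queries and save() return promises; url and Report are the ones from the question):
async function grabReport(req, res) {
  try {
    var jsonData = await fetchReportJson(url);   // hypothetical Promise-based fetch
    var results = jsonData['results'];
    for (var i = 0; i < results.length; i++) {
      var report = new Report(results[i]);
      var count = await Report.find({ id: report['id'] }).count(); // wait for the check
      if (count === 0) {
        await report.save();                     // wait for the save before moving on
      }
    }
    res.json({ message: 'Grabbed Report' });
  } catch (err) {
    res.send(err);
  }
}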
But by far the simplest approach I've found is the following:
You take out the long callback functions and define them outside.
router.route('/report') // the REST api address
.post(calling_a_POST)
function calling_a_POST(req, res) {
...
var data = "";
https.get(url, function callback(response) {
...
response.on("end", response_on_end_callback); // --> take out
response.on("error", console.error);
});
}
function response_on_end_callback() { // <-- define here
...
for (var i = 0; i < length; i++) {
var report = new Report(array.pop());
...
Report.find({ id: report['id'] })
.count(Report_find_count_callback); // --> take out
};
res.json({
message: 'Grabbed Report'
});
}
function Report_find_count_callback(err, count) { // <-- define here
...
if (count == 0) {
report.save(function(err) { // !! report is undefined here
console.log('saved');
if (err)
res.send(err); // !! res is undefined here
});
}
}
A caveat is that you won't be able to access all the variables inside what used to be the callback,
because you've taken them out of the scope.
This could be solved with a "dependency injection" wrapper of sorts to pass the required variables.
router.route('/report') // the REST api address
.post(calling_a_POST)
function calling_a_POST(req, res) {
...
var data = "";
https.get(url, function callback(response) {
...
response.on("end", function(err, data){ // take these arguments
response_on_end(err, data, res); // plus the needed variables
});
response.on("error", console.error);
});
}
function response_on_end(err, data, res) { // and pass them to function defined outside
...
for (var i = 0; i < length; i++) {
var report = new Report(array.pop());
...
Report.find({ id: report['id'] })
.count(function(err, count){
Report_find_count(err, count, report, res); // same here
});
};
res.json({ // res is now available
message: 'Grabbed Report'
});
}
function Report_find_count(err, count, report, res) { // same here
...
if (count == 0) {
report.save(function(err) { // report is now available
console.log('saved');
if (err)
res.send(err); // res is now available
});
}
}
When I execute the response_on_end function, I am getting the undefined:1 unexpected token u error.
I am pretty much sure it has something to do with this line: var jsonData = JSON.parse(data)
My response_on_end is as below: var jsonData = JSON.parse(data); // problem here
I realize I made an error here:
function calling_a_POST(req, res) {
...
var data = "";
https.get(url, function callback(response) {
...
// response.on("end", function(err, data){
response.on("end", function(err){ // data shouldn't be here
response_on_end(err, data, res);
});
response.on("error", console.error);
});
}
Another problem I can foresee, which may not actually arise here but is still worth talking about anyway:
the data variable is a string, which is a primitive type, so unlike an object it is "passed by value".
It's better to wrap the variable in an object and pass the object, because objects in JavaScript are passed by reference.
function calling_a_POST(req, res) {
...
// var data = ""; //
var data_wrapper = {};
data_wrapper.data = ""; // wrap the accumulating string in an object
https.get(url, function callback(response) {
...
response.on("data", function(chunk){
data_wrapper.data += chunk.toString() + ""; // use the dot notation to reference
});
response.on("end", function(err){
response_on_end(err, data_wrapper, res); // and pass that object
});
response.on("error", console.error);
});
}
function response_on_end(err, data_wrapper, res) {
var data = data_wrapper.data; // later redefine the variable
...
for (var i = 0; i < length; i++) {
var report = new Report(array.pop());
...
You can use the async library for controlling your execution flow. It also has iterators for working with arrays.
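For example, a rough sketch of how that could look for the save loop above (not code from the original answers; it assumes array is the parsed results, Report is the same Mongoose model, and it runs where res is in scope):
var async = require('async');

async.eachSeries(array, function (item, done) {
  var report = new Report(item);
  Report.find({ id: report['id'] }).count(function (err, count) {
    if (err) return done(err);
    if (count > 0) return done();      // already in Mongo, skip it
    report.save(done);                 // save, then let eachSeries start the next item
  });
}, function (err) {
  if (err) return res.send(err);
  res.json({ message: 'Grabbed Report' });
});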

module.exports function not returning results

I'm having some problems returning results from a module function.
Below are two files that I'm working with.
When I call the exported function it returns nothing.
Any suggestions/fixes as to why? Does it have to do with callbacks?
models/index.js
module.exports = exports = function(library) {
modCodes.findOne({name: library}, {modcode:1}, function(err, mc) {
if (err) throw new Error(err);
var db = mongoose.createConnection('mongodb://localhost:27017/' + mc.modcode + '?safe=true');
var models = {
Books: db.model('books', require('./schemas/books'))
}
return models;
});
};
books.js
var Models = require('../models');
console.log(Models("myLibrary")); //return nothing
The reason you're getting no results is that you're trying to return a value synchronously from an asynchronous callback. Instead of handing that value back to the outer caller, the return statement simply stops the callback, as a bare return; would. This is why you must use a callback for asynchronous operations:
module.exports = exports = function(library, callback) {
modCodes.findOne({name: library}, {modcode: 1}, function (err, mc) {
if (err) throw new Error(err);
var db = mongoose.createConnection('mongodb://localhost:27017/' + mc.modcode + '?safe=true');
var models = {
Books: db.model('books', require('./schemas/books'))
}
callback(models);
});
};
And this is how you would be able to use it:
var Models = require('../models');
Models('myLibrary', function(models) {
console.log(models);
});
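As a side note (not part of the answer above), the usual Node convention is an error-first callback, so the caller can handle failures instead of the module throwing inside an async callback; a minimal variant along those lines:
module.exports = exports = function(library, callback) {
  modCodes.findOne({name: library}, {modcode: 1}, function (err, mc) {
    if (err) return callback(err);   // report the error instead of throwing
    var db = mongoose.createConnection('mongodb://localhost:27017/' + mc.modcode + '?safe=true');
    callback(null, { Books: db.model('books', require('./schemas/books')) });
  });
};

// usage
Models('myLibrary', function(err, models) {
  if (err) return console.error(err);
  console.log(models);
});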
I solved a similar problem in a different way; I am not sure whether it is the right way.
In my main Node.js file I am using a model named product, and I pass product and res to misc.js. The following is part of my server.js file:
var misc = require('./misc');
app.get('/groupbyCategory', function(req,res,next)
{
var res2;
misc.addX(product,res);
})
In misc.js I do the group-by and send the result straight back to the Angular controller. It is not necessary to return the result to server.js and then from server.js to the Angular controller, so the extra waiting and callbacks seem unnecessary to me.
Inside misc.js I keep the following code:
exports.addX = function(product,res) {
product.aggregate([
{ $group: {
_id: {category: "$category"},
count: { $sum: 1 }
}}
], function (err, result) {
if (err) {
console.log(err);
return err;
}
else
{
//return result;
console.log(result);
res.send(result);
}
});
};

nested loops asynchronously in Node.js, next loop must start only after one gets completed

Check the algorithm below...
users = getAllUsers();
for(i=0;i<users.length;i++)
{
contacts = getContactsOfUser(users[i].userId);
contactsLength = contacts.length;
for(j=0;j<contactsLength;j++)
{
phones = getPhonesOfContacts(contacts[j].contactId);
contacts[j].phones = phones;
}
users[i].contacts = contacts;
}
return users;
I want to implement this same logic in node.js.
I have tried using async with the forEach, concat and forEachSeries functions, but all fail at the second level.
While the contacts of one user are being fetched, the value of i increases and the process starts for the next users.
It does not wait for the contacts & phones of one user to finish before starting the next user, which is what I want to achieve. Essentially I want the users object to end up properly populated; right now all the sequences get ruined. Can anyone give me a general idea of how to achieve such a sequential process? I am open to changing my algorithm as well.
In node.js you need to do this the asynchronous way. Your code should look something like:
var processUsers = function(callback) {
  getAllUsers(function(err, users) {
    async.forEach(users, function(user, callback) {
      getContactsOfUser(user.userId, function(err, contacts) {
        async.forEach(contacts, function(contact, callback) {
          getPhonesOfContacts(contact.contactId, function(err, phones) {
            contact.phones = phones;
            callback();
          });
        }, function(err) {
          // All contacts are processed
          user.contacts = contacts;
          callback();
        });
      });
    }, function(err) {
      // All users are processed
      // Here the finished result
      callback(undefined, users);
    });
  });
};
processUsers(function(err, users) {
// users here
});
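One note on the sketch above: async.forEach starts all users in parallel. If the users really must be processed strictly one after another, as the title asks, async.eachSeries is a drop-in replacement with the same signature, e.g.:
async.eachSeries(users, function(user, callback) {
  // same body as the forEach above: fetch contacts, then phones, then callback()
}, function(err) {
  callback(undefined, users);
});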
You could try this method without using async:
function getAllUserContacts(users, callback){
  var index = 0;
  var results = [];
  var getUserContacts = function(){
    getContactsOfUser(users[index].userId, function(contacts){
      var index2 = 0;
      var getContactsPhones = function(){
        getPhonesOfContacts(contacts[index2].contactId, function(phones){
          contacts[index2].phones = phones;
          if(index2 === (contacts.length - 1)){
            users[index].contacts = contacts;
            if(index === (users.length - 1)){
              callback(users);
            } else {
              index++;
              getUserContacts();
            }
          } else {
            index2++;
            getContactsPhones();
          }
        });
      };
      getContactsPhones();
    });
  };
  getUserContacts();
}
//calling the function
getAllUsers(function(users){
getAllUserContacts(users, function(usersWithContacts){
console.log(usersWithContacts);
})
})
//Asynchronous nested loop
async.eachSeries(allContact, function(item, cb){
  async.eachSeries(item, function(secondItem, secondCb){
    console.log(secondItem);
    return secondCb();
  }, function(err){
    // inner loop finished for this item; move on to the next one
    return cb(err);
  });
}, function(){
  console.log('after all process message');
});
