I have a site where I need to show some data from my Mongo database. My problem, however, is that I need data from two collections, and the collections are completely separate and have nothing to do with each other.
Right now I have this in my routes for my profile page:
router.get('/profile', function(req, res, next) {
  var resultArray = [];
  mongo.connect(url, function(err, db) {
    var cursor = db.collection('users').find();
    cursor.forEach(function(doc, err) {
      resultArray.push(doc);
    }, function() {
      db.close();
      res.render('profile/index', {users: resultArray});
    });
  });
});
And this, of course, works perfectly fine. But how do I get a second db.collection('colors').find() passed along to my template too?
I'm sure it's something trivial and I just don't quite have the full grasp of things yet, but I'm stuck.
Use the async library, which is well suited for this scenario. When you need to run multiple tasks that do not depend on each other, and then do something else once they all finish, you should use the async.parallel() method. The signature is async.parallel(tasks, callback), where tasks is an array of functions.
It immediately runs all the functions in parallel, waits for all of them to call their task callback, and finally, when all tasks are complete, it runs callback (the final callback).
The following example demonstrates how this could be adapted for your use case:
router.get('/profile', function(req, res, next) {
  mongo.connect(url, function(err, db) {
    var locals = {};
    var tasks = [
      // Load users
      function(callback) {
        db.collection('users').find({}).toArray(function(err, users) {
          if (err) return callback(err);
          locals.users = users;
          callback();
        });
      },
      // Load colors
      function(callback) {
        db.collection('colors').find({}).toArray(function(err, colors) {
          if (err) return callback(err);
          locals.colors = colors;
          callback();
        });
      }
    ];
    async.parallel(tasks, function(err) { // This function gets called after the two tasks have called their "task callbacks"
      if (err) return next(err); // If an error occurred, let Express handle it by calling the `next` function
      // Here `locals` will be an object with `users` and `colors` keys
      // Example: `locals = {users: [...], colors: [...]}`
      db.close();
      res.render('profile/index', locals);
    });
  });
});
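A small variation, sketched here for completeness rather than taken from the code above: async.parallel() also accepts an object of named tasks, in which case the results are collected into a single object keyed by task name, so the intermediate locals object isn't needed.
// Sketch only: same route, using async.parallel with named tasks.
// Each task passes its result to its callback; async gathers them into `results`.
router.get('/profile', function(req, res, next) {
  mongo.connect(url, function(err, db) {
    if (err) return next(err);
    async.parallel({
      users: function(callback) {
        db.collection('users').find({}).toArray(callback);
      },
      colors: function(callback) {
        db.collection('colors').find({}).toArray(callback);
      }
    }, function(err, results) {
      db.close();
      if (err) return next(err);
      // results = {users: [...], colors: [...]}
      res.render('profile/index', results);
    });
  });
});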
Try this code:
router.get('/profile', function(req, res, next) {
  var resultArray = {
    users: [],
    colors: []
  };
  mongo.connect(url, function(err, db) {
    var cursor = db.collection('users').find();
    cursor.forEach(function(doc) {
      resultArray.users.push(doc);
    }, function() {
      // Only query the second collection once the first cursor is exhausted
      var colors = db.collection('colors').find();
      colors.forEach(function(doc) {
        resultArray.colors.push(doc);
      }, function() {
        db.close();
        res.render('profile/index', {users: resultArray.users, colors: resultArray.colors});
      });
    });
  });
});
Didn't have time to check it, but I'm pretty sure that it would work.
Related
I am using the Express framework and I have the following in one of my route files:
var allUsersFromDynamoDb = function (req, res) {
  var dynamodbDoc = new AWS.DynamoDB.DocumentClient();
  var params = {
    TableName: "users",
    ProjectionExpression: "username,loc,age"
  };
  dynamodbDoc.scan(params, function (err, data) {
    if (err) {
      console.error("Unable to query. Error:", JSON.stringify(err));
      res.statusCode = 500;
      res.send("Internal Server Error");
    } else {
      console.log("DynamoDB Query succeeded.");
      res.end(JSON.stringify(data.Items));
    }
  });
};
I am using the above function in one of my routes:
router.get('/users', allUsersFromDynamoDb);
Now the callback that I am defining while making the call to "scan" on dynamodbDoc could be pretty useful if defined as a separate function. I could reuse it for some of my other routes as well.
But how can I still get access to "res" inside this new function?
I think I should be using a "closure" but I can't seem to get it exactly right. I think I would need to keep the signature of the new callback function expecting two params, "err" and "data", as per the following page:
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/DynamoDB/DocumentClient.html#scan-property
Any ideas on how this can be done?
You can use that function as middleware for every route you want: http://expressjs.com/en/guide/using-middleware.html
The new route with the middleware:
var middlewares = require('./middlewares'),
controllers = require('./controllers');
router.get('/users', middlewares.allUsersFromDynamoDb, controllers.theRouteController);
The middleware (middlewares.js) where you pass your data to req so you can use that data everywhere you have req:
exports.allUsersFromDynamoDb = function (req, res, next) {
  var dynamodbDoc = new AWS.DynamoDB.DocumentClient();
  var params = {
    TableName: "users",
    ProjectionExpression: "username,loc,age"
  };
  dynamodbDoc.scan(params, function (err, data) {
    if (err) {
      console.error("Unable to query. Error:", JSON.stringify(err));
      next("Internal Server Error");
    } else {
      console.log("DynamoDB Query succeeded.");
      req.dataScan = JSON.stringify(data.Items);
      next();
    }
  });
};
And finally the controller (controllers.js):
exports.theRouteController = function (req, res) {
  // Here is the dataScan you defined in the middleware
  res.jsonp(req.dataScan);
};
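One small note, my addition rather than part of the answer above: req.dataScan holds a string after JSON.stringify, so res.jsonp will encode it a second time. If the controller should return the items as plain JSON, it may be cleaner to store the raw array in the middleware and let Express serialize it once:
// Variation on the middleware above: keep the items as a plain array...
req.dataScan = data.Items;
next();

// ...so the controller serializes them exactly once:
exports.theRouteController = function (req, res) {
  res.jsonp(req.dataScan);
};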
Based on Michelem's answer here, I tried something which makes things a bit cleaner and the code more reusable:
var allUsersFromDynamoDb = function (req, res, next) {
  var dynamodbDoc = new AWS.DynamoDB.DocumentClient();
  var params = {
    TableName: "users",
    ProjectionExpression: "username,loc,age"
  };
  dynamodbDoc.scan(params, function (err, data) {
    req.err = err;
    req.data = data;
    next();
  });
};
Now I declare another function:
var processUserResults = function (req, res, next) {
  if (req.err) {
    console.error("Unable to query. Error:", JSON.stringify(req.err));
    res.statusCode = 500;
    res.send("Internal Server Error");
  } else {
    console.log("DynamoDB Query succeeded.");
    res.end(JSON.stringify(req.data.Items));
  }
};
And finally this:
router.get('/users', [allUsersFromDynamoDb, processUserResults]);
All I need to do in the original "function(err, data)" callback is always set two values, req.err = err and req.data = data, and then call next(). processUserResults can similarly be reused for other routes.
Still curious to find out if there are any other efficient solutions.
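For completeness, here is a closure-based variant along the lines the question hinted at. This is just a sketch (the makeScanRoute name is hypothetical, not from the code above): a factory that builds a route handler around any scan params, so req and res stay in scope and the response handling is shared.
// Sketch of the closure idea from the question: a factory that builds a
// route handler around any DynamoDB scan params.
var makeScanRoute = function (params) {
  return function (req, res) {
    var dynamodbDoc = new AWS.DynamoDB.DocumentClient();
    dynamodbDoc.scan(params, function (err, data) {
      if (err) {
        console.error("Unable to query. Error:", JSON.stringify(err));
        res.statusCode = 500;
        return res.send("Internal Server Error");
      }
      console.log("DynamoDB Query succeeded.");
      res.end(JSON.stringify(data.Items));
    });
  };
};

// Usage: each route gets its own params; the scan and response handling are shared.
router.get('/users', makeScanRoute({
  TableName: "users",
  ProjectionExpression: "username,loc,age"
}));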
I have the following Node.js code:
app.get('/dashboard', function(req, res) {
  db.collection('com_url_mamy').find().toArray(function(err, doc) {
    db.collection('com_url_mamy').find({'price':''}).count(function(err, docs) {
      db.collection('com_url_mamy').find({"price":{$not:{$ne:"last_price_1"}}}).count(function(err, last_doc) {
        if (err) { console.log(err); }
        console.log(docs);
        res.render('dashboard', {'doc': doc, 'docs': docs, 'last_doc': last_doc});
      });
    });
  });
});
Here I have to add two or three more queries/callbacks.
But I don't think this is the right way to do it.
Can anyone please tell me how I can solve this problem and improve performance?
Thank You
Using async/await is an appropriate solution here, as it avoids callback hell. Consider running your queries as follows:
app.get('/user/:name', async (req, res, next) => {
  try {
    const docs = await db.collection('com_url_mamy').find().toArray()
    const count = await db.collection('com_url_mamy').find({'price':''}).count()
    const last_doc = await db.collection('com_url_mamy').find({"price": "last_price_1"}).count()
    res.render('dashboard', { docs, count, last_doc })
  } catch (err) {
    return next(err)
  }
})
As an alternative, you can use the async library, especially the async.parallel() method, when you need to run multiple tasks that do not depend on each other and then do something else when they all finish.
Consider the following example:
app.get('/user/:name', function(req, res, next) {
  var locals = {};
  async.parallel([
    // Load all documents
    function(callback) {
      db.collection('com_url_mamy').find().toArray(function(err, docs) {
        if (err) return callback(err);
        locals.docs = docs;
        callback();
      });
    },
    // Get count of documents where price is empty
    function(callback) {
      db.collection('com_url_mamy').find({'price':''}).count(function(err, count) {
        if (err) return callback(err);
        locals.count = count;
        callback();
      });
    },
    // Load last docs
    function(callback) {
      db.collection('com_url_mamy').find({"price": "last_price_1"}).count(function(err, docs) {
        if (err) return callback(err);
        locals.last_doc = docs;
        callback();
      });
    }
  ], function(err) { // This function gets called after the three tasks have called their "task callbacks"
    if (err) return next(err);
    // Here render the dashboard with the locals object
    res.render('dashboard', locals);
  });
});
You can use native Promises with the MongoDB driver (on Node.js >= 0.12):
app.get('/dashboard', function(req, res) {
  var proms = [];
  proms.push(db.collection('com_url_mamy').find().toArray());
  proms.push(db.collection('com_url_mamy').find({'price':''}).count());
  proms.push(db.collection('com_url_mamy').find({"price": "last_price_1"}).count());
  Promise.all(proms)
    .then(function(pres) {
      res.render('dashboard', {'doc': pres[0], 'docs': pres[1], 'last_doc': pres[2]});
    })
    .catch(console.error);
});
Promise.all takes the promises you give it and waits for all of them to resolve; the queries themselves run in parallel as soon as they are created.
The Promise.all(iterable) method returns a promise that resolves when all of the promises in the iterable argument have resolved
Source:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
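As a variation on the snippet above (my addition, not part of the original code): if the route handler also accepts Express's next, the catch handler can forward query errors to the error-handling middleware instead of only logging them.
// Sketch: forward query failures to Express instead of swallowing them.
app.get('/dashboard', function(req, res, next) {
  Promise.all([
    db.collection('com_url_mamy').find().toArray(),
    db.collection('com_url_mamy').find({'price': ''}).count(),
    db.collection('com_url_mamy').find({"price": "last_price_1"}).count()
  ])
  .then(function(pres) {
    res.render('dashboard', {'doc': pres[0], 'docs': pres[1], 'last_doc': pres[2]});
  })
  .catch(next); // Let the Express error handler render the failure
});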
(BTW I think you should rename your question with something more like 'Improve independent nested async calls' to avoid the closing/duplicate issue you had.)
I've got the following code now:
exports.listByUser = function(req, res) {
  Attack.find({user: req.user._id}, function(err, attacks) {
    if (err)
      return next(err);
    for (var i in attacks) {
      attacks[i].evaluateFight();
    }
    res.json(attacks);
  });
};
The main problem is that attacks[i].evaluateFight() is called asynchronously. I want to transform it to make sure that iteration [i-1] is done ... and finally call res.json(attacks). I think it can be done with async, but I don't know how :( Something like this should work, but how can I call the attacks method?
async.eachSeries(attacks, function (callback) {
  // something??
  callback();
}, function (err) {
  if (err) { throw err; }
  res.json(attacks);
});
You can leverage the async whilst method to implement this. However, I have a question about the callback of evaluateFight: if it is executed asynchronously, then there has to be some callback associated with it which notifies when the previous call has succeeded.
The example code can be as follows, assuming evaluateFight invokes a callback when completed:
exports.listByUser = function(req, res) {
  Attack.find({user: req.user._id}, function(err, attacks) {
    if (err)
      return next(err);
    var attacksLength = attacks.length;
    var count = 0;
    async.whilst(function () {
      return count < attacksLength;
    },
    function (callback) {
      attacks[count].evaluateFight(function(err, result) {
        count++;
        callback();
      }); // assuming it invokes a callback on success
    },
    function (err) {
      // all the iterations have been successfully called
      // return the response
      res.json(attacks);
    });
  });
};
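Since the question mentioned async.eachSeries, here is a sketch of how that would look as well, under the same assumption that evaluateFight accepts a node-style completion callback. It would replace the async.whilst block inside the Attack.find callback above.
// Sketch: each attack is evaluated one at a time; the final callback
// runs only after every evaluateFight has called back.
async.eachSeries(attacks, function (attack, callback) {
  attack.evaluateFight(callback); // assuming evaluateFight(cb) calls cb(err)
}, function (err) {
  if (err) return res.status(500).json({ error: err.message });
  res.json(attacks);
});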
I have a script with REST APIs that get data from a PostgreSQL database and return it to the client. At the start, the script only uses about 7 MB of memory and the response time when making queries is very fast. However, as time passes (about 1 day), the memory used by the script balloons to 170 MB, and queries take more than 1 minute to respond. But when I restart the script, it is fast again. I am clueless as to why this happens. Can anybody shed light on this? Here is a portion of what my script looks like:
var port = process.env.PORT || 8000;
var router = express.Router();

router.get('/:id/from/:prevdate', function (req, res) {
  var results = [];
  var id = req.params.id;
  var prevdate = req.params.prevdate;
  pg.connect(connectionString, function (err, client, done) {
    var query = client.query("some sql statement here", [id, prevdate]);
    query.on('row', function (row) {
      results.push(row);
    });
    query.on('end', function () {
      client.end();
      return res.json(results);
    });
    if (err) {
      console.log(err);
    }
  });
});

router.get('/:id/getdata', function (req, res) {
  var results = [];
  var id = req.params.id;
  pg.connect(connectionString, function (err, client, done) {
    var query = client.query("some sql statement here", [id]);
    query.on('row', function (row) {
      results.push(row);
    });
    query.on('end', function () {
      client.end();
      return res.json(results);
    });
    if (err) {
      console.log(err);
    }
  });
});

app.use('/restapitest', router);
app.listen(port);
console.log('Webservice started using port: ' + port);
You are mixing connection pooling (which uses done()) with creating single connections (which uses client.end()).
Try this:
query.on('end', function() {
  done();
  return res.json(results);
});
Also, since you are storing all results in memory anyway, there's no need to use events. So with proper error and connection handling, you could use this:
pg.connect(connectionString, function (err, client, done) {
  var sendError = function (err) {
    console.log(err);
    return res.sendStatus(500);
  };
  if (err) return sendError(err);
  client.query("some sql statement here", [id, prevdate], function (err, result) {
    // Done with the client: return it to the pool.
    done();
    // Handle any errors.
    if (err) return sendError(err);
    // Return the rows, matching what the event-based version collected.
    return res.json(result.rows);
  });
});
I have a collection of posts and a collection of users. When returning the list of posts, I want to resolve the references to users. This means making an async call for every post to look up its user. When monk returns a promise, it returns something that responds to "complete" or "success". Q expects something responding to "then". I need to use Q.all to wait for all the users to be fetched into the posts, but I can't make it play well with monk's promise style.
Here is my attempt.
exports.posts = function (req, res) {
  req.posts.find()
    .complete(function(err, posts) {
      handle(err, res, posts);
      var postsWithUsers = posts.map(function(post) {
        return req.users.findOne({_id: post.userId}).complete(function(err, result) {
          post.user = result;
        });
      });
      Q.all(postsWithUsers.map(function(monkPromise) {
        monkPromise.then = monkPromise.complete
      }), function(err, results) {
        console.log("done with all posts");
      });
    });
};
Just for everyone else out there: this is one solution, perhaps not the best.
exports.posts = function (req, res) {
  req.posts.find()
    .complete(function(err, posts) {
      handle(err, res, posts);
      var postsWithUsers = posts.map(function(post) {
        var deferred = Q.defer();
        req.users.findOne({_id: post.userId}).complete(function(err, result) {
          post.user = result;
          deferred.resolve(result);
        });
        return deferred.promise;
      });
      Q.all(postsWithUsers).then(function(results) {
        console.log("done with all posts");
        // every post now has its user attached, so respond here
        res.json(posts);
      });
    });
};
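A slightly tidier variant, sketched only and assuming monk's promise exposes complete(err, result) as above: wrap the adaptation in a small helper (the toQ name is hypothetical) so every monk call is converted to a Q promise the same way.
// Hypothetical helper: turn a monk-style promise (with .complete) into a Q promise.
function toQ(monkPromise) {
  var deferred = Q.defer();
  monkPromise.complete(function (err, result) {
    if (err) return deferred.reject(err);
    deferred.resolve(result);
  });
  return deferred.promise;
}

exports.posts = function (req, res) {
  toQ(req.posts.find()).then(function (posts) {
    // Resolve every post's user, then respond once all lookups are done.
    return Q.all(posts.map(function (post) {
      return toQ(req.users.findOne({_id: post.userId})).then(function (user) {
        post.user = user;
      });
    })).then(function () {
      res.json(posts);
    });
  }).catch(function (err) {
    console.error(err);
    res.status(500).end();
  });
};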