How to handle callbacks in Node.js?

Let's say I have 3 files.
index.js makes a call to the backend like this:
$.post('/test/', data, function(response) {
    // handle success here
});
routes.js handles the route like this:
app.post('/test/', function(req, res) {
    item.getItems(function(response) {
        res.json(response);
    });
});
items.js is the model which accesses the database and makes a POST request for each item:
function getItems(callback) {
    database.query('SELECT * from items', function(result) {
        result.forEach(function(item) {
            request.post('/api/', item, function(req, res) {
                // finished posting item
            });
        });
    });
    // the callback here doesn't wait for the POST calls to finish
}
Where/when should I call the callback passed to getItems() to handle a success/failure in index.js?

Because your request.post() operations are asynchronous, you have to keep track of when they have all completed before calling your callback. There are multiple ways of doing that; I'll outline a few:
Manually Keeping Track of Count of Request Operations
function getItems(callback) {
    database.query('SELECT * from items', function(result) {
        var remaining = result.length;
        result.forEach(function(item) {
            request.post('/api/', item, function(err, res) {
                --remaining;
                // finished posting item
                if (remaining === 0) {
                    callback();
                }
            });
        });
    });
}
The main problem with doing this manually is that propagating errors from nested async operations gets awkward once you actually have to decide how to handle them. This is much easier with the other methods shown here.
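For example, here is a minimal sketch of the same counting approach that also reports the first error (assuming request.post() follows the usual (err, res) callback convention, as in the snippet above):
function getItems(callback) {
    database.query('SELECT * from items', function(result) {
        var remaining = result.length;
        var failed = false;
        result.forEach(function(item) {
            request.post('/api/', item, function(err, res) {
                if (failed) return;            // an error was already reported
                if (err) {
                    failed = true;             // report only the first error
                    return callback(err);
                }
                if (--remaining === 0) {
                    callback(null);            // all posts finished successfully
                }
            });
        });
    });
}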
Using Promises
// load Bluebird promise library
var Promise = require('bluebird');
// promisify async operations
Promise.promisifyAll(request);

function queryAsync(query) {
    return new Promise(function(resolve, reject) {
        // this needs proper error handling from the database query
        database.query(query, function(result) {
            resolve(result);
        });
    });
}

function getItems() {
    return queryAsync('SELECT * from items').then(function(result) {
        return Promise.map(result, function(item) {
            return request.postAsync('/api/', item);
        });
    });
}

getItems().then(function(results) {
    // success here
}, function(err) {
    // error here
});
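If you'd rather not add Bluebird, here is a rough equivalent (an untested sketch) using only native Promises, with Promise.all in place of Promise.map:
function getItems() {
    return queryAsync('SELECT * from items').then(function(result) {
        // start every POST, then wait for all of them to finish
        return Promise.all(result.map(function(item) {
            return new Promise(function(resolve, reject) {
                request.post('/api/', item, function(err, res) {
                    if (err) return reject(err);
                    resolve(res);
                });
            });
        }));
    });
}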

It seems strange that you're making an API request in your server-side code, unless this is some sort of middle-tier code that interacts with the API... but you're also interacting with a database, so I'm still confused about why you can't just do a database insert, or use a bulk-insert API call.
Anyway, if you must do it the way you're asking, I've done this in the past with a recursive method that trims down the result array... I really don't know if this is a good approach, so I'd like to hear any feedback. Something like this:
function recursiveResult(result, successfulResults, callback) {
    var item = result.shift();
    // if item is undefined, we've hit the end of the array, so we call the original callback
    if (item !== undefined) {
        console.log(item, result);
        // do the POST in here, and in its callback, call recursiveResult(result, successfulResults, callback);
        successfulResults.push(item);
        return recursiveResult(result, successfulResults, callback);
    }
    // make sure callback is defined, otherwise the server will crash
    else if (callback) {
        return callback(successfulResults);
    }
    else {
        // log error... callback undefined
    }
}
function getItems(callback) {
    var successfulResults = [];
    var result = [1, 2, 3, 4];
    recursiveResult(result, successfulResults, callback);
}
console.log('starting');
getItems(function(finalResult) {
    console.log('done', finalResult);
});
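To wire the actual POST into that structure, the recursive call moves inside the request callback; a rough sketch, reusing the request.post() call from the question:
function recursiveResult(result, successfulResults, callback) {
    var item = result.shift();
    if (item === undefined) {
        // end of the array: hand the accumulated results back
        return callback(successfulResults);
    }
    request.post('/api/', item, function(err, res) {
        if (!err) {
            successfulResults.push(item);
        }
        // move on to the next item only after this POST has finished
        recursiveResult(result, successfulResults, callback);
    });
}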

Related

NodeJS - Send multiple requests and process all responses in one callback

I'm trying to find a way to send multiple requests (using Express) and process all the responses in one function.
Here's my code:
// In router.js
app.get('/api/FIRST_PATH', CALLBACK_FUNCTION_A);
// In CALLBACK_FUNCTION_A file:
module.exports = function (req, response) {
    CALLBACK_FUNCTION_TO_SERVICE_A();
    CALLBACK_FUNCTION_TO_SERVICE_B();
    CALLBACK_FUNCTION_TO_SERVICE_C();
}
My problem is to send the requests CALLBACK_FUNCTION_TO_SERVICE_A, CALLBACK_FUNCTION_TO_SERVICE_B and CALLBACK_FUNCTION_TO_SERVICE_C and then retrieve all the results in another function to process them.
Any help would be greatly appreciated.
You can learn more about the newer JavaScript standard (ES2015) and use Promise.
// In CALLBACK_FUNCTION_A file:
module.exports = function (req, response) {
    var promises = [CALLBACK_FUNCTION_TO_SERVICE_A(),
                    CALLBACK_FUNCTION_TO_SERVICE_B(),
                    CALLBACK_FUNCTION_TO_SERVICE_C()];
    Promise.all(promises).then(function(results) {
        // results is an array
        // results[0] contains the result of A, and so on
    });
}
Of course CALLBACK_FUNCTION_TO_SERVICE_A() and the others need to return Promise objects. If you have a callback-style function like this:
function asyncFunction(callback) {
    //...
    callback(result);
}
You can create a Promise like this:
var p = new Promise(asyncFunction);
It'll start running the function immediately (the callback it receives is the promise's resolve function), and p supports the Promise interface.
So for example, either use request-promise or you can do something like:
function CALLBACK_FUNCTION_TO_SERVICE_A() {
    var worker = function(callback) {
        app.get('/api/FIRST_PATH', callback);
    };
    return new Promise(worker);
}
You can read more about Promise and how to easily handle errors as well.
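For instance, a minimal sketch of error handling with Promise.all, where the first rejection from any of the services sends you straight to the catch handler:
Promise.all([CALLBACK_FUNCTION_TO_SERVICE_A(),
             CALLBACK_FUNCTION_TO_SERVICE_B(),
             CALLBACK_FUNCTION_TO_SERVICE_C()])
    .then(function(results) {
        // all three services responded
    })
    .catch(function(err) {
        // the first rejection from any of the three lands here
    });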
You could use async.parallel. You can pass your API calls to async.parallel as an array or an object (the example below uses an array; an object-form sketch follows it).
async.parallel([
    function(done) {
        reqServiceA(..., function(err, response) {
            if (err) return done(err);
            done(null, response);
        });
    },
    function(done) {
        reqServiceB(..., function(err, response) {
            if (err) return done(err);
            done(null, response);
        });
    },
    ... // You can keep as many requests inside the array
], function(err, results) {
    // Will be called when all requests have returned
    // results is an array containing all responses, in the same order as the requests in the array
    // results[0] will have the response from requestA and so on
});
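As mentioned above, async.parallel also accepts an object of named tasks instead of an array, which makes the results easier to read; a sketch, with hypothetical reqServiceA/reqServiceB helpers that take only a callback:
async.parallel({
    a: function(done) {
        reqServiceA(function(err, response) {
            done(err, response);
        });
    },
    b: function(done) {
        reqServiceB(function(err, response) {
            done(err, response);
        });
    }
}, function(err, results) {
    // results.a and results.b hold the responses from A and B
});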

Use promises for multiple node requests

With the request library, is there a way to use promises to simplify this callback?
var context = {};
request.get({
    url: someURL,
}, function(err, response, body) {
    context.one = JSON.parse(body);
    request.get({
        url: anotherURL,
    }, function(err, response, body) {
        context.two = JSON.parse(body);
        // render page
        res.render('pages/myPage');
    });
});
Here's a solution using the Bluebird promises library. This serializes the two requests, accumulates the results in the context object, and rolls error handling up into one place:
var Promise = require("bluebird");
var request = Promise.promisifyAll(require("request"), {multiArgs: true});
var context = {};
request.getAsync(someURL).spread(function(response, body) {
context.one = JSON.parse(body);
return request.getAsync(anotherURL);
}).spread(response, body)
context.two = JSON.parse(body);
// render page
res.render('pages/myPage');
}).catch(function(err) {
// error here
});
And, if you have multiple URLs, you can use some of Bluebird's other features like Promise.map() to iterate an array of URLs:
var Promise = require("bluebird");
var request = Promise.promisifyAll(require("request"), {multiArgs: true});
var urlList = ["url1", "url2", "url3"];
Promise.map(urlList, function(url) {
return request.getAsync(url).spread(function(response,body) {
return [JSON.parse(body),url];
});
}).then(function(results) {
// results is an array of all the parsed bodies in order
}).catch(function(err) {
// handle error here
});
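If the URL list is large, Bluebird's Promise.map also accepts a concurrency option so you don't fire every request at once; for example:
Promise.map(urlList, function(url) {
    return request.getAsync(url).spread(function(response, body) {
        return JSON.parse(body);
    });
}, {concurrency: 5}).then(function(results) {
    // at most 5 requests were in flight at any one time
}).catch(function(err) {
    // handle error here
});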
Or, you could create a helper function to do this for you:
// pass an array of URLs
function getBodies(array) {
    return Promise.map(array, function(url) {
        return request.getAsync(url).spread(function(response, body) {
            return JSON.parse(body);
        });
    });
}

// sample usage of helper function
getBodies(["url1", "url2", "url3"]).then(function(results) {
    // process results array here
}).catch(function(err) {
    // process error here
});
Here is how I would implement chained Promises.
var request = require("request");
var someURL = 'http://ip.jsontest.com/';
var anotherURL = 'http://ip.jsontest.com/';
function combinePromises(context){
return Promise.all(
[someURL, anotherURL].map((url, i)=> {
return new Promise(function(resolve, reject){
try{
request.get({
url: url,
}, function(err, response, body) {
if(err){
reject(err);
}else{
context[i+1] = JSON.parse(body);
resolve(1); //you can send back anything you want here
}
});
}catch(error){
reject(error);
}
});
})
);
}
var context = {"1": "", "2": ""};
combinePromises(context)
.then(function(response){
console.log(context);
//render page
res.render('pages/myPage');
}, function(error){
//do something with error here
});
Doing this with native Promises. It's good to understand the guts.
This here is known as the "Promise Constructor Antipattern" as pointed out by #Bergi in the comments. Don't do this. Check out the better method below.
var contextA = new Promise(function(resolve, reject) {
    request('http://someurl.com', function(err, response, body) {
        if (err) reject(err);
        else {
            resolve(JSON.parse(body));
        }
    });
});
var contextB = new Promise(function(resolve, reject) {
    request('http://contextB.com', function(err, response, contextB) {
        if (err) reject(err);
        else {
            contextA.then(function(contextA) {
                res.render('page', contextA, contextB);
            });
        }
    });
});
The nifty trick here, and I think by using raw promises you come to appreciate this, is that contextA resolves once and then we have access to its resolved result. That is, we never make the request to someurl.com again, but still have access to contextA's JSON.
So I could conceivably create a contextC and reuse the JSON without having to make another request. Promises only ever resolve once. You would have to take that anonymous executor function out and put it in a new Promise to refresh that data.
Bonus note:
This executes contextA and contextB in parallel, but will do the final computation that needs both contexts when both A & B are resolved.
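A cleaner way to express that final "wait for both" step is Promise.all (a sketch, assuming contextB is changed to resolve with its parsed body instead of rendering inside its executor):
Promise.all([contextA, contextB])
    .then(function(contexts) {
        // both requests have resolved; contexts[0] is A's JSON, contexts[1] is B's
        res.render('page', contexts[0], contexts[1]);
    })
    .catch(function(err) {
        // either request failing lands here
    });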
Here's my new stab at this.
The main problem with the above solution is that none of the promises are reusable and they are not chained, which is a key feature of Promises.
However, I still recommend promisifying your request library yourself and abstaining from adding another dependency to your project. Another benefit of promisifying it yourself is that you can write your own rejection logic. This is important if you're working with a particular API that sends error messages in the body. Let's take a look:
// Function that returns a new Promise. Beats out the constructor anti-pattern.
const asyncReq = function(options) {
    return new Promise(function (resolve, reject) {
        request(options, function(err, response, body) {
            // Rejected promises can be dealt with in a `catch` block.
            if (err) {
                return reject(err);
            }
            // custom error handling logic for your application.
            else if (hasError(body)) {
                return reject(toError(body));
            }
            // typically I just `resolve` the `response`, since it contains `body`.
            return resolve(response);
        });
    });
};
asyncReq(urlA)
    .then(function(resA) {
        // Promise.all is the preferred method for managing nested context.
        return Promise.all([resA, asyncReq(urlB)]);
    })
    .then(function(resAB) {
        return render('page', resAB[0], resAB[1]);
    })
    .catch(function(e) {
        console.error(e);
    });
You can use the request-promise library to do this. In your case, you could have something like this, where you chain your requests.
request
    .get({ url: someURL })
    .then(body => {
        context.one = JSON.parse(body);
        // Returns the next request so it can be chained
        return request.get({ url: anotherURL });
    })
    .then(body => {
        context.two = JSON.parse(body);
        res.render('pages/myPage');
    })
    .catch(e => {
        // Catch errors
        console.log('Error:', e);
    });
By far the easiest is to use the request-promise library. You can also use a promise library like Bluebird and use its promisify functions to convert the request callback API to a promise API, though you may need to handle the fact that request does not use the standard single-value callback semantics (its callback receives both response and body). Lastly, you can just make your own promise wrapper, using either native promises or Bluebird.
If you're starting fresh, just use request-promise. If you're refactoring existing code, I would just write a simple wrapper for request using bluebird's spread function.
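A minimal sketch of that kind of wrapper, using Bluebird's promisifyAll with multiArgs as shown earlier (getJSON is just an illustrative helper name):
var Promise = require("bluebird");
var request = Promise.promisifyAll(require("request"), {multiArgs: true});

// getJSON wraps request.getAsync and uses spread to pull apart (response, body)
function getJSON(url) {
    return request.getAsync(url).spread(function(response, body) {
        return JSON.parse(body);
    });
}

getJSON(someURL).then(function(data) {
    // parsed body here
});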

Pushing to an array inside of a loop inside of a callback function

I have a loop that I need to run inside of a callback; unfortunately, accessing the array outside of the callback leaves me with an empty array. I know why this happens, but I want to know the best solution to tackle this.
Gallery.prototype.getGallery = function(cb) {
    self = this;
    var cos = new pb.CustomObjectService();
    var ms = new pb.MediaService();
    var s = [];
    cos.loadTypeByName('Gallery Image', function(err, gallery) {
        cos.findByType(gallery._id.toString(), function(err, rpy) {
            for (var i = 0; i < rpy.length; i++) {
                ms.loadById(rpy[i].Image, function(e, r) {
                    s.push(r.location);
                    console.log(r.location); /* <-- logs expected data */
                });
            }
            console.log(s[0]); /* <-- this is undefined */
        });
    });
};
Replace your for loop with a call to async.*; in this case async.map seems right. Pass a callback to async.map; it will be invoked when all the individual calls to ms.loadById are done, with the array of results.
async.map(
    rpy,
    function(elt, callback) {
        ms.loadById(elt.Image, callback);
    },
    function(err, data) {
        // comes here after all individual async calls have completed
        // check errors; the array of results is in data
    }
);
If you want to go into the promises world, then wrap the calls to ms.loadById in a promise. Here's a roll-your-own version, but various versions of what is usually called promisify are also out there.
function loadByIdPromise(elt) {
    return new Promise(function(resolve, reject) {
        ms.loadById(elt.Image, function(err, data) {
            if (err) return reject(err);
            resolve(data);
        });
    });
}
Then do a Promise.all on the resulting promises:
Promise.all(rpy.map(loadByIdPromise))
    .then(function(data) {
        // comes here when all individual async calls complete successfully
        // data is your array of results
    });
Using the promises style, your entire code would look like:
loadTypeByNamePromise('Gallery Image')
    .then(function(gallery) { return findByTypePromise(gallery._id.toString()); })
    .then(function(rpy) { return Promise.all(rpy.map(loadByIdPromise)); })
    .then(function(results) { /* do something with results */ });
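This assumes loadTypeByNamePromise and findByTypePromise exist; they would wrap cos.loadTypeByName and cos.findByType the same way loadByIdPromise wraps ms.loadById, roughly like this sketch (assuming cos is in scope as in the question):
function loadTypeByNamePromise(name) {
    return new Promise(function(resolve, reject) {
        cos.loadTypeByName(name, function(err, gallery) {
            if (err) return reject(err);
            resolve(gallery);
        });
    });
}

function findByTypePromise(typeId) {
    return new Promise(function(resolve, reject) {
        cos.findByType(typeId, function(err, rpy) {
            if (err) return reject(err);
            resolve(rpy);
        });
    });
}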

Using Waterline find method with promises

I'm stuck when it comes to returning a response from a method that includes a database call.
Here's a sample of what I need...
service.js
module.exports = {
    getUserStatus: function(userId) {
        return User
            .find()
            .where({userId: userId})
            .exec(function(err, user) {
                return user.status;
            });
    }
}
In service.js, I fetch the user's status; if I console.log(user.status) inside the exec method, it prints fine (I get the status).
The problem is I need this result outside service.js:
controller.js
// this code is extracted from a longer file, just for demo purposes.
// userService is the required 'service.js'
index: function(req, res) {
    var status = userService.getUserStatus(req.session.User.id);
    console.log(status);
    return res.view({userStatus: status});
}
If I console.log(status) here, it will be undefined.
I guess that it has something to do with promises and stuff (because of the async calls), but I'm not sure what the right way to do it is.
getUserStatus contains asynchronous code, so it needs a callback:
module.exports = {
    getUserStatus: function(userId, cb) {
        User.findOne().where({userId: userId}).exec(function(err, user) {
            if (err) return cb(err);
            return cb(null, user.status);
        });
    }
}
then in the code that uses it:
index: function(req, res) {
    userService.getUserStatus(req.session.User.id, function(err, status) {
        // If an error was returned from the service, bail out
        if (err) { return res.serverError(err); }
        console.log(status);
        return res.view({userStatus: status});
    });
}
Note the use of findOne instead of find; find will return an array.
An alternative would be to return a promise from the service function, and chain your controller code with .then() and .fail():
module.exports = {
    getUserStatus: function(userId) {
        return User.findOne().where({userId: userId});
    }
}
index: function(req, res) {
    userService.getUserStatus(req.session.User.id)
        .then(function(user) {
            console.log(user.status);
            return res.view({userStatus: user.status});
        })
        .fail(function(err) {
            return res.serverError(err);
        });
}
It's a matter of preference, but I think the first method is better, especially in your case, since it allows the service call to deliver just the status rather than the whole user object. In general, getting used to the standard (err, result) callback convention in Node will serve you well.

async and Q promises in nodejs

I'm using the Q library and async library in nodejs.
Here's an example of my code:
async.each(items, cb, function(item) {
    saveItem.then(function(doc) {
        cb();
    });
}, function() {
});
saveItem is a promise. When I run this, I always get "cb is undefined"; I guess then() doesn't have access to it. Any ideas how to work around this?
Your issue doesn't lie with promises; it lies with your usage of async.
async.each(items, handler, finalCallback) applies handler to every item of the items array. The handler function is asynchronous, i.e. it is handed a callback, that it must call when it has finished its work. When all handlers are done, the final callback is called.
Here's how you'd fix your current issue:
var handler = function (item, cb) {
    saveItem(item)
        .then(
            function () { // all is well!
                cb();
            },
            function (err) { // something bad happened!
                cb(err);
            }
        );
}
var finalCallback = function (err, results) {
    // ...
}
async.each(items, handler, finalCallback);
However, you don't need to use async for this particular piece of code: promises alone fill this job quite nicely, especially with Q.all():
// Create an array of promises
var promises = items.map(saveItem);
// Wait for all promises to be resolved
Q.all(promises)
    .then(
        function () { // all is well!
            cb();
        },
        function (err) { // something bad happened!
            cb(err);
        }
    );
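If you also want the saved documents rather than just a completion signal, Q.all resolves with an array of results, one per promise; a small sketch:
Q.all(items.map(saveItem))
    .then(function (docs) {
        // docs[i] is the resolved value of saveItem(items[i])
        cb(null, docs);
    })
    .catch(function (err) {
        cb(err);
    });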
