Callback for Async.Map Not Working - node.js

I'm relatively new to callbacks and have been unsuccessful in getting the following code to work. I have used the async.map function to return the data from each web call to its respective page. However, my console.log(return) is returning [ , undefined] even though the console.log(data) prior to the callback(data) is returning the web page's html. Here's my code:
// -- Setup: core modules, the async library, and the script's configuration --
var http = require("http"),
fs = require("fs"),
page, chap, ot,
async = require("async");
// NOTE(review): fs.open is asynchronous and expects a callback; called like
// this it does not yield a usable file descriptor ('ot' gets no meaningful
// value). Use fs.openSync, or fs.open('ot.txt', 'w', function (err, fd) {...}).
ot = fs.open('ot.txt', 'w');
page = "test";
chap = 2;
// Fetches the body of http://rs.au.com<url> and hands it to `callback` using
// the standard Node signature callback(err, data), as async.map expects.
function getData(url, callback) {
var data = "";
var options = {
host: "rs.au.com",
port: 80
}
options.path = url;
console.log("request sent to: http://" + options.host + options.path);
var req = http.request(options, function(res) {
console.log("Response received " + res.statusCode);
// Accumulate the body chunk by chunk.
res.on('data', function(chunk) {
data += chunk;
});
// 'end' fires once the whole body has arrived. `e` is undefined here, which
// conveniently serves as the "no error" first argument for async.map.
res.on('end', function(e) {
console.log(data);
callback(e, data);
});
// NOTE(review): there is no 'error' handler on `req`; a failed request would
// never invoke the callback, and async.map would wait forever.
}).end();
}
// Builds the list of chapter URLs and fetches them all via async.map.
function main() {
var pathArr = [];
// BUG (addressed in the answer below): `i` starts at 1, so pathArr[0] is
// never assigned and stays undefined — that hole is what produces the
// "[ , undefined]"-style output. Assign to pathArr[i - 1] instead so the
// array is dense.
for ( var i = 1; i <= chap; i++ ) {
pathArr[i] = "/".concat(page, "/", i, ".html");
}
// async.map calls getData once per path and collects the results in order.
async.map(pathArr, getData, function(err, result) {
console.log("The result is :" + result);
});
}
main();
Could anyone please point out why my code is not working and how I can correct it?
Much appreciated!
EDIT: After Brandon Tilley's response I amended the callback function from callback(data) to callback(e, data), however I'm getting no response now from the last console.log output.

The Async library assumes your callbacks adhere to the standard Node.js callback signature, which is callback(err, others...). Since you are passing data as the first argument, Async assumes it's an error. You should use callback(e, data) instead (since e will be null in the case of no errors).
[Update]
The other problem is that your array is not correct. Since i starts at 1 and goes up to chap, pathArr[0] is undefined. Change:
pathArr[i] = "/".concat(page, "/", i, ".html");
to
pathArr[i-1] = "/".concat(page, "/", i, ".html");

Related

Nodejs, why lines after ASYNC call are not executing?

I am trying to write a AWS Lambda function with Nodejs.
Inside the code I have to call a API, wait for the response and do other things with that data (I haven't still write anything about this)
NOTE: sorry about declaring customerTAX as a global — I prefer to get the Lambda function working first, and afterwards try to return the value from the function itself.
This is the code:
'use strict';
// Holds the tax value fetched by getCustomerTAX (deliberately global for now —
// the author plans to return it from the function later).
var customerTAX;
const https = require('https');
// Static request options for the GraphQL endpoint queried on every invocation.
const options = {
host: 'xxxxxxx.com',
port: 443,
path: '/yyyyyyy.json',
method: 'POST',
headers: {
'Content-Type': 'application/graphql',
}
};
// Lambda entry point: parse the incoming order event, fetch the customer's
// tax value, then echo the event body back with a 200 response.
exports.handler = async (event) => {
const body = JSON.parse(event.body);
const orderId = body.id;
const customerId = body.customer.id;
console.log('ORDER ID: ' + orderId);
console.log('CUST ID: ' + customerId);
const query = `xxxxxxxxxxxxxxxxxxxx`;
//I CAN SEE ALL LOGS OF THIS FUNCTION IN CLOUDWATCH
// Suspends until the promise returned by getCustomerTAX settles. Because that
// promise is never actually resolved (its executor only references `resolve`
// without calling it), execution never continues past this line — which is
// why nothing below appears in CloudWatch.
await getCustomerTAX(query);
//I CAN'T SEE NOTHING BELOW THIS LINE IN AWS CLOUDWATCH
console.log('CUST TAX: ' + customerTAX);
if (customerTAX != null) {
console.log('LETs GO TO SAVE IT')
} else {
console.log('NOTAX: No customerTAX');
}
const response = {
statusCode: 200,
body: JSON.stringify(event.body),
};
return response;
};
// POSTs `query` to the configured host and stores the returned TAX value in
// the global customerTAX. Returns a promise intended to settle when done.
var getCustomerTAX = function(query) {
return new Promise(function(resolve, reject) {
var req = https.request(options, function(res) {
res.setEncoding('utf8');
var bodyRaw = '';
// Drain the response as chunks become readable.
res.on('readable', function () {
var chunk = this.read() || '';
bodyRaw += chunk;
console.log('getTAX CHUNK (' + Buffer.byteLength(chunk) + ' bytes): ' + chunk);
});
res.on('end', function () {
const body = JSON.parse(bodyRaw);
if (body.TAX.value != null) {
customerTAX = body.TAX.value;
} else {
customerTAX = null;
}
console.log("getTAX END: " + customerTAX);
// BUG (the subject of this question): `resolve;` merely *references* the
// function without calling it, so the promise never resolves and the
// caller's `await` never completes. It must be `resolve();`.
resolve;
//console.log('body: ' + Buffer.byteLength(body) + ' bytes');
});
});
//handle the possible errors
req.on('error', function(e) {
console.log("ERROR: " + e);
reject(e);
});
//do the request
req.write(query);
//finish the request
req.end();
});
};
Function getCustomerTAX works perfectly, but I don't know why my lambda function is "finished" in this line, and I can't see more console-logs in cloudwatch.
Hope your answer and thanks a lot.
For starters in getCustomerTAX(), resolve; needs to be resolve();.
It's a function. You need to call it.
Without calling resolve(), the promise is never resolved and thus await getCustomerTAX() never finishes, and the lines after it are never executed.
FYI, the request-promise module will automatically do a lot of what you have in getCustomerTAX() (make an HTTP request, get the response, handle all possible errors and return a promise that represents the result).
EDIT Jan, 2020 - request() module in maintenance mode
FYI, the request module and its derivatives like request-promise are now in maintenance mode and will not be actively developed to add new features. You can read more about the reasoning here. There is a list of alternatives in this table with some discussion of each one. I have been using got() myself and it's built from the beginning to use promises and is simple to use.

Node.js HTTP-Response into variable

I am trying to get the response of the HTTP request into my variable "temperature", but it doesn't work. I already tried it with callbacks, but I'm not really familiar with them, so I can't solve my problem right now. Maybe someone has an idea?
thanks and best regards :-)
var temperature = '';
http.get(url, function(res) {
//console.log("Got response: " + res.statusCode);
// Collect the body chunks until the response ends.
var bodyarr = [];
res.on('data', function(chunk){
bodyarr.push(chunk);
});
// 'end' fires asynchronously, long after the console.log at the bottom of
// this snippet has already run — which is why `temperature` looks empty there.
res.on('end', function(){
//console.log(*/bodyarr.join('').toString());
temperature = bodyarr;
});
// NOTE(review): the next line is a Stack Overflow paste artifact
// ("enter code here") and is not valid JavaScript.
enter code here
}).on('error', function(e) {
console.log("Got error: " + e.message);
});
// Runs immediately, before the response has arrived, so this prints ''.
console.log(temperature);
The issue here is asynchronicity: the temperature variable is still empty at that point, because the log call runs before the request has finished. Use http.request, which is asynchronous: it takes a callback as a parameter, and inside that callback you read the accumulated data in the response's 'end' handler if you want the complete response:
//The url for example is 'www.random.org/temperatures/?min=1&max=100'
var options = {
host: 'www.random.org',
path: '/temperatures/?min=1&max=100'
};
var temperature = '';
var http = require('http');
// Response handler: collect the body chunks and only read the result inside
// the 'end' handler, i.e. once the whole response has arrived.
// (Fixed: `callback` was an implicit global — it needs `var`; and the old
// `console.log(req.data)` dereferenced `req`, the return value of .end(),
// which has no `data` property and was undefined in older Node versions.)
var callback = function(res) {
var bodyarr = [];
res.on('data', function (chunk) {
bodyarr.push(chunk);
});
res.on('end', function () {
console.log(bodyarr);
temperature = bodyarr.join();
});
}
var req = http.request(options, callback).end();

Node tries to send headers but previous response seems still opened

I have the following code:
// GET /pull-requests: kick off the repo fetch, then render the aggregated
// pull-request list once the 'pull-requests:fetched' event fires.
// NOTE(review): the handler signature is missing its second parameter — it
// should be function (request, response); as written, `response` below is an
// undeclared identifier.
app.get('/pull-requests', function (request) {
fetchRepos(fetchPullRequests);
// BUG: this registers a *new* listener on every request, so on the second
// request the listener registered by the first one fires again and tries to
// respond a second time — the cause of "Can't set headers after they are
// sent".
app.on('pull-requests:fetched', function (pullRequestsByRepo) {
var html = "";
_.each(pullRequestsByRepo, function (pullRequests) {
html += 'There is <strong>'+ pullRequests.length +'</strong> pending pull request(s) for <strong>'+ pullRequests[0].title +'</strong>:';
html += '<ul>';
_.each(pullRequests, function (pullRequest) {
html += '<li><em>'+ pullRequest.title +'</em> ('+ pullRequest.url +')</li>';
});
html += '</ul>';
});
response.send(html);
});
});
It works fine once. Every second request ends raising an error Can't set headers after they are sent..
EDIT: More code to explicit the logic
// Fetches the list of repos for the configured organisation and passes the
// parsed JSON array to `callback`.
// (Fixed: `data` was an implicit global — missing `var`; and the error
// handler logged `e` while its parameter is named `error`, which would throw
// a ReferenceError inside the handler.)
function fetchRepos (callback) {
_options.path = '/orgs/'+ app.get('org') +'/repos?client_id='+ app.get('client_id') +'&client_secret='+ app.get('client_secret');
// Fetch the list of repos for a given organisation
var request = https.get(_options, function (res) {
var data = "";
res.on('data', function (chunk) {
data += chunk;
});
res.on('end', function () {
var repos = JSON.parse(data);
return callback(repos);
});
});
request.on('error', function (error) {
console.log('Problem with request: '+ error);
});
}
// For each repo, fetches its open pull requests and emits
// 'pull-requests:fetched' with the collected results.
function fetchPullRequests (repos) {
var pullRequests = [];
_.each(repos, function (repo, index) {
_options.path = '/repos/'+ app.get('org') +'/'+ repo.name +'/pulls?client_id='+ app.get('client_id') +'&client_secret='+ app.get('client_secret');
var request = https.get(_options, function (res) {
(function () {
var data = "";
res.on('data', function (chunk) {
data += chunk;
});
res.on('end', function () {
data = JSON.parse(data);
// Only keep repos that actually have open pull requests.
if (data.length > 0) {
pullRequests.push(data);
}
// NOTE(review): this fires when the request for the *last-indexed* repo
// finishes, but responses can complete in any order, so earlier requests
// may still be pending when the event is emitted. Counting completed
// responses (or passing a callback, as the answer suggests) is safer.
if (index == (repos.length - 1)) {
app.emit('pull-requests:fetched', pullRequests);
}
});
})();
});
});
}
Your problem is that whenever you call app.on('pull-requests:fetched', …), you add a new listener, meaning that when the second request arrives, it will trigger the first one again.
Node then complains because you try to reply twice to the first request.
You could fix your immediate issue by calling app.once, which would ensure that the listener only fires once, but you would still have an issue if two requests arrive at the same time.
The correct pattern in this case is to pass a callback to fetchRepos.

Combine sequence of asynchronous requests to build final answer

I'm rather new to the Express framework. I have to call the Flickr API to get the albums list, and for each album I need to get its thumbnail. At the end I need to build a covers array containing objects like {title, thumb}. I would like to pass the fully built covers array to the template and render it. I have a problem with this because of the way Node.js callbacks work: the for loop finishes quickly, before the requests end. How do I do this properly?
// Fetches the user's photoset list, then fetches a thumbnail for each set,
// intending to render once everything has arrived.
http.get(base_url+'&user_id='+flickr.user_id+'&method=flickr.photosets.getList', function(resp){
var body = '';
resp.on('data', function(chunk) {
body += chunk;
});
resp.on('end', function() {
var json = JSON.parse(body);
var ps = json.photosets.photoset;
// final answer
// NOTE(review): several problems from here down. `covers` is an object, so
// `covers += {...}` below performs string concatenation, not insertion —
// it should be an array used with covers.push(...). Also `p` and `i` are
// function-scoped (`var`), so every inner 'end' handler sees the *last*
// loop values by the time its response arrives; and the inner requests can
// finish in any order, so `i + 1 == ps.length` does not mean all are done.
var covers = {};
for(var i=0; i<ps.length; i++) {
var p = ps[i];
var getSizesUrl = base_url+'&user_id='+flickr.user_id+'&method=flickr.photos.getSizes&photo_id='+p.primary;
http.get(getSizesUrl, function(resp){
var body1 = '';
resp.on('data', function(chunk) {
body1 += chunk;
});
resp.on('end', function() {
var json1 = JSON.parse(body1);
covers += {title: p.title._content, thumb: json1.sizes.size[1].source};
if(i + 1 == ps.length) {
// last call
console.log(covers);
res.render('photosets', {covers: covers});
}
});
});
}
});
});
Update using async and request as #sgwilly said, but something wrong...
// Attempted rewrite with `request` + `async` — still broken; see notes below.
request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photosets.getList', function (error, response, body) {
var json = JSON.parse(body);
var ps = json.photosets.photoset;
// functions list to call by `async`
// NOTE(review): `funcs` is an object, so `funcs += function ...` coerces
// both sides to strings instead of adding a task — async never receives any
// callable work. Use an array with funcs.push(...). Also `p` is declared
// with `var` and therefore shared by every pushed closure, so each task
// would see the last photoset only.
var funcs = {};
for(var i = 0; i < ps.length; i++) {
var p = ps[i];
funcs += function(callback) {
request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photos.getSizes&photo_id='+p.primary, function (error, response, body1){
var json1 = JSON.parse(body1);
var cover = {title: p.title._content, thumb: json1.sizes.size[1].source};
callback(null, cover);
});
};
}
// run requests and produce covers
// NOTE(review): async.series invokes its final callback as (err, results),
// so this function actually receives the *error* in `covers`, not the
// results.
async.series(funcs,
function(covers){
console.log(covers);
res.render('photosets', {covers: covers});
}
);
});
Node.js callbacks are a little tricky, but they'll make sense after a while.
You want to use request as your outside loop, then async.concat as a callback inside, and have each iterator be a request call of a URL.
Thanks for #dankohn. I got this and work :)
// Fetch the photoset list, then use async.concat to look up each set's
// thumbnail in parallel and render once every lookup has completed.
request(base_url+'&user_id='+flickr.user_id+'&method=flickr.photosets.getList', function (listErr, listRes, listBody) {
var photosets = JSON.parse(listBody).photosets.photoset;
// Maps one photoset to its cover object via a flickr.photos.getSizes call.
var fetchCover = function (photoset, done) {
var sizesUrl = base_url+'&user_id='+flickr.user_id+'&method=flickr.photos.getSizes&photo_id='+photoset.primary;
request(sizesUrl, function (sizesErr, sizesRes, sizesBody) {
var sizes = JSON.parse(sizesBody);
done(null, {title: photoset.title._content, thumb: sizes.sizes.size[1].source});
});
};
// async.concat collects every cover (one per photoset), then hands the
// combined array to the final callback for rendering.
async.concat(photosets, fetchCover, function (err, covers) {
console.log(covers);
res.render('photosets', {covers: covers});
});
});

Chaining an arbitrary number of promises in Q

I want to send an HTTP request N times. I want to eventually have information about the results of each of those requests.
Running the request function once works great. Here's the HTTP request function using Q.defer():
// Issues a single HTTP request and returns a promise that resolves with the
// status code plus start/end/duration timing details, or rejects with the
// error message if the request fails.
function runRequest() {
var deferred = Q.defer();
var start = (new Date).getTime();
var req = HTTP.request(options, function (res) {
var end = (new Date).getTime();
// Resolve with everything the caller needs to report on this request.
deferred.resolve({
reqStatus: res.statusCode,
reqStart: start,
reqEnd: end,
duration: end - start
});
});
req.on('error', function (e) {
deferred.reject(e.message);
});
req.end();
return deferred.promise;
}
If I do this, I get back the data I expect:
// A single run: log the details of one request, or the failure reason.
runRequest().then(function(requestDetails) {
console.log('STATUS: ' + requestDetails.reqStatus);
console.log('Duration: ' + requestDetails.duration);
console.log('Start: ' + requestDetails.reqStart);
console.log('End: ' + requestDetails.reqEnd);
}, function(error) {
console.log('Problem with request: ' + error);
})
.done();
To iterate, I tried to fit that into a for loop:
// Fires `requests` HTTP calls and attempts to return all their details.
function iterateRequests() {
var deferred = Q.defer();
var reqResults = [];
for (var iteration = 0; iteration < requests; iteration++) {
runRequest()
.then(function(requestDetails) {
console.log('STATUS: ' + requestDetails.reqStatus);
reqResults.push(requestDetails);
}, function(error) {
console.log('Problem with request: ' + error);
});
}
// BUG (the subject of this question): this resolves synchronously, right
// after the loop has merely *started* the requests — none of the .then
// handlers above have run yet, so the promise resolves with an empty array.
// Collecting the promises and handing them to Q.all (as the answer shows)
// is the right approach.
deferred.resolve(reqResults);
return deferred.promise;
}
Then I call it like this:
// Because iterateRequests resolves before any request completes, this logs an
// empty array first, and the per-request logs only appear afterwards.
iterateRequests()
.then(function(results) {
console.log(results);
console.log("in the success callback after iterateRequests");
}, function() {
console.log("in the failure callback after iterateRequests");
})
.done();
I end up getting into the success callback (i.e., it logs "in the success callback after iterateRequests"). However, the console.log(results) prints before I get the logs from runRequest().then() callback and it's an empty array.
Any ideas or some guidance on chaining/iterating over promise-return functions?
Thanks!
Update
Follow up question in response to #abject_error's answer:
Checked out Q.all. Definitely looks like what I need. And it's much simpler that what I was working with. I made a simple test case to help me figure out how it works:
var Q = require("q");
// Wraps a plain value in an already-resolved promise.
function returner(number) {
var d = Q.defer();
d.resolve(number);
return d.promise;
}
// Q.all turns a list of promises into one promise for the list of values.
function parent() {
var pending = [returner(1), returner(2), returner(4)];
return Q.all(pending);
}
// Logs [1, 2, 4] once all three promises have resolved.
parent().then(function(values) {
console.log(values);
});
So I see how I can use it if I know beforehand the number of times I need to call it (and which functions I'm going to call). Any tips on how to get a dynamic number of calls to returner (in this example) or runRequest (in my original example) in the array?
This answers the update part of the question:
// Builds one promise per story so Q.all can wait on the whole batch.
var buildCalls = function() {
var calls = [];
// NOTE(review): for...in iterates the *keys* of stories (as strings, if
// stories is an array), so myFunc receives "0", "1", ... rather than the
// story values — confirm that is what myFunc expects.
for (var i in stories) {
calls.push(myFunc(i));
}
return calls;
}
// NOTE(review): a bare `return` like this only works inside an enclosing
// function (e.g. a route handler), not at the top level of a module.
return Q.all(buildCalls());
Q has other functions to aid in Promise based workflows. The method you need to use is Q#all. If you have an array of promises, and you want to call a function when all of them have successfully fulfilled, you do
Q.all(array_of_promises).then(success_callback, failure_callback);
After all the request promises are fulfilled, success_callback is called. If any of them rejects, the failure_callback is called immediately.

Resources