how to make for loop in async.parallel in node.js - node.js

I have a code snippet like the following.
async.parallel({
foo: function(call) {
request.get('...?node=foo', function (error, response, body) {
var last=JSON.parse(body);
call(null,last.data.result);
});
},
bar: function(call) {
request.get('...node=bar', function (error, response, body) {
var last=JSON.parse(body);
call(null,last.data.result);
});
}
}, function(err, results) {
callback(results)
});
but I want to do that in a loop... that is to say, like the following.
var node = ['foo', 'bar'];
async.parallel({
for (var i = 0; i < node.length; i++) {
node[i]: function(call) {
request.get('...?node=' + node[i] + '', function(error, response, body) {
var last = JSON.parse(body);
call(null, last.data.result);
});
}
}
}, function(err, results) {
callback(results)
});
it does not work... how can I make it work?
can you help me?

You have one of two options. One is to read the documentation for async.map(), which would be the more canonical approach using async.
That would look something like this:
async.map(['foo', 'bar'], function (node, callback) {
request.get('...?node=' + encodeURIComponent(node), function (error, response, body) {
if (error) return callback(error)
var last = JSON.parse(body)
callback(null, last.data.results)
})
}, function (error, results) {
// handle error
callback(results)
})
Your second option would be to use Promise.all() with a synchronous Array#map(), removing the dependency on async entirely, though you'll probably want a polyfill like bluebird until Promises are supported in a more substantial percentage of browsers.
Here's how you'd implement the latter solution using native Promises:
Promise.all(['foo', 'bar'].map(function (node) {
return new Promise(function (resolve, reject) {
request.get('...?node=' + encodeURIComponent(node), function (error, response, body) {
if (error) return reject(error)
var last = JSON.parse(body)
resolve(last.data.result)
})
})
})).then(function (results) {
callback(results)
}).catch(function (error) {
// handle error
})

Related

NodeJs and Protractor return a value from the shared file

this.queryMailApi = function(mailUrl, callback) {
request.get({url: mailUrl}, function (err, httpResponse, body) {
if (err) {
return console.error('post failed:', err);
} else
callback(body);
});
};
this.myCallBack = function(data) {
var emailData = data;
console.log(emailData);
}
This is my function + callback to get the value. I want to return it to a function call similar to how you would do this.
var x = shared.queryMailApi(mailApiUrl, shared.myCallBack);
To be used later in code. I've read a ton of things about asynchronous Nodejs stuff which means I can't actually do this... but there has to be a way.
I didn't try this, but I think you should be able to do it this way with a promise.
this.queryMailApi = function(mailUrl) {
var deferred = protractor.promise.defer();
request.get({url: mailUrl}, function (err, httpResponse, body) {
if (err) {
deferred.reject(err);
return console.error('post failed:', err);
}
deferred.resolve(body);
});
return deferred.promise
};
this
.queryMailApi('example#mail.com')
.then(function(response) {
console.log(response);
});
If this doesn't work, you may take a look webdriver.WebDriver.wait. This may be useful.

How to put different request's response in the same document with nodejs and request?

Hi, I want to put the responses of different requests in the same document. I have this document:
var result = {google:"", twitter:"", facebook:""}
I want to do different GET requests to these sites (google.com, Facebook.com, twitter.com) and put all the result in the correspondent field.
I tried with nested callbacks, but that way I have to do the Google call first, then the Twitter one, etc., like this:
Request({
url:first_url,
},function(err, response, body) {
if (err) {
request.log(err);
}else{
risultato.google = body;
Request({
url:second_url,
},function(err, response, body) {
if (err) {
request.log(err);
}else{
risultato.facebook = body;
Request({
url:third_url,
},function(err, response, body) {
if (err) {
request.log(err);
}else{
risultato.twitter = body;
console.log(result);
}
});
}
});
}
});
So I need to know how to do all the requests in parallel, and once I have all the requests' responses in the result object, I can use it.
You can use async
var async = require('async');
var request = require('request');
async.parallel({
google: function(cb) {
request("https://google.com", function(err, resp, data) {
cb(err, data)
})
},
facebook: function(cb) {
request("https://facebook.com", function(err, resp, data) {
cb(err, data)
})
},
twitter: function(cb) {
request("https://twitter.com", function(err, resp, data) {
cb(err, data)
})
},
}, function(err, results) {
console.log(err, results);
//results is now equal to {google: "", facebook: "", twitter: ""}
});
An alternative to async would be to use the Promise API. There's a request-promise npm library (https://www.npmjs.com/package/request-promise) that returns a Promise for each request. We can send requests for each URL, then simply call Promise.All on the list of Promises:
const request = require('request-promise');
const urls = ['http://google.com', 'http://yelp.com'];
// List of Promises that resolve the original URL and the content
const resultPromises = urls.map(url => {
return request(url).then(response => ({ url, response }));
});
Promise.all(resultPromises)
.then(results => {
// results is an array containing the URL and response for each URL
console.log(results);
});

Need to call two APIs using Node.js asynchronously and aggregate the responses from both APIs

The code I wrote so far is as below.
I don't need the whole response but just part of it.
var request = require('request');
var async = require('async');
var asyncTasks = [];
var install;
async.series([
function (callback) {
setTimeout(function () {
request('URL', function (error, response, body) {
if (!error && response.statusCode == 200) {
console.log(body); // Show the HTML for the Google homepage.
}
});
}, 5000);
},
function (callback) {
setTimeout(function () {
request('URL', function (error, response, body) {
if (!error && response.statusCode == 200) {
console.log(body); // Show the HTML for the Google homepage.
}
});
}, 5000);
}
],
function (error, results) {
console.log(results);
});
One approach to do the above concurrently would be to use async.parallel - of the form of:
async.parallel([
function(){ ... },
function(){ ... }
], callback);
Another approach is to use a Promises library - BlueBird or Q are good choices.
Q.All is of the form of:
return Q.all([
promise1,
promise2
]).spread(function (resultFromPromise1, resultFromPromise2) {
// do something with the results...
});
You could use one of these approaches to parallelise the two calls. The outputs of each will give you an array containing the results of each call respectively.
Here is a simple illustration of each approach:
Using Async.js
var async = require('async');
var task = function (cb, count) {
setTimeout(function () {
cb(null, "complete: " + count);
}, 1000);
};
async.parallel([
function (cb) {
task(cb, 'one');
},
function (cb) {
task(cb, 'two');
}
], function (err, results) {
console.log(results);
//[ 'complete: one', 'complete: two' ]
});
Using Q:
var Q = require('q');
function task1(cb, count) {
var deferred = Q.defer();
setTimeout(function () {
return deferred.resolve(cb(null, count));
}, 1000);
return deferred.promise;
}
var myCb = function (err, count) {
return "complete: " + count;
};
Q.all([task1(myCb, 'one'), task1(myCb, 'two')])
.then(function (results) {
console.log(results);
//[ 'complete: one', 'complete: two' ]
});
Let me know if anything is unclear.
Promises are there to help you out in such a case.
I would prefer to use 'Q' library.
I have modified your code to use Q library
var Q = require('q');
var request = require('request');
function makeCall() {
Q.all([req1(), req2()])
.spread(function (res1, res2) {
// This block is executed once all the functions( Provided in Q.all() ) are finished its execution.
// Use responses from called functions
}, function (err) {
// Error, If any
});
}
function req1() {
var defer = Q.defer();
var url = ''; // Specify URL
var options = {
method: 'get', // Method to use
url: url
}
request(options, function (err, res, body) {
if (err) {
return defer.reject(err);
}
return defer.resolve(body);
});
return defer.promise;
}
function req2() {
var defer = Q.defer();
var url = ''; // Specify URL
var options = {
method: 'get', // Method to use
url: url
}
request(options, function (err, res, body) {
if (err) {
return defer.reject(err);
}
return defer.resolve(body);
});
return defer.promise;
}
You can find docs for Q library here : Q docs

Call same function many times and process combined result set

I have a requirement to make several API requests and then do some processing on the combined result sets. In the example below, you can see that 3 requests are made (to /create) by duplicating the same request code, but I would like to be able to specify how many to make. For example, I may wish to run the same API call 50 times.
How can I make n calls without duplicating the API call function n times?
async.parallel([
function(callback){
request.post('http://localhost:3000/create')
.send(conf)
.end(function (err, res) {
if (err) {
callback(err, null);
}
callback(null, res.body.id);
});
},
function(callback){
request.post('http://localhost:3000/create')
.send(conf)
.end(function (err, res) {
if (err) {
callback(err, null);
}
callback(null, res.body.id);
});
},
function(callback){
request.post('http://localhost:3000/api/store/create')
.send(conf)
.end(function (err, res) {
if (err) {
callback(err, null);
}
callback(null, res.body.id);
});
}
],
function(err, results){
if (err) {
console.log(err);
}
// do stuff with results
});
First, wrap the code that you want to call many times in a function:
var doRequest = function (callback) {
request.post('http://localhost:3000/create')
.send(conf)
.end(function (err, res) {
if (err) {
callback(err);
}
callback(null, res.body.id);
});
}
Then, use the async.times function:
async.times(50, function (n, next) {
doRequest(function (err, result) {
next(err, result);
});
}, function (error, results) {
// do something with your results
}
Create an array with as many references to the function as you need tasks in your workload. Then pass them to async.parallel. For example:
var async = require("async");
var slowone = function (callback) {
setTimeout(function () {
callback(null, 1);
}, 1000);
};
async.parallel(
dd(slowone, 100),
function (err, r) {
console.log(JSON.stringify(r));
}
);
// Returns an array with count instances of value.
function dd(value, count) {
var result = [];
for (var i=0; i<count; i++) {
result.push(value);
}
return result;
}
Note again that there is only one instance of the slow running function, in spite of there being many references to it.

Node async.series trouble

While building a fairly complex scraper, I stumbled upon a problem with the control flow of my code.
What's going on in code below:
1) request a URL
2) scrape NEWURL from the results
3) pass it to readability API as first async function
4) here comes the trouble — I never get to the next async function, which saves readabilityData to the DB
How to solve this problem?
I'm new to JS, so please feel free to point out at any issues with my code.
request(URL, function(error, response, html) {
if (!error) {
var $ = cheerio.load(html);
NEWURL = data.find('a').attr('href');
readabilityData = {}
var articleUrl = 'https://readability.com/api/content/v1/parser?url=' + NEWURL + token;
async.series([
function(){
request(articleUrl, function(error, response, html) {
if (!error) {
readabilityData = response.toJSON();
}
});
},
function(readabilityData){
Article.findOne({
"link": url // here's the
}, function(err, link){
if(link) {
console.log(link)
} else {
var newArticle = new Article({
// write stuff to DB
});
newArticle.save(function (err, data) {
// save it
});
}
});
}
],
function(err){
console.log('all good — data written')
});
});
}
});
You need to call the callback parameter that's passed into the functions of the async.series call when each function's work is complete. That's how async.series knows that it can proceed to the next function. And don't redefine readabilityData as a function parameter when you're trying to use it to share data across the functions.
So something like:
var readabilityData = {};
async.series([
function(callback){
request(articleUrl, function(error, response, html) {
if (!error) {
readabilityData = response.toJSON();
}
callback(error);
});
},
function(callback){
Article.findOne({
"link": url // here's the
}, function(err, link){
if(link) {
console.log(link);
callback();
} else {
var newArticle = new Article({
// write stuff to DB
});
newArticle.save(function (err, data) {
// save it
callback(err);
});
}
});
}
],
function(err){
console.log('all good — data written')
});

Resources