Calling an API in parallel using Node

I want to stress test my API and call it multiple times in parallel to get an idea of performance.
I am almost there with the code, and as someone new to JS, I am hoping to get it doing the following:
Call each request in Parallel
Log out when each individual request is finished
Log out the time taken for each individual request
At the minute, the function waits until all calls have finished before it logs out the data. I assume this is because I am using Promise.all()?
import request from 'request';

var requestAsync = function(url) {
  return new Promise((resolve, reject) => {
    request(url, (err, response, body) => {
      if (err) return reject(err);
      resolve(JSON.parse(body));
    });
  });
};
const urls = ["url1", "url2", "url3", "url4"];
var getParallelData = async function() {
  // transform requests into Promises, await all
  var startTime = performance.now()
  try {
    var data = await Promise.all(urls.map(requestAsync));
  } catch (err) {
    console.error("ERROR: " + err);
  }
  var endTime = performance.now()
  console.log(data);
  console.log(`Call took ${(endTime - startTime) / 1000} seconds`)
}
getParallelData();
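For reference, Promise.all only controls when the combined result is available; it does not stop you from observing each request individually. One way to log each request as it finishes is to do the timing inside the mapped promise. A sketch reusing the requestAsync helper above (timedRequest is just an illustrative name, and it assumes performance.now() is available globally as in the snippet above):

var timedRequest = function(url) {
  var start = performance.now();
  return requestAsync(url).then(function(data) {
    // logs as soon as this particular request settles
    console.log(`${url} finished in ${(performance.now() - start) / 1000} seconds`);
    return data;
  });
};

// then, inside getParallelData:
// var data = await Promise.all(urls.map(timedRequest));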
Edit: I have been continually trying to evolve this to suit my needs, and this is what I have now. I think it's doing what I expect, as long as it's not waiting for the first request to finish before starting the next.
import request from 'request'

var requestAsync = function(url) {
  return new Promise((resolve, reject) => {
    request.get({
      url: url,
      time: true   // have request measure the elapsed time for us
    }, function(err, response) {
      if (err) return reject(err);
      console.log('Request time in seconds', (response.elapsedTime / 1000));
      resolve(response.elapsedTime);
    });
  });
};
const urls = ["url1", "url2", "url3", "url4"];
var getParallelData = async function() {
  for (const url of urls) {
    requestAsync(url);   // fire each request without awaiting, so they all run in parallel
  }
}
getParallelData();
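If you also want to know when the whole batch has finished (and how long it took) while still firing everything in parallel, one option is to collect the promises and await them together. A sketch building on the helper above (Promise.allSettled needs Node 12.9 or later; use Promise.all if every request is expected to succeed):

var getParallelData = async function() {
  var startTime = performance.now();
  // all requests start immediately; allSettled waits for each one to finish or fail
  var results = await Promise.allSettled(urls.map(requestAsync));
  var endTime = performance.now();
  console.log(results);
  console.log(`All requests took ${(endTime - startTime) / 1000} seconds`);
};

getParallelData();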

Related

Function enters then before request completed

My async function enters then before the request is completed. Shouldn't the then part of the code execute only after the async function has completed? How do I make the then callback run only after everything in the function has finished executing?
app.js
var request_test = require('./request_test');
var baseUrl = "https://github.com";

var promiseFunction = async function promiseFunction() {
  request_test.request_test(baseUrl);
};

promiseFunction().then((result) => {
  console.log("complete");
});
request_test.js
var request = require('request');
var cheerio = require('cheerio');

var request_test = function check(baseUrl) {
  console.log("baseUrl:" + baseUrl)
  var options = {
    url: baseUrl
  };
  request(options, function (error, response, html) {
    if (!error) {
      console.log("no error");
    } else {
      console.log("else js");
      console.log(error);
    }
  });
}

module.exports = {
  request_test: request_test
};
In order to use then() you need to return a promise. So here is an example of the good old-style promise chain: simply return a promise from request_test, and once you resolve or reject it, then() will be called:
promiseFunction().then((result) => {
  console.log("complete");
});

function promiseFunction() {
  return request_test();
}

function request_test() {
  return new Promise(function(resolve, reject) {
    setTimeout(function() {
      console.log("no error");
      resolve();
    }, 1000);
  });
}
Or use the modern approach: an async function that awaits the call to a function that returns a promise.
promiseFunction();

async function promiseFunction() {
  await request_test();
  console.log('complete');
}

function request_test() {
  return new Promise(function(resolve, reject) {
    setTimeout(function() {
      console.log("no error");
      resolve();
    }, 1000);
  });
}
Your issue is coming from var request_test = function check(baseUrl) { ... Inside this function you are not returning a promise; in fact, you are not returning anything :)
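A minimal fix that keeps the request package is to wrap the callback in a Promise and return it from the module. A sketch of request_test.js under that approach:

var request = require('request');
var cheerio = require('cheerio');

var request_test = function check(baseUrl) {
  console.log("baseUrl:" + baseUrl);
  var options = {
    url: baseUrl
  };
  // return a Promise so callers can use then() or await
  return new Promise(function (resolve, reject) {
    request(options, function (error, response, html) {
      if (error) return reject(error);
      resolve(html);
    });
  });
};

module.exports = {
  request_test: request_test
};

promiseFunction in app.js then needs to return (or await) the result of request_test.request_test(baseUrl) so that then() fires only after the request completes.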
If you are using async, I would go ahead and use the async/await syntax. Also, the request package does not return a promise; request-promise-native is an alternative that does. The promise should be the return value of your helper function. It could look like this:
var request_test = require('./request_test');
var baseUrl = "https://github.com";

var promiseFunction = async function () {
  var result = await request_test.request_test(baseUrl);
  console.log("complete");
}

promiseFunction();
and the module:
var request = require('request-promise-native');
var cheerio = require('cheerio');

var request_test = function check(baseUrl) {
  console.log("baseUrl:" + baseUrl)
  var options = {
    url: baseUrl
  };
  // request-promise-native resolves with the response body and rejects on errors
  return request(options).then(function (html) {
    console.log("no error");
    return html;
  }).catch(function (error) {
    console.log("else js");
    console.log(error);
  });
}

module.exports = {
  request_test: request_test
};

NodeJS using request inside a loop

I use NodeJS and the request lib to make some requests to an API.
I understand now that all requests are async, so the code doesn't "wait" for the result of the GET call, and the index of my loop is always the same inside the callback.
I was wondering if there is any simple way (without any lib) to wait for the response of the request call?
For now, my code is this :
for (var i in entries) {
  var entryId = entries[i]['id'];
  var options = {
    url: 'https://api.com/'+ entryId +'/get/status',
    method: 'GET',
    headers: {
      'Authorization': auth
    }
  };
  console.log(' ENTRY ID > '+ entryId);
  request(options, function(error, response, body) {
    var response = JSON.parse(body);
    if (response.status.code == 200) {
      var id = response.status.id;
      var data = [];
      data['id'] = id;
      data = JSON.stringify(data);
      // show first entryId of loop
      console.log(' > MY ID : '+ id + ' - '+ entryId);
      options = {
        host: hostname,
        port: 80,
        path: '/path/function2',
        method: 'PUT'
      };
      var post = http.request(options, function(json) {
        var body = '';
        json.on('data', function(d) {
          body += d;
        });
        json.on('end', function() {
          console.log('> DONE');
        });
      }).on('error', function(e) {
        console.log(e);
      });
      post.write(data);
      post.end();
    }
  });
}
You are looking for async/await.
Wrap your logic inside an async function; then you can await each promise as it resolves.
const request = require('request-promise')

async function foo (entries) {
  for (const entry of entries) {
    try {
      // await pauses the loop until this request has resolved
      let body = await request('http://localhost:8080/')
      // body contains your response data
    } catch (e) {
      console.error(e)
    }
  }
}

foo([/*data*/])
Just use the promisified version of the request module.
You can also use Promises to wait for your async code to finish.

function asyncCode(msg, cb) {
  setTimeout(function() { cb(msg); }, 1000);
}

var p1 = new Promise(function(resolve) {
  asyncCode("my asyncCode is running", resolve);
});

p1.then(function(msg) {
  console.log(msg);
}).then(function() {
  console.log("Hey I'm next");
});

console.log("SyncCode, Async code are waiting until I'm finished");

Recursive call in promise

Is there any easy way to do a recursive call using promises? Here is my sample.
function getData() {
  var result = [];
  var deferred = Q.defer();
  (function fetchData(pageno) {
    var options = {
      method: 'GET',
      url: 'example.com/test',
      qs: {
        pageNo: pageno
      }
    }
    request(options, function (error, response, body) {
      if (error) throw new Error(error);
      if (body.hasMorePage == true) { //checking is there next page
        result.push(body)
        fetchData(++body.pageno); // getting next page data
      } else {
        deferred.resolve(result); // promise resolve when there is no more page
      }
    });
  })(0);
  return deferred.promise;
}

getData().then(function(data) {
  console.log(data)
});
Let's say the API returns more data across consecutive calls. In order to collect all the data, I need to use a parameter (e.g. hasMorePage) from the previous call's response. I only need the recursive call to handle this scenario, but I would like to know a better (Promise-based) way.
Sure. You can avoid the recursion altogether with async/await and a plain loop; the request function below just simulates the API so the example is runnable:
async function request(options, callback) {
  // simulate server response of example.com/test with 1 second delay
  const totalNumberOfPages = 3;
  const pageNo = options.qs.pageNo;
  await new Promise(resolve => setTimeout(resolve, 1000));
  const hasMorePages = pageNo < totalNumberOfPages;
  const body = { hasMorePages };
  callback(void 0, { body }, body);
}
function getPage(pageNo) {
  const options = {
    method: 'GET',
    url: 'example.com/test',
    qs: { pageNo }
  };
  return new Promise((resolve, reject) => request(options, (error, response, body) => {
    console.log('response received', response);
    if (error) {
      // reject instead of throwing: a throw inside this callback would not reject the promise
      return reject(new Error(error));
    }
    resolve(body);
  }));
}
async function getData() {
  const result = [];
  for (let i = 1, hasMorePages = true; hasMorePages; i++) {
    const body = await getPage(i);
    result.push(body);
    hasMorePages = body.hasMorePages;
  }
  return result;
}

getData().then(data => console.log('RESULT', data));
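If you prefer to keep the recursive shape instead of a loop, each call can return a promise and chain onto the next page. A sketch reusing the getPage helper above:

function fetchAllPages(pageNo = 1, result = []) {
  // the recursion happens inside .then, so the chain only resolves once the last page arrives
  return getPage(pageNo).then(body => {
    result.push(body);
    return body.hasMorePages ? fetchAllPages(pageNo + 1, result) : result;
  });
}

fetchAllPages().then(data => console.log('RESULT', data));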

Chain Promises Node.js using Q Not Working

Pretty simple Node.js (console app) code:
var request = require("request");
var q = require("q");
var data = new Object();
var deferred = new q.defer();
var url1 = "https://www.google.com"
var url2 = "https://www.yahoo.com"
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
console.log('begin data1');
requestData(url1) //Promises seem to be working fine here
.then(function () { console.log('finished data1'); }) //At this point, we go back to async, and everything from here down executes at the same time.
.then(function () { console.log('begin data2'); })
.then(function () { requestData(url2); }) //Expected behavior is that it should pause here, and execute everything below after the request is complete.
.then(function () { console.log('finished data2'); })
.then(function () { console.log(data); })
.then(function () { console.log('finished write2'); })
.then(function () { console.log('operation completed!'); });
function requestData(url) {
console.log(url);
data = new Object();
console.log(data);
deferred = new q.defer();
console.log(deferred);
request({
url: url,
json: true
}, function (error, response, obj) {
if (!error && response.statusCode === 200) {
data = obj;
deferred.resolve();
console.log(deferred);
} else {
console.log('err');
}
});
return deferred.promise;
}
The problem is that after the first .then statement, everything executes at the same time. I want the chain to wait for the second requestData call the way it waits for the first.
Pardon the console logs; they are there for debugging purposes.
What am I doing wrong here?
The problem is that you are not returning a promise from inside the then callbacks. Try this:
...something.then(function() {
  return requestData(url2);
}).then(function() {
  // called after requestData has ended
});
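Applied to the chain from the question, that looks like this (a sketch; requestData itself should also create a fresh local deferred and data instead of reusing the module-level ones):

requestData(url1)
  .then(function () { console.log('finished data1'); })
  .then(function () { console.log('begin data2'); })
  .then(function () { return requestData(url2); }) // returning the promise makes the chain wait here
  .then(function () { console.log('finished data2'); })
  .then(function () { console.log(data); })
  .then(function () { console.log('operation completed!'); });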

Download Image with Node-Request with URL scan from CSV

Pardon me as the code is messy. I'm still learning.
I need to download images from URLs scanned from a CSV file. However, I have 2000+ URLs on the same domain, and I don't think the server will let me pull everything in one go, so I always get an error after some images. The problems I need to solve are: 1) how to make sure each image is downloaded completely before the code moves on to the next URL, and 2) how to write better code.
Your help is appreciated. Thank You
var csv = require('fast-csv');
var Promise = require('bluebird');
var fs = require('fs');
var request = require('request');

var path = "test.csv";

var promiseCSV = Promise.method(function(path, options) {
  return new Promise(function(resolve, reject) {
    var records = [];
    csv
      .fromPath(path, options)
      .on('data', function(record) {
        records.push(record);
      })
      .on('end', function() {
        resolve(records);
        console.log('done');
      });
  });
});

var download = function(uri, filename, callback){
  request.head(uri, function(err, res, body){
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};

promiseCSV(path).then(function (records) {
  for (i = 0; i < records.length; i++) {
    download(records[i][0], 'img/' + records[i][1], function() {
    });
  }
});
This will throttle your requests to one at a time. Another option is to use throttled-request to limit the number of requests per unit of time.
var i = 0;
promiseCSV(path).then(function (records) {
  next();
  function next() {
    download(records[i][0], 'img/' + records[i][1], function() {
      i++;
      if (i < records.length) next();
    });
  }
});
Also, your records variable is out of scope; you need to move it out in order to access it from elsewhere:
var records = []; // moved out to a wider scope so it can be accessed from elsewhere
var promiseCSV = Promise.method(function(path, options) {
  return new Promise(function(resolve, reject) {
    csv
      .fromPath(path, options)
      .on('data', function(record) {
        records.push(record);
      })
      .on('end', function() {
        resolve(records);
        console.log('done');
      });
  });
});
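An equivalent, arguably clearer way to run the downloads one at a time is to await a promisified version of download inside a loop. A sketch reusing the promiseCSV and download helpers from above (it relies on download invoking its callback once the write stream closes, as in the question):

promiseCSV(path).then(async function (records) {
  for (var i = 0; i < records.length; i++) {
    // wait for this image to finish writing before starting the next one
    await new Promise(function (resolve) {
      download(records[i][0], 'img/' + records[i][1], resolve);
    });
    console.log('downloaded', records[i][1]);
  }
});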
