Axios.all: how to configure an axios timeout to mitigate "socket hang up"? - node.js

My application uses an internal webservice to fetch data. I have a job that creates approximately 500 requests, which get fired asynchronously to complete the fetch operation.
I use Axios, building an array of axios promises and then resolving them using axios.all().
It works fine up to about 200 requests, but beyond that I get "socket hang up", even though on the server side I can see the requests are still being processed.
How do I configure axios to set a custom timeout? Or is it a better idea to splice my promises array and run the requests in multiple batches?
Source code
let getAxiosPromiseArray = (urlList) => {
    var axiosArrayofPromise = [];
    return new Promise((resolve, reject) => {
        try {
            urlList.forEach((URL) => {
                axiosArrayofPromise.push(axios.get(URL));
            });
            resolve(axiosArrayofPromise);
        }
        catch (err) {
            reject("There is a problem getting Axios array of promises " + err);
        }
    })
}
async function processAxiosPromises(PromiseArray) {
    try {
        var results = []
        results = await axios.all(PromiseArray);
        return results;
    }
    catch (err) {
        throw ("There was a problem resolving promises array (Axios) " + err);
    }
}
getallID().then((urlList) => {
    return getAxiosPromiseArray(urlList);
}).then((AxiosPromises) => {
    return processAxiosPromises(AxiosPromises);
}).then((resultData) => {
    console.log(resultData);
});
Error
There was a problem resolving promises array (Axios) Error: socket hang up

First, that pair of functions getAxiosPromiseArray() and processAxiosPromises() needs fixing.
Your new Promise() construction is unnecessary: axios.get() already returns a promise. You can simply return Promise.all(arrayofPromise) (or axios.all(...) if you must) and do away with the other function.
Renaming the remaining function to something meaningful, you would end up with, e.g.:
let getData = (urlList) => {
    return Promise.all(urlList.map(URL => axios.get(URL)))
        .catch(error => {
            error.message = "There is a problem getting Axios array of promises " + error.message; // augment the error message ...
            throw error; // ... and re-throw the error.
        });
};
And call it as follows:
getallID().then(getData)
    .then(resultData => {
        console.log(resultData);
    }).catch(error => {
        console.error(error);
    });
That will put you on solid ground but, on its own, is unlikely to fix a concurrency problem (if that's what it is), for which the simplest approach is to use Bluebird's Promise.map with the concurrency option.
The caller code can remain the same; just change getData() as follows:
let getData = (urlList) => {
    let concurrency = 10; // play with this value to find a reliable concurrency limit
    return Promise.map(urlList, URL => axios.get(URL), { 'concurrency': concurrency })
        .catch(error => {
            error.message = "There is a problem getting Axios array of promises " + error.message;
            throw error;
        });
};
// where `Promise` is Bluebird.
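As for the custom timeout asked about in the question: axios accepts a per-request timeout option, in milliseconds; a request that exceeds it is aborted and its promise rejects (with code ECONNABORTED). A minimal sketch combining it with the Bluebird version above — the 5000 ms value is only illustrative:
const axios = require('axios');
let getData = (urlList) => {
    let concurrency = 10;
    let timeoutMs = 5000; // illustrative value; tune it for your webservice
    return Promise.map(
        urlList,
        URL => axios.get(URL, { timeout: timeoutMs }), // reject slow requests instead of hanging
        { concurrency: concurrency }
    );
};
// where `Promise` is Bluebird, as above.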

const axios = require('axios');
const axiosThrottle = require('axios-throttle');
// pass the axios object and the delay between requests in ms
axiosThrottle.init(axios, 200);
const options = {
    method: 'GET',
};
const urlList = [
    'https://jsonplaceholder.typicode.com/todos/1',
    'https://jsonplaceholder.typicode.com/todos/2',
    'https://jsonplaceholder.typicode.com/todos/3',
    'https://jsonplaceholder.typicode.com/todos/4',
    'https://jsonplaceholder.typicode.com/todos/5',
    'https://jsonplaceholder.typicode.com/todos/6',
    'https://jsonplaceholder.typicode.com/todos/7',
    'https://jsonplaceholder.typicode.com/todos/8',
    'https://jsonplaceholder.typicode.com/todos/9',
    'https://jsonplaceholder.typicode.com/todos/10'
];
const promises = [];
const responseInterceptor = response => {
    console.log(response.data);
    return response;
};
// add an interceptor to work with each response separately as soon as it resolves
axios.interceptors.response.use(responseInterceptor, error => {
    return Promise.reject(error);
});
for (let index = 0; index < urlList.length; index++) {
    options.url = urlList[index];
    promises.push(axiosThrottle.getRequestPromise(options, index));
}
// runs when all promises are resolved
axios.all(promises).then(responses => {
    console.log(responses.length);
});
https://github.com/arekgotfryd/axios-throttle

Related

Chunking axios.get requests with a 1 second delay per chunk - presently getting 429 error

I have a script using axios that hits an API with a limit of 5 requests per second. At present my request array length is 72 and will grow over time. I receive an abundance of 429 errors. The responses per endpoint change with each run of the script; e.g. url1 on iteration 1 returns 429, then url1 on iteration 2 returns 200, url1 on iteration 3 returns 200, url1 on iteration 4 returns 429.
Admittedly, my understanding of async/await and promises is spotty at best.
What I understand:
I can have multiple axios.get calls running because of async. The variable I set in my main that uses the async function can include an await to ensure all requests have processed before continuing the script.
Promise.all can run multiple axios.get calls but, if a single request fails, the chain breaks and no more requests will run.
Because the API will only accept 5 requests per second, I have to chunk my axios.get requests into groups of 5 endpoints and wait for those to finish processing before sending the next chunk of 5.
setTimeout will assign a time limit to a single request; once the time is up the request is done and will not be sent again, no matter the return being other than 200.
setInterval will assign a time limit but will send the request again after time's up and keep requesting until it receives a 200.
async function main() {
    var endpoints = makeEndpoints(boards, whiteList); // returns an array of string API endpoints ['www.url1.com', 'www.url2.com', ...]
    var events = await getData(endpoints);
    ...
}
The getData() has seen many iterations in attempt to correct the 429's. Here are a few:
// will return the 200's sometimes and not others; I believe it's the timeout, but that won't attempt to hit a failed url (as I understand it)
async function getData(endpoints) {
    let events = [];
    for (x = 0; x < endpoints.length; x++) {
        try {
            let response = await axios.get(endpoints[x], { timeout: 2000 });
            if (response.status == 200 &&
                response.data.hasOwnProperty('_embedded') &&
                response.data._embedded.hasOwnProperty('events')
            ) {
                let eventsArr = response.data._embedded.events;
                eventsArr.forEach(event => {
                    events.push(event)
                });
            }
        } catch (error) {
            console.log(error);
        }
    }
    return events;
}
// returns a great many 429 errors via the setInterval; as I understand it, this sets a delay of N seconds before attempting the next call
async function getData(endpoints) {
    let data = [];
    let promises = [];
    endpoints.forEach((url) => {
        promises.push(
            axios.get(url)
        )
    })
    setInterval(function() {
        for (i = 0; i < promises.length; i += 5) {
            let requestArr = promises.slice(i, i + 5);
            axios.all(requestArr)
                .then(axios.spread((...res) => {
                    console.log(res);
                }))
                .catch(err => {
                    console.log(err);
                })
        }
    }, 2000)
}
// Here I hoped Promise.all would allow each request to do its thing and return the data, but after further reading I found that if a single request fails the rest will fail in the Promise.all
async function getData(endpoints) {
    try {
        const res = await Promise.all(endpoints.map(url => axios.get(url))).catch(err => {});
    } catch {
        throw Error("Promise failed");
    }
    return res;
}
// Returns so many 429's and only 3/4 of the data I know to expect
async function getData(endpoints) {
    const someFunction = () => {
        return new Promise(resolve => {
            setTimeout(() => resolve('222'), 100)
        })
    }
    const requestArr = endpoints.map(async data => {
        let waitForThisData = await someFunction(data);
        return axios.get(data)
            .then(response => { console.log(response.data) })
            .catch(error => console.log(error.toString()))
    });
    Promise.all(requestArr).then(() => {
        console.log('resolved promise.all')
    })
}
// Seems to get close to solving, but once an error is hit, Promise.all stops processing the endpoints
async function getData(endpoints) {
    (async () => {
        try {
            const allResponses = await Promise.all(
                endpoints.map(url => axios.get(url).then(res => console.log(res.data)))
            );
            console.log(allResponses[0]);
        } catch (e) {
            console.log(e);
            // handle errors
        }
    })();
}
It seems like I have so many relevant pieces but I cannot connect them in an efficient and working model. Perhaps axios has something completely unknown to me? I've also tried using Bluebird's concurrency option to limit the requests to 5 at a time, but that still returned 429's from axios.
I've been staring at this for days, and with so much new information swirling in my head I'm at a loss as to how to send 5 requests per second, await the responses, then send another set of 5 requests to the API.
Guidance/links/ways to improve upon the question would be much appreciated.
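For reference, a minimal sketch of the chunk-and-delay flow described above: send at most 5 requests, wait for all of them to settle (so a single 429 doesn't discard the whole batch), pause about a second, then continue. The delay helper and chunkSize are illustrative, and Promise.allSettled requires Node 12.9+:
const axios = require('axios');

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

async function getData(endpoints) {
    const chunkSize = 5; // the API allows 5 requests per second
    const results = [];
    for (let i = 0; i < endpoints.length; i += chunkSize) {
        const chunk = endpoints.slice(i, i + chunkSize);
        // allSettled never rejects, so one failed request doesn't break the batch
        const settled = await Promise.allSettled(chunk.map(url => axios.get(url)));
        settled.forEach(result => {
            if (result.status === 'fulfilled') results.push(result.value.data);
            else console.log(result.reason.toString()); // e.g. a 429; could be queued for a retry
        });
        if (i + chunkSize < endpoints.length) await delay(1000); // respect the per-second limit
    }
    return results;
}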

Mongoose promise never gets to .then()

I am using q, and I have multiple mongoose .exec() promises that never get to the .then() part of the code, which never allows the q promise to resolve. I can't figure out why it never comes back.
var defer = q.defer();
var promises = [];
console.log('Exams:', exams.length);
for (var e = 0; e < exams.length; e++) {
    console.log('Exams:', exams[e]._id);
    var newPromise = Pupilexam.find({ _exam: exams[e]._id }).populate('_user').exec()
        .then((pupils) => {
            console.log("Adding pupils", exams[e]._id);
            exams[e].pupils = pupils;
            resolve(exams[e]);
        })
        .catch((err) => {
            reject(err);
        });
    console.log(typeof newPromise);
    promises.push(newPromise);
    console.log("Promised pushed");
}
q.all(promises).then(function(data) {
    console.log("q'd all");
    defer.resolve(res.status(200).json(exams));
});
return defer;
The Pupilexam.find().exec() never reaches the .then() so the promises never resolve and the defer never resolves. Why would the mongoose find not get to the .then()? What have I missed?
*** UPDATE ***
Even using the built in promises, we get the same issue. The Pupilexams.find() call never comes back.
var promises = [];
for (var e = 0; e < exams.length; e++) {
    console.log('e:', e);
    console.log('Exam', exams[e]._id);
    var newPromise = Pupilexam.find({ _exam: exams[e]._id }).populate('_user').exec()
        .then((pupils) => {
            console.log("Adding pupils", exams[e]._id);
            exams[e].pupils = pupils;
        })
        .catch(handleError(res));
    promises.push(newPromise);
}
Promise.all(promises).then((exams) => {
    console.log(values);
    res.status(200).json(exams)
});
With this method I also get a headers error on the call: UnhandledPromiseRejectionWarning: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
** ADDITIONAL CODE REQUESTED **
function handleError(res, statusCode) {
    statusCode = statusCode || 500;
    return function(err) {
        console.log(err.message);
        res.status(statusCode).send(err);
    };
}
To answer the updated question regarding the Cannot set headers after they are sent to the client error: it looks like you send a response to the client inside your handleError function. If more than one Pupilexam.find call fails, handleError is invoked more than once, and the second res.status(...).send(...) triggers the mentioned error.
You should move the catch-handler down to the Promise.all call:
const promises = [];
for (const exam of exams) {
    const newPromise = Pupilexam
        .find({ _exam: exam._id }).populate('_user').exec()
        .then((pupils) => {
            exam.pupils = pupils;
        });
    promises.push(newPromise);
}
Promise.all(promises)
    .then((exams) => {
        res.status(200).json(exams);
    })
    .catch(handleError(res));
I guess that you are indeed returning your promise, but you are returning an empty json.
There are 2 problems with your approach:
You are not returning from your then: it should return pupils, but it returns undefined.
You are logging values, a variable that is never defined.
.then((pupils) => {
    console.log("Adding pupils", exams[e]._id);
    exams[e].pupils = pupils;
    // you should return something here // return pupils
})
promises.push(newPromise);
Promise.all(promises).then((exams) => {
    // ['undefined', 'undefined', ...]
    console.log(values);
    res.status(200).json(exams)
});
Looks like the answer was that, on these two lines, exams[e] is not what it should be: by the time each promise comes back, the loop has already moved on, so the var-declared e has ended up too high and the lookup was erroring.
    console.log("Adding pupils", exams[e]._id);
    exams[e].pupils = pupils;
Only discovered that when I read #eol's message about the catch and decided to catch the error properly and output it.
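For anyone hitting the same thing, a minimal sketch of the scoping fix, based on the loop from the question: declaring the index with let (block-scoped) gives every .then() callback its own copy of e, so it still points at the right exam when the promise resolves.
for (let e = 0; e < exams.length; e++) { // `let`, not `var`
    var newPromise = Pupilexam.find({ _exam: exams[e]._id }).populate('_user').exec()
        .then((pupils) => {
            exams[e].pupils = pupils; // `e` keeps this iteration's value
        });
    promises.push(newPromise);
}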
Here is how it looks from your code, rewritten with async/await:
// inside an (async) function
for (const exam of exams) {
    try {
        const pupils = await Pupilexam.find({ _exam: exam._id }).populate('_user').lean().exec();
        exam.pupils = pupils;
    } catch (err) {
        // handleError
    }
}
res.status(200).json({ data: exams })
Maybe that will show you where it went wrong.

Insert a document into multiple instances of CouchDB in Node JS, with all success and failure results, in any way possible

I have an array of DBs like
const dbArr = ["http://localhost:5984", "http://xyz_couchdb.com:5984"]
data to insert
let data = {
    _id: 324567,
    name: 'Harry',
    gender: 'male'
}
Here is the logic; I am using the nano module:
return new Promise((resolve, reject) => {
    let res = [];
    let rej = [];
    let counter = 0;
    for (let i = 0; i < dbArr.length; i++) {
        dbArr[i].insert(data, (err, body) => {
            err ? rej.push(err) : res.push(body)
            if (counter === obj.dbArray.length - 1) {
                rej.length ? reject(rej) : resolve(res)
            }
            counter++;
        })
    }
})
What would be the best possible way to achieve this, using promises, the async module, or anything else?
In the following example, we use Array.map to create one promise for each element of dbArr, then wait for all the promises to finish using Promise.all. The catch is there to handle errors.
function getAll(dbArr) {
    return Promise.all(dbArr.map(x => x.insert(data)));
}

getAll(dbArr)
    .then((rets) => {
        // Handle the returns
        // They are in an array
    })
    .catch((err) => {
        // Handle the error
    });
EDIT:
Ok, after checking out the documentation of node-couchdb (the one I suppose you use), I saw that the .insert() method does not return a Promise but only takes a callback.
So we have to transform the method so that it returns a Promise, using util.promisify():
const {
    promisify,
} = require('util');

function getAll(dbArr) {
    return Promise.all(dbArr.map(x => promisify(x.insert)(data)));
}

getAll(dbArr)
    .then((rets) => {
        // Handle the returns
        // They are in an array
    })
    .catch((err) => {
        // Handle the error
    });
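One caveat worth noting: Promise.all rejects as soon as any insert fails, while the question asks for all successes and failures together. On Node 12.9+ you could use Promise.allSettled instead, which resolves with one { status, value | reason } entry per DB; a sketch under that assumption:
function getAllSettled(dbArr) {
    // never rejects: each entry reports either 'fulfilled' or 'rejected'
    return Promise.allSettled(dbArr.map(x => promisify(x.insert)(data)));
}

getAllSettled(dbArr)
    .then((results) => {
        const succeeded = results.filter(r => r.status === 'fulfilled').map(r => r.value);
        const failed = results.filter(r => r.status === 'rejected').map(r => r.reason);
        console.log(succeeded, failed); // every DB is accounted for, success or failure
    });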

Angular : Receive responses in order with the calls

Hi, I am pretty new to Angular and Observables.
I am trying to GET objects by their IDs through a loop, but I don't receive my responses in order.
Example:
get ID(1)
get ID(2)
get ID(3)
Receive Object ID(2)
Receive Object ID(3)
Receive Object ID(1)
Is it possible to get my objects back in order?
Below is where I call my service function multiple times:
conferences-attendance.component.ts
ExportExcelAttendance() {
    for (var i = 0; i < this.contactsAttendance.length; i++) {
        this.practiceService.GetPracticebyDBID(this.contactsAttendance[i].practiceId)
            .subscribe(
                (practice: Practice) => {
                    this.practicesAttendance.push(practice);
                    if (this.practicesAttendance.length == this.contactsAttendance.length) {
                        this.ExportExcelAttendance2();
                    }
                },
                error => this.errorMessage = <any>error
            );
    }
}
Here is the function in my service; it's where I receive the data (not in order with the calls).
practices.service.ts
GetPracticebyDBID(id: string) {
    let params: URLSearchParams = new URLSearchParams();
    params.set('thisId', id);
    let requestOptions = new RequestOptions();
    requestOptions.params = params;
    return this.http.get('http://ec2-34-231-196-71.compute-1.amazonaws.com/getpractice', requestOptions)
        .map((response: Response) => {
            return response.json().obj;
        })
        .catch((error: Response) => Observable.throw(error.json()));
}
forkJoin gives you a little less code:
const arrayOfFetches = this.contactsAttendance
    .map(attendee => this.practiceService.GetPracticebyDBID(attendee.practiceId));

Observable.forkJoin(...arrayOfFetches)
    .subscribe((practices: Practice[]) => {
        this.practicesAttendance = practices;
        this.ExportExcelAttendance2();
    });
Edit
Snap! #Anas beat me to it. Although I don't think you need the concatAll().
You should use the concatAll operator to ensure your observables are subscribed to in sequence.
Also, you can use the completed callback to call ExportExcelAttendance2, instead of checking practicesAttendance's length in every response callback.
Check the example below:
let contactsAttendanceObservables = this.contactsAttendance
    .map((item) => {
        return this.practiceService.GetPracticebyDBID(item.practiceId);
    });

Observable.of(...contactsAttendanceObservables)
    .concatAll()
    .subscribe(
        (practice: Practice) => {
            this.practicesAttendance.push(practice);
        },
        (err) => {
            // handle any errors.
        },
        () => {
            // completed
            this.ExportExcelAttendance2();
        }
    );
If you still want your observables to run in parallel, you can use the forkJoin operator, which will emit the last value of each of the passed observables to a single subscriber once all the observables are completed.
Check the example below:
let contactsAttendanceObservables = this.contactsAttendance
    .map((item) => {
        return this.practiceService.GetPracticebyDBID(item.practiceId);
    });

Observable.forkJoin(...contactsAttendanceObservables)
    .subscribe(
        (practices: Practice[]) => {
            this.practicesAttendance = practices;
            this.ExportExcelAttendance2();
        }
    );
The forkJoin operator is simple to use. It waits until all the observables complete, then emits an array of the last value from each, in the same order as the inputs.
ExportExcelAttendance() {
    const all = this.contactsAttendance.map(it => this.practiceService.GetPracticebyDBID(it.practiceId));
    Rx.Observable.forkJoin(all)
        .subscribe(
            practicesAttendance => this.ExportExcelAttendance2(practicesAttendance),
            error => this.errorMessage = <any>error);
}

Handling exceptions within recursive promise

I'm trying both to handle a paginated API and to retry when throttled for too many requests. The pagination is handled by recursing if nextToken is present in the response object. I'm hoping to catch a throttling exception and effectively start the whole request over by recursing without passing the token. This is my current code:
function getAllExecHist(execArn) {
    var sfn = new AWS.StepFunctions();
    sfn = Promise.promisifyAll(sfn);
    execHists = [];
    return new Promise(function(resolve, reject) {
        function getExecHist(nextToken) {
            params = {};
            params.executionArn = execArn;
            if (nextToken !== undefined) {
                params.nextToken = nextToken;
            }
            sfn.getExecutionHistoryAsync(params)
                .then(function(results) {
                    execHists = execHists.concat(results.events);
                    if (!results.nextToken) {
                        resolve(execHists);
                    }
                    else {
                        getExecHist(results.nextToken);
                    }
                })
                .catch(function(e) {
                    console.log('caught this: ', e);
                    console.log('retrying');
                    return new Promise(function(res, rej) {
                        console.log('Sleeping');
                        setTimeout(function() {
                            execHists = [];
                            res(getExecHist());
                        }, random(100, 10000));
                    });
                })
        }
        getExecHist();
    });
}
The recursion was handling pagination without issue, but since adding the catch, it simply never returns. Any ideas what I'm doing wrong / how to fix?
The AWS SDK supports promises, and you can configure Bluebird as its promise library.
const Promise = require('bluebird');
const AWS = require('aws-sdk');
AWS.config.setPromisesDependency(Promise);
const sfn = new AWS.StepFunctions();
Use Promise.delay() instead of setTimeout.
Try and avoid creating new promises if functions are already returning them. Only wrap a promise in new Promise if you have a lot of synchronous code that might throw an error or needs to resolve the promise early.
The following also avoids the extra function and nested scope by passing values between function calls.
function getExecHist(execArn, execHists, nextToken) {
    let params = {};
    params.executionArn = execArn;
    if (nextToken !== undefined) params.nextToken = nextToken;
    if (execHists === undefined) execHists = [];
    return sfn.getExecutionHistory(params).promise()
        .then(results => {
            execHists = execHists.concat(results.events);
            if (!results.nextToken) return execHists;
            return getExecHist(execArn, execHists, results.nextToken);
        })
        .catch(e => {
            console.log('caught this: ', e);
            console.log('retrying');
            // note: calling getExecHist(execArn) alone restarts from the first page
            return Promise.delay(random(100, 10000))
                .then(() => getExecHist(execArn));
        })
}
Eventually you should be specific about which errors you retry on, and include a retry count or time limit too.
Also note that this is the wrong way to retry a rate-limit issue, as it starts again from the beginning. A rate-limit retry should continue from where it left off; otherwise you are just adding to your rate-limit problems.
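A minimal sketch of that idea, reusing the same function from above (and the same assumed random() helper): on a caught error, keep the events gathered so far and retry with the same nextToken, so the retry resumes from the page that failed instead of starting over.
function getExecHist(execArn, execHists, nextToken) {
    let params = { executionArn: execArn };
    if (nextToken !== undefined) params.nextToken = nextToken;
    if (execHists === undefined) execHists = [];
    return sfn.getExecutionHistory(params).promise()
        .then(results => {
            execHists = execHists.concat(results.events);
            if (!results.nextToken) return execHists;
            return getExecHist(execArn, execHists, results.nextToken);
        })
        .catch(e => {
            console.log('caught this: ', e);
            console.log('retrying from the current page');
            return Promise.delay(random(100, 10000))
                // pass along the accumulated events and the same token,
                // rather than discarding progress and re-fetching every page
                .then(() => getExecHist(execArn, execHists, nextToken));
        });
}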
