How to add delay in nodejs - node.js

i am calling a 3rd party API in a loop in my nodejs application. Basically I have a list, am iterating through the list and calling the 3rd party API.
The 3rd party API is very slow and cannot handle more than 3 requests. I have been advised to add some delay.
Please can someone advise how to add delay in this scenario.
var promises = [];
promises = rids.map((rid,j) => {
// 3rd party API
// getServiceDetailsApi is wrapper around 3rd party API
return getServiceDetailsApi(rid)
});
// console.log(promises);
Promise.all(promises)
.then(res => {
// console.log('promise complete..' + res.length)
var responses = [];
res.map((response,i) => {
var serviceAttributesDetail = {};
// console.log(response);
serviceAttributesDetails = response.data.serviceAttributesDetails;
serviceAttributesDetail.rid = serviceAttributesDetails.rid;
responses = responses.concat(serviceAttributesDetail);
})
// Add more logic above
return Promise.all(responses);
})

If one request at a time is enough, you can try this way:
'use strict';
(async function main() {
try {
const responses = [];
for (const rid of rids) {
const response = await getServiceDetailsApi(rid);
responses.push({ rid: response.data.serviceAttributesDetails.rid });
}
console.log(responses);
} catch (err) {
console.error(err);
}
})();

If your restriction is about having a maximum of 3 concurrent requests to that API, here is a possibility (untested though, there might be typos, and I didn't think the rejection handling):
const cfgMaxApiCalls = 3;
...
function getServiceDetailsApi() {...}
...
const rids = ...
...
const promisedResponses = new Promise((generalResolve) => {
let currentCalls = 0; // to know how many calls in progress
const responses = []; // the output of the general promise
// this chains promises, ensuring we do an API call only when another one finished
const consumer = (response) => {
responses.push(response); // first remember the data
// stop condition: nothing more to process, and all current calls have resolved
if (!rids.length && !currentCalls--) {
return generalResolve(responses);
}
// otherwise make a new call since this one's done
return getServiceDetailsApi(rids.shift()).then(consumer);
};
// start the process for maximum `cfgMaxApiCalls` concurrent calls
for (; currentCalls < cfgMaxApiCalls && rids.length; currentCalls++) {
getServiceDetailsApi(rids.shift()).then(consumer);
}
});
promisedResponses.then((res) => {
// here `res` === your code's `res`
// and by the way, Array.prototype.concat is not asynchronous,
// so no need to Promise.all(responses) at the end ;)
});

Related

Koa API server - Wait until previous request is processed before processing a new request

I'm building an API in Node with Koa which uses another API to process some information. A request comes in to my API from the client and my API does some different requests to another API. Problem is, the other API is fragile and slow so to guarantee data integrity, I have to check if there is no previous incoming request being processed, before starting a new process. My first idea was to use promises and a global boolean to check if theres an ongoing processing and await until the process has finished. Somehow this prevents concurrent requests but even if 3-4 requests come in during the process, only the first one is done and that is it. Why are the rest of the incoming requests forgotten ?
Edit: As a side note, I do not need to respond to the incoming request with processed information. I could send response right after the request is recieved. I need to do operations with the 3rd party API.
My solution so far:
The entry point:
router.get('/update', (ctx, next) => {
ctx.body = 'Updating...';
update();
next();
});
And the update function:
let updateInProgress = false;
const update = async () => {
const updateProcess = () => {
return new Promise((resolve, reject) => {
if (!updateInProgress) {
return resolve();
} else {
setTimeout(updateProcess, 5000);
}
});
};
await updateProcess();
updateInProgress = true;
// Process the request
updateInProgress = false
}
Ok, I found a working solution, not sure how elegant it is tough...
I'm guessing the problem was, that new Promise was created with the Timeout function, and another one, and another one until one of them was resolved. That did not resolve the first Promise tough and the code got stuck. The solution was to create an interval which checked if the condition is met and then resolve the Promise. If someone smarter could comment, I'd appreciate it.
let updateInProgress = false;
const update = async () => {
const updateProcess = () => {
return new Promise((resolve, reject) => {
if (!updateInProgress) {
return resolve();
} else {
const processCheck = setInterval(() => {
if (!updateInProgress) {
clearInterval(processCheck);
return resolve();
}
}, 5000);
}
});
};
await updateProcess();
updateInProgress = true;
// Process the request
updateInProgress = false
}

function delaying on node.js

Im on a big problem, I need to do an application that gets trello content as soon as i can but i dont know why my for isnt working as it should do. when i the output of this should be the card id and when the function is called it should show the member. I dont know why, but when the 'miembro' function is called, it delays and it is shown after the second id, so its delayed a lot and i need them to show one under the other. I appreciate a quick answer, thank you!
const trelloKB = require("trello-kb");
const fetch = require('node-fetch');
// Replace this by the application key of your Trello account
var appKey = '51501902fff527d305686a29d6d61cfa';
// Replace this by a valid authorization token
var authToken = '9828f5f03073ae52ffdae77bdf49c939df8a315b169cb81aeb42a3d43d0f9e21';
function sleep(ms) {
return new Promise((resolve) => {
setTimeout(resolve, ms);
});
}
function miembros (id){
fetch('https://api.trello.com/1/cards/'+id+'/members?key=51501902fff527d305686a29d6d61cfa&token=9828f5f03073ae52ffdae77bdf49c939df8a315b169cb81aeb42a3d43d0f9e21&fields=fullName', {
method: 'GET'
})
.then(response => {
setTimeout(function() {
return(response.text());
}, 3000);
})
.then(text => console.log(text))
.catch(err => console.error(err));
}
trelloKB.get(appKey, authToken, '33CP31Sf').then(
function (cards) {
// Print the title of each card
var ms = 3000;
for(i=0; i<2; i++){
var card = cards[0];
var id = card.id;
var titleCard = card.title;
console.log(id);
miembros(id);
}
},
);
I think you should learn about synchronous and asynchronous concept
You need to use async-await and return a promise in your function miembros()
read this async await and promise.
this is my example
const cards =[{id:1,title:'kimiwo'},{id:2,title:'namae wa'},{id:3,title:'udiiiin'}];
yourname(cards);
async function yourname (cards) {
for(let card of cards){
console.log(`id :${card.id},text:${card.title}`);
let result = await(await miembros(card.id)).text();
console.log(result);
}
}
function miembros(id){
return fetch('https://api.trello.com/1/cards/'+id+'/members?key=51501902fff527d305686a29d6d61cfa&token=9828f5f03073ae52ffdae77bdf49c939df8a315b169cb81aeb42a3d43d0f9e21&fields=fullName')
}
you can see the result here
*Edit
fetch returns promise, so you can just return fetch and wrap your function with async-await
It’s delayed because you need to use a sync await or .then
You need to get the first Id first, then do a .then to get the second ID through the function call.
Also, you shouldn’t show you API keys, they’re supposed to be private lol

NodeJS: make less requests by API

I am trying to process response data and do not make next request before current data didn't processed. I tried use async/await and generators.
Generator:
private *readData() {
const currentUrl = this.getUrl();
const requestSettings = this.getRequestSettings();
yield axios.get( currentUrl, requestSettings).then( (response: any) => {
console.log('Make request');
return this.getData(response);
});
}
*readItem() {
let responseData: any;
if (!responseData) {
const response = this.readData();
responseData = response.next();
}
console.log('res data:', responseData['value']);
yield responseData['value'].then((res: any) => {
return res;
});
}
and then in the main code I do next:
for (let i=0;i<10;i++) {
item = transport.readItem().next();
console.log("R:", item);
}
Another idea was using async/await
async readItems() {
const headers = this.settings.headers;
const response = await axios.get( url, {
headers: headers
});
return response.data;
}
But in this case I get a promise in the response, if I just try to call this method 10 times.
I read 10 items, but I still make 10 requests to the server. Is it possible make one request, processed 10 items, and then make the second request? Maybe I have to use another pattern or whatever.
Async/await is right approach, just put await in front of readItem() and the Promise you get will be awaited that will give you desired. If your loop is in top level use readItem().then(). The latest NodeJS version allows await in top level.
for (let i=0;i<10;i++) {
item = await transport.readItem();
console.log("R:", item);
}
I found next solution
(async () => {
for (let i = 0; i < 10; i++) {
item = await transport.readItem();
console.log("R:", item);
}
})();
because I ran this part of code inside script without any functions/methods

Synchronously iterate through firestore collection

I have a firebase callable function that does some batch processing on documents in a collection.
The steps are
Copy document to a separate collection, archive it
Run http request to third party service based on data in document
If 2 was successful, delete document
I'm having trouble with forcing the code to run synchronously. I can't figure out the correct await syntax.
async function archiveOrders (myCollection: string) {
//get documents in array for iterating
const currentOrders = [];
console.log('getting current orders');
await db.collection(myCollection).get().then(querySnapshot => {
querySnapshot.forEach(doc => {
currentOrders.push(doc.data());
});
});
console.log(currentOrders);
//copy Orders
currentOrders.forEach (async (doc) => {
if (something about doc data is true ) {
let id = "";
id = doc.id.toString();
await db.collection(myCollection).doc(id).set(doc);
console.log('this was copied: ' + id, doc);
}
});
}
To solve the problem I made a separate function call which returns a promise that I can await for.
I also leveraged the QuerySnapshot which returns an array of all the documents in this QuerySnapshot. See here for usage.
// from inside cloud function
// using firebase node.js admin sdk
const current_orders = await db.collection("currentOrders").get();
for (let index = 0; index < current_orders.docs.length; index++) {
const order = current_orders.docs[index];
await archive(order);
}
async function archive(doc) {
let docData = await doc.data();
if (conditional logic....) {
try {
// await make third party api request
await db.collection("currentOrders").doc(id).delete();
}
catch (err) {
console.log(err)
}
} //end if
} //end archive
Now i'm not familiar with firebase so you will have to tell me if there is something wrong with how i access the data.
You can use await Promise.all() to wait for all promises to resolve before you continue the execution of the function, Promise.all() will fire all requests simultaneously and will not wait for one to finish before firing the next one.
Also although the syntax of async/await looks synchronous, things still happen asynchronously
async function archiveOrders(myCollection: string) {
console.log('getting current orders')
const querySnapshot = await db.collection(myCollection).get()
const currentOrders = querySnapshot.docs.map(doc => doc.data())
console.log(currentOrders)
await Promise.all(currentOrders.map((doc) => {
if (something something) {
return db.collection(myCollection).doc(doc.id.toString()).set(doc)
}
}))
console.log('copied orders')
}

Batching and Queuing API calls in Node

I am hitting an API that takes in addresses and gives me back GPS coordinates. The API only accepts a single address, but it can handle 50 live connections at any given time. I am trying to build a function that will send 50 requests, wait until they all return and send 50 more. Or send 50 request and send the next one as a previous is returned. Below is the code I have been working with, but I am stuck.
One issue is in batchFunct. The for loop sends all the API calls, doesn’t wait for them to come back, then runs the if statement before updating returned. This makes since considering the asynchronicity of Node. I tried to put an await on the API call, but that seemingly stops all the async process (anyone have clarification on this) and effectively makes it send the requests one at a time.
Any advice on adapting this code or on finding a better way of batching and queuing API requests?
const array = ['address1', 'address2', 'address3', 'address4', '...', 'addressN']
function batchFunc(array) {
return new Promise(function (resolve, reject) {
var returned = 1
for (let ele of array) {
apiCall(ele).then(resp => { //if but an await here it will send one at a time
console.log(resp)
returned++
})
};
if (returned == array.length) {
resolve(returned);
}
})
}
async function batchCall(array) {
while (array.length > 0) {
let batchArray = []
if (array.length > 50) {
for (let i = 0; i < 50; i++) {
batchArray.push(array[0])
array.splice(0, 1)
}
} else {
batchArray = array
array = []
}
let result = await batchFunc(batchArray);
console.log(result);
}
}
batchCall(array)
I ended up using the async.queue, but I am still very interested in any other solutions.
const array = ['address1', 'address2', 'address3', 'address4', 'address5', 'address6']
function asyncTime(value) {
return new Promise(function (resolve, reject) {
apiCall(ele).then(resp => {
resolve(resp)
})
})
}
function test(array) {
var q = async.queue(async function(task, callback) {
console.log(await asyncTime(task))
if(callback) callback()
}, 3);
q.push(array, function(err) {
if (err) {
console.log(err)
return
}
console.log('finished processing item');
});
}

Resources