Querying WordPress WP-API for lots of posts (using promises!) - node.js

I am working on a Node.js application which uses the WordPress JSON API as a kind of headless CMS. When the application spins up, we query out to the WP database and pull in the information we need (using Axios), manipulate it, and store it temporarily.
Simple enough - but one of our post categories in the CMS has a rather large number of entries. For some godforsaken reason, WordPress caps the API at a maximum of 99 posts per request, which means we have to write a loop that sends concurrent API requests until all the data has been pulled.
For instance, if we have 250 posts of some given type, I need to hit that route three separate times, specifying which "page" of data I want each time.
Per the docs, https://developer.wordpress.org/rest-api/using-the-rest-api/pagination/, I have access to a ?page= query string that I can use to send these requests concurrently (e.g. ...&page=2).
I also have access to X-WP-Total in the headers object, which gives me the total number of posts within the given category.
However, these API calls are part of a nested promise chain, and the whole process needs to return a promise I can continue chaining off of.
The idea is to make it dynamic so it will always pull all of the data, and return it as one giant array of posts. Here's what I have, which is functional:
const request = require('axios');

module.exports = (request_url) => new Promise((resolve, reject) => {
  // START WITH SMALL ARBITRARY REQUEST TO GET TOTAL NUMBER OF POSTS FAST
  request.get(request_url + '&per_page=1').then(
    (apiData) => {
      // SETUP FOR PROMISE.ALL()
      let promiseArray = [];
      // COMPUTE HOW MANY REQUESTS WE NEED
      // ALWAYS ROUND TOTAL NUMBER OF PAGES UP TO GET ALL THE DATA
      const totalPages = Math.ceil(apiData.headers['x-wp-total'] / 99);
      for (let i = 1; i <= totalPages; i++) {
        promiseArray.push(request.get(`${request_url}&per_page=99&page=${i}`));
      }
      resolve(
        Promise.all(promiseArray)
          .then((resolvedArray) => {
            // PUSH IT ALL INTO A SINGLE ARRAY
            let compiledPosts = [];
            resolvedArray.forEach((axios_response) => {
              // AXIOS MAKES US ACCESS W/RES.DATA
              axios_response.data.forEach((post) => {
                compiledPosts.push(post);
              });
            });
            // RETURN AN ARRAY OF ALL POSTS REGARDLESS OF LENGTH
            return compiledPosts;
          })
          .catch((e) => { console.log('ERROR'); reject(e); })
      );
    }
  ).catch((e) => { console.log('ERROR'); reject(e); });
});
Any creative ideas to make this pattern better?
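One possible cleanup, sketched with async/await under the same assumptions as the code above (Axios, the same request_url query-string shape; Array.prototype.flatMap needs Node 11+): it drops the nested new Promise() wrapper so the exported function can be awaited or chained directly.
const axios = require('axios');

// A hedged sketch, not a drop-in guarantee: same probe-then-fan-out pattern as above.
module.exports = async (request_url) => {
  // Small probe request just to read X-WP-Total from the headers
  const probe = await axios.get(`${request_url}&per_page=1`);
  const totalPages = Math.ceil(probe.headers['x-wp-total'] / 99);

  // Fire all page requests concurrently
  const pages = await Promise.all(
    Array.from({ length: totalPages }, (_, i) =>
      axios.get(`${request_url}&per_page=99&page=${i + 1}`)
    )
  );

  // Flatten every page's .data array into one array of posts
  return pages.flatMap((response) => response.data);
};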

I have exactly the same question. In my case, I use Vue Resource:
this.$resource('wp/v2/media').query().then((response) => {
  let pagesNumber = Math.ceil(response.headers.get('X-WP-TotalPages'));
  for (let i = 1; i <= pagesNumber; i++) {
    this.$resource('wp/v2/media?page=' + i).query().then((response) => {
      this.medias.push(response.data);
      this.medias = _.flatten(this.medias);
      console.log(this.medias);
    });
  }
});
I'm pretty sure there is a better workaround to achieve this.
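One possible improvement, sketched under the assumption that vue-resource's query() returns a promise as used above (and with the same lodash _ helper): collect the page requests and combine them with Promise.all, so this.medias is assigned once, in page order.
// Hedged sketch: fan out the page requests, then flatten all responses at once.
this.$resource('wp/v2/media').query().then((response) => {
  const pagesNumber = Math.ceil(response.headers.get('X-WP-TotalPages'));
  const requests = [];
  for (let i = 1; i <= pagesNumber; i++) {
    requests.push(this.$resource('wp/v2/media?page=' + i).query());
  }
  return Promise.all(requests);
}).then((responses) => {
  // One flat array of media items, assigned in a single step
  this.medias = _.flatten(responses.map((r) => r.data));
});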

Related

Nodejs - Fire multiple API calls while limiting the rate and wait until they are all done

My issues
Launch 1000+ calls to an online API that limits the number of API calls to 10 calls/sec.
Wait for all the API calls to give back a result (or retry); it can take 5 sec before the API sends its data
Use the combined data in the rest of my app
What I have tried while looking at a lot of different questions and answers here on the site
Use a promise to wait for one API request
const https = require("https");

function myRequest(param) {
  const options = {
    host: "api.xxx.io",
    port: 443,
    path: "/custom/path/" + param,
    method: "GET"
  };
  return new Promise(function(resolve, reject) {
    https.request(options, function(result) {
      let str = "";
      result.on('data', function(chunk) { str += chunk; });
      result.on('end', function() { resolve(JSON.parse(str)); });
      result.on('error', function(err) { console.log("Error: ", err); });
    }).end();
  });
}
Use Promise.all to do all the requests and wait for them to finish
const params = [{item: "param0"}, ... , {item: "param1000+"}]; // imagine 1000+ items
const promises = [];
params.map(function(param) {
  promises.push(myRequest(param.item));
});
const result = Promise.all(promises).then(function(data) {
  // doing some funky stuff with data
});
So far so good, sort of
It works when I limit the number of API requests to a maximum of 10, because then the rate limiter never kicks in. When I console.log(promises), it gives back an array of 'request'.
I have tried to add setTimeout in different places, like:
...
params.map(function(param) {
  promises.push(setTimeout(function() {
    myRequest(param.item);
  }, 100));
});
...
But that does not seem to work. When I console.log(promises), it gives back an array of 'function'.
My questions
Now I am stuck ... any ideas?
How do I build in retries when the API gives an error?
Thank you for reading up to here; you are already a hero in my book!
When you have a complicated control flow, using async/await helps a lot to clarify the logic of the flow.
Let's start with the following simple algorithm to limit everything to 10 requests per second:
make 10 requests
wait 1 second
repeat until no more requests
For this the following simple implementation will work:
async function rateLimitedRequests(params) {
  let results = [];
  while (params.length > 0) {
    let batch = [];
    for (let i = 0; i < 10; i++) {
      // Use shift() instead of pop() if you want to
      // process the params in their original order.
      let thisParam = params.pop();
      if (thisParam) {
        batch.push(myRequest(thisParam.item));
      }
    }
    results = results.concat(await Promise.all(batch));
    await delayOneSecond();
  }
  return results;
}
Now we just need to implement the one second delay. We can simply promisify setTimeout for this:
function delayOneSecond() {
  return new Promise(ok => setTimeout(ok, 1000));
}
This will definitely give you a rate limiter of just 10 requests each second. In fact it performs somewhat slower than that, because each batch will execute in request time + one second. This is perfectly fine and already meets your original intent, but we can improve it to squeeze in a few more requests and get as close as possible to exactly 10 requests per second.
We can try the following algorithm:
remember the start time
make 10 requests
compare end time with start time
delay one second minus request time
repeat until no more requests
Again, we can use almost exactly the same logic as the simple code above but just tweak it to do time calculations:
const ONE_SECOND = 1000;
async function rateLimitedRequests(params) {
  let results = [];
  while (params.length > 0) {
    let batch = [];
    let startTime = Date.now();
    for (let i = 0; i < 10; i++) {
      let thisParam = params.pop();
      if (thisParam) {
        batch.push(myRequest(thisParam.item));
      }
    }
    results = results.concat(await Promise.all(batch));
    let endTime = Date.now();
    let requestTime = endTime - startTime;
    let delayTime = ONE_SECOND - requestTime;
    if (delayTime > 0) {
      await delay(delayTime);
    }
  }
  return results;
}
Now, instead of hardcoding the one second delay function, we can write one that accepts a delay period:
function delay(milliseconds) {
  return new Promise(ok => setTimeout(ok, milliseconds));
}
We have here a simple, easy-to-understand function that will rate limit as close as possible to 10 requests per second. It is rather bursty, in that it makes 10 parallel requests at the beginning of each one-second period, but it works. We could of course keep implementing more complicated algorithms to smooth out the request pattern, but I leave that to your creativity and as homework for the reader.
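The question also asked how to build in retries. A minimal sketch of a retry wrapper, reusing the delay() and ONE_SECOND definitions above; note that it assumes myRequest rejects on failure, which the version in the question does not yet do (it only logs errors), and the retry count and linear backoff are arbitrary choices:
// Hedged sketch of per-request retries with linear backoff.
async function requestWithRetry(param, retries = 3) {
  for (let attempt = 1; attempt <= retries; attempt++) {
    try {
      return await myRequest(param);
    } catch (err) {
      if (attempt === retries) throw err;  // out of attempts: give up
      await delay(attempt * ONE_SECOND);   // back off before the next attempt
    }
  }
}

// Inside rateLimitedRequests, swap in:
// batch.push(requestWithRetry(thisParam.item));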

How to process large number of requests with promise all

I have about 5000 links and I need to crawl all of them, so I'm wondering whether there is a better approach than this. Here is my code.
let urls = [ 5000 urls go here ];

const doms = await getDoms(urls);
// processing and storing the doms

getDoms = async (urls) => {
  let data = await Promise.all(urls.map(url => {
    return getSiteCrawlPromise(url);
  }));
  return data;
}

getSiteCrawlPromise = (url) => {
  return new Promise((resolve, reject) => {
    let j = request.jar();
    request.get({url: url, jar: j}, function(err, response, body) {
      if (err)
        return resolve({ body: null, jar: j, error: err });
      return resolve({ body: body, jar: j, error: null });
    });
  });
}
Is there a mechanism implemented in promises that can divide the job across multiple threads, process it, and then return the output as a whole?
And I don't want to divide the urls into smaller fragments and process those fragments.
The Promise object represents the eventual completion (or failure) of an asynchronous operation, and its resulting value.
There is no in-built mechanism in Promises to "divide jobs into multiple threads and process". If you must do that, you'll have to fragment the urls array into smaller arrays and queue the fragmented arrays onto separate crawler instances simultaneously.
But there is absolutely no need to go that way. Since you're using node.js, you can use node-crawler and its maxConnections option. This is what it was built for, and the end result would be the same: you'll be crawling the urls over multiple connections, without wasting time and effort on manual chunking and handling of multiple crawler instances, or depending on any concurrency libraries.
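A hedged sketch of what that could look like with node-crawler (the npm "crawler" package); treat the option names as assumptions to check against its docs rather than a drop-in replacement for the request-based code above:
const Crawler = require('crawler');

const c = new Crawler({
  maxConnections: 10, // crawl at most 10 urls at a time
  callback: (error, res, done) => {
    if (error) {
      console.error(error);
    } else {
      // res.body holds the fetched page here
    }
    done(); // free this connection slot for the next url
  }
});

c.queue(urls); // enqueue all 5000 urls; the crawler handles the throttling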
There isn't such a mechanism built into JavaScript, at least right now.
You can use third-party Promise libraries that offer more features, like Bluebird, in which you can make use of their concurrency feature:
const Promise = require('bluebird');

// Crawl all URLs, with 10 concurrent "threads".
Promise.map(arrayOfUrls, url => {
  return /* promise for crawling the url */;
}, { concurrency: 10 });
Another option is to use a dedicated throttling library (I highly recommend bottleneck), which lets you express any generic kind of rate limit. The syntax in that case would be similar to what you already have:
const Bottleneck = require('bottleneck');
const limit = new Bottleneck({ maxConcurrent: 10 });

const getSiteCrawlPromise = limit.wrap(url => {
  // the body of your getSiteCrawlPromise function, as normal
});

// getDoms stays exactly the same
You can solve this problem yourself, but bringing in one (or both!) of the libraries above will save you a lot of code.

Firestore cloud function asynchronous execution with promise

I have an orders collection and a products collection in my application. The user can have multiple products in a single order. What I want to do is calculate the amount for each product by reading through the products collection, and then perform further actions. Below is what I have as of now.
exports.myfunc = functions.firestore.document('collection/{collid}')
  .onCreate(event => {
    let data = event.data.data();
    const products = data.products;
    const prices = [];
    _.each(products, (data1, index) => {
      const weight = data1.weight;
      const isLess = data1.isLess;
      firebaseAdmin.firestore().collection('collection').doc(data1.productId).onSnapshot(data2 => {
        let amount = weight === '1/2' ? data2.data().price1 : data2.data().price1 * weight;
        amount += isLess ? 50 : 0;
        prices.push(amount);
      });
    });
    //Do some task after _.each with new total
  });
But I am not able to make this run sequentially, so that I can store the actual amount for each product against its order and calculate the total to store in the document.
Could anyone please tell me how to achieve the above scenario? How can I work with promises and callbacks here?
You can map the products array to promises, like this:
var productPromises = products.map(product => {
  return new Promise((resolve, reject) => {
    firebaseOperation()...onSnapshot(resolve)
  })
})

Promise.all(productPromises).then(results => {
  // process all results at once
})
First, don't use onSnapshot() with Cloud Functions. That attaches a listener that stays listening indefinitely, until you remove it. That's not what you want at all, because functions can't execute indefinitely.
Instead, use get(), which returns a promise that resolves when the fetch is complete.
Also, you could consider accumulating all the documents you want to access into an array and use getAll() (with the spread operator on the array) to fetch them all.
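A hedged sketch of how that could look with getAll() and the spread operator, reusing the names and pricing logic from the question and assuming products is an array; treat it as an illustration of the approach, not a tested function:
// Minimal sketch: fetch all product docs in one round trip, then total the prices.
exports.myfunc = functions.firestore.document('collection/{collid}')
  .onCreate(event => {
    const products = event.data.data().products;
    // Build one DocumentReference per product
    const refs = products.map(p =>
      firebaseAdmin.firestore().collection('collection').doc(p.productId));
    // getAll() returns a promise for an array of snapshots, in the same order
    return firebaseAdmin.firestore().getAll(...refs).then(snapshots => {
      const prices = snapshots.map((snap, i) => {
        const { weight, isLess } = products[i];
        let amount = weight === '1/2' ? snap.data().price1 : snap.data().price1 * weight;
        return amount + (isLess ? 50 : 0);
      });
      const total = prices.reduce((sum, amount) => sum + amount, 0);
      // ...perform the further action with `total` here, e.g. write it to the order
    });
  });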

Using node.js and promise to fetch paginated data

Please keep in mind that I am new to node.js and I am used to Android development.
My scenario is like this:
Run a query against the database that returns either null or a value
Call a web service with that database value. The service offers its info paginated, meaning that each call returns a parameter to pass on the next call if there is more info to fetch.
After all the items are retrieved, store them in a database table
If everything goes well, for each item received previously, I need to make another web call and store the retrieved info in another table
If fetching any of the data sets fails, all data must be reverted from the database
So far, I've tried this:
getAllData: function() {
  self.getMainWebData(null)
    .then(function(result) {
      //get secondary data for each result row and insert it into database
    });
},

getMainWebData: function(nextPage) {
  return new Promise(function(resolve, reject) {
    module.getWebData(nextPage, function(errorReturned, response, values) {
      if (errorReturned) {
        return reject(errorReturned);
      }
      nextPage = response.nextPageValue;
      resolve(values);
    });
  }).then(function(result) {
    //here I need to insert the returned values in database
    //there's a new page, so fetch the next set of data
    if (nextPage) {
      //call again getMainWebData?
      self.getMainWebData(nextPage);
    }
  });
}
There are a few things missing. From what I've tested, getAllData's .then fires only once, for the first set of items and not for the others, so clearly my handling of the returned data is not right.
LATER EDIT: I've edited the scenario. Given some more research, my feeling is that I could use a chain of .then() calls to perform the operations in sequence.
Yes, that happens because you are resolving the promise on the first call itself. You should put resolve(value) inside an if statement that checks whether more data needs to be fetched. You will also need to restructure the logic, as node is asynchronous, and the above code will not work unless you change the logic.
Solution 1:
You can append the paginated responses to a variable outside the context of the calls you are making, and later use that value after you are done with the responses.
getAllData: function() {
  self.getMainWebData(null)
    .then(function(result) {
      // make your database transaction if result is not an error
    });
},

function getList(nextPage, result, callback) {
  module.getWebData(nextPage, function(errorReturned, response, values) {
    if (errorReturned)
      return callback(errorReturned);
    result.push(values);
    nextPage = response.nextPageValue;
    if (nextPage)
      getList(nextPage, result, callback);
    else
      callback(null, result);
  });
}

getMainWebData: function(nextPage) {
  return new Promise(function(resolve, reject) {
    var result = [];
    getList(nextPage, result, function(err, results) {
      if (err)
        reject(err);
      else {
        // Here all the items are retrieved; you can store them in a database table
        // for each item received, make your web call and store it into another variable or result set
        // suggestion is to make the database transaction only after you have retrieved all your data,
        // otherwise it will involve a database rollback, which will depend on the database you are using
        // after all this is done, resolve the promise with the returned value
        resolve(results);
      }
    });
  });
}
I have not tested it, but something like this should work. If the problem persists, let me know in the comments.
Solution 2:
You can remove the promises and try the same thing with callbacks, as they are easier to follow and will make sense to programmers who are familiar with structured languages.
Looking at your problem, I have created code that loops through promises and only proceeds if there is more data to be fetched; the stored data is still available in an array.
I hope this helps. Don't forget to mark it if it helps.
let fetchData = (offset = 0, limit = 10) => {
  let addresses = [...Array(100).keys()];
  return Promise.resolve(addresses.slice(offset, offset + limit));
}

// o => offset & l => limit
let o = 0, l = 10;
let results = [];

let process = p => {
  if (!p) return p;
  return p.then(data => {
    // Process with data here;
    console.log(data);
    // increment the pagination
    o += l;
    results = results.concat(data);
    // while a full page (equal to the limit) comes back, fetch the next page;
    // otherwise return the collected results
    return (data.length == l) ? process(fetchData(o, l)) : results;
  });
}

process(fetchData(o, l))
  .then(data => {
    // All the fetched data will be here
  }).catch(err => {
    // Handle Error here.
    // All the retrieved data from database will be available in "results" array
  });
If you want to do this more often, I have also created a gist for reference.
If you don't want to use any global variables and want to do it in a very functional way, you can check this example; however, it requires a little more complication.
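The referenced example isn't included here; a minimal sketch of the same idea without module-level state, threading the accumulator through the recursion (fetchData is the same stub as above):
// Same fetchData stub as above: pretends to page through 100 addresses.
const fetchData = (offset = 0, limit = 10) => {
  const addresses = [...Array(100).keys()];
  return Promise.resolve(addresses.slice(offset, offset + limit));
};

// No globals: offset, limit and the accumulated results travel as arguments.
const processAll = (offset = 0, limit = 10, acc = []) =>
  fetchData(offset, limit).then(data => {
    const results = acc.concat(data);
    // A full page means there may be more; recurse with the next offset.
    return data.length === limit
      ? processAll(offset + limit, limit, results)
      : results;
  });

processAll().then(allData => {
  // all fetched pages, flattened into one array
});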

Google Cloud Datastore, how to query for more results

Straight and simple, I have the following function, using the Google Cloud Datastore Node.js API:
fetchAll(query, result = [], queryCursor = null) {
  this.debug(`datastoreService.fetchAll, queryCursor=${queryCursor}`);
  if (queryCursor !== null) {
    query.start(queryCursor);
  }
  return this.datastore.runQuery(query)
    .then((results) => {
      result = result.concat(results[0]);
      if (results[1].moreResults === _datastore.NO_MORE_RESULTS) {
        return result;
      } else {
        this.debug(`results[1] = `, results[1]);
        this.debug(`fetch next with queryCursor=${results[1].endCursor}`);
        return this.fetchAll(query, result, results[1].endCursor);
      }
    });
}
The Datastore API object is in the variable this.datastore;
The goal of this function is to fetch all results for a given query, notwithstanding any limits on the number of items returned per single runQuery call.
I have not yet found out about any definite hard limits imposed by the Datastore API on this, and the documentation seems somewhat opaque on this point, but I have noticed that I always get
results[1] = { moreResults: 'MORE_RESULTS_AFTER_LIMIT' },
indicating that there are still more results to be fetched, and that results[1].endCursor remains stuck on a constant value that is passed on again on each iteration.
So, given some simple query that I plug into this function, I just go on running the query iteratively, setting the query start cursor (by doing query.start(queryCursor);) to the endCursor obtained in the result of the previous query. And my hope is, obviously, to obtain the next bunch of results on each successive query in this iteration. But I always get the same value for results[1].endCursor. My question is: Why?
Conceptually, I cannot see a difference from this example given in the Google documentation:
// By default, google-cloud-node will automatically paginate through all of
// the results that match a query. However, this sample implements manual
// pagination using limits and cursor tokens.
function runPageQuery(pageCursor) {
  let query = datastore.createQuery('Task')
    .limit(pageSize);
  if (pageCursor) {
    query = query.start(pageCursor);
  }
  return datastore.runQuery(query)
    .then((results) => {
      const entities = results[0];
      const info = results[1];
      if (info.moreResults !== Datastore.NO_MORE_RESULTS) {
        // If there are more results to retrieve, the end cursor is
        // automatically set on `info`. To get this value directly, access
        // the `endCursor` property.
        return runPageQuery(info.endCursor)
          .then((results) => {
            // Concatenate entities
            results[0] = entities.concat(results[0]);
            return results;
          });
      }
      return [entities, info];
    });
}
(except for the fact that I don't specify a limit on the size of the query result myself, which I have also tried, by setting it to 1000; that does not change anything.)
Why does my code run into this infinite loop, stuck on each step at the same "endCursor"? And how do I correct this?
Also, what is the hard limit on the number of results obtained per call of datastore.runQuery()? I have not found this information in the Google Datastore documentation thus far.
Thanks.
Looking at the API documentation for the Node.js client library for Datastore, there is a section on that page titled "Paginating Records" that may help you. Here's a direct copy of the code snippet from that section:
var express = require('express');
var app = express();

var NUM_RESULTS_PER_PAGE = 15;

app.get('/contacts', function(req, res) {
  var query = datastore.createQuery('Contacts')
    .limit(NUM_RESULTS_PER_PAGE);
  if (req.query.nextPageCursor) {
    query.start(req.query.nextPageCursor);
  }
  datastore.runQuery(query, function(err, entities, info) {
    if (err) {
      // Error handling omitted.
      return;
    }
    // Respond to the front end with the contacts and the cursoring token
    // from the query we just ran.
    var frontEndResponse = {
      contacts: entities
    };
    // Check if more results may exist.
    if (info.moreResults !== datastore.NO_MORE_RESULTS) {
      frontEndResponse.nextPageCursor = info.endCursor;
    }
    res.render('contacts', frontEndResponse);
  });
});
Maybe you can try using one of the other syntax options (instead of Promises). The runQuery method can take a callback function as an argument, and that callback's parameters include explicit references to the entities array and the info object (which has the endCursor as a property).
And there are limits and quotas imposed on calls to the Datastore API as well. Here are links to official documentation that address them in detail:
Limits
Quotas
