I am trying to fetch users with the Firebase Auth listUsers function and write them to an Excel sheet using the xlsx-populate library. Because the limit is 1000 users per call, I end up with multiple arrays of objects, and I cannot figure out how to handle that. I am simply fetching results with auth, getting each batch into an array of objects, and passing those objects to the Excel sheet, but the resulting sheet only contains the last 1000 responses.
const admin = require("firebase-admin");
const momentTz = require("moment-timezone");
const XlsxPopulate = require("xlsx-populate");
momentTz.suppressDeprecationWarnings = true;
const {
alphabetsArray
} = require("./constant");
var start = momentTz().subtract(4, "days").startOf("day").format();
var start = momentTz(start).valueOf();
const end = momentTz().subtract(1, "days").endOf("day").format();
const listAllUsers = async(nextPageToken) =>{
const [workbook] = await Promise.all([
XlsxPopulate.fromBlankAsync()
]);
const reportSheet = workbook.addSheet("Signup Report");
workbook.deleteSheet("Sheet1");
reportSheet.row(1).style("bold", true);
[
"Date",
"TIME",
"Phone Number"
].forEach((field, index) => {
reportSheet.cell(`${alphabetsArray[index]}1`).value(field);
});
let count = 0
// List batch of users, 1000 at a time.
const data = [];
admin
.auth()
.listUsers(1000, nextPageToken)
.then (async (listUsersResult) => {
listUsersResult.users.forEach((userRecord) =>{
const time = userRecord.metadata.creationTime;
const timestamp = momentTz(time).valueOf();
// console.log(timestamp)
if (timestamp >= 1585704530967 ) {
console.log(time);
let column = count+2;
count++;
data.push(userRecord.toJSON())
reportSheet.cell(`A${column}`).value(time);
reportSheet.cell(`C${column}`).value(userRecord.phoneNumber);
}
});
console.log(JSON.stringify(data)) // this is the array of objects I get after each batch of 1000 responses
if (listUsersResult.pageToken) {
// List next batch of users.
listAllUsers(listUsersResult.pageToken);
await workbook.toFileAsync("./SignUp.xlsx");
}
})
// .catch(function (error) {
// console.log("Error listing users:", error);
// });
// const datas = []
// datas.push(data)
// console.log(datas)
return ;
}
// Start listing users from the beginning, 1000 at a time.
listAllUsers();
The output I am getting looks like this:
[]
[]
[]
[]
[]
I want to combine these into a single array of responses.
You have a race condition. When you perform your console.log(JSON.stringify(data)), your listUsers query is still in progress (it runs asynchronously), so you don't yet have the answer when you print the array. That is why the array is empty.
Try this (I'm not sure it's the optimal solution; I'm not a Node.js dev):
admin
  .auth()
  .listUsers(1000, nextPageToken)
  .then(async (listUsersResult) => {
    listUsersResult.users.forEach((userRecord) => {
      const time = userRecord.metadata.creationTime;
      const timestamp = momentTz(time).valueOf();
      // console.log(timestamp)
      if (timestamp >= 1585704530967) {
        console.log(time);
        let column = count + 2;
        count++;
        data.push(userRecord.toJSON());
        reportSheet.cell(`A${column}`).value(time);
        reportSheet.cell(`C${column}`).value(userRecord.phoneNumber);
      }
    });
    console.log(JSON.stringify(data)); // the array of objects collected for this batch of 1000
    if (listUsersResult.pageToken) {
      // List next batch of users.
      listAllUsers(listUsersResult.pageToken);
      await workbook.toFileAsync("./SignUp.xlsx");
    }
  });
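If the goal is one array containing every page, another option (a rough sketch I have not tested, reusing the same admin.auth().listUsers() and workbook.toFileAsync() calls as above; collectAllUsers is just an illustrative name) is to await the recursive call and pass the accumulator down, then write the workbook once after the last page:
const collectAllUsers = async (nextPageToken, data = []) => {
  // fetch one page of up to 1000 users
  const listUsersResult = await admin.auth().listUsers(1000, nextPageToken);
  listUsersResult.users.forEach((userRecord) => {
    data.push(userRecord.toJSON());
  });
  if (listUsersResult.pageToken) {
    // wait for the next page before returning, so the caller sees everything
    return collectAllUsers(listUsersResult.pageToken, data);
  }
  return data; // a single array containing every batch
};

// Usage sketch: fill the sheet and write the file only once, at the very end.
// collectAllUsers()
//   .then((allUsers) => {
//     console.log(allUsers.length);
//     // ...write allUsers into reportSheet here...
//     return workbook.toFileAsync("./SignUp.xlsx");
//   })
//   .catch((error) => console.log("Error listing users:", error));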
I'm using Back4app.
My Profile class schema has 4 File columns containing pictures.
So when I retrieve an object, I have to make an HTTP request for each file URL and get the byte data like this:
const data = await Parse.Cloud.httpRequest({url:profilePhoto.url()});
return data.buffer.toString('base64');
But for all four files I have to do 4 HTTP requests to the server.
Is there any way to do a batch HTTP request so that with just one request I can get the data for all 4 files?
My main aim is to do the least amount of requests to the server as possible.
There is no out-of-the-box way to retrieve multiple files with one request in Parse Server.
You could implement your own Parse Cloud Code function to retrieve multiple files, but you would have to manually combine them server side and separate them client side.
As a starting point you could look at packages like multistream that allow you to combine multiple file streams into one to get some inspiration.
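For example, a minimal Cloud Code sketch along those lines (untested; the getFilesBase64 name and the urls parameter are made up for illustration) could fetch the files server side and return their base64 data in one response:
Parse.Cloud.define("getFilesBase64", async (request) => {
  // expects request.params.urls to be an array of file URLs
  const urls = request.params.urls || [];
  const responses = await Promise.all(
    urls.map((url) => Parse.Cloud.httpRequest({ url }))
  );
  // one base64 string per requested URL, combined server side
  return responses.map((res) => res.buffer.toString("base64"));
});
The client would then split the returned array back into the individual pictures.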
You might be able to do something similar to what I've done in cloud code.
I had to load up a bunch of information at the start of my application, requiring many round trips to the server.
So I wrote a function called getUserData().
This does many unrelated queries, and jams all of the results into one big object. I then return the object from the function.
Here is the entire function:
console.log("startig getUserData");
var callCount = 0;
var lastLoadTime=0;
// Given a user, load all friends. Save the objects to ret.objects,
// and save the objectIds to ret.friends
//
// Note: we always load the exhaustive friend list, because
// otherwise, we would have no way of recognizing
// removed friendships.
//
async function loadFriends(user, ret) {
const friendQuery = user.relation("friends").query();
const friends = await findFully(friendQuery);
for(var i=0;i<friends.length;i++){
ret.friends[friends[i].id]=1;
ret.objects[friends[i].id]=friends[i];
};
}
// Given a user, load all owned cells. Save the objects to ret.owned,
// and save their objectIds to ret.ownedCells.
//
// Also, save the ids of members, which we will use to flesh out ret.objects with
// the objects who are not friends, but share a cell with the current user.
async function loadPublicCells(user, ret, memberIds) {
const ownedCellQ = new Parse.Query('PublicCell');
ownedCellQ.equalTo('owner',user);
const joinedCellQ = new Parse.Query('PublicCell');
joinedCellQ.equalTo('members',user);
const publicCellQ = Parse.Query.or(ownedCellQ,joinedCellQ);
publicCellQ.greaterThan("updatedAt",new Date(lastLoadTime));
const publicCells=await findFully(publicCellQ);
for(var i=0;i<publicCells.length;i++) {
const cell = publicCells[i];
ret.ownedCells[cell.id]=cell;
const owner = cell.get("owner");
if(owner==null)
continue;
ret.objects[cell.id]=cell;
if(owner.id === user.id) {
ret.ownedCells[cell.id]=1;
} else {
ret.joinedCells[cell.id]=1;
};
const memberQ = cell.relation("members").query();
const members = await findFully(memberQ);
if(ret.memberMap[cell.id]==null)
ret.memberMap[cell.id]={};
const map = ret.memberMap[cell.id];
for(var j=0;j<members.length;j++){
const member=members[j];
map[member.id]=1;
ret.objects[member.id]=member;
};
};
};
// given a list of all members of all cells, load those objects and store
// them in ret.objects. We do not have to record which cells they belong
// to, because that information is in ret.memberMap
async function loadMembers(memberIds, ret) {
const memberQ = new Parse.Query(Parse.User);
var partIds;
while(memberIds.length){
partIds = memberIds.splice(0,100);
memberQ.containedIn('objectId',partIds);
const part = await findFully(memberQ);
for(var i=0;i<part.length;i++) {
ret.objects[part[i].id]=part[i];
}
};
};
// given a user, save all of the objectIds of people who have annoyed him with
// spam. We save only the ids, they don't go on ret.objects, because we only
// need to filter them out of things. The objectIds are sufficient.
//
// We always send all spam objects, otherwise we would not recognize deletions
async function loadUserSpams(user, ret) {
const userSpamsQ = new Parse.Query("_User");
userSpamsQ.equalTo("spamUsers",user);
userSpamsQ.greaterThan("updatedAt", new Date(lastLoadTime));
const userSpams = await findFully(userSpamsQ);
for(var i=0;i<userSpams.length;i++){
ret.userSpams[userSpams[i].id]=1;
};
};
// given a user, save all of the objectIds of people who have been annoyed *BY*
// him with spam. We save only the ids, they don't go on ret.objects, because we
// only need to filter them out of things. The objectIds are sufficient.
//
// We always send all spam objects, otherwise we would not recognize deletions
async function loadSpamUsers(user, ret) {
const spamUserR = user.relation('spamUsers');
const spamUserQ = spamUserR.query();
spamUserQ.greaterThan("updatedAt", new Date(lastLoadTime));
const spamUsers = await findFully(spamUserQ);
for(var i=0;i<spamUsers.length;i++){
ret.spamUsers[spamUsers[i].id]=1;
};
};
// given a user, save all of the objectIds of people to whom he has sent a
// friend request which is still pending. We save only the ids, they don't go
// on ret.objects, because we only need to filter them out of things. The
// objectIds are sufficient.
async function loadPendingFriends(user, ret) {
const request1Q = new Parse.Query('Request');
request1Q.equalTo("owner",user);
const request2Q = new Parse.Query('Request');
request2Q.equalTo("sentTo",user);
const requestQ = Parse.Query.or(request1Q,request2Q);
requestQ.equalTo("status",'PENDING');
const requests = await findFully(requestQ);
for(var i=0;i<requests.length;i++){
const request = requests[i];
const sentBy = request.get("owner");
if(sentBy==null){
console.warn("sentBy==null");
continue;
};
const sentTo = request.get("sentTo");
if(sentTo==null){
console.warn("sentTo==null");
continue;
};
console.dump({sentTo,sentBy});
if(sentBy.id==user.id){
ret["pendingFriends"][sentTo.id]=sentTo;
} else if ( sentTo.id==user.id ) {
ret["friendingPends"][sentBy.id]=sentBy;
};
};
};
// given a user, load all of his private cells. We do not store
// the user objects, because only friends will be in your private cells.
async function loadPrivateCells(user, ret) {
const privateCellQ = new Parse.Query('PrivateCell');
privateCellQ.equalTo("owner", user);
privateCellQ.greaterThan("updatedAt", new Date(lastLoadTime));
const privateCells = await findFully(privateCellQ);
for(var i=0;i<privateCells.length;i++) {
const cell = privateCells[i];
ret.objects[cell.id]=cell;
ret.privateCells[cell.id]=cell;
if(ret.memberMap[cell.id]==null)
ret.memberMap[cell.id]={};
const map = ret.memberMap[cell.id];
const memberQ = cell.relation("members").query();
const members = await findFully(memberQ);
for(var j=0;j<members.length;j++){
const member=members[j];
map[member.id]=1;
ret.objects[member.id]=member;
};
};
//});
}
// we use objects as maps to weed out duplicate objects and cells.
// when we are done, we use this function to replace the object
// with an array of objects. we don't need to send the keys, since
// they already exist within the objects.
function objToValueList(k,ret){
const objs = [];
for( var id in ret[k] )
objs.push(ret[k][id]);
ret[k]=objs;
ret.counts[k]=objs.length;
};
// convert the objects which have been used to accumulate key lists
// to arrays of objectIds. k is the name of the list we are working
// on. ret[k] is the list itself.
function objToKeyList(k,ret) {
const objs = [];
for( var id in ret[k] ) {
objs.push(id);
};
ret[k]=objs;
ret.counts[k]=objs.length;
};
async function checkUserConsent(user){
const query = new Parse.Query("PrivacyPolicy");
query.descending("createdAt");
query.limit(1);
const res = await query.find();
if(res.length==0) {
return true;
};
const policy=res[0];
console.dump(policy);
console.log(policy);
const userConsent=user.get("lastConsent");
return userConsent!=null && userConsent.id == policy.id;
};
async function loadAlerts(user,ret) {
const q1 = new Parse.Query("Alert");
q1.equalTo("owner", user);
const q2 = new Parse.Query("Response");
q2.equalTo("owner", user);
const q3 = new Parse.Query("Alert");
q3.matchesKeyInQuery("objectId", "alert", q2);
const q = Parse.Query.or(q1,q3);
const list = await q.find();
var time = new Date().getTime();
time -= 1000*86400;
time=Math.max(lastLoadTime, time);
q.greaterThan("updatedAt",time);
for(var i=0;i<list.length;i++) {
const item=list[i];
ret.alerts[item.id]=1;
ret.objects[item.id]=item;
};
}
async function doGetUserData(user) {
if(!user)
return {fatal: 'not logged in!' };
const ret = {
owner: {},
privateCells: {},
friends: {},
alerts: {},
objects: {},
ownedCells: {},
joinedCells: {},
spamUsers: {},
userSpams: {},
pendingFriends: {},
friendingPends: {},
memberMap: {},
loadTime: lastLoadTime,
counts: {callCount: callCount++},
};
{
user.fetch();
ret.owner=user.id;
const memberIds={};
ret.objects[user.id]=user;
console.log("loadFriends");
await loadFriends(user,ret);
console.log("loadPrivateCells");
await loadPrivateCells(user,ret,memberIds);
console.log("loadPublicCells");
await loadPublicCells(user,ret,memberIds);
console.log("loadPendingFriends");
await loadPendingFriends(user,ret);
console.log("loadUserSpams");
await loadUserSpams(user,ret);
console.log("loadSpamUsers");
await loadSpamUsers(user,ret);
console.log("loadAlerts");
await loadAlerts(user,ret);
const memberList=[];
for( var id in memberIds ) {
console.log(ret.objects[id]);
memberList.push(id);
};
console.log("loadMembers");
await loadMembers(memberList,ret);
}
for(var cell in ret.memberMap) {
var map = ret.memberMap[cell];
var list = [];
ret.memberMap[cell]=list;
for(var member in map) {
list.push(member);
};
}
delete ret.objects[user.id];
[
'friends', "friendingPends", 'pendingFriends',
'privateCells', 'ownedCells', 'joinedCells',
'userSpams', 'spamUsers', "alerts"
].forEach((k)=>{
objToKeyList(k,ret);
});
objToValueList('objects',ret);
delete ret.counts;
return ret;
}
async function getUserData(req) {
try {
var nextLoadTime=new Date().getTime();
const user = req.user;
console.log(user);
lastLoadTime = req.params.lastLoadTime;
if(lastLoadTime==null)
lastLoadTime=0;
lastLoadTime = new Date(lastLoadTime);
const ret = await doGetUserData(user);
ret.loadTime=nextLoadTime;
return ret;
} catch ( err ) {
console.log(err);
try {
console.log(err.stack());
} catch ( xxx ) {
console.log(err);
};
throw (`error getting data: ${err}`);
};
};
Parse.Cloud.define("getUserData", getUserData);
Something like this could easily be done to get your data for you. Like this solution, it is unlikely to be entirely pretty, but it would probably work.
I have a Firebase function that's supposed to return Items that are sold by a seller. I want to get the seller's profile picture via Firebase Authentication. But whenever I AWAIT the function
Edit: it's worth noting that mAuth is Firebase Authentication.
await mAuth.getUser(sellerData.UID);
the application returns an empty JSON response, i.e. [].
Here is the full code for the function; the error occurs on line 11 or somewhere around there.
export const getHottestItems = functions.region("asia-east2").https.onRequest(async (data, response) => {
try {
var arrayItem = new Array<Item>();
let itemSeller: Seller;
const sellerSnapshot = await db.collection("users").get();
// this is the list of promises/awaitables for all items
const promises = new Array<Promise<FirebaseFirestore.QuerySnapshot<FirebaseFirestore.DocumentData>>>();
sellerSnapshot.forEach(async (sellerDoc) => {
const sellerData = sellerDoc.data();
// THIS PART CAUSES THE API TO RETURN []
const sellerAuth = await mAuth.getUser(sellerData.UID);
// check for non null / empty strings
if (sellerData.Name as string && sellerData.UID as string) {
// this is all the seller information we need
itemSeller = new Seller(sellerData.Name, sellerData.UID, sellerAuth.photoURL); // placeholder profile picture
const refItem = sellerDoc.ref.collection("Items");
// push all the promises to a list so we can run all our queries in parallel
promises.push(refItem.get());
}
});
// wait for all promises to finish and get a list of snapshots
const itemSnapshots = await Promise.all(promises);
itemSnapshots.forEach((ItemSnapshot) => {
ItemSnapshot.forEach((ItemDoc) => {
// get the data
const itemData = ItemDoc.data();
// if title is not null, the rest of the fields are unlikely to be.
if (itemData.Title as string) {
// the rest of the logic to convert from database to model is in the constructor
arrayItem.push(new Item(ItemDoc.id, itemData.Title, itemSeller, itemData.Likes, itemData.ListedTime, itemData.Rating, itemData.Description, itemData.TransactionInformation, itemData.ProcurementInformation, itemData.Category, itemData.Stock, itemData.Image1, itemData.Image2, itemData.Image3, itemData.Image4, itemData.AdvertisementPoints, itemData.isDiscounted, itemData.isRestocked));
}
});
});
// sort by performance level
arrayItem = arrayItem.sort(x => x.Performance);
if (data.body.userID) {
arrayItem = await markLikedItems(data.body.userID, arrayItem);
}
//send the responseafter all the final modifications
response.send(arrayItem);
} catch (err) {
// log the error
console.log(err);
response.status(500).send(err);
}
});
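For reference, here is a sketch (untested) of the seller loop using for...of instead of forEach; forEach does not wait for async callbacks, so as written the response can be sent before the awaits inside have finished:
// Sketch: iterate with for...of so each await genuinely pauses the loop.
for (const sellerDoc of sellerSnapshot.docs) {
  const sellerData = sellerDoc.data();
  const sellerAuth = await mAuth.getUser(sellerData.UID);
  if (sellerData.Name && sellerData.UID) {
    itemSeller = new Seller(sellerData.Name, sellerData.UID, sellerAuth.photoURL);
    // still collect the item queries so they can run in parallel afterwards
    promises.push(sellerDoc.ref.collection("Items").get());
  }
}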
REVISED QUESTION
I've revised the question, in the hope of getting a clearer answer.
I'm trying to process data in ExpressJS, based on the incoming req.body and the existing data in the table.
I'm receiving a req.body that contains a JSON list of updated fields. Some of those fields are stored as JSONB in Postgres. If an incoming field is JSONB, then the form (external code) that is making the request has already run a jsonpatch.compare() to generate the list of patches, and it is these patches and not the full values that are being passed in. For any non-JSONB values, incoming values just need to be passed through to the UPDATE query.
I have a working version, as below, that pretends that the existing JSONB values in the table ARE NULL. Clearly, this is NOT what is needed. I need to pull the values from the db. The non-querying-of-current-values version, with a bare-minimum router, looks like this:
const express = require('express')
const bodyParser = require('body-parser')
const SQL = require('sql-template-strings')
const { Client } = require('pg')
const dbConfig = require('../db')
const jsonpatch = require('fast-json-patch')
const FormRouter = express.Router()
I have some update code:
const patchFormsRoute = (req, res) => {
const client = new Client(dbConfig)
const { id } = req.body
const parts = []
const params = [id]
// list of JSONB fields for the 'forms' table
const jsonFields = [
'sections',
'editors',
'descriptions',
]
// list of all fields, including JSONB fields in the 'forms' table
const possibleFields = [
'status',
'version',
'detail',
'materials',
...jsonFields,
]
// this is a DUMMY RECORD instead of the result of a client.query
let currentRecord = { 'sections':[], 'editors':[], 'descriptions':[] }
possibleFields.forEach(myProp => {
if (req.body[myProp] != undefined) {
parts.push(`${myProp} = $${params.length + 1}`)
if (jsonFields.indexOf(myProp) > -1) {
val = currentRecord[myProp]
jsonpatch.applyPatch(val, req.body[myProp])
params.push(JSON.stringify(val))
} else {
params.push(req.body[myProp])
}
}
})
const updateQuery = 'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
client.connect()
return client
.query(updateQuery, params)
.then(result => res.status(200).json(result.rowCount))
.catch(err => res.status(400).json(err.severity))
.then(() => client.end())
}
FormRouter.route('/')
.patch(bodyParser.json({ limit: '50mb' }), patchFormsRoute)
exports.FormRouter = FormRouter
I promise that this is working code, which does almost what I need. However, I want to replace the dummy record with the data already in the table, fetched contemporaneously. My issue is that because multiple clients could be updating a row at the same time (but looking at orthogonal elements of the JSONB values), I need the fetch, calc, and update to happen as a SINGLE TRANSACTION. My plan is to:
BEGIN a transaction
Query Postgres for the current row value, based on the incoming id
For any JSONB fields, apply the patch to generate the correct value for that field in the UPDATE statement.
Run the UPDATE statement with the appropriate param values (either from the req.body or the patched row, depending on whether the field is JSONB or not)
COMMIT the transaction, or ROLLBACK on error.
I've tried implementing the answer from #midrizi; maybe it's just me, but the combination of awaits and plain testing of res sends the server off into Hyperspace... and ends in a timeout.
In case anyone is still awake, here's a working solution to my issue.
TLDR; RTFM: A pooled client with async/await minus the pooling (for now).
const patchFormsRoute = (req, res) => {
const { id } = req.body
// list of JSONB fields for the 'forms' table
const jsonFields = [
'sections',
'editors',
'descriptions',
]
// list of all fields, including JSONB fields in the 'forms' table
const possibleFields = [
'status',
'version',
'detail',
'materials',
...jsonFields,
]
const parts = []
const params = [id]
;(async () => {
const client = await new Client(dbConfig)
await client.connect()
try {
// begin a transaction
await client.query('BEGIN')
// get the current form data from DB
const fetchResult = await client.query(
SQL`SELECT * FROM forms WHERE id = ${id}`,
)
if (fetchResult.rowCount === 0) {
res.status(400).json(0)
await client.query('ROLLBACK')
} else {
const currentRecord = fetchResult.rows[0]
// patch JSONB values or update non-JSONB values
let val = []
possibleFields.forEach(myProp => {
if (req.body[myProp] != undefined) {
parts.push(`${myProp} = $${params.length + 1}`)
if (jsonFields.indexOf(myProp) > -1) {
val = currentRecord[myProp]
jsonpatch.applyPatch(val, req.body[myProp])
params.push(JSON.stringify(val))
} else {
params.push(req.body[myProp])
}
}
})
const updateQuery =
'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
// update record in DB
const result = await client.query(updateQuery, params)
// commit transaction
await client.query('COMMIT')
res.status(200).json(result.rowCount)
}
} catch (err) {
await client.query('ROLLBACK')
res.status(400).json(err.severity)
throw err
} finally {
client.end()
}
})().catch(err => console.error(err.stack))
}
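For the pooling that the TLDR defers "for now", the same transaction flow can sit behind a shared Pool instead of a per-request Client. A sketch (untested, and withTransaction is just an illustrative name):
const { Pool } = require('pg')
const pool = new Pool(dbConfig)

// check a client out of the pool, run the work inside a transaction,
// and always release the client back to the pool
const withTransaction = async (work) => {
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    const result = await work(client)
    await client.query('COMMIT')
    return result
  } catch (err) {
    await client.query('ROLLBACK')
    throw err
  } finally {
    client.release()
  }
}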
I'm trying to get the hang of Node (I mainly use Python), so I'm working on a small project to read and write data to a SQLite database.
Luckily I am having no issue writing to the database, but I cannot seem to get queries to work at all. I've tested the queries in the SQL terminal and they are successful.
So far, I have something like this:
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird")
// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";
function insertDataIntoDatabase(transactions, db) {
// Write each transaction into the database.
let sqlStatement = "INSERT INTO Trx \
(name, address, amount, category) \
VALUES "
for (var i = 0; i < transactions.length; ++i) {
let trx = transactions[i];
sqlStatement += util.format(
"('%s', '%s', %d, '%s'), ",
trx.name,
trx.address,
trx.amount,
trx.category,
);
}
sqlStatement = sqlStatement.substring(0, sqlStatement.length - 2);
db.then(db => db.run(sqlStatement))
.catch((err) => console.log(err));
}
function getTransactions (db, category) {
// Return an array of valid transactions of a given category.
let where = "";
if (category) {
where = util.format("WHERE category='%s'", category);
}
let sqlStatement = util.format("SELECT * from Trx %s", where);
sqlStatement = "SELECT * from Trx"; // Trying to figure out whats happening
console.log(sqlStatement);
db.then(db => {
db.all(sqlStatement)
.then((err, rows) => {
console.log(rows); // undefined
console.log(err); // []
})
})
}
// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
.then(db => db.migrate({ force: 'last' }));
// Read transactions and write them to the database
fs.readFile("transactions.json", "utf8", (err, data) => {
let transactions = JSON.parse(data).transactions;
insertDataIntoDatabase(transactions, db);
})
// Get transaction data
getTransactions(db, 'credit');
// Close connection to DB
db.then(db => db.close());
Looking at this again, I think the issue is the async nature of Node. The query was successful, but at that point in time I had not yet inserted the data from the JSON file into the database, hence the empty query result.
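A sketch of how the flow could be sequenced with async/await so the insert finishes before the query runs (untested; insertDataIntoDatabase would need to accept the resolved db and return the promise from db.run for the await to mean anything):
const readFile = util.promisify(fs.readFile);

async function main() {
  // open the connection and run migrations first
  const db = await sqlite.open(DATABASE, { cached: true });
  await db.migrate({ force: "last" });

  // read and insert the transactions, and wait for the insert to finish
  const data = await readFile("transactions.json", "utf8");
  const transactions = JSON.parse(data).transactions;
  await insertDataIntoDatabase(transactions, db);

  // only now query; the rows inserted above are guaranteed to be there
  const rows = await db.all("SELECT * FROM Trx WHERE category = 'credit'");
  console.log(rows);

  await db.close();
}

main().catch((err) => console.log(err));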
I'm having a tremendously tough time organizing the flow here, as I'm self-taught, so I'm wondering if someone might be able to assist.
var channelIds = ['XYZ','ABC','QRS']
var playlistIds = [];
var videoIds = [];
ORDER OF PROCESS
1. Get All Playlist IDs: if the returned GET request JSON contains a nextPageToken, run the GET request again with that page before going to (2).
2. Get All Video IDs: if the returned GET request JSON contains a nextPageToken, run the GET request again with that page before going to (3).
3. Aggregate into Final Array: I need to put them all in an array such as:
var ArrFinal = [{channelId,playlistID,videoId},{channelId,playlistID,videoId},{channelId,playlistID,videoId}];
I don't necessarily need someone to write the whole thing. I'm trying to better understand the most efficient way to know when the previous step is done, while also handling the nextPageToken iteration.
I'm not familiar with the YouTube API, but what you basically need is a get function for each endpoint. This function should also take care of the nextPageToken.
Something like this (not tested):
'use strict';
const Promise = require('bluebird');
const request = Promise.promisifyAll(require('request'));
const playlistEndpoint = '/youtube/v3/playlists';
const baseUrl = 'https://www.googleapis.com'
const channelIds = ['xy', 'ab', 'cd'];
const getPlaylist = async (channelId, pageToken, playlists) => {
const url = `${baseUrl}${playlistEndpoint}`;
const qs = {
channelId,
maxResults: 25,
pageToken
};
try {
const playlistRequest = await request.getAsync({ url, qs });
const nextPageToken = playlistRequest.body.nextPageToken;
// if we already had items, combine with the new ones
const items = playlists ? playlists.concat(playlistRequest.body.items) : playlistRequest.body.items;
if (nextPageToken) {
// if token, do the same again and pass results to function
return getPlaylist(channelId, nextPageToken, items);
}
// if no token we are finished
return items;
}
catch (e) {
console.log(e.message);
}
};
const getVideos = async (playlistId, pageToken, videos) => {
// pretty much the same as above
}
async function awesome(channelIds) {
const fancyArray = [];
await Promise.map(channelIds, async (channelId) => {
const playlists = await getPlaylist(channelId);
const videos = await Promise.map(playlists, async (playlistId) => {
const videos = await getVideos(playlistId);
videos.forEach(videoId => {
fancyArray.push({ channelId, playlistId, videoId })
})
});
});
return fancyArray;
}
awesome(channelIds)
// UPDATE
This may result in a lot of concurrent requests; you can limit them by using
Promise.map(items, item => { somefunction() }, { concurrency: 5 });
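Applied inside awesome() above, the outer Promise.map call could become something like this (again untested):
await Promise.map(channelIds, async (channelId) => {
  const playlists = await getPlaylist(channelId);
  // ...same per-channel work as before...
}, { concurrency: 5 }); // process at most 5 channels at a time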