I attempted to ask a simplified version of this here but I realized that it probably doesn't contain enough information. So unfortunately I'm just going to post the whole thing and hope that it doesn't offend anyone.
Basically I have 4 functions, with their stated purposes below:
1. initializeDrive() takes a user email and uses Google's JWT user-impersonation method to return the appropriate authorization for that user.
2. listfiles() calls initializeDrive() for authorization and retrieves a list of the documents associated with that user's auth.
3. singleUserData() expects a list of files, such as the one returned by listfiles(), and refines it.
4. all_user_data() is intended to be the Promise.all async function that combines the functions above: it maps over the array of users from getListUsers() and builds a master array containing the refined file data for every user.
Basically my poorly formed and ignorant question is: how can I make all_user_data() return that master array? I am relatively new to async programming and have a persistent problem with getting confused by nested functions.
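What I am hoping for is something shaped roughly like the sketch below (not working code, just the intent: map each user from getListUsers() to a promise and combine them with Promise.all):
const all_user_data = async () => {
  const users = await getListUsers();
  // one promise per user: list their files, then refine them
  const master = await Promise.all(
    users.map((user) => listfiles(undefined, user).then(singleUserData))
  );
  return master; // one entry of refined file data per user
};
Here is what I actually have so far: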
// loads credentials from .env file
require('dotenv').config();
const util = require('util');
const { google } = require('googleapis');
const { logger } = require('handlebars');
const { getListUsers } = require('./get_users');
const target_users = getListUsers();
function initializeDrive(version, user) {
return new Promise((resolve, reject) => {
const client_email = process.env.GOOGLE_CLIENT_EMAIL;
console.log(client_email);
// add some necessary escaping so to avoid errors when parsing the private key.
const private_key = process.env.GOOGLE_PRIVATE_KEY.replace(/\\n/g, '\n');
// impersonate an account with rights to create team drives
const emailToImpersonate = user;
const jwtClient = new google.auth.JWT(
client_email,
null,
private_key,
['https://www.googleapis.com/auth/drive'],
emailToImpersonate,
);
return google.drive({
version: version,
auth: jwtClient,
});
});
}
const listfiles = async (pagetokenObj, user) => {
let pageToken = '';
let version = 'v3';
if (pagetokenObj !== undefined) {
pageToken = pagetokenObj.pageToken;
}
const drive = await initializeDrive(version, user);
//const drive = initializeDrive(version,user);
return new Promise((resolve, reject) => {
drive.files.list(
{
pageSize: 100,
fields:
'nextPageToken, files(id, name, owners(emailAddress), sharingUser(emailAddress), permissions)',
...(pageToken ? { pageToken } : {}),
},
function (err, { data: { nextPageToken = '', files = [] } = {} }) {
if (err) {
return reject(err);
}
if (!nextPageToken) {
return resolve(files);
}
// if page token is present we'll recursively call ourselves until
// we have a complete file list.
return listfiles({ pageToken: nextPageToken }).then((otherfiles) => {
resolve(files.concat(otherfiles));
});
},
);
});
};
// Function returns formatted file data for a single user
const singleUserData = async (files) => {
return new Promise((resolve, reject) => {
const single_user_json = await listfiles();
const res = single_user_json
.filter((doc) => Boolean(doc.permissions))
.map((doc) => {
return {
id: doc.id,
sharedWith: doc.permissions
.filter((permission) => permission.type === 'user')
.map((permission) => {
return {
emailAddress: permission.emailAddress,
role: permission.role || null,
};
}), // convert the perm object into a string (email address)
};
});
// this is how you get nicer console.logs instead of just [Object] and [Array] BS
// https://stackoverflow.com/a/10729284/9200245
console.log(util.inspect(res, { showHidden: false, depth: null, colors: true }));
if (err) {
return reject(err);
}
if (!nextPageToken) {
return resolve(files);
};
})
};
const all_user_data = async() => {
const users = await getListUsers();
const pagetokenObj = "{ pageToken = '' } = {}";
Promise.all(
users.map(async (pagetokenObj,user) => {
const files = await listfiles(pagetokenObj, user);
console.log(files);
const singleUserData = await singleUserData(files)
console.log(singleUserData);
}),
);
console.log(users)
}
//returnJSON();
//getListUsers();
//singleUserData();
all_user_data();
I finally figured it out. Basically I needed to await a lot more and properly declare my functions as async.
// loads credentials from .env file
require('dotenv').config();
const util = require('util');
const { google } = require('googleapis');
const { logger } = require('handlebars');
const { getListUsers } = require('./get_users');
const target_users = getListUsers();
async function initializeDrive(user) {
// return new Promise((resolve, reject) => {
const client_email = process.env.GOOGLE_CLIENT_EMAIL;
console.log(user + ': ' + client_email);
// add some necessary escaping so to avoid errors when parsing the private key.
const private_key = process.env.GOOGLE_PRIVATE_KEY.replace(/\\n/g, '\n');
// impersonate an account with rights to create team drives
const emailToImpersonate = await user;
const jwtClient = new google.auth.JWT(
client_email,
null,
private_key,
['https://www.googleapis.com/auth/drive'],
emailToImpersonate,
);
return google.drive({
version: "v3",
auth: jwtClient,
});
//});
}
//
async function listfiles(pagetokenObj, user) {
let pageToken = '';
if (pagetokenObj !== undefined) {
pageToken = pagetokenObj.pageToken;
}
const drive = await initializeDrive(user);
//console.log(drive)
return new Promise((resolve, reject) => {
drive.files.list(
{
pageSize: 100,
fields:
'nextPageToken, files(parents, id, name, properties, owners(emailAddress), sharingUser(emailAddress), permissions)',
...(pageToken ? { pageToken } : {}),
},
function (err, { data: { nextPageToken = '', files = [] } = {} }) {
if (err) {
return reject(err);
}
if (!nextPageToken) {
return resolve(files);
}
// if page token is present we'll recursively call ourselves until
// we have a complete file list.
return listfiles({ pageToken: nextPageToken }, user).then((otherfiles) => {
resolve(files.concat(otherfiles));
});
},
);
});
};
async function singleUserData(user) {
const pagetokenObj = "{ pageToken = '' } = {}"
const single_user_json = await listfiles(pagetokenObj, user);
// return new Promise ((resolve, reject) => {
console.log(user+ ":" + JSON.stringify(single_user_json));
const res = await single_user_json
.filter((doc) => Boolean(doc.permissions))
.map((doc) => {
return {
id: doc.id,
sharedWith: doc.permissions
.filter((permission) => permission.type === 'user')
.map((permission) => {
return {
emailAddress: permission.emailAddress,
role: permission.role || null,
};
}) // convert the perm object into a string (email address)
};
});
return JSON.stringify(res);
//})
}
async function getAllUserData() {
try {
const users = await getListUsers();
// const userString = JSON.parse(users);
console.log("test22222222222222: " + users)
const usersData = await Promise.all(
users.map((user) => {
return singleUserData(user);
})
);
console.log("[" + usersData + "]");
return usersData;
} catch (error) {
console.log(error);
}
}
getAllUserData();
You're almost there.
Remove listfiles call in all_user_data
Pass user to singleUserData
Remove pagetokenObj in all_user_data
Remove async and await inside users.map and return promise directly
Await Promise.all, assign it to a variable and return that variable
Remove unnecessary Promise wrapping in singleUserData
Change function signature of singleUserData to take in a user
Pass user to listfiles in singleUserData
Return res instead of files in singleUserData
Here are the changes I made to all_user_data and singleUserData:
// Function returns formatted file data for a single user
async function singleUserData(user) {
const single_user_json = await listfiles(user);
const res = single_user_json
.filter((doc) => Boolean(doc.permissions))
.map((doc) => {
return {
id: doc.id,
sharedWith: doc.permissions
.filter((permission) => permission.type === "user")
.map((permission) => {
return {
emailAddress: permission.emailAddress,
role: permission.role || null,
};
}), // convert the perm object into a string (email address)
};
});
return res;
};
async function getAllUserData() {
const users = await getListUsers();
const usersData = await Promise.all(
users.map((user) => {
return singleUserData(user);
})
);
return usersData;
};
const usersData = await getAllUserData();
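Note that the last line is a top-level await, which only works inside an ES module; with the require()-based setup above, you would call it from an async context instead, for example:
getAllUserData().then((usersData) => {
  // usersData is the master array: one entry per user, each an array of { id, sharedWith } objects
  console.log(usersData);
});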
I don't understand why I get this error. This is my controller:
export const createProductReview = async (req, res) => {
const { rating, comment } = req.body;
const product = await Product.findById(req.params.id);
if (product) {
const alreadyReviewed = product.reviews.find(
r => r.user.toString() === req.user.userId.toString()
);
if (alreadyReviewed) {
throw new NotFoundError('Product already reviewed');
}
const review = {
user: req.user.userId,
name: req.user.username,
rating: Number(rating),
comment,
};
product.reviews.push(review);
product.numOfReviews = product.reviews.length;
product.rating =
product.reviews.reduce((acc, item) => item.rating + acc, 0) /
product.reviews.length;
await product.save();
res.status(StatusCodes.OK).json({ message: 'Review added', review });
} else {
throw new NotFoundError('Product not found');
}
};
This is my productPage, where I dispatch addProductReview, passing the product id from params and the review object:
const [rating, setRating] = useState(0);
const [comment, setComment] = useState('');
const submitHandler = e => {
e.preventDefault();
dispatch(
addProductReview(id, {
rating,
comment,
})
);
};
And this is my productSlice:
export const addProductReview = createAsyncThunk(
'product/review',
async (id, { review }, thunkAPI) => {
try {
const { data } = await axios.post(
`/api/v1/products/${id}/reviews`,
review
);
return data;
} catch (error) {
const message = error.response.data.msg;
return thunkAPI.rejectWithValue(message);
}
}
);
I have no clue why I get the error "Path comment is required"; I do pass the review object to the route.
The issue is with the parameters used in your Thunk payloadCreator. From the documentation...
The payloadCreator function will be called with two arguments:
arg: a single value, containing the first parameter that was passed to the thunk action creator when it was dispatched. This is useful for passing in values like item IDs that may be needed as part of the request. If you need to pass in multiple values, pass them together in an object when you dispatch the thunk, like dispatch(fetchUsers({status: 'active', sortBy: 'name'})).
thunkAPI: an object containing all of the parameters that are normally passed to a Redux thunk function, as well as additional options
Your payloadCreator has three arguments, which is incorrect.
Try this instead:
export const addProductReview = createAsyncThunk(
'product/review',
async ({ id, ...review }, thunkAPI) => {
try {
const { data } = await axios.post(
`/api/v1/products/${id}/reviews`,
review
);
return data;
} catch (error) {
const message = error.response.data.msg;
return thunkAPI.rejectWithValue(message);
}
}
);
and dispatch it like this:
dispatch(addProductReview({ id, rating, comment }));
I am trying to get the length of a Map and I keep getting "undefined". Could someone please tell me what I am doing wrong?
This is the part of the code that gives me problems.
const GYMdetail: { [key: string]: number} = {};
GYMdetail[`${doc.data().name} (${doc.data().personalID})`] = 650;
const subtotal = 650 * GYMdetail.size;
This is the complete function code
export const addGymMonthlyExpense =
functions.https.onRequest((request, response) => {
const query1 = admin.firestore().collection("users");
const query = query1.where("subscriptions.gym.active", "==", true);
query.get()
.then(async (allUsers) => {
allUsers.docs.forEach(async (doc) => {
if (doc != undefined) {
const houseForGym = doc.data().subscriptions.gym.house;
await admin.firestore()
.doc(`houses/${houseForGym}/expenses/2022-04`)
.get().then((snapshot) => {
if (snapshot.data() == undefined) {
console.log(`${houseForGym}-${doc.data().name}: CREAR!!`);
} else if (snapshot.data()!.issued == false) {
let detail: { [key: string]: any} = {};
const GYMdetail: { [key: string]: number} = {};
detail = snapshot.data()!.detail;
GYMdetail[
`${doc.data().name} (${doc.data().personalID})`
] = 650;
const subtotal = 650 * GYMdetail.size;
detail["GYM"] = {"total": subtotal, "detail": GYMdetail};
snapshot.ref.set({"detail": detail}, {merge: true});
}
return null;
})
.catch((error) => {
console.log(
`${houseForGym} - ${doc.data().name}: ${error}`);
response.status(500).send(error);
return null;
});
}
});
response.send("i");
})
.catch((error) => {
console.log(error);
response.status(500).send(error);
});
});
Since you are executing an asynchronous call to the database in your code, you need to return a promise from the top-level code; otherwise Cloud Functions may kill the container when the final } executes and by that time the database load won't be done yet.
So:
export const addGymMonthlyExpense =
functions.https.onRequest((request, response) => {
const query1 = admin.firestore().collection("users");
const query = query1.where("subscriptions.gym.active", "==", true);
return query.get()
...
Next you'll need to ensure that all the nested get() calls also get a chance to finish before the Functions container gets terminated. For that I recommend not using await for each nested get call, but a single Promise.all for all of them:
query.get()
.then(async (allUsers) => {
const promises = [];
allUsers.docs.forEach((doc) => {
const houseForGym = doc.data().subscriptions.gym.house;
promises.push(admin.firestore()
.doc(`houses/${houseForGym}/expenses/2022-04`)
.get().then((snapshot) => {
...
});
});
response.send("i");
return Promise.all(promises);
})
.catch((error) => {
console.log(error);
response.status(500).send(error);
});
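As for the length question itself: GYMdetail is declared as a plain object, not a Map, so GYMdetail.size is undefined and the subtotal calculation breaks. A small sketch of two options, reusing the names from the snippet in the question:
// Option 1: keep the plain object and count its keys
const GYMdetail: { [key: string]: number } = {};
GYMdetail[`${doc.data().name} (${doc.data().personalID})`] = 650;
const subtotal = 650 * Object.keys(GYMdetail).length;
// Option 2: use an actual Map, which does have a size property
const gymDetail = new Map<string, number>();
gymDetail.set(`${doc.data().name} (${doc.data().personalID})`, 650);
const subtotalFromMap = 650 * gymDetail.size;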
I want the array length, but the function returns [object Promise]. I send a correct email, and I just need to know whether it's already in the collection.
const res = require('express/lib/response');
async function emailUnique(email){
var query = { email: email };
const response = await dbo.collection('users').find(query).toArray();
const tot = response.length;
return tot;
}
const emailValidate = function (email) {
if (email.indexOf('#') === -1 || email.indexOf('.') < 0) {
message = 'Invalid entries. Try again.';
}else{
let quantos = emailUnique(email);
message = quantos;
}
return message;
};
module.exports = emailValidate;
Thanks! Here's a simplified answer:
emailValidate.mod.js
const res = require('express/lib/response');
function emailValidate(email) {
return new Promise((resolve) => { // return a promise
var query = { email: email };
dbo.collection('users').find(query).toArray((_err, result) => {
resolve(result.length); //the returned value
});
});
}
module.exports = emailValidate;
user.js
const mod_emailValidate = require('./emailValidate.mod');
exports.getUsers = (req, res) => {
const { name } = req.body;
const { email } = req.body;
const { password } = req.body;
async function run() { // make an async function
let data = await mod_emailValidate(email); //call the function with await
console.log(`Returned: ${data}`);
}
run();
res.status(400).send({ message: 'Fired' });
};
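An alternative sketch that keeps the original async emailUnique() and simply awaits it; note this makes emailValidate itself async, so its callers have to await it as well:
// emailUnique(email) is the async lookup from the question, in the same module
async function emailValidate(email) {
  // same validity check as the original
  if (email.indexOf('#') === -1 || email.indexOf('.') < 0) {
    return 'Invalid entries. Try again.';
  }
  // emailUnique returns a promise, so await it to get the actual count
  const quantos = await emailUnique(email);
  return quantos;
}
module.exports = emailValidate;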
Below is my working code, which can post images, but is there any way I can also share videos as an Instagram story?
The error I get when I try to post a video instead of an image:
PS D:\Softwares\programming\Insta Bot\story> node index.js
18:45:11 - info: Dry Run Activated
18:45:11 - info: Post() called! ======================
18:45:11 - debug: 1 files found in ./images/
18:45:11 - warn: Record file not found, saying yes to D:\Softwares\programming\Insta Bot\story\images\meme.mp4
18:45:11 - debug: Read File Success
18:45:11 - error: undefined
(MAIN CODE)
index.js
const logger = require("./logger.js")
const { random, sleep } = require('./utils')
require('dotenv').config();
const { IgApiClient, IgLoginTwoFactorRequiredError } = require("instagram-private-api");
const ig = new IgApiClient();
const Bluebird = require('bluebird');
const inquirer = require('inquirer');
const { CronJob } = require('cron');
const path = require("path");
const fs = require("fs");
const fsp = fs.promises;
const sharp = require("sharp");
//==================================================================================
const statePath = "./etc/state.conf";
const recordPath = "./etc/usedfiles.jsonl";
const imgFolderPath = "./images/";
const dryrun = true;
const runOnStart = true;
//==================================================================================
(async () => { // FOR AWAIT
// LOGIN TO INSTAGRAM
if (!dryrun) {
await login();
logger.info("Log In Successful");
} else {
logger.info("Dry Run Activated");
}
// SCHEDULER
// logger.silly("I'm a schedule, and I'm running!! :)");
const job = new CronJob('38 43 * * * *', post, null, true); //https://crontab.guru/
if (!runOnStart) logger.info(`Next few posts scheduled for: \n${job.nextDates(3).join("\n")}\n`);
else post();
// MAIN POST COMMAND
async function post() {
logger.info("Post() called! ======================");
let postPromise = fsp.readdir(imgFolderPath)
.then(filenames => {
if (filenames.length < 1) throw new Error(`Folder ${imgFolderPath} is empty...`)
logger.debug(`${filenames.length} files found in ${imgFolderPath}`);
return filenames;
})
.then(filenames => filenames.map(file => path.resolve(imgFolderPath + file)))
.then(filenames => pickUnusedFileFrom(filenames, filenames.length))
.then(filename => {
if (!dryrun) registerFileUsed(filename)
return filename
})
.then(fsp.readFile)
.then(async buffer => {
logger.debug("Read File Success "); //TODO move this to previous then?
return sharp(buffer).jpeg().toBuffer()
.then(file => {
logger.debug("Sharp JPEG Success");
return file
})
})
.then(async file => {
if (!dryrun) {
// await sleep(random(1000, 60000)) //TODO is this necessary?
return ig.publish.story({ file })
.then(fb => logger.info("Posting successful!?"))
}
else return logger.info("Data not sent, dryrun = true")
})
.then(() => logger.info(`Next post scheduled for ${job.nextDates()}\n`))
.catch(logger.error)
}
})();
//=================================================================================
async function login() {
ig.state.generateDevice(process.env.IG_USERNAME);
// ig.state.proxyUrl = process.env.IG_PROXY;
//register callback?
ig.request.end$.subscribe(async () => {
const serialized = await ig.state.serialize();
delete serialized.constants; // this deletes the version info, so you'll always use the version provided by the library
await stateSave(serialized);
});
if (await stateExists()) {
// import state accepts both a string as well as an object
// the string should be a JSON object
const stateObj = await stateLoad();
await ig.state.deserialize(stateObj)
.catch(err => logger.debug("deserialize: " + err));
} else {
let standardLogin = async function() {
// login like normal
await ig.simulate.preLoginFlow();
logger.debug("preLoginFlow finished");
await ig.account.login(process.env.IG_USERNAME, process.env.IG_PASSWORD);
logger.info("Logged in as " + process.env.IG_USERNAME);
process.nextTick(async () => await ig.simulate.postLoginFlow());
logger.debug("postLoginFlow finished");
}
// Perform usual login
// If 2FA is enabled, IgLoginTwoFactorRequiredError will be thrown
return Bluebird.try(standardLogin)
.catch(
IgLoginTwoFactorRequiredError,
async err => {
logger.info("Two Factor Auth Required");
const {username, totp_two_factor_on, two_factor_identifier} = err.response.body.two_factor_info;
// decide which method to use
const verificationMethod = totp_two_factor_on ? '0' : '1'; // default to 1 for SMS
// At this point a code should have been sent
// Get the code
const { code } = await inquirer.prompt([
{
type: 'input',
name: 'code',
message: `Enter code received via ${verificationMethod === '1' ? 'SMS' : 'TOTP'}`,
},
]);
// Use the code to finish the login process
return ig.account.twoFactorLogin({
username,
verificationCode: code,
twoFactorIdentifier: two_factor_identifier,
verificationMethod, // '1' = SMS (default), '0' = TOTP (google auth for example)
trustThisDevice: '1', // Can be omitted as '1' is used by default
});
},
)
.catch(e => logger.error('An error occurred while processing two factor auth', e, e.stack));
}
return
//================================================================================
async function stateSave(data) {
// here you would save it to a file/database etc.
await fsp.mkdir(path.dirname(statePath), { recursive: true }).catch(logger.error);
return fsp.writeFile(statePath, JSON.stringify(data))
// .then(() => logger.info('state saved, daddy-o'))
.catch(err => logger.error("Write error" + err));
}
async function stateExists() {
return fsp.access(statePath, fs.constants.F_OK)
.then(() => {
logger.debug('Can access state info')
return true
})
.catch(() => {
logger.warn('Cannot access state info')
return false
});
}
async function stateLoad() {
// here you would load the data
return fsp.readFile(statePath, 'utf-8')
.then(data => JSON.parse(data))
.then(data => {
logger.info("State load successful");
return data
})
.catch(logger.error)
}
}
async function registerFileUsed( filepath ) {
let data = JSON.stringify({
path: filepath,
time: new Date().toISOString()
}) + '\n';
return fsp.appendFile(recordPath, data, { encoding: 'utf8', flag: 'a+' } )
.then(() => {
logger.debug("Writing filename to record file");
return filepath
})
}
function pickUnusedFileFrom( filenames, iMax = 1000) {
return new Promise((resolve, reject) => {
let checkFileUsed = async function ( filepath ) {
return fsp.readFile(recordPath, 'utf8')
.then(data => data.split('\n'))
.then(arr => arr.filter(Boolean))
.then(arr => arr.map(JSON.parse))
.then(arr => arr.some(entry => entry.path === filepath))
}
let trythis = function( iMax, i = 1) {
let file = random(filenames);
checkFileUsed(file)
.then(async used => {
if (!used) {
logger.info(`Unused file found! ${file}`);
resolve(file);
} else if (i < iMax) {
logger.debug(`Try #${i}: File ${file} used already`);
await sleep(50);
trythis(iMax, ++i)
} else {
reject(`I tried ${iMax} times and all the files I tried were previously used`)
}
})
.catch(err => {
logger.warn("Record file not found, saying yes to " + file);
resolve(file);
})
}( iMax );
})
}
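Judging by the log, the chain gets past "Read File Success" and then fails at the sharp() step: sharp is an image library, so sharp(buffer).jpeg() cannot process an .mp4 buffer. Below is a rough sketch of how post() could branch on the file type before that step; the video call is an assumption about instagram-private-api's story options (a video buffer plus a cover image), so verify it against the library's typings before relying on it:
const path = require("path");
const sharp = require("sharp");
// `ig` is assumed to be the logged-in IgApiClient from the code above
async function publishStory(ig, filename, buffer) {
  const ext = path.extname(filename).toLowerCase();
  if (ext === ".mp4") {
    // Assumption: the story publisher accepts video options; check PostingStoryVideoOptions in the library
    const coverImage = await sharp({
      create: { width: 720, height: 1280, channels: 3, background: "#000000" },
    }).jpeg().toBuffer();
    return ig.publish.story({ video: buffer, coverImage });
  }
  // images keep the existing sharp -> jpeg -> publish.story({ file }) path
  const file = await sharp(buffer).jpeg().toBuffer();
  return ig.publish.story({ file });
}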
I need to get some data from DynamoDB using the scan() method. I have implemented some basic pagination by calling my function recursively n number of times to get to the correct page.
Currently, I call my function and, inside the scan() callback, if the data can be sent back, I use the handler callback to return the data.
CURRENT CODE
const AWS = require('aws-sdk')
const docClient = new AWS.DynamoDB.DocumentClient()
const ORGANISATION_TYPES_TABLE_NAME = process.env.ORGANISATION_TYPES_TABLE_NAME
const DEFAULT_PAGE_SIZE = 500
const DEFAULT_PAGE_NUMBER = 1
const self = {
handler: (event, context, callback) => {
const {pageNumber, pageSize} = event.queryStringParameters ? event.queryStringParameters : {pageNumber: DEFAULT_PAGE_NUMBER, pageSize: DEFAULT_PAGE_SIZE}
const params = {
TableName: ORGANISATION_TYPES_TABLE_NAME,
Limit: pageSize ? pageSize : DEFAULT_PAGE_SIZE
}
return self.scan(params, pageNumber, 1, callback)
},
scan: (params, pageNumber, pageCount, callback) => {
docClient.scan(params, (err, data) => {
if (err) {
callback(null, {
statusCode: 500,
body: JSON.stringify(err)
})
};
if (data.LastEvaluatedKey && pageCount < pageNumber) {
pageCount += 1
params.ExclusiveStartKey = data.LastEvaluatedKey
self.scan(params, pageNumber, pageCount, callback)
} else {
callback(null, {
statusCode: 200,
body: JSON.stringify(data)
})
}
})
}
}
module.exports = self
The above code does work, allowing me to specify pageSize and pageNumber query parameters.
However, I want to promisify self.scan.
I tried the following, but it results in the response being undefined.
DESIRED CODE
const AWS = require('aws-sdk')
const docClient = new AWS.DynamoDB.DocumentClient()
const ORGANISATION_TYPES_TABLE_NAME = process.env.ORGANISATION_TYPES_TABLE_NAME
const DEFAULT_PAGE_SIZE = 500
const DEFAULT_PAGE_NUMBER = 1
const self = {
handler: (event, context, callback) => {
const {pageNumber, pageSize} = event.queryStringParameters ? event.queryStringParameters : {pageNumber: DEFAULT_PAGE_NUMBER, pageSize: DEFAULT_PAGE_SIZE}
const params = {
TableName: ORGANISATION_TYPES_TABLE_NAME,
Limit: pageSize ? pageSize : DEFAULT_PAGE_SIZE
}
return self.scan(params, pageNumber, 1).then((response) => {
callback(null, {
statusCode: 200,
body: JSON.stringify(response)
})
}).catch((err) => {
callback(null, {
statusCode: 500,
body: JSON.stringify(err)
})
})
},
scan: (params, pageNumber, pageCount) => {
return new Promise((resolve, reject) => {
docClient.scan(params, (err, data) => {
if (err) {
reject(err)
};
if (data.LastEvaluatedKey && pageCount < pageNumber) {
pageCount += 1
params.ExclusiveStartKey = data.LastEvaluatedKey
self.scan(params, pageNumber, pageCount, callback)
} else {
resolve(data)
}
})
})
}
}
module.exports = self
I also tried just doing return Promise.resolve(data) inside the docClient.scan() callback, but that doesn't work either. It's as if promises cannot be resolved inside a callback?
I have recently helped someone with this problem, and there's actually quite an elegant solution we hit upon that uses the hasNextPage() method on the $response you get back from the SDK. The key is to have your recursive function pass along an array that accumulates the results through the recursive calls, concatenating until you run out of pages, and then return that array.
const scan = async params => {
function scanRec(promise, xs) {
return promise
.then(async result => {
const response = result.$response;
const items = xs.concat(result.Items);
// return the recursion (or the accumulated items) so the chain resolves with the complete list
return response.hasNextPage() ? scanRec(response.nextPage().promise(), items) : items;
})
}
return scanRec(docClient.scan(params).promise(), []);
}
You'd then use the function in the normal way:
const params = { /** params **/ };
scan(params).then(x => {
// ...
})
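As for the undefined in the DESIRED CODE above: when there is another page, self.scan() is called recursively, but nothing ever resolves the outer promise (and callback no longer exists in that scope). A minimal sketch of a fix against the question's own scan, resolving with the recursive call so the promises chain:
scan: (params, pageNumber, pageCount) => {
  return new Promise((resolve, reject) => {
    docClient.scan(params, (err, data) => {
      if (err) {
        return reject(err);
      }
      if (data.LastEvaluatedKey && pageCount < pageNumber) {
        params.ExclusiveStartKey = data.LastEvaluatedKey;
        // resolving with a promise makes the outer promise adopt its eventual value
        return resolve(self.scan(params, pageNumber, pageCount + 1));
      }
      resolve(data);
    });
  });
}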