I'm using the below code to delete all old data every night.
Each function works fine on its own, and if I create multiple scheduled tasks, one for each function, it also works fine. However, when I combine them into a single scheduled task called scheduledCleanData, I'm receiving Error: Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.
at GoogleAuth.getApplicationDefaultAsync (/workspace/node_modules/google-auth-library/build/src/auth/googleauth.js:161:19)
at process._tickCallback (internal/process/next_tick.js:68:7)
As per this post, I believe this is caused by the functions not waiting for the callbacks, rather than by a problem with the credentials. However, adding the async or await keywords causes a parse error. Some of these collections need to delete thousands of records.
Any help how to modify this code to correctly wait would be greatly appreciated!
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const client = require('firebase-tools');

admin.initializeApp(functions.config().firebase);
const db = admin.firestore();

const runtimeOpts = {
  timeoutSeconds: 300,
  memory: '1GB'
};

exports.scheduledCleanData = functions
  .runWith(runtimeOpts)
  .pubsub.schedule('0 3 * * *')
  .timeZone('America/Chicago')
  .onRun((context) => {
    return cleanOldAssignments()
      .then(cleanOldDuties())
      .then(cleanOldEvents())
      .then(console.info("scheduledCleanData Complete!"));
  });
function cleanOldAssignments() {
  const dateYesterday = new Date(new Date().getTime() - (24 * 60 * 60 * 1000)); // 24 hours
  return db.collectionGroup('assignments').where('date', '<', dateYesterday).get()
    .then(querySnapshot => {
      console.info("Old assignments to remove: " + querySnapshot.size);
      const promises = [];
      querySnapshot.forEach(doc => {
        promises.push(doc.ref.delete());
      });
      return Promise.all(promises);
    });
}

function cleanOldDuties() {
  const dateYesterday = new Date(new Date().getTime() - (24 * 60 * 60 * 1000)); // 24 hours
  return db.collectionGroup('duties').where('date', '<', dateYesterday).get()
    .then(querySnapshot => {
      console.info("Old duties to remove: " + querySnapshot.size);
      const promises = [];
      querySnapshot.forEach(doc => {
        promises.push(doc.ref.delete());
      });
      return Promise.all(promises);
    });
}

function cleanOldEvents() {
  const dateYesterday = new Date(new Date().getTime() - (24 * 60 * 60 * 1000)); // 24 hours
  return db.collectionGroup('events').where('date', '<', dateYesterday).get()
    .then(querySnapshot => {
      console.info("Old events to remove: " + querySnapshot.size);
      const promises = [];
      querySnapshot.forEach(doc => {
        promises.push(doc.ref.delete());
      });
      return Promise.all(promises);
    });
}
The problem is in how you build the chain: .then(cleanOldDuties()) invokes cleanOldDuties immediately and hands its returned promise to then(), instead of passing a callback for then() to invoke when the previous step finishes. The same goes for cleanOldEvents() and the console.info(...) call. As a result, nothing actually waits, and the function instance can be torn down before the deletes finish, which is what surfaces as the credentials error. You need to pass functions that return the promises so they propagate down the promise chain:
return cleanOldAssignments()
  .then(() => { return cleanOldDuties() })
  .then(() => { return cleanOldEvents() })
Those functions also have to be implemented correctly: each one must return a promise that resolves only after all of its work is complete. Yours do, since each returns the result of Promise.all() over the individual deletes. async/await is almost always a clearer way to express this sort of sequencing; the parse error you ran into most likely means the keywords were placed in the wrong position.
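For reference, a minimal sketch of the same handler rewritten with async/await, reusing the runtimeOpts and cleanup functions from the question (note that async goes on the callback passed to onRun):

exports.scheduledCleanData = functions
  .runWith(runtimeOpts)
  .pubsub.schedule('0 3 * * *')
  .timeZone('America/Chicago')
  .onRun(async (context) => {
    // Awaiting each call guarantees the previous cleanup has finished
    await cleanOldAssignments();
    await cleanOldDuties();
    await cleanOldEvents();
    console.info("scheduledCleanData Complete!");
    return null;
  });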
Related
I want to run a Script every 6 Hours
const { IgApiClient } = require("instagram-private-api");
const ig = new IgApiClient();

const USERNAME = "abc";
const PASSWORD = "xyz";
ig.state.generateDevice(USERNAME);

const main = async () => {
  var birthday = new Date(2069, 05, 14);
  var today = new Date();
  birthday.setFullYear(today.getFullYear());
  if (today > birthday) {
    birthday.setFullYear(today.getFullYear() + 1);
  }
  var daystill = Math.floor((birthday - today) / (1000 * 60 * 60 * 24));
  await ig.simulate.preLoginFlow();
  await ig.account.login(USERNAME, PASSWORD);
  process.nextTick(async () => await ig.simulate.postLoginFlow());
  await ig.account.setBiography(`${daystill} Days till my Birthday, Today is ${new Date().getDate()}/${new Date().getMonth()}/${new Date().getFullYear()}. (AutoGenerated)`);
};

main();
About the script: it updates my Instagram bio with async/await, using the instagram-private-api package.
Problem / Goal:
I tried using node-cron, but it returns an error (I think the async code is causing the problem). I also tried while loops and setInterval().
I want this script/file to run every 6 hours. I have a Heroku account (if that helps).
Error when i use node-cron:
node:internal/process/promises:288
triggerUncaughtException(err, true /* fromPromise */);
Code for node-cron:
cron.schedule('* * * * *', () => { // this is not every 6hrs
  const main = async () => {
    // same as above
  };
  main();
});
Doing it the async/await way, as the title says:
// used to measure time
import { performance } from 'perf_hooks';

const interval = 1000; // in ms; for every 6 hours this would be 6 * 60 * 60 * 1000

(async function main() {
  let start_time = performance.now();
  // do stuff
  let stop_time = performance.now();
  // subtract however long the work took, so the loop keeps a steady cadence
  let timeout = interval - (stop_time - start_time);
  setTimeout(main, timeout);
})();
edit:
To explain the syntax behind the main function: (async function main(){ ... })(); is an immediately invoked function expression (IIFE). The trailing (); automatically calls the function inside the first parentheses as soon as the script starts.
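Applied to the Instagram script, the loop could look roughly like this (a sketch; updateBio is a hypothetical name standing in for the body of the original main):

const SIX_HOURS = 6 * 60 * 60 * 1000;

(async function loop() {
  try {
    await updateBio(); // hypothetical: the original login + setBiography logic
  } catch (err) {
    console.error(err); // log failures so one bad run doesn't stop the loop
  }
  setTimeout(loop, SIX_HOURS);
})();

And if you would rather stay with node-cron, the expression for every 6 hours is '0 */6 * * *', not '* * * * *'.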
I'm trying to incorporate rate limiting into my site to prevent brute-force attacks while also preventing accounts from being locked out by random users. I read this article to see the basics of how it works, but the code is not working for me. I've looked through the documentation, but I can't find anything mentioning the error I'm getting, and I can't find it anywhere else either.
import { createClient } from 'redis';
import { RateLimiterRedis } from 'rate-limiter-flexible';

const maxWrongAttemptsByIPperMinute = 6;
const maxWrongAttemptsByIPperDay = 100;
const maxConsecutiveFailsByEmailAndIP = 12;
const redisPort = 6379;

const redisClient = createClient({
  host: 'redis',
  port: redisPort,
  enable_offline_queue: false,
});
redisClient.on('error', (err) => console.log('Redis Client Error', err));

// Limit attempts by IP throughout the day
export const limiterSlowBruteByIP = new RateLimiterRedis({
  redis: redisClient,
  keyPrefix: 'login_fail_ip_per_day',
  points: maxWrongAttemptsByIPperDay,
  duration: 60 * 60 * 24,
  blockDuration: 60 * 60 * 24, // Block for 1 day after 100 failed attempts in a day
});

// Limit attempts by IP on a per-minute basis
export const limiterFastBruteByIP = new RateLimiterRedis({
  redis: redisClient,
  keyPrefix: 'login_fail_ip_per_minute',
  points: maxWrongAttemptsByIPperMinute,
  duration: 30,
  blockDuration: 60 * 5, // Block IP for 5 minutes after 6 wrong attempts within 30 seconds
});

// Limit attempts by email and IP combo within 90 days
export const limiterConsecutiveFailsByEmailAndIP = new RateLimiterRedis({
  redis: redisClient,
  keyPrefix: 'login_fail_consecustive_username_and_ip',
  points: maxConsecutiveFailsByEmailAndIP,
  duration: 60 * 60 * 24 * 90, // Store the count for 90 days since the first fail
  blockDuration: 60 * 60 * 24 * 365, // Block for 1 year after max consecutive fails
});

// Build the email + IP combination key
export const getEmailIPkey = (email, ip) => `${email}_${ip}`;

export const loginRateLimitChecker = async (req, res, next) => {
  const ipAddr = req.connection.remoteAddress;
  const emailIPkey = getEmailIPkey(req.body.email, ipAddr);
  const test = await limiterConsecutiveFailsByEmailAndIP.get(emailIPkey);
  console.log('here');
I've only included the code up to where the error occurs, because the rest is irrelevant for now. The error I am getting comes from the .get() call, and the console.log is never reached.
.pttl(rlKey)
^
TypeError: this.client.multi(...).get(...).pttl is not a function
According to the official docs, the method for checking and consuming the rate limit is .consume():
const rateLimiterMiddleware = (req, res, next) => {
  rateLimiter.consume(req.ip)
    .then(() => {
      next();
    })
    .catch(() => {
      res.status(429).send('Too Many Requests');
    });
};
Based on that, try fixing your code like this:
export const loginRateLimitChecker = async (req, res, next) => {
  const ipAddr = req.connection.remoteAddress;
  const email = req.body && req.body.email ? req.body.email : 'n/a';
  const emailIPkey = getEmailIPkey(email, ipAddr);
  try {
    await limiterConsecutiveFailsByEmailAndIP.consume(emailIPkey);
  }
  catch (error) {
    res.status(429).send('Too Many Requests');
    console.log(`Request rate-limited by key ${emailIPkey}`);
    return;
  }
  next();
};
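One refinement, going by the rate-limiter-flexible docs: .consume() rejects with a RateLimiterRes object when the key is rate-limited, but with an ordinary Error when the underlying store fails, so the catch block can tell the two cases apart:

try {
  await limiterConsecutiveFailsByEmailAndIP.consume(emailIPkey);
  next();
} catch (rejRes) {
  if (rejRes instanceof Error) {
    // The Redis store itself failed; surface it as a server error
    res.status(500).send('Internal Server Error');
  } else {
    // rejRes is a RateLimiterRes: the key has exceeded its points
    res.status(429).send('Too Many Requests');
  }
}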
I want to delete some documents in Firebase 30 minutes after creation, and I'm trying to use Firebase Functions for this. Somehow it always deletes documents without checking the dates. Here is my code. It runs, but I don't understand why I can't check the dates.
exports.scheduledFunction = functions.pubsub.schedule('every 1 minutes').onRun((context) => {
  const now = new Date();
  const queryTime = new Date(now - 30 * 60000);
  const trips = admin.firestore().collection('trips');
  const allTrips = trips.get().then(snapshot => {
    snapshot.forEach(trip => {
      if (trip.data().date < queryTime) {
        admin.firestore().collection('trips').doc(trip.id).delete();
      }
    });
  });
  return allTrips;
});
The expression now - 30 * 60000 works only because the Date is implicitly coerced to a number; using Date.now() to get the current timestamp makes the arithmetic explicit:
const queryTime = new Date(Date.now() - 30 * 60000);
Then you can use a where() clause to get documents whose date field is older than 30 minutes, instead of fetching all documents and filtering them yourself. This will save many read charges, as you fetch only the matching documents and not the whole collection. It also sidesteps the likely cause of your problem: trip.data().date comes back as a Firestore Timestamp rather than a JavaScript Date, so comparing the two directly with < is unreliable, whereas in a query the SDK converts the Date for you.
export const scheduledFunction = functions.pubsub
  .schedule("every 1 minutes")
  .onRun(async (context) => {
    // async fn ^^
    const queryTime = new Date(Date.now() - 30 * 60 * 1000);
    // Alternatively:
    // const queryTime = admin.firestore.Timestamp.fromMillis(Date.now() - 30 * 60 * 1000);

    // Query to fetch documents where date is less than queryTime
    const tripsQuery = admin
      .firestore()
      .collection("trips")
      .where("date", "<", queryTime);

    const allTrips = await tripsQuery.get();

    // Mapping an array of delete promises
    await Promise.all(allTrips.docs.map((d) => d.ref.delete()));
    return allTrips;
  });
Alternatively you can also use batched writes to delete up to 500 documents at once.
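A minimal sketch of that alternative, replacing the Promise.all line above (and assuming the query matches at most 500 documents; larger result sets would need to be chunked):

const batch = admin.firestore().batch();
allTrips.docs.forEach((d) => batch.delete(d.ref)); // queue each delete in the batch
await batch.commit(); // a single commit applies them all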
Good afternoon. I have a script that receives data from the Binance API every 30 minutes and saves it to a specific MongoDB database, in different collections.
MongoDB is installed on a VPS, and I'm connecting to it from my local computer.
The problem is that when I leave the code running constantly for about 3 days, sometimes data is not saved to a specific collection, or goes missing. How can I configure my code, or what else can I do, to make the connection reliable enough that all data is saved correctly?
Problem explanation:
I have an array of symbols:
symbols = ["ada", "ae", "kava", "eth", "etc", "zrx", "xzc", "faq", "gas", "vfg", "req"];
When I leave the code running for, say, 25 hours, I expect to see 50 documents in every collection. Instead, some collections end up with 48 documents, some with 49, and some with 50, as if the data sometimes isn't saved properly.
Full code:
const { MongoClient } = require('mongodb');
const schedule = require('node-schedule');
const fetch = require("node-fetch");

const symbols = ["ada", "ae", "kava", "eth", "etc", "zrx", "xzc", "faq", "gas", "vfg", "req"];

// a descriptive name helps your future self and others understand code easier
const getBTCData = async symbol => { // make this function accept the current symbol
  // async/await lets us write this much nicer and with less nested indents
  let data = await fetch(`https://api.binance.com/api/v3/klines?symbol=${symbol}&interval=30m&limit=1`).then(res => res.json());
  const btcusdtdata = data.map(d => {
    return {
      Open: parseFloat(d[1]),
      High: parseFloat(d[2]),
      Low: parseFloat(d[3]),
      Close: parseFloat(d[4]),
      Volume: parseFloat(d[5]),
      Timespan: 30,
    };
  });
  console.log(btcusdtdata);
  saveToDatabase(symbol, btcusdtdata);
  // recursive functions are complicated, we can get rid of it here
  // by moving the responsibility to the caller
};

// helper function for an awaitable timeout
const sleep = ms => new Promise(res => setTimeout(res, ms));

const j = schedule.scheduleJob('*/30 * * * *', async () => {
  // expand this function to be responsible for looping the data
  for (let symbol of symbols) {
    // we can pass symbol to getBTCData instead of making it
    // responsible for figuring out which symbol it should get
    await getBTCData(symbol);
    await sleep(8000);
  }
});

// make this a helper function so `saveToDatabase()` isn't also responsible for it
const getDateTime = () => {
  let today = new Date();
  let date = today.getFullYear() + '-' + (today.getMonth() + 1) + '-' + today.getDate();
  let time = today.getHours() + ":" + today.getMinutes() + ":" + today.getSeconds();
  return date + ' ' + time;
};

const saveToDatabase = async (symbol, BTCdata) => {
  const url = 'mongodb://username:password@server:port/dbname?retryWrites=true&w=majority';
  let dateTime = getDateTime();
  // use await here and below to vastly simplify this function
  let db = await MongoClient.connect(url, { useUnifiedTopology: true });
  const dbo = db.db('Crypto');
  const myobj = { Name: symbol, Array: BTCdata, Date: dateTime };
  await dbo.collection(symbol).insertOne(myobj);
  console.log('1 document inserted');
  db.close();
};
Goal: solve the saving issue. I think it could also be something with the VPS or its responses. How can I update my code so that it is cleaner and faster, if possible?
Any suggestions or help would be greatly appreciated.
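One plausible culprit (an assumption, since intermittent losses are hard to reproduce): saveToDatabase is called without await inside getBTCData, so a rejected insert is silently dropped, and a fresh MongoClient connection is opened for every single write. A sketch that awaits the save, logs failures, and reuses one shared client:

const { MongoClient } = require('mongodb');

// Same connection string as above
const url = 'mongodb://username:password@server:port/dbname?retryWrites=true&w=majority';
const client = new MongoClient(url, { useUnifiedTopology: true });
const clientReady = client.connect(); // connect once, reuse for every write

const saveToDatabase = async (symbol, BTCdata) => {
  await clientReady; // make sure the shared connection is up
  const myobj = { Name: symbol, Array: BTCdata, Date: getDateTime() };
  await client.db('Crypto').collection(symbol).insertOne(myobj);
  console.log('1 document inserted');
};

// Inside getBTCData, await the save so any failure is surfaced and logged
// instead of vanishing:
try {
  await saveToDatabase(symbol, btcusdtdata);
} catch (err) {
  console.error(`Failed to save ${symbol}:`, err);
}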
I'm attempting to clean up my code with promises and async/await. My problem is that I need these requests to be re-callable, with the same handling afterwards.
I've tried promises, but if I nest everything in functions, it gets really messy fast. How do I make this code so that it only continues in the go() async function when a value is returned?
const request = require('request-promise');
require('console-stamp')(console, 'HH:MM:ss.l');
const colors = require('colors');

const kws = 'sweatsasaaser'.toLowerCase();
const size = 'Small';

go();

async function go() {
  const f = await getproduct();
  console.log('Finished ' + f);
  if (f == undefined) getproduct();
}

async function getproduct() {
  console.log('Requesting');
  let result = await request('https://www.supremenewyork.com/mobile_stock.json');
  let data = JSON.parse(result);
  let prodid;
  for (var i = 0; i < data.products_and_categories['Tops/Sweaters'].length; i++) {
    if (data.products_and_categories['Tops/Sweaters'][i].name.toLowerCase().includes(kws)) {
      console.info('Found product: '.green + data.products_and_categories['Tops/Sweaters'][i].name.green);
      return prodid = data.products_and_categories['Tops/Sweaters'][i].id;
    }
  }
  if (prodid == undefined) {
    console.log(`Product id: ${prodid}`.blue);
    return prodid;
  }
  else {
    setTimeout(function () {
      //getproduct()
    }, 4000);
  }
}
Write a separate function:
/**
 * Re-executes an async function n times or until it resolves
 * @param {function} fn Function to call
 * @param {number} [times=3] Times to retry before rejecting
 * @param {number} [delay=1000] Delay between retries
 * @param {number} [i=0] Counter for how many times it's already retried
 */
async function retry(fn, times = 3, delay = 1000, i = 0) {
  try {
    return await fn();
  } catch (error) {
    if (i < times) {
      await new Promise(r => setTimeout(r, delay));
      return retry(fn, times, delay, i + 1);
    }
    else throw error;
  }
}
Have your main function getproduct simply throw the error:
else {
  // setTimeout(function(){
  //   //getproduct()
  // }, 4000)
  throw new Error('Cannot get productid');
}
And use it with the new retry function:
async function go() {
  const f = await retry(getproduct, 3);
  console.log('Finished ' + f);
}
In case you want to pass arguments, simply wrap it:
const f = await retry(() => getproduct(...args), 3)