When a user deletes their account, I want to remove their storage files along with their data.
I am able to do a multi-path delete for the RTDB; how can I do this but also remove files from Storage too?
I have tried chaining on a .then(), but it makes everything fail...
For example:
.then(() => {
  const bucket = gcs.bucket(functions.config().firebase.storageBucket);
  const path = `categories/${uid}`;
  return bucket.file(path).delete();
})
I wish there were a faster way to test functions without deploying every time; it has taken so much time trying to make this work...
Here is my working code:
exports.removeUserFromDatabase = functions.auth.user()
  .onDelete(function(user, context) {
    var uid = user.uid;
    const deleteUserData = {};
    deleteUserData[`users/${uid}`] = null;
    deleteUserData[`feed/${uid}`] = null;
    deleteUserData[`friends/${uid}`] = null;
    deleteUserData[`profileThumbs/${uid}`] = null;
    deleteUserData[`hasUnreadMsg/${uid}`] = null;
    deleteUserData[`userChatRooms/${uid}`] = null;
    deleteUserData[`userLikedPosts/${uid}`] = null;
    deleteUserData[`userLikedStrains/${uid}`] = null;
    return admin.database().ref('/friends').orderByChild(`${uid}/uid`).equalTo(uid)
      .once("value").then((friendsSnapshot) => {
        friendsSnapshot.forEach((friendSnapshot) => {
          deleteUserData[`/friends/${friendSnapshot.key}/${uid}`] = null;
        });
        return admin.database().ref().update(deleteUserData);
      })
      .then(() => {
        // const bucket = gcs.bucket(functions.config().firebase.storageBucket);
        const bucket = admin.storage().bucket();
        const path = `categories/${uid}`;
        return bucket.file(path).delete();
      });
  });
I feel like it's because I am not dealing with the promise correctly; I just don't know where this is going wrong.
My code snippet currently works until I chain the .then().
Cheers.
Your current code is not returning anything from the top level, which means the function may get terminated at any point while it's still writing to the database.
You'll want to return admin.database()... and then chain the additional then() after it.
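For reference, a minimal sketch of that structure (assuming admin.initializeApp() has been called and that categories/${uid} refers to a single file; if it's actually a folder of files, you'd likely need bucket.deleteFiles({ prefix: ... }) instead):

exports.removeUserFromDatabase = functions.auth.user()
  .onDelete((user, context) => {
    const uid = user.uid;
    const deleteUserData = { [`users/${uid}`]: null /* ...other paths... */ };
    // Returning the whole chain keeps the function alive until both
    // the database update and the storage delete have finished.
    return admin.database().ref().update(deleteUserData)
      .then(() => {
        const bucket = admin.storage().bucket();
        return bucket.file(`categories/${uid}`).delete();
      });
  });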
I have a website whose frontend runs on Firebase Hosting and whose server, written in Node.js and Express, runs on Firebase Functions.
I want to have redirect links on my website so I can map, for example, mywebsite.com/youtube to my YouTube channel. I create these links from my admin panel and add them to my Firestore database.
My data is roughly something like this (field names taken from the read code further down; the values are placeholders):
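{
  "createdAt": "2022-01-01T10:00:00Z",
  "creatorEmail": "admin@mywebsite.com",
  "name": "youtube",
  "url": "https://www.youtube.com/c/my-channel"
}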
The first way I approached this was by querying my Firestore database on every request, but that is heavily expensive and slow.
Another way I tried was setting up some kind of background listener on the Firestore database which would always provide up-to-date data, but unfortunately that did not work because Firebase Functions suspends the main function when the current request execution ends.
Lastly, and most conveniently, I configured an API route which is called from my admin panel whenever any change happens to the data, and saves the new data to a JSON file. I tried this locally and it worked, but it did not work in production because apparently Firebase Functions runs on a read-only file system, so files can't be edited after they are deployed. After some research I found out that Firebase Functions allows writing to the tmp directory, so I went forward with this and deployed it. But again, Firebase Functions was resetting the tmp folder when a request execution ended.
here is my api request code which updates the utm_data.json file in the tmp directory:
// my firestore provider
const db = require('../db');
const fs = require('fs');
const os = require('os');
const mkdirp = require('mkdirp');

const updateUrlsAPI = (req, res) => {
  // we wanna get the utm list from firestore, and update the file
  // tmp/utm_data.json
  // query data from firestore
  db.collection('utmLinks').get().then(async function(querySnapshot) {
    try {
      // get the path to `tmp` folder depending on
      // the os running this program
      let tmpFolderName = os.tmpdir();
      // create `tmp` directory if not exists
      await mkdirp(tmpFolderName);
      let docsData = querySnapshot.docs.map(doc => doc.data());
      let tmpFilePath = tmpFolderName + '/utm_data.json';
      let strData = JSON.stringify(docsData);
      fs.writeFileSync(tmpFilePath, strData);
      res.send('200');
    } catch (error) {
      console.log("error while updating utm_data.json: ", error);
      res.send(error);
    }
  });
}
and this is my code for reading the utm_data.json file on an incoming request:
const os = require('os'); // needed for os.tmpdir() below

const readUrlsFromJson = (req, res) => {
  var url = req.path.split('/');
  // the url will be in the format of: 'mywebsite.com/routeName'
  var routeName = url[1];
  try {
    // read the file ../tmp/utm_data.json
    // each doc looks like:
    // {
    //   'createdAt': Date,
    //   'creatorEmail': string,
    //   'name': string,
    //   'url': string
    // }
    // our [routeName] should match the [name] of the doc
    let tmpFolderName = os.tmpdir();
    let tmpFilePath = tmpFolderName + '/utm_data.json';
    // read the links list file and assign it to the `utms` variable
    let utms = require(tmpFilePath);
    if (!utms || !utms.length) {
      return undefined;
    }
    // find the link matching the routeName
    let utm = utms.find(utm => utm.name == routeName);
    if (!utm) {
      return undefined;
    }
    // if we found the doc,
    // then we'll redirect to the url
    res.redirect(utm.url);
  } catch (error) {
    console.error(error);
    return undefined;
  }
}
Is there something I am doing wrong, and if not, what is an optimal solution for this case?
You can initialize the Firestore listener in global scope. From the documentation,
The global scope in the function file, which is expected to contain the function definition, is executed on every cold start, but not if the instance has already been initialized.
This should keep the listener active even after the function's execution has completed, for as long as that specific instance keeps running (typically around ~30 minutes). Try refactoring the code as shown below:
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";

admin.initializeApp();

let listener = false;
// Store all utmLinks in global scope
let utmLinks: any[] = [];

const initListeners = () => {
  functions.logger.info("Initializing listeners");
  admin
    .firestore()
    .collection("utmLinks")
    .onSnapshot((snapshot) => {
      snapshot.docChanges().forEach(async (change) => {
        functions.logger.info(change.type, "document received");
        switch (change.type) {
          case "added":
            utmLinks.push({ id: change.doc.id, ...change.doc.data() });
            break;
          case "modified": {
            const index = utmLinks.findIndex(
              (link) => link.id === change.doc.id
            );
            utmLinks[index] = { id: change.doc.id, ...change.doc.data() };
            break;
          }
          case "removed":
            utmLinks = utmLinks.filter((link) => link.id !== change.doc.id);
            break;
          default:
            break;
        }
      });
    });
  return;
};

// The HTTPS function
export const helloWorld = functions.https.onRequest(
  async (request, response) => {
    if (!listener) {
      // Cold start, no listener active
      initListeners();
      listener = true;
    } else {
      functions.logger.info("Listeners already initialized");
    }
    response.send(JSON.stringify(utmLinks, null, 2));
  }
);
This example stores all UTM links in an array in global scope. The array won't be persisted across new instances, but you won't have to query each link for every request, and the onSnapshot() listener will keep utmLinks updated.
If you want to persist this data permanently and avoid querying on every cold start, you can try Google Cloud Compute Engine, which keeps running, unlike Cloud Functions instances that are eventually terminated.
Background:
I am building a discord bot that operates as a Dungeons & Dragons DM of sorts. We want to store game data in a database and during the execution of certain commands, query data from said database for use in the game.
All of the connections between our Discord server, our VPS, and the VPS's backend are functional, and we are now implementing slash commands since traditional ! commands are losing support in April.
We are running into problems making the slash commands, though. We want to set them up to be as efficient as possible, which means no hard-coded choices for options; we want to build those choice lists from data in the database.
The problem we are running into is that we can't figure out the proper way to implement the fetch to the database within the SlashCommandBuilder.
Here is what we currently have:
const {SlashCommandBuilder} = require('@discordjs/builders');
const fetch = require('node-fetch');
const {REST} = require('@discordjs/rest');
const test = require('../commonFunctions/test.js');

var options = async function getOptions(){
  let x = await test.getClasses();
  console.log(x);
  return ['test', 'test2'];
}

module.exports = {
  data: new SlashCommandBuilder()
    .setName('get-test-data')
    .setDescription('Return Class and Race data from database')
    .addStringOption(option => {
      option.setName('class')
        .setDescription('Select a class for your character')
        .setRequired(true)
      for(let op of options()){
        //option.addChoice(op, op);
      }
      return option
    }),
  async execute(interaction){
  },
};
This code produces the following error when we start the bot's npm process on our server:
options is not a function or its return value is not iterable
I thought that maybe the function wasn't properly defined, so I replaced its contents with just a simple array return; the npm process then started without errors and the values I had passed showed up in the server.
This leads me to think that the function call in the module.exports block immediately attempts to get the return value of the function, and as the function is async, it isn't ready yet and returns a promise or something else that isn't iterable.
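A quick check in plain Node seems to confirm that: calling an async function always returns a Promise, and iterating over a Promise throws.

const options = async function getOptions(){ return ['test', 'test2']; };
console.log(options());           // Promise { <pending> }, not an array
for (const op of options()) { }   // TypeError: options(...) is not iterable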
Is there a proper way to implement the code as shown? Or is this way too complex for discord.js to handle?
Is there a proper way to implement the idea at all? For example, creating a JSON object that contains the option data, which is built and saved to a file at some point before this command is registered, and then having the code above just pull in that file for the option choices?
Alright, I found a way. Ian Malcolm would be proud (LMAO).
Here is what I had to do, for those with similar issues:
I had to basically re-write our entire application. It sucks, I know, but it works so who cares?
When you run your index file via npm, make sure that you do the following things.
Note: you can structure this however you want; this is just how I set up my js files.
Set up a function that will prepare the data you need. It needs to be an async function, as does everything downstream from this point on relating to the creation and registration of the slash commands.
Create a js file to act as your application setup "module". "Module" because we're faking a real module by just using the module.exports method. No package.jsons needed.
In the setup file, you will need two requires. The first is an, as of yet, non-existent data manager file; we'll create that next. The second is a require for node:fs.
Create an async function in your setup file called setup and add it to your module.exports like so:
module.exports = { setup }
In your async setup function, or in a function that it calls, make a call to the function in your still as of yet non-existent data manager file. Use await so that the application doesn't proceed until something is returned. Here is what mine looks like; note that I am writing my data to a file to read in later because of my use case, you may or may not have to do the same for yours:
async function setup(){
  console.log('test');
  // build option choice lists
  let listsBuilt = await buildChoiceLists();
  if (listsBuilt){
    return true;
  } else {
    return false;
  }
}

async function buildChoiceLists(){
  let classListBuilt = await buildClassList();
  return true;
}

async function buildClassList(){
  let classData = await classDataManager.getClassData();
  console.log(classData);
  classList = classData;
  await writeFiles();
  return true;
}

async function writeFiles(){
  fs.writeFileSync('./CommandData/classList.json', JSON.stringify(classList));
}
Before we finish off this file: if you want to store anything as a property in this file and then get it later on, you can do so. In order for the data to be returned properly, though, you will need to define a getter function in your exports. Here is an example:
var classList;

module.exports = {
  getClassList: () => classList,
  setup
};
So, with everything above you should have something that looks like this:
const classDataManager = require('./DataManagers/ClassData.js');
const fs = require('node:fs');

var classList;

async function setup(){
  console.log('test');
  // build option choice lists
  let listsBuilt = await buildChoiceLists();
  if (listsBuilt){
    return true;
  } else {
    return false;
  }
}

async function buildChoiceLists(){
  let classListBuilt = await buildClassList();
  return true;
}

async function buildClassList(){
  let classData = await classDataManager.getClassData();
  console.log(classData);
  classList = classData;
  await writeFiles();
  return true;
}

async function writeFiles(){
  fs.writeFileSync('./CommandData/classList.json', JSON.stringify(classList));
}

module.exports = {
  getClassList: () => classList,
  setup
};
Next, that pesky non-existent DataManager file. For mine, each data type will have its own; you might want to combine them all into a single .js file for yours.
Same with the folder name: I called mine DataManagers. If you're combining them all into one, you could just call the file DataManager and leave it in the same folder as your appSetup.js file.
For the data manager file, all we really need is a function to get our data and return it in the format we want. I am using node-fetch; if you are using some other module for data requests, write your code as needed.
Instead of explaining everything, here are the contents of my file; not much has to be explained here:
const fetch = require('node-fetch');

async function getClassData(){
  return new Promise((resolve) => {
    let data = "action=GetTestData";
    fetch('http://xxx.xxx.xxx.xx/backend/characterHandler.php', {
      method: 'post',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: data
    }).then(response => {
      response.json().then(res => {
        let status = res.status;
        let clsData = res.classes;
        let rcData = res.races;
        if (status == "Success"){
          let classes = [];
          let races = [];
          if (Object.keys(clsData).length > 0){
            for (let key of Object.keys(clsData)){
              classes.push({
                "name": key,
                "code": key.toLowerCase()
              });
            }
          }
          if (Object.keys(rcData).length > 0){
            for (let key of Object.keys(rcData)){
              let rc = rcData[key];
              races.push({
                "name": key,
                "desc": rc.Desc
              });
            }
          }
          resolve(classes);
        }
      });
    });
  });
}

module.exports = {
  getClassData
};
This file contacts our backend PHP and requests data from it. The backend queries the data and returns it; we then format it into a JSON structure for later use as option choices for the slash command.
Once all of your appSetup and data manager files are complete, we still need to create the commands and register them with the server. So, in your index file add something similar to the following:
async function getCommands(){
  let cmds = await comCreator.appSetup();
  console.log(cmds);
  client.commands = cmds;
}

getCommands();
This should go at or near the top of your index.js file. Note that comCreator refers to a file we haven't created yet; you can name this require const whatever you wish. That's it for this file.
Now, the "comCreator" file. I named mine deploy-commands.js, but you can name it whatever you like. Once again, here are the full file contents; I will explain anything that needs explaining afterwards:
const {Collection} = require('discord.js');
const {REST} = require('@discordjs/rest');
const {Routes} = require('discord-api-types/v9');
const app = require('./appSetup.js');
const fs = require('node:fs');
const config = require('./config.json');

async function appSetup(){
  console.log('test2');
  let setupDone = await app.setup();
  console.log(setupDone);
  console.log(app.getClassList());
  return new Promise((resolve) => {
    const cmds = [];
    const cmdFiles = fs.readdirSync('./commands').filter(f => f.endsWith('.js'));
    for (let file of cmdFiles){
      let cmd = require('./commands/' + file);
      console.log(file + ' added to commands!');
      cmds.push(cmd.data.toJSON());
    }
    const rest = new REST({version: '9'}).setToken(config.token);
    rest.put(Routes.applicationGuildCommands(config.clientId, config.guildId), {body: cmds})
      .then(() => console.log('Successfully registered application commands.'))
      .catch(console.error);
    let commands = new Collection();
    for (let file of cmdFiles){
      let cmd = require('./commands/' + file);
      commands.set(cmd.data.name, cmd);
    }
    resolve(commands);
  });
}

module.exports = {
  appSetup
};
Most of this is boilerplate for slash command creation, though I did combine the creation and registering of the commands into the same process. As you can see, we grab our command files, process them into a Collection, register them with the server, and then resolve the promise with that Collection.
You might have noticed that this resolved value is what was used to set the client commands in the index.js file.
config just contains your connection details for your Discord server app.
Finally, how I accessed the data we wrote for the SlashCommandBuilder:
data: new SlashCommandBuilder()
  .setName('get-test-data')
  .setDescription('Return Class and Race data from database')
  .addStringOption(option => {
    option.setName('class')
      .setDescription('Select a class for your character')
      .setRequired(true)
    let ops = [];
    let data = fs.readFileSync('./CommandData/classList.json', 'utf-8');
    ops = JSON.parse(data);
    console.log('test data class options: ' + ops);
    for(let op of ops){
      option.addChoice(op.name, op.code);
    }
    return option
  }),
Hopefully this helps someone in the future!
I am currently developing an app which interacts with Uniswap, and I have developed a wrapper class to contain the info and variables I'll need about some pair (e.g. DAI/WETH).
As some of these values are asynchronous, I have coded an async build() function to fetch them before calling the constructor, so I can store them. I want to store the result of this build function, which is an instance of the class I have defined, inside a variable to use later, but I need to know that the Promise the build function returns has resolved before using it. How can I do that?
Here is the code of the class:
'use strict'
const { ChainId, Fetcher, WETH, Route, Trade, TradeType, TokenAmount } = require('@uniswap/sdk')
const { toChecksumAddress } = require('ethereum-checksum-address')
const Web3 = require('web3')
const web3 = new Web3()
const chainId = ChainId.MAINNET;
let tok1;
let tok2;
let pair;
let route;
let trade;
class UniswapTokenPriceFetcher
{
  constructor(async_params)
  {
    async_params.forEach((element) => {
      if (element === undefined)
      {
        throw new Error('All parameters must be defined')
      }
    });
    this.trade = async_params[0];
    this.route = async_params[1];
    this.pair = async_params[2];
    this.tok1 = async_params[3];
    this.tok2 = async_params[4];
  }

  static async build(token1, token2)
  {
    var tok1 = await Fetcher.fetchTokenData(chainId, toChecksumAddress(token1))
    var tok2 = await Fetcher.fetchTokenData(chainId, toChecksumAddress(token2))
    var pair = await Fetcher.fetchPairData(tok1, tok2)
    var route = new Route([pair], tok2)
    var trade = new Trade(route, new TokenAmount(tok2, web3.utils.toWei('1', 'Ether')), TradeType.EXACT_INPUT)
    return new UniswapTokenPriceFetcher([trade, route, pair, tok1, tok2])
  }

  getExecutionPrice6d = () =>
  {
    return this.trade.executionPrice.toSignificant(6);
  }

  getNextMidPrice6d = () =>
  {
    return this.trade.nextMidPrice.toSignificant(6);
  }
}
module.exports = UniswapTokenPriceFetcher
Thank you everybody!
EDIT: I know Uniswap only pairs with WETH, so one of my token variables is unnecessary, but the problem remains the same! Also keep in mind that I want to store an instance of this class for later use inside another file.
You should either call the build function with await
const priceFetcher = await UniswapTokenPriceFetcher.build(token1, token2)
or follow it with then():
UniswapTokenPriceFetcher.build(token1, token2).then(priceFetcher => {...})
I don't see any other way.
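Since you mention wanting to store the instance for later use in another file, one option is a small module that caches the build promise. A minimal sketch (the module path and the token address constants are hypothetical):

// priceFetcherStore.js
const UniswapTokenPriceFetcher = require('./UniswapTokenPriceFetcher')

let fetcherPromise = null

function getPriceFetcher(token1, token2) {
  // Cache the pending promise so build() only runs once;
  // every caller awaits the same instance.
  if (!fetcherPromise) {
    fetcherPromise = UniswapTokenPriceFetcher.build(token1, token2)
  }
  return fetcherPromise
}

module.exports = { getPriceFetcher }

Then, anywhere else (inside an async function): const fetcher = await getPriceFetcher(DAI_ADDRESS, WETH_ADDRESS); console.log(fetcher.getExecutionPrice6d());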
I've got the following Firebase function, which runs once a file is uploaded to Firebase Storage.
It basically gets the file's URL and saves a reference to it in Firestore. I need to save them in a way that lets me query them randomly from my client, and indexes seem to best fit this requirement.
For the Firestore reference I need the following things:
doc ids must go from 0 to n (n being the index of the last document)
have a --stats-- doc keeping track of n (which gets incremented every time a document is uploaded)
To achieve this I've written the following Node.js script:
const incrementIndex = admin.firestore.FieldValue.increment(1);

export const image_from_storage_to_firestore = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name;
    const splittedPath = filePath!.split("/");
    // if we are inside the images tree
    // path = emotions/$emotion/photos/$photographer/file.jpeg
    if (splittedPath[0] === "emotions" && splittedPath[2] === "photos") {
      const emotion = splittedPath[1];
      const photographer = splittedPath[3];
      const file = bucket.file(filePath!);
      const indexRef = admin.firestore().collection("images")
        .doc("emotions").collection(emotion).doc("--stats--");
      const index = await indexRef.get().then((doc) => {
        if (!doc.exists) {
          return 0;
        } else {
          return doc.data()!.index;
        }
      });
      if (index === 0) {
        await admin.firestore().collection("images")
          .doc("emotions")
          .collection(emotion)
          .doc("--stats--")
          .set({index: 0});
      }
      console.log("(GOT INDEX): " + index);
      let imageURL;
      await file
        .getSignedUrl({
          action: "read",
          expires: "03-09-2491"
        })
        .then(signedUrls => {
          imageURL = signedUrls[0];
        });
      console.log("(GOT URL): " + imageURL);
      var docRef = admin.firestore()
        .collection("images")
        .doc("emotions")
        .collection(emotion)
        .doc(String(index));
      console.log("uploading...");
      await indexRef.update({index: incrementIndex});
      await docRef.set({ imageURL: imageURL, photographer: photographer });
      console.log("finished");
      return true;
    }
    return false;
  });
Getting to the problem:
It works perfectly if I upload the files one by one.
It messes up the index if I upload more than one file at once, because two concurrent invocations will read the same index value from --stats-- and one will overwrite the other.
How would you solve this problem? Would you use another approach instead of the indexed one?
You should use a transaction in which you:
read the value of the index (from the "--stats--" document),
write the new index, and
write the value of the imageURL to the "emotion" doc.
See also the reference docs about transactions.
This way, if the index value is changed in the "--stats--" document while the transaction is being executed, the transaction fails, and the Cloud Function can catch that failure and finish with an error.
In parallel, you will need to enable retries for this background Cloud Function, so that it is retried if the transaction failed in a previous run.
See this documentation item https://firebase.google.com/docs/functions/retries, including the video from Doug Stevenson which is embedded in the doc.
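A minimal sketch of what the transactional part could look like (collection, doc, and variable names taken from your function; error handling omitted):

const statsRef = admin.firestore()
  .collection("images").doc("emotions")
  .collection(emotion).doc("--stats--");

await admin.firestore().runTransaction(async (t) => {
  // Reads must happen before writes inside a transaction.
  const statsDoc = await t.get(statsRef);
  const index = statsDoc.exists ? statsDoc.data()!.index : 0;
  const docRef = admin.firestore()
    .collection("images").doc("emotions")
    .collection(emotion).doc(String(index));
  // Both writes commit atomically; if another invocation changes
  // --stats-- concurrently, the transaction is retried or fails.
  t.set(statsRef, { index: index + 1 });
  t.set(docRef, { imageURL: imageURL, photographer: photographer });
});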
I have Redis with a lot of keys in some format, and I want to get the keys that match some pattern and do some operations on them. I don't use the KEYS command since it's not recommended in production. Using SCAN, I'm wondering what the best way to write it in code is. I have to do something like a while loop but using promises; my current solution looks like this (code is simplified a little):
'use strict'

const Promise = require('bluebird');
const config = require('./config');
const client = require('./client');

let iterator = 0;

Promise.coroutine(function* () {
  do {
    iterator = yield client.scanAsync(iterator, 'MATCH', 'myQuery', 'COUNT', config.scanChunkSize)
      .then(data => {
        let nextIterator = data[0];
        let values = data[1];
        // do some magic with values
        return nextIterator;
      })
  } while (iterator !== '0');
})();
Is there a better way to do it that I'm missing?
I realize this is a really old question, but I found all of the other answers very unsatisfying. Here is yet another attempt to scan in a relatively clean way using async/await (WITHOUT the use of yet another external dependency). You can easily modify this to continuously delete each batch of found keys instead (you would want to tackle them in batches like this in case there are LOTS). Pushing them into an array just demonstrates one very basic thing you could do with them during this stage.
const redis = require('redis');
const { promisify } = require('util');

const client = redis.createClient({...opts});
const scan = promisify(client.scan).bind(client);

const scanAll = async (pattern) => {
  const found = [];
  let cursor = '0';
  do {
    const reply = await scan(cursor, 'MATCH', pattern);
    cursor = reply[0];
    found.push(...reply[1]);
  } while (cursor !== '0');
  return found;
}
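Usage could look something like this (the key pattern is just an example):

scanAll('user:*')
  .then(keys => console.log(`found ${keys.length} keys`, keys))
  .catch(console.error);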
You can use recursion to keep calling scan until done.
function scanAsync(cursor, pattern, returnSet){
  return redisClient.scanAsync(cursor, "MATCH", pattern, "COUNT", "100").then(
    function (reply) {
      cursor = reply[0];
      var keys = reply[1];
      keys.forEach(function(key, i){
        returnSet.add(key);
      });
      if (cursor === '0'){
        return Array.from(returnSet);
      } else {
        return scanAsync(cursor, pattern, returnSet);
      }
    });
}
Pass in a Set() to make sure keys aren't duplicated:

myResults = new Set();
scanAsync('0', "NOC-*[^listen]*", myResults).then(
  function(myResults){ console.log(myResults); }
);
You can try this snippet to scan 1000 keys per iteration and delete them.
var cursor = '0';

function scan(pattern, callback){
  redisClient.scan(cursor, 'MATCH', pattern, 'COUNT', '1000', function(err, reply){
    if (err){
      throw err;
    }
    cursor = reply[0];
    var keys = reply[1];
    // delete this batch's keys before checking the cursor, since the
    // final reply (cursor '0') can still contain keys
    keys.forEach(function(key, i){
      redisClient.del(key, function(deleteErr, deleteSuccess){
        console.log(key);
      });
    });
    if (cursor === '0'){
      return callback();
    }
    return scan(pattern, callback);
  });
}

scan(strkey, function(){
  console.log('Scan Complete');
});
A nice option for the node-redis module is to use scan iterators. Example:
const redis = require("redis");
const client = redis.createClient();
async function getKeys(pattern="*", count=10) {
const results = [];
const iteratorParams = {
MATCH: pattern,
COUNT: count
}
for await (const key of client.scanIterator(iteratorParams)) {
results.push(key);
}
return results;
}
(Of course, you can also process your keys on the fly in the for await loop without storing them in an additional array, if that's enough for you.)
If you do not want to override the scan parameters (MATCH/COUNT), you can just skip them and execute client.scanIterator() without parameters (the defaults will be used then: MATCH="*", COUNT=10).
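For example, a sketch of processing matches on the fly instead of collecting them (deleting keys here is just an illustration, and the pattern is hypothetical):

async function deleteMatching(pattern) {
  for await (const key of client.scanIterator({ MATCH: pattern })) {
    await client.del(key); // handle each key as soon as SCAN yields it
  }
}

// deleteMatching('session:*');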
I think the Node bindings for Redis push too much responsibility to the caller here, so I created my own library for scanning as well, using generators in Node:
const redis = require('redis')
const client = redis.createClient(…)

const generators = require('redis-async-gen')
const { keysMatching } = generators.using(client)

…

for await (const key of keysMatching('test*')) {
  console.info(key)
}
It's the last bit that is obviously the part you should care about. Instead of having to carefully control an iterator yourself, all you need to do is use a for await...of loop.
I wrote more about it here.
Go through this, it may help:
https://github.com/fritzy/node-redisscan
Don't use the library as-is; go through the code available at
https://github.com/fritzy/node-redisscan/blob/master/index.js