Firestore: scheduled export - node.js

I have used the code from the Firebase documentation to schedule a backup of the data in my Firestore project to a bucket every 6 hours. See the link and the code here:
https://firebase.google.com/docs/firestore/solutions/schedule-export
const functions = require('firebase-functions');
const firestore = require('@google-cloud/firestore');
const client = new firestore.v1.FirestoreAdminClient();

// Replace BUCKET_NAME
const bucket = 'gs://BUCKET_NAME';

exports.scheduledFirestoreExport = functions.pubsub
  .schedule('every 24 hours')
  .onRun((context) => {
    const projectId = process.env.GCP_PROJECT || process.env.GCLOUD_PROJECT;
    const databaseName = client.databasePath(projectId, '(default)');

    return client.exportDocuments({
      name: databaseName,
      outputUriPrefix: bucket,
      // Leave collectionIds empty to export all collections
      // or set to a list of collection IDs to export,
      // collectionIds: ['users', 'posts']
      collectionIds: []
    })
      .then(responses => {
        const response = responses[0];
        console.log(`Operation Name: ${response['name']}`);
      })
      .catch(err => {
        console.error(err);
        throw new Error('Export operation failed');
      });
  });
Everything works well and my data is saved the way I want, but I am nevertheless getting an error:
Error serializing return value: TypeError: Converting circular structure to JSON
Can someone tell me what I should change? I would be glad to get a hint.
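One thing worth checking, sketched below under the assumption that the error comes from the value the scheduled function resolves with (the original post does not confirm this): Cloud Functions tries to serialize whatever the handler's promise resolves to, and the raw export operation/response object contains circular references. Resolving with nothing, or with a plain serializable value such as the operation name, avoids that:

// Same export call as above; only the resolved value changes.
return client.exportDocuments({
  name: databaseName,
  outputUriPrefix: bucket,
  collectionIds: []
})
  .then(responses => {
    const response = responses[0];
    console.log(`Operation Name: ${response['name']}`);
    // Resolve with a serializable value instead of the response object.
    return null;
  })
  .catch(err => {
    console.error(err);
    throw new Error('Export operation failed');
  });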

Related

How to parse XML feed URL and store items in Firestore using cloud functions?

I have been given an assignment to fetch a JSON API, and also parse an XML feed URL and store their responses inside separate Firestore collections. I am not really good at cloud functions, but after lots of research, I have written the cloud function code below for the JSON API and it works well.
const functions = require("firebase-functions");
const axios = require("axios");
const admin = require("firebase-admin");

const api_token = "XXXXXXX";
const includes = "XXXXXX";
const url = "https://XXXXXXXXXXXXXX.com/?api_token=" + api_token + includes;

exports.allLeagues = functions.region('europe-west1').https.onRequest(async (req, res) => {
  try {
    let response = await axios.get(url);
    var data = response.data.data;
    for (let leagueData of data) {
      await admin.firestore().collection("leagues").doc(leagueData.id.toString())
        .collection("all_data").doc(leagueData.id.toString()).set({
          id: leagueData.id,
          name: leagueData.name,
          logo_path: leagueData.logo_path,
          is_cup: leagueData.is_cup
        });
    }
    console.log("Table complete...");
    console.log("successful");
    return res.status(200).json({ message: "successful" });
  } catch (error) {
    console.log("Error encountered: " + error);
    return res.status(500).json({ error });
  }
});
I am done with the JSON API, but for the XML feed I don't know where to start. I have done lots of research to no avail. I found a related question on Stack Overflow, but it doesn't address my need. Assuming this is my feed: https://www.feedforall.com/sample.xml , how do I parse it and save the items inside Firestore?
Kindly help.
Thank you.
You can use rss-parser, which can fetch data from RSS feed URLs or parse XML strings, as shown below:
// npm install rss-parser
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const Parser = require("rss-parser");

admin.initializeApp();
const db = admin.firestore();
const parser = new Parser();

exports.rssFeedParser = functions.https.onRequest(
  async (request, response) => {
    const rssUrl = "https://www.feedforall.com/sample.xml";
    const feed = await parser.parseURL(rssUrl);

    const { items } = feed;
    const batch = db.batch();
    items.forEach((item) => {
      const docRef = db.collection("rss").doc();
      // restructure item if needed
      batch.set(docRef, item);
    });
    await batch.commit();

    response.send("Done");
  }
);
Do note that a batched write, as used in the answer above, can contain at most 500 operations. If your feed can return more items than that, create multiple batches of 500 or add the documents individually.
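If you need that, here is a minimal sketch of splitting the feed items into batches of 500, reusing db and items from the answer above inside the same async handler (the chunking itself is not part of the original answer):

// Firestore allows at most 500 operations per batched write,
// so commit the items in chunks.
const chunkSize = 500;
for (let i = 0; i < items.length; i += chunkSize) {
  const batch = db.batch();
  items.slice(i, i + chunkSize).forEach((item) => {
    batch.set(db.collection("rss").doc(), item);
  });
  await batch.commit();
}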

When connecting Firebase to a Node project it shows "db is not defined"

When I connect Firebase to my Node project, it says "db is not defined".
Progress.js
const Progress = require("../models/progress");

// Add new Progress
exports.addProgress = async (req, res) => {
  // Constant variables for the attributes
  const { name, description, type, date, imgUrl } = req.body;

  // Object
  const newProgress = new Progress({
    // Initializing properties
    name,
    description,
    type,
    date,
    imgUrl
  })

  // Saving the object to the db
  newProgress.save().then(() => {
    res.status(200).json({ status: "New Progress Added" });
  }).catch((error) => {
    res.status(500).json({ message: "Failed to add Progress item", error: error.message })
  })
}
I think you missed initializing the DB. Try this:
const firestore = require("firebase-admin");
firestore.initializeApp();
const db = firestore.firestore();
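A minimal sketch of wiring that up, assuming a hypothetical firebase.js module (the file name and export shape are not from the original answer), so that db is defined once and required wherever it is used, e.g. in Progress.js:

// firebase.js - initialize the Admin SDK once and export the Firestore handle
const admin = require("firebase-admin");
admin.initializeApp();
module.exports = { db: admin.firestore() };

// Progress.js - import the shared handle instead of using an undefined db
const { db } = require("./firebase");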

Google Cloud Tasks: How to update a task's time to live?

Description:
I have created a Firebase app where a user can insert a Firestore document. When this document is created, a timestamp is added so that it can be automatically deleted by a cloud function after x amount of time.
After the document is created, an http/onCreate cloud function is triggered successfully, and it creates a cloud task, which then deletes the document at the scheduled time.
export const onCreatePost = functions
  .region(region)
  .firestore.document('/boxes/{id}')
  .onCreate(async (snapshot) => {
    const data = snapshot.data() as ExpirationDocData;
    // Box creation timestamp.
    const { timestamp } = data;
    // The path of the firebase document ('/myCollection/{docId}').
    const docPath = snapshot.ref.path;

    await scheduleCloudTask(timestamp, docPath)
      .then(() => {
        console.log('onCreate: cloud task created successfully.');
      })
      .catch((error) => {
        console.error(error);
      });
  });
export const scheduleCloudTask = async (timestamp: number, docPath: string) => {
  // Convert timestamp to seconds.
  const timestampToSeconds = timestamp / 1000;
  // Doc time to live in seconds.
  const documentLifeTime = 20;
  const expirationAtSeconds = timestampToSeconds + documentLifeTime;

  // The Firebase project ID.
  const project = 'my-project';
  // Cloud Tasks -> firestore time to live queue.
  const queue = 'my-queue';
  const queuePath: string = tasksClient.queuePath(project, region, queue);

  // The url to the callback function
  // that gets invoked by Google Cloud Tasks when the deadline is reached.
  const url = `https://${region}-${project}.cloudfunctions.net/callbackFn`;
  const payload: ExpirationTaskPayload = { docPath };

  // Google Cloud IAM & Admin principal account.
  const serviceAccountEmail = 'myServiceAccount@appspot.gserviceaccount.com';

  // Configuration for the Cloud Task.
  const task = {
    httpRequest: {
      httpMethod: 'POST',
      url,
      oidcToken: {
        serviceAccountEmail,
      },
      body: Buffer.from(JSON.stringify(payload)).toString('base64'),
      headers: {
        'Content-Type': 'application/json',
      },
    },
    scheduleTime: {
      seconds: expirationAtSeconds,
    },
  };

  await tasksClient.createTask({
    parent: queuePath,
    task,
  });
};
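The snippets in this question assume a tasksClient defined elsewhere in the file; a minimal sketch of how such a client is typically constructed (this part is not shown in the original question):

// Cloud Tasks client used by scheduleCloudTask above.
const { CloudTasksClient } = require('@google-cloud/tasks');
const tasksClient = new CloudTasksClient();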
export const callbackFn = functions
.region(region)
.https.onRequest(async (req, res) => {
const payload = req.body as ExpirationTaskPayload;
try {
await admin.firestore().doc(payload.docPath).delete();
res.sendStatus(200);
} catch (error) {
console.error(error);
res.status(500).send(error);
}
});
Problem:
The user can also extend the document's time to live. When that happens, the timestamp is successfully updated in the Firestore document, and an http/onUpdate cloud function runs as expected.
As shown below, I tried to update the cloud task's "time to live" by calling the scheduleCloudTask function again, which obviously does not work and, I guess, just creates another task for the document.
export const onDocTimestampUpdate = functions
  .region(region)
  .firestore.document('/myCollection/{docId}')
  .onUpdate(async (change, context) => {
    const before = change.before.data() as ExpirationDocData;
    const after = change.after.data() as ExpirationDocData;

    if (before.timestamp < after.timestamp) {
      const docPath = change.before.ref.path;

      await scheduleCloudTask(after.timestamp, docPath)
        .then((res) => {
          console.log('onUpdate: cloud task created successfully.');
          return;
        })
        .catch((error) => {
          console.error(error);
        });
    } else return;
  });
I have not been able to find documentation or examples where an updateTask() or a similar method is used to update an existing task.
Should I use the deleteTask() method and then createTask() to create a new task after the document's timestamp is updated?
Thanks in advance,
Cheers!
Yes, that's how you have to do it. There is no API to update a task.
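A rough sketch of that delete-and-recreate flow, built on the code from the question; the taskName field used to persist the task's resource name on the Firestore document is an assumption, not something in the original code:

// In scheduleCloudTask: keep the created task's full resource name on the
// document so it can be deleted later.
const [createdTask] = await tasksClient.createTask({ parent: queuePath, task });
await admin.firestore().doc(docPath).update({ taskName: createdTask.name });

// In onDocTimestampUpdate: delete the old task (if any), then schedule a new one.
const { taskName } = change.before.data();
if (taskName) {
  await tasksClient.deleteTask({ name: taskName }).catch(console.error);
}
await scheduleCloudTask(after.timestamp, docPath);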

How to create tables in firebase with api.ai

I have a question about Dialogflow. I want to know if it's possible to have the agent create new fields or tables in the Firebase database (Firestore or Realtime Database). All the code I find is about changing the values of a table, not creating them.
I do not know where to start; I've done the integrations with the server and everything is working.
function writeToDb (agent) {
  const databaseEntry = agent.parameters.databaseEntry;
  const dialogflowAgentRef = db.collection('dialogflow').doc('agent');
  return db.runTransaction(t => {
    t.set(dialogflowAgentRef, {entry: databaseEntry});
    return Promise.resolve('Write complete');
I need an explanation of how to create new tables or fields from the agent.
Google's Dialogflow Firestore sample on Github demonstrates how to connect Dialogflow to the Firestore database.
Check out the writeToDb() function below, and remember to require the same dependencies:
'use strict';

const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {WebhookClient} = require('dialogflow-fulfillment');

process.env.DEBUG = 'dialogflow:*'; // enables lib debugging statements

admin.initializeApp(functions.config().firebase);
const db = admin.firestore();

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
  const agent = new WebhookClient({ request, response });

  function writeToDb (agent) {
    // Get parameter from Dialogflow with the string to add to the database
    const databaseEntry = agent.parameters.databaseEntry;

    // Get the database collection 'dialogflow' and document 'agent' and store
    // the document {entry: "<value of database entry>"} in the 'agent' document
    const dialogflowAgentRef = db.collection('dialogflow').doc('agent');
    return db.runTransaction(t => {
      t.set(dialogflowAgentRef, {entry: databaseEntry});
      return Promise.resolve('Write complete');
    }).then(doc => {
      agent.add(`Wrote "${databaseEntry}" to the Firestore database.`);
    }).catch(err => {
      console.log(`Error writing to Firestore: ${err}`);
      agent.add(`Failed to write "${databaseEntry}" to the Firestore database.`);
    });
  }

  function readFromDb (agent) {
    // Get the database collection 'dialogflow' and document 'agent'
    const dialogflowAgentDoc = db.collection('dialogflow').doc('agent');

    // Get the value of 'entry' in the document and send it to the user
    return dialogflowAgentDoc.get()
      .then(doc => {
        if (!doc.exists) {
          agent.add('No data found in the database!');
        } else {
          agent.add(doc.data().entry);
        }
        return Promise.resolve('Read complete');
      }).catch(() => {
        agent.add('Error reading entry from the Firestore database.');
        agent.add('Please add an entry to the database first by saying, "Write <your phrase> to the database"');
      });
  }

  // Map from Dialogflow intent names to functions to be run when the intent is matched
  let intentMap = new Map();
  intentMap.set('ReadFromFirestore', readFromDb);
  intentMap.set('WriteToFirestore', writeToDb);
  agent.handleRequest(intentMap);
});
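As for the original question about creating new "tables": Firestore has no schema to declare up front, so a collection, document, or field is created the first time you write to it. A minimal sketch in the style of the sample above (the collection, document, and field names are made up for illustration):

function createFromAgent (agent) {
  // Writing to a path that doesn't exist yet creates the collection and
  // document; merge: true adds new fields to an existing document.
  const newDocRef = db.collection('newCollection').doc('newDocument');
  return newDocRef.set({ newField: agent.parameters.databaseEntry }, { merge: true })
    .then(() => agent.add('Created the new collection, document, and field.'));
}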

URL from getSignedUrl expires in a few weeks

I've got a storage-triggered function which resizes and replaces the uploaded image in storage and then updates the URL in my database:
}).then(() => {
  console.log('Original file deleted', filePath)
  const logo = storageRef.file(JPEGFilePath)
  return logo.getSignedUrl({ action: 'read', expires: date })
  // const logo = storageRef.child(JPEGFilePath)
  // return logo.getDownloadURL()
  // return storageUrl.getDownloadURL(JPEGFilePath)
}).then((url) => {
  const newRef = db.collection("user").doc(uid)
  return newRef.set({
    profile: { profileImg: url[0] }
  }, {
    merge: true
  })
})
Here is how I set the expiry date:
const d = new Date()
const date = new Date(d.setFullYear(d.getFullYear() + 200)).toString()
However, the URL expires in a few weeks (roughly two weeks). Does anyone know how to fix that? I have even played with getDownloadURL, as you can see from the commented code, but that doesn't seem to work in a trigger.
Per the following links:
https://stackoverflow.com/a/42959262/370321
https://cloud.google.com/nodejs/docs/reference/storage/2.5.x/File#getSignedPolicy
Not sure which version of @google-cloud/storage you're using, but assuming it's 2.5.x, any value you pass in the expires field is passed into new Date(), so your code should work; I tried it in my dev tools. The only thing I can guess is that it doesn't like that you want a URL to live for 200 years.
Per the source code:
https://github.com/googleapis/nodejs-storage/blob/master/src/file.ts#L2358
Have you tried a shorter amount of time, or formatting it in the date format mm-dd-yyyy?
OK, so I have tried something, but I have no idea whether it will work; I'll come back in 2 weeks and mark my question as answered if it does. For those with the same problem, I'll recapitulate what I've done.
1/ Download the service account key from the console. Here is the link:
https://console.firebase.google.com/project/_/settings/serviceaccounts/adminsdk
2/ Save the downloaded JSON file in your function directory
3/ Include the key when you create the Storage client in your function. But be careful how you set the path to the file. Here is my question about it:
https://stackoverflow.com/a/56407592/11486115
UPDATE
I just found a mistake in my function. My URL was being provided by the cloud function's default storage reference by mistake (see the commented code).
Here is the complete function:
const {
  db
} = require('../../admin')

const projectId = "YOUR-PROJECT-ID"
const { Storage } = require('@google-cloud/storage');
const storage = new Storage({ projectId: projectId, keyFilename: 'PATH-TO-SERVICE-ACCOUNT' })

const os = require('os');
const fs = require('fs');
const path = require('path');
const spawn = require('child-process-promise').spawn

const JPEG_EXTENSION = '.jpg'

exports.handler = ((object) => {
  const bucket = object.bucket;
  const contentType = object.contentType;
  const filePath = object.name
  const JPEGFilePath = path.normalize(path.format({ dir: path.dirname(filePath), name: 'profileImg', ext: JPEG_EXTENSION }))
  const destBucket = storage.bucket(bucket)
  const tempFilePath = path.join(os.tmpdir(), path.basename(filePath))
  const tempLocalJPEGFile = path.join(os.tmpdir(), path.basename(JPEGFilePath))
  const metadata = {
    contentType: contentType
  }
  const uid = filePath.split("/").slice(1, 2).join("")
  const d = new Date()
  const date = new Date(d.setFullYear(d.getFullYear() + 200)).toString()

  if (!object.contentType.startsWith('image/')) {
    return destBucket.file(filePath).delete().then(() => {
      console.log('File is not an image ', filePath, ' DELETED')
      return null
    });
  }
  if (object.metadata.modified) {
    console.log('Image processed')
    return null
  }

  return destBucket.file(filePath).download({
    destination: tempFilePath
  })
    .then(() => {
      console.log('The file has been downloaded to', tempFilePath)
      return spawn('convert', [tempFilePath, '-resize', '100x100', tempLocalJPEGFile])
    }).then(() => {
      console.log('JPEG image created at', tempLocalJPEGFile)
      metadata.modified = true
      return destBucket.upload(tempLocalJPEGFile,
        {
          destination: JPEGFilePath,
          metadata: { metadata: metadata }
        })
    }).then(() => {
      console.log('JPEG image uploaded to Storage at', JPEGFilePath)
      return destBucket.file(filePath).delete()
    }).then(() => {
      console.log('Original file deleted', filePath)
      // const logo = storageRef.file(JPEGFilePath)
      const logo = destBucket.file(JPEGFilePath)
      return logo.getSignedUrl({ action: 'read', expires: date })
    }).then((url) => {
      const newRef = db.collection("user").doc(uid)
      return newRef.set({
        profile: { profileImg: url[0] }
      }, {
        merge: true
      })
    }).then(() => {
      fs.unlinkSync(tempFilePath);
      fs.unlinkSync(tempLocalJPEGFile)
      console.log(uid, 'user database updated ')
      return null
    })
})
I'm pretty confident that this will work now.
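An alternative not used in this thread, and only suitable if the resized image does not need to be access-controlled: skip signed URLs entirely, make the file public, and store its permanent media URL. A rough sketch, reusing the logo file reference, uid, and db from the function above, and assuming bucketName holds the bucket's name (not defined in the original snippet):

const logo = destBucket.file(JPEGFilePath)
// Make the object publicly readable; the resulting URL never expires.
return logo.makePublic()
  .then(() => `https://storage.googleapis.com/${bucketName}/${JPEGFilePath}`)
  .then((publicUrl) => db.collection("user").doc(uid).set({
    profile: { profileImg: publicUrl }
  }, { merge: true }))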
