Firebase document update trigger function error - Node.js

I am trying to stream-insert data into a BigQuery table and am running into the issue below with my function. I'm not sure where the error in the code is. I followed this sample to achieve it with realtime data:
https://github.com/googleapis/nodejs-bigquery/blob/main/samples/insertRowsAsStream.js
Function returned undefined, expected Promise or value
const functions = require("firebase-functions");
const {BigQuery} = require("@google-cloud/bigquery");

exports.onWriteTrigger = functions
    .firestore
    .document("leaseCompany/{documentId}")
    .onWrite((change, context) => {
      /*
        onCreate: google.firestore.document.create
        onUpdate: google.firestore.document.update
        onDelete: google.firestore.document.delete
      */
      const row = {
        // insertId: document.data.id,
        // json: {
        timestamp: context.timestamp,
        name: change.after.data().name,
        // documentName: context.resource.name,
        documentId: change.after.id,
        eventId: context.eventId,
        data: change.after.data().country,
      };
      // console.log(insertRows);

      async function insertBigQuery(rows) {
        try {
          console.log(row);
          const datasetName = "firestore_export";
          const tableName = "leaseCompany";
          const bigqueryClient = new BigQuery();
          const ds = bigqueryClient.dataset(datasetName);
          const tbl = ds.table(tableName);
          await tbl.insert(rows)
              .then((data) => {
                return true;
              })
              .catch((err) => {
                // An API error or partial failure occurred.
                if (err.name === "PartialFailureError") {
                  console.log("Error Sending Notifications", err);
                  return false;
                }
              });
        } catch (err) {
          console.error(`table.insert: ${JSON.stringify(err)}`);
          return err;
        }
      }

      // console.log(row);
      insertBigQuery(row);
    });

You need to return a promise or value from your function:
async function insertBigQuery(rows) {
  try {
    console.log(row);
    const datasetName = "firestore_export";
    const tableName = "leaseCompany";
    const bigqueryClient = new BigQuery();
    const ds = bigqueryClient.dataset(datasetName);
    const tbl = ds.table(tableName);
    return tbl.insert(rows)
        .then((data) => {
          return true;
        })
        .catch((err) => {
          // An API error or partial failure occurred.
          if (err.name === "PartialFailureError") {
            console.log("Error Sending Notifications", err);
            return false;
          }
        });
  } catch (err) {
    console.error(`table.insert: ${JSON.stringify(err)}`);
    return err;
  }
}

return insertBigQuery(row);
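Since the handler already uses an async function, the same fix can be expressed more compactly with await. A minimal sketch under the same dataset and table names as above; the change.after.exists guard is an addition here, to skip delete events where change.after carries no data:

exports.onWriteTrigger = functions
    .firestore
    .document("leaseCompany/{documentId}")
    .onWrite(async (change, context) => {
      // Skip delete events: change.after holds no data for them.
      if (!change.after.exists) {
        return null;
      }
      const row = {
        timestamp: context.timestamp,
        name: change.after.data().name,
        documentId: change.after.id,
        eventId: context.eventId,
        data: change.after.data().country,
      };
      try {
        // Returning the awaited insert keeps the function alive
        // until the stream insert has finished.
        await new BigQuery()
            .dataset("firestore_export")
            .table("leaseCompany")
            .insert([row]);
        return true;
      } catch (err) {
        console.error(`table.insert: ${JSON.stringify(err)}`);
        return false;
      }
    });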

Related

Get QLDB ledger table data in a JavaScript object

So my issue is simple yet challenging. I am trying to transfer data from one ledger to another. For that, I am reading a whole table of data, creating an object for every document, and then inserting these documents one by one into the other ledger's table:
Ledger#1 Table1 -> get all data -> convert all data to array of objects -> transfer to Ledger#2 Table1 one by one
The problem is that I cannot cleanly create the object from the document. I do it manually, using prototype functions to read each field's type and then build the object, which is messy and causes some data to become null. So I was wondering if there is a better way that is less prone to errors.
I asked a question about migrating a ledger but had no luck getting any response. Please help me with this.
Following is my code. Please copy and paste it into an IDE so you can understand it better.
const { awsMainFunction: awsProd } = require("./awsProdConfig");
const { awsMainFunction: awsProduction } = require("./awsProductionConfig");
const { tableNamesAndIndeces: tableName, checkForErrors } = require("./utils");

const getValueOfField = (field) => {
  const name = field.getType().name;
  switch (name) {
    case "string":
      return field.stringValue();
    case "int":
      return field.numberValue();
    case "null":
      return null;
    default:
      return null;
  }
};

const enterDataInNewLedger = async (tableData, tableName) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (const data of tableData) {
      await awsProductionDriver.executeLambda(async (txn) => {
        await txn.execute(`INSERT INTO ${tableName} ?`, data);
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const dataTransferOfTable = async (table) => {
  const prodDriver = awsProd();
  try {
    const allTableData = await prodDriver.executeLambda(async (txn) => {
      const result = await txn.execute(`SELECT * FROM ${table.name}`);
      const resultList = result.getResultList();
      let completeResults = [];
      for (const doc of resultList) {
        let newDoc = {};
        const fields = doc.fields();
        for (const field of fields) {
          newDoc[field[0]] = getValueOfField(field[1]);
        }
        completeResults.push(newDoc);
      }
      return completeResults;
    });
    const response = await enterDataInNewLedger(allTableData, table.name);
    checkForErrors(response);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const startDataTransferFromOneLedgerToAnother = async () => {
  try {
    for (let table of tableName) {
      const response = await dataTransferOfTable(table);
      checkForErrors(response);
    }
  } catch (err) {
    console.log(err.message);
  }
};

startDataTransferFromOneLedgerToAnother();
So apparently I could have done this easily; I was just experimenting and figured out the solution.
I can insert the whole fetched document as-is and it comes out the same, so my converted code is as follows:
const { awsMainFunction: awsProd } = require("./awsProdConfig");
const { awsMainFunction: awsProduction } = require("./awsProductionConfig");
const { tableNamesAndIndeces: tableName, checkForErrors } = require("./utils");

const enterDataInNewLedger = async (tableData, tableName) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (const data of tableData) {
      await awsProductionDriver.executeLambda(async (txn) => {
        await txn.execute(`INSERT INTO ${tableName} ?`, data);
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const dataTransferOfTable = async (table) => {
  const prodDriver = awsProd();
  try {
    const allTableData = await prodDriver.executeLambda(async (txn) => {
      const result = await txn.execute(`SELECT * FROM ${table.name}`);
      return result.getResultList();
    });
    const response = await enterDataInNewLedger(allTableData, table.name);
    checkForErrors(response);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const startDataTransferFromOneLedgerToAnother = async () => {
  try {
    for (let table of tableName) {
      const response = await dataTransferOfTable(table);
      checkForErrors(response);
    }
  } catch (err) {
    console.log(err.message);
  }
};

startDataTransferFromOneLedgerToAnother();
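As a possible follow-up on throughput: the loop above opens one transaction per document. Grouping documents into batches would cut the number of round trips. A sketch using the same driver API as above; the batch size is illustrative, and QLDB caps how many documents a single transaction may modify, so keep batches small:

const enterDataInNewLedgerBatched = async (tableData, tableName, batchSize = 20) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (let i = 0; i < tableData.length; i += batchSize) {
      const batch = tableData.slice(i, i + batchSize);
      // One transaction per batch instead of one per document.
      await awsProductionDriver.executeLambda(async (txn) => {
        for (const doc of batch) {
          await txn.execute(`INSERT INTO ${tableName} ?`, doc);
        }
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};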

Problem using a Map in Firebase Functions

I am trying to get the length of a Map and I keep getting "undefined". Could someone please tell me what I am doing wrong?
This is the part of the code that gives me problems:
const GYMdetail: { [key: string]: number} = {};
GYMdetail[`${doc.data().name} (${doc.data().personalID})`] = 650;
const subtotal = 650 * GYMdetail.size;
This is the complete function code
export const addGymMonthlyExpense =
  functions.https.onRequest((request, response) => {
    const query1 = admin.firestore().collection("users");
    const query = query1.where("subscriptions.gym.active", "==", true);
    query.get()
        .then(async (allUsers) => {
          allUsers.docs.forEach(async (doc) => {
            if (doc != undefined) {
              const houseForGym = doc.data().subscriptions.gym.house;
              await admin.firestore()
                  .doc(`houses/${houseForGym}/expenses/2022-04`)
                  .get().then((snapshot) => {
                    if (snapshot.data() == undefined) {
                      console.log(`${houseForGym}-${doc.data().name}: CREAR!!`);
                    } else if (snapshot.data()!.issued == false) {
                      let detail: { [key: string]: any} = {};
                      const GYMdetail: { [key: string]: number} = {};
                      detail = snapshot.data()!.detail;
                      GYMdetail[
                          `${doc.data().name} (${doc.data().personalID})`
                      ] = 650;
                      const subtotal = 650 * GYMdetail.size;
                      detail["GYM"] = {"total": subtotal, "detail": GYMdetail};
                      snapshot.ref.set({"detail": detail}, {merge: true});
                    }
                    return null;
                  })
                  .catch((error) => {
                    console.log(
                        `${houseForGym} - ${doc.data().name}: ${error}`);
                    response.status(500).send(error);
                    return null;
                  });
            }
          });
          response.send("i");
        })
        .catch((error) => {
          console.log(error);
          response.status(500).send(error);
        });
  });
Since you are executing an asynchronous call to the database in your code, you need to return a promise from the top-level code; otherwise Cloud Functions may kill the container when the final } executes and by that time the database load won't be done yet.
So:
export const addGymMonthlyExpense =
  functions.https.onRequest((request, response) => {
    const query1 = admin.firestore().collection("users");
    const query = query1.where("subscriptions.gym.active", "==", true);
    return query.get()
    ...
Next, you'll need to ensure that all the nested get() calls also get a chance to finish before the Functions container gets terminated. For that I recommend not using await for each nested get call, but a single Promise.all for all of them:
query.get()
    .then(async (allUsers) => {
      const promises = [];
      allUsers.docs.forEach((doc) => {
        const houseForGym = doc.data().subscriptions.gym.house;
        promises.push(admin.firestore()
            .doc(`houses/${houseForGym}/expenses/2022-04`)
            .get().then((snapshot) => {
              ...
            });
      });
      response.send("i");
      return Promise.all(promises);
    })
    .catch((error) => {
      console.log(error);
      response.status(500).send(error);
    });
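As an aside on the original "undefined": GYMdetail is declared as a plain object ({ [key: string]: number }), not a Map, so it has no size property and GYMdetail.size is undefined. For a plain object, the entry count comes from Object.keys:

// GYMdetail is a plain object; .size does not exist on it.
const subtotal = 650 * Object.keys(GYMdetail).length;

If an actual Map were used instead (new Map<string, number>()), its .size property would work as expected.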

Trouble with promises in Firebase Functions

This code either runs once or at most 100 times. I have a dummy data file with 6000 records, as this is the average it will have to handle.
I'm currently on the Blaze plan.
The code was working somewhat, but I set up a new project and now I get this issue:
export const uploadPatrons = functions.storage
  .object()
  .onFinalize((object, context) => {
    let patronPromise: any[];
    patronPromise = [];
    if (object.name === 'patrons/upload.csv') {
      admin
        .storage()
        .bucket()
        .file('/patrons/upload.csv')
        .download({})
        .then(data => {
          Papa.parse(data.toString(), {
            header: true,
            skipEmptyLines: true,
            complete: result => {
              result.data.forEach(x => {
                x.inside = false;
                x.arrived = false;
                x.img = false;
                x.arrivedTime = null;
                const newPromise = admin
                  .firestore()
                  .collection('patrons')
                  .add({ ...x })
                  .then(doc => {
                    console.log(doc);
                  })
                  .catch(err => {
                    console.log(err);
                  });
                patronPromise.push(newPromise);
              });
            }
          });
        })
        .catch(err => {
          console.log(err);
        });
    }
    return Promise.all(patronPromise)
      .catch(err => {
        console.log(err);
      });
  });
All it has to do is read the file from the storage, parse it, and add each record to the Firestore collection.
This is the error I get in the logs:
Function returned undefined, expected Promise or value
Your first promise may be shut down before it finishes. Try to follow the promise/always-return rule:
export const uploadPatrons = functions.storage
  .object()
  .onFinalize((object, context) => {
    if (object.name === 'patrons/upload.csv') {
      return admin.storage().bucket()
        .file('/patrons/upload.csv')
        .download({})
        .then(data => {
          let patronPromise: any[];
          patronPromise = [];
          Papa.parse(data.toString(), {
            header: true,
            skipEmptyLines: true,
            complete: result => {
              result.data.forEach(x => {
                x.inside = false;
                x.arrived = false;
                x.img = false;
                x.arrivedTime = null;
                const newPromise = admin.firestore()
                  .collection('patrons')
                  .add({
                    ...x
                  });
                patronPromise.push(newPromise);
              });
            }
          });
          return Promise.all(patronPromise);
        })
        .then(result => {
          // return Promise.resolve or something
        })
        .catch(err => {
          console.log(err);
        });
    }
    else {
      // also return if it's nothing
    }
  });
You're ignoring the promise that admin.storage().bucket().file('/patrons/upload.csv').download({}) returns, which means that the function may get aborted.
I think it should be closer to this:
export const uploadPatrons = functions.storage
  .object()
  .onFinalize((object, context) => {
    let patronPromise: any[];
    patronPromise = [];
    if (object.name === 'patrons/upload.csv') {
      return admin.storage().bucket()
        .file('/patrons/upload.csv')
        .download({})
        .then(data => {
          Papa.parse(data.toString(), {
            header: true,
            skipEmptyLines: true,
            complete: result => {
              result.data.forEach(x => {
                x.inside = false;
                x.arrived = false;
                x.img = false;
                x.arrivedTime = null;
                const newPromise = admin.firestore()
                  .collection('patrons')
                  .add({
                    ...x
                  });
                patronPromise.push(newPromise);
              });
              // TODO: return the Promise.all(patronPromise) here
            }
          });
        });
    }
  });
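To make that TODO workable, one option is to wrap the parse step in a promise so the handler can await both the parsing and all of the Firestore writes. A sketch of that idea, assuming the same Papa Parse options as the question (Papa.parse on a string invokes complete synchronously, but the wrapper keeps the chain explicit either way):

function parseCsv(text: string): Promise<any[]> {
  return new Promise((resolve) => {
    Papa.parse(text, {
      header: true,
      skipEmptyLines: true,
      // complete fires once every row has been parsed
      complete: (result) => resolve(result.data),
    });
  });
}

export const uploadPatrons = functions.storage
  .object()
  .onFinalize(async (object, context) => {
    if (object.name !== 'patrons/upload.csv') {
      return null;
    }
    // download() resolves with a one-element array containing the file contents
    const [contents] = await admin.storage().bucket()
      .file('/patrons/upload.csv')
      .download();
    const rows = await parseCsv(contents.toString());
    // One add() per record; Promise.all keeps the function alive until all writes finish
    return Promise.all(rows.map((x) => admin.firestore()
      .collection('patrons')
      .add({ ...x, inside: false, arrived: false, img: false, arrivedTime: null })));
  });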

Getting an infinite loop in a Firebase Cloud Function

I am using Firestore to store the data in Firebase. To get the counts I am using Cloud Functions. When I try to add/update/delete an entry in one collection it starts an infinite loop with another collection.
Example:
I have a user table and an agent table; when I add/update/delete a user, it should get updated in the agent table.
Though I have used separate functions for users and agents, I am still getting an infinite loop. Can anyone tell me how to resolve it?
Query to update the user in the user and agent tables:
export const addUser = (values) =>
  db
    .collection('users')
    .add(values)
    .then((docRef) => {
      let customer = { customer: {} };
      customer.customer[docRef.id] = {
        id: docRef.id,
        name: values.name,
        commission: values.agent.commission
      };
      let agentId = values.agent.id;
      db.collection('agents')
        .doc(agentId)
        .set(customer, { merge: true });
    });
Cloud function for user:
const functions = require("firebase-functions");
const admin = require("firebase-admin");

exports = module.exports = functions.firestore
  .document("users/{userUid}")
  .onWrite(
    (change, context) =>
      new Promise((resolve, reject) => {
        let dashboardId;
        getDashboardId();
      })
  );

getDashboardId = () => {
  admin.firestore().collection('dashboard').get().then((snapshot) => {
    if (snapshot.size < 1) {
      dashboardId = admin.firestore().collection('dashboard').doc().id;
    } else {
      snapshot.docs.forEach((doc) => {
        dashboardId = doc.id;
      });
    }
    return updateUser(dashboardId);
  }).catch((error) => {
    console.log('error is', error);
  });
};

updateUser = (id) => {
  admin.firestore().collection('users').where('isDeleted', '==', false).get().then((snap) => {
    let usersData = {users: snap.size};
    return admin.firestore().collection('dashboard').doc(id).set(usersData, {merge: true});
  }).catch((error) => {
    console.log('error is', error);
  });
};
Cloud function for agent:
const functions = require("firebase-functions");
const admin = require("firebase-admin");
exports = module.exports = functions.firestore
.document("agents/{agentUid}")
.onWrite(
(change, context) =>
new Promise((resolve, reject) => {
let dashboardId;
getDashboardId();
})
);
getDashboardId = () => {
admin.firestore().collection('dashboard').get().then((snapshot) => {
if (snapshot.size < 1) {
dashboardId = admin.firestore().collection('dashboard').doc().id;
} else {
snapshot.docs.forEach((doc) => {
dashboardId = doc.id;
});
}
return updateAgent(dashboardId);
}).catch((error) => {
console.log('error is', error);
});
}
updateAgent = (id) => {
admin.firestore().collection('agents').where('isDeleted', '==', false).get().then((snap) => {
let agentsData = {agents: snap.size};
return admin.firestore().collection('dashboard').doc(id).set(agentsData, {merge: true});
}).catch((error) => {
console.log('error is', error);
});
}
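Separate from the loop itself, both triggers have a promise problem: the new Promise wrapper never calls resolve, and getDashboardId never returns its chain, so Cloud Functions may terminate the container before the dashboard write completes. A minimal sketch of the user trigger with the chain actually awaited (same collections and fields as above; the agent trigger would mirror it):

exports = module.exports = functions.firestore
  .document("users/{userUid}")
  .onWrite(async (change, context) => {
    const dashboard = admin.firestore().collection('dashboard');
    const snapshot = await dashboard.get();
    // Reuse an existing dashboard doc, or mint a new id when none exists
    const dashboardId = snapshot.size < 1
      ? dashboard.doc().id
      : snapshot.docs[snapshot.docs.length - 1].id;
    const users = await admin.firestore()
      .collection('users')
      .where('isDeleted', '==', false)
      .get();
    // Returning the final write lets Cloud Functions wait for it
    return dashboard.doc(dashboardId).set({ users: users.size }, { merge: true });
  });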

Bulk update to Postgres with Node.js: performance issue

I'm facing a performance issue while trying to do a bulk update in Postgres. It takes more than 180 seconds to update around 23,000 records. Please find the code below. I'm using the pg-promise library. Is there anything I could do to improve the performance?
const pgp = require('pg-promise')();

const postgresDBConfig = {
  host: Config.postgresDBHost,
  port: Config.postgresDBPort,
  database: Constants.postgresDBName,
  user: Config.postgresDBUser,
  password: 'pswd'
};

export async function getTransactionDetails(): Promise<any> {
  return new Promise<any>(async function (resolve, reject) {
    try {
      let db = pgp(postgresDBConfig);
      db.connect();
      let query = "SELECT * FROM table_name";
      db.any(query)
        .then(data => {
          console.log("Executed successfully::");
          resolve(data);
        })
        .catch(error => {
          console.log('ERROR:', error);
        });
    } catch (error) {
      log.error("Error::" + error);
      throw error;
    }
  });
}

export async function updateStatus(result: any, status: string) {
  try {
    let db = pgp(postgresDBConfig);
    //db.connect();
    let updateData = [];
    _.forEach(result, function (row) {
      let updateInfo = {};
      updateInfo["sessionid"] = row.sessionid;
      updateInfo["status"] = status;
      updateData.push(updateInfo);
    });
    console.log("updateData::" + updateData.length);
    const tableName = new pgp.helpers.TableName('table_name', 'schema_name');
    let columnset = new pgp.helpers.ColumnSet(['?sessionid', 'status'], { table: tableName });
    let update = pgp.helpers.update(updateData, columnset);
    db.none(update).then(() => {
      console.log("Updated successfully");
    })
    .catch(error => {
      console.log("Error updating the status" + error);
    });
  }
  catch (error) {
    log.error("Error in function updateStatus::" + error);
    throw error;
  }
}
The code exhibits problems all over the place:
You should initialize the database object only once.
You should not use db.connect() at all, which you also use incorrectly for the async code.
You again use an async block incorrectly, skipping await, so it doesn't execute correctly.
You do not append any UPDATE condition clause, so it updates everything over and over, unconditionally, which may well be the delayed mess you're in.
Here's an improved example, though it may need some more work from your side...
const pgp = require('pg-promise')();

const postgresDBConfig = {
  host: Config.postgresDBHost,
  port: Config.postgresDBPort,
  database: Constants.postgresDBName,
  user: Config.postgresDBUser,
  password: 'pswd'
};

const db = pgp(postgresDBConfig);

const tableName = new pgp.helpers.TableName('table_name', 'schema_name');
const columnSet = new pgp.helpers.ColumnSet(['?sessionid', 'status'], {table: tableName});

export async function getTransactionDetails(): Promise<any> {
  try {
    const res = await db.any('SELECT * FROM table_name');
    console.log('Executed successfully::');
    return res;
  } catch (error) {
    console.log('ERROR:', error);
    throw error;
  }
}

export async function updateStatus(result: any, status: string) {
  try {
    let updateData = [];
    _.forEach(result, row => {
      let updateInfo = {};
      updateInfo["sessionid"] = row.sessionid;
      updateInfo["status"] = status;
      updateData.push(updateInfo);
    });
    console.log('updateData::', updateData.length);
    const update = pgp.helpers.update(updateData, columnSet) +
      ' WHERE v.sessionid = t.sessionid';
    await db.none(update);
    console.log('Updated successfully');
  }
  catch (error) {
    console.log('Error in function updateStatus:', error);
    throw error;
  }
}
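A note on the helpers used above: in pg-promise's ColumnSet, the ? prefix on ?sessionid marks the column as condition-only, so it appears in the VALUES list but is never SET, and helpers.update turns the whole 23,000-row array into a single multi-row statement. Roughly, the generated SQL has this shape (illustrative only; the library handles the exact aliasing and escaping):

// Shape of the single statement produced by pgp.helpers.update plus the appended WHERE:
//
//   UPDATE "schema_name"."table_name" AS t
//   SET "status" = v."status"
//   FROM (VALUES ('s1', 'done'), ('s2', 'done'), ...) AS v("sessionid", "status")
//   WHERE v.sessionid = t.sessionid
//
// One round trip for all rows, instead of one UPDATE per record.

That manual ' WHERE v.sessionid = t.sessionid' clause is why the v and t aliases appear in the answer's code.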
