Firebase cloud-function multiple executions - node.js

I have a Firebase (Google) Cloud Function as follows:
// Initialize the Auth0 client
var AuthenticationClient = require('auth0').AuthenticationClient;
var auth0 = new AuthenticationClient({
  domain: 'familybank.auth0.com',
  clientID: 'REDACTED'
});

function getAccountBalance(app) {
  console.log('accessToken: ' + app.getUser().accessToken);
  auth0.getProfile(app.getUser().accessToken, function (err, userInfo) {
    if (err) {
      console.error('Error getting userProfile from Auth0: ' + err);
    }
    console.log('getAccountBalance userInfo:' + userInfo)
    let accountowner = app.getArgument(PARAM_ACCOUNT_OWNER);
    // query firestore based on user
    var transactions = db.collection('bank').doc(userInfo.email)
      .db.collection('accounts').doc(accountowner)
      .collection('transactions');
    var accountbalance = transactions.get()
      .then(snapshot => {
        var workingbalance = 0
        snapshot.forEach(doc => {
          workingbalance = workingbalance + doc.data().amount;
        });
        app.tell(accountowner + " has a balance of $" + workingbalance)
      })
      .catch(err => {
        console.log('Error getting transactions', err);
        app.tell('I was unable to retrieve your balance at this time.')
      });
  });
}
actionMap.set(INTENT_ACCOUNT_BALANCE, getAccountBalance);
app.handleRequest(actionMap);
When this executes, I see the following logs
Notice that parts of the function are being executed multiple times, and the second execution is failing. If I close out the auth0.getProfile call after logging userInfo, then the function works, but obviously doesn't have userInfo.
Any idea why parts of this function are executing multiple times and why some calls would fail?

The userInfo is undefined at point (2) because there has been an error (reported on the line right beneath it, which was the previous logged message). Your error block does not leave the function, so it continues to run with an invalid userInfo object.
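If you keep the callback style, an early return in the error branch stops the rest of the handler from running with an undefined userInfo. A minimal sketch (the fallback message is reused from your own code):

auth0.getProfile(app.getUser().accessToken, function (err, userInfo) {
  if (err) {
    console.error('Error getting userProfile from Auth0: ' + err);
    // tell the user something went wrong and leave the callback,
    // so the invalid userInfo is never used below
    app.tell('I was unable to retrieve your balance at this time.');
    return;
  }
  // ... the rest of the handler can now safely use userInfo
});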
But that doesn't explain why the callback is getting called twice - once with a valid userInfo and once with an err. The documentation (although not the example) for AuthenticationClient.getProfile() indicates that it returns a Promise (or undefined - although it doesn't say why it might return undefined), so I am wondering if this ends up calling the callback twice.
Since it returns a promise, you can omit the callback function and just handle it with something like this:
function getAccountBalance(app) {
  let accountowner = app.getArgument(PARAM_ACCOUNT_OWNER);
  console.log('accessToken: ' + app.getUser().accessToken);
  var accessToken = app.getUser().accessToken;
  auth0.getProfile(accessToken)
    .then(userInfo => {
      console.log('getAccountBalance userInfo: ' + userInfo);
      // query Firestore based on the user's email
      var transactions = db.collection('bank').doc(userInfo.email)
        .collection('accounts').doc(accountowner)
        .collection('transactions');
      return transactions.get();
    })
    .then(snapshot => {
      var workingbalance = 0;
      snapshot.forEach(doc => {
        workingbalance = workingbalance + doc.data().amount;
      });
      app.tell(accountowner + " has a balance of $" + workingbalance);
    })
    .catch(err => {
      console.error('Error:', err);
      app.tell('I was unable to retrieve your balance at this time.');
    });
}
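One optional refinement (my own suggestion, not something the question requires): returning the promise chain from getAccountBalance makes the asynchronous flow explicit and easier to test. A sketch of the change, with the body kept the same as above:

function getAccountBalance(app) {
  let accountowner = app.getArgument(PARAM_ACCOUNT_OWNER);
  var accessToken = app.getUser().accessToken;
  // returning the chain lets callers (and tests) know when the work is done
  return auth0.getProfile(accessToken)
    .then(userInfo => { /* query Firestore and app.tell() as above */ })
    .catch(err => { /* apologize to the user as above */ });
}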

Related

Query was already executed, MongooseError

I am making a transaction controller in NodeJS but when I send data through postman I get this error:
MongooseError: Query was already executed: Customer.updateOne({ name: 'Axel' }, { '$set': { balance: 98...
at model.Query._wrappedThunk [as _updateOne] (C:\Users\m4afy\Desktop\the spark foundation\Banking system\node_modules\mongoose\lib\helpers\query\wrapThunk.js:23:19)
at C:\Users\m4afy\Desktop\the spark foundation\Banking system\node_modules\kareem\index.js:494:25
at process.processTicksAndRejections (node:internal/process/task_queues:77:11) {
originalStack: 'Error\n' +
' at model.Query._wrappedThunk [as _updateOne] (C:\\Users\\m4afy\\Desktop\\the spark foundation\\Banking system\\node_modules\\mongoose\\lib\\helpers\\query\\wrapThunk.js:27:28)\n' +
' at C:\\Users\\m4afy\\Desktop\\the spark foundation\\Banking system\\node_modules\\kareem\\index.js:494:25\n' +
' at process.processTicksAndRejections (node:internal/process/task_queues:77:11)'
}
My transaction code goes as follows:
const Transaction = require('../models/transaction')
const Customer = require('../models/customers')

const cashTransaction = async (req, res, next) => {
  const { from, to, amount } = req.body
  try {
    let sender = await Customer.findOne({ 'name': `${from}` })
    let senderBalance = Number(sender.balance) - Number(amount)
    await Customer.updateOne({ name: from }, { balance: senderBalance }, err => {
      if (err) {
        console.log(err)
        res.status(500).send('Could not update sender information')
      } else {
        console.log('Sender information updated');
      }
    })
    let receiver = await Customer.findOne({ name: to })
    let receiverBalance = Number(receiver.balance) + Number(amount)
    await Customer.updateOne({ name: to }, { balance: receiverBalance }, err => {
      if (err) {
        console.log(err);
        res.status(500).send('Could not update receiver information')
      } else {
        console.log('receiver information updated');
      }
    })
    const transaction = new Transaction({
      from,
      to,
      amount
    })
    await transaction.save()
    res.status(200).json({ transaction: { transaction }, message: 'transaction saved' })
  } catch (error) {
    console.log(error);
    res.status(500).send('An Error occured');
  }
}
How can I update it multiple times? It worked once, but then I get this error. Any help?
Using await and a callback simultaneously will result in the query executing twice.
The Model.updateOne method returns a query object. Passing a callback function causes the query to be immediately executed and then the callback is called. Await will likewise cause the query to be executed, and will return the result.
When you use both at the same time, both try to execute the query, but a specific instance of a query can only be executed once, hence the error.
You might try using await inside of a try/catch instead of a callback.
Each call to updateOne instantiates a new query object, so you should be able to do both updates this way.
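For example, a minimal sketch of the two updates using await only (no callbacks), with the same Customer model and the surrounding try/catch from your controller:

try {
  const sender = await Customer.findOne({ name: from });
  const senderBalance = Number(sender.balance) - Number(amount);
  // no callback is passed, so await executes each query exactly once
  await Customer.updateOne({ name: from }, { balance: senderBalance });
  console.log('Sender information updated');

  const receiver = await Customer.findOne({ name: to });
  const receiverBalance = Number(receiver.balance) + Number(amount);
  await Customer.updateOne({ name: to }, { balance: receiverBalance });
  console.log('Receiver information updated');
} catch (error) {
  console.log(error);
  res.status(500).send('An Error occured');
}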

Wait for API to return its response before executing the next iteration in nodejs

I am new to nodejs.
I have an array of strings with around 30,000+ values, in the format below:
tickerArray = ["A","AA","AAA", ..........., "C"]
I want to loop through these and send each value to the external Polygon.io API. But the Polygon.io free plan only allows 5 API calls per minute. Below is my code:
await tickerArray.reduce((key: any, tickerSymbol: any) =>
  key.then(async () => await stockTickeDetails.runTask(tickerSymbol)),
  starterPromise
);
}).catch(function (error: any) {
  console.log("Error:" + error);
});
My runTask function is below:
public runTask(tickerSymbol: any) {
  return axios.get('https://api.polygon.io/v1/meta/symbols/' + tickerSymbol + '/company?apiKey=' + process.env.API_KEY)
    .then(function (response: any) {
      console.log("Logo : " + response.data.logo + 'TICKER :' + tickerSymbol);
      let logo = response.data.logo;
      let updateLogo = stockTickers.updateOne({ ticker: tickerSymbol }, { $set: { "logo": logo } })
    })
    .catch(function (error: any) {
      console.log("Error from symbol service file : " + error);
    });
}
What I need is: if I pass the 0th index value, i.e. "A", to the runTask method, it should call the API, return the result, and from that result I need to update the database collection accordingly.
Then it should go back to the loop, fetch the 1st index value, i.e. "AA", and repeat the process.
After executing 2 API calls I am getting the following error:
Request failed with status code 429. You've exceeded the maximum requests per minute.
I guess this is because it is not waiting for each request to finish before sending the next one. How can I resolve this by adding a timeout so that only 5 API calls are made per minute?
You can achieve this using the Promise pattern; here is a solution:
var tickerArray = ["A","AA","AAA", ..........., "C"]
const request = require('request'); // assuming the request package, since this answer uses it instead of axios

let requests = tickerArray.map(tickerSymbol => {
  // generate a promise for each API call
  return new Promise((resolve, reject) => {
    request({
      uri: 'https://api.polygon.io/v1/meta/symbols/' + tickerSymbol + '/company?apiKey=' + process.env.API_KEY,
      method: 'GET',
      json: true // parse the response body as JSON
    },
    (err, res, body) => {
      if (err) { reject(err) }
      // resolve with the parsed body plus the symbol that produced it
      resolve({ response: body, request: tickerSymbol })
    })
  })
})

Promise.all(requests).then((objArray) => {
  // runs once all the promises have resolved (Promise.all rejects if any of them fails)
  objArray.forEach(obj => {
    if (obj) {
      console.log("Logo : " + obj.response.logo + ' TICKER : ' + obj.request);
      let logo = obj.response.logo;
      let updateLogo = stockTickers.updateOne({ ticker: obj.request }, { $set: { "logo": logo } })
    }
  })
}).catch(error => console.log("Error from symbol service file : " + error))
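Note that Promise.all still starts every request at roughly the same time, so the 5-requests-per-minute limit can still be hit. An alternative sketch of my own (not part of the answer above), using a hypothetical processAll helper that runs the question's runTask one symbol at a time and pauses after every five calls:

// process symbols strictly in sequence; sleep after every 5 calls to stay
// under the free plan's assumed 5-requests-per-minute limit
const sleep = (ms: any) => new Promise(resolve => setTimeout(resolve, ms));

async function processAll(tickerArray: any) {
  for (let i = 0; i < tickerArray.length; i++) {
    // runTask returns the axios promise, so await waits for the API call
    // and the database update before moving to the next symbol
    await stockTickeDetails.runTask(tickerArray[i]);
    if ((i + 1) % 5 === 0) {
      await sleep(60 * 1000); // wait a minute before the next batch of five
    }
  }
}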

Mongoose does not save my data, error message: Cannot read property 'then' of undefined

What I am trying to do here is retrieve some data from an API and save it in my MongoDB database. Here is my code:
const request = require("request");
const Vege = require("./models/Vege");

request(
  "https://data.taipei/api/v1/dataset/f4f80730-df59-44f9-bfb8-32c136b1ae68?scope=resourceAquire&limit=1",
  { json: true },
  (err, res, body) => {
    if (err) return console.log(err);
    let data = body.result.results; // an array of objects
    data.forEach(vege => {
      let trimmed = JSON.parse(
        JSON.stringify(vege).replace(/"\s+|\s+"/g, '"')
      ); // just removing spaces
      const newVege = new Vege({
        品名: trimmed.品名,
        市場: trimmed.市場,
        "平均(元 / 公斤)": trimmed["平均(元 / 公斤)"],
        種類: trimmed.種類,
        日期: trimmed.日期,
      });
      console.log(newVege); // I do see the object I want to save in the console
      newVege.save(err => console.log(err)) // "Cannot read property 'then' of undefined"
        .then(() => console.log("Saved successfully"));
    })
  }
);
When I call .save() on my newVege object and then check the MongoDB Atlas collection, it's not there, and the error message says "Cannot read property 'then' of undefined". This confuses me, because when I console.log the newVege it's exactly what I want to save. I'm not sure what is undefined or why nothing is saved. I appreciate any help!
----Update----
I kind of fixed the issue of the data not saving by moving the above code into a "/" route; now the code looks like this:
app.get("/", (req, res) => {
request(
"https://data.taipei/api/v1/dataset/f4f80730-df59-44f9-bfb8-32c136b1ae68?scope=resourceAquire&limit=1",
{ json: true },
(err, res, body) => {
if (err) return console.log(err);
let data = body.result.results;
for (const vege of data) {
let trimmed = JSON.parse(
JSON.stringify(vege).replace(/"\s+|\s+"/g, '"')
);
const newVege = new Vege({
品名: trimmed.品名,
市場: trimmed.市場,
"平均(元 / 公斤)": trimmed["平均(元 / 公斤)"],
種類: trimmed.種類,
日期: trimmed.日期,
});
newVege
.save()
.then((vege) => console.log(vege))
.catch((err) => res.status(400).json(err));
};
}
);
});
But then I have a new issue: when I test it with Postman, the request never finishes (it keeps loading and saying it is sending the request). If I click cancel and check my database, I do see the data saved. Any idea why the request never completes?
forEach is not meant to be used with asynchronous functions like save.
Try using for...of instead.
Maybe like this:
for (const vege of data) {
  let trimmed = JSON.parse(
    JSON.stringify(vege).replace(/"\s+|\s+"/g, '"')
  ); // just removing spaces
  const newVege = new Vege({
    品名: trimmed.品名,
    市場: trimmed.市場,
    "平均(元 / 公斤)": trimmed["平均(元 / 公斤)"],
    種類: trimmed.種類,
    日期: trimmed.日期,
  });
  newVege.save()
    .then(result => {
      // result should contain the saved vege
      console.log("Saved successfully")
    })
}
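Regarding the update: Postman keeps loading because the route never sends a response after the loop finishes (note that the res inside the request callback also shadows the Express res). A rough sketch of one way to handle it, assuming the request callback is made async so each save can be awaited before responding:

app.get("/", (req, res) => {
  request(
    "https://data.taipei/api/v1/dataset/f4f80730-df59-44f9-bfb8-32c136b1ae68?scope=resourceAquire&limit=1",
    { json: true },
    async (err, apiRes, body) => { // renamed to avoid shadowing the Express res
      if (err) return res.status(500).json(err);
      try {
        const data = body.result.results;
        for (const vege of data) {
          const trimmed = JSON.parse(JSON.stringify(vege).replace(/"\s+|\s+"/g, '"'));
          const newVege = new Vege({ /* same fields as above */ });
          await newVege.save(); // wait for each save before moving on
        }
        res.json({ message: "Saved successfully" }); // respond so the client stops waiting
      } catch (saveErr) {
        res.status(400).json(saveErr);
      }
    }
  );
});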

MongoDB change stream times out if database is down for some time

I am using a MongoDB change stream in Node.js. Everything works fine, but if the database goes down and takes more than 10-15 seconds to come back up, the change stream throws a timeout error. Here is my change stream watcher code:
Service.prototype.watcher = function (db) {
  let collection = db.collection('tokens');
  let changeStream = collection.watch({ fullDocument: 'updateLookup' });
  let resumeToken, newChangeStream;
  changeStream.on('change', next => {
    resumeToken = next._id;
    console.log('data is ', JSON.stringify(next))
    changeStream.close();
    // console.log('resumeToken is ', JSON.stringify(resumeToken))
    newChangeStream = collection.watch({ resumeAfter: resumeToken });
    newChangeStream.on('change', next => {
      console.log('insert called ', JSON.stringify(next))
    });
  });
}
However, on the database end I have handled this, i.e. detecting when the database goes down or reconnects, using this code:
this.db.on('reconnected', function () {
  console.info('MongoDB reconnected!');
});

this.db.on('disconnected', function () {
  console.warn('MongoDB disconnected!');
});
But I am not able to make the change stream watcher stop when the database is down and start again when the database reconnects. Is there a better way to do this?
What you want to do is to encapsulate the watch() call in a function. This function will then call itself on error, to rewatch the collection using a previously saved resume token. What is missing from the code you have is the error handler. For example:
const MongoClient = require('mongodb').MongoClient
const uri = 'mongodb://localhost:27017/test?replicaSet=replset'

var resume_token = null

run()

function watch_collection(con, db, coll) {
  console.log(new Date() + ' watching: ' + coll)
  con.db(db).collection(coll).watch({resumeAfter: resume_token})
    .on('change', data => {
      console.log(data)
      resume_token = data._id
    })
    .on('error', err => {
      console.log(new Date() + ' error: ' + err)
      // restart the watch, resuming from the last saved token
      watch_collection(con, db, coll)
    })
}

async function run() {
  con = await MongoClient.connect(uri, {"useNewUrlParser": true})
  watch_collection(con, 'test', 'test')
}
Note that watch_collection() contains the watch() method along with its handler. On change, it will print the change and store the resume token. On error, it will call itself to rewatch the collection again.
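One addition of my own (not part of the answer above): if the database is still down, the error handler fires again immediately and the rewatch fails in a tight loop, so it can help to back off briefly before rewatching. For example, the error handler could become:

.on('error', err => {
  console.log(new Date() + ' error: ' + err)
  // back off for a few seconds before rewatching, so we don't spin
  // while the database is still unreachable
  setTimeout(() => watch_collection(con, db, coll), 5000)
})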
This is the solution I developed. Add the stream's error handler so it does not crash when there is an error, and restart the stream when the database reconnects. Also save the resume token to a file for every event. This helps when the application crashes or is stopped and then run again: if x records were inserted during that time, on restart you just read the last resume token from the file and start the watcher from there. It then receives everything inserted after that point, so no record is missed. Here is the code:
var rsToken, changeStream;
try {
  rsToken = await this.getResumetoken()
} catch (error) {
  rsToken = null;
}

if (!rsToken)
  changeStream = collection.watch({ fullDocument: 'updateLookup' });
else
  changeStream = collection.watch({ fullDocument: 'updateLookup', resumeAfter: rsToken });

changeStream.on('change', next => {
  resumeToken = next._id;
  this.saveTokenInfile(resumeToken)
  cs_processor.process(next)
});

changeStream.on('error', err => {
  console.log('changestream error ')
})
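For completeness, here is a sketch of the two file helpers used above (saveTokenInfile and getResumetoken). These are my own guesses at an implementation, assuming they live on the same Service prototype as the watcher and that the resume token can be round-tripped through JSON (which holds for recent drivers, where the token looks like { _data: '...' }):

const fs = require('fs').promises;
const TOKEN_FILE = './resume-token.json'; // hypothetical location for the token file

Service.prototype.saveTokenInfile = function (token) {
  // persist the latest resume token so a restart can pick up where it left off
  return fs.writeFile(TOKEN_FILE, JSON.stringify(token));
};

Service.prototype.getResumetoken = async function () {
  // throws if the file does not exist yet, which the caller treats as "no token"
  const raw = await fs.readFile(TOKEN_FILE, 'utf8');
  return JSON.parse(raw);
};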

How To Bind Node-js DB Query to Web Form

I'm using Node and Postgres, and I'm new to writing async functions. What I'm trying to do is a very simple query that counts the records in the database, adds one to the count, and returns the result. The result needs to be visible before the DOM is generated. I don't know how to do this, since an async function doesn't return the value to its caller (and probably I still have a synchronous mindset). Here's the function:
function generateRTA(callback) {
  var current_year = new Date().getFullYear();
  const qry = `SELECT COUNT(date_part('year', updated_on))
               FROM recruitment_process
               WHERE date_part('year', updated_on) = $1;`
  const value = [current_year]
  pool.query(qry, value, (err, res) => {
    if (err) {
      console.log(err.stack)
    } else {
      var count = parseInt(res.rows[0].count) + 1
      var rta_no = String(current_year) + '-' + count
      callback(null, rta_no)
    }
  })
}
For the front end I'm using Pug with a simple HTML form.
const rta_no = generateRTA(function (err, res) {
  if (err) {
    console.log(err)
  } else {
    console.log(res)
  }
})

app.get('/new_application', function (req, res) {
  res.render('new_application', { rta_number: rta_no })
});
I can see the rta_no in console.log but how do I pass it back to the DOM when the value is ready?
This uses an asynchronous AJAX call: the page updates the div with id "div1" when it gets the response from Node.js.
app.js
app.get("/webform", (req, res) => {
res.render("webform", {
title: "Render Web Form"
});
});
app.get("/new_application", (req, res) => {
// Connect to database.
var connection = getMySQLConnection();
connection.connect();
// Do the query to get data.
connection.query('SELECT count(1) as cnt FROM test ', function(err, rows, fields) {
var person;
if (err) {
res.status(500).json({"status_code": 500,"status_message": "internal server error"});
} else {
// Check if the result is found or not
if(rows.length==1) {
res.status(200).json({"count": rows[0].cnt});
} else {
// render not found page
res.status(404).json({"status_code":404, "status_message": "Not found"});
}
}
});
// Close connection
connection.end();
});
webform.pug - Via asynchronous call
html
  head
    script(src='https://ajax.googleapis.com/ajax/libs/jquery/3.1.1/jquery.min.js')
    script.
      $(document).ready(function(){
        $.ajax({url: "/new_application", success: function(result){
          $("#div1").html(result.count);
        }});
      });
  body
    div
      | Total count goes here :
      #div1 value loading ...
That seems okay, I'm just not sure of this:
The result will be visible before the DOM is generated
This constraint defeats the purpose of async, as your DOM would then have to wait for the returned value. Instead of waiting for it, you could just render the page and, once the function returns and runs your callback, update the value.
Also, perhaps it's worth having a look at promises.
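To illustrate the promise suggestion: pool.query in node-postgres returns a promise when no callback is passed, so generateRTA can return that promise and the route can render once the value is ready. A sketch, assuming the same pool and Pug template from the question:

function generateRTA() {
  const current_year = new Date().getFullYear();
  const qry = `SELECT COUNT(date_part('year', updated_on))
               FROM recruitment_process
               WHERE date_part('year', updated_on) = $1;`;
  // no callback passed, so pool.query returns a promise
  return pool.query(qry, [current_year]).then(res => {
    const count = parseInt(res.rows[0].count) + 1;
    return String(current_year) + '-' + count;
  });
}

app.get('/new_application', function (req, res) {
  generateRTA()
    .then(rta_no => res.render('new_application', { rta_number: rta_no }))
    .catch(err => {
      console.log(err.stack);
      res.status(500).send('Could not generate the RTA number');
    });
});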
