Firebase functions showing error "Cannot read property 'previous' of undefined" - node.js

I implemented Firebase functions in my app and previously it was working fine, but now it is showing the error Cannot read property 'previous' of undefined.
Error logs of the function:
TypeError: Cannot read property 'previous' of undefined
at exports.LoveNotification.functions.database.ref.onWrite (/user_code/index.js:223:16)
at cloudFunctionNewSignature (/user_code/node_modules/firebase-functions/lib/cloud-functions.js:109:23)
at cloudFunction (/user_code/node_modules/firebase-functions/lib/cloud-functions.js:139:20)
at /var/tmp/worker/worker.js:730:24
at process._tickDomainCallback (internal/process/next_tick.js:135:7)

The signature of Cloud Functions triggers has changed. You seem to be using code written for the beta SDK, but are deploying with the latest version. See the migration guide for complete instructions.
From there:
Before (<= v0.9.1)
exports.dbWrite = functions.database.ref('/path').onWrite((event) => {
  const beforeData = event.data.previous.val(); // data before the write
  const afterData = event.data.val(); // data after the write
});
Now (>= v1.0.0)
exports.dbWrite = functions.database.ref('/path').onWrite((change, context) => {
  const beforeData = change.before.val(); // data before the write
  const afterData = change.after.val(); // data after the write
});
So your code should look something like this:
exports.LoveNotification = functions.database.ref("/Member/{pushId}").onWrite((change, context) => {
  if (change.before.exists()) {
    // Only notify for newly created members, not updates
    return null;
  }
  const eventLove = change.after.val();
  const author = eventLove.fullname;
  const title = eventLove.course;
  const key = eventLove.key;
  const payload = {
    data: {
      post_id: key
    },
    notification: {
      title: author + ' Joined the app',
      body: `Course ` + title,
      sound: "default",
      icon: "ic_launcher",
    }
  };
  const options = {
    priority: "high",
    timeToLive: 60 * 60 * 24 // 24 hours
  };
  console.log('Sending notifications');
  return admin.messaging().sendToTopic("Member", payload, options);
});

Related

AWS Timestream - SDK V3 Nodejs, TimestreamWriteClient.send() - TypeError: command.resolveMiddleware is not a function. How to solve this?

I have the following Lambda function in Node.js 14.x using AWS SDK v3 for a Timestream insertion process:
'use strict'
// https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-timestream-write/index.html
const { TimestreamWriteClient } = require("@aws-sdk/client-timestream-write")

const client = new TimestreamWriteClient({ region: process.env.region })

module.exports.fnPostElectricityTimestream = async event => {
  try {
    console.log('🚀 START fnPostElectricityTimestream')
    const jsonBody = event
    const topic = jsonBody.topic
    const arrTopic = topic.split('/')
    let dbName = arrTopic[4]
    dbName = 'smaj56g' //Test
    const currentTime = Date.now().toString() // Unix time in milliseconds get jsonBody.e_timestamp
    const e_timestamp = (jsonBody.e_timestamp) * 1000
    const dimensions = [{
      'Name': 'n',
      'Value': 'v'
    }]
    const e_ch_1 = {
      'Dimensions': dimensions,
      'MeasureName': 'e_ch_1',
      'MeasureValue': '[1,2,3]',
      'MeasureValueType': 'VARCHAR',
      'Time': currentTime
    }
    const records = [e_ch_1]
    const params = {
      DatabaseName: dbName,
      TableName: 'e_ch_1_v_w',
      Records: records
    }
    const data = await client.send(params);
    console.log('data', data)
    return {
      message: ''
    }
  } catch (error) {
    console.log('🚀 fnPostElectricityTimestream - error.stack:', error.stack)
    return {
      message: error.stack
    }
  }
}
When I run the lambda this is the message I am getting:
2022-08-12T14:58:39.496Z e578a391-06b4-48a9-9f9d-9440a373c19e INFO 🚀 fnPostElectricityTimestream - error.stack: TypeError: command.resolveMiddleware is not a function
at TimestreamWriteClient.send (/var/task/node_modules/@aws-sdk/smithy-client/dist-cjs/client.js:13:33)
at Runtime.module.exports.fnPostElectricityTimestream [as handler] (/var/task/src/ElectricityTimestream/fnPostElectricityTimestream.js:38:31)
at Runtime.handleOnceNonStreaming (/var/runtime/Runtime.js:73:25)
There is something wrong with const data = await client.send(params).
I am following the async/await code in this documentation.
How can I solve this issue?
Your current insertion code is wrong. In order to write records to Timestream, you need to use the WriteRecordsCommand. Refer to the docs for a better understanding. Sample code:
import { TimestreamWriteClient, WriteRecordsCommand } from "@aws-sdk/client-timestream-write";

const client = new TimestreamWriteClient({ region: "REGION" }); // your AWS region

const params = {
  DatabaseName: dbName,   // your database
  TableName: tableName,   // your table name
  Records: records        // records you want to insert
};

const command = new WriteRecordsCommand(params);
const data = await client.send(command);
You need to create a command object before calling send().
For example:
import { TimestreamWriteClient, CreateDatabaseCommand } from "@aws-sdk/client-timestream-write";

// CreateDatabaseCommand only needs the database name
const params = {
  DatabaseName: dbName
};

const command = new CreateDatabaseCommand(params);
const data = await client.send(command);

Unable to stub an exported function with Sinon

I need to test the following createFacebookAdVideoFromUrl() that consumes a retryAsyncCall that I'd like to stub with Sinon:
async function createFacebookAdVideoFromUrl(accountId, videoUrl, title, facebookToken = FACEBOOK_TOKEN, options = null, businessId = null) {
  const method = 'POST';
  const url = `${FACEBOOK_URL}${adsSdk.FacebookAdsApi.VERSION}/${accountId}/advideos`;
  const formData = {
    access_token: businessId ? getFacebookConfig(businessId).token : facebookToken,
    title,
    name: title,
    file_url: videoUrl,
  };
  const callback = () => requestPromise({ method, url, formData });
  const name = 'createFacebookAdVideoFromUrl';
  const retryCallParameters = buildRetryCallParameters(name, options);
  const adVideo = await retryAsyncCall(callback, retryCallParameters);
  logger.info('ADVIDEO', adVideo);
  return { id: JSON.parse(adVideo).id, title };
}
This retryAsyncCall function is exported as such:
module.exports.retryAsyncCall = async (callback, retryCallParameters, noRetryFor = [], customRetryCondition = null) => {
  // Implementation details ...
};
Here is how I wrote my test so far:
it.only("should create the video calling business's Facebook ids", async () => {
  const payload = createPayloadDataBuilder({
    businessId: faker.internet.url(),
  });
  const retryAsyncCallStub = sinon.stub(retryAsyncCallModule, 'retryAsyncCall').resolves('random');

  const createdFacebookAd = await FacebookGateway.createFacebookAdVideoFromUrl(
    payload.accountId,
    payload.videoUrl,
    payload.title,
    payload.facebookToken,
    payload.options,
    payload.businessId,
  );

  assert.strictEqual(retryAsyncCallStub.calledOnce, true);
  assert.strictEqual(createdFacebookAd, { id: 'asdf', title: 'asdf' });
});
I don't expect it to work straight away, as I am working in a TDD fashion, but I do expect retryAsyncCall to be stubbed out. Yet I am still getting this TypeError: Cannot read property 'inc' of undefined error from Mocha, which refers to an inner function of retryAsyncCall.
How can I make the Sinon stubbing work?
I fixed it by changing the way the function is imported in my SUT:
// from
const { retryAsyncCall } = require('../../../helpers/retry-async');
// to
const retry = require('../../../helpers/retry-async');
and in my test file:
// from
import * as retryAsyncCallModule from '../../../src/common/helpers/retry-async';
// to
import retryAsyncCallModule from '../../../src/common/helpers/retry-async';
The use of destructuring makes a local copy of the function reference at require time instead of reading it off the module object at call time, so the stub was not applied to the reference the SUT was actually using.
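For illustration, here is a minimal, hypothetical sketch (the file names are made up) of why the destructured require defeats the stub while the namespaced require does not:
// helpers/retry-async.js
module.exports.retryAsyncCall = async (callback) => callback();

// sut-destructured.js - captures a direct reference at require time
const { retryAsyncCall } = require('./helpers/retry-async');
module.exports.run = () => retryAsyncCall(() => 'real');

// sut-namespaced.js - looks the property up on the module object at call time
const retry = require('./helpers/retry-async');
module.exports.run = () => retry.retryAsyncCall(() => 'real');

// test.js
const sinon = require('sinon');
const retryModule = require('./helpers/retry-async');
sinon.stub(retryModule, 'retryAsyncCall').resolves('stubbed');
// sut-destructured.run() still invokes the original function: its local
// binding was created before the stub replaced the property on module.exports.
// sut-namespaced.run() resolves to 'stubbed', because the property lookup
// happens at call time on the (now stubbed) module object.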

Firebase realtime database get wildcard data

I'm trying to send a notification to users whenever their message receives a new reply. However, in the firebase cloud functions logs it is returning errors and not sending a notification. Here is the error:
TypeError: Cannot read properties of undefined (reading 'uid')
Here is my function:
const functions = require("firebase-functions");
const admin = require("firebase-admin");

exports.sendNewTripNotification = functions
  .database
  .ref("messagepool/{uid}/responses/")
  .onWrite((event) => {
    const messageid = event.params.uid;
    // console.log('User to send notification', uuid);
    const ref = admin.database().ref(`messagepool/${messageid}/author`);
    return ref.once("value", function(snapshot) {
      const ref2 = admin.database().ref(`users/${snapshot.val()}/token`);
      return ref2.once("value", function(snapshot2) {
        const payload = {
          notification: {
            title: "💌 New Reply",
            body: "You have received a new reply to your message!",
          },
        };
        admin.messaging().sendToDevice(snapshot2.val(), payload);
      }, function(errorObject) {
        console.log("The read failed: " + errorObject.code);
      });
    }, function(errorObject) {
      console.log("The read failed: " + errorObject.code);
    });
  });
Am I reading the wildcard uid incorrectly? Why is this happening?
The handler you pass to onWrite() takes 2 parameters: change, which is a Change object wrapping the before and after DataSnapshots, and context, which contains the params you are looking for. Try refactoring the code as shown below:
exports.sendNewTripNotification = functions
  .database
  .ref("messagepool/{uid}/responses/")
  .onWrite((change, context) => {
    const { uid } = context.params;
    console.log('UID:', uid);
  });

How to get a docx file generated in Node saved to Firebase Storage

Hi, I am quite new to docxtemplater, but I absolutely love how it works. Right now I am able to generate a new docx document as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const { Storage } = require('@google-cloud/storage');
var PizZip = require('pizzip');
var Docxtemplater = require('docxtemplater');

admin.initializeApp();
const storage = new Storage();
const BUCKET = 'gs://myapp.appspot.com';

exports.test2 = functions.https.onCall((data, context) => {
  // The error object contains additional information when logged with JSON.stringify
  // (it contains a properties object containing all suberrors).
  function replaceErrors(key, value) {
    if (value instanceof Error) {
      return Object.getOwnPropertyNames(value).reduce(function(error, key) {
        error[key] = value[key];
        return error;
      }, {});
    }
    return value;
  }

  function errorHandler(error) {
    console.log(JSON.stringify({ error: error }, replaceErrors));
    if (error.properties && error.properties.errors instanceof Array) {
      const errorMessages = error.properties.errors.map(function (error) {
        return error.properties.explanation;
      }).join("\n");
      console.log('errorMessages', errorMessages);
      // errorMessages is a humanly readable message looking like this:
      // 'The tag beginning with "foobar" is unopened'
    }
    throw error;
  }

  let file_name = 'example.docx'; // this is the file saved in my firebase storage
  const File = storage.bucket(BUCKET).file(file_name);
  const readable = File.createReadStream();
  var buffers = [];
  readable.on('data', (buffer) => {
    buffers.push(buffer);
  });
  readable.on('end', () => {
    var buffer = Buffer.concat(buffers);
    var zip = new PizZip(buffer);
    var doc;
    try {
      doc = new Docxtemplater(zip);
      doc.setData({
        first_name: 'Fred',
        last_name: 'Flinstone',
        phone: '0652455478',
        description: 'Web app'
      });
      try {
        doc.render();
        doc.pipe(remoteFile2.createReadStream());
      } catch (error) {
        errorHandler(error);
      }
    } catch (error) {
      errorHandler(error);
    }
  });
});
My issue is that I keep getting an error that doc.pipe is not a function. I am quite new to Node.js, but is there a way to have the newly generated doc saved directly to Firebase Storage after doc.render()?
Taking a look at the type of doc, we find that it is a Docxtemplater object, and doc.pipe is not a function of that class. To get the file out of Docxtemplater, we need to call doc.getZip() to return the zip (this will be either a JSZip v2 or PizZip instance, depending on what was passed to the constructor). Now that we have the zip object, we need to generate the binary data of the zip, which is done using generate({ type: 'nodebuffer' }) (to get a Node.js Buffer containing the data). Unfortunately, because the docxtemplater library doesn't support JSZip v3+, we can't make use of the generateNodeStream() method to get a stream to use with pipe().
With this buffer, we can either reupload it to Cloud Storage or send it back to the client that is calling the function.
The first option is relatively simple to implement:
import { v4 as uuidv4 } from 'uuid'; // handy for generating a unique file name

/* ... */

const contentBuffer = doc.getZip()
  .generate({ type: 'nodebuffer' });

const targetName = "compiled.docx"; // or `${uuidv4()}.docx` for a unique name
const targetStorageRef = admin.storage().bucket()
  .file(targetName);

await targetStorageRef.save(contentBuffer);

// send back the bucket-name pair to the caller
return { bucket: targetStorageRef.bucket.name, name: targetName };
However, sending the file itself back to the client isn't as easy, because it involves switching to an HTTP Event Function (functions.https.onRequest): a Callable Cloud Function can only return JSON-compatible data. Below is a middleware function that wraps a callable-style handler function but supports returning binary data to the client.
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import corsInit from "cors";

admin.initializeApp();

const cors = corsInit({ origin: true }); // TODO: Tighten

function callableRequest(handler) {
  if (!handler) {
    throw new TypeError("handler is required");
  }
  return (req, res) => {
    cors(req, res, (corsErr) => {
      if (corsErr) {
        console.error("Request rejected by CORS", corsErr);
        res.status(412).json({ error: "cors", message: "origin rejected" });
        return;
      }
      // for validateFirebaseIdToken, see https://github.com/firebase/functions-samples/blob/main/authorized-https-endpoint/functions/index.js
      validateFirebaseIdToken(req, res, async () => { // validateFirebaseIdToken won't pass errors to `next()`
        try {
          const data = req.body;
          const context = {
            auth: req.user ? { token: req.user, uid: req.user.uid } : null,
            instanceIdToken: req.get("Firebase-Instance-ID-Token"), // this is used with FCM
            rawRequest: req
          };
          let result = await handler(data, context);
          if (result && typeof result === "object" && "buffer" in result) {
            res.writeHead(200, [
              ["Content-Type", result.contentType],
              ["Content-Disposition", "attachment; filename=" + result.filename]
            ]);
            res.end(result.buffer);
          } else {
            result = functions.https.encode(result);
            res.status(200).send({ result });
          }
        } catch (err) {
          if (!(err instanceof functions.https.HttpsError)) {
            // This doesn't count as an 'explicit' error.
            console.error("Unhandled error", err);
            err = new functions.https.HttpsError("internal", "INTERNAL");
          }
          const { status } = err.httpErrorCode;
          const body = { error: err.toJSON() };
          res.status(status).send(body);
        }
      });
    });
  };
}
exports.test2 = functions.https.onRequest(callableRequest(async (data, context) => {
  /* ... */
  const contentBuffer = doc.getZip()
    .generate({ type: "nodebuffer" });

  const targetName = "compiled.docx";

  return {
    buffer: contentBuffer,
    contentType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    filename: targetName
  };
}));
In your current code, there are a number of odd segments where you have nested try-catch blocks and variables in different scopes. To help combat this, we can make use of File#download(), which returns a Promise that resolves with the file contents in a Node.js Buffer, and File#save(), which returns a Promise that resolves when the given Buffer has been uploaded.
Rolling this together for reuploading to Cloud Storage gives:
// This code is based off the examples provided for docxtemplater
// Copyright (c) Edgar HIPP [Dual License: MIT/GPLv3]
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import PizZip from "pizzip";
import Docxtemplater from "docxtemplater";

admin.initializeApp();

// The error object contains additional information when logged with JSON.stringify
// (it contains a properties object containing all suberrors).
function replaceErrors(key, value) {
  if (value instanceof Error) {
    return Object.getOwnPropertyNames(value).reduce(
      function (error, key) {
        error[key] = value[key];
        return error;
      },
      {}
    );
  }
  return value;
}

function errorHandler(error) {
  console.log(JSON.stringify({ error: error }, replaceErrors));
  if (error.properties && error.properties.errors instanceof Array) {
    const errorMessages = error.properties.errors
      .map(function (error) {
        return error.properties.explanation;
      })
      .join("\n");
    console.log("errorMessages", errorMessages);
    // errorMessages is a humanly readable message looking like this:
    // 'The tag beginning with "foobar" is unopened'
  }
  throw error;
}
exports.test2 = functions.https.onCall(async (data, context) => {
  const file_name = "example.docx"; // this is the file saved in my firebase storage
  const templateRef = admin.storage().bucket()
    .file(file_name);
  const template_content = (await templateRef.download())[0];

  const zip = new PizZip(template_content);

  let doc;
  try {
    doc = new Docxtemplater(zip);
  } catch (error) {
    // Catch compilation errors (errors caused by the compilation of the template: misplaced tags)
    errorHandler(error);
  }

  doc.setData({
    first_name: "Fred",
    last_name: "Flinstone",
    phone: "0652455478",
    description: "Web app",
  });

  try {
    doc.render();
  } catch (error) {
    errorHandler(error);
  }

  const contentBuffer = doc.getZip().generate({ type: "nodebuffer" });

  // do something with contentBuffer
  // e.g. reupload to Cloud Storage
  const targetName = "compiled.docx";
  const targetStorageRef = admin.storage().bucket().file(targetName);
  await targetStorageRef.save(contentBuffer);

  return { bucket: targetStorageRef.bucket.name, name: targetName };
});
In addition to returning a bucket-name pair to the caller, you may also consider returning an access URL. This could be a signed URL that can last for up to 7 days, a download token URL (like getDownloadURL(), process described here) that lasts until the token is revoked, a Google Storage URI (gs://BUCKET_NAME/FILE_NAME) (not an access URL, but it can be passed to a client SDK, which can access the file if the client passes the storage security rules), or the file's public URL (after the file has been marked public).
Based on the above code, you should be able to merge in returning the file directly yourself.
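As an illustration of the first of those options, here is a minimal sketch of producing a signed URL for the uploaded file (it assumes the targetStorageRef and targetName variables from the code above):
// Generate a V4 signed URL that allows read access for up to 7 days (the V4 maximum).
const [signedUrl] = await targetStorageRef.getSignedUrl({
  version: "v4",
  action: "read",
  expires: Date.now() + 7 * 24 * 60 * 60 * 1000,
});

// Return it alongside the bucket-name pair so the caller can download the file directly.
return { bucket: targetStorageRef.bucket.name, name: targetName, url: signedUrl };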

req object with express causes UnhandledPromiseRejectionWarning

I have adapted the below code from Google's Datastore tutorial. I'm vaguely aware of Promises, and I'm using await where I can figure out how to do so. I've used the req object with Express as below without incident, but it seems to be empty here.
It causes this error:
2021-04-16 04:55:03 default[20210415t215354] (node:10) UnhandledPromiseRejectionWarning: TypeError: Cannot read property 'name' of undefined
[SNIP]
name is a reference to a param I'm specifying like this:
curl -X POST https://MYURL/gizmos --data 'name=foo&type=bar&len=29'
What am I doing wrong? How do I avoid this?
/**
 * @param {object} gizmo The gizmo record to insert.
 */
const insertGizmo = gizmo => {
  return datastore.save({
    key: datastore.key('gizmo'),
    data: gizmo,
  });
};

/**
 * Retrieve the latest gizmo
 */
const getLastGizmo = () => {
  const query = datastore
    .createQuery('gizmo')
    .order('createTime', { descending: true })
    .limit(1);
  return datastore.runQuery(query).then((entities) => {
    return entities[0].map(fromDatastore);
  });
};

// Create a Gizmo
app.post('/gizmos', async (req, res, next) => {
  const createTime = new Date();
  const gizmo = {
    name: req.body.name,
    type: req.body.type,
    length: req.body.len,
    createTime: createTime,
  };
  try {
    await insertGizmo(gizmo);
    const newGizmo = await getLastGizmo();
    //const [newGizmos] = await getLastGizmo();
    const gizmoUrl = "https://MYURL/gizmos/" + newGizmo.id;
    const resText = {
      "id": newGizmo.id,
      "name": newGizmo.name,
      "type": newGizmo.type,
      "length": newGizmo.length,
      "self": gizmoUrl,
    };
    res.status(201).set('Content-Type', 'text/plain').send(resText).end();
  } catch (error) {
    next(error);
  }
});
Per @hoangdv, add app.use(express.urlencoded()) somewhere before your route handlers. Rather Node-ish not to have it as the default, if you ask me.
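For example, a minimal sketch of where that middleware goes relative to the route from the question:
const express = require('express');
const app = express();

// Parse application/x-www-form-urlencoded bodies (what the curl --data call sends),
// so that req.body.name, req.body.type and req.body.len are populated.
app.use(express.urlencoded({ extended: true }));

app.post('/gizmos', async (req, res, next) => {
  // ... handler from the question ...
});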
