How to mock Express response object in Node tests using Mocha - node.js

I need to test a function that takes the response as an argument. I'm using Mocha and Sequelize in a Node environment with the Express framework. My code is split across multiple files, but I only need to test the superactivator instance. These are my files:
route.js
const express = require('express');
const router = express.Router();
const rcv_pcController = require('../controllers/rcv_pc.controller');
router.post('/', rcv_pcController.switchMode);
module.exports = router;
rcv_pc.controller.js
const superActivator = require('../helpers/superactivator');
...
const switchMode = async (req, res) => {
  ...
  switch (tipo) {
    case "1": {
      if (modo == "1") {
        superActivator.checkLicense(license, hwId, oem, expire, nowDate, ip, allowedSerials, res)
      } else if (modo == "2") {
        superActivator.generateLicense(license, hwId, reqCode, nowDate, ip, res);
      } else if (modo == "3") {
        superActivator.registerLicense(license, hwId, reqCode, nowDate, customerName, referenteName, referentePhone, ip, res)
      }
      break;
    }
  }
}
module.exports.switchMode = switchMode;
superactivator.js
const pcRepo = require("../repositories/pc.server.repository");
const repository = require('../repositories/rcvpc.server.repository');
...
class SuperActivator {
  ...
  checkLicense(license, hwId, oem, expDate, nowDate, ip, allowedSerials, res) {
    repository.findLicense(license).then(key => {
      // console.log(key[0]);
      if (key[0]) {
        if (!isset(key[0]['SS_ALLOWED_SERIALS']) || is_null(key[0]['SS_ALLOWED_SERIALS'])) {
          key[0]['SS_ALLOWED_SERIALS'] = "";
        }
        if (this.updatePcRx(hwId, ip, nowDate) == 0) {
          return res.send(this.licCheckResult.server_error);
        }
        if (this.checksetBanned(hwId) == 0) {
          return res.send(this.licCheckResult.hwid_banned);
        }
        if (!isset(key[0]['SP_HW_ID'])) {
          return res.send(this.licCheckResult.key_virgin);
        }
        if (key[0]['SS_STATUS'] < 1) {
          return res.send(this.licCheckResult.key_unallowed);
        }
        if (key[0]['SP_HW_ID'] != hwId) {
          if (this.setKeyMismatched(key[0]['SS_ID']) == 1) {
            return res.send(this.licCheckResult.key_moved);
          }
        }
        if ((strtotime(key[0]['SS_EXPIRE']) < strtotime(expDate))
          || (strtotime(nowDate) < strtotime(key[0]['SP_PC_DATE_TIME']))
          || (strtotime(nowDate) < time() - 60 * 60 * 24 * 2)) {
          if (this.setKeyMismatched(key[0]['SS_ID']) == 1) {
            return res.send(this.licCheckResult.dates_hacked);
          } else {
            return res.send(this.licCheckResult.server_error);
          }
        }
        if ((key[0]['SS_OEM'] != oem)
          || (strtotime(key[0]['SS_EXPIRE']) > strtotime(expDate) || strcmp(key[0]['SS_ALLOWED_SERIALS'], this.decodeToMortal(allowedSerials)) != 0)) {
          return res.send(this.licCheckResult.key_info_to_update);
        }
        if (strtotime(key[0]['SS_EXPIRE']) <= strtotime(nowDate)) {
          return res.send(this.licCheckResult.key_expired);
        }
        return res.send(this.licCheckResult.key_ok);
      } else {
        return res.send(this.licCheckResult.key_insesistente);
      }
    }).catch(err => res.send(err.errors));
  }
  generateLicense(license, hwId, reqCode, nowDate, ip, res) {
    pcRepo.updatePcRx(hwId, ip, nowDate);
    repository.findOem(license, hwId)
      .then((foundOem) => {
        if (foundOem[0]) {
          const keyCode = this.generateValidKey(this.decodeToMortal(reqCode));
          let patchKey = keyCode;
          if (keyCode.length != 10 || this.checkValidKey(keyCode, patchKey) == 'KO') {
            return res.send(this.licCheckResult.invalid_reqcode);
          } else {
            let oem = '';
            switch (foundOem[0]['SS_OEM']) {
              case 0:
                oem = 'thisisnotoem'
                break;
              case 1:
                oem = 'thisisoem'
                break;
              case 2:
                oem = 'thisisoemdoc'
                break;
              case 3:
                oem = 'thisislock'
                break;
              case 10:
                oem = 'thisisnotoem_lecu'
                break;
              case 11:
                oem = 'thisisdemo_lecu'
                break;
              case 12:
                oem = 'thisisoem_lecu'
                break;
            }
            const keepDate = str_replace('-', "", foundOem[0]['SS_EXPIRE'])
            const allowedSerials = this.getAllowedSerials(foundOem[0]['SS_ID'])
            console.log(allowedSerials);
            const key =
              this.codeToGod(keyCode) + '|'
              + this.codeToGod(patchKey) + '|'
              + this.codeToGod(oem) + '|'
              + this.codeToGod(keepDate) + '|'
              + this.codeToGod(allowedSerials);
            // console.log(key);
            return res.send(key);
          }
        } else {
          return res.send(this.licCheckResult.key_insesistente);
        }
      }).catch(err => res.send(err.errors));
  }
  registerLicense(license, hwId, reqKey, pcDate, customerName, referenteName, referentePhone, ip, res) {
    pcRepo.findOne(hwId)
      .then((pc) => {
        let pcId = '';
        if (!pc) {
          const data = {
            SP_HW_ID: hwId,
            SP_LAST_RX: Date.now(),
            SP_IP: ip,
            SP_PC_DATE_TIME: new Date().toISOString().slice(0, 10)
          }
          pcRepo.create(data)
            .then((newPc) => {
              if (!newPc) {
                return res.send(this.licCheckResult.server_error);
              }
              return pcId = newPc['SP_ID']
            }).catch(err => res.send(err.errors));
        } else {
          pcId = pc['SP_ID']
        }
        console.log(pcId);
        if (isset(pcId) || pcId.length == 0 || pcId == 0) {
          return res.send(this.licCheckResult.server_error);
        }
        repository.updateLicense(pcId, customerName, referenteName, referentePhone, license)
          .spread((results, metadata) => {
            if (!results) {
              return res.send(this.licCheckResult.server_error);
            }
            return this.generateLicense(license, hwId, reqKey, pcDate, ip, res);
          }).catch(err => res.send(err.errors))
      }).catch(err => res.send(err.errors));
  }
}
...
const superActivator = new SuperActivator(licCheckResult);
module.exports = superActivator;
For the sake of clarity I omit all the other SuperActivator class methods. This is my test code:
superactivator.spec.js
...
const superactivator = require('../helpers/superactivator');
...
describe('checkLicense()', function () {
  it('sks', async function () {
    // here I mock the request.body data
    const ip = '127.0.0.1';
    const license = "A2YyLM8i3G7feJt7Hlm8hxlYk";
    const hwId = "123490EN40";
    const oem = 12;
    const expDate = "2019-10-10";
    const nowDate = null; // "2018-09-05"
    const allowedSerials = null;
    const foundSks = await superactivator.checkLicense(license, hwId, oem, expDate, nowDate, ip, allowedSerials, res)
    assert.equal(foundSks, '1');
  });
});
I get the error
ReferenceError: res is not defined
at Context. (test\superactivator.spec.js:23:130)
because I do not have res in the test file. How can I solve this problem? I simply need to mock the response so I can check the returned values.
Thanks for help!

Maybe this quick solution will be helpful for someone:
import { Response } from 'express';

const mockedResponse = {
  // mock props, methods you use
  setHeader: jest.fn(),
} as unknown as Response;
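Once the mock is in place you can assert on it like any other Jest mock (the header name and value here are just examples):
expect(mockedResponse.setHeader).toHaveBeenCalledWith('Content-Type', 'application/json');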

You can use Sinon to spy on/stub res from Express, i.e.:
...
const superactivator = require('../helpers/superactivator');
const sinon = require('sinon'); // ----> use sinon
...
describe('checkLicense()', function () {
  it('sks', async function () {
    // here I mock the request.body data
    const ip = '127.0.0.1';
    const license = "A2YyLM8i3G7feJt7Hlm8hxlYk";
    const hwId = "123490EN40";
    const oem = 12;
    const expDate = "2019-10-10";
    const nowDate = null; // "2018-09-05"
    const allowedSerials = null;
    const res = {
      send: sinon.spy() // --> create spy for res.send method
    }
    const foundSks = await superactivator.checkLicense(license, hwId, oem, expDate, nowDate, ip, allowedSerials, res)
    assert.equal(foundSks, '1');
  });
});
Ref:
https://sinonjs.org/releases/v6.2.0/spies/
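Note that checkLicense writes its result through res.send rather than returning it, so the awaited call above resolves to undefined. A more robust pattern is to assert on the spy itself — a minimal sketch, assuming checkLicense is changed to return the repository promise so the test can await completion, and that key_ok is the expected outcome for this input:
const res = { send: sinon.spy() };
await superactivator.checkLicense(license, hwId, oem, expDate, nowDate, ip, allowedSerials, res);
// assert on what was sent instead of on a return value
sinon.assert.calledOnce(res.send);
assert.equal(res.send.firstCall.args[0], superactivator.licCheckResult.key_ok);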

Related

How to get around the mkdir error in node js? I have been trying to figure it out for hours but to no avail

The error thrown in the cmd is shown in the linked screenshot:
Error as shown on cmd
Below is the code for connection-manager.js as shown in the error log, specifically at line 47:
"use strict";
const fs = require("fs");
const path = require("path");
const AbstractConnectionManager = require("../abstract/connection-manager");
const { logger } = require("../../utils/logger");
const debug = logger.debugContext("connection:sqlite");
const dataTypes = require("../../data-types").sqlite;
const sequelizeErrors = require("../../errors");
const parserStore = require("../parserStore")("sqlite");
const { promisify } = require("util");
class ConnectionManager extends AbstractConnectionManager {
constructor(dialect, sequelize) {
super(dialect, sequelize);
if (this.sequelize.options.host === "localhost") {
delete this.sequelize.options.host;
}
this.connections = {};
this.lib = this._loadDialectModule("sqlite3");
this.refreshTypeParser(dataTypes);
}
async _onProcessExit() {
await Promise.all(Object.getOwnPropertyNames(this.connections).map((connection) => promisify((callback) => this.connections[connection].close(callback))()));
return super._onProcessExit.call(this);
}
_refreshTypeParser(dataType) {
parserStore.refresh(dataType);
}
_clearTypeParser() {
parserStore.clear();
}
async getConnection(options) {
options = options || {};
options.uuid = options.uuid || "default";
if (!!this.sequelize.options.storage !== null && this.sequelize.options.storage !== void 0) {
options.storage = this.sequelize.options.storage;
} else {
options.storage = this.sequelize.options.host || ":memory:";
}
options.inMemory = options.storage === ":memory:" ? 1 : 0;
const dialectOptions = this.sequelize.options.dialectOptions;
const defaultReadWriteMode = this.lib.OPEN_READWRITE | this.lib.OPEN_CREATE;
options.readWriteMode = dialectOptions && dialectOptions.mode || defaultReadWriteMode;
if (this.connections[options.inMemory || options.uuid]) {
return this.connections[options.inMemory || options.uuid];
}
if (!options.inMemory && (options.readWriteMode & this.lib.OPEN_CREATE) !== 0) {
fs.mkdirSync(path.dirname(options.storage), { recursive: true });//line 47
}
const connection = await new Promise((resolve, reject) => {
this.connections[options.inMemory || options.uuid] = new this.lib.Database(options.storage, options.readWriteMode, (err) => {
if (err)
return reject(new sequelizeErrors.ConnectionError(err));
debug(`connection acquired ${options.uuid}`);
resolve(this.connections[options.inMemory || options.uuid]);
});
});
if (this.sequelize.config.password) {
connection.run(`PRAGMA KEY=${this.sequelize.escape(this.sequelize.config.password)}`);
}
if (this.sequelize.options.foreignKeys !== false) {
connection.run("PRAGMA FOREIGN_KEYS=ON");
}
return connection;
}
releaseConnection(connection, force) {
if (connection.filename === ":memory:" && force !== true)
return;
if (connection.uuid) {
connection.close();
debug(`connection released ${connection.uuid}`);
delete this.connections[connection.uuid];
}
}
}
module.exports = ConnectionManager;
module.exports.ConnectionManager = ConnectionManager;
module.exports.default = ConnectionManager;
//# sourceMappingURL=connection-manager.js.map
**The error begins at line 47** as shown in the code snippet. Please, I really do need help with this, as it has been a huge cause of delay in accomplishing the given task.
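For what it's worth, the mkdirSync call at that line creates the directory part of options.storage, so this error usually means the configured storage path is empty, malformed, or points somewhere the process cannot create directories. A minimal sketch of an explicit SQLite configuration (the data/database.sqlite path is just an example):
const path = require("path");
const { Sequelize } = require("sequelize");

const sequelize = new Sequelize({
  dialect: "sqlite",
  // an absolute path whose parent directory can actually be created
  storage: path.join(__dirname, "data", "database.sqlite"),
});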

TinyMCE text is not being saved completely

I am making an online text editor that saves the content to a database while typing and returns it to the textarea, using TinyMCE, React as the frontend, and Node+Express as the backend.
Sometimes it doesn't send the whole content to the database, and I have to switch tabs, return to the editor, and type a space character so it sends the remaining content.
I tried using "onKeyUp" and "onKeyDown", but the characters got immediately deleted after trying to type; currently I'm using "onEditorChange" to trigger the function.
Here's my code:
function App() {
const editorRef = useRef();
const { route } = useParams();
const url = "https://API/" + route;
const { httpConfig } = useFetch(url);
const [notas, setNotas] = useState("");
const [rota, setRota] = useState("");
const [init, setInit] = useState(false);
useEffect(() => {
(async () => {
let res = "";
let data = "";
if (init) {
const notinhas = {
content: notas,
rota: route,
};
httpConfig(notinhas, "PATCH");
} else {
res = await fetch(url);
data = await res.json();
if (data === null) {
const notinhas2 = {
content: "",
rota: route,
};
httpConfig(notinhas2, "POST");
} else {
setNotas(data.content);
}
}
})();
}, [notas, init]);
function onClickHandler() {
setNotas(editorRef.current.getContent());
}
return (
<div>
<Editor
onEditorChange={onClickHandler}
onInit={ (evt, editor) =>
{editorRef.current = editor;
editor.selection.select(editor.getBody(), true);
editor.selection.collapse(false);
setInit(true);
}}
init={{
plugins: "...",
toolbar: "...",
setup: function(editor) {
editor.on('keydown', function() {
var currNode = editor.selection.getNode();
var currText = currNode.textContent;
if (currNode.nodeName.toLowerCase() != 'p' || currText.length < 1) return;
var pCount = editor.dom.doc.querySelectorAll('p').length;
setTimeout(function() {
var newNode = editor.selection.getNode();
var newText = newNode.textContent;
var newPCount = editor.dom.doc.querySelectorAll('p').length;
if (pCount + 1 == newPCount && currText == newText) {
var nextNode = newNode.nextElementSibling;
var nextNodeText = nextNode.textContent;
if (nextNode.nodeName.toLowerCase() != 'p' || nextNodeText.length > 1) return;
nextNode.innerHTML = '<br>';
editor.selection.setCursorLocation(nextNode, 0);
}
}, 80);
});
editor.on('init', function()
{
this.getDoc().body.style.fontSize = '12pt';
this.getDoc().body.style.fontFamily = 'monospace';
});
}}}
value={notas}
/>
</div>
);
}
export default App;
Could someone help me with this?
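Since the save fires on every editor change, one pattern worth trying is to debounce the PATCH so the full, latest content is persisted once typing pauses — a sketch of replacing the PATCH branch of the useEffect above, assuming httpConfig performs the request as in the original code:
useEffect(() => {
  if (!init) return;
  // wait until typing pauses, then persist the latest content once
  const timer = setTimeout(() => {
    httpConfig({ content: notas, rota: route }, "PATCH");
  }, 500);
  return () => clearTimeout(timer);
}, [notas, init]);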

Array push gives empty result node js

I am creating an API for listing trip data with image and PDF base URLs.
Everything is working fine, but I cannot access the final result array data_to_send outside the for loop.
app.js
app.get("/getChallanList/:userId/:role", (req, res) => {
const userData = req.params;
let site_source = "";
let site_destination = "";
var site_from_name = "";
const data_to_send = [];
if (userData.role == "D") {
db.select("trip", "*", `driver_id = '${req.params.userId}'`, (data) => {
for (let i = 0; i < data.data.length; i++) {
site_source = data.data[i].site_from;
site_destination = data.data[i].site_to;
db.select(
"site",
"*",
`id in ('${site_source}','${site_destination}')`,
(data_site) => {
data.data[i].site_from = data_site.data[0].name;
data.data[i].site_to = data_site.data[1].name;
if (data.data[i].truck_challan_pdf != "") {
data.data[i].truck_challan_pdf =
base_url + "truckchallan/" + data.data[i].truck_challan_pdf;
}
if (data.data[i].driver_challan_pdf != "") {
data.data[i].driver_challan_pdf =
base_url + "driverchallan/" + data.data[i].driver_challan_pdf;
}
if (data.data[i].preparer_img != "") {
data.data[i].preparer_img = base_url + data.data[i].preparer_img;
}
if (data.data[i].driver_img != "") {
data.data[i].driver_img = base_url + data.data[i].driver_img;
}
data_to_send.push(data.data);
// console.log(data_to_send); // working
}
);
}
console.log(data_to_send); // empty
});
}
});
db.select
let select = (table, column, condition, callback) => {
try {
let sql = "SELECT " + column + " FROM " + table + " WHERE " + condition;
conn.query(sql, (err, results) => {
if (err) {
let data = {
status: 0,
data: sql,
message: "Something went wrong!",
};
callback(data);
} else {
let data = {
status: 1,
data: results,
message: "Success",
};
callback(data);
}
});
} catch (err) {
let data = {
status: 0,
data: err,
message: "In catch",
};
callback(data);
}
};
Here is the async/await version I tried:
app.get("/getChallanList/:userId/:role", async (req, res) => {
const userData = req.params;
let site_source = "";
let site_destination = "";
var site_from_name = "";
const data_to_send = [];
if (userData.role == "D") {
await db.select(
"trip",
"*",
`driver_id = '${req.params.userId}'`,
async (data) => {
// const data_to_send_ = [];
for (let i = 0; i < data.data.length; i++) {
site_source = data.data[i].site_from;
site_destination = data.data[i].site_to;
await db.select(
"site",
"*",
`id in ('${site_source}','${site_destination}')`,
(data_site) => {
data.data[i].site_from = data_site.data[0].name;
data.data[i].site_to = data_site.data[1].name;
if (data.data[i].truck_challan_pdf != "") {
data.data[i].truck_challan_pdf =
base_url + "truckchallan/" + data.data[i].truck_challan_pdf;
}
if (data.data[i].driver_challan_pdf != "") {
data.data[i].driver_challan_pdf =
base_url + "driverchallan/" + data.data[i].driver_challan_pdf;
}
if (data.data[i].preparer_img != "") {
data.data[i].preparer_img =
base_url + data.data[i].preparer_img;
}
if (data.data[i].driver_img != "") {
data.data[i].driver_img = base_url + data.data[i].driver_img;
}
data_to_send.push(data.data);
// console.log(data_to_send); // working
}
);
// data_to_send_.push(data_to_send);
}
console.log(data_to_send); // empty
}
);
}
});
This is because of the asynchronous behavior of Node.js, so you have to plan things accordingly, i.e.:
console.log(1)
db.select(
"trip",
"*",
`driver_id = '${req.params.userId}'`,
async (data) => {
console.log(2)
})
console.log(3)
The output of the above code would be 1, then 3, and then 2. This is how Node.js works: it does not wait for I/O calls, i.e. the DB query in your case.
Please check how promises work in Node.js for more details.
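For example, the callback-style db.select can be wrapped in a promise once and then awaited anywhere — a sketch assuming the single-argument callback shown above:
const selectAsync = (table, column, condition) =>
  new Promise((resolve) => db.select(table, column, condition, resolve));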
Here is how you can accomplish your task:
const challanList = (userData) => {
  return new Promise((resolve, reject) => {
    const data_to_send = [];
    db.select("trip", "*", `driver_id = '${userData.userId}'`, data => {
      // count the pending site lookups so we only resolve after the last one finishes
      let pending = data.data.length;
      if (pending === 0) return resolve(data_to_send);
      for (let i = 0; i < data.data.length; i++) {
        const site_source = data.data[i].site_from;
        const site_destination = data.data[i].site_to;
        db.select("site", "*", `id in ('${site_source}','${site_destination}')`, data_site => {
          data.data[i].site_from = data_site.data[0].name;
          data.data[i].site_to = data_site.data[1].name;
          if (data.data[i].truck_challan_pdf != "") {
            data.data[i].truck_challan_pdf = base_url + "truckchallan/" + data.data[i].truck_challan_pdf;
          }
          if (data.data[i].driver_challan_pdf != "") {
            data.data[i].driver_challan_pdf = base_url + "driverchallan/" + data.data[i].driver_challan_pdf;
          }
          if (data.data[i].preparer_img != "") {
            data.data[i].preparer_img = base_url + data.data[i].preparer_img;
          }
          if (data.data[i].driver_img != "") {
            data.data[i].driver_img = base_url + data.data[i].driver_img;
          }
          data_to_send.push(data.data[i]);
          if (--pending === 0) resolve(data_to_send); // all rows processed
        });
      }
    });
  });
};
app.get("/getChallanList/:userId/:role", async (req, res) => {
const userData = req.params;
const challanListResult = await challanList(userData);
console.log(challanListResult);
res.json(challanListResult);
});
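An equivalent way to wait for the inner lookups is to use the selectAsync wrapper sketched earlier and let Promise.all do the counting:
app.get("/getChallanList/:userId/:role", async (req, res) => {
  const data = await selectAsync("trip", "*", `driver_id = '${req.params.userId}'`);
  const data_to_send = await Promise.all(
    data.data.map(async (row) => {
      const site = await selectAsync("site", "*", `id in ('${row.site_from}','${row.site_to}')`);
      row.site_from = site.data[0].name;
      row.site_to = site.data[1].name;
      // ...same base_url mapping as above...
      return row;
    })
  );
  res.json(data_to_send);
});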
Without knowing what database or ORM you are using it is difficult to answer, but my suspicion is that db.select is an asynchronous method, i.e. it returns a Promise. If so, the second console log still sees the "old" data_to_send.
Try adding an await in front of the first db.select call. (Don't forget the async in front of the callback in the second argument of app.get.)
Your database call is asynchronous, so console.log(data_to_send) runs before the query has finished executing. Try adding async before (req, res) in line 1, then await before db.select.
This works for me
app.get("/getChallanList/:userId/:role", async (req, res) => {
const userData = req.params;
let site_source = "";
let site_destination = "";
var site_from_name = "";
const data_to_send = [];
if (userData.role == "D") {
const data = await db.query(
`SELECT * FROM trip WHERE driver_id = '${req.params.userId}'`
);
// console.log(data.length);
// const data_to_send_ = [];
for (let i = 0; i < data.length; i++) {
site_source = data[i].site_from;
site_destination = data[i].site_to;
// cons
const site_data = await db.query(
`SELECT * FROM site WHERE id in ('${site_source}','${site_destination}')`
);
// console.log(site_data);
db.select(
"site",
"*",
`id in ('${site_source}','${site_destination}')`,
(data_site) => {
data[i].site_from = data_site.data[0].name;
data[i].site_to = data_site.data[1].name;
if (data[i].truck_challan_pdf != "") {
data[i].truck_challan_pdf =
base_url + "truckchallan/" + data[i].truck_challan_pdf;
}
if (data[i].driver_challan_pdf != "") {
data[i].driver_challan_pdf =
base_url + "driverchallan/" + data[i].driver_challan_pdf;
}
if (data[i].preparer_img != "") {
data[i].preparer_img = base_url + data[i].preparer_img;
}
if (data[i].driver_img != "") {
data[i].driver_img = base_url + data[i].driver_img;
}
data_to_send.push(data);
// console.log(data.data);
// console.log(data_to_send); // working
}
);
// data_to_send_.push(data_to_send);
}
// console.log(data_to_send);
// console.log(data_to_send);
res.send({ success: 1, data: data, message: "" });
}

AWS Lambda (NodeJS) does not log to cloudwatch

I'm trying to add logging to my Lambda app after following serverless-next.js, because of the issue where I can't go to the root of my file. So basically I'm deploying a Next.js app on AWS through Lambda@Edge, S3, and CloudFront.
I'm new to AWS, so I'm not really sure how to debug this at all. I assumed a traditional console.log in my Lambda, where every request comes in, would log to CloudWatch. I also made sure that I deployed my Lambda to my CloudFront distribution.
Here's the code:
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.handler = void 0;
const prerender_manifest_json_1 = __importDefault(require("./prerender-manifest.json"));
const manifest_json_1 = __importDefault(require("./manifest.json"));
const next_aws_cloudfront_1 = __importDefault(require("#sls-next/next-aws-cloudfront"));
const addS3HostHeader = (req, s3DomainName) => {
req.headers["host"] = [{ key: "host", value: s3DomainName }];
};
const isDataRequest = (uri) => uri.startsWith("/_next/data");
const normaliseUri = (uri) => (uri === "/" ? "/index" : uri);
const normaliseS3OriginDomain = (s3Origin) => {
if (s3Origin.region === "us-east-1") {
return s3Origin.domainName;
}
if (!s3Origin.domainName.includes(s3Origin.region)) {
const regionalEndpoint = s3Origin.domainName.replace("s3.amazonaws.com", `s3.${s3Origin.region}.amazonaws.com`);
return regionalEndpoint;
}
return s3Origin.domainName;
};
const router = (manifest) => {
const { pages: { ssr, html } } = manifest;
const allDynamicRoutes = Object.assign(Object.assign({}, ssr.dynamic), html.dynamic);
return (uri) => {
let normalisedUri = uri;
if (isDataRequest(uri)) {
normalisedUri = uri
.replace(`/_next/data/${manifest.buildId}`, "")
.replace(".json", "");
}
if (ssr.nonDynamic[normalisedUri]) {
return ssr.nonDynamic[normalisedUri];
}
console.log(uri);
for (const route in allDynamicRoutes) {
const { file, regex } = allDynamicRoutes[route];
const re = new RegExp(regex, "i");
const pathMatchesRoute = re.test(normalisedUri);
if (pathMatchesRoute) {
return file;
}
}
if (html.nonDynamic["/404"] !== undefined) {
return "pages/404.html";
}
return "pages/_error.js";
};
};
exports.handler = (event) => __awaiter(void 0, void 0, void 0, function* () {
const request = event.Records[0].cf.request;
const uri = normaliseUri(request.uri);
const manifest = manifest_json_1.default;
const prerenderManifest = prerender_manifest_json_1.default;
const { pages, publicFiles } = manifest;
const isStaticPage = pages.html.nonDynamic[uri];
const isPublicFile = publicFiles[uri];
const isPrerenderedPage = prerenderManifest.routes[request.uri];
const origin = request.origin;
const s3Origin = origin.s3;
const isHTMLPage = isStaticPage || isPrerenderedPage;
const normalisedS3DomainName = normaliseS3OriginDomain(s3Origin);
s3Origin.domainName = normalisedS3DomainName;
if (isHTMLPage || isPublicFile) {
s3Origin.path = isHTMLPage ? "/static-pages" : "/public";
addS3HostHeader(request, normalisedS3DomainName);
if (isHTMLPage) {
request.uri = `${uri}.html`;
}
return request;
}
const pagePath = router(manifest)(uri);
if (pagePath.endsWith(".html")) {
s3Origin.path = "/static-pages";
request.uri = pagePath.replace("pages", "");
addS3HostHeader(request, normalisedS3DomainName);
return request;
}
const page = require(`./${pagePath}`);
const { req, res, responsePromise } = next_aws_cloudfront_1.default(event.Records[0].cf);
if (isDataRequest(uri)) {
const { renderOpts } = yield page.renderReqToHTML(req, res, "passthrough");
res.setHeader("Content-Type", "application/json");
res.end(JSON.stringify(renderOpts.pageData));
}
else {
page.render(req, res);
}
return responsePromise;
});
Permissions:
Allow: logs:CreateLogGroup
Allow: logs:CreateLogStream
Allow: logs:PutLogEvents
What else should I do? Should I create a new stream, or is it created automatically? I can see a log group in my CloudWatch named aws/lambda, but I'm not sure how to connect them.
Really appreciate any help
Cheers
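One thing worth checking: Lambda@Edge functions write their logs to CloudWatch in the region where they executed (i.e. close to the viewer), not necessarily us-east-1, under log group names like /aws/lambda/us-east-1.<function-name>. A quick way to scan a given region for them with the AWS CLI (the region and prefix below are examples):
aws logs describe-log-groups --log-group-name-prefix "/aws/lambda/us-east-1." --region eu-west-1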

Cannot get Google AI prediction result using JWT credential using node js

I am planning to get an AI prediction result using a service account, deploying a cloud function to a Firebase project. When trying to get the prediction result from
https://ml.googleapis.com/v1/projects/projectid/models/category:predict?
using an access token (JWT), the result is
{ StatusCodeError: 403 - {"error":{"code":403,"message":"Access to model denied.","status":"PERMISSION_DENIED"}}
It is confirmed that the service account I'm using has been added to the ML project.
Any idea how to get the ML result in a Firebase function using a service account, or some other method?
Here is the code (I am still a newbie to Node.js):
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const admin = require('firebase-admin');
const request = require("request");
const requestProm = require("request-promise");
const functions = require('firebase-functions');
const { GoogleAuth } = require('google-auth-library');
admin.initializeApp();
var reportFld, reportNarTr, reportTitTr;
var input, input2, input3;
var result, predictedHaz, predictedSig, predictedRep, setDoc
var getAccessTokenId
getAccessTokenId = async function main() {
const auth = new GoogleAuth({
scopes: 'https://www.googleapis.com/auth/cloud-platform'
});
const client = await auth.getClient();
const projectId = await auth.getProjectId();
const accessTokenId = await auth.getAccessToken();
return accessTokenId
}
exports.newReport = functions.firestore
.document('/users/{usersId}')
.onCreate((change, context) => {
const db = admin.firestore();
const interDoc = db.collection('users').doc(context.params.usersId);
interDoc.get().then(doc => {
if (!doc.exists) {
console.log('No such document!');
} else {
var getPrediction
getPrediction = async function main2() {
reportFld = doc.data();
reportNarTr = JSON.stringify(reportFld.narrative);
reportTitTr = JSON.stringify(reportFld.title);
reportNumTr = context.params.usersId;
input = {
instances: [
[reportNumTr, reportTitTr, reportNarTr]
]
};
var accessToken = await getAccessTokenId();
var endpointhazCat = 'https://ml.googleapis.com/v1/projects/projectid/models/hazcat:predict?access_token=' + accessToken;
var endpointsigCat = 'https://ml.googleapis.com/v1/projects/projectid/models/sig:predict?access_token=' + accessToken;
var endpointrepCat = 'https://ml.googleapis.com/v1/projects/projectid/models/type:predict?access_token=' + accessToken;
var options1 = {
method: 'POST',
uri: endpointhazCat,
body: input,
json: true // Automatically stringifies the body to JSON
};
var options2 = {
method: 'POST',
uri: endpointsigCat,
body: input,
json: true // Automatically stringifies the body to JSON
};
var options3 = {
method: 'POST',
uri: endpointrepCat,
body: input,
json: true // Automatically stringifies the body to JSON
};
requestProm.post(options1)
.then(function (response) {
result = response['predictions'];
switch (parseInt(result)) {
case 0:
predictedHaz = 'A';
break;
case 1:
predictedHaz = 'B';
break;
case 2:
predictedHaz = 'C';
break;
case 3:
predictedHaz = 'D';
break;
case 4:
predictedHaz = 'E';
break;
case 5:
predictedHaz = 'F';
break;
case 6:
predictedHaz = 'G';
break;
default:
predictedHaz = 'error';
}
const predictedHazData = {
HazardCategory: predictedHaz,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedHazData);
console.log(response);
return true
})
.catch(function (err) {
console.log('Failed', err)
});
requestProm.post(options2)
.then(function (response) {
result = response['predictions'];
if (parseInt(result) > -4) {
predictedSig = 'Sig';
} else predictedSig = 'Insig'
const predictedSigData = {
SignifanceCategory: predictedSig,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedSigData);
console.log(response);
return true
})
.catch(function (err) {
console.log('Failed', err)
});
requestProm.post(options3)
.then(function (response) {
result = response['predictions'];
if (parseInt(result) === 1) {
predictedRep = 'Inc';
} else predictedRep = 'Haz'
const predictedRepData = {
ReportCategory: predictedRep,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedRepData);
console.log(response);
return true
})
.catch(function (err) {
console.log('Failed', err)
});
return true
}
getPrediction().catch(console.error);
} return null
})
.catch(err => {
console.log('Error getting document', err);
});
return true;
});
Added some details:
These are the service account permissions:
ml.jobs.cancel
ml.jobs.create
ml.jobs.get
ml.jobs.getIamPolicy
ml.jobs.list
ml.jobs.update
ml.locations.get
ml.locations.list
ml.models.create
ml.models.delete
ml.models.get
ml.models.getIamPolicy
ml.models.list
ml.models.predict
ml.models.update
ml.operations.cancel
ml.operations.get
ml.operations.list
ml.projects.getConfig
ml.studies.create
ml.studies.delete
ml.studies.get
ml.studies.getIamPolicy
ml.studies.list
ml.trials.create
ml.trials.delete
ml.trials.get
ml.trials.list
ml.trials.update
ml.versions.create
ml.versions.delete
ml.versions.get
ml.versions.list
ml.versions.predict
ml.versions.update
resourcemanager.projects.get
I have tried to use another node library, 'googleapis', in the debug console:
google.auth.getApplicationDefault((err, authClient, projectId) => {
if (err) {
console.log('Authentication failed because of ', err);
res.status(401).send('Authentication failed');
} else {
// create the full model name which includes the project ID
const modelName = 'projects/ml-project-id/models/hazcat';
const mlRequestJson = {
'auth': authClient,
'name': modelName,
'resource': { instances: [['RepNum', 'RepTit', 'RepNar']]
}
}
ml.projects.predict(mlRequestJson, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
}
});
}
});
and the result is:
3
and deployed to Firebase:
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const admin = require('firebase-admin');
const request = require("request");
const requestProm = require("request-promise");
const functions = require('firebase-functions');
const { GoogleAuth } = require('google-auth-library');
const { google } = require('googleapis');
const ml = google.ml('v1');
admin.initializeApp();
var reportFld, reportNarTr, reportTitTr, reportNumTr, reportTitStr, reportNarStr;
var input, input2, input3;
var result, predictedHaz, predictedSig, predictedRep, setDoc
exports.predictReport = functions.firestore
.document('/users/{usersId}')
.onCreate((change, context) => {
const db = admin.firestore();
const interDoc = db.collection('users').doc(context.params.usersId);
interDoc.get().then(doc => {
if (!doc.exists) {
console.log('No such document!');
} else {
reportFld = doc.data();
reportNarTr = JSON.stringify(reportFld.narrative);
reportTitTr = JSON.stringify(reportFld.title);
reportNumTr = context.params.usersId;
input = {
instances: [
[reportNumTr, reportTitTr, reportNarTr]
]
};
var result1, result2, result3
google.auth.getApplicationDefault((err, authClient, projectId) => {
if (err) {
console.log('Authentication failed because of ', err);
res.status(401).send('Authentication failed');
} else {
const modelName = 'projects/ml-project-id/models/hazcat';
const modelName2 = 'projects/ml-project-id/models/sig';
const modelName3 = 'projects/ml-project-id/models/type';
const mlRequestJson1 = {
'auth': authClient,
'name': modelName,
'resource': input
}
const mlRequestJson2 = {
'auth': authClient,
'name': modelName2,
'resource': input
}
const mlRequestJson3 = {
'auth': authClient,
'name': modelName3,
'resource': input
}
var result1, result2, result3
ml.projects.predict(mlRequestJson1, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
result1 = result.data.predictions[0];
switch (parseInt(result1)) {
case 0:
predictedHaz = 'A';
break;
case 1:
predictedHaz = 'B';
break;
case 2:
predictedHaz = 'C';
break;
case 3:
predictedHaz = 'D';
break;
case 4:
predictedHaz = 'E';
break;
case 5:
predictedHaz = 'F';
break;
case 6:
predictedHaz = 'G';
break;
default:
predictedHaz = 'error';
}
const predictedHazData = {
HazardCategory: predictedHaz,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedHazData);
}
}); // endof predict1
ml.projects.predict(mlRequestJson2, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
result2 = result.data.predictions[0];
if (parseInt(result2) > -4) {
predictedSig = 'Sig';
} else predictedSig = 'Insig'
const predictedSigData = {
SignifanceCategory: predictedSig,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedSigData);
}
});// endof predict2
ml.projects.predict(mlRequestJson3, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
result3 = result.data.predictions[0];
if (parseInt(result3) === 1) {
predictedRep = 'Inc';
} else predictedRep = 'Haz'
const predictedRepData = {
ReportCategory: predictedRep,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedRepData);
}
});// endof predict3
}//endof else getappdefault
});//endof getappdefault
} return true
})//endof getdocument
.catch(err => {
console.log('Error getting document', err);
});
return true;
});//endof onCreate
and the result is
Authentication failed because of Error: Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.
at AuthPlus.getApplicationDefaultAsync (/srv/node_modules/googleapis-common/node_modules/google-auth-library/build/src/auth/googleauth.js:156:23)
at <anonymous>
at process._tickDomainCallback (internal/process/next_tick.js:229:7)
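For reference, when code runs somewhere the default credential lookup cannot find a service account (e.g. locally), it needs the GOOGLE_APPLICATION_CREDENTIALS environment variable pointing at a key file — the path below is only an example:
export GOOGLE_APPLICATION_CREDENTIALS="/path/to/service-account.json"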
Added details (update 2)
I have used keyFile pointing to the service account JSON file.
getAccessTokenId = async function main() {
const auth = new GoogleAuth({
keyFile: 'projectid.json',
scopes: 'https://www.googleapis.com/auth/cloud-platform'
});
const client = await auth.getClient();
const projectId = await auth.getProjectId();
const accessTokenId = await auth.getAccessToken();
return accessTokenId
}
and got an access token like this
ya29.c.xxxx
and then permission was denied
Failed { StatusCodeError: 403 - {"error":{"code":403,"message":"Access to model denied.","status":"PERMISSION_DENIED"}}
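A quick sanity check at this point is to confirm which identity the token actually represents, since a token minted for the wrong account would explain the 403 — Google's tokeninfo endpoint decodes it (substitute the real token value):
curl "https://www.googleapis.com/oauth2/v3/tokeninfo?access_token=ya29.c.xxxx"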
Added details (update 3)
Using my personal credentials to access the ML model from inside the cloud function works:
getAccessTokenId = async function main() {
const auth = new GoogleAuth({
keyFile: 'application_default_credentials.json',
scopes: 'https://www.googleapis.com/auth/cloud-platform'
});
const client = await auth.getClient();
const projectId = await auth.getProjectId();
const accessTokenId = await auth.getAccessToken();
return accessTokenId
}
and I get the result
{ predictions: [ 3 ] }
I have also added the Service Account Token Creator role on the service account, but using it to access the ML model still does not work. The service account has:
iam.serviceAccountKeys.create
iam.serviceAccountKeys.delete
iam.serviceAccountKeys.get
iam.serviceAccountKeys.list
But I am curious why it doesn't have these:
iam.serviceAccounts.getAccessToken
iam.serviceAccounts.signBlob
iam.serviceAccounts.signJwt
iam.serviceAccounts.implicitDelegation
iam.serviceAccounts.getOpenIdToken
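Since the message is "Access to model denied" rather than a project-level denial, the model's own IAM policy is worth inspecting as well; note the permission list above even includes ml.models.getIamPolicy. A way to fetch that policy over REST, assuming the same projectid and model name used earlier and a valid access token in $ACCESS_TOKEN:
curl -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://ml.googleapis.com/v1/projects/projectid/models/hazcat:getIamPolicy"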
