Sequelize: Not all values in forEach get inserted into SQLite DB - node.js

I have an SQLite DB that I want to populate with 137k entries retrieved from multiple API calls.
Only about 6k entries end up in my SQLite DB, but if I export the same lines into a text file, the file contains the correct count of 137k lines.
Here is the code for inserting into the database and appending to the text file.
Here is my index.js:
const db = require("./config/database");
const items = require("./models/items");
var fs = require('fs');

const baseURL = 'xxx';
const namespace = 'static-eu';
const region = 'eu';
const client = 'xxx';
const secret = 'xxx';
const tokenURL = 'xxx';

const getToken = async function (client, secret) {
  const body = "grant_type=client_credentials";
  const response = await fetch(tokenURL, {
    method: 'post',
    body: body,
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      'Authorization': 'Basic ' + Buffer.from(client + ':' + secret).toString('base64')
    }
  });
  const data = await response.json();
  const jsondata = JSON.stringify(data);
  const json = JSON.parse(jsondata);
  return json['access_token'];
}

const getItems = async function (ID) {
  const access_token = await getToken(client, secret);
  const reqURL = baseURL + '/data/wow/search/item?namespace=' + namespace + '&_pageSize=1000&orderby=id&id=[' + ID + ',]&access_token=' + access_token;
  const response = await fetch(reqURL, {
    method: 'get',
    //headers: {'Authorization': 'Bearer ' + access_token}
  });
  const data = await response.json();
  const jsondata = JSON.stringify(data);
  const json = JSON.parse(jsondata);
  var arrayID = [];
  json.results.forEach(function (item, index) {
    try {
      items.upsert({
        itemID: json.results[index].data.id,
        desc_de: json.results[index].data.name.de_DE,
        desc_en: json.results[index].data.name.en_US,
        media: json.results[index].data.id,
        item_class_de: json.results[index].data.item_class.name.de_DE,
        item_class_en: json.results[index].data.item_class.name.en_US,
      });
    }
    catch (error) {
      return console.log(error.name + error.message);
    }
    arrayID.push(json.results[index].data.id);
  });
  const nextID = (Math.max(...arrayID)) + 1;
  return nextID;
  //Object.keys(json.results[data]).forEach(function(results){
  //  console.log('Key : ' + results + ', Value : ' + json["id"])
  //})
}

const getAll = async function () {
  let ID = 1;
  while (true) {
    console.log(ID);
    ID = await getItems(ID);
  }
};

db.authenticate()
  .then(() => {
    console.log('Logged in to DB!');
    items.init(db);
    items.sync();
  })
  .catch(err => console.log(err));

getAll();
My items.js:
const { DataTypes, Model } = require('sequelize');

module.exports = class items extends Model {
  static init(sequelize) {
    return super.init({
      itemID: {
        type: DataTypes.INTEGER,
        allowNull: false,
        unique: true
      },
      desc_de: DataTypes.STRING,
      desc_en: DataTypes.STRING,
      media: DataTypes.INTEGER,
      item_class_de: DataTypes.STRING,
      item_class_en: DataTypes.STRING,
    }, {
      tableName: 'items',
      timestamps: true,
      sequelize
    });
  }
}
My database.js:
const { Sequelize } = require('sequelize');

module.exports = new Sequelize('database', 'user', 'password', {
  host: 'localhost',
  dialect: 'sqlite',
  logging: false,
  storage: './data/items.sqlite',
});

More context would be required for this one, but here are a few things to check:
Check whether items.upsert() actually succeeds. upsert() returns a promise, so a try-catch around an un-awaited call will never catch a rejected promise.
Insert the data in batches with limited concurrency instead of firing all writes at once; it also improves performance. Good libraries for this are p-queue, async, or bluebird.
Check whether your DB schema can handle 137k entries.
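To make the first point concrete: since items.upsert() returns a promise, the try/catch in the question never observes a failed insert, and with ~137k un-awaited upserts in flight, any rejections (e.g. SQLite busy/constraint errors) are dropped silently. A minimal sketch of the difference, reusing the question's items model:

for (const result of json.results) {
  try {
    // Awaited: a rejected upsert now lands in the catch block
    // instead of becoming an unhandled rejection.
    await items.upsert({ itemID: result.data.id /* ...other fields... */ });
    arrayID.push(result.data.id);
  } catch (error) {
    console.log(error.name + ': ' + error.message);
  }
}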
Edit: read through the updated code below and see how it fits your requirements.
getAll()
const PQueue = require('p-queue');
const queue = new PQueue({ concurrency: 5 });

const getAll = async function () {
  let ID = 1;
  while (true) {
    console.log(ID);
    const nextID = await queue.add(() => getItems(ID));
    if (!nextID) {
      break;
    }
    ID = nextID;
  }
};
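(Note: how p-queue is imported depends on the version; v6 exposes the class as a default export in CommonJS, i.e. const { default: PQueue } = require('p-queue');, while v7 and later are ESM-only.)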
getItems()
const getItems = async function (ID) {
  const access_token = await getToken(client, secret);
  const reqURL =
    baseURL +
    '/data/wow/search/item?namespace=' +
    namespace +
    '&_pageSize=1000&orderby=id&id=[' +
    ID +
    ',]&access_token=' +
    access_token;
  const response = await fetch(reqURL, {
    method: 'get',
  });
  const json = await response.json();
  const arrayID = [];
  // Await each upsert so that a rejected promise is actually caught
  // and a row is only counted once it has been written.
  for (const result of json.results) {
    try {
      await items.upsert({
        itemID: result.data.id,
        desc_de: result.data.name.de_DE,
        desc_en: result.data.name.en_US,
        media: result.data.id,
        item_class_de: result.data.item_class.name.de_DE,
        item_class_en: result.data.item_class.name.en_US,
      });
    } catch (error) {
      console.log(error.name + ': ' + error.message);
      continue;
    }
    arrayID.push(result.data.id);
  }
  if (!json.hasOwnProperty('next_page')) {
    return false;
  }
  const nextID = Math.max(...arrayID) + 1;
  return nextID;
};
Alternate version with transactions
const getItems = async function (ID) {
  const access_token = await getToken(client, secret);
  const reqURL =
    baseURL +
    '/data/wow/search/item?namespace=' +
    namespace +
    '&_pageSize=1000&orderby=id&id=[' +
    ID +
    ',]&access_token=' +
    access_token;
  const response = await fetch(reqURL, {
    method: 'get',
  });
  const json = await response.json();
  const itemsData = json.results.map(function (item) {
    return {
      itemID: item.data.id,
      desc_de: item.data.name.de_DE,
      desc_en: item.data.name.en_US,
      media: item.data.id,
      item_class_de: item.data.item_class.name.de_DE,
      item_class_en: item.data.item_class.name.en_US,
    };
  });
  // One transaction per page: the whole batch is committed or none of it.
  return db.transaction(async (transaction) => {
    await items.bulkCreate(itemsData, { transaction });
  }).then(() => {
    if (!json.hasOwnProperty('next_page')) {
      return false;
    }
    const arrayID = json.results.map(function (item) {
      return item.data.id;
    });
    const nextID = Math.max(...arrayID) + 1;
    return nextID;
  });
};
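One caveat with this variant: itemID is declared unique, so re-running the import over rows that already exist will make bulkCreate reject and roll the transaction back. Assuming a Sequelize/SQLite combination where the updateOnDuplicate option is supported (Sequelize documents it for SQLite >= 3.24.0), the batch can keep the upsert semantics of the original code:

await items.bulkCreate(itemsData, {
  transaction,
  // Columns to overwrite when a row with the same itemID already exists.
  updateOnDuplicate: ['desc_de', 'desc_en', 'media', 'item_class_de', 'item_class_en'],
});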

Related

Getting 403 from Lambda calling API Gateway

I have an API POST endpoint which updates a customer's information in DynamoDB. It is set to authenticate using AWS_IAM. I am getting a 403 from my Lambda when calling this API, even though I have allowed the execute-api:Invoke permission on the API for the role the Lambda uses. I saw in this post that I need to create a canonical request, and I was able to come up with the code below, but I still get a 403. I can't figure out what is missing and hope a different pair of eyes can spot the problem. Please help!
"use strict";
const https = require("https");
const crypto = require("crypto");
exports.handler = async (event, context, callback) => {
try {
var attributes = {
customerId: 1,
body: { firstName: "abc", lastName: "xyz" }
};
await updateUsingApi(attributes.customerId, attributes.body)
.then((result) => {
var jsonResult = JSON.parse(result);
if (jsonResult.statusCode === 200) {
callback(null, {
statusCode: jsonResult.statusCode,
statusMessage: "Attributes saved successfully!"
});
} else {
callback(null, jsonResult);
}
})
.catch((err) => {
console.log("error: ", err);
callback(null, err);
});
} catch (error) {
console.error("error: ", error);
callback(null, error);
}
};
function sign(key, message) {
return crypto.createHmac("sha256", key).update(message).digest();
}
function getSignatureKey(key, dateStamp, regionName, serviceName) {
var kDate = sign("AWS4" + key, dateStamp);
var kRegion = sign(kDate, regionName);
var kService = sign(kRegion, serviceName);
var kSigning = sign(kService, "aws4_request");
return kSigning;
}
function updateUsingApi(customerId, newAttributes) {
var request = {
partitionKey: `MY_CUSTOM_PREFIX_${customerId}`,
sortKey: customerId,
payLoad: newAttributes
};
var data = JSON.stringify(request);
var apiHost = new URL(process.env.REST_API_INVOKE_URL).hostname;
var apiMethod = "POST";
var path = `/stage/postEndPoint`;
var { amzdate, authorization, contentType } = getHeaders(host, method, path);
const options = {
host: host,
path: path,
method: method,
headers: {
"X-Amz-Date": amzdate,
Authorization: authorization,
"Content-Type": contentType,
"Content-Length": data.length
}
};
return new Promise((resolve, reject) => {
const req = https.request(options, (res) => {
if (res && res.statusCode !== 200) {
console.log("response from api", res);
}
var response = {
statusCode: res.statusCode,
statusMessage: res.statusMessage
};
resolve(JSON.stringify(response));
});
req.on("error", (e) => {
console.log("error", e);
reject(e.message);
});
req.write(data);
req.end();
});
}
function getHeaders(host, method, path) {
var algorithm = "AWS4-HMAC-SHA256";
var region = "us-east-1";
var serviceName = "execute-api";
var secretKey = process.env.AWS_SECRET_ACCESS_KEY;
var accessKey = process.env.AWS_ACCESS_KEY_ID;
var contentType = "application/x-amz-json-1.0";
var now = new Date();
var amzdate = now
.toJSON()
.replace(/[-:]/g, "")
.replace(/\.[0-9]*/, "");
var datestamp = now.toJSON().replace(/-/g, "").replace(/T.*/, "");
var canonicalHeaders = `content-type:${contentType}\nhost:${host}\nx-amz-date:${amzdate}\n`;
var signedHeaders = "content-type;host;x-amz-date";
var payloadHash = crypto.createHash("sha256").update("").digest("hex");
var canonicalRequest = [
method,
path,
canonicalHeaders,
signedHeaders,
payloadHash
].join("/n");
var credentialScope = [datestamp, region, serviceName, "aws4_request"].join(
"/"
);
const sha56 = crypto
.createHash("sha256")
.update(canonicalRequest)
.digest("hex");
var stringToSign = [algorithm, amzdate, credentialScope, sha56].join("\n");
var signingKey = getSignatureKey(secretKey, datestamp, region, serviceName);
var signature = crypto
.createHmac("sha256", signingKey)
.update(stringToSign)
.digest("hex");
var authorization = `${algorithm} Credential=${accessKey}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
return { amzdate, authorization, contentType };
}
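For reference, the SigV4 documentation prescribes a canonical request of six components joined with real newline characters, including a canonical query string slot (empty when there is none), with the payload hash computed over the body that is actually sent. A sketch against the variables above:

// Canonical request layout per the SigV4 spec.
// Note the "\n" separators (not "/n") and the query-string component.
var payloadHash = crypto.createHash("sha256").update(data).digest("hex");
var canonicalRequest = [
  method,            // "POST"
  path,              // canonical URI
  "",                // canonical query string (empty here)
  canonicalHeaders,  // already ends with "\n"
  signedHeaders,
  payloadHash        // hash of the request body, not of ""
].join("\n");

Whether that resolves this particular 403 depends on what API Gateway computes on its side, but any mismatch in the canonical request guarantees a signature failure.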

Refresh token not present in TokenSet

I am trying to connect to the Hubstaff API, for which I have set up authentication using Auth0 with Express as my backend. To get info about the logged-in user I need to send the token object via the API.
Through some research I have gotten to this point:
const {
  Issuer,
  TokenSet
} = require('openid-client');
const fs = require('fs');
const jose = require('jose');

// constants
const ISSUER_EXPIRE_DURATION = 7 * 24 * 60 * 60; // 1 week
const ACCESS_TOKEN_EXPIRATION_FUZZ = 30; // 30 seconds
const ISSUER_DISCOVERY_URL = 'https://account.hubstaff.com';
// API URL with trailing slash
const API_BASE_URL = 'https://api.hubstaff.com/';

let state = {
  api_base_url: API_BASE_URL,
  issuer_url: ISSUER_DISCOVERY_URL,
  issuer: {}, // The issuer discovered configuration
  issuer_expires_at: 0,
  token: {},
};
let client;

function loadState() {
  return fs.readFileSync('./configState.json', 'utf8');
}

function saveState() {
  fs.writeFileSync('./configState.json', JSON.stringify(state, null, 2), 'utf8');
  console.log('State saved');
}

function unixTimeNow() {
  return Date.now() / 1000;
}

async function checkToken() {
  //console.log('state.token.access_token', state.token.access_token);
  if (!state.token.access_token || state.token.expires_at < (unixTimeNow() + ACCESS_TOKEN_EXPIRATION_FUZZ)) {
    // console.log('Refresh token');
    state.token = await client?.refresh(state.token);
    // console.log('Token refreshed');
    saveState();
  }
}

async function initialize() {
  console.log('API Hubstaff API');
  let data = loadState();
  data = JSON.parse(data);
  if (data.issuer) {
    state.issuer = new Issuer(data.issuer);
    state.issuer_expires_at = data.issuer_expires_at;
  }
  if (data.token) {
    state.token = new TokenSet(data.token);
  }
  if (data.issuer_url) {
    state.issuer_url = data.issuer_url;
  }
  if (data.api_base_url) {
    state.api_base_url = data.api_base_url;
  }
  if (!state.issuer_expires_at || state.issuer_expires_at < unixTimeNow()) {
    console.log('Discovering');
    state.issuer = await Issuer.discover(state.issuer_url);
    state.issuer_expires_at = unixTimeNow() + ISSUER_EXPIRE_DURATION;
    console.log(state.issuer);
  }
  client = new state.issuer.Client({
    // For a personal access token we can use PAT/PAT.
    // This is only needed because the library requires a client_id, whereas the API endpoint does not require it
    client_id: 'Z',
    client_secret: 'J',
  });
  saveState();
  console.log('API Hubstaff initialized');
}

async function request(url, options) {
  await checkToken();
  let fullUrl = state.api_base_url + url;
  return client?.requestResource(fullUrl, state.token, options);
}

function tokenDetails() {
  let ret = {};
  if (state.token.access_token) {
    ret.access_token = jose.JWT.decode(state.token.access_token);
  }
  if (state.token.refresh_token) {
    ret.refresh_token = jose.JWT.decode(state.token.refresh_token);
  }
  return ret;
}

module.exports = {
  initialize,
  checkToken,
  request,
  tokenDetails
};
// Controller
const { response } = require('express')
const api = require('../util/hubstaffConnect.util');
const testConnected = require('../util/testhubstaff.util');

const usersGet = async (req, res = response) => {
  await api.initialize();
  const response = await api.request('v2/organizations', {
    method: 'GET',
    json: true,
  });
  console.log('response', response);
  if (response != null) {
    const body = JSON.parse(response);
    res.json({
      organizations: body.organizations || []
    });
  }
};
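One thing to double-check here: openid-client's requestResource resolves with a response object whose body is a Buffer rather than a plain JSON string, so if the installed version behaves that way, the parse would need to target the body. A hedged sketch of that adjustment:

const response = await api.request('v2/organizations', { method: 'GET' });
if (response && response.body) {
  // body is a Buffer in openid-client's resource responses
  const body = JSON.parse(response.body.toString('utf8'));
  res.json({ organizations: body.organizations || [] });
}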
However, when I go to localhost:8080/oauth/api/organizations I run into an error.
I do realise this is regarding missing tokens, which won't let me get the user's information.

Redis not caching all data on EC2 server with data from Mongo Atlas

I have used Redis, but it does not work as expected. The first time I execute the find query the results come from Mongoose, roughly 19 MB of data, but after caching, Redis returns only one document. I don't know where I am going wrong; the configuration file is below.
const mongoose = require('mongoose');
const { createClient } = require('redis');
const util = require('util');

let client;
(async () => {
  client = createClient();
  await client.connect({
    legacyMode: true
  });
  await client.ping();
})();

const exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.cache = function (options = { time: 36000 }) {
  this.useCache = true;
  this.time = options.time;
  this.hashKey = JSON.stringify(options.key || this.mongooseCollection.name);
  return this;
};

mongoose.Query.prototype.exec = async function () {
  if (!this.useCache) {
    return await exec.apply(this, arguments);
  }
  const key = JSON.stringify({
    ...this.getQuery(),
  });
  const cacheValue = await client.get(this.hashKey, key);
  console.log(cacheValue, "Cache Value");
  if (cacheValue) {
    const doc = JSON.parse(cacheValue);
    console.log('Response from Redis', doc, this.model);
    return Array.isArray(doc) ? doc.map((d) => new this.model(d)) : new this.model(doc);
  }
  const result = await exec.apply(this, arguments);
  client.set(this.hashKey, key, JSON.stringify(result));
  client.expire(this.hashKey, this.time);
  return result;
};

module.exports = {
  clearKey(hashKey) {
    client.del(JSON.stringify(hashKey));
  },
};
Here is the query:
Products.find({ is_deleted: false })
  .select(select)
  .populate('type', 'type')
  .populate('shape', 'type')
  .populate('category', 'type is_pain_scale name')
  .sort({ updated_at: 1 })
  .lean()
  .cache({ time: 36000 });
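A likely culprit: with node-redis v4, client.get(key) takes a single key (the extra key argument is ignored), and client.set(this.hashKey, key, ...) passes the stringified query as the value while the JSON result lands in the options slot, so every query for a collection reads and overwrites at most one string entry under this.hashKey. The pattern this code is based on normally uses Redis hashes; a minimal sketch with the v4 camelCase hash commands:

// Inside the patched exec(), assuming node-redis v4:
const cacheValue = await client.hGet(this.hashKey, key); // field = query JSON
if (cacheValue) {
  /* ...hydrate models from JSON.parse(cacheValue) as above... */
}
const result = await exec.apply(this, arguments);
await client.hSet(this.hashKey, key, JSON.stringify(result));
await client.expire(this.hashKey, this.time); // TTL applies to the whole hash
return result;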

Cannot get Google AI prediction result using JWT credentials in Node.js

I am planning to get an AI prediction result using a service account and deploy a cloud function to a Firebase project. When I try to get a prediction from
https://ml.googleapis.com/v1/projects/projectid/models/category:predict?
using a JWT access token, the result is
{ StatusCodeError: 403 - {"error":{"code":403,"message":"Access to model denied.","status":"PERMISSION_DENIED"}}
It is confirmed that the service account I'm using has been added to the ML project.
Any idea how to get the ML result in a Firebase function using a service account, or some other method?
Here is the code (I am still a newbie to Node.js):
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const admin = require('firebase-admin');
const request = require("request");
const requestProm = require("request-promise");
const functions = require('firebase-functions');
const { GoogleAuth } = require('google-auth-library');
admin.initializeApp();
var reportFld, reportNarTr, reportTitTr;
var input, input2, input3;
var result, predictedHaz, predictedSig, predictedRep, setDoc
var getAccessTokenId
getAccessTokenId = async function main() {
const auth = new GoogleAuth({
scopes: 'https://www.googleapis.com/auth/cloud-platform'
});
const client = await auth.getClient();
const projectId = await auth.getProjectId();
const accessTokenId = await auth.getAccessToken();
return accessTokenId
}
exports.newReport = functions.firestore
.document('/users/{usersId}')
.onCreate((change, context) => {
const db = admin.firestore();
const interDoc = db.collection('users').doc(context.params.usersId);
interDoc.get().then(doc => {
if (!doc.exists) {
console.log('No such document!');
} else {
var getPrediction
getPrediction = async function main2() {
reportFld = doc.data();
reportNarTr = JSON.stringify(reportFld.narrative);
reportTitTr = JSON.stringify(reportFld.title);
reportNumTr = context.params.usersId;
input = {
instances: [
[reportNumTr, reportTitTr, reportNarTr]
]
};
var accessToken = await getAccessTokenId();
var endpointhazCat = 'https://ml.googleapis.com/v1/projects/projectid/models/hazcat:predict?access_token=' + accessToken;
var endpointsigCat = 'https://ml.googleapis.com/v1/projects/projectid/models/sig:predict?access_token=' + accessToken;
var endpointrepCat = 'https://ml.googleapis.com/v1/projects/projectid/models/type:predict?access_token=' + accessToken;
var options1 = {
method: 'POST',
uri: endpointhazCat,
body: input,
json: true // Automatically stringifies the body to JSON
};
var options2 = {
method: 'POST',
uri: endpointsigCat,
body: input,
json: true // Automatically stringifies the body to JSON
};
var options3 = {
method: 'POST',
uri: endpointrepCat,
body: input,
json: true // Automatically stringifies the body to JSON
};
requestProm.post(options1)
.then(function (response) {
result = response['predictions'];
switch (parseInt(result)) {
case 0:
predictedHaz = 'A';
break;
case 1:
predictedHaz = 'B';
break;
case 2:
predictedHaz = 'C';
break;
case 3:
predictedHaz = 'D';
break;
case 4:
predictedHaz = 'E';
break;
case 5:
predictedHaz = 'F';
break;
case 6:
predictedHaz = 'G';
break;
default:
predictedHaz = 'error';
}
const predictedHazData = {
HazardCategory: predictedHaz,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedHazData);
console.log(response);
return true
})
.catch(function (err) {
console.log('Failed', err)
});
requestProm.post(options2)
.then(function (response) {
result = response['predictions'];
if (parseInt(result) > -4) {
predictedSig = 'Sig';
} else predictedSig = 'Insig'
const predictedSigData = {
SignifanceCategory: predictedSig,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedSigData);
console.log(response);
return true
})
.catch(function (err) {
console.log('Failed', err)
});
requestProm.post(options3)
.then(function (response) {
result = response['predictions'];
if (parseInt(result) === 1) {
predictedRep = 'Inc';
} else predictedRep = 'Haz'
const predictedRepData = {
ReportCategory: predictedRep,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedRepData);
console.log(response);
return true
})
.catch(function (err) {
console.log('Failed', err)
});
return true
}
getPrediction().catch(console.error);
} return null
})
.catch(err => {
console.log('Error getting document', err);
});
return true;
});
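As an aside, a common alternative to the access_token query parameter is sending the token in an Authorization header, which request-promise supports. A sketch of options1 rewritten that way (same placeholder endpoint as above):

var options1 = {
  method: 'POST',
  uri: 'https://ml.googleapis.com/v1/projects/projectid/models/hazcat:predict',
  headers: { 'Authorization': 'Bearer ' + accessToken },
  body: input,
  json: true // Automatically stringifies the body to JSON
};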
Added some details:
These are the service account permissions:
ml.jobs.cancel
ml.jobs.create
ml.jobs.get
ml.jobs.getIamPolicy
ml.jobs.list
ml.jobs.update
ml.locations.get
ml.locations.list
ml.models.create
ml.models.delete
ml.models.get
ml.models.getIamPolicy
ml.models.list
ml.models.predict
ml.models.update
ml.operations.cancel
ml.operations.get
ml.operations.list
ml.projects.getConfig
ml.studies.create
ml.studies.delete
ml.studies.get
ml.studies.getIamPolicy
ml.studies.list
ml.trials.create
ml.trials.delete
ml.trials.get
ml.trials.list
ml.trials.update
ml.versions.create
ml.versions.delete
ml.versions.get
ml.versions.list
ml.versions.predict
ml.versions.update
resourcemanager.projects.get
I have tried to use another node library, 'googleapis', in the debug console:
google.auth.getApplicationDefault((err, authClient, projectId) => {
  if (err) {
    console.log('Authentication failed because of ', err);
    res.status(401).send('Authentication failed');
  } else {
    // create the full model name which includes the project ID
    const modelName = 'projects/ml-project-id/models/hazcat';
    const mlRequestJson = {
      'auth': authClient,
      'name': modelName,
      'resource': {
        instances: [['RepNum', 'RepTit', 'RepNar']]
      }
    };
    ml.projects.predict(mlRequestJson, (err, result) => {
      if (err) {
        console.log(err);
      } else {
        console.log(result.data.predictions[0]);
      }
    });
  }
});
and the result is:
3
I then deployed the function to Firebase:
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const admin = require('firebase-admin');
const request = require("request");
const requestProm = require("request-promise");
const functions = require('firebase-functions');
const { GoogleAuth } = require('google-auth-library');
const { google } = require('googleapis');
const ml = google.ml('v1');
admin.initializeApp();
var reportFld, reportNarTr, reportTitTr, reportNumTr, reportTitStr, reportNarStr;
var input, input2, input3;
var result, predictedHaz, predictedSig, predictedRep, setDoc
exports.predictReport = functions.firestore
.document('/users/{usersId}')
.onCreate((change, context) => {
const db = admin.firestore();
const interDoc = db.collection('users').doc(context.params.usersId);
interDoc.get().then(doc => {
if (!doc.exists) {
console.log('No such document!');
} else {
reportFld = doc.data();
reportNarTr = JSON.stringify(reportFld.narrative);
reportTitTr = JSON.stringify(reportFld.title);
reportNumTr = context.params.usersId;
input = {
instances: [
[reportNumTr, reportTitTr, reportNarTr]
]
};
var result1, result2, result3
google.auth.getApplicationDefault((err, authClient, projectId) => {
if (err) {
console.log('Authentication failed because of ', err);
res.status(401).send('Authentication failed');
} else {
const modelName = 'projects/ml-project-id/models/hazcat';
const modelName2 = 'projects/ml-project-id/models/sig';
const modelName3 = 'projects/ml-project-id/models/type';
const mlRequestJson1 = {
'auth': authClient,
'name': modelName,
'resource': input
}
const mlRequestJson2 = {
'auth': authClient,
'name': modelName2,
'resource': input
}
const mlRequestJson3 = {
'auth': authClient,
'name': modelName3,
'resource': input
}
var result1, result2, result3
ml.projects.predict(mlRequestJson1, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
result1 = result.data.predictions[0];
switch (parseInt(result1)) {
case 0:
predictedHaz = 'A';
break;
case 1:
predictedHaz = 'B';
break;
case 2:
predictedHaz = 'C';
break;
case 3:
predictedHaz = 'D';
break;
case 4:
predictedHaz = 'E';
break;
case 5:
predictedHaz = 'F';
break;
case 6:
predictedHaz = 'G';
break;
default:
predictedHaz = 'error';
}
const predictedHazData = {
HazardCategory: predictedHaz,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedHazData);
}
}); // endof predict1
ml.projects.predict(mlRequestJson2, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
result2 = result.data.predictions[0];
if (parseInt(result2) > -4) {
predictedSig = 'Sig';
} else predictedSig = 'Insig'
const predictedSigData = {
SignifanceCategory: predictedSig,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedSigData);
}
});// endof predict2
ml.projects.predict(mlRequestJson3, (err, result) => {
if (err) {
console.log(err);
} else {
console.log(result.data.predictions[0]);
result3 = result.data.predictions[0];
if (parseInt(result3) === 1) {
predictedRep = 'Inc';
} else predictedRep = 'Haz'
const predictedRepData = {
ReportCategory: predictedRep,
};
setDoc = db.collection('users').doc(context.params.usersId).update(predictedRepData);
}
});// endof predict3
}//endof else getappdefault
});//endof getappdefault
} return true
})//endof getdocument
.catch(err => {
console.log('Error getting document', err);
});
return true;
});//endof onCreate
and the result is
Authentication failed because of Error: Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.
at AuthPlus.getApplicationDefaultAsync (/srv/node_modules/googleapis-common/node_modules/google-auth-library/build/src/auth/googleauth.js:156:23)
at <anonymous>
at process._tickDomainCallback (internal/process/next_tick.js:229:7)
Added details (update 2)
I have used keyFile pointing to the service account JSON file:
getAccessTokenId = async function main() {
  const auth = new GoogleAuth({
    keyFile: 'projectid.json',
    scopes: 'https://www.googleapis.com/auth/cloud-platform'
  });
  const client = await auth.getClient();
  const projectId = await auth.getProjectId();
  const accessTokenId = await auth.getAccessToken();
  return accessTokenId;
}
I got an access token like this:
ya29.c.xxxx
but permission is still denied:
Failed { StatusCodeError: 403 - {"error":{"code":403,"message":"Access to model denied.","status":"PERMISSION_DENIED"}}
Added details (update 3)
Using my personal credentials to access the ML model works inside the cloud function:
getAccessTokenId = async function main() {
  const auth = new GoogleAuth({
    keyFile: 'application_default_credentials.json',
    scopes: 'https://www.googleapis.com/auth/cloud-platform'
  });
  const client = await auth.getClient();
  const projectId = await auth.getProjectId();
  const accessTokenId = await auth.getAccessToken();
  return accessTokenId;
}
I get the result:
{ predictions: [ 3 ] }
I have also added the Service Account Token Creator role to the service account, but using the service account to access the ML model still does not work. The role grants:
iam.serviceAccountKeys.create
iam.serviceAccountKeys.delete
iam.serviceAccountKeys.get
iam.serviceAccountKeys.list
But I am curious why it does not include these:
iam.serviceAccounts.getAccessToken
iam.serviceAccounts.signBlob
iam.serviceAccounts.signJwt
iam.serviceAccounts.implicitDelegation
iam.serviceAccounts.getOpenIdToken

Error: read ECONNRESET when working with large data in Firebase Cloud functions

I am performing the following task. During registration, for the first few months we did not save users' images in Firebase Cloud Storage; we only stored the link received from Facebook. Now we face the problem that some of those image links have expired. Because of this, I decided to write a cloud function and run it once as a script: it goes through the users who have only one image link (meaning it is the original link received from Facebook), takes the Facebook user ID, and requests the current profile image. I exported a JSON file with the given users from Firebase, then I fetch links for each user separately; if a user has been deleted, I handle that error in a separate catch so it does not stop the other promises. But after running this cloud function I ran into the error below, so the operation failed for almost all users, even after I increased the cloud function's memory to 2 GB. Please tell me how this can be fixed.
{ Error: read ECONNRESET
at exports._errnoException (util.js:1018:11)
at TLSWrap.onread (net.js:568:26) code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'read' }
My function
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const account_file = require('../account_file.json');
var FB = require('fb');
const path = require('path');
const imageDownloader = require('image-downloader');
const os = require('os');
const shortid = require('shortid');
const imageManager = require('../../lib/Images/image-manager.js');

module.exports = functions.https.onRequest((req, res) => {
  const token = req.header('X-Auth-Token');
  var errorsCount = 0;
  return admin.auth().verifyIdToken(token)
    .then(function (decodedToken) {
      const adminID = decodedToken.uid;
      console.log('adminID is', adminID);
      const users = account_file['users'];
      var fixPhotoPromises = [];
      users.forEach(function (user) {
        const userID = user['localId'];
        const fixPhotoPromise = fixPhoto(userID).catch(error => {
          console.log(error);
          errorsCount += 1;
        });
        fixPhotoPromises.push(fixPhotoPromise);
      });
      return Promise.all(fixPhotoPromises);
    }).then(results => {
      console.log('results.length', results.length, 'errorsCount', errorsCount);
      console.log('success all operations');
      const successJSON = {};
      successJSON["message"] = "Success operation";
      return res.status(200).send(successJSON);
    }).catch(error => {
      console.log(error);
      const errorJSON = {};
      errorJSON["error"] = error;
      return res.status(error.code).send(errorJSON);
    });
});

function fixPhoto(userID) {
  var authUser = {};
  var filename = '';
  return new Promise((resolve, reject) => {
    return admin.auth().getUser(userID)
      .then(userModel => {
        const user = userModel.toJSON();
        const facebookID = user['providerData'][0]['uid'];
        const userID = user['uid'];
        authUser = { 'userID': userID, 'facebookID': facebookID };
        const userImagesPromise = admin.database().ref()
          .child('userImages')
          .child(userID)
          .once('value');
        return Promise.all([userImagesPromise]);
      }).then(results => {
        const userImagesSnap = results[0];
        if (userImagesSnap.val() !== null && userImagesSnap.val() !== undefined) {
          const userProfileImagesDict = userImagesSnap.val()['userProfileImages'];
          const keys = Object.keys(userProfileImagesDict);
          var userProfileImages = [];
          keys.forEach(function (key) {
            const userProfileImage = userProfileImagesDict[key];
            userProfileImages.push(userProfileImage);
          });
          if (userProfileImages.length > 1) {
            const status = 'user has more than one image';
            return resolve(status);
          }
        }
        const facebookAppID = functions.config().facebook.appid;
        const facebookAppSecret = functions.config().facebook.appsecret;
        const facebookAccessPromise = FB.api('oauth/access_token', {
          client_id: facebookAppID,
          client_secret: facebookAppSecret,
          grant_type: 'client_credentials'
        });
        return Promise.all([facebookAccessPromise]);
      }).then(results => {
        const facebookResult = results[0];
        const facebookAccessToken = facebookResult['access_token'];
        const profileImageURL = 'https://graph.facebook.com/' + authUser.facebookID + '/picture?width=9999&access_token=' + facebookAccessToken;
        const shortID = shortid.generate() + shortid.generate() + shortid.generate();
        filename = shortID + ".jpg";
        const tempLocalFile = path.join(os.tmpdir(), filename);
        const options = {
          url: profileImageURL,
          dest: tempLocalFile // Save to /path/to/dest/image.jpg
        };
        const imageDownloaderPromise = imageDownloader.image(options);
        return Promise.all([imageDownloaderPromise]);
      }).then(results => {
        const imageDownloaderResult = results[0];
        const userID = authUser.userID;
        const localImagePath = imageDownloaderResult['filename'];
        const imageManagerPromise = imageManager.saveUserImageToCloudStorage(localImagePath, filename, userID);
        return Promise.all([imageManagerPromise]);
      }).then(results => {
        const result = results[0];
        return resolve(result);
      }).catch(function (error) {
        reject(error);
      });
  });
}
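As an aside, wrapping an already-promise-returning chain in new Promise (the explicit-construction anti-pattern) is what forces all the resolve/reject plumbing above; fixPhoto could simply return the chain, for example:

function fixPhoto(userID) {
  // Sketch: return the chain directly; resolve(x) becomes return x
  // and errors propagate naturally to the caller's .catch().
  return admin.auth().getUser(userID)
    .then(userModel => {
      /* ...same steps as above... */
    });
}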
exports.saveUserImageToCloudStorage = function saveUserImageToCloudStorage(localImagePath, filename, userID) {
  const bucketName = functions.config().googlecloud.defaultbacketname;
  const bucket = gcs.bucket(bucketName);
  const profileImagePath = path.normalize(path.join('userImages', userID, 'profileImages', filename));
  const profileImageFile = bucket.file(profileImagePath);
  return new Promise((resolve, reject) => {
    bucket.upload(localImagePath, { destination: profileImagePath })
      .then(() => {
        const config = {
          action: 'read',
          expires: '03-01-2500'
        };
        const userRefPromise = admin.database().ref()
          .child('users')
          .child(userID)
          .once('value');
        return Promise.all([profileImageFile.getSignedUrl(config), userRefPromise]);
      }).then(function (results) {
        const url = results[0][0];
        const userSnap = results[1];
        if (userSnap.val() === null || userSnap.val() === undefined) {
          return resolve('user was deleted from database');
        }
        const userModel = userSnap.val();
        const userCheckID = userModel['id'];
        if (userCheckID !== userID) {
          return reject("WARNING userCheckID !== userID");
        }
        // save to database
        const userImagesRef = admin.database().ref().child('userImages')
          .child(userID)
          .child('userProfileImages')
          .push();
        const timeStamp = timestamp.now();
        const imageModelID = userImagesRef.key;
        const userImagesRefPromise = userImagesRef.update({
          'path': url,
          'id': imageModelID,
          'fileName': filename,
          'timeStamp': timeStamp
        });
        const userRef = admin.database().ref()
          .child('users')
          .child(userID)
          .child('currentProfileImage');
        const userRefPromise = userRef.update({
          'path': url,
          'id': imageModelID,
          'fileName': filename,
          'timeStamp': timeStamp
        });
        return Promise.all([userImagesRefPromise, userRefPromise]);
      }).then(() => {
        const successJSON = {};
        successJSON["message"] = "Success operation";
        return resolve(successJSON);
      }).catch(function (error) {
        return reject(error);
      });
  });
};
I added this code when initializing Google Cloud Storage, and the error did not occur anymore:

var gcs = require('@google-cloud/storage')({ keyFilename: "service-account-credentials.json" });

gcs.interceptors.push({
  request: function (reqOpts) {
    reqOpts.forever = false;
    return reqOpts;
  }
});
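For context: the request stack used by this older @google-cloud/storage client keeps sockets alive between calls when forever is true; forcing reqOpts.forever = false makes every upload open a fresh connection, so the client never reuses a socket the server has already closed, which is the usual source of read ECONNRESET.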
