Get QLDB ledger table data as a JavaScript object - Node.js

So my issue is simple yet challenging. I am trying to transfer data from one ledger to another. To do that, I read a whole table of data, create an object for every document, and then insert these documents one by one into the other ledger's table:
Ledger#1 Table1 -> get all data -> convert all data to array of objects -> transfer to Ledger#2 Table1 one by one
The problem is that I cannot cleanly create the object from the document. I currently do this manually, using prototype functions to read each field's type and build the value, which is messy and causes some data to become null. So I was wondering if there is a better way that is less prone to errors.
I asked an earlier question about migrating a ledger but had no luck getting any response. Please help me with this.
Following is my code; paste it into an IDE to follow along more easily.
// Requires are the same ones shown in the working version further below
const { awsMainFunction: awsProd } = require("./awsProdConfig");
const { awsMainFunction: awsProduction } = require("./awsProductionConfig");
const { tableNamesAndIndeces: tableName, checkForErrors } = require("./utils");

// Map an Ion field to a plain JavaScript value based on its Ion type.
// Any type not handled here (decimal, timestamp, struct, ...) falls
// through to null, which is where the data loss comes from.
const getValueOfField = (field) => {
  const name = field.getType().name;
  switch (name) {
    case "string":
      return field.stringValue();
    case "int":
      return field.numberValue();
    case "null":
      return null;
    default:
      return null;
  }
};

const enterDataInNewLedger = async (tableData, tableName) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (const data of tableData) {
      await awsProductionDriver.executeLambda(async (txn) => {
        await txn.execute(`INSERT INTO ${tableName} ?`, data);
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const dataTransferOfTable = async (table) => {
  const prodDriver = awsProd();
  try {
    const allTableData = await prodDriver.executeLambda(async (txn) => {
      const result = await txn.execute(`SELECT * FROM ${table.name}`);
      const resultList = result.getResultList();
      let completeResults = [];
      for (const doc of resultList) {
        let newDoc = {};
        const fields = doc.fields();
        for (const field of fields) {
          // field is a [name, value] pair
          newDoc[field[0]] = getValueOfField(field[1]);
        }
        completeResults.push(newDoc);
      }
      return completeResults;
    });
    const response = await enterDataInNewLedger(allTableData, table.name);
    checkForErrors(response);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const startDataTransferFromOneLedgerToAnother = async () => {
  try {
    for (let table of tableName) {
      const response = await dataTransferOfTable(table);
      checkForErrors(response);
    }
  } catch (err) {
    console.log(err.message);
  }
};

startDataTransferFromOneLedgerToAnother();
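As an aside, if a plain JavaScript copy of each document is genuinely needed, walking fields by hand may be avoidable. This is only a sketch, and it assumes the driver hands back ion-js dom.Value documents and that the installed ion-js version supports JSON.stringify on them (recent releases do; verify before relying on it):

// Assumption: resultList holds ion-js dom.Value documents and the installed
// ion-js version can serialize them with JSON.stringify.
const toPlainObject = (ionDocument) => JSON.parse(JSON.stringify(ionDocument));
// const completeResults = resultList.map(toPlainObject);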

So apparently I could have done this easily. I was just experimenting and figured out the solution:
I can insert the whole fetched document as-is and it comes out the same, so my converted code is as follows.
const { awsMainFunction: awsProd } = require("./awsProdConfig");
const { awsMainFunction: awsProduction } = require("./awsProductionConfig");
const { tableNamesAndIndeces: tableName, checkForErrors } = require("./utils");

const enterDataInNewLedger = async (tableData, tableName) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (const data of tableData) {
      await awsProductionDriver.executeLambda(async (txn) => {
        await txn.execute(`INSERT INTO ${tableName} ?`, data);
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const dataTransferOfTable = async (table) => {
  const prodDriver = awsProd();
  try {
    const allTableData = await prodDriver.executeLambda(async (txn) => {
      const result = await txn.execute(`SELECT * FROM ${table.name}`);
      return result.getResultList();
    });
    const response = await enterDataInNewLedger(allTableData, table.name);
    checkForErrors(response);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const startDataTransferFromOneLedgerToAnother = async () => {
  try {
    for (let table of tableName) {
      const response = await dataTransferOfTable(table);
      checkForErrors(response);
    }
  } catch (err) {
    console.log(err.message);
  }
};

startDataTransferFromOneLedgerToAnother();
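One possible follow-up improvement: the code above opens a separate transaction per document, which is slow for large tables. A minimal sketch of batching, assuming the same driver objects as above; enterDataInNewLedgerBatched and the chunk size of 20 are illustrative, not part of the original code, and the chunk size is kept below QLDB's per-transaction document limit (40 at the time of writing):

// Split an array into slices of the given size
const chunk = (arr, size) =>
  Array.from({ length: Math.ceil(arr.length / size) }, (_, i) =>
    arr.slice(i * size, (i + 1) * size));

const enterDataInNewLedgerBatched = async (tableData, tableName) => {
  const driver = awsProduction();
  for (const batch of chunk(tableData, 20)) {
    // One transaction per batch instead of one per document
    await driver.executeLambda(async (txn) => {
      for (const doc of batch) {
        await txn.execute(`INSERT INTO ${tableName} ?`, doc);
      }
    });
  }
};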

Related

Firebase document update trigger function error

I am trying to stream-insert data into a BigQuery table and have the below issue with my function. I am not sure where the error in the code is. I followed this sample to achieve it with realtime data:
https://github.com/googleapis/nodejs-bigquery/blob/main/samples/insertRowsAsStream.js
Function returned undefined, expected Promise or value
const functions = require("firebase-functions");
const {BigQuery} = require("@google-cloud/bigquery");

exports.onWriteTrigger = functions
  .firestore
  .document("leaseCompany/{documentId}")
  .onWrite((change, context) => {
    /*
      onCreate: google.firestore.document.create
      onUpdate: google.firestore.document.update
      onDelete: google.firestore.document.delete
    */
    const row = {
      // insertId: document.data.id,
      // json: {
      timestamp: context.timestamp,
      name: change.after.data().name,
      // documentName: context.resource.name,
      documentId: change.after.id,
      eventId: context.eventId,
      data: change.after.data().country,
    };
    // console.log(insertRows);

    async function insertBigQuery(rows) {
      try {
        console.log(row);
        const datasetName = "firestore_export";
        const tableName = "leaseCompany";
        const bigqueryClient = new BigQuery();
        const ds = bigqueryClient.dataset(datasetName);
        const tbl = ds.table(tableName);
        await tbl.insert(rows)
          .then((data) => {
            return true;
          })
          .catch((err) => {
            // An API error or partial failure occurred.
            if (err.name === "PartialFailureError") {
              console.log("Error Sending Notifications", err);
              return false;
            }
          });
      } catch (err) {
        console.error(`table.insert: ${JSON.stringify(err)}`);
        return err;
      }
    }
    // console.log(row);
    insertBigQuery(row);
  });
You need to return a promise or value from your function:
async function insertBigQuery(rows) {
  try {
    console.log(row);
    const datasetName = "firestore_export";
    const tableName = "leaseCompany";
    const bigqueryClient = new BigQuery();
    const ds = bigqueryClient.dataset(datasetName);
    const tbl = ds.table(tableName);
    return tbl.insert(rows)
      .then((data) => {
        return true;
      })
      .catch((err) => {
        // An API error or partial failure occurred.
        if (err.name === "PartialFailureError") {
          console.log("Error Sending Notifications", err);
          return false;
        }
      });
  } catch (err) {
    console.error(`table.insert: ${JSON.stringify(err)}`);
    return err;
  }
}
return insertBigQuery(row);
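For what it's worth, the same fix reads more directly with async/await. A sketch assuming the same dataset and table names, plus a guard for deletes (where change.after.data() is undefined):

exports.onWriteTrigger = functions.firestore
  .document("leaseCompany/{documentId}")
  .onWrite(async (change, context) => {
    // On a delete there is no "after" document, so bail out early
    if (!change.after.exists) return null;
    const row = {
      timestamp: context.timestamp,
      name: change.after.data().name,
      documentId: change.after.id,
      eventId: context.eventId,
      data: change.after.data().country,
    };
    try {
      await new BigQuery()
        .dataset("firestore_export")
        .table("leaseCompany")
        .insert([row]);
      return true;
    } catch (err) {
      console.error("table.insert:", err);
      return false;
    }
  });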

Compare data after Sequelize asynchronous query

I have this code :
VerificationKey.getCode = async (visitorData, visitorCode, res) => {
  console.log("Verif model visitorCode" + visitorCode);
  const data = visitorData;
  const testCode = visitorCode;
  const findVisitor = await VerificationKey.findOne({ where: { data } })
    .catch((err) => {
      console.log(err);
    })
    .then(() => {
      if (testCode == findVisitor.key) {
        res.status(200).json({ response: true });
      }
    });
};
What I need is to compare the testCode and findVisitor.key values.
If they are equal, I want to return a boolean to the front end.
But I can't write it like this, because findVisitor.key cannot be accessed before findVisitor is initialized.
I believe you have to change your code to use async/await syntax only - without then and catch:
VerificationKey.getCode = async (visitorData, visitorCode, res) => {
  console.log("Verif model visitorCode" + visitorCode);
  const data = visitorData;
  const testCode = visitorCode;
  try {
    const findVisitor = await VerificationKey.findOne({ where: { data } });
    if (!findVisitor) {
      res.status(404).json({ response: false });
    } else if (testCode == findVisitor.key) {
      res.status(200).json({ response: true });
    } else {
      res.status(403).json({ response: false });
    }
  } catch (err) {
    console.log(err);
  }
};
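If you prefer to keep the then/catch style, the key point is that the resolved row is only available as the callback's parameter, not as the return value of the whole chain. A sketch:

VerificationKey.getCode = (visitorData, visitorCode, res) => {
  VerificationKey.findOne({ where: { data: visitorData } })
    .then((findVisitor) => {
      // findVisitor is the resolved row (or null), available only here
      if (findVisitor && visitorCode == findVisitor.key) {
        res.status(200).json({ response: true });
      } else {
        res.status(403).json({ response: false });
      }
    })
    .catch((err) => console.log(err));
};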

Is there any way to make this Node.js code more compact?

I am new to Node.js, and am wondering if there is any way to make this more compact. I am talking specifically about the nested then/catch statements. Is there any way to put this into one big then/catch, or to return {res: false} once rather than repeating it for every catch?
const uId = data.uId;
const id = data.id;
const updates = {};
updates["/shared/" + id + "/users/" + uId] = null;
updates["/users/" + uId + "/shared/" + id] = null;
return admin.database().ref().update(updates).then(() => {
  let ref = "/shared/" + id;
  // Check if any members are left
  return admin.database().ref(ref).once("value").then((snapshot) => {
    var users = snapshot.val().users;
    if (users == null) {
      admin.database().ref(ref).remove().then(() => {
        return {res: true};
      }).catch(() => {
        return {res: false};
      });
    } else {
      return {res: true};
    }
  }).catch(() => {
    return {res: false};
  });
}).catch(() => {
  return {res: false};
});
Return the next Promise in the chain instead of nesting them, then have a single .catch at the end:
const ref = "/shared/" + id;
return admin.database().ref().update(updates)
  .then(() => {
    // Check if any members are left
    return Promise.all([ref, admin.database().ref(ref).once("value")]);
  })
  .then(([ref, snapshot]) => {
    var users = snapshot.val().users;
    if (users == null) {
      return admin.database().ref(ref).remove();
    }
  })
  .then(() => ({ res: true }))
  .catch(() => ({ res: false }));
The Promise.all is needed to pass the value from one .then to another.
Using async/await would make things cleaner:
const ref = "/shared/" + id;
try {
  await admin.database().ref().update(updates);
  const snapshot = await admin.database().ref(ref).once("value");
  const { users } = snapshot.val();
  if (users == null) {
    await admin.database().ref(ref).remove();
  }
  return { res: true };
} catch (e) {
  return { res: false };
}
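Note that the awaits above have to live inside an async function. A minimal wrapper sketch (removeUserFromShared is a made-up name for illustration):

async function removeUserFromShared(data) {
  const uId = data.uId;
  const id = data.id;
  const updates = {};
  updates["/shared/" + id + "/users/" + uId] = null;
  updates["/users/" + uId + "/shared/" + id] = null;
  // ...body from the async/await snippet above...
}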

Cannot return array if item not present in DynamoDB

I have a function that takes an array of jobs as a parameter. This function checks the existence of each job in the database by its id.
If a job is not present in the database, that particular job needs to be pushed into an array called latestJobs. I'm calling this function in my main.js file, but the code breaks and stops.
Below is my main.js code:
module.exports.app = async () => {
  try {
    ...
    const jobs = await getJobsForCountries(body);
    const latestJobs = await filterPreDraftedJobs(jobs);
    console.log('latestJobs', latestJobs);
  } catch (e) {
    console.error('Error:- ', e); // Comes to here
  }
};
My checker function looks like:
module.exports = async (jobs) => {
  let latestJobs = [];
  for (const job of jobs) {
    const params = {
      TableName: process.env.DYNAMODB_TABLE,
      Key: {
        id: job.Id
      }
    };
    await dynamoDb.get(params, (err, data) => {
      if (err) {
        latestJobs.push(job);
        console.log('Job not found in DB');
      }
    }).promise();
  }
  return latestJobs;
};
How can I fix this issue? I want latestJobs to contain the jobs that are not present in the database. Is there a function for DynamoDB that can do this for me?
You are mixing callback, promise and await styles. I would do it like this:
module.exports = async (jobs) => {
  let latestJobs = [];
  for (const job of jobs) {
    const params = {
      TableName: process.env.DYNAMODB_TABLE,
      Key: {
        id: job.Id
      }
    };
    try {
      const result = await dynamoDb.get(params).promise();
      // get() resolves with an empty object when the key is missing,
      // so check for Item rather than relying on an error
      if (!result.Item) {
        latestJobs.push(job);
      }
    } catch (err) {
      console.log('Job lookup failed', err);
    }
  }
  return latestJobs;
};
Also, make sure that the table is created and that the region and name you are passing are correct.
I am not very familiar with DynamoDB, but looking at the above conversation, the code should be something like this. I have tried to improve performance while keeping the code modular and readable.
async function addUpdateJobs(jobs) {
  let paramsArray = [];
  for (const job of jobs) {
    const jobParams = {
      params: {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
          id: job.Id
        }
      },
      job: job
    };
    paramsArray.push(jobParams);
  }
  return getJobs(paramsArray);
}

async function getJobs(paramsArray) {
  let latestJobs = [];
  // Run the lookups in parallel and wait for all of them to settle
  await Promise.all(paramsArray.map(async (jobParam) => {
    try {
      const result = await dynamoDb.get(jobParam.params).promise();
      if (result.Item) {
        return; // job already exists, skip it
      }
      latestJobs.push(jobParam.job);
    } catch (err) {
      latestJobs.push(jobParam.job);
    }
  }));
  return latestJobs;
}
PS: I was also going through error handling in Amazon DynamoDB.
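To answer the question's last part directly: DynamoDB does offer a bulk lookup, BatchGetItem, which checks up to 100 keys per request. A rough sketch against the same table and key shape (batchCheck is a hypothetical helper name):

const batchCheck = async (jobs) => {
  const latestJobs = [];
  const table = process.env.DYNAMODB_TABLE;
  // BatchGetItem accepts at most 100 keys per request, so page through.
  // NB: a production version would also retry res.UnprocessedKeys.
  for (let i = 0; i < jobs.length; i += 100) {
    const slice = jobs.slice(i, i + 100);
    const res = await dynamoDb.batchGet({
      RequestItems: { [table]: { Keys: slice.map((job) => ({ id: job.Id })) } }
    }).promise();
    const foundIds = new Set((res.Responses[table] || []).map((item) => item.id));
    latestJobs.push(...slice.filter((job) => !foundIds.has(job.Id)));
  }
  return latestJobs;
};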

Bulk update to Postgres with Node.js performance issue

I'm facing a performance issue while trying to do a bulk update in Postgres. It takes more than 180 seconds to update around 23,000 records. PFB the code. I'm using the pg-promise library. Is there anything I could do to improve the performance?
const pgp = require('pg-promise')();

const postgresDBConfig = {
  host: Config.postgresDBHost,
  port: Config.postgresDBPort,
  database: Constants.postgresDBName,
  user: Config.postgresDBUser,
  password: 'pswd'
};

export async function getTransactionDetails(): Promise<any> {
  return new Promise<any>(async function (resolve, reject) {
    try {
      let db = pgp(postgresDBConfig);
      db.connect();
      let query = "SELECT * FROM table_name";
      db.any(query)
        .then(data => {
          console.log("Executed successfully::");
          resolve(data);
        })
        .catch(error => {
          console.log('ERROR:', error);
        });
    } catch (error) {
      log.error("Error::" + error);
      throw error;
    }
  });
}
export async function updateStatus(result: any, status: string) {
  try {
    let db = pgp(postgresDBConfig);
    //db.connect();
    let updateData = [];
    _.forEach(result, function (row) {
      let updateInfo = {};
      updateInfo["sessionid"] = row.sessionid;
      updateInfo["status"] = status;
      updateData.push(updateInfo);
    });
    console.log("updateData::" + updateData.length);
    const tableName = new pgp.helpers.TableName('table_name', 'schema_name');
    let columnset = new pgp.helpers.ColumnSet(['?sessionid', 'status'], { table: tableName });
    let update = pgp.helpers.update(updateData, columnset);
    db.none(update).then(() => {
      console.log("Updated successfully");
    })
      .catch(error => {
        console.log("Error updating the status" + error);
      });
  }
  catch (error) {
    log.error("Error in function updateStatus::" + error);
    throw error;
  }
}
The code exhibits problems all over the place:
- You should initialize the database object only once.
- You should not use db.connect() at all, and you also use it incorrectly for the async code.
- You again use an async block incorrectly, skipping await, so it doesn't execute correctly.
- You do not append any UPDATE logic clause, so it is updating everything all over again, unconditionally, which may be the cause of the delayed mess you're in.
Here's an improved example, though it may need some more work from your side...
const pgp = require('pg-promise')();

const postgresDBConfig = {
  host: Config.postgresDBHost,
  port: Config.postgresDBPort,
  database: Constants.postgresDBName,
  user: Config.postgresDBUser,
  password: 'pswd'
};

const db = pgp(postgresDBConfig);

const tableName = new pgp.helpers.TableName('table_name', 'schema_name');
const columnSet = new pgp.helpers.ColumnSet(['?sessionid', 'status'], {table: tableName});

export async function getTransactionDetails(): Promise<any> {
  try {
    const res = await db.any('SELECT * FROM table_name');
    console.log('Executed successfully::');
    return res;
  } catch (error) {
    console.log('ERROR:', error);
    throw error;
  }
}

export async function updateStatus(result: any, status: string) {
  try {
    let updateData = [];
    _.forEach(result, row => {
      let updateInfo = {};
      updateInfo["sessionid"] = row.sessionid;
      updateInfo["status"] = status;
      updateData.push(updateInfo);
    });
    console.log('updateData::', updateData.length);
    const update = pgp.helpers.update(updateData, columnSet) +
      ' WHERE v.sessionid = t.sessionid';
    await db.none(update);
    console.log('Updated successfully');
  }
  catch (error) {
    console.log('Error in function updateStatus:', error);
    throw error;
  }
}
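For completeness, a hypothetical call site tying the two functions together (the 'PROCESSED' status value is made up for illustration):

(async () => {
  const rows = await getTransactionDetails();
  await updateStatus(rows, 'PROCESSED');
  pgp.end(); // close the connection pool when the script is done
})();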
