How to test HBase Get using Mocha/Node.js

Hi guys, I have a job with a method that extracts data from HBase given a key in Node.js, something like the following:
async findHbaseData(field1, field2) {
  const key = generateKey(field1, field2);
  const data = await hbase.get(key);
  data['field3'] = data.field3.toUpperCase();
  return data;
}
The HBase get works as follows:
const HBaseRestClient = require('hbase');
const { Connection } = HBaseRestClient;

this.client = HBaseRestClient(
  this.confHbase // Here I have table name, host and port
);
this.table = this.confHbase.table;

async get(id, columns) {
  let schema = this.getSchema(columns);
  return await new Promise((res, rej) => {
    const row = this.client.table(this.table).row(id);
    if (row) {
      let convert = (err, values, response) => {
        if (err) {
          rej(err);
          return;
        }
        const innerModel = Object.create(this.model);
        const result = this.convertRow(values, innerModel, schema, columns);
        res(result);
      };
      if (columns && columns.length) {
        let hbaseColumns = this.schemaToHbaseColumns(columns);
        row.get(hbaseColumns, convert);
      } else {
        row.get(convert);
      }
    }
  });
}
I need to test this method (findHbaseData) using Mocha or Chai, but my test environment (Jenkins) can't access HBase. Is it possible to use Mocha to simulate this access and return fake data instead of running the real hbase.get(key)?
Thanks!

I just discovered the answer.
I can override HBaseRestClient.prototype.get(id, columns) and read the data from a JSON file.
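For reference, here is a minimal sketch of that idea as a Mocha test using Sinon to stub the prototype method. The wrapper class name (HBaseRepository), the module paths, and the fixture file are hypothetical stand-ins, not part of the original code:

const fs = require('fs');
const sinon = require('sinon');
const { expect } = require('chai');
const HBaseRepository = require('../src/hbaseRepository'); // hypothetical wrapper exposing the async get above
const job = require('../src/job'); // hypothetical module exposing findHbaseData

describe('findHbaseData', () => {
  let getStub;

  beforeEach(() => {
    // Serve fake row data from a JSON fixture instead of hitting HBase.
    const fakeRow = JSON.parse(fs.readFileSync('./test/fixtures/row.json', 'utf8'));
    getStub = sinon.stub(HBaseRepository.prototype, 'get').resolves(fakeRow);
  });

  afterEach(() => getStub.restore());

  it('uppercases field3', async () => {
    const data = await job.findHbaseData('f1', 'f2');
    expect(getStub.calledOnce).to.equal(true);
    expect(data.field3).to.equal(data.field3.toUpperCase());
  });
});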

Related

Getting Error [Cannot read properties of undefined (reading 'generateTypeInfo')] in Node JS API post method

I am new to RESTful API development using Node.js and SQL Server. I am trying to do a simple POST operation where I pass an array of objects to the API endpoint and then call a SQL Server procedure with a table-valued parameter. I am getting the below error:
Cannot read properties of undefined (reading 'generateTypeInfo')
I was really surprised to see that there is not a single help topic on Google regarding this error. I do not want to learn ASP.NET Core for this, because JavaScript has an easier learning curve. Am I making a mistake by developing a REST API using the combination of Node.js and SQL Server? Below is my related .js file called in the POST endpoint:
const sql = require("mssql/msnodesqlv8");
const dataAccess = require("../DataAccess");

const fn_CreateProd = async function (product) {
  let errmsg = "";
  let connPool = null;
  let result = null;
  await sql
    .connect(global.config)
    .then((pool) => {
      global.connPool = pool;
      result = pool.request().query("select * from products where 1=2");
      return result;
    })
    .then((retResult) => {
      const srcTable = retResult.recordset.toTable("tvp_products");
      let newsrcTable = Array.from(srcTable.columns);
      console.log('Source table b4 mapping', srcTable);
      newsrcTable = newsrcTable.map((i) => {
        i.name = i.name.toUpperCase();
        return i;
      });
      console.log('Source table after convert array with mapping', newsrcTable);
      const prdTable = dataAccess.generateTable(
        newsrcTable,
        product,
        "tvp_products"
      );
      console.log("Prepared TVP data", prdTable);
      const newResult = dataAccess.execute(`sp3s_ins_products_tvp`, [
        { name: "tblprods", value: prdTable },
      ]);
      console.log("Result of Execute Final procedure", newResult);
      return newResult;
    })
    .then((result) => {
      console.log("Result of proc", result);
      if (!result.errmsg) errmsg = "Products Inserted successfully";
      else errmsg = result.errmsg;
    })
    .catch((err) => {
      console.log("Enter catch of Posting prod", err.message);
      errmsg = err.message;
    })
    .finally(() => {
      sql.close();
    });
  return { retStatus: errmsg };
};

module.exports = fn_CreateProd;
and the content of the generateTable function is as below:
const generateTable = (columns, entities, tvpName) => {
  const table = new mssql.Table(tvpName);
  // const testobj = {type : [sql.numeric], name : 'Sanjay'}
  // console.log('Columns testobj', testobj.type)
  columns.forEach(column => {
    // console.log('Column data for column:', column)
    if (column && typeof column === 'object' && column.name && column.type) {
      let colOptions = {};
      if (column.type == mssql.Numeric) {
        colOptions.scale = column.scale;
        colOptions.precision = column.precision;
      } else if (column.type == mssql.VarChar || column.type == mssql.Char) {
        colOptions.length = column.length;
      }
      // console.log(`Column name type for column :${column.name} -${colType}-Actual :${column['type']}`)
      if (column.hasOwnProperty('options')) {
        table.columns.add(column.name.toUpperCase(), colType, column.options);
      } else {
        table.columns.add(column.name.toUpperCase(), colOptions);
      }
    }
  });
  console.log('Generated table', table);
  const newEntities = entities.map(obj => keystoUppercase(obj));
  // console.log('New entities after uppercase', newEntities)
  newEntities.forEach(entity => {
    table.rows.add(...columns.map(i => entity[i.name]));
  });
  return table;
};
I have found the solution now. If you look at the code of the generateTable function, I was adding the columns into the table but not specifying the data type of the columns, which is what caused this error. I have added one more property [type] in the [colOptions] object being passed to the columns.add command in the [generateTable] function. Thanks a lot anyway for the quick replies, Dale K.
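As a rough sketch of that fix: mssql's table.columns.add expects the data type alongside the column name, so the corrected branch might look like this (the exact type mapping below is illustrative, not the poster's final code):

// Sketch: supply the mssql data type when adding each column.
let colType = column.type; // e.g. mssql.Int
let colOptions = {};
if (column.type == mssql.Numeric) {
  // Numeric needs precision/scale baked into the type.
  colType = mssql.Numeric(column.precision, column.scale);
} else if (column.type == mssql.VarChar || column.type == mssql.Char) {
  // Character types need a length.
  colType = column.type(column.length);
}
table.columns.add(column.name.toUpperCase(), colType, colOptions);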

Caching in Node.js

I am trying to cache the results of the Twitter query, as they have rate limiting, and I cannot seem to figure it out.
I am following this tutorial here.
The Cache.js file looks like so (taken from the tutorial):
import NodeCache from 'node-cache';

class Cache {
  constructor(ttlSeconds) {
    this.cache = new NodeCache({ stdTTL: ttlSeconds, checkperiod: ttlSeconds * 0.2, useClones: false });
  }

  get(key, storeFunction) {
    const value = this.cache.get(key);
    if (value) {
      return Promise.resolve(value);
    }
    return storeFunction().then((result) => {
      this.cache.set(key, result);
      return result;
    });
  }

  del(keys) {
    this.cache.del(keys);
  }

  delStartWith(startStr = '') {
    if (!startStr) {
      return;
    }
    const keys = this.cache.keys();
    for (const key of keys) {
      if (key.indexOf(startStr) === 0) {
        this.del(key);
      }
    }
  }

  flush() {
    this.cache.flushAll();
  }
}

export default Cache;
My Twitter query file looks like so:
import TwitterCount from "../models/TwitterModel.js";
import { TwitterApi } from 'twitter-api-v2';
import dotenv from "dotenv";
dotenv.config();
import CacheService from '../middleware/Cache.js';

const twitterClient = new TwitterApi(process.env.TWITTER_API_BEARER_TOKEN);
const readOnlyClient = twitterClient.readOnly;
const ttl = 60 * 60 * 1; // cache for 1 hour
const cache = new CacheService(ttl);

export const getCount = async (req, res) => {
  try {
    const twit = await TwitterCount.findOne({
      attributes: ['followersCount']
    });
    const key = `Twit_${String(twit.dataValues.followersCount)}`;
    const twitterFollowers = await readOnlyClient.v2.followers('1563787278857785350'); // HejOfficial account Twitter
    const results = cache.get(key, () => twitterFollowers.data.length);
    if (twit.dataValues.followersCount === results) {
      console.log('same results');
    } else {
      await TwitterCount.create({
        followersCount: results
      });
      console.log("Twitter Data added Successful");
    }
    res.json(twit);
    cache.del(key);
  } catch (error) {
    console.log(error);
  }
}
I presume that I am not using the key properly. Please assist if you have encountered this issue before, or provide a better caching alternative.
Thank you.
Try using the account id as the key rather than the follower count. This way you are caching the account's followers for an hour.
Something like this should get you going with the cache. It doesn't include the database inserts, but they can now be handled in the getCount method.
export const getCount = async (req, res) => {
  const result = await getCountFromAPI("1563787278857785350"); // HejOfficial account Twitter
  res.json(result);
};

const getCountFromAPI = async (account) => {
  const key = `Twit_${account}`;
  return cache.get(key, async () => {
    const twitterFollowers = await readOnlyClient.v2.followers(account);
    return twitterFollowers;
  });
};
Note that if you save the results to your database every time, it defeats the purpose of the cache. I'd suggest only using the API for this data, and if you want to use the database as well, maybe the cache is unnecessary? Save the follower count and account id with a last-updated timestamp, and if the timestamp is more than one hour old, query the value again from Twitter.
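A rough sketch of that database-only variant, assuming a Sequelize-style model where the account and updatedAt fields are hypothetical additions (not part of the original code):

const ONE_HOUR_MS = 60 * 60 * 1000;

// Sketch: hit Twitter only when the stored count is older than an hour.
const getFollowerCount = async (account) => {
  const row = await TwitterCount.findOne({ where: { account } });
  if (row && Date.now() - new Date(row.updatedAt).getTime() < ONE_HOUR_MS) {
    return row.followersCount; // still fresh, skip the API call
  }
  const followers = await readOnlyClient.v2.followers(account);
  const followersCount = followers.data.length;
  await TwitterCount.upsert({ account, followersCount }); // refresh the stored value
  return followersCount;
};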

What is the best way to execute various queries on request object in mongoose

I didn't know what would be the best title to describe my problem here. I went through an online course where the instructor chained methods on the request object to carry out different queries.
I don't know if this is the best way to do it (the code is below). Honestly, I am having trouble understanding it completely.
The following is the code to display all tours on the website:
const Tour = require("../models/tourModel");

class APIFeatures {
  constructor(query, queryString) {
    this.query = query;
    this.queryString = queryString;
  }

  filter() {
    const queryObj = { ...this.queryString };
    const excludedFields = ["page", "sort", "limit", "fields"];
    excludedFields.forEach((el) => delete queryObj[el]);
    // 1B) Advanced filtering
    let queryStr = JSON.stringify(queryObj);
    queryStr = queryStr.replace(/\b(gte|gt|lte|lt)\b/g, (match) => `$${match}`);
    this.query = this.query.find(JSON.parse(queryStr));
    // console.log(this.query);
    return this;
  }

  sort() {
    if (this.queryString.sort) {
      const sortBy = this.queryString.sort.split(",").join(" ");
      this.query = this.query.sort(sortBy);
    } else {
      this.query = this.query.sort("-createdAt");
    }
    return this;
  }

  paginate() {
    const limit = 2;
    const skip = 2;
    this.query = this.query.skip(skip).limit(limit);
    return this;
  }
}
The controller function to get all the tours is as follows:
const getAllTours = async (req, res) => {
  try {
    const features = new APIFeatures(Tour.find(), req.query).sort().paginate();
    const tours = await features.query;
    // ... return the response
  } catch (err) {
    // do something if err
  }
};
I don't understand why the instructor passed Tour.find() as a parameter in
const features = new APIFeatures(Tour.find(), req.query).sort().paginate();
In the filter() method there is already
this.query = this.query.find(JSON.parse(queryStr));
Wouldn't this result in Tour.find().find()?
Is it necessary to define a class and chain the methods like this on the req object? Are there any better ways?
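(As an illustrative aside, not from the course: in Mongoose, calling .find() on an existing Query merges the new conditions into it rather than issuing a second query, so Tour.find().find(filter) is effectively Tour.find(filter). A minimal sketch of that merging behavior:

// Sketch: both queries end up with the same filter in Mongoose.
const q1 = Tour.find().find({ price: { $gte: 100 } });
const q2 = Tour.find({ price: { $gte: 100 } });
console.log(q1.getFilter()); // { price: { $gte: 100 } }
console.log(q2.getFilter()); // { price: { $gte: 100 } }
)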

Fetch API Doesn't send data on first call - NestJs

I have an API in NestJS which does not send data on the first hit; however, hitting it again sends the desired data. I am guessing the API returns before the internal processing is done.
How do I stop this? Is sleep a good option for this?
Or is there another way to do it?
#Post("load")
#UseGuards(AuthGuard("jwt"))
async load(#Req() body: any)
{
const organizationId = body.user.organizationId;
const userId = body.user.userId;
if ("brandIds" in body.body)
{
await this.userService.onBoardUser(userId);
}
var settings = await this.settingsService.fetchLayout(organizationId, "home");
settings.forEach(async (element) =>
{
var parsedElement = JSON.parse(JSON.stringify(element));
var innerContent = await this.fetchContent(parsedElement.method, organizationId, userId);
var template = parsedElement.content[0];
let formattedItem = {};
innerContent.forEach((item) =>
{
try
{
formattedItem = template;
Object.keys(template).forEach((key) =>
{
if (template[key]!= "" && key != "type")
{
formattedItem[key] = eval(template[key]);
}
});
parsedElement.content.push(formattedItem);
formattedItem = null;
}
catch(err)
{
}
});
this.response.data.push(parsedElement);
innerContent = null;
template = null;
formattedItem = null;
parsedElement = null;
});
return(this.response);
}
Looks like your main problem here is that you're using async/await inside forEach, which doesn't work.
Use it like this:
for (const setting of settings) {
  // ... your async code here.
}
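Applied to the handler above, the outer loop might look like this sketch (inner details elided; unlike forEach, for...of actually awaits each iteration):

for (const element of settings)
{
  var parsedElement = JSON.parse(JSON.stringify(element));
  // Awaited for real this time, so the response is fully built before returning.
  var innerContent = await this.fetchContent(parsedElement.method, organizationId, userId);
  // ... build and push the formatted items as before ...
  this.response.data.push(parsedElement);
}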

Node.js Query sqlite with 'sqlite'

I'm trying to get the hang of Node (I mainly use Python), so I'm working on a small project to read and write data to a SQLite database.
Luckily I am having no issue writing to the database, but I cannot seem to get queries to work at all. I've tested the queries in the SQL terminal and they are successful.
So far, I have something like:
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird");

// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";

function insertDataIntoDatabase(transactions, db) {
  // Write each transaction into the database.
  let sqlStatement = "INSERT INTO Trx \
    (name, address, amount, category) \
    VALUES ";
  for (var i = 0; i < transactions.length; ++i) {
    let trx = transactions[i];
    sqlStatement += util.format(
      "('%s', '%s', %d, '%s'), ",
      trx.name,
      trx.address,
      trx.amount,
      trx.category
    );
  }
  sqlStatement = sqlStatement.substring(0, sqlStatement.length - 2);
  db.then(db => db.run(sqlStatement))
    .catch((err) => console.log(err));
}

function getTransactions(db, category) {
  // Return an array of valid transactions of a given category.
  let where = "";
  if (category) {
    where = util.format("WHERE category='%s'", category);
  }
  let sqlStatement = util.format("SELECT * from Trx %s", where);
  sqlStatement = "SELECT * from Trx"; // Trying to figure out what's happening
  console.log(sqlStatement);
  db.then(db => {
    db.all(sqlStatement)
      .then((err, rows) => {
        console.log(rows); // undefined
        console.log(err); // []
      });
  });
}

// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
  .then(db => db.migrate({ force: 'last' }));

// Read transactions and write them to the database
fs.readFile("transactions.json", "utf8", (err, data) => {
  let transactions = JSON.parse(data).transactions;
  insertDataIntoDatabase(transactions, db);
});

// Get transaction data
getTransactions(db, 'credit');

// Close connection to DB
db.then(db => db.close());
Looking at this again, I think the issue is the async nature of Node. The query was successful, but at that point in time I had not yet inserted the data from the JSON file into the database, hence the empty result.
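A sketch of one way to sequence it with async/await, assuming insertDataIntoDatabase and getTransactions are adapted to take the opened db and return promises (function and file names as in the code above):

const fsp = require("fs").promises;

async function main() {
  // Open and migrate first, then insert, then query -- strictly in order.
  const db = await sqlite.open(DATABASE, { cached: true });
  await db.migrate({ force: "last" });

  const data = await fsp.readFile("transactions.json", "utf8");
  const transactions = JSON.parse(data).transactions;
  await insertDataIntoDatabase(transactions, db); // insert completes first...

  const rows = await getTransactions(db, "credit"); // ...then the query runs
  console.log(rows);

  await db.close();
}

main().catch(console.error);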
