Jasmine reporter publishing data to Prometheus overwrites previous job & metrics - Node.js

I am trying to write a Jasmine reporter that pushes data to Prometheus. I am able to execute the tests and report them in one go, but as soon as I re-execute the tests, the existing job with the published metrics just gets overwritten with new data. What I want is to attach metrics to an existing job without removing the data that already exists.
This would also allow me to run the specs in parallel, which works today except that the Prometheus data keeps being overwritten.
masterReporter.js
const PrometheusUtil = require("../utils/prometheusUtil");

class masterReporter {
  async suiteDone(result) {
    const prometheusUtil = new PrometheusUtil("http://localhost:9091", process.env.BASEURL);
    await prometheusUtil.setTestExecutionMetrics({
      "region": prometheusUtil.region,
      "spec_name": result.fullName,
      "tenant": process.env.BASEURL,
    });
  }
}

module.exports = masterReporter;
prometheusUtil.js
const client = require("prom-client");

class PrometheusUtil {
  constructor(baseUrl, environment) {
    this.environment = environment;
    this.baseUrl = baseUrl;
    this.name = environment.replace("https://", "").replace(".com", "")
      .replaceAll(".", "_").replaceAll("-", "_").replaceAll("/", "");
    this.testExecution = new client.Gauge({
      name: this.name,
      help: "Gauge for test execution results storing region info.",
      labelNames: ["region", "tenant", "spec_name", "failure_classification_value"],
    });
    this.gateway = new client.Pushgateway(baseUrl);
    this.setRegion(environment);
  }

  async setTestExecutionMetrics(value, number) {
    try {
      this.setRegion(this.environment);
      this.testExecution.set(value, number);
      this.gateway.pushAdd({jobName: this.name}, function(err, resp, body) {
        if (err) {
          console.log(err);
        }
        if (body) {
          console.log(body);
        }
      });
    } catch (error) {
      console.log(error);
    }
  }

  setRegion(baseUrl) {
    if (baseUrl.includes(".us.")) {
      this.region = "US";
    } else {
      console.log(`Sorry, region could not be defined for ${baseUrl}.`);
    }
    return this.region;
  }
}

module.exports = PrometheusUtil;
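For context on the overwriting: the Pushgateway stores metrics per group, keyed by jobName plus any grouping labels, and pushAdd (an HTTP POST) replaces same-named metrics within that group on every push. A minimal sketch of giving each execution its own group via prom-client's groupings option; the jobName and the run_id label here are illustrative, not from the code above:

const client = require("prom-client");

const gateway = new client.Pushgateway("http://localhost:9091");

// Each distinct groupings value creates a separate group on the Pushgateway,
// so pushes from one run (or one parallel worker) no longer replace another's.
gateway.pushAdd(
  { jobName: "test_execution", groupings: { run_id: String(Date.now()) } },
  function (err, resp, body) {
    if (err) {
      console.log(err);
    }
  }
);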

Related

Try/catch CATCH ERROR: ERROR STATUS: 403 (what is the error due to?)

I'm trying to deploy a smart contract, but before doing so I'm accessing ${basePath}/build/ipfsMetasGeneric/_ipfsMetasResponse.json and picking up metaData.metadata_uri to assign it to PREREVEAL_TOKEN_URI. It all works well until then (I can see console.log(PREREVEAL_TOKEN_URI) on the terminal), but the app stops working right after: it catches an error and logs the message CATCH ERROR: ERROR STATUS: 403.
I'm quite puzzled, since I don't understand why the try code runs OK but then throws that error. I've hit a roadblock and would appreciate any help.
Thank you.
Code follows:
const path = require("path");
const basePath = process.cwd();
const fs = require("fs");
const yesno = require('yesno');
const {
  fetchNoRetry,
} = require(`${basePath}/utils/functions/fetchWithRetry.js`);
let {
  CHAIN,
  GENERIC,
  CONTRACT_NAME,
  CONTRACT_SYMBOL,
  METADATA_UPDATABLE,
  ROYALTY_SHARE,
  ROYALTY_ADDRESS,
  MAX_SUPPLY,
  MINT_PRICE,
  TOKENS_PER_MINT,
  OWNER_ADDRESS,
  TREASURY_ADDRESS,
  PUBLIC_MINT_START_DATE,
  BASE_URI,
  PREREVEAL_TOKEN_URI,
  PRESALE_MINT_START_DATE,
  PRESALE_WHITELISTED_ADDRESSES
} = require(`${basePath}/src/config.js`);

const deployContract = async () => {
  const ok = await yesno({
    question: `Is all REQUIRED contract information correct in config.js? (y/n):`,
    default: null,
  });
  if (!ok) {
    console.log("Exiting...");
    process.exit(0);
  }
  if (GENERIC) {
    try {
      let jsonFile = fs.readFileSync(`${basePath}/build/ipfsMetasGeneric/_ipfsMetasResponse.json`);
      let metaData = JSON.parse(jsonFile);
      console.log(metaData);
      if (metaData.response === "OK") {
        if (!PREREVEAL_TOKEN_URI) {
          PREREVEAL_TOKEN_URI = metaData.metadata_uri;
          console.log(PREREVEAL_TOKEN_URI);
        }
      } else {
        console.log('There is an issue with the metadata upload. Please check the /build/ipfsMetasGeneric/_ipfsMetasResponse.json file for more information. Running "npm run upload_metadata" may fix this issue.');
      }
    } catch (err) {
      console.log(`/build/ipfsMetasGeneric/_ipfsMetasResponse.json file not found. Run "npm run upload_metadata" first.`);
      console.log(`Catch: ${err}`);
      process.exit(0);
    }
  } else {
    try {
      let jsonFile = fs.readFileSync(`${basePath}/build/ipfsMetas/_ipfsMetasResponse.json`);
      let metaData = JSON.parse(jsonFile);
      if (metaData.response === "OK") {
        if (!BASE_URI) {
          BASE_URI = metaData.metadata_directory_ipfs_uri;
        }
      } else {
        console.log('There is an issue with the metadata upload. Please check the /build/ipfsMetas/_ipfsMetasResponse.json file for more information. Running "npm run upload_metadata" may fix this issue.');
      }
    } catch (err) {
      console.log(`/build/ipfsMetas/_ipfsMetasResponse.json file not found. Run "npm run upload_metadata" first.`);
      process.exit(0);
    }
  }
  if (!fs.existsSync(path.join(`${basePath}/build`, "/contract"))) {
    fs.mkdirSync(path.join(`${basePath}/build`, "contract"));
  }
  try {
    const url = `https://api.nftport.xyz/v0/contracts/collections`;
    const contract = {
      chain: CHAIN.toLowerCase(),
      name: CONTRACT_NAME,
      symbol: CONTRACT_SYMBOL,
      owner_address: OWNER_ADDRESS,
      metadata_updatable: METADATA_UPDATABLE,
      royalties_share: ROYALTY_SHARE,
      royalties_address: ROYALTY_ADDRESS,
      max_supply: MAX_SUPPLY,
      mint_price: MINT_PRICE,
      tokens_per_mint: TOKENS_PER_MINT,
      treasury_address: TREASURY_ADDRESS,
      public_mint_start_date: PUBLIC_MINT_START_DATE,
      presale_mint_start_date: PRESALE_MINT_START_DATE,
      base_uri: BASE_URI,
      prereveal_token_uri: PREREVEAL_TOKEN_URI,
      presale_whitelisted_addresses: PRESALE_WHITELISTED_ADDRESSES
    };
    const options = {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify(contract),
    };
    const response = await fetchNoRetry(url, options);
    fs.writeFileSync(`${basePath}/build/contract/_deployContractResponse.json`, JSON.stringify(response, null, 2));
    if (response.response === "OK") {
      console.log(`Contract deployment started.`);
    } else {
      console.log(`Contract deployment failed.`);
    }
    console.log(`Check /build/contract/_deployContractResponse.json for more information. Run "npm run get_contract" to get the contract details.`);
  } catch (error) {
    console.log(`CATCH: Contract deployment failed`, `ERROR: ${error}`);
  }
};

deployContract();
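A 403 means the request reached the API but was refused as unauthorized, which is why the file reads in the try block succeed and the failure only surfaces at the fetch. With NFTPort that is typically a missing or invalid API key (or a key whose plan does not permit contract deployment); the fetchWithRetry.js helper this script requires may be where the key is attached, so check there first. A sketch of the request options with the key attached explicitly; NFTPORT_API_KEY is a hypothetical environment variable, not something from the script above:

// Sketch only: NFTPort authenticates with an API key in the Authorization
// header. A 403 usually means this header is missing or the key is invalid.
const options = {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: process.env.NFTPORT_API_KEY, // hypothetical env var
  },
  body: JSON.stringify(contract),
};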

How to log Stackdriver entries using the Stackdriver Logging Node.js lib @google-cloud/logging

I am using Protractor with Jasmine, and I integrated the Stackdriver lib @google-cloud/logging. When I run it, I get an entryId back, but I don't see the logs in Stackdriver. I debugged the issue and it seems to be a problem with async/await:
jasmine.getEnv().addReporter({
  specDone: function (result) {
    if (result.failedExpectations.length) {
      count++;
    }
  },
  suiteDone: function (result) {
    console.log(count);
    if (count != 0) {
      (async () => {
        console.log('1');
        await stackdriverLogEntry("Your GCP project", "protractor-log-data", "success: false");
        console.log('2');
      })();
    } else {
      (async () => {
        console.log('3');
        await stackdriverLogEntry("Your GCP project", "protractor-log-data", "success: true");
        console.log('4');
      })();
    }
  }
});
---------------------------------------------------------------------------
const { Logging } = require('@google-cloud/logging');

this.stackdriverLogEntry = async function (projectId, logName, text) {
  // Creates a client
  const logging = new Logging({ projectId: projectId });
  // Selects the log to write to
  const log = logging.log(logName);
  // The metadata associated with the entry
  const metadata = {
    resource: { type: 'global' },
  };
  // Prepares a log entry
  const entry = log.entry(metadata, text);
  console.log(JSON.stringify(entry));
  // Writes the log entry
  await log.write(entry);
  console.log(`metadata: ${JSON.stringify(metadata)}`);
  console.log(`Logged: ${text}`);
};
Any help here would be appreciated!
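One likely culprit: the async IIFEs in suiteDone are fire-and-forget, so nothing waits for log.write to finish and the process can exit before the entry is flushed. A sketch of returning the promise instead, assuming a Jasmine version recent enough (3.6+) to wait for promises returned from reporter methods; on older versions the same idea can run from an async afterAll:

jasmine.getEnv().addReporter({
  specDone: function (result) {
    if (result.failedExpectations.length) {
      count++;
    }
  },
  // Returning the promise lets the runner wait for the write to complete
  // instead of tearing the process down mid-flight.
  suiteDone: async function (result) {
    const status = count !== 0 ? "success: false" : "success: true";
    await stackdriverLogEntry("Your GCP project", "protractor-log-data", status);
  }
});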

NodeJS streams not awaiting async

I have run into an issue when testing Node.js streams. I can't seem to get my project to wait for the output from the Duplex and Transform streams after running stream.pipeline, even though it returns a promise. Perhaps I'm missing something, but I believe the script should wait for the function to return before continuing. The most important part of the project I'm trying to get working is:
// Message system is a duplex (read/write) stream
export class MessageSystem extends Duplex {
  constructor() {
    super({highWaterMark: 100, readableObjectMode: true, writableObjectMode: true});
  }

  public _read(size: number): void {
    var chunk = this.read();
    console.log(`Received ${chunk}`);
    this.push(chunk);
  }

  public _write(chunk: Message, encoding: string,
      callback: (error?: Error | null | undefined, chunk?: Message) => any): void {
    if (chunk.data === null) {
      callback(new Error("Message.Data is null"));
    } else {
      callback();
    }
  }
}

export class SystemStream extends Transform {
  public type: MessageType = MessageType.Global;
  public data: Array<Message> = new Array<Message>();

  constructor() {
    super({highWaterMark: 100, readableObjectMode: true, writableObjectMode: true});
  }

  public _transform(chunk: Message, encoding: string,
      callback: TransformCallback): void {
    if (chunk.single && (chunk.type === this.type || chunk.type === MessageType.Global)) {
      console.log(`Adding ${chunk}`);
      this.data.push(chunk);
      chunk = new Message(chunk.data, MessageType.Removed, true);
      callback(undefined, chunk); // TODO: Is this correct?
    } else if (chunk.type === this.type || chunk.type === MessageType.Global) { // Ours and global
      this.data.push(chunk);
      callback(undefined, chunk);
    } else { // Not ours
      callback(undefined, chunk);
    }
  }
}

export class EngineStream extends SystemStream {
  public type: MessageType = MessageType.Engine;
}

export class IOStream extends SystemStream {
  public type: MessageType = MessageType.IO;
}

let ms = new MessageSystem();
let es = new EngineStream();
let io = new IOStream();
let pipeline = promisify(Stream.pipeline);

async function start() {
  console.log("Running Message System");
  console.log("Writing new messages");
  ms.write(new Message("Hello"));
  ms.write(new Message("world!"));
  ms.write(new Message("Engine data", MessageType.Engine));
  ms.write(new Message("IO data", MessageType.IO));
  ms.write(new Message("Order matters in the pipe, even if Global", MessageType.Global, true));
  ms.end(new Message("Final message in the stream"));
  console.log("Piping data");
  await pipeline(
    ms,
    es,
    io
  );
}

Promise.all([start()]).then(() => {
  console.log(`Engine Messages to parse: ${es.data.toString()}`);
  console.log(`IO Messages to parse: ${io.data.toString()}`);
});
Output should look something like:
Running message system
Writing new messages
Hello
world!
Engine Data
IO Data
Order Matters in the pipe, even if Global
Engine messages to parse: Engine Data
IO messages to parse: IO Data
Any help would be greatly appreciated. Thanks!
Note: I posted this with my other account, not this one, which is my actual account. Apologies for the duplicate.
Edit: I initially had the repo private, but have made it public to help clarify the answer. More usage can be found on the feature/inital_system branch. It can be run with npm start when checked out.
Edit: I've put my custom streams here for verbosity. I think I'm on a better track than before, but I'm now getting a "null" object received down the pipeline.
As the documentation states, stream.pipeline is callback-based and doesn't return a promise.
It has a custom promisified version that can be accessed with util.promisify:
const pipeline = util.promisify(stream.pipeline);
...
await pipeline(...);
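For reference, a self-contained sketch of that pattern in plain Node.js; the streams here are illustrative, not the ones from the question:

const { Readable, Transform, Writable, pipeline } = require("stream");
const { promisify } = require("util");

const pipelineAsync = promisify(pipeline);

// Trivial Transform that uppercases string chunks.
const upper = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase());
  },
});

// Collecting sink, so the output can be inspected after the pipeline resolves.
const results = [];
const sink = new Writable({
  write(chunk, encoding, callback) {
    results.push(chunk.toString());
    callback();
  },
});

async function run() {
  await pipelineAsync(Readable.from(["hello", "world"]), upper, sink);
  console.log(results); // runs only after every stream finished: [ 'HELLO', 'WORLD' ]
}

run().catch(console.error);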
After some work over the past couple of days, I've found my answer. The issue was my implementation of the Duplex stream. I have since changed the MessageSystem to a Transform stream, which is easier to manage and work with.
Here is the product:
export class MessageSystem extends Transform {
  constructor() {
    super({highWaterMark: 100, readableObjectMode: true, writableObjectMode: true});
  }

  public _transform(chunk: Message, encoding: string,
      callback: TransformCallback): void {
    try {
      let output: string = chunk.toString();
      callback(undefined, output);
    } catch (err) {
      callback(err);
    }
  }
}
Thank you to @estus for the quick reply and check. Again, I found my answer in the API all along!
An archived repository of my findings can be found in this repository.

NodeJS: how to implement repository pattern

I would like to implement the repository pattern in my Node.js app, but I'm running into trouble with circular requires (I guess...).
How I'm trying to implement it:
PersonRepository class with methods: getAll, getById, create, update, delete
Person class with methods: init, createAccount, showRelations, addRelation
First of all: Is my repository pattern design correct?
My classes:
personRepository.js
const PersonModel = require('./model');
const Person = require('./person');

class PersonRepository {
  constructor() {
    this._persons = new Set();
  }

  getAll(cb) { // To Do: convert to promise
    let results = new Set();
    PersonModel.find({}, 'firstName lastName', (err, people) => {
      if (err) {
        console.error(err);
      }
      people.forEach((person, index) => {
        let foundPerson = new Person(person._id.toString(), person.firstName, person.lastName, person.email, person.birthday);
        results.add(foundPerson);
      });
      this._persons = results;
      if (cb) cb(this._persons);
    });
  }

  getById(id) {
    return PersonModel.findOne({ _id: id });
  }

  getByEmail(email) {
    throw new Error("Method not implemented");
  }

  create(person) {
    throw new Error("Method not implemented");
  }

  update(person) {
    throw new Error("Method not implemented");
  }

  delete(person) {
    throw new Error("Method not implemented");
  }
}

module.exports = new PersonRepository();
person.js
const PersonModel = require('./model');
const personRepository = require('./personRepository');

class Person {
  constructor(personId, first, last, email, birthday) {
    this._id = personId ? personId : undefined;
    this._firstName = first ? first : undefined;
    this._lastName = last ? last : undefined;
    this._email = email ? email : undefined;
    this._birthday = birthday ? new Date(birthday) : undefined;
    this._relations = new Map();
  }

  init() { // Get all data from database
    personRepository.getById(this._id)
      .then(console.log)
      .catch(console.error);
  }
}

module.exports = Person;
tests.js
console.log("--- GET ALL : results--- ");
personRepository.getAll( (persons) => {
for (let person of persons) {
person.loadAllData()
.then(() => {
console.log(person);
})
.catch((e) => {
console.log(e);
});
}
});
console.log("--- INIT : results--- ");
var personInit = new Person("59c18a9029ef510012312995");
console.log("before init");
console.log(personInit);
personInit.init();
console.log("after init");
console.log(personInit);
Problem:
When running the "Get all" test (without the INIT tests), it works.
When I add the INIT tests, I get the error:
personRepository.getById(this._id)
^
TypeError: personRepository.getById is not a function
at Person.init
How can I prevent this from happening?
- Change the way I require my modules?
- Change my design? (eg. don't require Person class in personRepository and just create a Set of ids in "getAll" instead of a Set of persons)
- Other ideas?
Thanks for helping me! I've been trying to solve this for hours now...
Solved it myself. The problem was a circular dependency between the two modules. It is fixed by moving the requires after module.exports.
Reference: https://coderwall.com/p/myzvmg/circular-dependencies-in-node-js
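For illustration, a sketch of that reordering applied to personRepository.js; person.js gets the same treatment, so the fix holds whichever module happens to load first:

// personRepository.js -- assign the export before requiring the collaborator.
const PersonModel = require('./model');

class PersonRepository {
  getAll(cb) {
    // ... as above; Person is initialized by the time this runs at runtime
  }
  getById(id) {
    return PersonModel.findOne({ _id: id });
  }
  // ... remaining methods unchanged ...
}

// Export first: when person.js circularly requires this file, it now sees
// the finished repository instance instead of a half-initialized {}.
module.exports = new PersonRepository();

// Required last, after module.exports has been assigned.
const Person = require('./person');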

Azure device twin add new reported property

I am following the Azure device twin tutorial https://learn.microsoft.com/en-us/azure/iot-hub/iot-hub-csharp-node-twin-how-to-configure
I can make this work to update an existing reported property. What I am not sure about is how to add a new reported property.
Specifically, the code snippet looks like:
var currentTelemetryConfig = twin.properties.reported.telemetryConfig;
currentTelemetryConfig.pendingConfig = twin.properties.desired.telemetryConfig;
currentTelemetryConfig.status = "Pending";

var patch = {
  telemetryConfig: currentTelemetryConfig
};

twin.properties.reported.update(patch, function(err) {
  if (err) {
    console.log(err);
  } else {
    console.log('success');
  }
});
I can easily understand how this works for updating the existing property (in this case telemetryConfig), but what would it look like if the change I was trying to make was to an entirely new property?
How would it work if I decide at some point that I want a new desired property called "favourite_colour": "blue"?
In the Azure backend I can add this, but how do I dynamically build the patch variable?
I tried this but it returned an error:
twin.properties.reported.update(twin.properties.desired, function(err) {
  if (err) {
    console.log('Could not report properties');
  } else {
    console.log('Success');
  }
});
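The likely reason that attempt errors is that twin.properties.desired also carries the system keys $metadata and $version (visible in the twin below), which are not valid in a reported-property patch. A sketch of one way to mirror the desired properties while filtering those out; this is illustrative, not from the tutorial:

// Copy the desired properties but drop the $-prefixed system keys
// ($metadata, $version) before reporting them back.
var patch = {};
Object.keys(twin.properties.desired).forEach(function (key) {
  if (key.charAt(0) !== '$') {
    patch[key] = twin.properties.desired[key];
  }
});

twin.properties.reported.update(patch, function (err) {
  console.log(err ? 'Could not report properties' : 'Success');
});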
This is what my twin looks like:
"properties": {
"desired": {
"active": true,
"pws": "xyz",
"$metadata": {
"$lastUpdated": "2018-03-27T18:21:57.010036Z",
"$lastUpdatedVersion": 5,
"active": {
"$lastUpdated": "2018-03-27T18:21:57.010036Z",
"$lastUpdatedVersion": 5
},
"pws": {
"$lastUpdated": "2018-03-27T18:21:57.010036Z",
"$lastUpdatedVersion": 5
}
},
"$version": 5
},
"reported": {
"telemetryConfig": 6,
"$metadata": {
"$lastUpdated": "2018-03-27T18:56:05.2445399Z",
"telemetryConfig": {
"$lastUpdated": "2018-03-27T18:56:05.2445399Z"
}
},
"$version": 5
}
}
}
I'm guessing you want to add a new reported property on the device side (favourite_colour).
I first recommend you read this sample on GitHub.
Mainly, the sample shows different ways to listen for twin updates at different levels: the topmost, where any update to the device twin triggers an event, or a specific property (favourite_colour).
I've edited the sample from the Microsoft document you provided to work with favourite_colour.
'use strict';

var Client = require('azure-iot-device').Client;
var Protocol = require('azure-iot-device-mqtt').Mqtt;

var connectionString = '{iot hub connection string}';
var client = Client.fromConnectionString(connectionString, Protocol);

var initConfigChange = function(twin, patch) {
  twin.properties.reported.update(patch, function(err) {
    if (err) {
      console.log('Could not report properties');
    } else {
      console.log('Reported pending config change: ' + JSON.stringify(patch));
      setTimeout(function() { completeConfigChange(twin, patch); }, 30000);
    }
  });
};

var completeConfigChange = function(twin, patch) {
  if (patch.telemetryConfig) {
    // Same as sample
  } else if (patch.favourite_colour) {
    var currentfavourite_colour = twin.properties.reported.favourite_colour;
    currentfavourite_colour.color = currentfavourite_colour.pendingConfig.color;
    currentfavourite_colour.status = "Success";
    delete currentfavourite_colour.pendingConfig;
    var patch = {
      favourite_colour: currentfavourite_colour
    };
    patch.favourite_colour.pendingConfig = null;
  }
  twin.properties.reported.update(patch, function(err) {
    if (err) {
      console.error('Error reporting properties: ' + err);
    } else {
      console.log('Reported completed config change: ' + JSON.stringify(patch));
    }
  });
};

client.open(function(err) {
  if (err) {
    console.error('could not open IotHub client');
  } else {
    client.getTwin(function(err, twin) {
      if (err) {
        console.error('could not get twin');
      } else {
        console.log('retrieved device twin');
        twin.properties.reported.favourite_colour = {
          color: "green"
        };
        twin.on('properties.desired', function(desiredChange) {
          console.log("received change: " + JSON.stringify(desiredChange));
          if (desiredChange.telemetryConfig) {
            // Same as sample
          } else if (desiredChange.favourite_colour) {
            var currentfavourite_colour = twin.properties.reported.favourite_colour;
            currentfavourite_colour.pendingConfig = twin.properties.desired.favourite_colour;
            currentfavourite_colour.status = "Pending Color";
            var patch = {
              favourite_colour: currentfavourite_colour
            };
            initConfigChange(twin, patch);
          }
        });
      }
    });
  }
});
What I did was use an if/else statement to check the reported property; as I mentioned, there are other ways to do this, so check the GitHub code I provided earlier. Once I have the matched reported property, I can update it in the same way as the existing sample.
