Read an environment-neutral configuration value from Node.js

I have an environment-neutral configuration value called EnableEmailCheck. If EnableEmailCheck is true, email validation takes place; if it is set to false, the application skips email validation.
I need to understand how to do something like that in Node.js:
What format should the configuration file be? I think I can keep the key/value pair in a .json file.
How can I read this configuration value from Node.js?
If the value in the configuration file is changed, is it required to restart the Node.js app server for the change to take effect?
This configuration value will be changed if there is an issue in the email-validation functionality; in that case it will be switched off, i.e. set to false, by an application engineer.

I would save it in JSON format and add a watch on the file.
config.json content:
{
    "env": {
        "EnableEmailCheck": true
    }
}
Loading config.json at start and reloading it when it changes:
var fs = require('fs');
var config = null;

function loadConfig(fileName) {
    var data = fs.readFileSync(fileName);
    return JSON.parse(data);
}

function useConfig() {
    if (config.env.EnableEmailCheck) {
        // do your stuff
    }
}

// load it at start
config = loadConfig("config.json");
useConfig();

// add a watch for changes
fs.watchFile("config.json", function () {
    config = loadConfig("config.json");
    useConfig();
});
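One caveat: fs.watchFile polls the file on an interval, and the callback can fire while the file is still being written, in which case JSON.parse will throw. A minimal hardening sketch, reusing the loadConfig and useConfig above, keeps the last good config if a reload fails:
fs.watchFile("config.json", function () {
    try {
        config = loadConfig("config.json");
    } catch (err) {
        // keep the previous config if the file is mid-write or invalid
        console.error("Could not reload config.json:", err.message);
    }
    useConfig();
});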

Related

Ensuring Azure keyvault secrets are loaded to config (node-config) at application startup

I have a Node.js application that uses node-config (https://www.npmjs.com/package/config) to load application configurations. What I'm trying to do is load secrets from Azure Key Vault into the config during startup, and ensure these are available before they are required (e.g. for connecting to databases).
I have no problem connecting to and retrieving values from the Key Vault, but I am struggling with the non-blocking nature of JS. The application startup process continues before the config values have finished loading (asynchronously) into the config.
One strategy could be to delay application launch to await the Key Vault secrets loading (see How to await in the main during start up in node?).
Another would be to not load them into the config at all, but instead modify the code wherever secrets are used to load them asynchronously via promises.
It seems like this will be a common problem, so I am hoping someone here can provide examples or a design pattern of the best way of ensuring remote Key Vault secrets are loaded during startup.
Thanks in advance for suggestions.
Rod
I have now successfully resolved this question.
A key point to note is setting process.env['ALLOW_CONFIG_MUTATIONS']=true;
Configs are immutable by default (they can't be changed after initial setting). Since the async configs resolve later, it's critical that you adjust this setting; otherwise you will see the asynchronous configs obtaining the correct values from the keystore, but when you check with config.get they will not have been set. This really should be added to the documentation at https://github.com/node-config/node-config/wiki/Asynchronous-Configurations
My solution: first, let's create a module for the Azure keystore client, azure-keyvault.mjs:
import { DefaultAzureCredential } from '@azure/identity';
import { SecretClient } from '@azure/keyvault-secrets';
// https://learn.microsoft.com/en-us/azure/developer/javascript/how-to/with-web-app/use-secret-environment-variables
if (
    !process.env.AZURE_TENANT_ID ||
    !process.env.AZURE_CLIENT_ID ||
    !process.env.AZURE_CLIENT_SECRET ||
    !process.env.KEY_VAULT_NAME
) {
    throw Error('azure-keyvault - required environment vars not configured');
}

const credential = new DefaultAzureCredential();

// Build the URL to reach your key vault
const url = `https://${process.env.KEY_VAULT_NAME}.vault.azure.net`;

// Create client to connect to service
const client = new SecretClient(url, credential);

export default client;
In the config files (using node-config):
process.env['ALLOW_CONFIG_MUTATIONS'] = true;
const asyncConfig = require('config/async').asyncConfig;
const defer = require('config/defer').deferConfig;
const debug = require('debug')('app:config:default');
// example usage: debug(`\`CASSANDRA_HOSTS\` environment variable is ${databaseHosts}`);

async function getSecret(secretName) {
    const client = (await import('../azure/azure-keyvault.mjs')).default;
    const secret = await client.getSecret(secretName);
    // dev: debug(`Get Async config: ${secretName} : ${secret.value}`);
    return secret.value;
}

module.exports = {
    // note: defer just calculates this config at the end of config generation
    isProduction: defer(cfg => cfg.env === 'production'),
    database: {
        // use asyncConfig to obtain a promise for the secret
        username: asyncConfig(getSecret('DATABASE-USERNAME')),
        password: asyncConfig(getSecret('DATABASE-PASSWORD'))
    },
    ...
}
Finally, modify the application startup to resolve the async configs BEFORE config.get is called:
server.js
const { resolveAsyncConfigs } = require('config/async');
const config = require('config');
const P = require('bluebird');
...
function initServer() {
    return resolveAsyncConfigs(config).then(() => {
        // if you want to confirm the async configs have loaded,
        // try outputting one of them to the console at this point
        console.log('db username: ' + config.get("database.username"));
        // now proceed with any operations that will require configs
        const client = require('./init/database.js');
        // continue with bootstrapping (whatever your code is);
        // in our case let's proceed once the db is ready
        return client.promiseToBeReady().then(function () {
            return new P.Promise(_pBootstrap);
        });
    });
}
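As a usage sketch (the entry point shown here is assumed, and _pBootstrap is the bootstrap function referenced above), the server then boots only after the async configs resolve:
initServer()
    .then(function () {
        console.log('server started');
    })
    .catch(function (err) {
        console.error('startup failed:', err);
        process.exit(1);
    });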
I hope this helps others wishing to use config/async with remote keystores such as Azure. Comments or improvements on the above are welcome.
~ Rod

Two .env environments with different passwords

In my app, I am using two APIs that require different credentials. I am storing each set in a .env file and read them using process.env. The .env and config.js files for each API are in different directories.
The first config.js:
const dotenv = require('dotenv');
const cfg = {};
dotenv.config({path: '.env'});
cfg.port = process.env.PORT;
cfg.apiKey = process.env.apiKey;
cfg.authDomain = process.env.authDomain;
cfg.databaseURL = process.env.databaseURL;
cfg.projectId = process.env.projectId;
cfg.storageBucket = process.env.storageBucket;
cfg.messagingSenderId = process.env.messagingSenderId;
module.exports = cfg;
The second config.js
const dotenv = require('dotenv');
const cfg = {};
dotenv.config({path: '.env'});
cfg.port = process.env.PORT;
cfg.accountSid = process.env.TWILIO_ACCOUNT_SID;
cfg.authToken = process.env.TWILIO_AUTH_TOKEN;
cfg.twimlAppSid = process.env.TWILIO_TWIML_APP_SID;
cfg.callerId = process.env.TWILIO_CALLER_ID;
module.exports = cfg;
I configured both .env files the same way. But apparently the second config.js is not able to read credentials such as TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN, etc., which led me to believe that the second .env file has to be configured differently from the first one.
How do I load the two sets of credentials into one environment? Or do I have to load them into different environments?
Thanks for your time.
For your first question: yes, you can load both sets of credentials in one environment, as the keys are different for each one; you just need to import both config.js files in your 'main' code.
If what you need is to use two sets of credentials in the same API, you could put both credentials in the same JSON, as follows:
{
    "firstSet": {
        "TWILIO_ACCOUNT_SID": "value",
        "TWILIO_AUTH_TOKEN": "value",
        "TWILIO_TWIML_APP_SID": "value",
        "TWILIO_CALLER_ID": "value"
    },
    "secondSet": {
        "TWILIO_ACCOUNT_SID": "value2",
        "TWILIO_AUTH_TOKEN": "value2",
        "TWILIO_TWIML_APP_SID": "value2",
        "TWILIO_CALLER_ID": "value2"
    }
}
With your two sets of credentials defined, you can put the logic for using one, the other, or both into your config.js, depending on your needs, and export that to your application, where you pick the credentials to use for each API from the config.
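A minimal sketch of that idea, with hypothetical directory names: dotenv.config accepts an explicit path, and by default it will not overwrite variables that are already set, so both files can be loaded into one environment as long as their key names differ:
const path = require('path');
const dotenv = require('dotenv');

// load each .env file by an explicit path so the working directory doesn't matter
dotenv.config({ path: path.join(__dirname, 'firebase', '.env') }); // hypothetical location
dotenv.config({ path: path.join(__dirname, 'twilio', '.env') });   // hypothetical location

module.exports = {
    firstSet: {
        apiKey: process.env.apiKey,
        projectId: process.env.projectId
    },
    secondSet: {
        accountSid: process.env.TWILIO_ACCOUNT_SID,
        authToken: process.env.TWILIO_AUTH_TOKEN
    }
};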

How to copy postman history from chrome app to native app?

Since Google is ending support for Chrome apps, Postman recently deprecated their Chrome app and introduced a native app.
I am in the process of switching from the Postman Chrome app to the native app.
How do I copy the history from my Chrome app to the native app? Sync doesn't work.
There is an option to export data, but that doesn't export the history.
Any ideas?
So while searching for this I came across this post, which is very helpful. Thanks to stephan for sharing this code.
Follow these steps to copy your history from the Chrome app to the native app.
//In Chrome DevTools on the background page of the Postman extension...
//A handy helper method that lets you save data from the console to a file
(function (console) {
    console.save = function (data, filename) {
        if (!data) {
            console.error('Console.save: No data');
            return;
        }
        if (!filename) filename = 'console.json';
        if (typeof data === "object") {
            data = JSON.stringify(data, undefined, 4);
        }
        var blob = new Blob([data], {type: 'text/json'}),
            e = document.createEvent('MouseEvents'),
            a = document.createElement('a');
        a.download = filename;
        a.href = window.URL.createObjectURL(blob);
        a.dataset.downloadurl = ['text/json', a.download, a.href].join(':');
        e.initMouseEvent('click', true, false, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null);
        a.dispatchEvent(e);
    };
})(console);

//Common error reporting function
function reportError() {
    console.error('Oops, something went wrong :-(');
}

//Open the database
var dbReq = indexedDB.open('postman');
dbReq.onerror = reportError;
dbReq.onsuccess = function () {
    var db = dbReq.result;
    //Query for all the saved requests
    var requestReq = db.transaction(["requests"], "readwrite").objectStore('requests').getAll();
    requestReq.onerror = reportError;
    requestReq.onsuccess = function () {
        var requests = requestReq.result;
        //Dump them to a file
        console.save(JSON.stringify(requests), 'postman-requests-export.json');
        console.info('Your existing requests have been exported to a file and downloaded to your computer. You will need to copy the contents of that file for the next part');
    };
};
//Switch to the standalone app and open the dev console
//Paste the text from the exported file here (overwriting the empty array)
var data = [];
//Enter the guid/id of the workspace to import into. Run the script with this value blank if you need some help
//finding this value. Also, be sure you don't end up with extra quotes if you copy/paste the value
var ws = '';
//Common error reporting function
function reportError() {
    console.error('Oops, something went wrong :-(');
}
//Open the database
var dbReq = indexedDB.open('postman-app');
dbReq.onerror = reportError;
dbReq.onsuccess = function () {
    var db = dbReq.result;
    if (!data.length) {
        console.error('You did not pass in any exported requests so there is nothing for this script to do. Perhaps you forgot to paste your request data?');
        return;
    }
    if (!ws) {
        var wsReq = db.transaction(["workspace"], "readwrite").objectStore('workspace').getAll();
        wsReq.onerror = reportError;
        wsReq.onsuccess = function () {
            console.error('You did not specify a workspace. Below is a dump of all your workspaces. Grab the guid (ID field) from the workspace you want these requests to show up under and include it at the top of this script');
            console.log(wsReq.result);
        };
        return;
    }
    data.forEach(function (a) {
        a.workspace = ws;
        db.transaction(["history"], "readwrite").objectStore('history').add(a);
    });
    console.log('Requests have been imported. Give it a second to finish up and then restart Postman');
};
//Restart Postman
Note:
1. To use DevTools on your Chrome app you will need to enable the following flag in chrome://flags.
2. Then just right-click and inspect on your Chrome Postman app.
3. To use DevTools on your native app press Ctrl+Shift+I (View -> Show DevTools).

Bluemix Nodejs FileTransferStep, documentation

I am a newbie to Bluemix. I downloaded the client libraries, but I don't see API docs for JavaScript. Where do I find those? And how do I go about calling the various JavaScript functions that are neither in the Node.js client libs nor documented online?
Regarding the Workload Scheduler service call: you have to edit your package.json file to add a dependency on the iws-light module using an https link, as follows:
"dependencies": {
"iws-light": "https://start.wa.ibmserviceengage.com/bluemix/iws-light.tgz"
}
Then open your shell, go to the root of your app, and run:
npm install
After this you can require the Workload Scheduler service in your application:
var ws = require("iws-light");
and create a connection to Bluemix:
//retrieve service URL from Bluemix VCAP_SERVICES...
var wsConn;
if (process.env.VCAP_SERVICES) {
    wsConn = ws.createConnection();
} else {
    //...or set it on your own (if you're working locally)
    var url = "your workload scheduler url";
    wsConn = ws.createConnection(url);
}

//retrieve cloud agent
var agentName;
wsConn.getCloudAgent(function (data) {
    agentName = data;
});

//set your timezone
wsConn.setTimezone({timezone: "Europe/Rome"}, function (err, data) {
    if (err) {
        console.log(err);
    }
});
Now you're ready to use the lib to create a process and add a FileTransferStep to it:
//create a process
var process = new ws.WAProcess("ProcessName", "This process transfers a file every day from a local to a remote server");

//supported operations are ws.steps.FileTransferStep.OperationDownload or ws.steps.FileTransferStep.OperationUpload
var operation = ws.steps.FileTransferStep.OperationUpload;

//create FileTransferStep
var ftStep = new ws.steps.FileTransferStep(agentName, operation);

//supported protocols are AUTO, FTP, FTPS, SSH, WINDOWS
ftStep.setProtocol(ws.steps.FileTransferStep.ProtocolAuto);

//set local file
var local = {
    path: "local file path",
    user: "local username",
    password: "local password"
};
ftStep.setLocalFile(local.path, local.user, local.password);

//set remote file
var remote = {
    path: "remote file path",
    user: "remote username",
    password: "remote password",
    server: "remote server"
};
ftStep.setRemoteFile(remote.server, remote.path, remote.user, remote.password);

//the binary mode flag: true if it uses FTP binary mode
var binaryMode = true;
//the passive mode flag: true if it uses FTP passive mode
var passiveMode = true;
//set timeout
var timeout = 5;
ftStep.setMode(binaryMode, passiveMode, timeout);

//add FileTransferStep to the process
process.addStep(ftStep);

//create a trigger
var trigger = new ws.TriggerFactory.everyDayAt(1, 7, 30);
//add Trigger to the process
process.addTrigger(trigger);
process.tasklibraryid = "your task library id";

//create and enable process
wsConn.createAndEnableProcess(process, function (err, data) {
    if (err) {
        console.log(err);
    } else {
        console.log("process created and enabled");
    }
});
The code above creates a process using a file transfer step from Node.js code; however, I'm not sure if this is what you actually need.
If you can explain the scenario you are trying to implement, I can be more precise about the best way to implement it using the Workload Scheduler service.
Regards,
Gabriele

Change connection used on runtime

I hope you can help me (us). I'm working on an API project which has two databases:
Production DB : api.myapp.fr
Testing DB : test.api.myapp.fr
These two databases are writable by the user.
When a user calls our API, they can set whichever authorization header they need. For example:
Authorization: s_0
Will perform operations on api.myapp.fr and
Authorization: s_t_0
Will perform operations on test.api.myapp.fr.
My question is: how can I do that with Sails?
Currently I have a policy which checks whether the user is using a production key or a testing key, and I override the default models with the testing ones, like this:
if (!is_production) {
    req.session.isProd = false;
    req.session.logs.environment = "test";
    User = UserTest;
    Payment = PaymentTest;
    PayzenStatus = PayzenStatusTest;
    Transaction = TransactionTest;
    Card = CardTest;
    Doc = DocTest;
}
But you can see the problem: if a user makes a test request and then a production request, the models are still the test ones...
I use my models in services and policies, therefore I can't do
req.models = {};
// If not in production, use the test models
if (!is_production) {
    req.session.isProd = false;
    req.session.logs.environment = "test";
    req.models.User = UserTest;
    req.models.Payment = PaymentTest;
    req.models.PayzenStatus = PayzenStatusTest;
    req.models.Transaction = TransactionTest;
    req.models.Card = CardTest;
    req.models.Doc = DocTest;
}
// Otherwise use the production models
else {
    req.models.User = User;
    req.models.Payment = Payment;
    req.models.PayzenStatus = PayzenStatus;
    req.models.Transaction = Transaction;
    req.models.Card = Card;
    req.models.Doc = Doc;
}
If you have any idea how to achieve this (whatever the approach, we can still make deep changes to our code), I would be really happy to hear it.
Thanks
There are two different ways of doing this.
First, you could set an environment variable on your production host and check that environment variable to see if you are running in prod. If you are, use the URI of the production database.
Secondly, which is probably the better way of doing this, create a config.js file that allows you to read environment variables. What I do for all my apps is set environment variables for database connection info and API keys. When running locally/testing I have some defaults in my app, but when the environment variables are set they are read and used. So set the environment variables in production to point to your production databases.
The config.js file I'm posting below contains references to VCAP, which assumes you are running on Cloud Foundry.
config.js
var VCAP_SERVICES = process.env["VCAP_SERVICES"],
    vcapServices;
if (VCAP_SERVICES) {
    vcapServices = JSON.parse(VCAP_SERVICES);
}

function getEnv(propName, defaultValue) {
    if (process.env[propName]) {
        return process.env[propName];
    } else {
        return defaultValue;
    }
}

module.exports = function () {
    return {
        getEnv: getEnv,
        couchDbURL: function () {
            // Default to a local couch installation for development
            if (VCAP_SERVICES) {
                return vcapServices["cloudantNoSQLDB"][0].credentials.url;
            } else {
                return "http://localhost:5984";
            }
        },
        couchDbName: function () {
            return getEnv("COUCHDB_NAME", "mydb");
        }
    };
};
app.js
var config = require("./config")();
console.log(config.couchDbURL());
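Applied to your question, a small sketch (the DATABASE_URL variable name here is hypothetical) of how the same getEnv helper could pick the database per deployment:
// falls back to the test database unless DATABASE_URL is set on the production host
var dbURL = config.getEnv("DATABASE_URL", "http://test.api.myapp.fr");
console.log("Using database: " + dbURL);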
