How to get env variable and save to parameter Store? - node.js

i'm using serverless framework(aws).
From cloudFormation when creating a database, writes to the env. variable endpoint of the database.
part of my serverless.yml file
environment:
DATABASE_HOST:
"Fn::GetAtt": [ServerlessRDS, Endpoint.Address]
This variable is available from the lambda level as it is already deployed on aws. But I want to have access to this variable from locally. I came across the idea that I would write this variable to the parameter store (aws systems manager).
So I attached the script to my serverless.yml file (using serverless-scriptable-plugin).
scriptHooks, part of my serverless.yml file
:
after:aws:deploy:finalize:cleanup:
- scripts/update-dbEndopint.js
Here's the script. Nothing special, it writes the environment variable process.env.DATABASE_HOST to the parameter store.
const aws = require('aws-sdk');
const ssm = new aws.SSM();
(async () => {
try {
const params = {
Name: `${process.env.AWS_SERVICE_NAME}-DATABASE_HOST-${
process.env.AWS_STAGE
}`,
Value: `${process.env.DATABASE_HOST}`,
Type: 'String',
Overwrite: true,
};
await ssm.putParameter(params).promise();
log(`[DATABASE_HOST]: ${process.env.DATABASE_HOST} `);
log('Task done.');
} catch (e) {
throw e;
}
})();
But after taking deploy the variable is undefined.
This is because the variable value is only available later.
Do you know how I can get the database endpoint into the parameter store?

Your serverless.yml will set the environment variable for the function but not for the process.env of the scripts run by serverless-scriptable-plugin.
You'll need to save it as an output for your stack using something similar to this:
Resources:
ServerlessRDS:
....
Outputs:
ServerlessRDSEndpointAddress:
Value:
"Fn::GetAtt": [ServerlessRDS, Endpoint.Address]
Then in your script extract that value from the stack something like this:
const fs = require('fs');
const yaml = require('js-yaml');
const aws = require('aws-sdk');
const ssm = new aws.SSM();
const getStackName = (stage) => {
const content = fs.readFileSync('serverless.yml');
return `${yaml.safeLoad(content).service}-${stage}`;
};
const getStackOutputs = async (provider, stackName, stage, region) => {
const result = await provider.request(
'CloudFormation',
'describeStacks',
{ StackName: stackName },
stage,
region,
);
const outputsArray = result.Stacks[0].Outputs;
let outputs = {};
for (let i = 0; i < outputsArray.length; i++) {
outputs[outputsArray[i].OutputKey] = outputsArray[i].OutputValue;
}
return outputs;
};
(async () => {
try {
const provider = serverless.getProvider('aws');
const { stage, region } = options;
const { ServerlessRDSEndpointAddress } = await getStackOutputs(provider, getStackName(stage), stage, region)
const params = {
Name: `${process.env.AWS_SERVICE_NAME}-DATABASE_HOST-${
process.env.AWS_STAGE
}`,
Value: `${ServerlessRDSEndpointAddress}`,
Type: 'String',
Overwrite: true,
};
await ssm.putParameter(params).promise();
log(`[DATABASE_HOST]: ${ServerlessRDSEndpointAddress} `);
log('Task done.');
} catch (e) {
throw e;
}
})();
I'm not sure how saving the value in parameter store will allow you to access it locally though.
If you want to invoke the function locally you can use:
serverless invoke local -f functionName -e DATABASE_HOST=<DATABASE_HOST>
Or use dotenv for any other JavaScript code

Related

AWS Timestream - SDK V3 Nodejs, TimestreamWriteClient.send() - TypeError: command.resolveMiddleware is not a function. How to solve this?

I have the following lambda function in NodeJs 14.x using AWS SDK V3 for a timestream insertion process:
'use strict'
// https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-timestream-write/index.html
const { TimestreamWriteClient } = require("@aws-sdk/client-timestream-write")
const client = new TimestreamWriteClient({ region: process.env.region })
module.exports.fnPostElectricityTimestream = async event => {
try {
console.log('🚀 START fnPostElectricityTimestream')
const jsonBody = event
const topic = jsonBody.topic
const arrTopic = topic.split('/')
let dbName = arrTopic[4]
dbName = 'smaj56g' //Test
const currentTime = Date.now().toString() // Unix time in milliseconds get jsonBody.e_timestamp
const e_timestamp = (jsonBody.e_timestamp)*1000
const dimensions = [{
'Name': 'n',
'Value': 'v'
}]
const e_ch_1 = {
'Dimensions':dimensions,
'MeasureName': 'e_ch_1',
'MeasureValue': '[1,2,3]',
'MeasureValueType': 'VARCHAR',
'Time': currentTime
}
const records = [e_ch_1]
const params = {
DatabaseName: dbName,
TableName:'e_ch_1_v_w',
Records: records
}
const data = await client.send(params);
console.log('data', data)
return {
message: ''
}
} catch (error) {
console.log('🚀 fnPostElectricityTimestream - error.stack:', error.stack)
return {
message: error.stack
}
}
}
When I run the lambda this is the message I am getting:
2022-08-12T14:58:39.496Z e578a391-06b4-48a9-9f9d-9440a373c19e INFO 🚀 fnPostElectricityTimestream - error.stack: TypeError: command.resolveMiddleware is not a function
at TimestreamWriteClient.send (/var/task/node_modules/@aws-sdk/smithy-client/dist-cjs/client.js:13:33)
at Runtime.module.exports.fnPostElectricityTimestream [as handler] (/var/task/src/ElectricityTimestream/fnPostElectricityTimestream.js:38:31)
at Runtime.handleOnceNonStreaming (/var/runtime/Runtime.js:73:25)
There is something with const data = await client.send(params).
I am following the async/await code in this documentation.
How to solve this issue?
Your current insertion code is wrong. In order to write the records in the TimeStream, you need to use the WriteRecordsCommand command. Refer to the doc for a better understanding. Sample code:
import { TimestreamWriteClient, WriteRecordsCommand } from "@aws-sdk/client-timestream-write";
const client = new TimestreamWriteClient({ region: "REGION" }); //your AWS region
const params = {
DatabaseName: dbName, //your database
TableName: tableName, //your table name
Records: records //records you want to insert
}
const command = new WriteRecordsCommand(params);
const data = await client.send(command);
you need to create a command before calling send.
For example:
import { TimestreamWriteClient, CreateDatabaseCommand } from "@aws-sdk/client-timestream-write";
const params = {
DatabaseName: dbName,
TableName:'e_ch_1_v_w',
Records: records
}
const command = new CreateDatabaseCommand(params);
const data = await client.send(command);

How to solve , Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information

I am implementing google automl in NodeJS to predict the image level. I have created model, level and uploaded images manually. Now I want to predict level of an image using NodeJS.
I wrote a function but always getting the below error,
Error: Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information
the code is below-
async function addfile() {
console.log("add file called")
const projectId = "project-name";
const computeRegion = "us-central1";
const modelId = "modelid";
const filePath = "./src/assets/uploads/micro.jpeg";
const scoreThreshold = "0.9";
const client = new automl.PredictionServiceClient();
const modelFullId = client.modelPath(projectId, computeRegion, modelId);
try {
const content = fs.readFileSync(filePath, 'base64');
const params = {};
if (scoreThreshold) {
params.score_threshold = scoreThreshold;
}
const payload = {};
payload.image = { imageBytes: content };
console.log("try block is running")
var [response] = await client.predict({
name: modelFullId,
payload: payload,
params: params,
keyFilename: "./src/assets/uploads/service_account_key.json"
});
console.log('Prediction results: ' + JSON.stringify(response));
response.payload.forEach(result => {
console.log('Predicted class name: ${result.displayName}');
console.log('Predicted class score: ${result.classification.score}');
});
} catch (exception) {
console.log("exception occur = " + exception);
}
}
Any solution for that will be appreciated.
As mentioned by @Rakesh Saini, this error occurs when environment variables are not set or are missing. The environment can be set by adding application credentials in the project and adding other required environment variables like Project ID and location.

AWS Transcribe client does not provide an export named 'transcribeClient'

I'm trying to integrate AWS Transcribe in my Node.JS application. AWS S3 and Polly works fine, but AWS Transcribe does not. I'm using the example code of AWS.
When I want to start a transcribe job by the AWS example code I receive the following error: The requested module './libs/transcribeClient.js' does not provide an export named 'transcribeClient'
That was also the only file where I received the error that require is not defined. I wonder why it only happens with AWS Transcribe but not with the other services as well? I'm also able to start a transcribe job via the AWS CLI.
That AWS Transcribe code does not work - transcribeClient.js:
const AWS_BUCKET_NAME="X"
const AWS_REGION="eu-central-1"
const AWS_ACCESS_KEY="XXX"
const AWS_SECRET_KEY="XXX"
// snippet-start:[transcribe.JavaScript.createclientv3]
const { TranscribeClient } = require('@aws-sdk/client-transcribe');
// Create an Amazon Transcribe service client object.
const transcribeClient = new TranscribeClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { transcribeClient };
That AWS Polly code works - pollyClient.js:
const AWS_BUCKET_NAME="X"
const AWS_REGION="eu-central-1"
const AWS_ACCESS_KEY="XXX"
const AWS_SECRET_KEY="XXX"
// snippet-start:[polly.JavaScript.createclientv3]
const { PollyClient } = require("@aws-sdk/client-polly");
// Create an Amazon Polly service client object.
const pollyClient = new PollyClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY});
module.exports = { pollyClient };
I'm looking forward to reading from you! Thanks!
I solved it. Now it's working with my Node.js 12 environment.
package.json
I changed "type": "module" to "type": "commonjs".
transcribeClient.js needs to look like this:
Here I changed export to module.exports.
const { TranscribeClient } = require("@aws-sdk/client-transcribe");
const transcribeClient = new TranscribeClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY});
module.exports = { transcribeClient };
transcribe_create_job.js needs to look like this:
Here I changed the import statement to require.
const { StartTranscriptionJobCommand } = require("@aws-sdk/client-transcribe");
const { transcribeClient } = require("./libs/transcribeClient.js")
// Set the parameters
const params = {
TranscriptionJobName: "test123",
LanguageCode: "en-GB", // For example, 'en-US'
MediaFormat: "webm", // For example, 'wav'
Media: {
MediaFileUri: "https://x.s3.eu-central-1.amazonaws.com/dlpasiddi.webm",
},
};
const run = async () => {
try {
const data = await transcribeClient.send(
new StartTranscriptionJobCommand(params)
);
console.log("Success - put", data);
return data; // For unit tests.
} catch (err) {
console.log("Error", err);
}
};
run();

How to initialize cosmosDB databases and containers in azure functions using the node sdk?

In my current implementation I have database initialization code that gets run on every function request, which is bad for performance reasons.
How to check if a container exists in cosmos DB using the node sdk?
It's best to create static connections on app initialization as described here:
https://learn.microsoft.com/en-us/azure/azure-functions/manage-connections#static-clients
but I'm having a bit of trouble with the initialization. Here is how the documentation describes it in javascript.
const cosmos = require('@azure/cosmos');
const endpoint = process.env.COSMOS_API_URL;
const key = process.env.COSMOS_API_KEY;
const { CosmosClient } = cosmos;
const client = new CosmosClient({ endpoint, key });
// All function invocations also reference the same database and container.
const container = client.database("MyDatabaseName").container("MyContainerName");
module.exports = async function (context) {
const { resources: itemArray } = await container.items.readAll().fetchAll();
context.log(itemArray);
}
The issues/questions I'm having are how do I do error handling if the database does not exist or if the container does not exist.
Do I need to separate my "createIfNotExists" logic from the functions app entirely?
If I try to run the createIfNotExists code on startup, I'm not able to do top level awaits and I have been getting promise rejections errors.
I'd like to do something like the following:
try
{
const cosmos = require('@azure/cosmos');
const endpoint = process.env.COSMOS_API_URL;
const key = process.env.COSMOS_API_KEY;
const { CosmosClient } = cosmos;
const client = new CosmosClient({ endpoint, key });
const db = await client.databases.createIfNotExists({id: "databaseId"});
const container1 = await db.container.createIfNotExists(containerDefinition1)
const container2 = await db.container.createIfNotExists(containerDefinition2)
}
catch(err)
{
handleError(err)
}
...
module.exports = async function (context) {
...
const {resources: items } = await container1.items.query(querySpec).fetchAll();
}
What's the best way to implement this? Any help is appreciated!
I think you need to handle each individually, for example
async onApplicationLoad() {
// Create DB if it doesn't exist
try {
await this.client.databases.createIfNotExists({ id: this.mDBId });
} catch (error) {
Logger.log(`Error creating database: ${error}`, 'AzureCosmosDbService');
}
// Create the containers if they don't exist
try {
await this.client.database(this.mDBId).containers.createIfNotExists({ id: this.mNoteContainerId });
await this.client.database(this.mDBId).containers.createIfNotExists({ id: this.mReportedNotesContainerId });
const iterator = this.client.database(this.mDBId).containers.readAll();
const { resources: containersList } = await iterator.fetchAll();
} catch (error) {
Logger.log(`Error creating containers: ${error}`, 'AzureCosmosDbService');
}
return;
}

Retrieve AWS ssm parameter in bulk

How can I retrieve parameters from AWS Systems Manager (parameter store) in bulk (or more than one parameter) at a time? Using aws-sdk, following is the Node.js code I have written to retrieve SSM parameter from parameter store:
const ssm = new (require('aws-sdk/clients/ssm'))()
const getSSMKey = async params => {
const {Parameter: {Value: APIKey}} = await ssm.getParameter(params).promise()
return APIKey
}
const [param1, param2, param3] = await Promise.all([
getSSMKey({ Name: '/data/param/PARAM1', WithDecryption: true }),
getSSMKey({ Name: '/data/param/PARAM2', WithDecryption: true }),
getSSMKey({ Name: '/data/param/PARAM3', WithDecryption: true })
])
console.log(param1, param2, param3)
But with this code, I am sending 3 request for getting 3 parameters which is inefficient in case of large number of parameters. Is there any way to retrieve more than one parameters in one request. if ssm.getParameters() is the method to do that then please give an example (particularly parameter to that method). I tried but I receive nothing.
According to the AWS document, GetParameter gets the value for one parameter, whereas GetParameters gets the value for multiple.
Their usages are very similar too. When using GetParameters to get multiple values, pass in multiple names as a list for Names, instead of passing a single name as string for Name.
Code sample, to get parameters named "foo" and "bar", in "us-west-1" region:
const AWS = require('aws-sdk');
AWS.config.update({ region: "us-west-1" });
const SSM = require('aws-sdk/clients/ssm');
const ssm = new SSM()
const query = {
"Names": ["foo", "bar"],
"WithDecryption": true
}
let param = ssm.getParameters(query, (err, data) => {
console.log('error = %o', err);
console.log('raw data = %o', data);
})
At last it worked for me. Following is the code:
const ssmConfig = async () => {
const data = await ssm.getParameters({ Names: ['/data/param/PARAM1', '/data/param/PARAM2', '/data/param/PARAM3'],
WithDecryption: true }).promise()
const config = {}
for (const i of data.Parameters) {
if (i.Name === '/data/param/PARAM1') {
config.param1 = i.Value
}
if (i.Name === '/data/param/PARAM2') {
config.param2 = i.Value
}
if (i.Name === '/data/param/PARAM3') {
config.param3 = i.Value
}
}
return config
}
This is what I did to retrieve all the parameters from a specific path.
**your SSM function client :**
'use strict';
const SSM = require('aws-sdk/clients/ssm');
let ssmclient;
module.exports.init = () => {
const region = process.env.REGION === undefined ? 'us-east-1' : process.env.REGION ;
ssmclient = new SSM({region: region});
}
module.exports.getParameters = async (path) => {
try {
let params = {
Path: path,
WithDecryption: true
};
let allParameters = [];
let data = await ssmclient.getParametersByPath(params).promise();
allParameters.push.apply(allParameters, data.Parameters);
while(data.NextToken) {
params.NextToken = data.NextToken;
data = await ssmclient.getParametersByPath(params).promise();
allParameters.push.apply(allParameters, data.Parameters);
}
return allParameters;
} catch (err) {
return Promise.reject(err);
}
}
calling this client:
const ssm = require("yourssmclient");
ssm.init();
// you call only once to retrieve everything which falls under /data/param
const parameters = await ssm.getParameters("/data/param");
//from here you can fetch parameters['what ever needed'].
You essentially have two options to get parameters in bulk.
One is the method provided by @user1032613, but the other is to use the built-in function getParametersByPath().
A Lambda code example in node with all three methods can be seen below. Each method can take different params, for instance with the path you can make filters, etc. to get the exact values you need, see the documentation.
'use strict';
const AWS = require('aws-sdk');
const SSM = new AWS.SSM();
exports.handler = async (event) => {
//Example get single item
const singleParam = { Name: 'myParam' };
const getSingleParam = await SSM.getParameter(singleParam).promise();
//Example: Get Multiple values
const multiParams = {
Names: [ 'myParam1', 'myParam2', 'myParam3' ],
WithDecryption: true
};
const getMultiParams = await SSM.getParameters(multiParams).promise();
//Example: Get all values in a path
const pathParams = { Path: '/myPath/', WithDecryption: true };
const getPathParams = await SSM.getParametersByPath(pathParams).promise();
return 'Success';
};
Remember that you can also use environment variables. For example, you could write singleParam like this:
const singleParam = { Name: process.env.PARAM }
That way you can have code that extracts code from DEV, PROD, etc. depending on the stage.

Resources