Node.js Async Module Require

I have a question about an Express-based Node.js application whose startup depends on its first require(). This is my first Node.js application. That first require() hits the AWS Parameter Store to gather credentials for a database, so I cannot make a connection to the database until this require resolves asynchronously.
The best way I've found to do this is to export a callback and wrap the rest of the require() statements inside the callback of that first require(). Is this a bad practice?
//app.js
var appConfig = require('./config/appconfig');

appConfig.fetchAppConfig(function(err, result) {
  if (err) {
    console.log(err);
    console.error("Server failed to startup. Config parameters not available.");
  } else {
    var express = require('express');
    var path = require('path');
    var cookieParser = require('cookie-parser');
    // ...
    app.use(bodyParser.json());
    // etc.
    // ...
//appConfig.js
module.exports.fetchAppConfig = function fetchAppConfig(callback) {
  getCredentials(function(err, result) {
    if (err) {
      console.log(err);
      callback(err);
    } else {
      awsLogin.paramStoreService(result).then(
        data => {
          appConfig = decodeAppConfig(data.Parameter.Value);
          callback(null, appConfig);
        }
      ).catch(
        error => {
          console.error(error);
          callback(error);
        }
      );
    }
  });
};
Am I missing a simpler option?
Would I be better served by pulling the configuration somewhere in the deployment code instead?

I would define a couple of functions, one to request the credentials and another to connect to the database once the credentials are retrieved. You could use the async module's flow-control helpers to run them in order.
From the documentation for series:
Run the functions in the tasks collection in series, each one running once the previous function has completed. If any functions in the series pass an error to its callback, no more functions are run, and callback is immediately called with the value of the error. Otherwise, callback receives an array of results when tasks have completed.
Since connectToDatabase needs the credentials that getCredentials produces, waterfall, which passes each task's results on to the next task, is the closer fit here. Here's an example:
var async = require('async');

function getCredentials(callback) {
  callback(null, {
    user: 'hello',
    pass: 'world',
  });
}

function connectToDatabase(creds, callback) {
  console.log('Connecting to database => ' + JSON.stringify(creds));
  callback(null, 'Done');
}

async.waterfall([
  getCredentials,
  connectToDatabase,
], function(err, result) {
  if (err) {
    return console.error(err);
  }
  console.log(result);
});

Until Node supports top-level await, below is the solution I'm using for this exact use case.
// index.js
(async () => {
  await require('./config').initialize();
  require('./app');
})();

// config.js
const _ = require('lodash');

const secretKeys = ['secret1', 'secret2'];
const parameterHierarchyPrefix = `/${process.env.NODE_ENV}/app/`;
const getParamNameWithoutHierarchy = name => _.replace(name, new RegExp(`${parameterHierarchyPrefix}(.*)`), '$1');

const config = {};

config.initialize = async (callback = () => {}) => {
  // initialize aws sdk and ssm
  const AWS = require('aws-sdk');
  AWS.config.update({
    region: 'us-west-2',
    accessKeyId: S3_ACCESS_KEY,      // defined elsewhere, e.g. environment variables
    secretAccessKey: S3_SECRET,
  });
  const ssm = new AWS.SSM();

  // prefix each secret key with the parameter hierarchy
  const secretNames = _.map(secretKeys, key => `${parameterHierarchyPrefix}${key}`);

  // this api only allows fetching 10 params per call
  const secretFetchBatches = _.chunk(secretNames, 10);

  // fetch secrets from aws parameter store
  let secrets = {};
  for (let i = 0; i < _.size(secretFetchBatches); i += 1) {
    const parameters = await ssm.getParameters({
      Names: secretFetchBatches[i],
      WithDecryption: true,
    }).promise();
    secrets = {
      ...secrets,
      ..._.zipObject(
        _.map(parameters.Parameters, ({ Name }) => getParamNameWithoutHierarchy(Name)),
        _.map(parameters.Parameters, 'Value'),
      ),
    };
  }

  // write the secrets into the config object
  _.each(secrets, (v, k) => { config[k] = v; });

  // invoke the callback
  callback();
};

module.exports = config;
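For what it's worth, newer Node versions (14.8+ when using ES modules) do support top-level await, which removes the need for the IIFE wrapper. A minimal sketch assuming the same config module:
// index.mjs (ES module, Node 14.8+) - top-level await, no IIFE needed
import config from './config.js';

await config.initialize();
// load the rest of the app only once configuration is ready
await import('./app.js');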

Related

Node.js Lambda Async return Undefined

A simple call to EC2 describing security groups and returning the security group ID. I'm using async/await, but when logging the return value I get undefined. I fully admit I'm coming from Python and I've tried my hardest to wrap my brain around async calls. I thought I had it nailed, but I'm obviously missing something.
'use strict';
// Load Modules
const AWS = require('aws-sdk')
//Set the region
AWS.config.update({region: 'us-west-2'});
// Call AWS Resources
const ec2 = new AWS.EC2();
// Get Security Group ID From Event
const getSgIdFromEvent = async (event) => {
  var ec2params = { Filters: [{Name: 'tag:t_whitelist', Values: [event['site']]}]};
  await ec2.describeSecurityGroups(ec2params, function (err, response) {
    if (err) {return console.error(err.message)}
    else {
      var sgId = response.SecurityGroups[0].GroupId;
      return sgId;
    };
  });
};
// MAIN FUNCTION
exports.handler = (event, context) => {
  getSgIdFromEvent(event)
    .then(sgId => {console.log(sgId)});
}
"sgId" should return the security group ID. It does print out fine in the original function before the return.
Typically if it is an async call you want you handle it similar to this way without using a callback
// Load Modules
const AWS = require('aws-sdk')
//Set the region
AWS.config.update({ region: 'us-west-2' });
// Call AWS Resources
const ec2 = new AWS.EC2();
// Get Security Group ID From Event
const getSgIdFromEvent = async (event) => {
  var ec2params = { Filters: [{ Name: 'tag:t_whitelist', Values: [event['site']] }] };
  try {
    const securityGroupsDesc = await ec2.describeSecurityGroups(ec2params).promise();
    const sgId = securityGroupsDesc.SecurityGroups[0].GroupId;
    //do something with the returned result
    return sgId;
  }
  catch (error) {
    console.log('handle error');
    // throw error;
  }
};
// MAIN FUNCTION
exports.handler = (event, context) => {
  getSgIdFromEvent(event)
    .then(sgId => { console.log(sgId) });
}
However, if a method doesn't support promises, you just use the callback to handle the returned data or error, without an async function. Reading the AWS docs, you'll find that ec2.describeSecurityGroups() returns an AWS.Request,
which has a promise() method that needs to be invoked to send the request and get a promise back. Note that the try/catch here is not strictly needed, but it's good to have in case an error occurs during the process.
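For completeness, here is a minimal sketch of that plain callback style (no async function involved), using the same ec2 client and ec2params as above:
// Plain callback style - the SDK invokes the callback when the request completes
ec2.describeSecurityGroups(ec2params, function (err, response) {
  if (err) return console.error(err.message);
  console.log(response.SecurityGroups[0].GroupId);
});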
As I said in the comment, chances are that describeSecurityGroups doesn't return a Promise. Try wrapping it explicitly in a Promise instead:
const promiseResponse = await new Promise((res, rej) => {
  ec2.describeSecurityGroups(ec2params, function (err, response) {
    if (err) { return rej(err.message) }
    else {
      var sgId = response.SecurityGroups[0].GroupId;
      res(sgId);
    };
  })
});
// promiseResponse is now equal to sgId inside the callback
return promiseResponse; // this will work because the function is async
Note: You can drop the else keyword
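An alternative to hand-rolling the wrapper is Node's built-in util.promisify; a sketch using the same ec2 client and params (inside the async function) would look like:
const { promisify } = require('util');
// bind so the SDK method keeps its `this` when called through the wrapper
const describeSecurityGroups = promisify(ec2.describeSecurityGroups.bind(ec2));
const response = await describeSecurityGroups(ec2params);
return response.SecurityGroups[0].GroupId;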
Here is the code that worked using async/await. Thanks to @Cristian Traina I realized ec2.describeSecurityGroups wasn't returning a promise; it was returning an AWS.Request.
// Get Security Group ID From Event
const getSgIdFromEvent = async (event) => {
  console.log('Getting Security Group ID')
  var params = { Filters: [{Name: 'tag:t_whitelist', Values: [event['site']]}]};
  const describeSG = await ec2.describeSecurityGroups(params).promise();
  return describeSG.SecurityGroups[0].GroupId;
};
// Get Ingress Rules from Security Group
const getSgIngressRules = async (sgId) => {
  console.log(`Getting SG Ingress rules for ${sgId}`)
  var params = { GroupIds: [sgId] };
  try {
    const ingressRules = await ec2.describeSecurityGroups(params).promise();
    return ingressRules;
  }
  catch (error) {
    console.log("Something went wrong getting Ingress Rules");
  }
};
// MAIN FUNCTION
exports.handler = (event, context) => {
  getSgIdFromEvent(event)
    .then(sgId => {return getSgIngressRules(sgId);})
    .then(ingressRules => {console.log(ingressRules);});
}
I submitted this as the answer since the getSgIdFromEvent function is now only a few lines and still uses async/await like I wanted.
What I was missing was the .promise() on the end of the call, and returning that promise.
Thanks for all the responses!

Have to test lambda several times before it works

I have a lambda function that I am playing around with. It inserts very basic information into a DynamoDB table. Here is the code:
'use strict';
const alexaSkillKit = require('alexa-skill-kit');
const AWS = require('aws-sdk');

function binDaySkill(event, context, callback) {
  alexaSkillKit(event, context, (message) => {
    let params = {
      Item: {
        user_id: '123',
        some_data: 'some data here'
      },
      TableName: 'my_table'
    };
    let documentClient = new AWS.DynamoDB.DocumentClient();
    documentClient.put(params, function (err, data) {
      if (err) {
        callback("Error", err);
      } else {
        callback(null, data);
      }
    });
  });
}
The issue I am having is that it only sometimes saves the data in the DB. I have to click test 5-10 times before it does anything.
Can anyone help with what might be causing this?
The reason this is happening is that alexa-skill-kit takes care of the callback for you.
See the documentation. By passing in the context object you allow the wrapping handler (alexaSkillKit(...)) to manage decoding and encoding the returned objects and payload. The alexaSkillKit handler just expects you to return a value from the function you pass it.
For your code sample you could do the following:
'use strict';
const alexaSkillKit = require('alexa-skill-kit');
const AWS = require('aws-sdk');

function binDaySkill(event, context, callback) {
  alexaSkillKit(event, context, (message) => {
    let params = {
      Item: {
        user_id: '123',
        some_data: 'some data here'
      },
      TableName: 'my_table'
    };
    let documentClient = new AWS.DynamoDB.DocumentClient();
    return documentClient.put(params).promise()
      .then((data) => {
        // stuff with the data!
        return data;
      }).catch((err) => {
        // stuff with the error
        throw err;
      });
  });
}
n.b. The reason it worked after a few invocations is that Lambda re-uses the environments each invocation executes in. It does this by effectively "freezing" the state of the environment and thawing it when it's needed again. This is the basis of a lot of optimisations people make, and it meant that you would sometimes thaw an environment that had been frozen midway through your callback, because alexaSkillKit's own callback returned first.

Sharing DB Connection across AWS Lambda function calls

So I'm following the example at https://www.mongodb.com/blog/post/optimizing-aws-lambda-performance-with-mongodb-atlas-and-nodejs to optimize my lambda functions.
I've tried two approaches and tested them locally using serverless-offline, and neither seems to work.
First Approach
// endpoint file
import {connectToDatabase} from "lib/dbUtils.js";

let cachedDb = null;

export function post(event, context, callback) {
  let response;
  context.callbackWaitsForEmptyEventLoop = false;
  connectToDatabase()
    .then(//do other stuff

// lib/dbUtils.js
export async function connectToDatabase() {
  if (cachedDb && cachedDb.serverConfig.isConnected()) {
    console.log(" using cached db instance");
    return cachedDb;
  }
  cachedDb = await mongoose.createConnection(
    process.env.DB_URL,
    async err => {
      if (err) {
        throw err;
      }
    }
  );
  return cachedDb;
}
Second Approach
global.cachedDb = null;

export function post(event, context, callback) {
  let response;
  context.callbackWaitsForEmptyEventLoop = false;
  connectToDatabase()
    .then(connection => createUser(event.body, connection))

// lib/dbUtils.js
export async function connectToDatabase() {
  // eslint-disable-next-line
  if (global.cachedDb && global.cachedDb.serverConfig.isConnected()) {
    // eslint-disable-next-line
    console.log(" using cached db instance");
    // eslint-disable-next-line
    return global.cachedDb;
  }
  // eslint-disable-next-line
  global.cachedDb = await mongoose.createConnection(
    process.env.DB_URL,
    async err => {
      if (err) {
        throw err;
      }
    }
  );
  // eslint-disable-next-line
  return global.cachedDb;
}
In both cases the using cached db instance console log does not run.
Why does this not work? Is this because of serverless-offline?
The answer is simple: serverless-offline doesn't simulate the full AWS environment. Use the AWS console to make a real Lambda.
The MongoDB Atlas guide is OK, but it's also worth checking the official AWS Lambda documentation describing the context option in each lambda:
callbackWaitsForEmptyEventLoop – Set to false to send the response right away when the callback executes, instead of waiting for the Node.js event loop to be empty. If false, any outstanding events will continue to run during the next invocation.
It's possible to run your code on a real Lambda and see using cached db instance on the console. Since MongoDB's JavaScript code is fairly poor, I've written out my own version below:
var MongoClient = require("mongodb").MongoClient

let db = null

var log = console.log.bind(console)

var print = function(object) {
  return JSON.stringify(object, null, 2)
}

// Use your own credentials (and better yet, put them in environment variables)
const password = `notactuallyapassword`
const uri = `mongodb+srv://lambdauser:${password}@fakedomain.mongodb.net/test?retryWrites=true`

exports.handler = function(event, context, callback) {
  log(`Calling MongoDB Atlas from AWS Lambda with event: ${print(event)}`)
  var document = JSON.parse(JSON.stringify(event))
  const databaseName = "myDatabase",
    collectionName = "documents"
  // See https://www.mongodb.com/blog/post/optimizing-aws-lambda-performance-with-mongodb-atlas-and-nodejs
  // and https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-context.html#nodejs-prog-model-context-properties
  context.callbackWaitsForEmptyEventLoop = false
  return createDoc(databaseName, collectionName, document)
}

async function createDoc(databaseName, collectionName, document) {
  var isConnected = db && db.serverConfig.isConnected()
  if (isConnected) {
    log(`Already connected to database, warm start!`)
  } else {
    log(`Connecting to database (cold start)`)
    var client = await MongoClient.connect(uri)
    db = client.db(databaseName)
  }
  var result = await db.collection(collectionName).insertOne(document)
  log(`just created an entry into the ${collectionName} collection with id: ${result.insertedId}`)
  // Don't close the connection (thanks to context.callbackWaitsForEmptyEventLoop = false) - this will
  // re-use the connection on the next call (if it can re-use the same Lambda container)
  return result
}
Use the Test button to run the lambda above twice in the AWS Lambda console.
The first time you run it you'll see Connecting to database (cold start)
The second time you'll see Already connected to database, warm start!
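Since the question itself uses mongoose rather than the raw driver, here is a rough sketch of the same cached-connection pattern with mongoose (the DB_URL environment variable and file name follow the question; treat this as an outline, not tested code):
// lib/dbUtils.js - mongoose variant of the cached-connection pattern (sketch)
import mongoose from 'mongoose';

let cachedConnection = null;

export async function connectToDatabase() {
  // readyState 1 means the connection is open
  if (cachedConnection && cachedConnection.readyState === 1) {
    console.log('using cached db instance');
    return cachedConnection;
  }
  const connection = mongoose.createConnection(process.env.DB_URL);
  // wait for the initial connection before handing it out
  await new Promise((resolve, reject) => {
    connection.once('open', resolve);
    connection.once('error', reject);
  });
  cachedConnection = connection;
  return cachedConnection;
}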

Mocking putRecord on firehose not working with aws-sdk-mock

I am trying to mock the putRecord method on an AWS Firehose object, but the mocking is not succeeding. The code ends up calling the real aws-sdk API on the firehose object, which talks to the live AWS service. What is wrong in the code below? What needs to be changed to avoid this live service call and make the mock effective?
Is there a way to send a thenable object, and not just a plain object as the callback below does? I.e. is there some way to use something like the callBackFunc that I defined in the test code?
Also, eventually I need to check whether the mock was called or not. How would I achieve that? Could I use sinon.stub in some manner so that I can verify this later? How?
Here are the code and test code portions, modified to a simpler form for this posting.
Code that is part of a file, say samplelogging.js:
/*jshint strict:true */
/*jshint node:true */
/*jshint esversion:6 */
/*jshint mocha:true */
"use strict";

var Promise = require('bluebird');
var AWS = require('aws-sdk');
var uuid = require('uuid');
AWS.config.setPromisesDependency(Promise);

var Logger = {
  /**
   * Gets an AWS Firehose instance
   */
  getFirehoseInstance: function getFirehoseInstance() {
    if (!(this.firehose)) {
      this.firehose = new AWS.Firehose({apiVersion: "2015-08-04", region: "us-west-2"});
    }
    return this.firehose;
  },

  getLogger: function getLogger(options) {
    options = options || {};
    let self = this;
    self.class = options.class;
    self.firehose = self.getFirehoseInstance();
    return self;
  },

  logInfo: function logInfo(dataToLog, callback) {
    this._log("info", dataToLog)
      .then(function (data) {
        if (callback) {
          callback();
        }
      });
    return;
  },

  /**
   * #api: private
   */
  _log: function _log(traceLevel, dataToLog) {
    return new Promise(function(resolve, reject) {
      var params = params || {};
      AWS.config.update({ logger: process.stdout });
      AWS.config.update({ retries: 3 });
      var recordParams = {
        type: params.type || 'LogEntry'
      };
      if (typeof dataToLog === 'string' || dataToLog instanceof String) {
        recordParams.data = { message: dataToLog };
      } else {
        recordParams.data = dataToLog;
      }
      recordParams.data.id = uuid.v1();
      recordParams.data.preciseTimestamp = Math.floor(new Date().getTime()/1000);
      recordParams.data.class = this.class;
      recordParams.data.traceLevel = traceLevel;
      var firehoseRecordParams = {
        DeliveryStreamName: "mystreamname", //replace mystreamname with real stream name
        Record: {
          Data: JSON.stringify(recordParams)+',\n'
        }
      };
      this.firehose.putRecord(firehoseRecordParams, function(err, recordId) {
        console.log("debug: recordId returned by putRecord = " + JSON.stringify(recordId));
        return resolve(recordId);
      });
    }.bind(this));
  }
};

module.exports = Logger;
Here is my test code, which is part of a file, say sampleloggingtest.js:
var expect = require('chai').expect;
var Promise = require("bluebird");
var sinon = require("sinon");
var AWSMock = require('aws-sdk-mock');

describe.only("Logging tests", function () {
  it.only("Test AWS firehose API invoked", function (done) {
    let mylogger = Logger.getLogger({class: "Unit Test"});
    let firehoseInstance = mylogger.getFirehoseInstance();
    // want to have a callback function that returns a thenable object and not just an object.
    // Not sure how to use it with the mock, so for now this just shows what I intend to do.
    let callBackFunc = function(err, recordId) {
      console.log("debug: returned from putRecord, recordId = " + JSON.stringify(recordId));
      return Promise.resolve(recordId);
    };
    // calling mock as per the documentation at https://github.com/dwyl/aws-sdk-mock
    AWSMock.mock('Firehose', 'putRecord', function(params, callback) {
      console.log("debug: callback to putRecord to be called");
      callback(null, {"RecordId": "12345"});
    });
    // calling a method that should call firehose logging but our mock should intercept it - though it doesn't.
    mylogger.logInfo({ prop1: "value1" }, function() {
      console.log("debug: in the callback that was passed to logInfo...");
      done();
    });
  });
});
Sharing the answer that I figured out, especially since another person (mmorrisson) was trying to do the same.
Essentially, I added a _setFirehoseInstance method to my logger class, called only from my test code, which replaces the firehose instance (which in production code would have come from new AWS.Firehose()) with my own simple mock class.
In my test code...
let firehoseMock = {};
In beforeEach(), create and set the mock to replace the actual firehose instance, and in afterEach() restore it.
beforeEach(function (done) {
  logger = new Logger({class: "Unit Test"});
  firehose = logger.getFirehoseInstance();
  consoleMock = sinon.mock(console);
  firehoseMock.putRecord = function(params, callback) {
    let recordIdObj = {"RecordId": recordIdVal};
    callback(null, recordIdObj);
  };
  logger._setFirehoseInstance(firehoseMock);
  sinon.spy(firehoseMock, "putRecord");
  done();
});

afterEach(function (done) {
  firehoseMock.putRecord.restore();
  logger._setFirehoseInstance(firehose);
  consoleMock.restore();
  done();
});
And in the test code where we try to log, check if firehoseMock.putRecord was called or not...
it("Test AWS firehose API invoked", function (done) {
logger.setMode("production");
logger.setEnvironment("test");
logger.logInfo({ prop1: "value1" }, function(data){
expect(firehoseMock.putRecord.calledOnce).to.equal(true); // should have been called once
expect(data.RecordId).to.equal(recordIdVal); // should have matching recordId
done();
});
});
In production code, the logger class has the getter and setter for the firehose instance.
/**
 * Gets an AWS Firehose instance
 */
getFirehoseInstance() {
  if (!(this.firehose)) {
    this.firehose = new AWS.Firehose({apiVersion: Config.apiVersion, region: Config.region});
  }
  return this.firehose;
}

/**
 * Sets an AWS Firehose instance - for TEST purposes
 */
_setFirehoseInstance(firehoseInstance) {
  if (firehoseInstance) {
    this.firehose = firehoseInstance;
  }
}
This worked for me. When the logger calls the firehose instance method in production, it goes to the AWS service, but in unit tests, when I call the log method, it calls putRecord on the mock because I have replaced the firehose instance with the mock. I can then test whether putRecord on the mock was called (using sinon.spy).
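If you would rather stay with aws-sdk-mock, a rough sketch of the sinon.stub-based verification asked about in the question could look like the following; it assumes the Firehose client is created after AWSMock.mock() has been registered, which is the library's documented requirement:
const sinon = require('sinon');
const AWSMock = require('aws-sdk-mock');

// a sinon stub doubles as the replacement function, so calls can be verified later
const putRecordStub = sinon.stub().callsFake(function(params, callback) {
  callback(null, { RecordId: "12345" });
});
AWSMock.mock('Firehose', 'putRecord', putRecordStub);

// ... create the Logger / Firehose instance and exercise logInfo() here ...

sinon.assert.calledOnce(putRecordStub);
AWSMock.restore('Firehose');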

Mocking using aws-sdk-mock's promise support with DocumentClient

I'm trying to write a unit test using aws-sdk-mock's promise support. I'm using DocumentClient.
My code looks like this:
const docClient = new AWS.DynamoDB.DocumentClient();
const getItemPromise = docClient.get(params).promise();
return getItemPromise.then((data) => {
  console.log('Success');
  return data;
}).catch((err) => {
  console.log(err);
});
My mock and unit test looks like this:
const AWS = require('aws-sdk-mock');
AWS.Promise = Promise.Promise;

AWS.mock('DynamoDB.DocumentClient', 'get', function (params, callback) {
  callback(null, { Item: { Key: 'test value' } });
});

dynamoStore.getItems('tableName', 'idName', 'id').then((actualResponse) => {
  // assertions
  done();
});
Running my unit test does not return my test value; it actually bypasses my mock and calls DynamoDB directly. What am I doing wrong? How can I get my mock set up properly?
It's unclear from your code, but aws-sdk-mock has this note:
NB: The AWS Service needs to be initialised inside the function being tested in order for the SDK method to be mocked
so the following will not mock correctly
var AWS = require('aws-sdk');
var sns = AWS.SNS();
var dynamoDb = AWS.DynamoDB();

exports.handler = function(event, context) {
  // do something with the services e.g. sns.publish
}
but this will
var AWS = require('aws-sdk');

exports.handler = function(event, context) {
  var sns = AWS.SNS();
  var dynamoDb = AWS.DynamoDB();
  // do something with the services e.g. sns.publish
}
see more here https://github.com/dwyl/aws-sdk-mock#how-usage
It might be too late for an answer, but I had the same problem and stumbled upon this question. After a few tries I found a solution that doesn't involve aws-sdk-mock but only plain Sinon, and I hope that sharing it will help someone else. Note that the DynamoDB client is created outside the lambda.
The lambda itself looks like this:
const dynamoDB = new DynamoDB.DocumentClient();

exports.get = async event => {
  const params = {
    TableName: 'Tasks',
    Key: {
      id: event.pathParameters.id
    }
  };

  const result = await dynamoDB.get(params).promise();
  if (result.Item) {
    return success(result.Item);
  } else {
    return failure({ error: 'Task not found.' });
  }
};
And the test for this lambda is:
const sandbox = sinon.createSandbox();

describe('Task', () => {
  beforeAll(() => {
    const result = { Item: { id: '1', name: 'Go to gym' } };
    sandbox.stub(DynamoDB.DocumentClient.prototype, 'get').returns({ promise: () => result });
  });

  afterAll(() => {
    sandbox.restore();
  });

  it('gets a task from the DB', async () => {
    // Act
    const response = await task.get(getStub);

    // Assert
    expect(response.statusCode).toEqual(200);
    expect(response.body).toMatchSnapshot();
  });
});
I like to use Sinon's sandbox to be able to stub a whole lot of different DynamoDB methods and clean up everything in a single restore().
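As a small sketch of that, a couple of extra stubs can live in the same sandbox and get cleaned up together (the put/delete stubs here are illustrative):
// additional stubs in the same sandbox; sandbox.restore() cleans them all up at once
sandbox.stub(DynamoDB.DocumentClient.prototype, 'put').returns({ promise: () => Promise.resolve({}) });
sandbox.stub(DynamoDB.DocumentClient.prototype, 'delete').returns({ promise: () => Promise.resolve({}) });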
Sinon and proxyquire can also be used to mock the DynamoDB client.
This approach supports both callback-based and async/await-based calls.
Refer to this link for full details:
https://yottabrain.org/nodejs/nodejs-unit-test-dynamodb/
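For illustration, a minimal sketch of the proxyquire approach might look like this (the ./handler path and the stubbed response are hypothetical; the handler is assumed to construct its DocumentClient from 'aws-sdk' inside the module):
const proxyquire = require('proxyquire');
const sinon = require('sinon');

// stub get() so it returns an object with a promise() method, like the real SDK
const getStub = sinon.stub().returns({
  promise: () => Promise.resolve({ Item: { id: '1' } }),
});

// load the handler with a fake 'aws-sdk' whose DocumentClient hands back the stub
const handler = proxyquire('./handler', {
  'aws-sdk': {
    DynamoDB: {
      DocumentClient: function () {
        return { get: getStub };
      },
    },
  },
});
// handler can now be invoked in the test, and getStub records the calls for assertions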
Somewhat related to the question, and expanding on wyu's solution: I too faced a similar issue. For me, the following didn't work with aws-sdk-mock:
const AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});

let call = function (action, params) {
  const dynamoDb = new AWS.DynamoDB.DocumentClient();
  return dynamoDb[action](params).promise();
};
whereas this worked:
let call = function (action, params) {
  const AWS = require('aws-sdk');
  AWS.config.update({region: 'us-east-1'});
  const dynamoDb = new AWS.DynamoDB.DocumentClient();
  return dynamoDb[action](params).promise();
};
I had exactly the same problem of the mock failing, but resolved the issue after following the suggestion from a user above: move the following line inside the function being tested rather than defining it outside.
let sns = new AWS.SNS(.....)
