Lambda function not pushing data into DynamoDB table - node.js

I'm running a NodeJS lambda function which is triggered by API Gateway.
My goal is to push the data into a DynamoDB table and then send a status response. I think the Lambda stops running before the insertData function finishes executing, because it sometimes works, but for most requests it doesn't.
Could someone lend a hand on this?
Here is my code:
// Set a table name that we can use later on
const tableName = "InterestRates"
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'us-east-1'});
// Create the DynamoDB service object
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

exports.handler = async (event) => {
    // TODO implement
    console.log(">>> Running LoanInterestRates")
    if(event['resource'] == '/rates'){
        if(event.httpMethod === 'POST'){
            return newRate(event);
        }
    }
};
function insertData(params){
    let status;
    // Call DynamoDB to add the item to the table
    ddb.putItem(params, function(err, data) {
        if (err) {
            status = "Error";
        } else {
            status = "Success";
        }
    });
    return status
}
function newRate (event){
    const newRate = JSON.parse(event.body);
    var params = {
        TableName: 'InterestRates',
        Item: {
            'termlength' : {N: newRate["length"]},
            'InterestRate': {S: newRate["rate"]}
        }
    };
    let addNewRate = insertData(params);
    return {
        statusCode: 200,
        body: JSON.stringify({
            response: addNewRate
        })
    }
}
I also tried using Async/Await but it didn't work.

Your Lambda function is async but your code is not. You need to await the completion of your function newRate, which in turn should await the function insertData, which should in turn await your DynamoDB request, as sketched below.
I would advise you to do one of two things:
Learn how the async nature of JS works and make sure you understand when you need to await.
Use a synchronous programming language like Python/Boto3 where you will not run into such issues.
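For reference, here is a minimal sketch of what the awaited version could look like, reusing the table and field names from the question. This is an illustration only, not the poster's final code; note that the low-level DynamoDB API expects N values as strings.
// Minimal sketch (illustration only), assuming the same table and request body as above
const AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
const ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

exports.handler = async (event) => {
    if (event.resource === '/rates' && event.httpMethod === 'POST') {
        return newRate(event);
    }
};

async function newRate(event) {
    const body = JSON.parse(event.body);
    const params = {
        TableName: 'InterestRates',
        Item: {
            // the low-level DynamoDB API expects N values as strings
            'termlength': {N: String(body["length"])},
            'InterestRate': {S: body["rate"]}
        }
    };
    const status = await insertData(params);
    return {
        statusCode: 200,
        body: JSON.stringify({response: status})
    };
}

async function insertData(params) {
    try {
        // awaiting .promise() keeps the handler alive until the write finishes
        await ddb.putItem(params).promise();
        return "Success";
    } catch (err) {
        console.error(err);
        return "Error";
    }
}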

Related

How can I read in sensor data from my ESP32 to DynamoDB?

I'm learning about AWS services and I'm attempting to create a data pipeline from my ESP32 (photoresistor data) to DynamoDB.
I have created a rule that takes incoming MQTT messages from my ESP32 and triggers a lambda function that pushes the data to my DynamoDB.
I have it working for hardcoded values in the lambda function, but how can I modify the following code to read in real-time sensor data from the ESP32?
Here is the lambda code (node.js):
const AWS = require("aws-sdk");
const ddb = new AWS.DynamoDB.DocumentClient({region: 'us-west-2'});

exports.handler = async (event, context, callback) => {
    // Captures the requestId from the context message
    const requestId = context.awsRequestId;
    // Handle promise fulfilled/rejected states
    await createMessage(requestId).then(() => {
        callback(null, {
            statusCode: 201,
            body: '',
            headers: {
                'Access-Control-Allow-Origin' : '*'
            }
        });
    }).catch((err) => {
        console.error(err)
    })
};

// Function createMessage
// Writes message to DynamoDb table Message
function createMessage(requestId) {
    const params = {
        TableName: 'my-ddd-data',
        Item: {
            'partKey' : requestId,
            'Dropouts': "67476", // this is successfully sent to my database but I'd like real time sensor data
            'Runtime' : "0 mins"
        }
    }
    return ddb.put(params).promise();
}
The JSON format of the data being fed to this Lambda function:
{
    "Dropouts": "1",
    "Runtime": "0 mins"
}
Please consider logging your event to see what it looks like. It will probably contain the JSON information from your sensor. I think you could pass that information directly to DynamoDB:
const AWS = require("aws-sdk");
const ddb = new AWS.DynamoDB.DocumentClient({region: 'us-west-2'});

exports.handler = async (event, context, callback) => {
    // Log your event to see what it looks like
    console.log('Event\n', JSON.stringify(event))
    // Captures the requestId from the context message
    const requestId = context.awsRequestId;
    // Handle promise fulfilled/rejected states
    // Pass the event that is being processed
    await createMessage(requestId, event).then(() => {
        callback(null, {
            statusCode: 201,
            body: '',
            headers: {
                'Access-Control-Allow-Origin' : '*'
            }
        });
    }).catch((err) => {
        console.error(err)
    })
};

// Function createMessage
// Writes message to DynamoDb table Message
function createMessage(requestId, event) {
    const params = {
        TableName: 'my-ddd-data',
        Item: {
            'partKey' : requestId,
            'Dropouts': event['Dropouts'], // read values from the event
            'Runtime' : event['Runtime']
        }
    }
    return ddb.put(params).promise();
}
Although implemented in Python, I think this tutorial from the Amazon documentation could be of help as well.

AWS Node.js Lambda POST Function in AWS Console

I'm trying to create a lambda function inside of the AWS Console that does a POST of a record to my DynamoDB table records.
I am currently getting a success message when I run the test (I haven't hooked up a trigger yet), but the test result comes back null and nothing is posted to my table.
I've gone through the AWS SDK docs and haven't found what I'm looking for in terms of the exports.handler needed for the Lambda function to work. I see the Node.js code samples, but without the exports part.
I've tried setting recordId and recordAlbum like this,
let recordId = 1;
let recordAlbum = "Album";
and that just returns a structure error when testing.
Does anyone have any resources or experience with this? I'm playing around with building a serverless CRUD app. Any advice or resources would really help.
This is the code I am using in my AWS Lambda function.
let AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'us-east-1'});
// Create the DynamoDB service object
let ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

let recordId = {N: '001'};
let recordAlbum = {S: 'Album Here'}

exports.handler = async function(event, context) {
    let params = {
        TableName: 'TABLE_NAME',
        Item: {
            'recordId' : recordId,
            'album' : recordAlbum
        }
    };
    console.log('generating record ID', recordId);
    console.log('generating Album', recordAlbum);
    console.log('generated params', params);
    // Call DynamoDB to add the item to the table
    ddb.putItem(params, function(err, data) {
        if (err) {
            console.log("Error", err);
        } else {
            console.log("Success", data);
        }
    });
}
You've got an async handler so you should use promises rather than callbacks.
Try this:
exports.handler = async function(event, context) {
    let params = {
        TableName: 'TABLE_NAME',
        Item: {
            'recordId' : recordId,
            'album' : recordAlbum
        }
    };
    console.log('generating record ID', recordId);
    console.log('generating Album', recordAlbum);
    console.log('generated params', params);
    try {
        let result = await ddb.putItem(params).promise();
        console.log(result);
    }
    catch(err) {
        console.error(err);
    }
}
Here's a bit more about async/await on the AWS blog, and documentation on the promise() method in the AWS JS SDK.
One answer to this question is found in this tutorial by following step 3 and modifying the information.
I changed the "Id" to "id" and changed exports.writeMovie to exports.handler
https://hackernoon.com/create-a-serverless-rest-api-with-node-js-aws-lambda-dynamodb-api-gateway-f08e7111fd16
It doesn't use async/await and is a touch outdated, but it works.
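For anyone following that route, a rough sketch of a callback-style handler in the spirit of that tutorial could look like this. It is not the tutorial's exact code; the table and attribute values are the placeholders from the question.
// Rough sketch of a callback-style handler (not the tutorial's exact code;
// table and attribute values are placeholders from the question)
const AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
const ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

exports.handler = (event, context, callback) => {
    const params = {
        TableName: 'TABLE_NAME',
        Item: {
            'recordId': {N: '001'},
            'album': {S: 'Album Here'}
        }
    };
    // The callback signals completion, so the write finishes before the function returns
    ddb.putItem(params, (err, data) => {
        if (err) {
            callback(err);
        } else {
            callback(null, data);
        }
    });
};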

Invalid JSON when calling Node 8.10 Lambda

I am using Lambda with Cognito to write to DynamoDB after a successful login.
Node 8.10 has a different layout with promises and async/await. The callback(null, event) return is not working for me. Does anyone know how to solve the "Invalid lambda function output : Invalid JSON" problem with Node 8.10?
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
//AWS.config.update({region: 'REGION'});
// Create DynamoDB document client
var docClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});

exports.myHandler = async (event, context, callback) => {
    // TODO implement
    console.log ("Authentication successful");
    console.log ("Trigger function =", event.triggerSource);
    console.log ("User pool = ", event.userPoolId);
    console.log ("App client ID = ", event.callerContext.clientId);
    console.log ("User ID = ", event.userName);

    const params = {
        TableName: 'xxxx',
        Item: {
            'userId': event.userName,
            'systemUpdateDate': new Date().toJSON()
        }
    };

    let putItem = new Promise((res, rej) => {
        docClient.put(params, function(err, data) {
            if (err) {
                console.log("Error", err);
            } else {
                console.log("Success", data);
            }
        });
    });

    const result = await putItem;
    console.log(result);

    // Return to Amazon Cognito
    callback(null, event);
};
thanks
With the suggested Node 8 async/await approach, you should structure your function as follows:
async function handler(event) {
    const response = doSomethingAndReturnAJavascriptObject();
    return response;
}
You're getting that error because whatever you're returning isn't something that can be parsed as a JSON object.
Without seeing your code it's hard to debug further. I expect that you might accidentally not be awaiting a .promise() version of a dynamo/cognito API call, which is causing you to return a Promise instead of a result.
n.b. you can still use the 'old' callback() method with Node 8 if you find it easier.
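Applied to the code above, a minimal sketch of the promise-based approach could look like this (same placeholder table name as in the question; Cognito triggers expect the event object back):
// Minimal sketch: await the .promise() version instead of wrapping a callback by hand
var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});

exports.myHandler = async (event) => {
    const params = {
        TableName: 'xxxx',   // same placeholder table name as in the question
        Item: {
            'userId': event.userName,
            'systemUpdateDate': new Date().toJSON()
        }
    };
    // Awaiting the promise either resolves or throws, so no hand-rolled Promise wrapper is needed
    await docClient.put(params).promise();
    // Cognito triggers expect the (JSON-serialisable) event object back
    return event;
};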

Have to test lambda several times before it works

I have a lambda function that I am playing around with. It inserts very basic information into a DynamoDB table. Here is the code:
'use strict';
const alexaSkillKit = require('alexa-skill-kit');
const AWS = require('aws-sdk');

function binDaySkill(event, context, callback) {
    alexaSkillKit(event, context, (message) => {
        let params = {
            Item: {
                user_id: '123',
                some_data: 'some data here'
            },
            TableName: 'my_table'
        };
        let documentClient = new AWS.DynamoDB.DocumentClient();
        documentClient.put(params, function (err, data) {
            if (err) {
                callback("Error", err);
            } else {
                callback(null, data);
            }
        });
    });
}
The issue I am having is that it only sometimes saves the data in the DB. I have to click test 5-10 times before it does anything.
Can anyone help with what might be causing this?
The reason this is happening is because alexa-skill-kit takes care of the callback for you.
See the documentation. By passing in the context object you allow the wrapping handler (alexaSkillKit(...)) to manage decoding and encoding the returned objects and payload. The alexaSkillKit callback function just expects you to return a value.
For your code sample you could do the following:
'use strict';
const alexaSkillKit = require('alexa-skill-kit');
const AWS = require('aws-sdk');

function binDaySkill(event, context, callback) {
    alexaSkillKit(event, context, (message) => {
        let params = {
            Item: {
                user_id: '123',
                some_data: 'some data here'
            },
            TableName: 'my_table'
        };
        let documentClient = new AWS.DynamoDB.DocumentClient();
        return documentClient.put(params).promise()
            .then((data) => {
                // stuff with the data!
                return data;
            }).catch((err) => {
                // stuff with the error
                throw err;
            });
    });
}
N.b. the reason it worked after a few invocations is that Lambda re-uses the environments each invocation executes in. It does this by effectively "freezing" the state of the environment and thawing it when it's needed again. This is the basis of a lot of optimisations people make, and it meant that you would sometimes thaw an environment that was midway through calling back when it had been frozen because alexaSkillKit returned first.

Mocking using aws-sdk-mock's promise support with DocumentClient

I'm trying to write a unit test using aws-sdk-mock's promise support. I'm using DocumentClient.
My code looks like this:
const docClient = new AWS.DynamoDB.DocumentClient();
const getItemPromise = docClient.get(params).promise();
return getItemPromise.then((data) => {
    console.log('Success');
    return data;
}).catch((err) => {
    console.log(err);
});
My mock and unit test looks like this:
const AWS = require('aws-sdk-mock');
AWS.Promise = Promise.Promise;

AWS.mock('DynamoDB.DocumentClient', 'get', function (params, callback) {
    callback(null, { Item: { Key: 'test value' } });
});

dynamoStore.getItems('tableName', 'idName', 'id').then((actualResponse) => {
    // assertions
    done();
});
Running my unit test does not return my test value; it actually bypasses my mock and calls DynamoDB directly. What am I doing wrong? How can I get my mock set up properly?
It's unclear from your code, but aws-sdk-mock has this note:
NB: The AWS Service needs to be initialised inside the function being tested in order for the SDK method to be mocked
so the following will not mock correctly
var AWS = require('aws-sdk');
var sns = AWS.SNS();
var dynamoDb = AWS.DynamoDB();

exports.handler = function(event, context) {
    // do something with the services e.g. sns.publish
}
but this will
var AWS = require('aws-sdk');

exports.handler = function(event, context) {
    var sns = AWS.SNS();
    var dynamoDb = AWS.DynamoDB();
    // do something with the services e.g. sns.publish
}
see more here https://github.com/dwyl/aws-sdk-mock#how-usage
It might be too late for an answer, but I had the same problem and stumbled upon this question. After a few tries I found a solution that doesn't involve aws-sdk-mock but only plain Sinon, and I hope sharing it helps someone else. Note that the DynamoDB client is created outside the lambda.
The lambda itself looks like this:
const { DynamoDB } = require('aws-sdk');
const dynamoDB = new DynamoDB.DocumentClient();

exports.get = async event => {
    const params = {
        TableName: 'Tasks',
        Key: {
            id: event.pathParameters.id
        }
    };
    const result = await dynamoDB.get(params).promise();
    if (result.Item) {
        return success(result.Item);   // success/failure are the author's response helpers
    } else {
        return failure({ error: 'Task not found.' });
    }
};
And the test for this lambda is:
const sinon = require('sinon');
const sandbox = sinon.createSandbox();

describe('Task', () => {
    beforeAll(() => {
        const result = { Item: { id: '1', name: 'Go to gym' } };
        sandbox.stub(DynamoDB.DocumentClient.prototype, 'get').returns({ promise: () => result });
    });

    afterAll(() => {
        sandbox.restore();
    });

    it('gets a task from the DB', async () => {
        // Act
        const response = await task.get(getStub);
        // Assert
        expect(response.statusCode).toEqual(200);
        expect(response.body).toMatchSnapshot();
    });
});
I like to use Sinon's sandbox to be able to stub a whole lot of different DynamoDB methods and clean up everything in a single restore().
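As an illustration of that pattern, a sketch of stubbing more than one DocumentClient method in the same sandbox might look like this; the method names and return values are purely illustrative.
// Illustrative only: stubbing several DocumentClient methods in one sandbox
const sinon = require('sinon');
const { DynamoDB } = require('aws-sdk');

const sandbox = sinon.createSandbox();

beforeAll(() => {
    sandbox.stub(DynamoDB.DocumentClient.prototype, 'get')
        .returns({ promise: () => Promise.resolve({ Item: { id: '1' } }) });
    sandbox.stub(DynamoDB.DocumentClient.prototype, 'put')
        .returns({ promise: () => Promise.resolve({}) });
});

afterAll(() => {
    // a single restore() cleans up every stub created through the sandbox
    sandbox.restore();
});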
Sinon and proxyquire can be used to mock the DynamoDB client. This works for both callback-based and async/await-based calls.
Refer to this link for full details:
https://yottabrain.org/nodejs/nodejs-unit-test-dynamodb/
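A rough sketch of that approach could look like the following, where proxyquire swaps out aws-sdk when the handler module is loaded; the './handler' path and the stubbed data are placeholders.
// Rough sketch with sinon + proxyquire; './handler' and the stubbed data are placeholders
const sinon = require('sinon');
const proxyquire = require('proxyquire');

const getStub = sinon.stub().returns({
    promise: () => Promise.resolve({ Item: { id: '1', name: 'Go to gym' } })
});

// Inject a fake DocumentClient so the handler under test never talks to real DynamoDB
const handler = proxyquire('./handler', {
    'aws-sdk': {
        DynamoDB: {
            DocumentClient: function () {
                return { get: getStub };
            }
        }
    }
});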
Somewhat related to the question, and expanding on wyu's solution: I too faced a similar issue. For me, the following didn't work with aws-sdk-mock:
const AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});

let call = function (action, params) {
    const dynamoDb = new AWS.DynamoDB.DocumentClient();
    return dynamoDb[action](params).promise();
};
whereas this worked:
let call = function (action, params) {
    const AWS = require('aws-sdk');
    AWS.config.update({region: 'us-east-1'});
    const dynamoDb = new AWS.DynamoDB.DocumentClient();
    return dynamoDb[action](params).promise();
};
I had exactly the same problem of the mock failing, but resolved the issue after following the suggestion from a user above: move the following line inside the function rather than defining it outside:
let sns = new AWS.SNS(.....)
