I have a lambda function that I am playing around with. It inserts very basic information into a DynamoDB table. Here is the code:
'use strict';
const alexaSkillKit = require('alexa-skill-kit');
const AWS = require('aws-sdk');
function binDaySkill(event, context, callback) {
alexaSkillKit(event, context, (message) => {
let params = {
Item: {
user_id: '123',
some_data: 'some data here'
},
TableName: 'my_table'
};
let documentClient = new AWS.DynamoDB.DocumentClient();
documentClient.put(params, function (err, data) {
if (err) {
callback("Error", err);
} else {
callback(null, data);
}
});
});
}
The issue I am having is that it only sometimes saves the data in the DB. I have to click test 5-10 times before it does anything.
Can anyone help with what might be causing this?
This is happening because alexa-skill-kit takes care of the callback for you.
See the documentation: by passing in the context object you let the wrapping handler (alexaSkillKit(...)) manage decoding and encoding the request and response payloads. The alexaSkillKit handler function just expects you to return a value (or a promise that resolves to one).
For your code sample you could do the following:
'use strict';
const alexaSkillKit = require('alexa-skill-kit');
const AWS = require('aws-sdk');
function binDaySkill(event, context, callback) {
alexaSkillKit(event, context, (message) => {
let params = {
Item: {
user_id: '123',
some_data: 'some data here'
},
TableName: 'my_table'
};
let documentClient = new AWS.DynamoDB.DocumentClient();
return documentClient.put(params).promise()
.then((data) => {
// stuff with the data!
return data;
}).catch((err) => {
// stuff with the error
throw err;
});
});
}
n.b. The reason it worked after a few invocations is that Lambda re-uses the environment each invocation executes in. It does this by effectively "freezing" the state of the environment and thawing it when it's needed again. This is the basis of a lot of optimisations people make; it also meant you would sometimes thaw an environment whose DynamoDB callback was still in flight when alexaSkillKit returned first and the environment was frozen, so the pending put could complete on a later invocation.
I'm running a NodeJS lambda function which is triggered by API Gateway.
My goal is to push the data and then send a status response. I think the lambda stops running before the insertData function finishes its execution, because sometimes it works but in most requests it doesn't.
Could someone lend a hand on this?
Here is my code:
// Set a table name that we can use later on
const tableName = "InterestRates"
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'us-east-1'});
// Create the DynamoDB service object
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});
exports.handler = async (event) => {
// TODO implement
console.log(">>> Running LoanInterestRates")
if(event['resource'] == '/rates'){
if(event.httpMethod === 'POST'){
return newRate(event);
}
}
};
function insertData(params){
let status;
// Call DynamoDB to add the item to the table
ddb.putItem(params, function(err, data) {
if (err) {
status = "Error";
} else {
status = "Success";
}
});
return status
}
function newRate (event){
const newRate = JSON.parse(event.body);
var params = {
TableName: 'InterestRates',
Item: {
'termlength' : {N: newRate["length"]},
'InterestRate': {S: newRate["rate"]}
}
};
let addNewRate = insertData(params);
return {
statusCode: 200,
body: JSON.stringify({
response: addNewRate
})
}
}
I also tried using Async/Await but it didn't work.
Your Lambda function is async but your code is not. You need to await the completion of your function newRate, which in turn should await the function insertData, which should in turn await your DDB request; a sketch of what that looks like follows the list below.
I would advise you to do one of two things:
Learn how JavaScript's async nature works and make sure you understand when you need to await.
Use a language with synchronous SDK calls, such as Python with Boto3, where you will not run into such issues.
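A minimal sketch of what the awaited version could look like, reusing the table name and event shape from the question (the status strings and the response format are placeholders, not anything prescribed by Lambda):
const AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-1' });
const ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });

exports.handler = async (event) => {
  if (event.resource === '/rates' && event.httpMethod === 'POST') {
    return newRate(event);
  }
};

// Returns a promise, so callers can await it.
async function insertData(params) {
  try {
    await ddb.putItem(params).promise(); // .promise() turns the SDK call into an awaitable promise
    return 'Success';
  } catch (err) {
    return 'Error';
  }
}

async function newRate(event) {
  const body = JSON.parse(event.body);
  const params = {
    TableName: 'InterestRates',
    Item: {
      termlength: { N: String(body['length']) }, // N values are passed as strings in the low-level API
      InterestRate: { S: body['rate'] }
    }
  };
  const addNewRate = await insertData(params); // the put finishes before we build the response
  return {
    statusCode: 200,
    body: JSON.stringify({ response: addNewRate })
  };
}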
I'm scanning all items from a DynamoDB table - within a Lambda function - with DocumentClient. I'm then looping through each item and extracting the payload that I need. I'll use that item from the payload as a parameter with ExpressionAttributeValues in a new query.
Everything works dandy independently. The issue is with the use of the asynchronous function queryItems when it is nested within an array forEach() method: I'm getting a parsing error on queryItems. I can query the table when I call the function outside of the loop, but how else am I going to query each item independently?
I'm not sure how to handle this.
'use strict';
const aws = require('aws-sdk');
const docClient = new aws.DynamoDB.DocumentClient();
var paramsAll = {
TableName: 'MyTable',
Select: "ALL_ATTRIBUTES"
};
exports.handler = async (event, context) => {
try {
let arr = [];
let sequence = '';
//scan all items in table
docClient.scan(paramsAll, function(err, data) {
if (err) {
//handle error
}
else {
//Loop through each item in the table:
let items = (data.Items);
items.forEach(function(Item) {
let p = (Item.payload);
//Extract sequence from the payload
sequence = (p.seq);
arr.push(sequence);
//perform other function with this array (not listed for brevity)
});
//Here is where I'm having the issue:
arr.forEach(function(Item) {
//Pass these items as a parameter to the queryItems function, but getting Parsing Error: unexpected token queryItems
const results = await queryItems(Item);
//do something with the results...
})
}
});
}
catch (err) {
return { error: err };
}
};
async function queryItems(p) {
try {
var params = {
TableName: 'MyTable',
KeyConditionExpression: '#seq = :value',
ExpressionAttributeValues: { ':value': p },
ExpressionAttributeNames: { '#seq': 'seq' }
};
const data = await docClient.query(params).promise();
return data;
}
catch (err) {
return err;
}
}
I've definitely run into a similar issue. What I believe is happening is just a JavaScript syntax issue: awaiting queryItems inside the synchronous function provided to forEach will produce an error. (Although, when running the code, I get the specific error "SyntaxError: await is only valid in async functions and the top level bodies of modules", so there might be something else going on.)
I see nothing wrong with the DynamoDB queries, but hoangdv's suggestions are spot on. Specifically, I'd also suggest using the promise style for scan, and while a for...loop will definitely work, using Promise.all and map will be a lot quicker to complete all the queries. Here's how I'd modify the code:
'use strict';
const aws = require('aws-sdk');
const docClient = new aws.DynamoDB.DocumentClient();
// avoid var unless you specifically require its hoisting behavior.
const paramsAll = {
TableName: 'MyTable',
Select: "ALL_ATTRIBUTES" // most likely not needed, I'd review this section of the docs: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Scan.html#DDB-Scan-request-Select
};
exports.handler = async (event, context) => {
try {
// unless you need to assign a new array to this variable, it is better practice to use const instead.
const arr = [];
// let sequence = ''; // see the comment above the forEach below for why I commented this out.
// scan all items in table.
// Destructure Items out of the response.
// You may also need to continue scanning with the LastEvaluatedKey depending on the size of your table, and/or your use case.
// You'd continue scanning in a while loop, for example.
const { Items, LastEvaluatedKey } = await docClient.scan(paramsAll).promise();
// push the sequence to the arr.
// You presumably had a reason (omitted for brevity) for defining sequence outside the loop,
// but since this example doesn't need it there, I've omitted it entirely.
Items.forEach(Item => {
const p = Item.payload;
arr.push(p.seq);
});
// use a for loop or map here instead. forEach will return undefined, which cannot be await'ed.
// instead, map will return a new array of Promises (since the callback is async).
// Then, you can use Promise.all to await until each Promise in the array is resolved.
// Keep in mind, depending on how many items you are iterating through, you may run into DynamoDB's ThrottlingException.
// You would have to batch the queries (in other words, split the arr into pieces, and iterate over each piece) before using map, then sleep for a few milliseconds before starting on the next piece (see the sketch after this code block).
// I doubt the queries will be quick enough to cause this when using a for loop, though.
await Promise.all(arr.map(async Item => {
const results = await queryItems(Item);
// do something with the results...
}));
}
catch (err) {
// Again, not sure what the use case is, but just FYI this is not a valid return value if this lambda function is intended for use with API Gateway.
// See here :) https://docs.aws.amazon.com/lambda/latest/dg/services-apigateway.html#apigateway-types-transforms
return { error: err };
}
};
// Presumably, MyTable has a partitionKey of seq, otherwise this KeyConditionExpression is invalid.
async function queryItems(p) {
try {
var params = {
TableName: 'MyTable',
KeyConditionExpression: '#seq = :value',
ExpressionAttributeValues: { ':value': p },
ExpressionAttributeNames: { '#seq': 'seq' }
};
const data = await docClient.query(params).promise();
return data;
}
catch (err) {
return err;
}
}
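As an aside, the batching idea mentioned in the comments could look roughly like this inside the handler, where arr and queryItems are in scope (a sketch; the chunk size and delay are arbitrary numbers, not recommendations):
// Query the items a chunk at a time, pausing briefly between chunks
// to reduce the chance of a ThrottlingException.
const chunkSize = 25; // arbitrary
const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));

for (let i = 0; i < arr.length; i += chunkSize) {
  const chunk = arr.slice(i, i + chunkSize);
  const results = await Promise.all(chunk.map(Item => queryItems(Item)));
  // do something with results...
  await sleep(50); // arbitrary pause
}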
Your issue is how you await inside the loop; it's best to use Promise.all() with a map when you need to await inside a loop:
await Promise.all(arr.map(async Item => {
const results = await queryItems(Item);
// do something with the results...
}));
However, I cannot quite follow your logic.
You Scan a table called MyTable, but you do not paginate, meaning you are only getting up to 1 MB worth of data (a paginated version is sketched below).
With the results, you strip out the seq value and then read every item from MyTable again, this time using a Query with seq as the key?
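If the table can hold more than 1 MB per Scan page, a paginated Scan that follows LastEvaluatedKey could look roughly like this (a sketch reusing docClient and paramsAll from the question):
async function scanAll(baseParams) {
  const items = [];
  const params = { ...baseParams };
  do {
    const page = await docClient.scan(params).promise();
    items.push(...page.Items);
    params.ExclusiveStartKey = page.LastEvaluatedKey; // undefined once the last page has been read
  } while (params.ExclusiveStartKey);
  return items;
}

// const Items = await scanAll(paramsAll);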
I am using AWS Comprehend's "PII" detection tools to spot personally identifying information (PII) in arbitrary text submitted by the public.
This is for a Nodejs Lambda that I mean to run in our AWS account.
const { Comprehend } = require('@aws-sdk/client-comprehend');
const client = new Comprehend({ region: "us-east-2" });
module.exports.handler = async (event, _context) => {
const message = event.data;
var redactedMessage = await getRedactedMessage(message);
return redactedMessage;
}
const getRedactedMessage = async (message) => {
// the Comprehend PII system takes strings up to 4000 characters
// so we split it on word boundaries after 4000 chars or less.
var messageParts = message.match(/.{1,4000}(\s|$)/g);
var redactedString = "";
for (const part in messageParts) {
try {
const checkPart = {
Text : message,
LanguageCode : "en"
};
client.detectPiiEntities(checkPart, function (err, pii) {
console.log("Why does this come last?");
});
} catch (error) {
console.error(error);
} finally {
console.log("Why do I get here first?");
}
console.log("And here? Then suddenly...");
}
}
No matter what, the output is
Why do I get here first?
And here? Then suddenly...
Why does this come last?
I have littered this with every possible combination of "async" and "await". I have rewritten the client.detectPiiEntities(checkPart, function (err, pii)... line as a promise with a .then(), but no matter what, I can't make the code wait for the results of the client.detectPiiEntities call.
I'm sure it's something dumb. (Probably me.) Thanks in advance.
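For reference, the promise form being described is available because the v3 SDK clients return a promise when a method is called without a callback; the awaited loop could look roughly like this (a sketch that only logs the detected entities, reusing the client and regex from the question):
const getRedactedMessage = async (message) => {
  // Split on word boundaries into parts of 4000 characters or less, as above.
  const messageParts = message.match(/.{1,4000}(\s|$)/g);
  for (const part of messageParts) {
    const pii = await client.detectPiiEntities({
      Text: part, // the current part, rather than the whole message
      LanguageCode: 'en'
    });
    console.log(pii.Entities); // this now logs before the loop moves on
  }
};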
Simple call to EC2 describing security groups and returning the security group ID. I'm using async/await, but when logging the return value, I get undefined. I fully admit I'm coming from Python and I've tried my hardest to wrap my brain around async calls. I thought I had it nailed, but I'm obviously missing something.
'use strict';
// Load Modules
const AWS = require('aws-sdk')
//Set the region
AWS.config.update({region: 'us-west-2'});
// Call AWS Resources
const ec2 = new AWS.EC2();
// Get Security Group ID From Event
const getSgIdFromEvent = async (event) => {
var ec2params = { Filters: [{ Name: 'tag:t_whitelist', Values: [event['site']] }] };
await ec2.describeSecurityGroups(ec2params, function (err, response) {
if (err) {return console.error(err.message)}
else {
var sgId = response.SecurityGroups[0].GroupId;
return sgId;
};
});
};
// MAIN FUNCTION
exports.handler = (event, context) => {
getSgIdFromEvent(event)
.then(sgId => {console.log(sgId)});
}
"sgId" should return the security group ID. It does print out fine in the original function before the return.
Typically, if the call supports promises, you want to handle it something like this, without using a callback:
// Load Modules
const AWS = require('aws-sdk')
//Set the region
AWS.config.update({ region: 'us-west-2' });
// Call AWS Resources
const ec2 = new AWS.EC2();
// Get Security Group ID From Event
const getSgIdFromEvent = async (event) => {
var ec2params = { Filters: [{ Name: 'tag:t_whitelist', Values: [event['site']] }] };
try {
const securityGroupsDesc = await ec2.describeSecurityGroups(ec2params).promise();
const sgId = securityGroupsDesc.SecurityGroups[0].GroupId;
//do something with the returned result
return sgId;
}
catch (error) {
console.log('handle error');
// throw error;
}
};
// MAIN FUNCTION
exports.handler = (event, context) => {
getSgIdFromEvent(event)
.then(sgId => { console.log(sgId) });
}
However, if a call doesn't support promises, you just use the callback to handle the returned data or error, without an async function. Reading the AWS docs, you can see that ec2.describeSecurityGroups() returns an AWS.Request,
which has a promise() method that needs to be invoked to send the request and get a promise back. Note that the try/catch here is not strictly needed, but it is good to have in case an error occurs during the request.
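In other words, the same operation can be invoked in either style (a minimal sketch; the promise version is assumed to run inside an async function):
// Callback style: the result arrives in the callback, not as a return value.
ec2.describeSecurityGroups(ec2params, (err, data) => {
  if (err) console.error(err);
  else console.log(data.SecurityGroups[0].GroupId);
});

// Promise style: .promise() on the returned AWS.Request sends the request
// and gives back a promise you can await.
const data = await ec2.describeSecurityGroups(ec2params).promise();
console.log(data.SecurityGroups[0].GroupId);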
As I said in the comment, chances are that describeSecurityGroups doesn't return a Promise. Try wrapping it explicitly in a Promise instead:
const promiseResponse = await new Promise((res, rej) => {
ec2.describeSecurityGroups(ec2params, function (err, response) {
if (err) {return rej(err.message)}
else {
var sgId = response.SecurityGroups[0].GroupId;
res(sgId);
};
})
});
// promiseResponse is now equal to sgId inside the callback
return promiseResponse; // this will work because the function is async
Note: You can drop the else keyword
Here is the code that worked using async/await. Thanks to @Cristian Traina I realized ec2.describeSecurityGroups wasn't returning a promise; it was returning an AWS.Request.
// Get Security Group ID From Event
const getSgIdFromEvent = async (event) => {
console.log('Getting Security Group ID')
var params = { Filters: [{ Name: 'tag:t_whitelist', Values: [event['site']] }] };
const describeSG = await ec2.describeSecurityGroups(params).promise();
return describeSG.SecurityGroups[0].GroupId;
};
// Get Ingress Rules from Security Group
const getSgIngressRules = async (sgId) => {
console.log(`Getting SG Ingress rules for ${sgId}`)
var params = { GroupIds: [ sgId]};
try{
const ingressRules = await ec2.describeSecurityGroups(params).promise();
return ingressRules;
}
catch (error) {
console.log("Something went wrong getting Ingress Ruls");
}
};
// MAIN FUNCTION
exports.handler = (event, context) => {
getSgIdFromEvent(event)
.then(sgId => {return getSgIngressRules(sgId);})
.then(ingressRules => {console.log(ingressRules);});
}
I submitted this as the answer since the getSgIdFromEvent function I have is only 8 lines and still uses async/await like I wanted.
What I was missing was the .promise() on the end of the call, and returning that promise.
Thanks for all the responses!
I am using Lambda with Cognito to write to DynamoDB after a successful login.
Node 8.10 has a different layout with promises and async/await. The callback(null, event) return is not working for me. Does anyone know how to solve the "Invalid lambda function output : Invalid JSON" problem with Node 8.10?
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
//AWS.config.update({region: 'REGION'});
// Create DynamoDB document client
var docClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});
exports.myHandler = async (event, context, callback) => {
// TODO implement
console.log ("Authentication successful");
console.log ("Trigger function =", event.triggerSource);
console.log ("User pool = ", event.userPoolId);
console.log ("App client ID = ", event.callerContext.clientId);
console.log ("User ID = ", event.userName);
const params = {
TableName: 'xxxx',
Item: {
'userId': event.userName,
'systemUpdateDate': new Date().toJSON()
}
};
let putItem = new Promise((res, rej) => {
docClient.put(params, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Success", data);
}
});
});
const result = await putItem;
console.log(result);
// Return to Amazon Cognito
callback(null, event);
};
thanks
With the async/await approach suggested for Node 8, you should structure your function as follows:
async function handler(event) {
const response = doSomethingAndReturnAJavascriptObject();
return response;
}
You're getting that error because whatever you're returning isn't something that can be parsed as a JSON object.
Without seeing your code it's hard to debug further. I expect that you might accidentally not be awaiting a .promise() version of a dynamo/cognito API call, which is causing you to return a Promise instead of a result.
n.b. you can still use the 'old' callback() method with Node 8 if you find it easier.
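Applied to the handler in the question, that could look roughly like this (a sketch; it keeps the same table name and simply returns the event back to Cognito, which is what the post-authentication trigger expects):
var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });

exports.myHandler = async (event) => {
  const params = {
    TableName: 'xxxx',
    Item: {
      userId: event.userName,
      systemUpdateDate: new Date().toJSON()
    }
  };
  // Await the promise version of the call; no hand-rolled Promise wrapper needed.
  await docClient.put(params).promise();
  // Return the event object itself so Cognito receives valid JSON.
  return event;
};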