I am creating a simple POST API that sends data to DynamoDB via a Lambda function. It works with hardcoded data, but I don't know why it stops working when I try to use the event values.
My Lambda function:
const AWS = require('aws-sdk');
const dynamodb = new AWS.DynamoDB({region: 'us-east-2', apiVersion: '2012-08-10'});

exports.handler = (event, context, callback) => {
    const params = {
        Item: {
            "QuestionID": {
                S: context.awsRequestId
            },
            "Would": {
                S: event.would
            },
            "Rather": {
                S: event.rather
            },
            "wouldClick": {
                N: event.wouldClick
            },
            "ratherClick": {
                N: event.ratherClick
            }
        },
        TableName: "Would-You-Rather"
    };
    dynamodb.putItem(params, function(err, data) {
        if (err) {
            console.log(err);
            callback(err);
        } else {
            console.log(data);
            callback(null, data);
        }
    });
};
This is the model used in my POST method:
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "title": "WouldYouRatherModel",
    "type": "object",
    "properties": {
        "would": {"type": "string"},
        "rather": {"type": "string"},
        "wouldClick": {"type": "integer"},
        "ratherClick": {"type": "integer"}
    },
    "required": ["would", "rather", "wouldClick", "ratherClick"]
}
My integration request mapping template:
#set($inputRoot = $input.path('$'))
{
    "would" : "$inputRoot.would",
    "rather" : "$inputRoot.rather",
    "wouldClick" : "$inputRoot.wouldClick",
    "ratherClick" : "$inputRoot.ratherClick"
}
If I hardcode the values in the Lambda function and test it in Lambda, it saves the values to the database. This is how I am hardcoding the values:
"QuestionID": {
S: context.awsRequestId
},
"Would": {
S: "helllo"
},
"Rather": {
S: "bye"
},
"wouldClick": {
N: 1
},
"ratherClick": {
N: 2
}
But the problem is that when I add the event values in the Lambda function and test the POST API from the API Gateway resources, it shows this error:
"errorMessage": "There were 2 validation errors:\n* InvalidParameterType: Expected params.Item['wouldClick'].N to be a string\n* InvalidParameterType: Expected params.Item['ratherClick'].N to be a string"
I think the issue is that I am passing the string values in the wrong way, because if something were wrong in the Lambda function or the model, the hardcoded values would not save to DynamoDB either; the error only appears when I send the values.
Even though wouldClick and ratherClick are Number types (N), they need to be passed to DynamoDB as strings. That is what the error message is telling you.
The documentation on DynamoDB attribute values says:
Numbers are sent across the network to DynamoDB as strings, to maximize compatibility across languages and libraries. However, DynamoDB treats them as number type attributes for mathematical operations.
The Number variables should be formatted as follows:
"wouldClick": { "N": "1" },
"ratherClick": { "N": "2" }
Related
All, I am working on a specific business requirement, and given the lack of info on Google I thought I would stop here for some input:
I am basically ingesting a CSV, converting it to a JSON object, and stuffing it into Dynamo. The interesting part is that the data types of the row values jump between strings and numbers, but I am unable to get this to work properly.
I am using Node and the aws-sdk, and I literally used the Amazon docs to test this straight up, and it still did not work; see below:
var params = {
    TableName: foo,
    Item: {
        masterReportsUuid: uuidv4(),
        reportDate: _eventDate,
        "testAttribute": {
            "Name": {
                "S": "Joe"
            },
            "Age": {
                "N": "35"
            }
        },
    }
};
dbDocClient.put(params, (err, data) => {
    if (err) {
        //log to CloudWatch
        console.log(err);
        reject(err);
    } else {
        resolve(data);
    }
});
The testAttribute obviously is a Map with Name and Age, string and number. This is straight from the documentation -
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/DynamoDB.html#putItem-property
An attribute of type Map. For example:
"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}
But this is what it outputs in Dynamo:
[screenshot: Dynamo Output]
So my question is - why is this not working?
EDIT: Typos.
Ugh - I fixed the issue. I will leave this here in case anyone runs into this.
Two issues: I was using the DynamoDB.DocumentClient().put API call and not the DynamoDB.putItem call, AND my params object was close but not correct. Please see below for a working example of nested Map AttributeTypes:
const dbDocClient = new aws.DynamoDB.DocumentClient();
const dbDynamo = new aws.DynamoDB();

var params = {
    TableName: _ReportsTable,
    Item: {
        testUuid: {
            "S": uuidv4()
        },
        testDate: {
            "S": _eventDate
        },
        testAttribute: {
            "M": {
                "Name": {
                    "S": "Joe"
                },
                "Age": {
                    "N": "35"
                }
            }
        },
    }
};

dbDynamo.putItem(params, (err, data) => {
    if (err) {
        //log to CloudWatch
        console.log(err);
        reject(err);
    } else {
        resolve(data);
    }
});
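As an aside, the same item could presumably also be written with the DocumentClient by passing plain JavaScript values and letting it do the type conversion; a sketch, reusing the variables from the example above:

// Sketch: DocumentClient marshals plain JS values (strings, numbers,
// nested objects) into the typed attribute format itself
var docParams = {
    TableName: _ReportsTable,
    Item: {
        testUuid: uuidv4(),
        testDate: _eventDate,
        testAttribute: {
            Name: "Joe",
            Age: 35
        }
    }
};

dbDocClient.put(docParams, (err, data) => {
    if (err) {
        console.log(err);
        reject(err);
    } else {
        resolve(data);
    }
});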
I'm trying to store a JSON object in DynamoDB using the code given below.
var AWS = require('aws-sdk');

exports.handler = function (event, context, callback) {
    var docClient = new AWS.DynamoDB.DocumentClient();
    var table = "Logs";
    var id_val = 1;
    var params = {
        TableName: table,
        Item: {
            "id": id_val,
            "message": event
        }
    };
    docClient.put(params, function(err, data) {
        if (err) {
            callback(null, JSON.stringify(err, null, 2));
            context.fail("Unable to add item. Error JSON:", JSON.stringify(err, null, 2));
        }
    });
}
Input event:
[
    {
        "id": 1,
        "demographic": {
            "firstName": "John",
            "middleName": "w",
            "lastName": "Doe",
            "suffix": "jr",
            "birthDate": "1990-02-02",
            "gender": "M",
            "ssn": 123
        }
    }
]
What's stored in the table
{
    "id": {
        "N": "84.20420287568176"
    },
    "message": {
        "L": [
            {
                "M": {
                    "demographic": {
                        "M": {
                            "birthDate": {
                                "S": "1990-02-02"
...
...
...
Why is the data type being stored in the table? How can I break this down so that the attributes are stored separately?
That's the way DynamoDB stores data: every attribute is annotated with its data type, much like the column type in a relational database. Although the data is stored and displayed this way in DynamoDB, when you read it back through an application or API you can strip the data types and get just the actual data.
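For example, if you want each field stored as its own top-level attribute instead of one nested message blob, a sketch (inside the question's handler, assuming the input event shown above) would be to flatten the record before the put; anything read back through the DocumentClient then comes out as plain values without the type annotations:

// Flatten the first record so each demographic field becomes its own
// top-level attribute rather than nesting everything under "message"
var record = Object.assign({ "id": id_val }, event[0].demographic);

docClient.put({ TableName: table, Item: record }, function (err, data) {
    if (err) {
        console.log(err);
    }
});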
I am running a Lambda function (Node.js) to upload some documents to AWS CloudSearch. I keep getting the following error:
{
    "errorMessage": "{ [\"The value of tags cannot be a JSON array or object\"] }",
    "errorType": "DocumentServiceException",
    "stackTrace": [
        "Object.extractError (/var/task/node_modules/aws-sdk/lib/protocol/json.js:48:27)",
        "Request.extractError (/var/task/node_modules/aws-sdk/lib/protocol/rest_json.js:37:8)",
        "Request.callListeners (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:105:20)",
        "Request.emit (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:77:10)",
        "Request.emit (/var/task/node_modules/aws-sdk/lib/request.js:678:14)",
        "Request.transition (/var/task/node_modules/aws-sdk/lib/request.js:22:10)",
        "AcceptorStateMachine.runTo (/var/task/node_modules/aws-sdk/lib/state_machine.js:14:12)",
        "/var/task/node_modules/aws-sdk/lib/state_machine.js:26:10",
        "Request.<anonymous> (/var/task/node_modules/aws-sdk/lib/request.js:38:9)",
        "Request.<anonymous> (/var/task/node_modules/aws-sdk/lib/request.js:680:12)"
    ]
}
I have followed the document format of
var item = {
    type: 'add',
    id: key,
    fields: {
        userid: value.userId,
        storyid: value.storyId,
        description: value.description,
        title: value.title,
        type: 'xyz'
    }
}
This is the code I am using to upload the data
var AWS = require('aws-sdk');
// Assumed setup: the CloudSearch domain's document endpoint goes here
var cloudsearchdomain = new AWS.CloudSearchDomain({endpoint: '...'});

exports.handle = function(e, ctx, cb) {
    ctx.callbackWaitsForEmptyEventLoop = false;
    var documentsBatch = e.data;
    var params = {
        contentType: 'application/json',
        documents: JSON.stringify(documentsBatch)
    };
    var req = cloudsearchdomain.uploadDocuments(params, function(err, data) {
        if (err) {
            // an error occurred
            cb(err, null);
        } else {
            // successful response
            cb(null, data);
        }
    });
    req.send();
}
My stringified data, when logged, looks similar to this:
[
    {
        "type": "add",
        "id": "FpgAxxxxKrM4utxosPy23--KhO6FgvxK",
        "fields": {
            "userid": "FpgARscKlxaxutxosPy23",
            "storyid": "-KhxbPpRP7REEK",
            "description": "xyz 🔥 🔥",
            "title": "umm",
            "type": "story"
        }
    },
    {
        "type": "add",
        "id": "FccccxosPy23--KiYbrrPjtJVk2bghO-W",
        "fields": {
            "userid": "FpgARfPy23",
            "storyid": "-KiYbrfggO-W",
            "description": "noo",
            "title": "lalaa out",
            "type": "story"
        }
    }
]
Can someone point me in the right direction?
The problem was with another JSON object in the batch, which had an additional attribute other than fields. Once I found and removed it, everything worked. There should be a linter for this, or the SDK should throw a better exception.
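In the absence of such a linter, a minimal pre-upload check is easy to sketch (assuming the batch format shown above, where each document should only carry type, id, and fields):

// Throw early if any document in the batch has unexpected top-level keys
const ALLOWED_KEYS = ['type', 'id', 'fields'];

function validateBatch(documentsBatch) {
    documentsBatch.forEach((doc, i) => {
        const extra = Object.keys(doc).filter(k => ALLOWED_KEYS.indexOf(k) === -1);
        if (extra.length > 0) {
            throw new Error('Document ' + i + ' has unexpected keys: ' + extra.join(', '));
        }
    });
}

Calling validateBatch(documentsBatch) before JSON.stringify would surface the offending document directly instead of the opaque DocumentServiceException.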
We have an API that will be used to provision certain resources in AWS using CloudFormation. This includes a Lambda function that will send events to S3, with the bucket being configurable. The thing is, we will know the bucket name when we provision the Lambda, not within the Lambda code itself.
As far as I can tell, there is no way to inject the S3 bucket name at provisioning time in the CloudFormation template itself. Is that true?
The only solution I can see is to generate the function code on the fly and embed it into the CloudFormation template. This would make us unable to use any npm dependencies along with the function code. Is there a better option?
So, I realized I had never updated this question with my eventual solution. I ended up embedding a proxy Lambda function into the CloudFormation template, which enabled me to inject template parameters.
Example:
{
    "AWSTemplateFormatVersion": "2010-09-09",
    "Description": "Creates a function to relay messages from a Kinesis instance to S3",
    "Parameters": {
        "S3Bucket": {
            "Type": "String",
            "Description": "The name of the S3 bucket where the data will be stored"
        },
        "S3Key": {
            "Type": "String",
            "Description": "The key of the directory where the data will be stored"
        }
    },
    "Resources": {
        "mainLambda": {
            "Type": "AWS::Lambda::Function",
            "Properties": {
                "Handler": "index.handler",
                "Description": "Writes events to S3",
                "Role": { "Ref": "LambdaRoleARN" },
                "Runtime": "nodejs4.3",
                "Code": {
                    "S3Bucket": "streams-resources",
                    "S3Key": "astro-bass/${GIT_COMMIT}/lambda/astro-bass.zip"
                }
            }
        },
        "lambdaProxy": {
            "Type": "AWS::Lambda::Function",
            "Properties": {
                "Handler": "index.handler",
                "Runtime": "nodejs",
                "Code": {
                    "ZipFile": { "Fn::Join": ["", [
                        "var AWS = require('aws-sdk');",
                        "var lambda = new AWS.Lambda();",
                        "exports.handler = function(event, context) {",
                        "event.bundledParams = ['",
                        { "Ref": "S3Bucket" },
                        "','",
                        { "Ref": "S3Key" },
                        "'];",
                        "lambda.invoke({",
                        "FunctionName: '",
                        { "Ref": "mainLambda" },
                        "',",
                        "Payload: JSON.stringify(event, null, 2),",
                        "InvocationType: 'Event'",
                        "}, function(err, data) {",
                        "if(err) {",
                        "context.fail(err);",
                        "}",
                        "context.done();",
                        "});",
                        "};"
                    ]]}
                }
            }
        },
    },
    ...
}
The proxy function has the parameters (the S3 bucket and key) injected into its code, and it then invokes the main Lambda with a modified event object. It's a little unorthodox, but it struck me as much cleaner than the other available solutions, such as parsing stack names, etc. It has worked well thus far.
Note that this solution currently only works with the legacy Node environment. Not an issue right now, but worrisome in terms of the longevity of this solution.
UPDATE:
We ran into limitations with the previous solution and had to devise yet another one. We ended up with an off-label use of the description field to embed configuration values. Here is our Lambda:
'use strict';

var aws = require('aws-sdk');
var lambda = new aws.Lambda({apiVersion: '2014-11-11'});

let promise = lambda.getFunctionConfiguration({ FunctionName: process.env['AWS_LAMBDA_FUNCTION_NAME'] }).promise();

exports.handler = async function getTheConfig(event, context, cb) {
    try {
        let data = await promise;
        cb(null, JSON.parse(data.Description).bucket);
    } catch (e) {
        cb(e);
    }
};
Then, in the description field, you can embed a simple JSON snippet like so:
{
    "bucket": "bucket-name"
}
Moreover, because the promise is created outside of the handler, this structure limits the request to once per container spawn rather than once per Lambda execution.
Not quite the cleanest solution, but the most functional one we've found.
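On the template side, the Description can be assembled from stack parameters with Fn::Join; a sketch, assuming an S3Bucket parameter like in the earlier template:

"Description": { "Fn::Join": ["", [
    "{\"bucket\":\"", { "Ref": "S3Bucket" }, "\"}"
]] }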
At the moment there is no way of passing parameters to a Lambda function besides the event itself.
If you are creating a Lambda function with CloudFormation you could use the following workaround:
Use the Lambda function name to derive the CloudFormation stack name.
Use the CloudFormation stack name to access resources, or parameters of the stack when executing the Lambda function.
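A rough sketch of that workaround (the naming convention and the S3Bucket parameter here are hypothetical, assuming the function is named <stack-name>-<function-name>):

var AWS = require('aws-sdk');
var cloudformation = new AWS.CloudFormation();

exports.handler = function (event, context, callback) {
    // Derive the stack name from the function name (assumed naming convention)
    var stackName = context.functionName.split('-')[0];
    cloudformation.describeStacks({ StackName: stackName }, function (err, data) {
        if (err) return callback(err);
        // Read a parameter of the stack, e.g. a hypothetical S3Bucket parameter
        var param = data.Stacks[0].Parameters.find(function (p) {
            return p.ParameterKey === 'S3Bucket';
        });
        callback(null, param && param.ParameterValue);
    });
};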
I would suggest doing it like this.
First, create an index.js file and add this code:
var AWS = require('aws-sdk');
const s3 = new AWS.S3();
const https = require('https');

exports.handler = (event, context, callback) => {
    const options = {
        hostname: process.env.ApiUrl,
        port: 443,
        path: '/todos',
        method: 'GET'
    };
    const req = https.request(options, (res) => {
        console.log('statusCode:', res.statusCode);
        console.log('headers:', res.headers);
        res.on('data', (d) => {
            process.stdout.write(d);
        });
    });
    req.on('error', (e) => {
        console.error(e);
    });
    req.end();
};
Zip the index.js file and upload it to an S3 bucket in the same region as your lambda function.
Then use this CloudFormation template; make sure you specify the correct bucket name.
{
    "AWSTemplateFormatVersion": "2010-09-09",
    "Description": "ApiWorkflow",
    "Metadata": {
    },
    "Parameters": {
        "ApiUrl": {
            "Description": "Specify the api url",
            "Type": "String",
            "Default": "jsonplaceholder.typicode.com"
        }
    },
    "Mappings": {
    },
    "Conditions": {
    },
    "Resources": {
        "lambdaVodFunction": {
            "Type": "AWS::Lambda::Function",
            "Properties": {
                "Code": {
                    "S3Bucket": "lamdba-exec-tests",
                    "S3Key": "index.js.zip"
                },
                "Handler": "index.handler",
                "Role": "arn:aws:iam::000000000:role/BasicLambdaExecRole",
                "Runtime": "nodejs10.x",
                "FunctionName": "ApiWorkflow",
                "MemorySize": 128,
                "Timeout": 5,
                "Description": "Texting Lambda",
                "Environment": {
                    "Variables": {
                        "ApiUrl": {
                            "Ref": "ApiUrl"
                        },
                        "Test2": "Hello World"
                    }
                }
            }
        }
    },
    "Outputs": {
        "ApiUrl": {
            "Description": "Set api url",
            "Value": {
                "Ref": "ApiUrl"
            }
        }
    }
}
In the template you can see environment variables; you can access these in your Node.js Lambda function like this.
process.env.ApiUrl
I'm trying to set up a small API from AWS Lambda to DynamoDB, and I am having trouble figuring out if and how I can write an array of objects into a key.
I have an object like:
{
    "teamName": "Team Awesome",
    "members": [
        {
            "email": "person-1#example.com",
            "name": "Bob"
        },
        {
            "email": "person-2#example.com",
            "name": "Alice"
        }
    ]
}
The members array is giving me issues. From the docs it looks like it can be done, considering the list types, but there is just no example of HOW to do it, and I am running out of ways to try it.
So is it possible to write something in this format at all, and if so, how do you do it?
Example code - what do I put at ???
var AWS = require('aws-sdk');
var dynamodb = new AWS.DynamoDB();

exports.handler = function(event, context) {
    var tableName = "GDCCompetition";
    var datetime = new Date().getTime().toString();
    dynamodb.putItem({
        "TableName": tableName,
        "Item": {
            "datetime": {
                "N": datetime
            },
            "teamName": {
                "S": event.teamName
            },
            "members": ???
        }
    });
}
The documentation is not really obvious, but there is a thing called the DocumentClient: you can pass a plain JS object to it and it will do all the parsing and transformation into a DynamoDB object (with all the types). You can use it like this:
var AWS = require("aws-sdk");
var DynamoDB = new AWS.DynamoDB.DocumentClient();

var params = {
    TableName: "MyTable",
    Item: {
        "teamName": "Team Awesome",
        "members": [
            {
                "email": "person-1#example.com",
                "name": "Bob"
            },
            {
                "email": "person-2#example.com",
                "name": "Alice"
            }
        ]
    }
};

DynamoDB.put(params, function (err) {
    if (err) {
        throw err;
    }
    // the item was written successfully
});
You could convert the object to a DynamoDB record first:
const AWS = require('aws-sdk');
const dynamodb = new AWS.DynamoDB();

var tableName = "GDCCompetition";
var datetime = new Date().getTime().toString();
const members = [
    {
        "email": "person-1#example.com",
        "name": "Bob"
    },
    {
        "email": "person-2#example.com",
        "name": "Alice"
    }
];

// Converter.marshall turns a plain JS object into the typed attribute format
const marshalled = AWS.DynamoDB.Converter.marshall({ members });

const params = {
    "TableName": tableName,
    "Item": {
        "datetime": {
            "N": datetime
        },
        "teamName": {
            "S": event.teamName
        },
        "members": marshalled.members,
    },
};

dynamodb.putItem(params, function (err) {
    if (err) {
        throw err;
    }
});
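For reference, Converter.marshall produces the typed attribute-value structure, so marshalled.members in the snippet above would look roughly like this:

"members": {
    "L": [
        { "M": { "email": { "S": "person-1#example.com" }, "name": { "S": "Bob" } } },
        { "M": { "email": { "S": "person-2#example.com" }, "name": { "S": "Alice" } } }
    ]
}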