NodeJS AWS Lambda, DynamoDB not writing result and not logging - node.js

I'm having some trouble with DynamoDB. I've set up my Lambda permissions for full CRUDL (administrator access, so no restrictions). The following is the handler, and it's based on the docs:
https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GettingStarted.NodeJs.03.html
const uuidv4 = require("uuid/v4");
const services = require("./services/services");
var AWS = require("aws-sdk");
// Region must be configured before clients are constructed.
AWS.config.update({ region: "eu-west-2" });
// DocumentClient marshals plain JS objects to DynamoDB attribute values.
var docClient = new AWS.DynamoDB.DocumentClient();
var tableName = "usersTable";
module.exports = {
registerUser: async (event, context) => {
const id = uuidv4();
let body;
if (event.body !== null && event.body !== undefined) {
body = JSON.parse(event.body);
}
const isValid = await services.validateUser(body);
if (isValid) {
var params = {
TableName: tableName,
Item: {
userId: "123abc",
firstName: "finn",
lastName: "murtons",
email: "email#email.com",
password: "secret"
}
};
console.log("Adding a new item...");
console.log(" Adding params", params);
docClient.put(params, function(err, data) {
if (err) {
console.error(
"Unable to add item. Error JSON:",
JSON.stringify(err, null, 2)
);
} else {
console.log("Added item:", JSON.stringify(data, null, 2));
}
});
}
},
... other functions
For this example, I'm hardcoding the params for clarity, but obviously I would usually get them from the event.body.
When I make a post request to this Lambda, I get a 502 error.
Looking at the cloudwatch logs, it gets as far as:
INFO Adding params { TableName: 'usersTable',
Item:
{ userId: '123abc',
firstName: 'finn',
lastName: 'murtons',
email: 'email#email.com',
password: 'secret' } }
Then there are no more logs after that. Ignore the isValid function, it's just checking that the request event body has the required fields.
Anybody have any ideas where I might be going wrong?

It's likely that the lambda is exiting before the DynamoDB call is made. You should make the call a Promise and await it:
await docClient.put(params).promise();

Related

Storing username with post confirmation trigger (AWS, Lambda, DynamoDB)

I try to store some parameters in a AWS DynamoDB with Cognito post confirmation trigger.
The lambda is written in node.js but I am not able to store the username of the signed up cognito user.
const AWS = require('aws-sdk');
// DocumentClient pinned to eu-central-1 via the constructor options.
const dynamodb = new AWS.DynamoDB.DocumentClient({region: 'eu-central-1'});
exports.handler = async (event, context, callback) => {
console.log(event);
const username = event.userName;
await createMessage(username).then(() => {
callback(null, {
statusCode: 201,
body: '',
headers: {
'Access Control Allow Origin' : '*'
}
});
}).catch((err) => {
console.error(err);
});
};
/**
 * Persists a freshly generated family id for the given Cognito username.
 * @param {string} username - stored as the UserId partition key
 * @returns {Promise} resolves when the DynamoDB put completes
 */
function createMessage(username) {
  // Pseudo-random id: current epoch millis in base36 plus a random base36 suffix.
  const timePart = new Date().getTime().toString(36);
  const randomPart = Math.random().toString(36).slice(2);
  const item = {
    'UserId': username,
    'message': timePart + randomPart
  };
  return dynamodb.put({ TableName: 'eldertech', Item: item }).promise();
}
My test:
{
"username":"admin",
"email":"admin#admin.com",
"userId": "AD87S"
}
The error:
ERROR ValidationException: One or more parameter values were invalid: Missing the key UserId in the item
Can somebody please help a newbie?
Okay, I just had a typo....
const username = event.username;
No capital n in username...

AWS Lambda using s3 getObject function and putItem function to insert it into DynamoDB but nothing happens

this is the node.js code:
'use strict';
const AWS = require("aws-sdk");
// Region must be set before the clients below are constructed.
AWS.config.update({
region: 'eu-west-1'});
const docClient = new AWS.DynamoDB.DocumentClient();
const tableName = 'Fair';
const s3 = new AWS.S3();
exports.handler = async (event) => {
var getParams = {
Bucket: 'dataforfair', //s3 bucket name
Key: 'fairData.json' //s3 file location
}
const data = await s3.getObject(getParams).promise()
.then( (data) => {
//parse JSON
let fairInformations = JSON.parse(data.Body.toString());
fairInformations.forEach(function(fairInformationEntry) {
console.log(fairInformationEntry);
var params = {
TableName: tableName,
Item: {
"year": fairInformationEntry.year,
"fairName": fairInformationEntry.fairName,
"info": fairInformationEntry.info
}
};
docClient.put(params, function(err, data) {
console.log('*****test');
if (err) {
console.error("Unable to add fairInformation", fairInformationEntry.fairName, ". Error JSON:", JSON.stringify(err, null, 2));
} else {
console.log("PutItem succeeded:", fairInformationEntry.fairName);
}
});
});
})
.catch((err) => {
console.log(err);
});
const response = {
statusCode: 200,
body: JSON.stringify(data),
};
return response;
};
Hello everyone,
I want to put the data into the Dynamo DB after getting the JSON file from the s3 Bucket. Getting the JSON works and the console.log(fairInformationEntry); is also still triggered, but the docClient.put() never gets called. I am getting no error, nothing. I do not know what is wrong and why it is not working. I have the right IAM role and access to everything I need.
I hope you can help me!
The problem is a mix-up of promises, callbacks, and async/await. You are also trying to perform an asynchronous operation inside forEach. The code should look something like this:
"use strict";
const AWS = require("aws-sdk");
// Region must be set before the clients below are constructed.
AWS.config.update({
region: "eu-west-1"
});
const docClient = new AWS.DynamoDB.DocumentClient();
const tableName = "Fair";
const s3 = new AWS.S3();
exports.handler = async event => {
var getParams = {
Bucket: "dataforfair", //s3 bucket name
Key: "fairData.json" //s3 file location
};
const data = await s3.getObject(getParams).promise();
//parse JSON
let fairInformations = JSON.parse(data.Body.toString());
await Promise.all(
fairInformations.map(fairInformationEntry => {
console.log(fairInformationEntry);
var params = {
TableName: tableName,
Item: {
year: fairInformationEntry.year,
fairName: fairInformationEntry.fairName,
info: fairInformationEntry.info
}
};
return docClient.put(params).promise();
})
);
const response = {
statusCode: 200,
body: JSON.stringify(data)
};
return response;
};
Hope this helps

AWS cognito is giving serializationException(Start of structure or map found where not expected) while doing sign up in node.js How to fix this issue?

I'm trying to add signup functionality with AWS Cognito, but while signing up I'm getting a SerializationException. How can I resolve this issue?
My signup function look like this
const AmazonCognitoIdentity = require("amazon-cognito-identity-js");
const AWS = require("aws-sdk");
global.fetch = require("node-fetch");
const keys = require("../../config/keys");
// NOTE(review): prefer IAM roles / environment configuration over long-lived
// access keys embedded via a config module — confirm the keys file is not
// committed to source control.
AWS.config.update({
accessKeyId: keys.awsKeys.key,
secretAccessKey: keys.awsKeys.secret,
region: keys.region.awsRegionId
});
const poolConfig = {
UserPoolId: keys.cognito.userPoolId,
ClientId: keys.cognito.clientId
};
// create a new user pool
const userPool = new AmazonCognitoIdentity.CognitoUserPool(poolConfig);
/**
 * Registers a new user in the Cognito user pool.
 *
 * BUG FIX: CognitoUserPool.signUp(username, password, attributeList, ...)
 * expects the username as a plain *string* first argument. The original
 * passed the `emailData` attribute object instead, which the Cognito API
 * rejects with "SerializationException: Start of structure or map found
 * where not expected." The email is now passed as the username string and
 * added to the attribute list. The password is likewise coerced to a
 * string, since passing an object triggers the same exception.
 *
 * @param {{email:string, name:string, familyname:string, password:string}} userData
 * @returns {Promise<{status:string, message:string}>} resolves when signup is accepted
 */
async function signupFunc(userData) {
  console.log('JSON string received : ' + JSON.stringify(userData));
  const emailData = {
    Name: "email",
    Value: userData.email
  };
  const name = {
    Name: "name",
    Value: userData.name
  };
  const password = String(userData.password); // must be a string, not an object
  const familyname = {
    Name: 'family_name',
    Value: userData.familyname
  };
  return new Promise((resolve, reject) => {
    try {
      const attributeList = [
        new AmazonCognitoIdentity.CognitoUserAttribute(name),
        new AmazonCognitoIdentity.CognitoUserAttribute(familyname),
        new AmazonCognitoIdentity.CognitoUserAttribute(emailData)
      ];
      // First argument is the username *string* (the email doubles as username).
      userPool.signUp(userData.email, password, attributeList, null, (err, result) => {
        if (err) {
          console.error(`ERROR : ${JSON.stringify(err)}`);
          return reject({ status: 0, error: "Error!!!" });
        }
        return resolve({
          status: "200",
          message: "Check email and verify!"
        });
      });
    } catch (error) {
      console.log(`ERROR : ${JSON.stringify(error)}`);
      return reject({error: error});
    }
  });
}
module.exports = signupFunc;
While executing this method I'm getting below exception.
{
"code":"SerializationException",
"name":"SerializationException",
"message":"Start of structure or map found where not expected."
}
Any help will be much appreciated.
I had the same problem, because I was mistakenly passing the password as an Object instead of a String. Make sure you password is a String:
Auth.completeNewPassword(user, password, {}).then(data => console.log(data));
Here user is an Object and password is a String.

lambda with graphql and sqs send 2 messages to sqs in nodejs?

I'm working on a project where I need to write a Lambda function that provides an AWS API to handle a GraphQL query and send the payload to AWS SQS. Everything is working fine, but when I check my AWS SQS queue it shows 2 messages every single time instead of 1, while CloudWatch shows the function was triggered only once. Below I'm sharing my code; any help would be very much appreciated.
index.js
const { graphql } = require("graphql");
const { schema } = require("./graphql/schema");
exports.handler = async (event) => {
// getting query from lambda event
const query = event.query;
// getting query variables from lambda event
const variables = event.variables;
return await graphql(schema, query, null, null, variables);
};
sqs.js
const AWS = require("aws-sdk");
AWS.config.update({ region: "us-east-1"});
// Create an SQS service object
const sqs = new AWS.SQS({apiVersion: '2012-11-05', "accessKeyId": process.env.ACCESS_KEY_ID, "secretAccessKey": process.env.SECRET_ACCESS_KEY});
const QueueUrl = process.env.SQS_QUEUE_URL;
/**
 * Serializes `message` and queues it on SQS.
 *
 * BUG FIX (two defects in the original):
 *  1. `await` was used inside a non-async arrow function — a syntax error.
 *  2. sendMessage() was given BOTH a node-style callback AND .promise();
 *     each form dispatches the request, so every payload was sent twice —
 *     the duplicate messages observed in the queue.
 * The callback is removed and the request is awaited exactly once.
 *
 * @param {object} message - payload to enqueue (JSON-stringified)
 * @returns {Promise<void>} resolves once the message is queued
 */
const sendPayloadToSQS = async (message) => {
  const params = {
    MessageBody: JSON.stringify(message),
    QueueUrl
  };
  const result = await sqs.sendMessage(params).promise();
  if (result) {
    console.log("Message queued to SQS successfully : ", result.MessageId);
  } else {
    console.log("Message queued failed");
  }
};
module.exports = sendPayloadToSQS;
graphql mutation file
const { GraphQLNonNull } = require("graphql");
const { mutationWithClientMutationId } = require("../../common");
const { JobRequestEventResponse } = require("../jobRequestEventResponse");
const { JobRequestInput, JobEventMetadataInput } = require("../jobSchema");
const sendPayloadToSQS = require("../../../sqs");
const { newId } = require("../../../newId");
const JobRequestEvent = mutationWithClientMutationId({
name: "JobRequestEvent",
inputFields: {
eventMetadataInput: {
type: new GraphQLNonNull(JobEventMetadataInput),
},
eventInput: {
type: new GraphQLNonNull(JobRequestInput),
},
},
outputFields: {
JobRequestEventResponse: {
type: JobRequestEventResponse,
},
},
mutateAndGetPayload: async (params) => {
const new_id = newId();
if(params.eventInput.jobId === null || params.eventInput.jobId === undefined) {
params.eventInput.jobId = new_id;
}
const payload = {
_id: new_id,
transactionId: new_id,
name: params.eventMetadataInput.name,
userRole: params.eventMetadataInput.userRole,
date: params.eventMetadataInput.date,
languageCode: params.eventMetadataInput.languageCode,
eventInput: params.eventInput,
};
//send payload to sqs
await sendPayloadToSQS(payload);
return {
JobRequestEventResponse: {
id: payload._id,
transactionId: payload.transactionId,
status: "Success",
},
};
},
});
module.exports = {
JobRequestEvent,
};
I read the documentation again and found that the callback was the root cause of my problem: providing a callback to sendMessage dispatches the request once, and calling .promise() on the same request dispatches it again. So I simply removed the callback, as you can see below.
Refer : AWS Official documentation
Instead of this:
await sqs.sendMessage(params, function(err, data) {
if (err) {
console.log("Message sending failed : ", err);
} else {
console.log("Message queued to SQS successfully : ", data.MessageId);
}
}).promise();
I write this:
const request = sqs.sendMessage(params);
const result = await request.promise();
if(result) {
console.log("Message queued to SQS successfully : ", result.MessageId);
} else {
console.log("Message queued failed");
}

Can anyone provide an example of a dynamodb document client upsert?

I'm looking for a non trivial upsert example in node for AWS dynamodb's DocumentClient. Can someone share a sample that has worked for them?
I would like to see an example that sets a created_at, updated_at and a id field on creating the record but only sets the updated_at when the record is found and updated.
This should achieve your goal. My apologies, as I missed your criteria in my original posting. I am assuming id is the key in your table.
'use strict';
var AWS = require('aws-sdk');
// NOTE(review): `ec2` is constructed but never used in this example.
var ec2 = new AWS.EC2({ apiVersion: '2016-09-15' });
var ddb = new AWS.DynamoDB.DocumentClient();
// NOTE(review): the region is assigned *after* the clients above are created;
// set it before constructing clients (or pass {region} to their constructors).
AWS.config.region = 'us-east-1';
// Module-level mutable state persists across warm Lambda invocations.
var ret = {};
const ddbTbl = 'table_name';
// NOTE(review): evaluated once at module load (cold start); warm invocations
// would reuse a stale timestamp — TODO compute the timestamp per invocation.
var date = new Date();
var dateAsIso = date.toISOString();
exports.handler = (event, context, callback) => {
var key = 'bar';
var params = {
TableName: ddbTbl,
Key: {
"id": key
},
UpdateExpression: "set #u = :t, #c = if_not_exists(#c, :t)",
ExpressionAttributeNames: {
"#u": "updated_at",
"#c": "created_at"
},
ExpressionAttributeValues: {
":t": dateAsIso
},
ReturnValues: "UPDATED_NEW"
};
ddb.update(params, function(err, data) {
if (err) {
console.log(err, err.stack);
ret.ddbUpdate = err;
callback(null, {success: false, message: ["ddb upsert failed"], payload: ret});
// or use the regular callback for failures if you don't want to do your own envelope response
} else {
console.log(data);
ret.ddbUpdate = data;
callback(null, {success: true, message: ["ddb upsert succeeded"], payload: ret});
}
});
};

Resources