According to this AWS doc http://docs.aws.amazon.com/general/latest/gr/api-retries.html, an automatic retry feature is built into the AWS SDK, in my case the Node.js AWS SDK. I configured the DocumentClient object like this:
var dynamodb = new AWS.DynamoDB.DocumentClient({
  region: 'us-west-2',
  retryDelayOptions: {base: 50},
  maxRetries: 20
});
but I still cannot get it to auto-retry. I also want it to automatically retry all UnprocessedItems.
Can you point out where my mistake is?
Thanks
The retryDelayOptions and maxRetries options are present on AWS.DynamoDB, not on the DocumentClient. The DocumentClient has to be configured by passing in a DynamoDB service object:
var dynamodb = new AWS.DynamoDB({maxRetries: 5, retryDelayOptions: {base: 300} });
var docClient = new AWS.DynamoDB.DocumentClient({service : dynamodb});
The AWS client SDKs do indeed have built-in retry mechanisms, but those retries are at the request level. That means that any request rejected by the server with a 500-level error, or in some cases a 400-level throttling error, is automatically retried based on the configured settings.
What you are asking for is business-layer retry behavior, which is NOT built into the SDK. The UnprocessedItems collection contains items that were rejected by the service for various reasons, and you have to write your own logic to handle those.
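As an illustration only, here is a minimal sketch of such business-layer retry logic (the docClient and params names are assumed, not from the question): it re-submits whatever batchWrite reports back in UnprocessedItems, backing off exponentially between attempts.

async function batchWriteWithRetry(docClient, params, maxAttempts = 5) {
  let request = params;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    const result = await docClient.batchWrite(request).promise();
    const unprocessed = result.UnprocessedItems || {};
    if (Object.keys(unprocessed).length === 0) {
      return; // everything was written
    }
    // wait 100 ms, 200 ms, 400 ms, ... before retrying only the leftovers
    await new Promise(resolve => setTimeout(resolve, 100 * 2 ** (attempt - 1)));
    request = { RequestItems: unprocessed };
  }
  throw new Error('Some items were still unprocessed after all retry attempts');
}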
After sending the response we can keep handling the unprocessed items in the background until they are all complete. The code below may be useful for you:
var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient();

router.post('/someBatchWrites', (req, res) => {
  // params is assumed to be the batchWrite request built from req.body (not shown in this snippet)
  docClient.batchWrite(params, function (error, data) {
    res.send(error, data);
    handler(error, data); // handle unprocessed items in the background
  });
});
// handler method
function handler(err, data) {
  if (err) {
    console.log("Error", err);
  } else {
    console.log("Success", data);
    if (Object.keys(data.UnprocessedItems).length) {
      setTimeout(() => {
        docClient.batchWrite({ RequestItems: data.UnprocessedItems }, handler);
      }, 100000);
    }
  }
}
I'm trying to build an application with a basic client-server infrastructure. The server infrastructure is hosted on AWS, and when a client logs on, it sends a message to the server to set up various infrastructure considerations. One of the pieces of infrastructure is an SQS Queue that the client can poll from to get updates from the server (eventually I'd like to build a push service but I don't know how for right now).
I'm building this application in NodeJS using the Node AWS SDK. The problem I'm having is that I need the queue ARN to do various things, like subscribing the SQS queue to an SNS topic that the application uses, but the create-queue API returns the queue URL, not the ARN. I should be able to get the ARN from the URL using the getQueueAttributes API, but it doesn't seem to be working: whenever I call it, I get undefined as the response. Here's my code, please tell me what I'm doing wrong:
exports.handler = (event, context, callback) => {
  new aws.SQS({apiVersion: '2012-11-05'}).createQueue({
    QueueName: event.userId
  }).promise()
  .then(data => { /* This has the Queue URL */
    new aws.SQS({apiVersion: '2012-11-05'}).getQueueAttributes({
      QueueUrl: data.QueueUrl,
      AttributeNames: ['QueueArn']
    }).promise()
  })
  .then(data => {
    console.log(JSON.stringify(data)); /* prints "undefined" */
  })
  /* Some more code down here that's irrelevant */
}
Thanks!
const AWS = require('aws-sdk');
const sqs = new AWS.SQS();

exports.handler = async (event, context, callback) => {
  var params = {
    QueueUrl: 'my-queue-url',
    AttributeNames: ['QueueArn']
  };
  let fo = await sqs.getQueueAttributes(params).promise();
  console.log(fo);
};
and it printed
{
  ResponseMetadata: { RequestId: '123456-1234-1234-1234-12345' },
  Attributes: {
    QueueArn: 'arn:aws:sqs:eu-west-1:12345:my-queue-name'
  }
}
With the help of Ersoy, I realized that I was using block-formatting (with {}) to write my Promises, but I was never returning anything from those blocks. I had thought that the last value in the Promise block was the return value by default, but it seems that was not the case. When I added return before the SQS API command, then it worked (without using async/await).
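For reference, here is a minimal sketch of the corrected chain, in the same shape as the code in the question (with the SQS client hoisted into a variable): the inner promise is explicitly returned, so the next .then receives the getQueueAttributes result.

exports.handler = (event, context, callback) => {
  const sqs = new aws.SQS({ apiVersion: '2012-11-05' });
  return sqs.createQueue({ QueueName: event.userId }).promise()
    .then(data => {
      // Returning the promise makes its resolved value flow to the next .then
      return sqs.getQueueAttributes({
        QueueUrl: data.QueueUrl,
        AttributeNames: ['QueueArn']
      }).promise();
    })
    .then(data => {
      console.log(JSON.stringify(data)); // now logs the QueueArn attribute
    });
};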
I have a function which accesses multiple AWS resources, and I now need to test this function, but I don't know how to mock these resources.
I have tried following the aws-sdk-mock GitHub documentation, but didn't get very far with it.
function someData(event, configuration, callback) {
  // sts set-up
  var sts = new AWS.STS(configuration.STS_CONFIG);
  sts.assumeRole({
    DurationSeconds: 3600,
    RoleArn: process.env.CROSS_ACCOUNT_ROLE,
    RoleSessionName: configuration.ROLE_NAME
  }, function(err, data) {
    if (err) {
      // an error occurred
      console.log(err, err.stack);
    } else {
      // successful response
      // resolving static credential
      var creds = new AWS.Credentials({
        accessKeyId: data.Credentials.AccessKeyId,
        secretAccessKey: data.Credentials.SecretAccessKey,
        sessionToken: data.Credentials.SessionToken
      });
      // Query function
      var dynamodb = new AWS.DynamoDB({apiVersion: configuration.API_VERSION, credentials: creds, region: configuration.REGION});
      var docClient = new AWS.DynamoDB.DocumentClient({apiVersion: configuration.API_VERSION, region: configuration.REGION, endpoint: configuration.DDB_ENDPOINT, service: dynamodb});
      // extract params
      var ID = event.queryStringParameters.Id;
      console.log('metrics of id ' + ID);
      var params = {
        TableName: configuration.TABLE_NAME,
        ProjectionExpression: configuration.PROJECTION_ATTR,
        KeyConditionExpression: '#ID = :ID',
        ExpressionAttributeNames: {
          '#ID': configuration.ID
        },
        ExpressionAttributeValues: {
          ':ID': ID
        }
      };
      queryDynamoDB(params, docClient).then((response) => {
        console.log('Params: ' + JSON.stringify(params));
        // if the query is successful
        if (typeof(response[0]) !== 'undefined') {
          response[0]['Steps'] = process.env.STEPS;
          response[0]['PageName'] = process.env.STEPS_NAME;
        }
        console.log('The response you get', response);
        var success = {
          statusCode: HTTP_RESPONSE_CONSTANTS.SUCCESS.statusCode,
          body: JSON.stringify(response),
          headers: {
            'Content-Type': 'application/json'
          },
          isBase64Encoded: false
        };
        return callback(null, success);
      }, (err) => {
        // return internal server error
        return callback(null, HTTP_RESPONSE_CONSTANTS.BAD_REQUEST);
      });
    }
  });
}
This is the Lambda function which I need to test; there are also some environment variables used here.
I tried writing a unit test for the above function using aws-sdk-mock, but I am still not able to figure out how to actually do it. Any help will be appreciated. Below is my test code:
describe('test getMetrics', function() {
  var expectedOnInvalid = HTTP_RESPONSE_CONSTANTS.BAD_REQUEST;
  it('should assume role ', function(done) {
    var event = {
      queryStringParameters: {
        Id: '123456'
      }
    };
    AWS.mock('STS', 'assumeRole', 'roleAssumed');
    AWS.restore('STS');
    AWS.mock('Credentials', 'credentials');
    AWS.restore('Credentials');
    AWS.mock('DynamoDB.DocumentClient', 'get', 'message');
    AWS.mock('DynamoDB', 'describeTable', 'message');
    AWS.restore('DynamoDB');
    AWS.restore('DynamoDB.DocumentClient');
    someData(event, configuration, (err, response) => {
      expect(response).to.deep.equal(expectedOnInvalid);
      done();
    });
  });
});
I am getting the following error:
{ MultipleValidationErrors: There were 2 validation errors:
* MissingRequiredParameter: Missing required key 'RoleArn' in params
* MissingRequiredParameter: Missing required key 'RoleSessionName' in params
Try setting the aws-sdk module explicitly.
Project structures that don't include the aws-sdk at the top level node_modules project folder will not be properly mocked. An example of this would be installing the aws-sdk in a nested project directory. You can get around this by explicitly setting the path to a nested aws-sdk module using setSDK().
const AWSMock = require('aws-sdk-mock');
const AWS = require('aws-sdk');
AWSMock.setSDKInstance(AWS);
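If the aws-sdk lives in a nested node_modules folder, the documented setSDK() alternative lets you point aws-sdk-mock at it by path; the relative path below is only a placeholder for wherever your project actually installs the SDK.

const path = require('path');
const AWSMock = require('aws-sdk-mock');
// hypothetical nested location; adjust to your project layout
AWSMock.setSDK(path.resolve(__dirname, 'nested-project/node_modules/aws-sdk'));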
For more details, read the aws-sdk-mock documentation; it explains this even better.
I strongly disagree with #ttulka's answer, so I have decided to add my own as well.
Given you received an event in your Lambda function, it's very likely you'll process the event and then invoke some other service. It could be a call to S3, DynamoDB, SQS, SNS, Kinesis...you name it. What is there to be asserted at this point?
Correct arguments!
Consider the following event:
{
  "data": "some-data",
  "user": "some-user",
  "additionalInfo": "additionalInfo"
}
Now imagine you want to invoke documentClient.put and you want to make sure that the arguments you're passing are correct. Let's also say that you DON'T want the additionalInfo attribute to be persisted, so, somewhere in your code, you'd have this to get rid of this attribute
delete event.additionalInfo
right?
You can now create a unit test to assert that the correct arguments were passed into documentClient.put, meaning the final object should look like this:
{
  "data": "some-data",
  "user": "some-user"
}
Your test must assert that documentClient.put was invoked with a JSON which deep equals the JSON above.
If you or any other developer now, for some reason, removes the delete event.additionalInfo line, tests will start failing.
And this is very powerful! If you make sure that your code works the way you expect, you basically don't have to worry about creating integration tests at all.
Now, if an SQS consumer Lambda expects the body of the message to contain some field, the producer Lambda should always take care of making sure the right arguments are persisted in the queue. I think by now you get the idea, right?
I always tell my colleagues that if we can create proper unit tests, we should be good to go in 95% of the cases, leaving integration tests out. Of course it's better to have both, but given the amount of time spent on creating integration tests (setting up environments, credentials, sometimes even different accounts), it is often not worth it. But that's just MY opinion. Both you and #ttulka are more than welcome to disagree.
Now, back to your question:
You can use Sinon to mock and assert arguments in your Lambda functions. If you need to mock a 3rd-party service (like DynamoDB, SQS, etc.), you can create a mock object and swap it into your file under test using Rewire. This is usually the road I take, and it has worked great so far.
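A rough sketch of that approach follows; the module path './saveItem', its exported handler, the table name, and the internal docClient variable name are all hypothetical, and the assumption is that the handler module keeps its DocumentClient in a module-level variable.

const sinon = require('sinon');
const rewire = require('rewire');

// Load the module under test so its internals can be swapped out
const handlerModule = rewire('./saveItem'); // hypothetical module path

it('persists the event without additionalInfo', async () => {
  // Stub documentClient.put so it resolves like the real SDK call would
  const putStub = sinon.stub().returns({ promise: () => Promise.resolve({}) });
  handlerModule.__set__('docClient', { put: putStub }); // docClient is an assumed internal name

  await handlerModule.handler({
    data: 'some-data',
    user: 'some-user',
    additionalInfo: 'additionalInfo'
  });

  // Assert the exact arguments: additionalInfo must not be persisted
  sinon.assert.calledWithMatch(putStub, {
    TableName: 'my-table', // hypothetical table name
    Item: { data: 'some-data', user: 'some-user' }
  });
});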
I see unit testing as a way to check if your domain (business) rules are met.
As long as your Lambda contains exclusively integration of AWS services, it doesn't make much sense to write a unit test for it.
Mocking all the resources means your test will only be testing the communication among those mocks; such a test has no value.
External resources mean input/output, this is what integration testing focuses on.
Write integration tests and run them as a part of your integration pipeline against real deployed resources.
This is how we can mock STS in Node.js.
import { STS } from 'aws-sdk';

export default class GetCredential {
  constructor(public sts: STS) { }

  public async getCredentials(role: string) {
    this.log.info('Retrieving credential...', { role });
    const apiRole = await this.sts
      .assumeRole({
        RoleArn: role,
        RoleSessionName: 'test-api',
      })
      .promise();
    if (!apiRole?.Credentials) {
      throw new Error(`Credentials for ${role} could not be retrieved`);
    }
    return apiRole.Credentials;
  }
}
Mock for the above function
import { STS } from 'aws-sdk';
import GetCredential from './GetCredential';

const sts = new STS();

let testService: GetCredential;

beforeEach(() => {
  testService = new GetCredential(sts);
});

describe('Given getCredentials has been called', () => {
  it('The method returns a credential', async () => {
    const credential = {
      AccessKeyId: 'AccessKeyId',
      SecretAccessKey: 'SecretAccessKey',
      SessionToken: 'SessionToken'
    };
    const mockGetCredentials = jest.fn().mockReturnValue({
      promise: () => Promise.resolve({ Credentials: credential }),
    });
    testService.sts.assumeRole = mockGetCredentials;
    const result = await testService.getCredentials('fakeRole');
    expect(result).toEqual(credential);
  });
});
I have this Lambda function code which is invoked by an SQS queue.
SQS triggers my Lambda function (in Node.js).
Lambda will also send out an SES email. Is there a way I can test this on my local Ubuntu rather than always using AWS web console?
Any help is appreciated.
Here is my Lambda Node.js code. This code works only on AWS Lambda; when I run
$ node index.js
it does not send out the SES email.
var aws = require("aws-sdk");
var nodemailer = require("nodemailer");
aws.config.loadFromPath('aws_config.json');
var ses = new aws.SES();
var s3 = new aws.S3();

// Set the region
aws.config.update({region: 'us-west-2'});

exports.handler = function (event, context, callback) {
  const response = {
    statusCode: 200,
    body: JSON.stringify({
      message: 'SQS event processed.',
      input: event,
    }),
  };
  console.log('event: ', JSON.stringify(event.Records));
  var result = JSON.stringify(event.Records);
  result = result.replace(/(^\[)/, '');
  result = result.replace(/(\]$)/, '');
  var resultObj = JSON.parse(result);
  var idCustomer = resultObj.body;
  console.log('===SENDING EMAIL====');

  // Create sendEmail params
  var params = {
    Destination: {
      /* required */
      CcAddresses: [
        'XXXXX#gmail.com',
        /* more items */
      ]
    },
    Message: {
      /* required */
      Body: {
        /* required */
        Html: {
          Charset: "UTF-8",
          Data: "BODY:"
        },
        Text: {
          Charset: "UTF-8",
          Data: "TEXT_FORMAT_BODY"
        }
      },
      Subject: {
        Charset: 'UTF-8',
        Data: idCustomer
      }
    },
    Source: 'xxxx#eeeee.com', /* required */
    ReplyToAddresses: [
      'wwwwww#wwwwwwwww.com',
      /* more items */
    ],
  };

  // Create the promise and SES service object
  var sendPromise = new aws.SES({apiVersion: '2010-12-01'}).sendEmail(params).promise();

  // Handle promise's fulfilled/rejected states
  sendPromise.then(
    function (data) {
      console.log("Successfully sent using SES");
      console.log(data.MessageId);
    }).catch(
    function (err) {
      console.log("An error occurred while sending using SES");
      console.error(err, err.stack);
    });
};
You should definitely take a look at SAM Local. It is a tool developed by the AWS team specifically for testing Lambdas.
https://github.com/awslabs/aws-sam-cli
It is easy to use, you just type
sam local invoke --event event.json
And behind the scenes it will run a Docker container for your Lambda and call it.
Regarding your SES call, you should put a small if (SAM_LOCAL) condition in the code and only call the real service when not in local mode. Note that SAM_LOCAL is an environment variable set by the SAM Local tool when you run a function locally.
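A minimal sketch of that guard, assuming the AWS_SAM_LOCAL environment variable that the SAM CLI sets for locally invoked functions, and reusing the params, callback, and aws names from the handler above:

// Sketch only: skip the real SES call when running under `sam local invoke`.
const runningLocally = process.env.AWS_SAM_LOCAL === 'true';

if (runningLocally) {
  console.log('Local run: skipping SES, params were', JSON.stringify(params));
  callback(null, 'ok (local)');
} else {
  new aws.SES({ apiVersion: '2010-12-01' }).sendEmail(params).promise()
    .then(data => callback(null, data))
    .catch(err => callback(err));
}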
Good luck !
If you want to use AWS as a backend, the Serverless Framework is probably what you are looking for: https://serverless.com/. If you want to test your code without executing the Lambda on the AWS backend, take a look at the LocalStack framework: https://github.com/localstack/localstack
So, on my first time learning AWS stuff (it is a beast), I'm trying to create e-mail templates. I have this Lambda function:
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({ region: "us-east-1" });

exports.handler = async (event, context, callback) => {
  // Create createTemplate params
  var params = {
    Template: {
      TemplateName: "notification" /* required */,
      HtmlPart: "HTML_CONTENT",
      SubjectPart: "SUBJECT_LINE",
      TextPart: "sending emails with aws lambda"
    }
  };

  // Create the promise and SES service object
  const templatePromise = new AWS.SES({ apiVersion: "2010-12-01" })
    .createTemplate(params)
    .promise();

  // Handle promise's fulfilled/rejected states
  templatePromise
    .then((data) => {
      console.log(data);
      callback(null, JSON.stringify(data));
      // also tried callback(null, data);
    }, (err) => {
      console.error(err, err.stack);
      callback(JSON.stringify(err));
    });
};
As far as I understand, this function should return me a template? An object, anything? When I use the Lambda test functionality I always get null in the request response.
Does anyone know what I am doing wrong here?
Edit: it is also not creating the e-mail template; I checked the SES panel's email templates and it is empty.
Edit 2: if I try to return a string, e.g. callback(null, "some success message");, it does return the string, so my guess is something is wrong with the SES call, but this function is exactly what we have in the AWS docs, so I assume it should just work.
Try not to resolve the Promise yourself and change your code to just return it as-is:
return await templatePromise;
This should give you more detail about what is really going wrong in your code. It might be some hidden access issue, so you might need to adjust the role your Lambda function is using. createTemplate, on the other hand, should not return much on successful execution; it just creates the template.
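Applied to the handler from the question, that would look roughly like this (a sketch only; error handling is intentionally left to Lambda so any SES failure surfaces in the test response):

exports.handler = async (event) => {
  const params = {
    Template: {
      TemplateName: "notification",
      HtmlPart: "HTML_CONTENT",
      SubjectPart: "SUBJECT_LINE",
      TextPart: "sending emails with aws lambda"
    }
  };
  // Returning the promise result (instead of using callback) lets any SES
  // error propagate, so the Lambda test console shows the real failure reason.
  return await new AWS.SES({ apiVersion: "2010-12-01" })
    .createTemplate(params)
    .promise();
};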
Also try to follow the following try/catch pattern when using async (as described here in more detail: https://aws.amazon.com/de/blogs/compute/node-js-8-10-runtime-now-available-in-aws-lambda/)
// assumes a client such as: const lambda = new AWS.Lambda();
exports.handler = async (event) => {
  let data;
  try {
    data = await lambda.getAccountSettings().promise();
  }
  catch (err) {
    console.log(err);
    return err;
  }
  return data;
};
Migrating an EC2 LAMP stack into a serverless architecture, with Lambda handling the SMS text messages.
My PHP code works fine, and from the console:
aws sns publish --phone-number 0044xxxxxxxxxx --message boo --region eu-west-1
Also works
When I run the Node.js code in Lambda from the command line I get "Status": 202, but no text arrives.
When I run that same code from the TEST button in the inline code editor on AWS I get "Successful" and clean logs, but no text.
The code failing is:
// dependencies
var AWS = require('aws-sdk');
var sns = new AWS.SNS();

exports.handler = function(event, context) {
  var params = {
    MessageStructure: 'string',
    PhoneNumber: '0044xxxxxxxxxx',
    Message: 'An error occurred'
  };
  sns.publish(params, function(err, data) {
    if (err) {
      console.log(err, err.stack); // an error occurred
      context.done(err, 'Finished with Errors!');
      return;
    }
    else console.log(data); // successful response
  });
  context.done(null, 'finished');
};
This is also code I got from another question in SO which was shown to work for another user.
Any idea what could be the issue?
Many thanks
Kevin
A few things you might want to look into:
You'll need to make sure you're using either Node.js runtime v6.10 or v4.3. (Node v0.10.42 is currently marked as deprecated. AWS recommends migrating existing functions to the newer Node.js runtime versions as soon as possible)
The IAM role for your lambda function needs to have an Allow rule for the sns:Publish action.
AWS recommends that you specify the phone number using the E.164 format. For example: +44xxxxxxxxxx. (more info)
Also, AWS strongly recommends updating any use of the context method and replacing it with the callback approach (more info). For example:
const AWS = require("aws-sdk");
const sns = new AWS.SNS({apiVersion: "2010-03-31"});

exports.handler = (event, context, callback) => {
  const params = {
    PhoneNumber: "+44xxxxxxxxxx", // E.164 format.
    Message: "STRING_VALUE",
    MessageStructure: "STRING_VALUE"
  };
  sns.publish(params, (err, data) => {
    if (err) {
      console.error(`Error ${err.message}`);
      callback(err);
    } else {
      console.log("Success");
      callback(null, data); // callback instead of context.done
    }
  });
};
Please refer to the answer at SO Link; it worked for me. By default the SMS message type is set to Promotional, and you have to override it to Transactional.
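For example, one way to do that (a sketch, reusing the params shape passed to sns.publish in the answer above) is to set the SMS type via a message attribute:

const params = {
  PhoneNumber: "+44xxxxxxxxxx",
  Message: "STRING_VALUE",
  MessageAttributes: {
    // overrides the account-level default (Promotional) for this one message
    "AWS.SNS.SMS.SMSType": {
      DataType: "String",
      StringValue: "Transactional"
    }
  }
};

Alternatively, the account-wide default can be changed once with sns.setSMSAttributes({ attributes: { DefaultSMSType: 'Transactional' } }).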