I'm trying to mock SES with Sinon, but I'm getting the error below. I also tried aws-sdk-mock, but it's not working.
Error: TypeError: Cannot stub non-existent own property sendEmail
Code snippet of test class:
import * as AWS from 'aws-sdk';
const sandbox = sinon.createSandbox();
sandbox.stub(AWS.SES, 'sendEmail').returns({promise: () => true});
Actual class:
import * as AWS from 'aws-sdk';
import * as _ from 'lodash';
export async function sendAlertMailOnFailure(status: any)
{
    // load AWS SES
    var ses = new AWS.SES();

    const params = {
        Destination: {
            ToAddresses: <to_address>
        },
        Message: {...},
        Source: <sender_address>
    }

    ses.sendEmail(params, (err, data) => {
        if (err) {
            log.error("Error sending mail::");
            log.error(err, err.stack);
        }
    })
}
Is there any way to mock SES with Sinon or with aws-sdk-mock?
My answer here is not a direct solution for SES, but it is a working solution I'm using for mocking DynamoDB.DocumentClient and SQS. Perhaps you can adapt my working example for SES and other aws-sdk clients in your unit tests.
I just spent hours trying to get AWS SQS mocking working, without resorting to the aws-sdk-mock requirement of importing aws-sdk clients inside a function.
The mocking for AWS.DynamoDB.DocumentClient was pretty easy, but the AWS.SQS mocking had me stumped until I came across the suggestion to use rewire.
My Lambda moves bad messages to an SQS FailQueue (rather than letting the Lambda fail and return the message to the regular queue for retries, and then to the DeadLetterQueue after maxRetries). The unit tests needed to mock the following SQS methods:
SQS.getQueueUrl
SQS.sendMessage
SQS.deleteMessage
I'll try to keep this example code as concise as I can while still including all the relevant parts:
Snippet of my AWS Lambda (index.js):
const AWS = require('aws-sdk');
AWS.config.update({region:'eu-west-1'});
const docClient = new AWS.DynamoDB.DocumentClient();
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
// ...snip
Abridged Lambda event records (event.json):
{
"valid": {
"Records": [{
"messageId": "c292410d-3b27-49ae-8e1f-0eb155f0710b",
"receiptHandle": "AQEBz5JUoLYsn4dstTAxP7/IF9+T1S994n3FLkMvMmAh1Ut/Elpc0tbNZSaCPYDvP+mBBecVWmAM88SgW7iI8T65Blz3cXshP3keWzCgLCnmkwGvDHBYFVccm93yuMe0i5W02jX0s1LJuNVYI1aVtyz19IbzlVksp+z2RxAX6zMhcTy3VzusIZ6aDORW6yYppIYtKuB2G4Ftf8SE4XPzXo5RCdYirja1aMuh9DluEtSIW+lgDQcHbhIZeJx0eC09KQGJSF2uKk2BqTGvQrknw0EvjNEl6Jv56lWKyFT78K3TLBy2XdGFKQTsSALBNtlwFd8ZzcJoMaUFpbJVkzuLDST1y4nKQi7MK58JMsZ4ujZJnYvKFvgtc6YfWgsEuV0QSL9U5FradtXg4EnaBOnGVTFrbE18DoEuvUUiO7ZQPO9auS4=",
"body": "{ \"key1\": \"value 1\", \"key2\": \"value 2\", \"key3\": \"value 3\", \"key4\": \"value 4\", \"key5\": \"value 5\" }",
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1536763724607",
"SenderId": "AROAJAAXYIAN46PWMV46S:steve.goossens#bbc.co.uk",
"ApproximateFirstReceiveTimestamp": "1536763724618"
},
"messageAttributes": {},
"md5OfBody": "e5b16f3a468e6547785a3454cfb33293",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:eu-west-1:123456789012:sqs-queue-name",
"awsRegion": "eu-west-1"
}]
}
}
Abridged unit test file (test/index.test.js):
const AWS = require('aws-sdk');
const expect = require('chai').expect;
const LamdbaTester = require('lambda-tester');
const rewire = require('rewire');
const sinon = require('sinon');
const event = require('./event');
const lambda = rewire('../index');
let sinonSandbox;
function mockGoodSqsMove() {
    const promiseStubSqs = sinonSandbox.stub().resolves({});
    const sqsMock = {
        getQueueUrl: () => ({ promise: sinonSandbox.stub().resolves({ QueueUrl: 'queue-url' }) }),
        sendMessage: () => ({ promise: promiseStubSqs }),
        deleteMessage: () => ({ promise: promiseStubSqs })
    }
    lambda.__set__('sqs', sqsMock);
}

describe('handler', function () {
    beforeEach(() => {
        sinonSandbox = sinon.createSandbox();
    });

    afterEach(() => {
        sinonSandbox.restore();
    });

    describe('when SQS message is in dedupe cache', function () {
        beforeEach(() => {
            // mock SQS
            mockGoodSqsMove();

            // mock DynamoDBClient
            const promiseStub = sinonSandbox.stub().resolves({ 'Item': 'something' });
            sinonSandbox.stub(AWS.DynamoDB.DocumentClient.prototype, 'get').returns({ promise: promiseStub });
        });

        it('should return an error for a duplicate message', function () {
            return LamdbaTester(lambda.handler)
                .event(event.valid)
                .expectReject((err, additional) => {
                    expect(err).to.have.property('message', 'Duplicate message: {"Item":"something"}');
                });
        });
    });
});
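To tie this back to the original SES question: the same rewire trick could presumably be adapted (an untested sketch on my part; it assumes the module under test creates its SES client at module scope, for example const ses = new AWS.SES();, rather than inside the function, so that rewire has a variable to replace):
// Untested sketch: swap out a module-scoped `ses` client via rewire,
// mirroring the sqsMock approach above.
function mockGoodSesSend() {
    const sesMock = {
        sendEmail: () => ({ promise: sinonSandbox.stub().resolves({ MessageId: 'fake-message-id' }) })
    };
    lambda.__set__('ses', sesMock);
}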
You need to use the prototype in AWS to stub it:
import AWS from 'aws-sdk';
const sandbox = sinon.createSandbox();
sandbox.stub(AWS.prototype, 'SES').returns({
    sendEmail: () => {
        return true;
    }
});
The error seems to indicate that AWS is being imported as undefined.
It might be that your ES6 compiler isn't automatically turning this line:
import AWS from 'aws-sdk';
...into an import of everything in aws-sdk into AWS.
Change it to this:
import * as AWS from 'aws-sdk';
...and that may fix the issue.
(Disclaimer: I can't reproduce the error in my environment which is compiling with Babel v7 and automatically handles either approach)
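Once AWS is a real namespace object, a constructor-level stub (the approach used in a couple of the answers below) has something to attach to. A minimal sketch, assuming the production code only calls new AWS.SES() after the stub is in place:
import * as AWS from 'aws-sdk';
import * as sinon from 'sinon';

const sandbox = sinon.createSandbox();

// Sketch: stub the SES constructor so `new AWS.SES()` returns this fake client.
// .yields(null, data) invokes the (err, data) callback style used in the question.
sandbox.stub(AWS, 'SES').returns({
    sendEmail: sandbox.stub().yields(null, { MessageId: 'fake-message-id' })
});

// ...and restore when the test is done:
// sandbox.restore();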
Using require and without using prototype: this is working for me for mocking DynamoDB.
const aws = require('aws-sdk');
const sinon = require('sinon');
const sandbox = sinon.createSandbox();
this.awsStub = sandbox.stub(aws, 'DynamoDB').returns({
    query: function() {
        return {
            promise: function() {
                return {
                    Items: []
                };
            }
        };
    }
});
Packages:
"aws-sdk": "^2.453.0"
"sinon": "^7.3.2"
I was able to use aws-sdk-mock by doing the following:
Test class
const AWSMock = require('aws-sdk-mock');
const AWS = require('aws-sdk');
AWSMock.setSDKInstance(AWS);
...
AWSMock.mock('SES', 'sendRawEmail', mockSendEmail);
// call method that needs to mock send an email goes below
sendEmail(to, from, subject, body, callback);
function mockSendEmail(params, callback) {
    console.log('mock email');
    return callback(null, {
        MessageId: '1234567',
    });
}
Actual class
const aws = require('aws-sdk');
const nodemailer = require('nodemailer');
function sendEmail(to, from, subject, body, callback) {
    let addresses = to;
    if (!Array.isArray(addresses)) {
        addresses = [addresses];
    }

    let replyTo = [];
    if (from) {
        replyTo.push(from);
    }

    let data = {
        to: addresses,
        replyTo,
        subject,
        text: body,
    };

    nodemailer.createTransport({ SES: new aws.SES({ apiVersion: '2010-12-01' }) }).sendMail(data, callback);
}
const AWS = require('aws-sdk');
...
const sandbox = sinon.createSandbox();
sandbox.stub(AWS, 'SES').returns({
    sendRawEmail: () => {
        console.log("My sendRawEmail");
        return {
            promise: function () {
                return {
                    MessageId: '987654321'
                };
            }
        };
    }
});
let ses = new AWS.SES({ region: 'us-east-1' });
let result = ses.sendRawEmail(params).promise();
Related
I am trying to mock the S3 class inside the aws-sdk module while making sure that the methods inside the S3 class can be spied on.
I am able to mock the S3 class inside aws-sdk; however, I cannot spy on the methods inside the class.
Any ideas on how to approach this problem?
These are my code snippets:
services/s3.js
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

const uploadAsset = async (param) => {
    try {
        const response = await s3.upload(param).promise();
        return response;
    } catch (e) {
        console.log(e);
    }
}

module.exports = { uploadAsset }
services.s3.test.js
const AWS = require('aws-sdk');
const { uploadAsset } = require('../services/s3')
jest.mock('aws-sdk', () => {
    return {
        S3: class {
            constructor() { }
            upload(param) { // 👈 I want to make sure that this method is called
                return {
                    promise: () => {
                        return Promise.resolve(
                            {
                                Location: `http://${param.Bucket}.s3.amazonaws.com/${param.Key}`,
                                Key: param.Key
                            }
                        )
                    }
                }
            }
        }
    }
});
describe('uploadAsset() functionality', () => {
    it('should upload an asset', async () => {
        const uploadPath = 'users/profilePicture';
        const base64Str = '/9j/4AAQSkZJRgABAQAAAQABAAD/';
        const buffer = Buffer.from(base64Str, 'base64');

        const s3 = new AWS.S3();

        const response = await uploadAsset({
            Bucket: 'BucketName',
            Key: `KeyName`,
            Body: buffer,
        });

        const spy = jest.spyOn(s3, 'upload')
        expect(spy).toBeCalled(); // 🚨 This spy never gets called
    });
});
Any insights would be helpful.
Thanks.
I mocked the aws-sdk successfully; however, my spy on the S3 method never gets called.
I am almost positive that this is a scope problem. I think my spyOn call only affects my local S3 instance, but I still have no idea how to test this specific scenario.
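One possible way around the scope problem (my own sketch, not a confirmed solution from this thread) is to assert on a module-level jest.fn() instead of spying on a local instance. It relies on Jest's convention that variables whose names start with mock may be referenced inside a jest.mock factory, so the hypothetical mockUpload below is shared by every new AWS.S3(), including the one created inside services/s3.js:
const mockUpload = jest.fn((param) => ({
    promise: () => Promise.resolve({
        Location: `http://${param.Bucket}.s3.amazonaws.com/${param.Key}`,
        Key: param.Key,
    }),
}));

// Hypothetical factory: every `new AWS.S3()` receives the same mockUpload,
// so the test can assert on it directly.
jest.mock('aws-sdk', () => ({
    S3: jest.fn(() => ({ upload: mockUpload })),
}));

const { uploadAsset } = require('../services/s3');

it('should call S3.upload', async () => {
    await uploadAsset({ Bucket: 'BucketName', Key: 'KeyName', Body: Buffer.from('') });
    expect(mockUpload).toHaveBeenCalled();
});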
I have an AWS Lambda function which is triggered by an API Gateway event. The API Gateway is configured to use X-Ray.
As the Lambda tracing configuration defaults to PassThrough, it is also shown in X-Ray (service map, etc.).
The invoked Lambda uses the Node.js aws-sdk to invoke another Lambda. If I understand correctly, the trace ID has to be passed on to the next invocation in order to show that Lambda in X-Ray as well. I found no option for this in the API of the SDK.
const result = await lambda
    .invoke(lambdaParamsCreateUser)
    .promise()
How can I achieve this? How can I trace also the invocation of the original request?
Following the tips from @Balu Vyamajala, I changed the AWS-SDK import to the following:
import AWS from "aws-sdk";
import AwsXRay from "aws-xray-sdk-core";
const aws = AwsXRay.captureAWS(AWS);
export default aws;
I use it when I invoke my second function like this:
import AWS from "aws";
const Lambda = AWS.Lambda;
// ...
const lambda = new Lambda({ region: "eu-central-1" });
const lambdaPromise = lambda
    .invoke({
        FunctionName: AUTH_CREATE_USER_FUNC,
        InvocationType: "RequestResponse",
        Qualifier: AUTH_CREATE_USER_FUNC_VERSION,
        Payload: JSON.stringify({
            eMail: eMail,
            device: device,
            customerId: customerId,
        }),
        LogType: "Tail",
    })
    .promise()
But in X-Ray there is no invocation chain :-(
https://imgur.com/wDMlNzb
Am I making a mistake somewhere?
If we enable X-Ray for both Lambda functions, the trace ID is passed along automatically and will be the same for both Lambdas.
In the code, we can enable X-Ray simply by wrapping it around the aws-sdk:
JavaScript:
const AWSXRay = require("aws-xray-sdk-core");
const AWS = AWSXRay.captureAWS(require("aws-sdk"));
TypeScript:
import AWSXRay from 'aws-xray-sdk';
import aws from 'aws-sdk';
const AWS = AWSXRay.captureAWS(aws)
Here is a sample test to confirm.
balu-test >> sample-test
Lambda 1 (balu-test):
const AWSXRay = require("aws-xray-sdk-core");
const AWS = AWSXRay.captureAWS(require("aws-sdk"));
const lambda = new AWS.Lambda();
exports.handler = async function (event, context) {
    var params = {
        FunctionName: "sample-test",
        InvocationType: "RequestResponse",
        Payload: '{ "name" : "foo" }',
    };

    const response = await lambda.invoke(params).promise();
    console.log('response', response);
    return "success";
};
Lambda 2 (sample-test):
const AWSXRay = require("aws-xray-sdk-core");
const AWS = AWSXRay.captureAWS(require("aws-sdk"));
let region = "us-east-1"
let secretName = "SomeSecret"
let secret
let decodedBinarySecret
var client = new AWS.SecretsManager({
    region: region,
});

exports.handler = (event, context, callback) => {
    client.getSecretValue({ SecretId: secretName }, function (err, data) {
        if (err) {
            callback(err);
        } else {
            if ("SecretString" in data) {
                secret = data.SecretString;
            } else {
                let buff = Buffer.from(data.SecretBinary, "base64");
                decodedBinarySecret = buff.toString("ascii");
            }
            callback(null, secret);
        }
    });
};
The trace ID is the same, and X-Ray shows the same graph for both Lambda invocations. The same thing happens when the first API is called from API Gateway: the trace ID is generated for the first call and is passed along as an HTTP header to downstream processes.
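As a quick sanity check (my suggestion, not part of the original answer), both handlers can log the trace ID that Lambda exposes in the _X_AMZN_TRACE_ID environment variable; for a single end-to-end request the Root= portion should match in both logs:
// Sketch: log the incoming X-Ray trace header inside each handler.
console.log('trace:', process.env._X_AMZN_TRACE_ID);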
I'm new to sinon and can't achieve the results I want.
I'm trying to stub AWS S3 API getObject to return a test-provided object.
My production code has:
let s3 = new AWS.S3({ apiVersion: '2006-03-01' });
let params = {
    Bucket: aws_bucket,
    Key: path
};

s3.getObject(params, function(err, data) {
    ...
});
My test code has:
describe('GET /image', function() {
    beforeEach(function() {
        let stub = sinon.createStubInstance(AWS.S3);
        stub.getObject.callsFake(() => { console.log('stubbed'); });
    });
The AWS S3 class instance is fully stubbed when I run the test, which is great, but it is not calling my fake.
What am I missing?
I found a working approach.
Step 1: Wrap the AWS.S3 instance in another module.
// s3.js
var AWS = require('aws-sdk');
var s3 = new AWS.S3({ apiVersion: '2006-03-01' });
module.exports = s3;
Step 2: Change the production code to use this instance instead of creating its own.
// image.js
var s3 = require('./s3');
// ... other code ...
s3.getObject(...);
Step 3: Stub what needs to be stubbed.
// image-test.js
var s3 = require('./s3');
var getObjectStub;
describe('GET /image', function() {
    beforeEach(function() {
        getObjectStub = sinon.stub(s3, 'getObject').callsFake(() => { console.log('stubbed'); });
    });

    afterEach(() => {
        getObjectStub.restore();
    });

    // test code continues
});
I'm having some issues stubbing this dependency. I know there is an aws-sdk-mock module, but my goal is to stub it with sinon and chai.
Here is my code.
Test code
const chai = require('chai');
const sinon = require('sinon');
const chaiHttp = require('chai-http');
const app= require('./app');
chai.use(chaiHttp);
const queryMock = sinon.stub();

const dynamoMock = {
    DocumentClient: sinon.fake.returns({
        query: queryMock
    })
}
let awsDynamoMock;
describe.only('Integration test for activation', () => {
    beforeEach(() => {
        awsDynamoMock = sinon.stub(require('aws-sdk'), 'DynamoDB');
        awsDynamoMock.returns(dynamoMock);
    })

    afterEach(() => {
        awsDynamoMock.restore();
    })

    it('Request /endpoint returns HTTP 200 with {} when user exists and all tasks are done', (done) => {
        const params = {
            TableName: 'table',
            KeyConditionExpression: `client_id= :i`,
            ExpressionAttributeValues: {
                ':i': '23424234'
            },
            ConsistentRead: true
        };

        const userWithNoPendingsMock = {
            Items: [
                {
                    client_id: "23424234",
                },
            ],
            Count: 1,
            ScannedCount: 1,
        }

        queryMock.withArgs(params).returns({
            promise: () => sinon.fake.resolves(userWithNoPendingsMock)
        })

        chai
            .request(app)
            .post("/endpoint")
            .end((err, res) => {
                chai.expect(res.status).to.be.eql(200);
                chai.expect(res.body).to.eql({});
                done();
            });
    });
})
Connection to DynamoDB to stub:
const AWS = require('aws-sdk');
AWS.config.update({region:'REGION'});
let docClient = false;
const getDynamoSingleton = async () => {
    if (docClient) return docClient;
    docClient = new AWS.DynamoDB.DocumentClient();
    console.log(docClient)
    return docClient
}
module.exports = getDynamoSingleton
Example of using DynamoDB:
const getElementById = async (TableName, key, id) => {
    const docClient = await getDynamoSingleton();

    // Make query params.
    const params = {
        TableName,
        KeyConditionExpression: `${key} = :i`,
        ExpressionAttributeValues: {
            ':i': id
        },
        ConsistentRead: true
    };

    // Run query as promise.
    return docClient.query(params).promise();
}
I'm really stuck on this problem, so any help would be useful. I know the problem has something to do with the DocumentClient.
Thanks for the help.
I realize this is an old question, but you can set up a resolvable object with a little trickery. Some inspiration from this answer.
const sandbox = require('sinon').createSandbox();
const AWS = require('aws-sdk');
describe('...', () => {
    it('...', (done) => {
        // Create a dummy resolver, which returns an empty object.
        const dummy = { func: () => {} };
        sandbox.stub(dummy, 'func').resolves({ some: 'fake response' });

        // Mock docClient.query. Binding to .prototype should make this apply to any `new AWS.DynamoDB.DocumentClient()` calls.
        sandbox.stub(AWS.DynamoDB.DocumentClient.prototype, 'query').returns({ promise: dummy.func });

        // Run your tests here.
    });
});
This is cut down to remove a lot of the extra configuration you are doing (and probably need). We create a dummy object with the function func which returns a sinon promise.
Next, we stub the AWS.DynamoDB.DocumentClient prototype so that new AWS.DynamoDB.DocumentClient() will receive our sinon stub.
Third, we configure our DocumentClient prototype stub to return a plain javascript object with a property called promise. This property points to the first dummy object's promise-returning func method.
Now calls to docClient.query(params).promise() should receive a mocked promise. docClient.query(params) will receive the stub sandbox.stub(AWS.DynamoDB.DocumentClient.prototype, ...). And .promise() will be processed from {promise: dummy.func} to refer to the dummy resolver.
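For completeness, a tiny usage sketch of my own (assuming the prototype stub above is active) showing what the code under test then sees:
// Sketch: this never reaches DynamoDB; the promise resolves with the stubbed value.
const docClient = new AWS.DynamoDB.DocumentClient();
docClient.query({ TableName: 'any-table' }).promise()
    .then((result) => console.log(result)); // { some: 'fake response' }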
I am currently working on some code in NodeJS.
My code looks like this:
const AWS = require('aws-sdk');
const PARAMS_ACCOUNT = {
    AttributeNames: [
        "max-instances"
    ]
};

const ec2 = new AWS.EC2();
const getAccountAttributes = ec2.describeAccountAttributes(PARAMS_ACCOUNT).promise();

function maxInstances() {
    return getAccountAttributes.then(data => {
        return data.AccountAttributes[0].AttributeValues[0].AttributeValue;
    });
}
I've tried to use aws-sdk-mock, along with chaiJS and mochaJS, but I'm not sure how to mock the .promise() call of the aws-sdk.
My attempt looks like the following:
const chai = require('chai');
const mocha = require('mocha');
const awsSDK = require('aws-sdk');
const aws = require('aws-sdk-mock');
const server = require('../instance_limit.js');
const expect = chai.expect;
describe('Sample test', () => {
    it('this is also a test', () => {
        aws.mock('EC2', 'describeAccountAttributes', function (params, callback) {
            callback(null, { AccountAttributes: [{ AttributeValues: [{ AttributeValue: 10 }] }] });
        });

        var awsEC2 = new awsSDK.EC2();

        const instances = server.maxInstances();
        expect(instances).to.equal(10);
    });
});
But with this I am not able to mock the aws-sdk call.
Thanks for your help.
I was able to solve the problem by using the principle of dependency injection, which avoids having to mock the aws-sdk in this case. For that, it was necessary to adjust my function a little bit and set a default parameter value.
It now looks like this:
const AWS = require('aws-sdk');

const PARAMS_ACCOUNT = {
    AttributeNames: [
        "max-instances"
    ]
};

const ec2 = new AWS.EC2();

// Wrapped in a function so the API call only happens when no value is injected.
const getAccountAttributes = () => ec2.describeAccountAttributes(PARAMS_ACCOUNT).promise();

function maxInstances(accountAttributes = getAccountAttributes()) {
    return accountAttributes.then(data => {
        return data.AccountAttributes[0].AttributeValues[0].AttributeValue;
    });
}
This now allows me to provide a sample value during the tests, like this:
const chai = require('chai');
const mocha = require('mocha');
const server = require('../instance_limit.js');
const expect = chai.expect;
describe('data calculation based on ec2 values', () => {
    it('expects to return a max instances value equal to 10', () => {
        const accountAttributeObject = { AccountAttributes: [{ AttributeValues: [{ AttributeValue: 10 }] }] }
        const accountAttributes = Promise.resolve(accountAttributeObject);

        const instances = server.maxInstances(accountAttributes);

        return instances.then(data => {
            expect(data).to.equal(10);
        });
    });
})