I am trying to mock an SQS call defined in my file.js. The SQS client is a global (module-level) instance in that file, so when my test requires file.js, the instance is created immediately and my mock method is never called. However, if I create the SQS instance locally inside the function, I am able to mock it, but that seems wrong because a new instance would be created every time the method is called. How can I mock SQS in my test? I have tried all the approaches given in the related issues, and none of them works for me.
//file.js
const AWS = require('aws-sdk');
const config = require('./config'); // assumed path to the config module

const sqs = new AWS.SQS();
const queueURL = config.sqs_connect.queue_url;
const params = {
  MaxNumberOfMessages: 10,
  QueueUrl: queueURL
};

exports.receiveMessages = async function () {
  // let sqs = new AWS.SQS(); // mocking works if the instance is created here instead
  return new Promise((resolve, reject) => {
    sqs.receiveMessage(params, function (err, data) {
      if (err) {
        console.log("error");
        reject(err);
      } else if (data.Messages) {
        try {
          // consumeAndDeleteMessages is defined elsewhere in this file
          consumeAndDeleteMessages(data.Messages, err => {
            if (err) reject(err);
            else resolve();
          });
        } catch (error) {
          reject(error);
        }
      } else {
        // logger.log("No data in queue");
        resolve();
      }
    });
  });
};
// file.test.js
const AWS = require('aws-sdk');
const AWSMock = require('aws-sdk-mock');
const consumer = require('path-to-file');

describe("foo", () => {
  it("updates all info", async () => {
    let delete_stack = [];
    AWSMock.setSDKInstance(AWS);
    AWSMock.mock('SQS', 'receiveMessage', (params, callback) => {
      // update_payload is a test fixture defined elsewhere
      callback(null, { Messages: [{ MessageId: '1234', ReceiptHandle: 'qwertyu', Body: JSON.stringify(update_payload) }] });
    });
    AWSMock.mock('SQS', 'deleteMessageBatch', (params, callback) => {
      delete_stack.push(params.Entries);
      callback(null, {});
    });
    await consumer.receiveMessages();
    AWSMock.restore('SQS');
    expect(delete_stack).toStrictEqual([
      [{ "Id": "1234", "ReceiptHandle": "qwertyu" }]
    ]);
  });
});
If I define sqs locally inside receiveMessages, the test works fine. I have tried every approach suggested, and none of them works with the module-level instance. Am I doing something wrong?
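For example, one of the approaches I tried (a sketch of the pattern suggested in the aws-sdk-mock issues: register the mocks before the module under test is required, so the module-level new AWS.SQS() is constructed after the SDK has been patched):

// file.test.js (variant)
const AWS = require('aws-sdk');
const AWSMock = require('aws-sdk-mock');

describe("foo", () => {
  it("updates all info", async () => {
    AWSMock.setSDKInstance(AWS);
    AWSMock.mock('SQS', 'receiveMessage', (params, callback) => {
      callback(null, { Messages: [] });
    });
    // require AFTER mocking; jest.resetModules() drops any cached copy
    jest.resetModules();
    const consumer = require('path-to-file');
    await consumer.receiveMessages();
    AWSMock.restore('SQS');
  });
});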
Related
We have the following code used as a Lambda function in the Serverless Framework, triggered every 2 minutes by a cron schedule. The issue we are facing is that the writes to DynamoDB are inconsistent: we expect 3 writes, but instead we get 1 or 2 writes every 2 minutes.
The DynamoDB table has the HOUR as its HASH key, the DATE as its SORT key, and billing mode PROVISIONED. Has someone faced the same behavior from DynamoDB, or can share how they solved the same issue? Thanks.
"use strict";
const AWS = require("aws-sdk");
const axios = require("axios");
const dynamoDb = new AWS.DynamoDB.DocumentClient();
const lambda = new AWS.Lambda({
region: "us-east-1",
});
module.exports.getWeather = async (event, context, callback) => {
const openWeatherMapAPIURL = `http://api.openweathermap.org/data/2.5/weather?id=${event}&appid=XXXXXXXXXXXXXXXXXXXXXXX&units=metric`;
const currentWeather = await axios
.get(openWeatherMapAPIURL)
.then((records) => {
console.log(records);
const d = new Date(records.headers.date);
let hour = d.getHours();
const params = {
TableName: process.env.DYNAMODB_TABLE_NAME,
Item: {
hour: hour,
date: records.headers.date,
city: records.data.name,
temp: records.data.main.temp,
feelsLike: records.data.main.feels_like,
description: records.data.weather[0].description,
},
};
setTimeout(function () {
dynamoDb.put(params, (error) => {
// handle potential errors
console.log(`zapis na: ${records.data.name} ${records.headers.date}`);
if (error) {
console.log(error);
console.error(error);
return;
}
});
}, 3000);
})
.catch((error) => {
console.log(error);
return;
});
const response = {
statusCode: 200,
body: JSON.stringify({
message: `Weather from ${event} was requested!`,
}),
};
callback(null, response);
};
module.exports.cron_launcher = (event, context, callback) => {
const requestedID = ["786735", "792578", "785842"];
requestedID.forEach((requestedID) => {
const params = {
FunctionName: process.env.HANDLER_LOCATION + "-getWeather",
InvocationType: "RequestResponse",
Payload: JSON.stringify(requestedID),
};
return lambda.invoke(params, function (error, data) {
if (error) {
console.error(JSON.stringify(error));
return new Error(`Error printing messages: ${JSON.stringify(error)}`);
} else if (data) {
console.log(data);
}
});
});
};
You are not waiting for the dynamoDb.put operation to finish, and in addition you are wrapping the call in a setTimeout. Your Lambda function returns before the network call can be made. Make sure the put operation succeeds before returning a result from your Lambda.
I see no reason for you to use a setTimeout here.
1. You can call dynamoDb.put(...).promise() to get a promise from the DynamoDB SDK and await that promise, as sketched below.
2. Or you can continue using a callback, but wrap the entire section of code in a new Promise object, calling the resolve method after the dynamoDb.put call finishes.
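A minimal sketch of option 1 (assuming the handler otherwise builds params from the axios response exactly as before):

module.exports.getWeather = async (event) => {
  // ... fetch the weather and build `params` as in the original code ...
  try {
    // .promise() turns the SDK call into a promise, so awaiting it keeps
    // the handler alive until the write has actually completed
    await dynamoDb.put(params).promise();
  } catch (error) {
    console.error(error);
    throw error;
  }
  return {
    statusCode: 200,
    body: JSON.stringify({ message: `Weather from ${event} was requested!` }),
  };
};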
The following code works great locally, but after deploying it to AWS Lambda and running it, my records are not saved to DynamoDB and I'm getting a return of null from Lambda.
I know it's not a permissions issue with the Lambda execution role because I can successfully insert one individual record into DynamoDB from the AWS console.
I think the issue has to do with the .forEach loop and how the aws-sdk works. I'm not sure I'm completely wrapping my head around how to properly use JavaScript promises with Lambda. Any help is greatly appreciated!
module.exports.handler = async event => {
  const getItems = () => {... // return items //...}

  const addToDb = (items) => {
    items.forEach(item => {
      var params = {
        Item: { "id": { S: item.id }, "title": { S: item.title } },
        ReturnConsumedCapacity: "TOTAL",
        TableName: "my-table"
      };
      dynamodb.putItem(params, (err, data) => {
        if (err) console.log(err, err.stack);
        else console.log(data);
      });
    });
  };

  const getItemsPromise = new Promise((resolve) => {
    const items = getItems();
    const itemsAddedToDb = addToDb(items);
    resolve(itemsAddedToDb);
  });

  return getItemsPromise
    .catch(err => console.log(err));
};
This should work!
exports.handler = (event) => {
  const getItems = () => {...} // assuming getItems returns a promise

  const addToDb = (items) => {
    // return the promise so the caller can actually wait for the writes
    return asyncForEach(items, async (item) => {
      const params = {
        Item: {
          id: {
            S: item.id
          },
          title: {
            S: item.title
          }
        },
        ReturnConsumedCapacity: 'TOTAL',
        TableName: 'my-table'
      }
      // .promise() makes the SDK call awaitable; awaiting the callback form
      // does nothing, because it returns an AWS.Request, not a promise
      const data = await dynamodb.putItem(params).promise()
      console.log(data)
    })
  }

  const getItemsPromise = new Promise(async (resolve) => { // eslint rule no-async-promise-executor - use then() instead
    const items = await getItems()
    const itemsAddedToDb = await addToDb(items)
    resolve(itemsAddedToDb)
  })

  const asyncForEach = async (array, callback) => {
    for (let index = 0; index < array.length; index++) {
      await callback(array[index], index, array)
    }
  }

  return getItemsPromise.catch((err) => console.log(err))
}
Notice:
async on exports.handler has no use here; use async only if the function has an await inside it.
async/await doesn't work with forEach; use a for loop instead, as the sketch below illustrates.
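A quick illustration of the forEach point (save is a hypothetical async function standing in for the putItem call):

// forEach does not wait for the promises returned by an async callback,
// so the surrounding function moves on while the writes are still in flight:
items.forEach(async (item) => {
  await save(item); // fire-and-forget
});

// a plain loop awaits each iteration before starting the next:
for (const item of items) {
  await save(item);
}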
I am trying to write an async Lambda function which calls a function to sign up a user in Cognito.
My problem is that my Lambda function does not wait for the result and finishes execution first. Would you mind checking what my issue is? I am new to rxjs. Please help me.
My lambda function:
exports.handler = async (event, context) => {
  //poolData and params will fetch from event
  let source = await signup(poolData, params);
  console.log(source);
};
My signup function:
function signup(poolData, body) {
  const userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
  const { username, password, attributes } = body;
  const attributesList = [];
  if (Array.isArray(attributes)) {
    attributesList.push(
      ...attributes.map(item => new AmazonCognitoIdentity.CognitoUserAttribute(item))
    );
  }
  let source = Observable.create(observer => {
    let output = (err, res) => {
      if (err) {
        observer.error(err);
      } else {
        const cognitoUser = res.user;
        const data = {
          username: cognitoUser.getUsername(),
        };
        observer.next(data);
      }
      observer.complete();
    }
    userPool.signUp(username, password, attributesList, null, output);
  });
  let respond;
  let subscriber = {
    next(value) {
      console.log('Subscriber - next: ', value);
      respond = {
        'statusCode': 200,
        'body': JSON.stringify({
          "username": value.username,
        })
      }
    },
    error(err) {
      console.log('Subscriber - err: ', err);
      respond = err;
    },
    complete() {
      console.log('Subscriber - complete');
      return response;
    }
  };
  source.subscribe(subscriber);
}

module.exports = signup;
This behavior is totally normal.
First things first: an observable is not a promise, which means you cannot await it with the await keyword. Also, I don't see anything being returned from the signup function, which will probably lead to undefined being logged anyway.
So how to fix that? One way is to use toPromise(), which will turn your observable into a promise that can then be awaited wherever needed.
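A minimal sketch of that approach (assuming signup is changed to return the observable it builds; note that toPromise() resolves with the last value emitted before the observable completes, and newer rxjs versions deprecate it in favor of firstValueFrom/lastValueFrom):

exports.handler = async (event, context) => {
  //poolData and params will fetch from event
  const data = await signup(poolData, params).toPromise();
  console.log(data);
  return {
    statusCode: 200,
    body: JSON.stringify({ username: data.username }),
  };
};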
The other way (which is the rxjs way) is to return the observable from the signup function, and inside your handler function subscribe for the response:
let respond;
let subscriber = {
  next(value) {
    console.log('Subscriber - next: ', value);
    respond = {
      'statusCode': 200,
      'body': JSON.stringify({
        "username": value.username,
      })
    }
  },
  error(err) {
    console.log('Subscriber - err: ', err);
    respond = err;
  },
  complete() {
    console.log('Subscriber - complete');
    return respond;
  }
};

exports.handler = (event, context) => {
  //poolData and params will fetch from event
  signup(poolData, params).subscribe(subscriber);
};
I have the following two JS files. My problem is that when I call Calls.js, which calls Archive.js to archive logs into DynamoDB, the request times out.
I have tried many things, read about many more, and tried it in both local and AWS environments without luck. What am I missing?
Archive.js
module.exports.archive = archive;
...
function archive(input, callback) {
  AWS.config.update({
    region: "eu-west-1",
    endpoint: "http://localhost:8000"
  });
  var documentClient = new AWS.DynamoDB.DocumentClient({
    httpOptions: {
      agent: new https.Agent({
        rejectUnauthorized: true,
        secureProtocol: "TLSv1_method",
        ciphers: "ALL"
      })
    }
  });
  ...
  var paramsPUT = {
    TableName: "Logging",
    Item: {
      HashKey: dbID,
      archiveEntry: archiveEntry
    }
  };
  ...
  documentClient.put(paramsPUT, function (err, data) {
    if (err) console.log(err);
    if (data) console.log(data);
    ...
    callback(data);
  });
}
Calls.js
exports.handler = (event, context, callback) => {
  const archive = require("./..path..").archive;
  ...
  context.callbackWaitsForEmptyEventLoop = false;
  ...
  archive(input, callback);
  ...
}
I cannot reproduce a timeout condition with your code. Your code is talking to an AWS endpoint at http://localhost:8000, so I assume you have DynamoDB Local up and running, don't you? Failing to have DynamoDB Local running would cause the timeout.
That being said, I would strongly suggest refactoring your code to use Promise and the async/await provided by Node.js 8, instead of passing the Lambda callback around.
Here is the modified code.
const AWS = require("aws-sdk");
async function archive(input) {
return new Promise( (resolve, reject) => {
AWS.config.update({
region: "eu-west-1",
endpoint: 'http://localhost:8000'
});
//use client specific AWS configuration instead of the global one
const documentClient = new AWS.DynamoDB.DocumentClient();
var paramsPUT = {
TableName: "Logging",
Item: {
HashKey: "123",
archiveEntry: input
}
};
documentClient.put(paramsPUT, function (err, data) {
if (err) {
console.log("ERROR " + err);
reject(err);
}
console.log("Returned from DDB " + JSON.stringify(data, null,2));
resolve(data);
});
});
}
exports.handler = async (event, context, callback) => {
const result = await archive("abc");
callback(result);
}
// stuffs to test locally
callback = function (data) {
console.log("callback called with " + JSON.stringify(data,null,2));
}
event = context = {}
exports.handler(event, context, callback);
I'm new to Lambda & SQS, and I'm trying to create a function to send emails that are queued in an SQS service, but I don't understand how to call the process function that contains the send + delete-from-queue methods.
Below I paste my code:
'use strict';

const AWS = require('aws-sdk');
const SQS = new AWS.SQS({ apiVersion: '2012-11-05' });
const Lambda = new AWS.Lambda({ apiVersion: '2015-03-31' });
const ses = new AWS.SES({ accessKeyId: "xxxxxxxx", secretAccessKey: "xxxxxxx/xxxxxxxxx" });
const s3 = new AWS.S3({ apiVersion: "2006-03-01", region: "us-west-2" });
const QUEUE_URL = 'https://sqs.us-west-2.amazonaws.com/xxxxxxx/queue';
const PROCESS_MESSAGE = 'process-message';

function getPieceOfMail(path, mapObj, replace) {
  return new Promise(function (resolve, reject) {
    s3.getObject({
      Bucket: "myBucket",
      Key: "myKey/" + path
    }, function (err, data) {
      if (err) {
        reject(err);
      } else {
        if (replace === true) {
          var re = new RegExp(Object.keys(mapObj).join("|"), "gi");
          data = data.Body.toString().replace(re, function (matched) {
            return mapObj[matched.toLowerCase()];
          });
          resolve(data);
        } else {
          resolve(data.Body.toString());
        }
      }
    });
  });
}

function getRegisterSource(nickname, activate_link) {
  var pieces;
  pieces = [
    getPieceOfMail("starts/start.html", {}, false),
    getPieceOfMail("headers/a.html", {}, false),
    getPieceOfMail("footers/a.html", {}, false),
  ];
  return Promise.all(pieces)
    .then(function (data) {
      return (data[0] + data[1] + data[2]);
    })
    .catch(function (err) {
      return err;
    });
}

function sendEmail(email, data) {
  return new Promise(function (resolve, reject) {
    var params = {
      Destination: { ToAddresses: [email] },
      Message: {
        Body: {
          Html: {
            Data: data
          },
          Text: {
            Data: data
          }
        },
        Subject: {
          Data: "myData"
        }
      },
      Source: "someone <noreply@mydomain.co>",
    };
    ses.sendEmail(params, function (err, data) {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  });
}

function process(message, callback) {
  console.log(message);
  // process message
  getRegisterSource(event['nickname'], event['user_id'])
    .then(function (data) {
      return sendEmail(event["email"], data);
    })
    .catch(function (err) {
      console.log("==ERROR==");
      callback(err, err);
    })
    .finally(function () {});
  // delete message
  const params = {
    QueueUrl: QUEUE_URL,
    ReceiptHandle: message.ReceiptHandle,
  };
  SQS.deleteMessage(params, (err) => callback(err, message));
}

function invokePoller(functionName, message) {
  const payload = {
    operation: PROCESS_MESSAGE,
    message,
  };
  const params = {
    FunctionName: functionName,
    InvocationType: 'Event',
    Payload: new Buffer(JSON.stringify(payload)),
  };
  return new Promise((resolve, reject) => {
    Lambda.invoke(params, (err) => (err ? reject(err) : resolve()));
  });
}

function poll(functionName, callback) {
  const params = {
    QueueUrl: QUEUE_URL,
    MaxNumberOfMessages: 10,
    VisibilityTimeout: 10,
  };
  // batch request messages
  SQS.receiveMessage(params, (err, data) => {
    if (err) {
      return callback(err);
    }
    // for each message, reinvoke the function
    const promises = data.Messages.map((message) => invokePoller(functionName, message));
    // complete when all invocations have been made
    Promise.all(promises).then(() => {
      const result = `Messages received: ${data.Messages.length}`;
      callback(null, result);
    });
  });
}

exports.handler = (event, context, callback) => {
  try {
    if (event.operation === PROCESS_MESSAGE) {
      console.log("Invoked by poller");
      process(event.message, callback);
    } else {
      console.log("invoked by schedule");
      poll(context.functionName, callback);
    }
  } catch (err) {
    callback(err);
  }
};
Can somebody shed some light on this?
Thanks in advance.
UPDATE
After much misconception, I've decided to start looking at how the polling-SQS example provided by AWS works.
There I found that I lacked some basic SQS permissions, which is now solved by adding the right policy:
{
  "Version": "2012-10-17",
  "Statement": [{
    "Effect": "Allow",
    "Action": [
      "lambda:InvokeFunction"
    ],
    "Resource": ["*"]
  }]
}
This allows Lambda.invoke() to call process().
When process(message, callback) is called, if I console.log(message); it seems that there's no message, although the queue is being cleared by the line SQS.deleteMessage(params, (err) => callback(err, message));
What I was trying to do was combine my sendMail function, which currently works, with an SQS service, so that I only have to push each message to the queue.
This is a common requirement, since AWS SES has its own limitations on how many emails can be sent at once. If these limitations are violated, the SES account will sandbox itself. It seems like you have solved the permissions problem using the proper access credentials.
This Github project (linked below) contains Python 3 Lambda code that can be used to handle a situation like this, where a Lambda polls from SQS using threading and sends emails using SES without exceeding the given limitations.
Link to Github Project.
You can also consider using the newer SQS feature that is capable of invoking a Lambda when a new message is placed in SQS, as sketched below. But be careful not to exceed the maximum number of concurrent Lambda executions within the AWS account's region. (See this document)
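A minimal sketch of that event-source approach (assumptions: a Node.js runtime, the queue already configured as the function's trigger, and a hypothetical sendMail helper):

exports.handler = async (event) => {
  // with an SQS trigger, Lambda delivers a batch of records and deletes
  // them from the queue automatically once the handler resolves successfully
  for (const record of event.Records) {
    const payload = JSON.parse(record.body);
    await sendMail(payload.email, payload.data); // hypothetical helper
  }
};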