SQS to Lambda + SES - node.js

I'm new to Lambda & SQS and I'm trying to create a function to send emails queued in SQS, but I don't understand how to call the process function that contains the send + delete-from-queue methods.
Below I paste my code:
'use strict';
const AWS = require('aws-sdk');
const SQS = new AWS.SQS({ apiVersion: '2012-11-05' });
const Lambda = new AWS.Lambda({ apiVersion: '2015-03-31' });
const ses = new AWS.SES({ accessKeyId: "xxxxxxxx", secretAccessKey: "xxxxxxx/xxxxxxxxx" });
const s3 = new AWS.S3({ apiVersion: "2006-03-01", region: "us-west-2" });
const QUEUE_URL = 'https://sqs.us-west-2.amazonaws.com/xxxxxxx/queue';
const PROCESS_MESSAGE = 'process-message';
function getPieceOfMail (path, mapObj, replace) {
return new Promise(function (resolve, reject) {
s3.getObject({
Bucket: "myBucket",
Key: "myKey/" + path
}, function (err, data) {
if (err) {
reject(err);
} else {
if (replace === true) {
var re = new RegExp(Object.keys(mapObj).join("|"), "gi");
data = data.Body.toString().replace(re, function (matched) {
return mapObj[matched.toLowerCase()];
});
resolve(data);
} else {
resolve(data.Body.toString());
}
}
});
});
}
function getRegisterSource (nickname, activate_link) {
var pieces;
pieces = [
getPieceOfMail("starts/start.html", {}, false),
getPieceOfMail("headers/a.html", {}, false),
getPieceOfMail("footers/a.html", {}, false),
];
return Promise.all(pieces)
.then(function (data) {
return (data[0] + data[1] + data[2]);
})
.catch(function (err) {
throw err;
});
}
function sendEmail (email, data) {
return new Promise(function (resolve, reject) {
var params = {
Destination: { ToAddresses: [email] },
Message: {
Body: {
Html: {
Data: data
},
Text: {
Data: data
}
},
Subject: {
Data: "myData"
}
},
Source: "someone <noreply#mydomain.co>",
};
ses.sendEmail(params, function (err, data) {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
function process(message, callback) {
console.log(message);
// process message
// NOTE: `event` is not defined in this scope; the data has to come from
// the SQS message itself (assuming here that the body is JSON-encoded)
const body = JSON.parse(message.Body);
getRegisterSource(body.nickname, body.user_id)
.then(function (data) {
return sendEmail(body.email, data);
})
.then(function () {
// delete the message only once the send has succeeded
const params = {
QueueUrl: QUEUE_URL,
ReceiptHandle: message.ReceiptHandle,
};
SQS.deleteMessage(params, (err) => callback(err, message));
})
.catch(function (err) {
console.log("==ERROR==");
callback(err, err);
});
}
function invokePoller(functionName, message) {
const payload = {
operation: PROCESS_MESSAGE,
message,
};
const params = {
FunctionName: functionName,
InvocationType: 'Event',
Payload: Buffer.from(JSON.stringify(payload)),
};
return new Promise((resolve, reject) => {
Lambda.invoke(params, (err) => (err ? reject(err) : resolve()));
});
}
function poll(functionName, callback) {
const params = {
QueueUrl: QUEUE_URL,
MaxNumberOfMessages: 10,
VisibilityTimeout: 10,
};
// batch request messages
SQS.receiveMessage(params, (err, data) => {
if (err) {
return callback(err);
}
// for each message, reinvoke the function (Messages is absent when the queue is empty)
const messages = data.Messages || [];
const promises = messages.map((message) => invokePoller(functionName, message));
// complete when all invocations have been made
Promise.all(promises).then(() => {
const result = `Messages received: ${messages.length}`;
callback(null, result);
});
});
}
exports.handler = (event, context, callback) => {
try {
if (event.operation === PROCESS_MESSAGE) {
console.log("Invoked by poller");
process(event.message, callback);
} else {
console.log("invoked by schedule");
poll(context.functionName, callback);
}
} catch (err) {
callback(err);
}
};
Can somebody shed some light on this?
Thanks in advance.
UPDATE
After much confusion, I decided to look at how the SQS-polling example provided by AWS works.
There I found that I lacked some basic permissions, now solved by adding the right policy:
{
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": [
"lambda:InvokeFunction"
],
"Resource": ["*"]
}]
}
This allows Lambda.invoke() to call process().
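Note that the SQS calls themselves (receiveMessage, deleteMessage) also have to be allowed on the function's execution role; a minimal extra statement for that (the queue ARN below is a placeholder) might look like:
{
"Effect": "Allow",
"Action": [
"sqs:ReceiveMessage",
"sqs:DeleteMessage"
],
"Resource": ["arn:aws:sqs:us-west-2:xxxxxxx:queue"]
}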
When process(message, callback) is called, console.log(message); shows that there's no message, even though the queue is being cleared by the line SQS.deleteMessage(params, (err) => callback(err, message));
What I was trying to do was combine my currently working sendEmail function with an SQS service, so that I only have to push each message to the queue.
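For completeness, the producer side of that idea is just a single sendMessage call; a minimal sketch (the queue URL and body fields are assumptions matching the code above):
// sketch: push one email job onto the queue
const AWS = require('aws-sdk');
const sqs = new AWS.SQS({ apiVersion: '2012-11-05', region: 'us-west-2' });
sqs.sendMessage({
QueueUrl: 'https://sqs.us-west-2.amazonaws.com/xxxxxxx/queue',
MessageBody: JSON.stringify({
email: 'user@example.com',
nickname: 'someNickname',
user_id: '12345',
}),
}, (err, data) => {
if (err) console.log(err);
else console.log('queued', data.MessageId);
});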

This is a common requirement, since AWS SES has its own limits on how much email can be sent at once. If these limits are violated, the SES account will sandbox itself. It seems like you have already solved the access-credentials part of the problem.
The project linked below contains Python 3 Lambda code that handles a situation like this, where a Lambda polls from SQS using threading and sends emails using SES without exceeding the given limits.
Link to Github Project.
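The linked project is Python, but the pacing idea translates directly to Node.js. Here is a rough sketch, assuming a send rate of 14 messages per second (check your account's real limit with ses.getSendQuota()):
const AWS = require('aws-sdk');
const ses = new AWS.SES({ region: 'us-west-2' });
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
// send a list of email jobs while staying under the assumed SES send rate
async function sendPaced(jobs, ratePerSecond = 14) {
const interval = Math.ceil(1000 / ratePerSecond);
for (const job of jobs) {
await ses.sendEmail({
Source: job.source,
Destination: { ToAddresses: [job.to] },
Message: {
Subject: { Data: job.subject },
Body: { Html: { Data: job.html } },
},
}).promise();
await sleep(interval); // wait before the next send
}
}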
You can also consider using the newer SQS feature that invokes a Lambda function whenever a new message is placed in the queue. But be careful not to exceed the concurrent Lambda execution limit for the AWS account's region. (See this document)
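With such an event source mapping in place, all of the poller/invoker code above disappears: Lambda deletes successfully processed batches for you, and the handler just loops over the records. A minimal sketch (processRecord is a hypothetical stand-in for the send logic):
exports.handler = async (event) => {
// each record is one SQS message; the body is whatever was pushed
for (const record of event.Records) {
const body = JSON.parse(record.body);
await processRecord(body); // hypothetical: e.g. the sendEmail logic above
}
};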

Related

How to fetch user information from AWS SSO

I am trying to fetch the users using my VPN setup. In AWS SSO / AWS IAM Identity Center, I have a group called __ with 2 users. Both users have attribute information such as email, phone number, first name, etc.
To fetch that information, I use the code below in Lambda.
const AWS = require("aws-sdk");
async function getUserInformation(){
const identityStore = new AWS.IdentityStore({
accessKeyId: process.env.accessKeyId,
secretAccessKey: process.env.secretAccessKey,
region: process.env.region,
});
let params = {
IdentityStoreId: process.env.identityStoreId
};
const userIdPromise = () => {
return new Promise((resolve, reject) => {
identityStore.listUsers(params, (err, data) => {
if (err) {
return reject(err);
}
return resolve(data.Users);
});
});
};
const userInfoPromise = (Users) => {
const usersInfo = [];
return new Promise((resolve, reject) => {
Users.map(({ UserId }) => {
let params = {
IdentityStoreId: process.env.identityStoreId,
UserId: UserId,
};
identityStore.describeUser(params, function (err, data) {
if (err) {
return reject(err);
}
usersInfo.push(data);
if (usersInfo.length == Users.length) {
return resolve(usersInfo);
}
});
});
});
};
const Users = await userIdPromise();
const usersInfo= await userInfoPromise(Users);
return usersInfo;
}
exports.handler = async (event) => {
try {
const users = await getUserInformation();
return users;
} catch (err) {
return err;
}
};
The output I get from this is the following response, but I was expecting a response like the one in the documentation.
Response
[
{
"UserName": "jakob-2",
"UserId": "a34498c2-4011-70bd-1a24-*"
},
{
"UserName": "JakobCI",
"UserId": "2334c812-6051-70cd-a983-*"
}
]
I hope you can help, as I have now tried for 3 days to get the correct response information.
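As an aside on the code itself, the hand-rolled counter in userInfoPromise (pushing into usersInfo and resolving once the lengths match) can be replaced with Promise.all plus the SDK's built-in .promise() helper; a minimal sketch of the same fetch (environment variable names as in the question):
async function getUserInformation() {
const identityStore = new AWS.IdentityStore({ region: process.env.region });
const { Users } = await identityStore
.listUsers({ IdentityStoreId: process.env.identityStoreId })
.promise();
// fetch every user's details in parallel
return Promise.all(
Users.map(({ UserId }) =>
identityStore
.describeUser({ IdentityStoreId: process.env.identityStoreId, UserId })
.promise()
)
);
}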

mocking SQS with Jest

I am trying to mock an SQS call defined in my file.js. The SQS client is a global instance in that file, so by the time my test requires file.js, the instance is already created and my mock method is not called. If I create the SQS instance locally, inside the function, I am able to mock it, but that would be wrong, since a new instance would then be created on every call. How can I mock SQS in my test? I have tried all the approaches given in the issues; none of them works for me.
//file.js
const AWS = require('aws-sdk');
const config = require('./config'); // assumed: wherever sqs_connect is configured
const sqs = new AWS.SQS();
const queueURL = config.sqs_connect.queue_url;
const params = {
MaxNumberOfMessages: 10,
QueueUrl: queueURL
};
exports.receiveMessages = async function () {
// let sqs = new AWS.SQS();
return new Promise((resolve, reject) => {
sqs.receiveMessage(params, function (err, data) {
if (err) {
console.log("error")
reject(err);
} else if (data.Messages) {
try {
consumeAndDeleteMessages(data.Messages, err => {
if (err) reject(err);
else resolve();
});
} catch (error) {
reject(error);
}
} else {
// logger.log("No data in queue");
resolve();
}
});
})
}
// file.test.js
const AWS = require('aws-sdk');
const AWSMock = require('aws-sdk-mock'); // this require was missing from the snippet
const consumer = require('path-to-file');
describe("foo", () => {
it("updates all info", async () => {
let delete_stack = [];
AWSMock.setSDKInstance(AWS);
AWSMock.mock('SQS', 'receiveMessage', (params, callback) => {
callback(null, { Messages: [{ MessageId: '1234', ReceiptHandle: 'qwertyu', Body: JSON.stringify(update_payload) }] });
});
AWSMock.mock('SQS', 'deleteMessageBatch', (params, callback) => {
delete_stack.push(params.Entries);
callback(null, {});
});
await consumer.receiveMessages();
AWSMock.restore('SQS');
expect(delete_stack).toStrictEqual([
[{ "Id": "1234", "ReceiptHandle": "qwertyu" }]
]);
});
});
If I define sqs locally inside receiveMessages, the test works fine. I have tried all the ways provided; none of them is working. Am I doing something wrong?
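One likely fix, sketched below but not verified against this exact setup: because file.js creates its SQS client at require time, the mock has to be registered before the module is loaded, so defer the require (jest.resetModules() clears any cached copy):
// file.test.js (sketch)
const AWS = require('aws-sdk');
const AWSMock = require('aws-sdk-mock');
describe("foo", () => {
it("mocks the module-level SQS client", async () => {
jest.resetModules(); // drop any cached copy of file.js
AWSMock.setSDKInstance(AWS);
AWSMock.mock('SQS', 'receiveMessage', (params, callback) => {
callback(null, {}); // no Messages: receiveMessages resolves immediately
});
// require AFTER mocking, so `new AWS.SQS()` picks up the mocked SDK
const consumer = require('path-to-file');
await consumer.receiveMessages();
AWSMock.restore('SQS');
});
});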

AWS SNS not working from lambda, but working locally

I've got a problem I cannot solve myself. My lambda function works as expected when invoked locally, but it does not send the text message when called from AWS Lambda. It doesn't log any error either.
Here's my code; I've only starred out the private stuff:
import request from 'request';
import AWS from "aws-sdk";
const options = {***};
const sentAlert = async msg => {
const sns = new AWS.SNS();
await sns.publish({
Message: msg,
PhoneNumber: '***',
MessageAttributes: {
'AWS.SNS.SMS.SenderID': {
'DataType': 'String',
'StringValue': '***'
}
}
}, function (err, data) {
if (err) {
console.log(err.stack);
return;
}
});
console.log('sms sent');
};
export const getAlert = async (event, context, callback) => {
request(options, (err, res, body) => {
if (err) { return console.log('error: ', err); }
if (body.length === 0 ) { return }
console.log(`***`);
const optionsId = {*** };
request(optionsId, (err, res, body) => {
const msg = body.current.indexes[0].description;
console.log('msg: ', msg);
sentAlert(msg);
});
});
};
I test it locally using serverless invoke local --function getSmogAlert and it works just as expected: I get the SMS from AWS. But when I call it with serverless invoke --function getSmogAlert, it returns null and doesn't send any text message.
I've had similar problems with Nexmo and thought that maybe AWS.SNS would help me, but nope.
Any help, please?
As I wrote in my comment, I think you are confusing promises and callbacks in the execution. Try these changes:
const options = {***};
const sentAlert = (msg, callback) => {
const sns = new AWS.SNS();
sns.publish({
TopicArn: ***,
Message: msg,
PhoneNumber: '***',
MessageAttributes: {
'AWS.SNS.SMS.SenderID': {
'DataType': 'String',
'StringValue': '***'
}
}
}, function (err, data) {
if (err) {
console.log(err.stack);
return callback(err);
}
console.log('sms sent');
callback(null);
});
};
export const getAlert = (event, context, callback) => {
request(options, (err, res, body) => {
if (err) {
console.log('error: ', err);
return callback(err);
}
if (body.length === 0) {
console.log('Got no body!');
return callback(null);
}
console.log(`***`);
const optionsId = {*** };
request(optionsId, (err, res, body) => {
if (err) {
console.log(err.stack);
return callback(err);
}
const msg = body.current.indexes[0].description;
console.log('msg: ', msg);
sentAlert(msg, callback);
});
});
};
But in general, I would prefer to use the async/await mechanism supported by the AWS Lambda nodejs8.10 runtime. That would make your code simpler and easier to reason about.
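For completeness, a rough async/await version of the SNS part would look like this (a sketch only; the starred values are still yours to fill in):
import AWS from "aws-sdk";
const sendAlert = async (msg) => {
const sns = new AWS.SNS();
// .promise() turns the AWS.Request into a real Promise we can await
await sns.publish({
Message: msg,
PhoneNumber: '***',
MessageAttributes: {
'AWS.SNS.SMS.SenderID': { DataType: 'String', StringValue: '***' }
}
}).promise();
console.log('sms sent');
};
export const getAlert = async (event) => {
const msg = '***'; // derive from the `request` calls as before
await sendAlert(msg);
};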

DynamoDB, Lambda function / custom module Timeout

I have the following two JS files. My problem is that when I call Calls.js, which calls Archive.js to archive logs into DynamoDB, the request times out.
I have tried many things, read about many more, and tried both local and AWS environments without luck. What am I missing?
Link1, Link2, Link3, Link4, Link5
Archive.js
module.exports.archive = archive;
...
function archive(input, callback){
AWS.config.update({
region: "eu-west-1",
endpoint: "http://localhost:8000"
});
var documentClient = new AWS.DynamoDB.DocumentClient({
httpOptions: {
agent: new https.Agent({
rejectUnauthorized: true,
secureProtocol: "TLSv1_method",
ciphers: "ALL"
})
}
});
...
var paramsPUT = {
TableName: "Logging",
Item: {
HashKey: dbID,
archiveEntry: archiveEntry
}
};
...
documentClient.put(paramsPUT, function(err, data) {
if (err) console.log(err);
if (data) console.log(data);
...
callback(data);
});
}
Calls.js
exports.handler = (event, context, callback) => {
const archive = require("./..path..").archive;
...
context.callbackWaitsForEmptyEventLoop = false;
...
archive(input, callback);
...
}
I cannot reproduce a timeout condition with your code. Your code is talking to an AWS endpoint at http://localhost:8000, so I assume you have DynamoDB Local up and running, don't you? Failing to have DynamoDB Local running would cause the timeout.
That being said, I would strongly suggest refactoring your code to use Promises and the async/await support provided by Node.js 8 instead of passing the Lambda callback around.
Here is the modified code.
const AWS = require("aws-sdk");
async function archive(input) {
return new Promise( (resolve, reject) => {
AWS.config.update({
region: "eu-west-1",
endpoint: 'http://localhost:8000'
});
//use client specific AWS configuration instead of the global one
const documentClient = new AWS.DynamoDB.DocumentClient();
var paramsPUT = {
TableName: "Logging",
Item: {
HashKey: "123",
archiveEntry: input
}
};
documentClient.put(paramsPUT, function (err, data) {
if (err) {
console.log("ERROR " + err);
return reject(err);
}
console.log("Returned from DDB " + JSON.stringify(data, null,2));
resolve(data);
});
});
}
exports.handler = async (event, context, callback) => {
const result = await archive("abc");
callback(null, result);
}
// stuffs to test locally
callback = function (err, data) {
console.log("callback called with " + JSON.stringify(data,null,2));
}
event = context = {}
exports.handler(event, context, callback);

Can't insert data into DynamoDB using new Node.js 8.10

I want to use the new Node.js 8.10 runtime for developing my Lambdas. A simple piece of code written in the Node 6.10 style works, but the same (similar) code doesn't work when I use Node 8.10.
Below is the working code, which successfully inserts data into the DynamoDB table (Node.js 6.10):
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'us-east-1'});
var documentClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});
exports.handler = (event, context, callback) => {
// TODO implement
var params = {
Item: {
client: 'client_'+Math.random(),
Type: 1,
Status: true,
json: { foo: 'bar', address:{ city:'Pune', street: 'ABC Nagar', pin:'411099'} }
},
TableName: 'clients'
};
documentClient.put(params, function(err, data) {
if (err) {
console.log("Error", err);
callback(err, null);
} else {
console.log("Success", data);
// return "Hi, insert data completed";
callback(null, data);
}
});
};
And below is the Node 8.10-style version, which doesn't work (meaning it doesn't insert data into the DynamoDB table). I keep getting null as the return value.
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'us-east-1'});
var documentClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});
exports.handler = async (event) => {
// TODO implement
var params = {
Item: {
client: 'client_'+Math.random(),
Type: 1,
Status: true,
json: { foo: 'bar', address:{ city:'Pune', street: 'ABC Nagar', pin:'411099'} }
},
TableName: 'clients'
};
documentClient.put(params, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Success", data);
return "Hi, insert data completed";
}
});
};
I spent 2-3 hours searching and couldn't find any similar article or question. Can anyone tell me what I am doing wrong?
Async/await is syntactic sugar for promises, so your documentClient.put should be wrapped in a promise. Since documentClient.put is based on the callback approach, you have to wrap it yourself:
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'us-east-1'});
var documentClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});
exports.handler = async (event) => {
// TODO implement
var params = {
Item: {
client: 'client_'+Math.random(),
Type: 1,
Status: true,
json: { foo: 'bar', address:{ city:'Pune', street: 'ABC Nagar', pin:'411099'} }
},
TableName: 'clients'
};
let putItem = new Promise((res, rej) => {
documentClient.put(params, function(err, data) {
if (err) {
console.log("Error", err);
rej(err);
} else {
console.log("Success", data);
res("Hi, insert data completed");
}
});
});
const result = await putItem;
console.log(result);
return result
};
Note: it's advisable to keep DB operations in a separate file rather than in the handler function itself.
Did you look in your table to see if it's inserting data? I think it is.
The problem with your async-style code is that you aren't returning a value. Returning "Hi, insert data completed" from the put callback doesn't return a value from handler.
You could manually create a promise and return that from handler, but I'd try using promisify.
This code is untested but should be close:
...
const util = require('util');
...
documentClient.putPromise = util.promisify(documentClient.put);
...
try {
const data = await documentClient.putPromise(params);
console.log("Success", data);
return "Hi, insert data completed";
}
catch (err) {
console.log("Error", err);
}
Here's more on promisify: http://2ality.com/2017/05/util-promisify.html
Calling await dynamo.put(params).promise(); is how I solved this issue after some googling. Specifically, it seems that calling .promise() on a request in the aws-sdk is supported now.
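Put together, a minimal version of the handler using .promise() looks like this (table name and item shape taken from the question):
const AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-1' });
var documentClient = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });
exports.handler = async (event) => {
const params = {
TableName: 'clients',
Item: {
client: 'client_' + Math.random(),
Type: 1,
Status: true,
json: { foo: 'bar' },
},
};
// await the SDK's built-in promise instead of passing a callback
await documentClient.put(params).promise();
return 'Hi, insert data completed';
};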
