I'm working on a project where I need to write a Lambda function that exposes an AWS API to handle a GraphQL query and send the payload to AWS SQS. Everything works fine, but when I check my SQS queue it shows 2 messages every single time instead of 1, even though CloudWatch shows the function was triggered only once. I'm sharing my code below; any help would be much appreciated.
index.js
const { graphql } = require("graphql");
const { schema } = require("./graphql/schema");
exports.handler = async (event) => {
// getting query from lambda event
const query = event.query;
// getting query variables from lambda event
const variables = event.variables;
return await graphql(schema, query, null, null, variables);
};
sqs.js
const AWS = require("aws-sdk");
// Region for every AWS client created below.
AWS.config.update({ region: "us-east-1"});
// Create an SQS service object. Credentials come from environment variables;
// NOTE(review): inside Lambda, prefer the execution role over static keys.
const sqs = new AWS.SQS({apiVersion: '2012-11-05', "accessKeyId": process.env.ACCESS_KEY_ID, "secretAccessKey": process.env.SECRET_ACCESS_KEY});
// Target queue URL, injected via environment.
const QueueUrl = process.env.SQS_QUEUE_URL;
/**
 * Sends `message` (JSON-serialized) to the configured SQS queue exactly once.
 *
 * BUG FIX: the original passed a callback to sendMessage() AND called
 * .promise() on the returned AWS.Request — each of those dispatches the HTTP
 * request, which is why the queue received two messages per invocation.
 * It also used `await` inside a non-async arrow (a syntax error). Use the
 * promise form only, inside an async function.
 *
 * @param {*} message - any JSON-serializable payload
 * @returns the SQS SendMessage response (contains MessageId)
 * @throws the AWS error when the send fails (after logging it)
 */
const sendPayloadToSQS = async (message) => {
  const params = {
    MessageBody: JSON.stringify(message),
    QueueUrl,
  };
  try {
    const data = await sqs.sendMessage(params).promise();
    console.log("Message queued to SQS successfully : ", data.MessageId);
    return data;
  } catch (err) {
    console.log("Message sending failed : ", err);
    throw err; // propagate so callers awaiting this can react
  }
};
module.exports = sendPayloadToSQS;
graphql mutation file
const { GraphQLNonNull } = require("graphql");
const { mutationWithClientMutationId } = require("../../common");
const { JobRequestEventResponse } = require("../jobRequestEventResponse");
const { JobRequestInput, JobEventMetadataInput } = require("../jobSchema");
const sendPayloadToSQS = require("../../../sqs");
const { newId } = require("../../../newId");
const JobRequestEvent = mutationWithClientMutationId({
name: "JobRequestEvent",
inputFields: {
eventMetadataInput: {
type: new GraphQLNonNull(JobEventMetadataInput),
},
eventInput: {
type: new GraphQLNonNull(JobRequestInput),
},
},
outputFields: {
JobRequestEventResponse: {
type: JobRequestEventResponse,
},
},
mutateAndGetPayload: async (params) => {
const new_id = newId();
if(params.eventInput.jobId === null || params.eventInput.jobId === undefined) {
params.eventInput.jobId = new_id;
}
const payload = {
_id: new_id,
transactionId: new_id,
name: params.eventMetadataInput.name,
userRole: params.eventMetadataInput.userRole,
date: params.eventMetadataInput.date,
languageCode: params.eventMetadataInput.languageCode,
eventInput: params.eventInput,
};
//send payload to sqs
await sendPayloadToSQS(payload);
return {
JobRequestEventResponse: {
id: payload._id,
transactionId: payload.transactionId,
status: "Success",
},
};
},
});
module.exports = {
JobRequestEvent,
};
I read the documentation again and found that the callback is the root cause of my problem: passing a callback to sendMessage dispatches the request once, and calling .promise() on the returned request dispatches it a second time — so the same message was sent twice. The fix is to remove the callback and use only the promise form, as shown below.
Refer : AWS Official documentation
Instead of this:
// BUG: supplying a callback AND calling .promise() on the returned
// AWS.Request dispatches the same SendMessage request twice — one message
// from the callback form, one from the promise form.
await sqs.sendMessage(params, function(err, data) {
if (err) {
console.log("Message sending failed : ", err);
} else {
console.log("Message queued to SQS successfully : ", data.MessageId);
}
}).promise();
I write this:
// Correct form: no callback — only .promise() issues the request, so exactly
// one message is sent per call.
const request = sqs.sendMessage(params);
const result = await request.promise();
if(result) {
console.log("Message queued to SQS successfully : ", result.MessageId);
} else {
console.log("Message queued failed");
}
Related
I am trying to mock the sqs.receiveMessage function imported from aws-sdk.
Here is my code(sqsHelper.js):
const AWS = require("aws-sdk");
export default class SqsHelper {
// Polls the queue once (long-poll up to 20s) and resolves with the messages.
// NOTE(review): `serviceConfig` is not defined in this snippet — presumably
// imported elsewhere; confirm.
// NOTE(review): when the call succeeds but recData.Messages is undefined,
// neither resolve nor reject is invoked, so the Promise never settles.
static SqsGetMessagesTest = () => {
// A fresh SQS client is constructed on every call — this is why spying on a
// separate instance created in a test file cannot intercept these calls.
const sqs = new AWS.SQS({
apiVersion: serviceConfig.sqs.api_version,
region: serviceConfig.sqs.region,
});
const queueURL =
"https://sqs.us-west-2.amazonaws.com/<1234>/<4567>";
const params = {
AttributeNames: ["SentTimestamp"],
MaxNumberOfMessages: 10,
MessageAttributeNames: ["All"],
QueueUrl: queueURL,
VisibilityTimeout: 20,
WaitTimeSeconds: 20,
};
// Adapt the callback API to a Promise (receiveMessage itself returns an
// AWS.Request, not a Promise).
return new Promise((resolve, reject) => {
sqs.receiveMessage(params, async (recErr, recData) => {
if (recErr) {
reject(recErr);
} else if (recData.Messages) {
console.info(`Message count: ${recData.Messages.length}`);
resolve(recData.Messages);
}
});
});
};
}
And here is the test file(sqsHelper.test.js):
import SqsHelper from "../../helpers/sqsHelper.js";
import { SQS } from "aws-sdk";
const dumyData = { Messages: [{ name: "123", lastName: "456" }] };
// This SQS instance is local to the test file. sqsHelper.js constructs its
// own instance inside SqsGetMessagesTest, so spying on this one has no
// effect on the helper — that is why check 2 below hits the real service.
const sqs = new SQS();
describe("Test SQS helper", () => {
test("Recieve message", async () => {
// Only patches `receiveMessage` on THIS instance, not on the class.
jest.spyOn(sqs, 'receiveMessage').mockReturnValue(dumyData);
// check 1: calls the spied instance directly — mock applies.
const res1 = await sqs.receiveMessage();
console.log(`res: ${JSON.stringify(res1, null, 2)}`)
expect(res1).toEqual(dumyData);
// check 2: the helper builds its own client — mock does NOT apply.
const res2 = await SqsHelper.SqsGetMessagesTest();
console.log(`res2: ${JSON.stringify(res2, null, 2)}`);
expect(res2).toBe(dumyData);
});
});
The problem is that in the first check (where I call receiveMessage directly from the test file) I can see that it has been mocked and the result is as expected.
But in the second check (where the function is called from the module "sqsHelper.js") the mock does not work: the original receiveMessage is called, and it still asks me for credentials.
This is the error:
InvalidClientTokenId: The security token included in the request is
invalid.
What am I doing wrong?
Thanks
receiveMessage invokes the callback passed in its arguments; it does not return a Promise. Your mock therefore needs to call that callback.
Try something like this:
const dummyData = { Messages: [{ name: "123", lastName: "456" }] };
// Mock that immediately invokes the node-style (err, data) callback, matching
// how the helper drives receiveMessage.
const mockReceiveMessage = jest.fn().mockImplementation((params, callback) => callback("", dummyData));
// Mocking the module itself (rather than one instance) means every
// `new AWS.SQS()` — including the one created inside sqsHelper.js — receives
// the mocked constructor below.
jest.mock("aws-sdk", () => {
const originalModule = jest.requireActual("aws-sdk");
return {
...originalModule,
SQS: function() { // needs to be function as it will be used as constructor
return {
receiveMessage: mockReceiveMessage
}
}
};
})
describe("Test SQS helper", () => {
test("Recieve message", async () => {
const res = await SqsHelper.SqsGetMessagesTest();
expect(res).toBe(dummyData.Messages);
});
test("Error response", async () => {
// Truthy first argument exercises the helper's reject path.
mockReceiveMessage.mockImplementation((params, callback) => callback("some error"));
await expect(SqsHelper.SqsGetMessagesTest()).rejects.toEqual("some error");
});
});
I have gone through similar threads to fix this issue but I have had no luck. Both lambdas can be trigger independently of one another, and I am able to invoke the second Lambda through the command line, but my code does not work.
'use strict'
/* eslint max-statements: ['error', 100, { 'ignoreTopLevelFunctions': true }] */
// Project-local response helper and DynamoDB wrapper.
const RespHelper = require('../../lib/response')
const { uuid } = require('uuidv4')
const AWS = require('aws-sdk')
const DB = require('./dynamo')
const respHelper = new RespHelper()
const Dynamo = new DB()
// Lambda client pinned to the region hosting the downstream function.
const lambda = new AWS.Lambda({
region: 'us-west-2'
})
/**
 * Synchronously invokes the downstream lambda with `lambdaData` as payload
 * and resolves with the invocation response.
 *
 * BUG FIX: the original passed a callback to lambda.invoke() and returned
 * nothing, so the caller's `await secondLambda(...)` resolved immediately and
 * the runtime could freeze the container before the HTTP request completed.
 * Returning the awaited .promise() makes the await meaningful.
 */
const secondLambda = async (lambdaData) => {
  const params = {
    LogType: 'Tail',
    FunctionName: 'second_lambda_name',
    InvocationType: 'RequestResponse',
    Payload: JSON.stringify(lambdaData)
  }
  const data = await lambda.invoke(params).promise()
  console.log(`Success: ${data.Payload}`)
  return data
}
exports.handler = async event => {
const id = uuid()
let bodyData = {
uuid: id,
user: 'owner#email.com',
processingStatus: 'IN_PROGRESS'
}
let payloadData = {
uuid: id,
user: 'owner#email.com',
processingStatus: 'COMPLETE'
}
try {
await Dynamo.writeRecordToDB(bodyData)
await secondLambda(payloadData)
return respHelper.sendResponse(200, { message: bodyData })
} catch (err) {
console.log(`Failure: ${err}`)
return respHelper.sendResponse(400, { message: 'ERROR' })
}
}
I have double checked the lambda role and it has the Invoke Lambda and Invoke Asynchronous Invoke permission on all resources. Console outputs don't give me any indication of why this is not working. Any help is appreciated.
You're awaiting a callback when you need to await a promise
// Correct version: return the Promise from .promise() so the caller's
// `await secondLambda(...)` genuinely waits for the invocation to finish.
const secondLambda = async lambdaData =>
lambda
.invoke({
LogType: 'Tail',
FunctionName: 'second_lambda_name',
InvocationType: 'RequestResponse',
Payload: JSON.stringify(lambdaData),
})
.promise()
I am attempting to create an orchestration AWS lambda that calls two other AWS lambdas. These two other AWS lambdas can be invoked in their own right but in certain cases, there is a need for orchestration.
My orchestration lambda looks like this:
/**
 * Orchestrates lambdaA and lambdaB in parallel and reports a combined result
 * through the Lambda callback: 200 when both responded truthy, 400 otherwise.
 *
 * BUG FIX: the failure branch referenced an undefined `someresponse`, which
 * raised a ReferenceError whenever that path executed. It now returns the
 * actual (possibly falsy) responses so the caller can see what failed.
 */
module.exports.orchestration = async (event, context, callback) => {
  // Start both invocations before awaiting so they run concurrently.
  const lambdaAPromise = lambdaA();
  const lambdaBPromise = lambdaB();
  const lambdaAResponse = await lambdaAPromise;
  const lambdaBResponse = await lambdaBPromise;
  if (lambdaAResponse && lambdaBResponse) {
    console.log(
      "Both streams responsed with: ",
      lambdaAResponse,
      lambdaBResponse
    );
    const orchestrationSucceeded = {
      statusCode: 200,
      isBase64Encoded: false,
      body: [lambdaAResponse, lambdaBResponse]
    };
    callback(null, orchestrationSucceeded);
  } else {
    console.log(
      "At least one stream not responded: ",
      lambdaAResponse,
      lambdaBResponse
    );
    const orchestrationFailed = {
      statusCode: 400,
      isBase64Encoded: false,
      body: { lambdaAResponse, lambdaBResponse }
    };
    callback(null, orchestrationFailed);
  }
};
// Invokes the downstream lambda synchronously (RequestResponse) and resolves
// with the parsed `body` of its payload.
// NOTE(review): `requestBody` is not defined anywhere in this snippet — this
// throws a ReferenceError at call time; it should likely be a parameter.
// NOTE(review): this uses CCE_FUNCTION_NAME, the same env var as lambdaB —
// confirm whether a distinct function name was intended.
function lambdaA() {
var payload = {
groupNumber: requestBody.groupNumber
};
var params = {
FunctionName: process.env.CCE_FUNCTION_NAME,
InvocationType: "RequestResponse",
LogType: "Tail",
Payload: JSON.stringify(payload)
};
return lambda
.invoke(params)
.promise()
.then(({ Payload }) => {
// Payload is a JSON string; unwrap its `body` field for the caller.
var payload = JSON.parse(Payload);
return payload.body;
});
}
// Invokes the downstream lambda synchronously (RequestResponse) and resolves
// with the parsed `body` of its payload.
// NOTE(review): `requestBody` is not defined anywhere in this snippet — this
// throws a ReferenceError at call time; it should likely be a parameter.
// NOTE(review): this uses CCE_FUNCTION_NAME, the same env var as lambdaA —
// confirm whether a distinct function name was intended.
function lambdaB() {
var payload = {
groupNumber: requestBody.groupNumber
};
var params = {
FunctionName: process.env.CCE_FUNCTION_NAME,
InvocationType: "RequestResponse",
LogType: "Tail",
Payload: JSON.stringify(payload)
};
return lambda
.invoke(params)
.promise()
.then(({ Payload }) => {
// Payload is a JSON string; unwrap its `body` field for the caller.
var payload = JSON.parse(Payload);
return payload.body;
});
}
Both lambdaA and lambdaB functions look like this:
// Illustrative shape of each downstream handler: returns a Promise resolving
// with a boolean. (`do something ...` and `boolean value` are pseudocode.)
module.exports.lambdaA = (event) => {
return new Promise((resolve) => {
do something ...
resolve(boolean value);
});
};
My issue was that the await function did not occur as I had an incorrect signature (was still using callback rather than promise). I have updated the code snippets that are now working correctly.
Just wrapping up from comment:
Issue was lambdaA and lambdaB use callbacks, hence you cannot await them. [From Snippet #1]
I'm creating an invalidation to CloudFront via API Gateway call to Lambda, but the invalidation only gets processed about 1 in 5 attempts. I'm certain I am missing something that would properly wait for the createInvalidation method to be called.
'use strict';
var AWS = require('aws-sdk');
// CloudFront client and target distribution id, created once per container.
var cloudfront = new AWS.CloudFront();
var distID = process.env.DISTRIBUTION_ID;
exports.handler = async(event) => {
// Invalidation path comes from the API Gateway query string.
var path = (event.queryStringParameters.path);
console.log(event.queryStringParameters.path);
var params = {
DistributionId: distID,
InvalidationBatch: {
// Request id makes each invalidation batch unique.
CallerReference: (event.requestContext.requestId),
Paths: {
Quantity: 1,
Items: [
path
]
}
}
};
// BUG: when a callback is supplied, createInvalidation returns an
// AWS.Request, not a Promise; awaiting a non-thenable resolves immediately,
// so the handler finishes (and the container may be frozen) before the HTTP
// request is actually sent — hence only ~1 in 5 invalidations lands.
// Drop the callback and await .promise() instead.
await cloudfront.createInvalidation(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
return data;
});
};
The Lambda function completes every time and is receiving and parsing the API Gateway call just fine to populate the variables, but the createInvalidation only happens about 1 in 5 attempts, though no errors result from the other 4 attempts.
Try the following snippet:
// import * as AWS from 'aws-sdk';
// Creates a single-path CloudFront invalidation and waits for the API call
// to finish before the handler returns; errors are logged, not rethrown.
exports.handler = async(event) =>
{
  const cloudfront = new AWS.CloudFront();
  const distID = process.env.DISTRIBUTION_ID;
  const path = (event.queryStringParameters.path);
  console.log(event.queryStringParameters.path);
  // One-item batch keyed by the API Gateway request id.
  const invalidationBatch = {
    CallerReference: (event.requestContext.requestId),
    Paths: { Quantity: 1, Items: [ path ] }
  };
  const params = { DistributionId: distID, InvalidationBatch: invalidationBatch };
  try
  {
    // .promise() without a callback issues the request exactly once and
    // returns a real Promise the handler can await.
    const resp = await cloudfront.createInvalidation(params).promise();
    console.log(resp);
  }
  catch (ex)
  {
    console.error(ex);
  }
};
I am working on a project where we are creating a serverless architecture to handle computation tasks. How do I pass parameters to the event object in an AWS Lambda function?
If there is a reference link, please suggest it.
index.js - handler function of lambda
exports.handler = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
const mysql = require("mysql");
const pool = mysql.createPool({
host: "test_db.xxxxxxxxxxx.us-east-2.rds.amazonaws.com",
user: "root",
password: "xxxxxxxx",
database: "test_db",
port: 3306
});
pool.getConnection((err, connection) => {
if (err) throw err;
// const { fund_name, returns, aum, rating, expense_ratio } = event.body; // BODY PARAMETER REMAINING
const randomNumber = () => Math.random()*10+1;
const fund_name = 'example';
const returns = randomNumber();
const aum = randomNumber();
const rating = randomNumber();
const expense_ratio = randomNumber();
if (fund_name && returns && aum && rating && expense_ratio) {
pool.getConnection((err, connection) => {
if (err) {
throw err;
}
const query = `INSERT INTO mutual_fund_tb(fund_name, returns, aum, rating, expense_ratio) VALUES ('${fund_name}', '${returns}', '${aum}', '${rating}', '${expense_ratio}')`;
connection.query(query, (err, results, fields) => {
if (err) {
throw err;
}
if (results.affectedRows === 1) {
const response = {
data: {...event.body},
message: "Data successfully inserted",
status: "ok"
};
callback(null, response);
} else {
const response = {
data: {},
message: "Data unable to insert into database.",
status: "failed"
};
callback(null, response);
}
});
});
} else {
const response = {
data: {},
message: "Invalid arguments passed. Please pass valid arguments",
status: "failed"
};
callback(null, response);
}
});
};
If you are using API Gateway, then on the method execution diagram click on Integration Response (integration type: Lambda Function). Under that there is a Body Mapping Templates section where you can add application/json or text/xml and then enter the mapping manually — for example, in our case we use application/json:
{
"abc" : $input.params('$def')
}
So `abc` can be accessed in your Lambda function, and `def` can be passed in the URL when calling the API Gateway.
Alternatively, you can use Lambda proxy integration; then you don't need to provide this mapping and you will still be able to access the parameters.
aws docs