S3 triggers Lambda Function twice with unique request id - node.js

I use S3 to trigger my Lambda function every time I upload a file to an S3 bucket. I've tested it and it works, but I noticed that S3 is triggering my Lambda function twice, each time with a unique request ID. Even if I remove the entire code and just add console.info('something'), it still triggers twice, so clearly it isn't Lambda retrying on error. I also set retries to 0.
1st request id : awsRequestId: '9f73e49f-6cc7-454e-a89f-7d88122a7166'
2nd request id : awsRequestId: '1c8572d5-61ee-4b0b-93d9-4f8a3dcd28bf'
Here's my code:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
  region: process.env.region,
  accessKeyId: process.env.accessKeyId,
  secretAccessKey: process.env.secretAccessKey
});
const axios = require('axios');
const csvtojson = require('csvtojson');
const _ = require('lodash');

exports.handler = async (event, context) => {
  const { normalizeAndHashData } = require('./hash');
  const params = { Bucket: 'myBucket', Key: 'myKey' };
  const data = await s3.getObject(params).promise();
  const eventsJson = await csvtojson().fromString(data.Body.toString());
  const result1 = await axios.post(`https://myurl`, eventsJson);
  if (result1) {
    const sampleDataToUpload = { id: 1, name: 'test' };
    const result2 = await axios.post(`https://myurl2`, sampleDataToUpload);
  }
  return context.succeed();
};
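One way to narrow this down is to log exactly which S3 event each invocation received: if the bucket has two overlapping notification configurations (for example, two event types or two prefixes matching the same key), the records will show it. A minimal logging sketch, assuming only the standard S3 notification event shape:

exports.handler = async (event) => {
  // Log every record so the two invocations can be compared in CloudWatch.
  for (const record of event.Records || []) {
    console.info(JSON.stringify({
      eventName: record.eventName,          // e.g. ObjectCreated:Put
      eventTime: record.eventTime,
      bucket: record.s3.bucket.name,
      key: record.s3.object.key,
      sequencer: record.s3.object.sequencer // helps correlate notifications for the same object write
    }));
  }
};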

Related

How to get the DisconnectTimestamp from Amazon Connect call in NodeJS

My call recordings are being pushed to S3 and stored with contactId_timestamp.wav as the filename.
For now I can get/download the files by providing the file name explicitly as the key. Now I want to build the filename myself as contactId + DisconnectTimestamp. I can get the contactId through getContactId(), but how do I get the DisconnectTimestamp?
My goal is the same as what we experience in Contact Flow Search: the recordings can be played back by contactId.
Here is how I am downloading the recordings from S3.
require("dotenv").config();
const expres = require("express");
const app = expres();
app.listen(3001);
const aws = require("aws-sdk");
aws.config.update({
secretAccessKey: process.env.ACCESS_SECRET,
accessKeyId: process.env.ACCESS_KEY,
region: process.env.REGION
})
const BUCKET = process.env.BUCKET
const s3 = new aws.S3(secretAccessKey = process.env.ACCESS_SECRET, accessKeyId = process.env.ACCESS_KEY);
app.get("/download/filename", async(req, res)=>{
const filename = req.params.filename
let x = await s3.getObject({Bucket:BUCKET, Key:filename}).promise();
res.send(x.Body);
})
And then hitting http://localhost:3001/download/0989c085-16d1-478b-8858-1ccddb2990f4_20220303T16:46_UTC.wav
If you have the ContactId for the call you can use describeContact to get the contact info, which includes the DisconnectTimestamp.
Something along these lines should work.
const AWS = require('aws-sdk');
AWS.config.update({
  secretAccessKey: process.env.ACCESS_SECRET,
  accessKeyId: process.env.ACCESS_KEY,
  region: process.env.REGION
});

const connect = new AWS.Connect({ region: process.env.REGION });

var params = {
  ContactId: 'STRING_VALUE', /* required */
  InstanceId: 'STRING_VALUE' /* required - the Connect instance ID */
};

connect.describeContact(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else var DisconnectTimestamp = data.Contact.DisconnectTimestamp; // successful response
});
More info here: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Connect.html#describeContact-property
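To tie this back to the original goal of deriving the S3 key from the contactId, here is a promise-based sketch that formats the DisconnectTimestamp into the contactId_timestamp.wav pattern shown in the question. The downloadRecording helper and the exact timestamp format are assumptions based on the sample filename above:

const { Connect, S3 } = require("aws-sdk");
const connect = new Connect({ region: process.env.REGION });
const s3 = new S3();

// Hypothetical helper: build "contactId_YYYYMMDDTHH:MM_UTC.wav" and fetch it from S3.
async function downloadRecording(contactId, instanceId, bucket) {
  const { Contact } = await connect
    .describeContact({ ContactId: contactId, InstanceId: instanceId })
    .promise();
  const t = new Date(Contact.DisconnectTimestamp);
  const pad = (n) => String(n).padStart(2, "0");
  const stamp = `${t.getUTCFullYear()}${pad(t.getUTCMonth() + 1)}${pad(t.getUTCDate())}` +
    `T${pad(t.getUTCHours())}:${pad(t.getUTCMinutes())}_UTC`;
  const key = `${contactId}_${stamp}.wav`; // assumed to match the recording naming scheme
  return s3.getObject({ Bucket: bucket, Key: key }).promise();
}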

How to trace a lambda invokes a lambda with AWS X-Ray?

I have an AWS Lambda function which is triggered by an API Gateway event. The API Gateway is configured to use X-Ray.
As the Lambda tracing configuration defaults to PassThrough, it is also shown in X-Ray (service map, etc.).
The invoked Lambda uses the Node.js aws-sdk to invoke another Lambda. If I understand correctly, the trace ID has to be passed on to the next invocation in order for that Lambda to also show up in X-Ray. I found no option for this in the SDK's API.
const result = await lambda
  .invoke(lambdaParamsCreateUser)
  .promise();
How can I achieve this? How can I also trace the invocation as part of the original request?
With the tips from Balu Vyamajala I changed the AWS-SDK import to the following:
import AWS from "aws-sdk";
import AwsXRay from "aws-xray-sdk-core";
const aws = AwsXRay.captureAWS(AWS);
export default aws;
I use it when I invoke my second function like this:
import AWS from "aws";
const Lambda = AWS.Lambda;
// ...
const lambda = new Lambda({ region: "eu-central-1" });
const lambdaPromise = lambda
  .invoke({
    FunctionName: AUTH_CREATE_USER_FUNC,
    InvocationType: "RequestResponse",
    Qualifier: AUTH_CREATE_USER_FUNC_VERSION,
    Payload: JSON.stringify({
      eMail: eMail,
      device: device,
      customerId: customerId,
    }),
    LogType: "Tail",
  })
  .promise();
But in X-Ray there is no invocation chain :-(
https://imgur.com/wDMlNzb
Am I making a mistake?
If we enable X-Ray for both Lambda functions, the trace-id is automatically passed along and will be the same for both Lambdas.
In the code, we can enable X-Ray simply by wrapping it around the aws-sdk.
JavaScript:
const AWSXRay = require("aws-xray-sdk-core");
const AWS = AWSXRay.captureAWS(require("aws-sdk"));
TypeScript:
import AWSXRay from 'aws-xray-sdk';
import aws from 'aws-sdk';
const AWS = AWSXRay.captureAWS(aws)
Here is a sample test to confirm.
balu-test >> sample-test
Lambda 1 (balu-test) :
const AWSXRay = require("aws-xray-sdk-core");
const AWS = AWSXRay.captureAWS(require("aws-sdk"));
const lambda = new AWS.Lambda();

exports.handler = async function (event, context) {
  var params = {
    FunctionName: "sample-test",
    InvocationType: "RequestResponse",
    Payload: '{ "name" : "foo" }',
  };
  const response = await lambda.invoke(params).promise();
  console.log('response', response);
  return "success";
};
Lambda 2 (sample-test):
const AWSXRay = require("aws-xray-sdk-core");
const AWS = AWSXRay.captureAWS(require("aws-sdk"));

let region = "us-east-1";
let secretName = "SomeSecret";
let secret;
let decodedBinarySecret;

var client = new AWS.SecretsManager({
  region: region,
});

exports.handler = (event, context, callback) => {
  client.getSecretValue({ SecretId: secretName }, function (err, data) {
    if (err) {
      callback(err);
    } else {
      if ("SecretString" in data) {
        secret = data.SecretString;
      } else {
        let buff = Buffer.from(data.SecretBinary, "base64");
        decodedBinarySecret = buff.toString("ascii");
      }
      callback(null, secret);
    }
  });
};
The TraceId is the same and X-Ray points to the same graph for both Lambda invocations. The same thing happens when the first API is called from API Gateway: the trace-id is generated the first time and is passed along as an HTTP header to downstream processes.
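To verify this from inside either function, a minimal sketch that logs the trace header Lambda injects into the environment; when tracing is active and the header is propagated, both invocations in the same chain should report the same Root segment:

exports.handler = async (event) => {
  // Lambda sets _X_AMZN_TRACE_ID when X-Ray tracing is active or a trace header is propagated.
  // Format: "Root=1-xxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxx;Parent=...;Sampled=1"
  console.log("trace header:", process.env._X_AMZN_TRACE_ID);
  return "ok";
};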

Cannot upload to AWS S3 inside my Lambda function

I have the following Lambda function. It receives an XML, looks through it, finds a base64 PDF file and tries to upload it to S3.
index.js
const AWS = require('aws-sdk');
const xml2js = require('xml2js');
const pdfUpload = require('./upload_pdf');
const s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
  let attachment;
  xml2js.parseString(event.body, function(err, result) {
    attachment = result.Attachment[0].Data[0];
    if (attachment) {
      pdfUpload(attachment);
    }
  });
  return {
    statusCode: 200
  };
};
upload_pdf.js
/**
 *
 * @param {string} base64 Data
 * @return {string} Image url
 */
const pdfUpload = async (base64) => {
  const AWS = require('aws-sdk');
  const s3 = new AWS.S3();
  const base64Data = Buffer.from(base64, 'base64');
  // With this setup, each time your user uploads an image, it will be overwritten.
  // To prevent this, use a different Key each time.
  // This won't be needed if they're uploading their avatar, hence the filename, userAvatar.js.
  const params = {
    Bucket: 'mu-bucket',
    Key: `123.pdf`,
    Body: base64Data,
    ACL: 'public-read',
    ContentEncoding: 'base64',
    ContentType: `application/pdf`
  };
  let location = '';
  let key = '';
  try {
    const { Location, Key } = await s3.upload(params).promise();
    location = Location;
    key = Key;
  } catch (error) {
    // console.log(error)
  }
  console.log(location, key);
  return location;
};
module.exports = pdfUpload;
No matter what I do, the file does not get uploaded. I have checked the permissions, and the lambda has access to the bucket. Running the lambda I'm not receiving any errors either. Can anybody see what might be wrong here?
First, as a piece of advice, I think you should add more logging to see at which step the function is stuck or failing.
The second thing you can try is to await the upload:
await pdfUpload(attachment);
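Note that awaiting inside the xml2js callback alone won't help, because the surrounding async handler still returns before the callback runs. A minimal restructuring sketch, assuming xml2js's promise API (parseStringPromise, available in recent versions), so the handler only returns after the upload has finished:

const xml2js = require('xml2js');
const pdfUpload = require('./upload_pdf');

exports.handler = async (event) => {
  // Parse the XML with the promise-based API so the result can be awaited directly.
  const result = await xml2js.parseStringPromise(event.body);
  const attachment = result.Attachment[0].Data[0];
  if (attachment) {
    // Wait for the S3 upload to complete before the handler returns.
    await pdfUpload(attachment);
  }
  return { statusCode: 200 };
};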

AWS Textract methods in Node.js are not getting invoked

I want to extract text from an image using Node.js, so I created a Lambda in AWS. Please find the code snippet below. The issue is that the Textract method detectDocumentText is not getting invoked.
As far as permissions go, I have given the Lambda full access to S3 and Textract. Am I missing anything?
var AWS = require("aws-sdk");
var base64 = require("base-64");
var fs = require("fs");
exports.handler = async (event, context, callback) => {
// Input for textract can be byte array or S3 object
AWS.config.region = "us-east-1";
//AWS.config.update({ region: 'us-east-1' });
var textract = new AWS.Textract({ apiVersion: "2018-06-27" });
//var textract = new AWS.Textract();
console.log(textract);
var params = {
Document: {
/* required */
//'Bytes': imageBase64
S3Object: {
Bucket: "717577",
Name: "Picture2.png"
}
}
};
textract.detectDocumentText(params, function(err, data) {
if (err) {
console.log(err); // an error occurred
} else {
console.log(data); // successful response
callback(null, data);
}
});
};
I also don't see any error logs in CloudWatch Logs.
The problem is that you have marked your handler as async, which means it returns a promise. Since you never await the Textract call, that promise resolves immediately and Lambda ends the execution before the callback can run. You have two choices here:
Remove async.
Or, the more recommended way, convert your callback style to promises. The aws-sdk supports a .promise() method on all request objects, so you can leverage that. The code will look like this:
var AWS = require("aws-sdk");
var base64 = require("base-64");
var fs = require("fs");
exports.handler = async (event, context) => {
// Input for textract can be byte array or S3 object
AWS.config.region = "us-east-1";
//AWS.config.update({ region: 'us-east-1' });
var textract = new AWS.Textract({ apiVersion: "2018-06-27" });
//var textract = new AWS.Textract();
console.log(textract);
var params = {
Document: {
/* required */
//'Bytes': imageBase64
S3Object: {
Bucket: "717577",
Name: "Picture2.png"
}
}
};
const data = await textract.detectDocumentText(params).promise();
return data;
};
Hope this helps.

Lambda@Edge triggered with DynamoDB giving 503 Error

I am trying to invoke Lambda through a CloudFront viewer request. Here is my Lambda code:
'use strict';
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = (event, context, callback) => {
  /* Get request */
  const request = event.Records[0].cf.request;
  const requestbody = Buffer.from(request.body.data, 'base64').toString();
  const data = JSON.parse(requestbody);
  const Id = data.Name;
  console.log(Id);

  /* Generate body for response */
  const body =
    '<html>\n'
    + '<head><title>Hello From Lambda@Edge</title></head>\n'
    + '<body>\n'
    + '<h1>You clicked more than 10 Times </h1>\n'
    + '</body>\n'
    + '</html>';

  var params = {
    TableName: "Test",
    ProjectionExpression: "#V,#N",
    KeyConditionExpression: "#N = :v1",
    ExpressionAttributeNames: {
      "#N": "Name",
      "#V": "Value"
    },
    ExpressionAttributeValues: {
      ":v1": Id
    }
  };

  var querydb = docClient.query(params).promise();
  querydb.then(function(data) {
    console.log(data.Items[0].Value);
    if (data.Items[0].Value >= 11) {
      const response = {
        status: '200',
        body: body,
      };
      callback(null, response);
    } else {
      callback(null, request);
    }
  }).catch(function(err) {
    console.log(err);
  });
};
When I trigger the same Lambda through the console it gives the correct response, but when I deploy it through CloudFront it gives a 503 error. I had tried the same code without the DynamoDB client and it worked perfectly fine. Here is the working one:
'use strict';
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = (event, context, callback) => {
  /* Get request */
  const request = event.Records[0].cf.request;
  const requestbody = Buffer.from(request.body.data, 'base64').toString();
  const data = JSON.parse(requestbody);

  /* Generate body for response */
  const body =
    '<html>\n'
    + '<head><title>Hello From Lambda@Edge</title></head>\n'
    + '<body>\n'
    + '<h1>You clicked more than 10 Times </h1>\n'
    + '</body>\n'
    + '</html>';

  if (data.Value >= 10) {
    const response = {
      status: '200',
      body: body,
    };
    callback(null, response);
  } else {
    callback(null, request);
  }
};
I have given full DynamoDB permissions to the Lambda@Edge function.
Any help is appreciated
Thanks
Where have you specified the region for DynamoDB?
It is possible that Lambda@Edge is executing in a region where your DDB table is missing.
Have a look at the AWS doc on the region's order of precedence. You can also look at this Lambda@Edge workshop code and documentation for more details on calling DDB.
On a side note: a viewer-facing Lambda function making a call to a cross-region DynamoDB table will have a negative effect on your latency. I'm not sure about your use case, but see if it is possible to move this call to an origin-facing event or make the call to DDB asynchronous. A region-pinning sketch is shown below.
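Since Lambda@Edge replicas can run in any edge region, one way to rule out a missing-region problem is to pin the DocumentClient to the region that actually holds the table. A minimal sketch, assuming the Test table from the question lives in us-east-1 (the key value is hypothetical):

'use strict';
const AWS = require("aws-sdk");
// Pin the client to the table's home region, regardless of which edge region
// the Lambda@Edge replica runs in. us-east-1 is an assumed example region.
const docClient = new AWS.DynamoDB.DocumentClient({ region: "us-east-1" });

exports.handler = async (event) => {
  const request = event.Records[0].cf.request;
  const data = await docClient.query({
    TableName: "Test",
    KeyConditionExpression: "#N = :v1",
    ExpressionAttributeNames: { "#N": "Name" },
    ExpressionAttributeValues: { ":v1": "someName" }, // hypothetical key value
  }).promise();
  console.log(data.Items);
  return request; // pass the request through when no early response is needed
};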
