I'm trying to get a list of all the API usage plans on my account. Running the CLI command returns my desired result, but I can't get the JS SDK version to work in Lambda. What's going wrong? I see in the SDK that the results are paginated, but it doesn't return data after I include that kind of info either.
CLI:
aws apigateway get-usage-plans
Output:
{
    "items": [
        {
            "id": "3hhulv",
            "name": "testplan",
            "apiStages": [
                {
                    "apiId": "dp6ounv3jd",
                    "stage": "default"
                }
            ],
            "throttle": {
                "burstLimit": 10,
                "rateLimit": 10.0
            },
            "quota": {
                "limit": 10000,
                "offset": 0,
                "period": "MONTH"
            }
        }
    ]
}
In Node.js:
const AWS = require('aws-sdk');
exports.handler = async (event) => {
    var apigateway = new AWS.APIGateway();
    var params = {};
    var usageplans = apigateway.getUsagePlans(params, function(err, data) {
        if (err) console.log(err, err.stack); // an error occurred
        else console.log(data); // successful response
    });
    const response = {
        statusCode: 200,
        things: usageplans.data
    };
    return response;
};
Output:
{
    "statusCode": 200
}
I resolved the issue by removing the callback function and adding .promise() to the call.
const AWS = require('aws-sdk');
exports.handler = async (event) => {
    var apigateway = new AWS.APIGateway();
    var params = {};
    var usageplans = await apigateway.getUsagePlans(params).promise();
    const response = {
        things: usageplans.items, // the plans are in .items; .position is just the pagination token
        statusCode: 200
    };
    return response;
};
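Since getUsagePlans is paginated, a single call may only return the first page. Here is a minimal sketch of following the position token across pages (same client as above; the limit value of 25 is an arbitrary choice):
const AWS = require('aws-sdk');
exports.handler = async (event) => {
    const apigateway = new AWS.APIGateway();
    const items = [];
    let position; // pagination token from the previous page; undefined on the first call
    do {
        const page = await apigateway.getUsagePlans({ position, limit: 25 }).promise();
        items.push(...page.items);
        position = page.position;
    } while (position);
    return { statusCode: 200, things: items };
};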
How do I configure AWS S3 bucket policies so that uploaded files are readable as public files?
Could somebody please give an example in Node.js code?
I am a novice in this field, so I do not have prior experience with it. Any help is highly appreciated.
There are several steps in this process. These instructions are applicable to Node.js 14 (runtime: nodejs14.x).
First, after logging in to your AWS account, open your bucket's permissions page using the link format below:
https://s3.console.aws.amazon.com/s3/buckets/{BUCKET-NAME}?region={REGION}&tab=permissions#
This is an example link:
https://s3.console.aws.amazon.com/s3/buckets/logo?region=us-east-1&tab=permissions#
The second step is creating the bucket policy. Replace "BUCKET-NAME" with the name of your bucket.
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "AllowPublicRead",
            "Effect": "Allow",
            "Principal": "*",
            "Action": "s3:PutObject",
            "Resource": "arn:aws:s3:::BUCKET-NAME/*",
            "Condition": {
                "StringEquals": {
                    "s3:x-amz-acl": "public-read"
                }
            }
        }
    ]
}
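Note that the policy above only governs uploads: it lets PutObject requests through when they set a public-read ACL. If you instead want every object in the bucket to be publicly readable regardless of its ACL, the AWS bucket-policy examples linked at the end use a read-side statement like this sketch (replace BUCKET-NAME as before):
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "PublicReadGetObject",
            "Effect": "Allow",
            "Principal": "*",
            "Action": "s3:GetObject",
            "Resource": "arn:aws:s3:::BUCKET-NAME/*"
        }
    ]
}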
Then create an endpoint in serverless.yml following the code below. The file_upload function is defined in the handler.js file.
serverless.yml
functions:
  file_upload:
    handler: handler.file_upload
    timeout: 15
    events:
      - httpApi:
          path: /file-upload
          method: post
After that, program the handler.js file as below.
handler.js
'use strict';
const AWS = require("aws-sdk");
const s3 = new AWS.S3();
const { Validator } = require('node-input-validator');
const md5 = require('md5'); // used below when building the S3 key
const MAX_SIZE = 2097152; // 2MB
const bucket = 'S3_BUCKET-NAME'; // Name of your bucket.
const Busboy = require("busboy");
s3.config.update({
    region: "us-east-1",
    accessKeyId: 'S3_ACCESS_KEY_ID',
    secretAccessKey: 'S3_SECRET_ACCESS_KEY'
});
const sendJSON = (code, message, data) => {
    let resData = {
        "status": code < 400 ? 'Success' : 'Error',
        "message": message,
    };
    data ? (resData["data"] = data) : null;
    return {
        statusCode: code,
        headers: {
            "Content-Type": "application/json"
        },
        body: JSON.stringify({
            ...resData
        })
    };
};
const FORM = {
    parse(body, headers) {
        return new Promise((resolve, reject) => {
            const data = {};
            const buffer = Buffer.from(body, 'base64');
            const bb = Busboy({
                headers: Object.keys(headers).reduce((newHeaders, key) => {
                    // busboy expects lower-case headers.
                    newHeaders[key.toLowerCase()] = headers[key];
                    return newHeaders;
                }, {}),
                limits: {
                    fileSize: MAX_SIZE, // Set as desired.
                    files: 1,
                },
            });
            bb.on('file', (name, stream, info) => {
                const chunks = [];
                stream.on('data', (chunk) => {
                    if (name === 'File') {
                        chunks.push(chunk);
                    } else {
                        reject(new Error('File not found.'));
                    }
                }).on('limit', () => {
                    reject(new Error('File size limit has been reached.'));
                }).on('close', () => {
                    if (name === 'File') {
                        data[name] = Buffer.concat(chunks);
                        data['ContentType'] = info.mimeType;
                        data['FileName'] = info.filename;
                    }
                });
            });
            bb.on('field', (name, val, info) => {
                data[name] = val;
            });
            bb.on('error', (err) => {
                reject(err);
            });
            bb.on('close', () => {
                resolve(data);
            });
            bb.end(buffer);
        });
    }
};
const uploadToS3 = (bucket, key, buffer, mimeType) =>
    new Promise((resolve, reject) => {
        s3.upload(
            { Bucket: bucket, Key: key, Body: buffer, ContentType: mimeType, ACL: 'public-read' },
            function (err, data) {
                if (err) reject(err);
                else resolve(data);
            });
    });
module.exports.file_upload = async (event) => {
    try {
        const data = await FORM.parse(event['body'], event['headers']);
        const validations = new Validator(data, {
            File: 'required'
        });
        // "path" is your sub-folder path in S3; the "uploads" fallback is an arbitrary choice.
        const path = data.path ? data.path : 'uploads';
        const matched = await validations.check();
        if (!matched) {
            return sendJSON(400, validations.errors);
        }
        const list = data.FileName.split(".");
        const originalKey = `${path}/${Date.now()}_${md5(list[0])}.${list[list.length - 1]}`;
        const originalFile = await Promise.all([
            uploadToS3(bucket, originalKey, data.File, data.ContentType)
        ]);
        return sendJSON(201, 'Successfully saved.', originalFile);
    } catch (e) {
        return sendJSON(400, e.message);
    }
};
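To sanity-check the multipart parsing locally, you can feed FORM.parse a hand-built body. This is a hypothetical test, not part of the handler; the boundary, field name, and file bytes are made up, and the body is base64-encoded because that is how the handler expects API Gateway to deliver binary payloads:
const boundary = 'test-boundary';
const body = Buffer.from(
    `--${boundary}\r\n` +
    'Content-Disposition: form-data; name="File"; filename="logo.png"\r\n' +
    'Content-Type: image/png\r\n' +
    '\r\n' +
    'fake-bytes\r\n' +
    `--${boundary}--\r\n`
).toString('base64');
FORM.parse(body, { 'Content-Type': `multipart/form-data; boundary=${boundary}` })
    .then((data) => console.log(data.FileName, data.ContentType, data.File.length));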
A link to the relevant AWS documentation is attached below:
https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-bucket-policies.html
You can also check the official AWS S3 documentation for Node.js, which explains how to update bucket policies.
I am pretty new to Node and facing a problem while trying to do a simple test of an AWS Lambda-DynamoDB integration, in order to build a response for an Amazon Lex request. If someone can say what needs to be changed, that would be much appreciated. Thanks.
Runtime: Node.js 10.x (also tried on Node.js 8.10).
Below is the Node.js sample code:
const AWS = require('aws-sdk');
var DBHandler = require("./DBHandler");
exports.handler = async (event) => {
    console.log('This event is ' + JSON.stringify(event));
    var intent = event.currentIntent.name;
    DBHandler.getalldetails(intent, function (err, data) {
        if (err) {
            context.fail(err);
        } else {
            var response = {
                "dialogAction": {
                    "type": "Close",
                    "fulfillmentState": "Fulfilled",
                    "message": {
                        "contentType": "PlainText",
                        "content": "data.Item.Message."
                    }
                }
            };
            return response;
            //callback ( null, response );
        }
    });
};
Below is ./DBHandler, in another file under the same Lambda function folder.
const AWS = require('aws-sdk');
AWS.config.update({
    region: "eu-west-1"
});
var docClient = new AWS.DynamoDB.DocumentClient();
var tableName = "testholly";
//exports.handler = (event,context,callback) => {
var getalldetails = (Intent, callback) => {
    var params = {
        TableName: tableName,
        Key: {
            "Intent": Intent
        }
    };
    docClient.get(params, function (err, data) {
        callback(err, data);
    });
};
module.exports = {
    getalldetails
};
First check the Lambda's DynamoDB access permissions. If none have been granted, create a role with access to the DynamoDB table and assign it to the Lambda function.
If you want to access DynamoDB without a role, then use a Cognito identity pool ID or an AWS access key and secret access key in AWS.config.update():
Sample:
AWS.config.update({
    accessKeyId: "",
    secretAccessKey: "",
    region: "us-east-1"
});
OR
AWS.config.update({
    "region": "us-east-1"
});
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
    IdentityPoolId: "Your identity_pool_id"
});
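For the role-based approach, here is a minimal sketch of an IAM policy that grants the Lambda role read access to the table (the ACCOUNT-ID placeholder and the restriction to GetItem are assumptions; scope the Resource to your own table):
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": ["dynamodb:GetItem"],
            "Resource": "arn:aws:dynamodb:eu-west-1:ACCOUNT-ID:table/testholly"
        }
    ]
}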
Your response is not within your callback from getalldetails()... it should be. So something like:
exports.handler = (event, context, callback) => {
    console.log('This event is ' + JSON.stringify(event));
    var intent = event.currentIntent.name;
    DBHandler.getalldetails(intent, function (err, data) {
        if (err) {
            context.fail(err);
        } else {
            var response = {
                "dialogAction": {
                    "type": "Close",
                    "fulfillmentState": "Fulfilled",
                    "message": {
                        "contentType": "PlainText",
                        "content": data.Item.Message
                    }
                }
            };
            callback(null, response);
        }
    });
};
You cannot use await with callbacks; you would need to "promisify" that bad boy. In the above, I pass the callback to the handler and drop async, since an async handler would return before the callback fires.
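For reference, here is a sketch of the promisified approach (assuming the same DBHandler module; util.promisify works here because getalldetails takes a standard (err, data) callback as its last argument):
const util = require('util');
const DBHandler = require('./DBHandler');
const getalldetails = util.promisify(DBHandler.getalldetails);
exports.handler = async (event) => {
    const intent = event.currentIntent.name;
    const data = await getalldetails(intent);
    return {
        dialogAction: {
            type: "Close",
            fulfillmentState: "Fulfilled",
            message: {
                contentType: "PlainText",
                content: data.Item.Message
            }
        }
    };
};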
I'm creating an invalidation in CloudFront via an API Gateway call to Lambda, but the invalidation only gets processed about 1 in 5 attempts. I'm certain I am missing something that would properly wait for the createInvalidation call to complete.
'use strict';
var AWS = require('aws-sdk');
var cloudfront = new AWS.CloudFront();
var distID = process.env.DISTRIBUTION_ID;
exports.handler = async (event) => {
    var path = (event.queryStringParameters.path);
    console.log(event.queryStringParameters.path);
    var params = {
        DistributionId: distID,
        InvalidationBatch: {
            CallerReference: (event.requestContext.requestId),
            Paths: {
                Quantity: 1,
                Items: [
                    path
                ]
            }
        }
    };
    await cloudfront.createInvalidation(params, function(err, data) {
        if (err) console.log(err, err.stack); // an error occurred
        else console.log(data); // successful response
        return data;
    });
};
The Lambda function completes every time, and it receives and parses the API Gateway call just fine to populate the variables, but the createInvalidation call only happens about 1 in 5 attempts, and no errors result from the other 4.
Try the following snippet. When you pass a callback, the SDK returns an AWS.Request object, not a Promise, so the await has nothing to wait on and the Lambda can finish before the request completes; calling .promise() with no callback returns a real Promise:
// import * as AWS from 'aws-sdk';
exports.handler = async (event) =>
{
    const cloudfront = new AWS.CloudFront();
    const distID = process.env.DISTRIBUTION_ID;
    const path = (event.queryStringParameters.path);
    console.log(event.queryStringParameters.path);
    const params =
    {
        DistributionId: distID,
        InvalidationBatch:
        {
            CallerReference: (event.requestContext.requestId),
            Paths: { Quantity: 1, Items: [ path ] }
        }
    };
    try
    {
        const resp = await cloudfront.createInvalidation(params).promise();
        console.log(resp);
    }
    catch (ex)
    {
        console.error(ex);
    }
};
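Since this function is fronted by API Gateway, you will probably also want to return something to the caller. Here is a sketch of the success branch (the response shape is an assumption, but Invalidation.Id is part of the createInvalidation result):
const resp = await cloudfront.createInvalidation(params).promise();
return {
    statusCode: 200,
    body: JSON.stringify({ invalidationId: resp.Invalidation.Id })
};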
I'm working on a project where I need to write a Lambda function that provides an AWS API to handle a GraphQL query and send the payload to AWS SQS. Everything is working fine, but when I check my AWS SQS queue it shows 2 messages every single time instead of 1, while CloudWatch shows the function triggered only once. Below I'm sharing my code with you; any help would be very much appreciated.
index.js
const { graphql } = require("graphql");
const { schema } = require("./graphql/schema");
exports.handler = async (event) => {
    // getting query from lambda event
    const query = event.query;
    // getting query variables from lambda event
    const variables = event.variables;
    return await graphql(schema, query, null, null, variables);
};
sqs.js
const AWS = require("aws-sdk");
AWS.config.update({ region: "us-east-1" });
// Create an SQS service object
const sqs = new AWS.SQS({
    apiVersion: '2012-11-05',
    "accessKeyId": process.env.ACCESS_KEY_ID,
    "secretAccessKey": process.env.SECRET_ACCESS_KEY
});
const QueueUrl = process.env.SQS_QUEUE_URL;
const sendPayloadToSQS = async (message) => {
    const params = {
        MessageBody: JSON.stringify(message),
        QueueUrl
    };
    await sqs.sendMessage(params, function(err, data) {
        if (err) {
            console.log("Message sending failed : ", err);
        } else {
            console.log("Message queued to SQS successfully : ", data.MessageId);
        }
    }).promise();
};
module.exports = sendPayloadToSQS;
graphql mutation file
const { GraphQLNonNull } = require("graphql");
const { mutationWithClientMutationId } = require("../../common");
const { JobRequestEventResponse } = require("../jobRequestEventResponse");
const { JobRequestInput, JobEventMetadataInput } = require("../jobSchema");
const sendPayloadToSQS = require("../../../sqs");
const { newId } = require("../../../newId");
const JobRequestEvent = mutationWithClientMutationId({
    name: "JobRequestEvent",
    inputFields: {
        eventMetadataInput: {
            type: new GraphQLNonNull(JobEventMetadataInput),
        },
        eventInput: {
            type: new GraphQLNonNull(JobRequestInput),
        },
    },
    outputFields: {
        JobRequestEventResponse: {
            type: JobRequestEventResponse,
        },
    },
    mutateAndGetPayload: async (params) => {
        const new_id = newId();
        if (params.eventInput.jobId === null || params.eventInput.jobId === undefined) {
            params.eventInput.jobId = new_id;
        }
        const payload = {
            _id: new_id,
            transactionId: new_id,
            name: params.eventMetadataInput.name,
            userRole: params.eventMetadataInput.userRole,
            date: params.eventMetadataInput.date,
            languageCode: params.eventMetadataInput.languageCode,
            eventInput: params.eventInput,
        };
        // send payload to sqs
        await sendPayloadToSQS(payload);
        return {
            JobRequestEventResponse: {
                id: payload._id,
                transactionId: payload.transactionId,
                status: "Success",
            },
        };
    },
});
module.exports = {
    JobRequestEvent,
};
I read the documentation again and found that the callback was the root cause of my problem: passing a callback to sendMessage dispatches the request once, and calling .promise() on the returned request dispatches it a second time. So I removed the callback, as you can see below.
Refer to the official AWS documentation.
Instead of this:
await sqs.sendMessage(params, function(err, data) {
    if (err) {
        console.log("Message sending failed : ", err);
    } else {
        console.log("Message queued to SQS successfully : ", data.MessageId);
    }
}).promise();
I wrote this:
const request = sqs.sendMessage(params);
const result = await request.promise();
if (result) {
    console.log("Message queued to SQS successfully : ", result.MessageId);
} else {
    console.log("Message queued failed");
}
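Putting it together, the corrected sqs.js helper looks like this (note it must be declared async for the await to be legal):
const sendPayloadToSQS = async (message) => {
    const params = {
        MessageBody: JSON.stringify(message),
        QueueUrl
    };
    try {
        const result = await sqs.sendMessage(params).promise();
        console.log("Message queued to SQS successfully : ", result.MessageId);
    } catch (err) {
        console.log("Message sending failed : ", err);
    }
};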
Folks, new to JavaScript... trying to do simple DynamoDB queries from Node:
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
var db = new AWS.DynamoDB();
var params = {
    "TableName": 'admins',
    "Key": [
        { "username": { "S": "foo" } },
    ],
};
db.getItem(params, function(err, data) {
    console.log('error: ' + err);
    console.log(data);
    return next();
    res.send(data);
});
Output:
error: UnexpectedParameter: Unexpected key 'username' found in params.Key['0']
Thanks! Any help would be greatly appreciated!
You must follow the SDK and the docs; it's simple:
http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_GetItem.html
var params = {
    AttributesToGet: [
        "password"
    ],
    TableName: 'foo',
    Key: {
        "username": {
            "S": "bar"
        }
    }
};
db.getItem(params, function(err, data) {
    if (err) {
        console.log(err); // an error occurred
    } else {
        console.log(data); // successful response
        res.send(data);
    }
    return next();
});
I was trying to do it as suggested in the documentation, but also got errors.
In the end, the following worked:
var aws = require('aws-sdk');
var db = new aws.DynamoDB({
    region: 'eu-central-1',
    maxRetries: 1
});
exports.handler = event => {
    return queryMyThings(event);
};
const queryMyThings = async (event) => {
    var params = {
        Key: {
            "ColumnByWhichYouSearch": {
                S: "ValueThatYouAreQuerying"
            }
        },
        TableName: "YourTableName"
    };
    return await db.getItem(params).promise();
};
Here are great resources for DynamoDB using Node.js:
Getting Started guide
Documentation and examples
If you're using the DocumentClient
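For example, the DocumentClient version of the same getItem drops the type descriptors like { "S": ... }; here is a sketch reusing the placeholder table and key names from the answer above:
var aws = require('aws-sdk');
var docClient = new aws.DynamoDB.DocumentClient({ region: 'eu-central-1' });
const queryMyThings = async () => {
    var params = {
        Key: {
            ColumnByWhichYouSearch: "ValueThatYouAreQuerying"
        },
        TableName: "YourTableName"
    };
    return await docClient.get(params).promise();
};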