My problem
I am writing a simple js function that reads some information from AWS CloudWatch Logs.
Following the answer at Configuring region in Node.js AWS SDK, and the AWS nodejs SDK documentation, I came up with the following:
Code
var AWS = require('aws-sdk');
var cloudwatchlogs = new AWS.CloudWatchLogs();
console.log(AWS.config.region) // Undefined
AWS.config.region = 'eu-central-1' // Define the region with dot notation
console.log(AWS.config.region) // eu-central-1
AWS.config.update({region:'eu-central-1'}); // Another way to update
console.log(AWS.config.region) // eu-central-1
var params = {
limit: 0,
// logGroupNamePrefix: 'STRING_VALUE',
// nextToken: 'STRING_VALUE'
};
// This call is failing
cloudwatchlogs.describeLogGroups(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
Output and error
undefined
eu-central-1
eu-central-1
{ ConfigError: Missing region in config
at Request.VALIDATE_REGION (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/event_listeners.js:91:45)
at Request.callListeners (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at callNextListener (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/sequential_executor.js:95:12)
at /Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/event_listeners.js:85:9
at finish (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:315:7)
at /Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:333:9
at SharedIniFileCredentials.get (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/credentials.js:126:7)
at getAsyncCredentials (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:327:24)
at Config.getCredentials (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:347:9)
at Request.VALIDATE_CREDENTIALS (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/event_listeners.js:80:26)
message: 'Missing region in config',
code: 'ConfigError',
time: 2017-07-11T09:57:55.638Z } ...
Environment
The code is running locally under node v8.1.2.
My question
How can I correctly configure the region in the AWS js SDK?
Addendum
I opened an issue on GitHub and got a response.
Alternatively, you can specify the region when creating your CloudWatchLogs object:
var AWS = require('aws-sdk');
var cloudwatchlogs = new AWS.CloudWatchLogs({region: 'eu-central-1'});
Write the code in the following way and it will work.
var AWS = require('aws-sdk');
// assign AWS credentials here in following way:
AWS.config.update({
accessKeyId: 'asdjsadkskdskskdk',
secretAccessKey: 'sdsadsissdiidicdsi',
region: 'eu-central-1'
});
var cloudwatchlogs = new AWS.CloudWatchLogs({apiVersion: '2014-03-28'});
Use the following.
AWS.config.update({region: 'eu-central-1'});
You can find more information at the following link.
http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-region.html
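In other words, the gotcha in my original snippet, as I understand the responses, is that the CloudWatchLogs client was constructed before the region was set, so it captured an empty configuration. A minimal sketch of the corrected ordering (region value as in my example):
var AWS = require('aws-sdk');
// Set the region *before* constructing any service clients,
// or pass it directly to the constructor as shown above
AWS.config.update({region: 'eu-central-1'});
var cloudwatchlogs = new AWS.CloudWatchLogs();
cloudwatchlogs.describeLogGroups({}, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data);               // successful response
});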
Related
I have a NodeJS Lambda function that is trying to list IAM users via the IAM listUsers API method. The call fires off but never returns until the Lambda function itself times out, and I'm unsure why. Here is the code:
exports.handler = async (event) => {
console.log('before call');
var ret = await listIAMUsers();
console.log('ret: ' + JSON.stringify(ret));
};
async function listIAMUsers() {
var AWS = require('aws-sdk');
AWS.config.update({region: 'REGION'});
var iam = new AWS.IAM({apiVersion: '2010-05-08'});
var params = {MaxItems: 10};
return iam.listUsers(params).promise();
}
Managing IAM Users. Pay close attention to where we initialize the client and add the require statements.
For optimization purposes, it is good to load and initialize the client while the function is loading.
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'REGION'});
// Create the IAM service object
var iam = new AWS.IAM({apiVersion: '2010-05-08'});
As JS is single-threaded, you can reuse the clients across invocations.
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
exports.handler = async (event, context) => {
console.log('before call');
var ret = await listIAMUsers();
console.log('ret: ' + JSON.stringify(ret));
};
async function listIAMUsers() {
// Set the region
AWS.config.update({region: 'REGION'});
// Create the IAM service object
var iam = new AWS.IAM({apiVersion: '2010-05-08'});
var params = {MaxItems: 10};
return iam.listUsers(params).promise();
}
START RequestId: 933ca3dd-2feb-4f98-8f8e-87286f59748d Version: $LATEST
2021-02-22T15:57:14.867Z 933ca3dd-2feb-4f98-8f8e-87286f59748d INFO before call
2021-02-22T15:57:15.560Z 933ca3dd-2feb-4f98-8f8e-87286f59748d INFO ret: {"ResponseMetadata":{"RequestId":"bf65a9f5-e4a6-493b-a6a2-c3081d88f7be"},
"Users":[
{"Path":"/","UserName":"username","UserId":"sadsadsadsa",
"Arn":"arn:aws:iam::1234567890:user/username",
"CreateDate":"2020-11-27T13:09:20.000Z","Tags":[]},
{"Path":"/","UserName":"tempuser","UserId":"adsadasdasdsad",
"Arn":"arn:aws:iam::1234567890:user/tempuser",
"CreateDate":"2021-01-24T21:24:47.000Z","Tags":[]}],"IsTruncated":false}
END RequestId: 933ca3dd-2
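Taking the initialization advice above one step further, the IAM client itself can also be created at module load time so it is reused across warm invocations. A minimal sketch of that variant (the region value is a placeholder, as in the code above):
// Load the SDK and create the IAM client once, when the module is loaded
var AWS = require('aws-sdk');
AWS.config.update({region: 'REGION'}); // placeholder region
var iam = new AWS.IAM({apiVersion: '2010-05-08'});
exports.handler = async (event, context) => {
    console.log('before call');
    var ret = await iam.listUsers({MaxItems: 10}).promise();
    console.log('ret: ' + JSON.stringify(ret));
    return ret;
};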
There are a few possibilities here:
Lambda Role Permission - Check whether your Lambda role has the correct IAM policy attached, such as arn:aws:iam::aws:policy/IAMReadOnlyAccess
If your Lambda function is placed in a public or private subnet without any internet access, you will also need arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole attached, as well as internet or AWS services access (shown here)
I'm trying to read from an S3 bucket; currently I have this code:
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: 'myAccesID', secretAccessKey: 'superRandomSecretKey', region: 'us-west-2'});
var s3 = new AWS.S3();
var params = {
Bucket: 'my-bucket',
Delimiter: '/'
}
s3.listObjects(params, function (err, data) {
if(err)throw err;
console.log(data);
});
But I get Access Denied. I know that my named profile works because I can list my files with the AWS CLI command:
aws s3 ls s3://my-bucket --recursive --profile my-named-profile
So, how can I initialize my aws instance with a named profile?
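For reference, one way to do what is literally asked here with the v2 SDK is to load the named profile from ~/.aws/credentials via SharedIniFileCredentials; a minimal sketch (profile name, region, and bucket are placeholders):
var AWS = require('aws-sdk');
// Load credentials for a specific named profile from ~/.aws/credentials
AWS.config.credentials = new AWS.SharedIniFileCredentials({profile: 'my-named-profile'});
AWS.config.update({region: 'us-west-2'});
var s3 = new AWS.S3();
s3.listObjects({Bucket: 'my-bucket', Delimiter: '/'}, function (err, data) {
    if (err) throw err;
    console.log(data);
});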
The recommended way to access S3 from an instance is through IAM roles for Amazon EC2.
The basic role could just contain the AWS managed policy AmazonS3ReadOnlyAccess.
With the role attached to your instance, you don't need to do anything special for the aws-sdk to use it; the SDK picks it up automatically. Therefore, your code could simply be:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var params = {
Bucket: 'my-bucket',
Delimiter: '/'
}
s3.listObjects(params, function (err, data) {
if(err)throw err;
console.log(data);
});
I'm trying to get an SSM parameter inside my Node.js project. The IAM credentials are fine; I wrote a test on my Elastic Beanstalk instance and it works. The problem only happens inside the project. Any ideas why?
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
var ssm = new AWS.SSM();
var options = {
Name: '/test/test', /* required */
WithDecryption: false
};
var parameterPromise = ssm.getParameter(options).promise();
parameterPromise.then(function(data, err) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
I discovered it is the same as this issue: https://github.com/localstack/localstack/issues/1107
You need to pass the region in the SSM constructor:
var ssm = new AWS.SSM({region: 'us-east-1'});
It seems to be a bug.
Thanks.
I have a Node 4.3 Lambda function in AWS. I want to be able to write a text file to S3 and have read many tutorials about how to integrate with S3. However, all of them are about how to call Lambda functions after writing to S3.
How can I create a text file in S3 from Lambda using Node? Is this possible? Amazon's documentation doesn't seem to cover it.
Yes it is absolutely possible!
var AWS = require('aws-sdk');
function putObjectToS3(bucket, key, data){
var s3 = new AWS.S3();
var params = {
Bucket : bucket,
Key : key,
Body : data
}
s3.putObject(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
}
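For completeness, a hypothetical handler could perform the same put and signal completion back to Lambda; note that the callback should only fire once the upload has finished, otherwise the function may be frozen mid-request (the bucket, key, and body below are placeholders):
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
    var params = {
        Bucket: 'my-bucket-name-goes-here', // placeholder bucket
        Key: 'output/hello.txt',            // placeholder key
        Body: 'Hello from Lambda!'          // placeholder body
    };
    // Signal completion only after the upload finishes
    s3.putObject(params, function(err, data) {
        if (err) callback(err);
        else callback(null, data);
    });
};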
Make sure that you give your Lambda function the required write permissions to the target S3 bucket / key path by selecting or updating the IAM role your Lambda executes under.
IAM Statement to add:
{
"Sid": "Stmt1468366974000",
"Effect": "Allow",
"Action": "s3:*",
"Resource": [
"arn:aws:s3:::my-bucket-name-goes-here/optional-path-before-allow/*"
]
}
Further reading:
AWS JavaScript SDK
The specific "Put Object" details
After a long, long time of silently failing with 'Task timed out after X' and no useful error message, I went back to the beginning, to the Amazon default template example, and that worked!
> Lambda > Functions > Create function > Use a blueprint > filter: s3.
Here is my tweaked version of the Amazon example:
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
async function uploadFileOnS3(fileData, fileName){
const params = {
Bucket: "The-bucket-name-you-want-to-save-the-file-to",
Key: fileName,
Body: JSON.stringify(fileData),
};
try {
const response = await s3.upload(params).promise();
console.log('Response: ', response);
return response;
} catch (err) {
console.log(err);
}
};
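When a helper like this is called from an async handler, remember to await it; one common cause of the 'Task timed out' symptom described above is returning from the handler before the upload promise has resolved. A hypothetical caller (payload and key are placeholders):
exports.handler = async (event) => {
    // Hypothetical payload and key; replace with your own data
    const result = await uploadFileOnS3({hello: 'world'}, 'output/hello.json');
    return result;
};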
IAM statement for serverless.com - write to a specific S3 bucket
service: YOURSERVICENAME
provider:
name: aws
runtime: nodejs8.10
stage: dev
region: eu-west-1
timeout: 60
iamRoleStatements:
- Effect: "Allow"
Action:
- s3:PutObject
Resource: "arn:aws:s3:::**BUCKETARN**/*"
- Effect: "Deny"
Action:
- s3:DeleteObject
Resource: "arn:aws:s3:::**BUCKETARN**/*"
You can upload a file to S3 using the aws-sdk.
If you are using an IAM user, you have to provide the access key and secret key, and make sure you have given the IAM user the necessary permissions.
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: "ACCESS_KEY",secretAccessKey: 'SECRET_KEY'});
var s3bucket = new AWS.S3({params: {Bucket: 'BUCKET_NAME'}});
function uploadFileOnS3(fileName, fileData){
var params = {
Key: fileName,
Body: fileData,
};
s3bucket.upload(params, function (err, res) {
if(err)
console.log("Error in uploading file on s3 due to "+ err)
else
console.log("File successfully uploaded.")
});
}
Here I temporarily hard-coded the AWS access key and secret key for testing purposes. For best practices, refer to the documentation.
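As an alternative to hard-coding, the v2 SDK can also pick credentials up from environment variables (or the shared credentials file); a minimal sketch of the environment-variable approach, assuming AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are exported (the bucket name is a placeholder):
var AWS = require('aws-sdk');
// Read AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY (and optionally AWS_SESSION_TOKEN)
// from the environment instead of hard-coding them in the source
AWS.config.credentials = new AWS.EnvironmentCredentials('AWS');
var s3bucket = new AWS.S3({params: {Bucket: 'BUCKET_NAME'}}); // placeholder bucket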
One more option (export the file as multipartFormData):
React > Node.js (AWS Lambda) > S3 Bucket
https://medium.com/#mike_just_mike/aws-lambda-node-js-export-file-to-s3-4b35c400f484
Using Node.js, I'm trying to list the buckets I have in AWS S3 by following these basic examples.
http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-examples.html
My code looks like this, and it is run from localhost.
var AWS = require("aws-sdk"),
con = require('./../lib/config.js');
var s3 = new AWS.S3({
accessKeyId: con.fig.AWSAccessKeyId,
secretAccessKey: con.fig.AWSSecretKey,
});
s3.listBuckets(function(err, data) {
console.log(data);
});
But data is null.
What have I missed?
Is there some permission to set? I have set the permission AmazonS3FullAccess on the user.
I want to be able to upload files from a website to a S3 bucket.
Try this. The documentation says if err is null then the request was successful.
s3.listBuckets(function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#listBuckets-property
OK, load your config using the following:
var AWS = require("aws-sdk"),
con = require('./../lib/config.js');
AWS.config.update({
accessKeyId: con.fig.AWSAccessKeyId,
secretAccessKey: con.fig.AWSSecretKey
})
var s3 = new AWS.S3();
s3.listBuckets(function(err,data){
if(err)console.log(err);
else console.log (data)
});