I'm trying to read from an S3 bucket; currently I have the code:
// Configure the SDK with hard-coded credentials and a region.
// NOTE(review): embedding credentials in source is unsafe outside of
// throwaway examples; prefer a named profile or an IAM role.
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: 'myAccesID', secretAccessKey: 'superRandomSecretKey', region: 'us-west-2'});
var s3 = new AWS.S3();
// Delimiter '/' groups keys into CommonPrefixes (a folder-style
// listing of the bucket root) instead of returning every key.
var params = {
Bucket: 'my-bucket',
Delimiter: '/'
}
s3.listObjects(params, function (err, data) {
if(err)throw err;
console.log(data);
});
But I get Access Denied, I know that my named profile works because I can list my files with the aws cli command:
aws s3 ls s3://my-bucket --recursive --profile my-named-profile
So, how can I initialize my aws instance with a named profile?
The recommended way to access S3 from an instance is through IAM roles for Amazon EC2.
The basic role could just contain AWS Managed policy AmazonS3ReadOnlyAccess:
Having the role attached to your instance, you don't need to do anything special for aws-sdk to use it. The SDK will automatically recognize it. Therefore, your code could simply be:
// With an IAM role attached to the instance, no explicit credentials
// are needed: the SDK discovers them automatically.
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
// Delimiter '/' returns top-level prefixes in CommonPrefixes rather
// than every key in the bucket.
var params = {
Bucket: 'my-bucket',
Delimiter: '/'
}
s3.listObjects(params, function (err, data) {
if(err)throw err;
console.log(data);
});
Related
I am trying to upload file in aws-s3 but it shows me some error like
NodeCode:
const AWS = require('aws-sdk');

/**
 * Upload one file (e.g. an express-fileupload object with `name` and
 * `data`) to S3 and resolve with the upload result.
 *
 * The previous version passed a callback to s3.upload() and threw from
 * inside it: an exception thrown in an SDK callback cannot be caught by
 * the caller, and the async function resolved before the upload even
 * finished. Returning the SDK promise fixes both problems.
 *
 * @param {{name: string, data: Buffer}} file - file to upload
 * @returns {Promise<Object>} resolves with the S3 upload response
 */
const uploadFile = async (file) => {
const s3 = new AWS.S3({
accessKeyId: "<AWSS3_AccessKey>",
secretAccessKey: "<AWSS3_SecretKey>",
region: "ap-south-1"
});
const params = {
Bucket: "test123", // pass your bucket name
Key: file.name, //filename
Body: file.data, //data
};
// .promise() makes failures reject the returned promise so callers
// can await / catch them.
return s3.upload(params).promise();
};
var files = [];
var fileKeys = Object.keys(req.files);
fileKeys.forEach(function(key) {
var file = req.files[key];
files.push(file.name);
// Fire-and-forget as before, but attach a catch so a failed upload
// cannot become an unhandled promise rejection.
uploadFile(file).catch(function(err) { console.error(err); });
});
Your AWS CLI config file is not configured properly. The location of the config file is
~/.aws/config on Linux, macOS, or Unix, or C:\Users\USERNAME\.aws\config on Windows.
You need to set up this file before you use any SDK to call AWS services. I'm posting a link below that will guide you through setting up the AWS CLI on different operating systems.
Setup AWS CLI
My problem
I am writing a simple js function that reads some information from AWS CloudWatch Logs.
Following the answer at Configuring region in Node.js AWS SDK, and the AWS nodejs SDK documentation, I came up with the following:
Code
var AWS = require('aws-sdk');

console.log(AWS.config.region) // undefined until a region is configured
AWS.config.region = 'eu-central-1' // Define the region with dot notation
console.log(AWS.config.region) // eu-central-1
AWS.config.update({region:'eu-central-1'}); // Another way to update
console.log(AWS.config.region) // eu-central-1

// The client must be constructed AFTER the region is set (or be given
// one explicitly): a v2 service object copies the global configuration
// at construction time, so creating it before configuring the region
// is what produced the "Missing region in config" ConfigError.
var cloudwatchlogs = new AWS.CloudWatchLogs();

var params = {
limit: 0,
// logGroupNamePrefix: 'STRING_VALUE',
// nextToken: 'STRING_VALUE'
};
cloudwatchlogs.describeLogGroups(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
Output and error
undefined
eu-central-1
eu-central-1
{ ConfigError: Missing region in config
at Request.VALIDATE_REGION (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/event_listeners.js:91:45)
at Request.callListeners (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at callNextListener (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/sequential_executor.js:95:12)
at /Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/event_listeners.js:85:9
at finish (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:315:7)
at /Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:333:9
at SharedIniFileCredentials.get (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/credentials.js:126:7)
at getAsyncCredentials (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:327:24)
at Config.getCredentials (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/config.js:347:9)
at Request.VALIDATE_CREDENTIALS (/Users/adam/binaris/adam-test-sls/node_modules/aws-sdk/lib/event_listeners.js:80:26)
message: 'Missing region in config',
code: 'ConfigError',
time: 2017-07-11T09:57:55.638Z } ...
Environment
The code is running locally under node v8.1.2.
My question
How can I correctly configure the region in the AWS js SDK?
Addendum
Opened an issue on github and got some response.
Or, alternatively, you can specify that when creating your cloudwatch object:
var AWS = require('aws-sdk');
// Passing the region to the constructor scopes it to this client only,
// leaving the global AWS.config untouched.
var cloudwatchlogs = new AWS.CloudWatchLogs({region: 'eu-central-1'});
Write code in following way it will work.
var AWS = require('aws-sdk');
// assign AWS credentials here in following way:
// NOTE(review): these are placeholder values; never commit real keys.
AWS.config.update({
accessKeyId: 'asdjsadkskdskskdk',
secretAccessKey: 'sdsadsissdiidicdsi',
region: 'eu-central-1'
});
// The client picks up the global config set above because it is
// created afterwards; apiVersion pins the CloudWatch Logs API.
var cloudwatchlogs = new AWS.CloudWatchLogs({apiVersion: '2014-03-28'});
Use following.
// Set only the region on the global config; credentials still come
// from the environment or the shared credentials file.
AWS.config.update({region: 'eu-central-1'});
You can find more information in following link.
http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-region.html
I have a Node 4.3 Lambda function in AWS. I want to be able to write a text file to S3 and have read many tutorials about how to integrate with S3. However, all of them are about how to call Lambda functions after writing to S3.
How can I create a text file in S3 from Lambda using node? Is this possible? Amazons documentation doesn't seem to cover it.
Yes it is absolutely possible!
var AWS = require('aws-sdk');

/**
 * Write `data` to s3://<bucket>/<key>.
 *
 * Errors and responses are logged as before, but the underlying
 * promise is now returned so callers (e.g. a Lambda handler) can await
 * completion instead of returning before the write lands.
 *
 * @param {string} bucket - target bucket name
 * @param {string} key - object key to create or overwrite
 * @param {Buffer|string} data - object body
 * @returns {Promise<Object>} resolves with the putObject response
 */
function putObjectToS3(bucket, key, data){
var s3 = new AWS.S3();
var params = {
Bucket : bucket,
Key : key,
Body : data
}
return s3.putObject(params).promise()
.then(function(data) {
console.log(data); // successful response
return data;
})
.catch(function(err) {
console.log(err, err.stack); // an error occurred
throw err; // propagate so awaiting callers see the failure
});
}
Make sure that you give your Lambda function the required write permissions to the target s3 bucket / key path by selecting or updating the IAM Role your lambda executes under.
IAM Statement to add:
{
"Sid": "Stmt1468366974000",
"Effect": "Allow",
"Action": "s3:*",
"Resource": [
"arn:aws:s3:::my-bucket-name-goes-here/optional-path-before-allow/*"
]
}
Further reading:
AWS JavaScript SDK
The specific "Put Object" details
After a long time of silent failures with 'Task timed out after X' and no useful error message, I went back to the beginning — to Amazon's default template example — and that worked!
> Lambda > Functions > Create function > Use a blueprints > filter: s3.
Here is my tweaked version of amazon example:
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });

/**
 * Serialize `fileData` as JSON and upload it under the key `fileName`.
 *
 * The previous version caught the error, logged it, and resolved with
 * `undefined`, so a failed upload looked like a success to the caller
 * (and to Lambda). The error is still logged, but now rethrown.
 *
 * @param {*} fileData - any JSON-serializable payload
 * @param {string} fileName - S3 object key
 * @returns {Promise<Object>} the s3.upload response
 * @throws rethrows any upload error after logging it
 */
async function uploadFileOnS3(fileData, fileName){
const params = {
Bucket: "The-bucket-name-you-want-to-save-the-file-to",
Key: fileName,
Body: JSON.stringify(fileData),
};
try {
const response = await s3.upload(params).promise();
console.log('Response: ', response);
return response;
} catch (err) {
console.log(err);
throw err; // propagate so the caller / Lambda sees the failure
}
}
IAM Statement for serverless.com - Write to S3 to specific bucket
# serverless.com service config: grants the function's IAM role write
# access to one bucket while explicitly denying deletes.
service: YOURSERVICENAME
provider:
name: aws
# NOTE(review): nodejs8.10 is long past end-of-life — upgrade the
# runtime when possible.
runtime: nodejs8.10
stage: dev
region: eu-west-1
timeout: 60
iamRoleStatements:
# Allow writes to any key in the bucket...
- Effect: "Allow"
Action:
- s3:PutObject
Resource: "**BUCKETARN**/*"
# ...but explicitly deny deletes.
- Effect: "Deny"
Action:
- s3:DeleteObject
Resource: "arn:aws:s3:::**BUCKETARN**/*"
You can upload file on s3 using
aws-sdk
If you are using IAM user then you have to provide access key and secret key and make sure you have provided necessary permission to IAM user.
// Configure the SDK credentials and bind a default bucket to the
// client so upload() calls only need a Key and Body.
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: "ACCESS_KEY",secretAccessKey: 'SECRET_KEY'});
var s3bucket = new AWS.S3({params: {Bucket: 'BUCKET_NAME'}});

// Upload `fileData` under the key `fileName` into the client's
// default bucket, logging the outcome either way.
function uploadFileOnS3(fileName, fileData){
var uploadParams = { Key: fileName, Body: fileData };
s3bucket.upload(uploadParams, function (err, res) {
if (err) {
console.log("Error in uploading file on s3 due to "+ err)
} else {
console.log("File successfully uploaded.")
}
});
}
Here I temporarily hard-coded AWS access and secret key for testing purposes. For best practices refer to the documentation.
One more option (export file as multipartFormFata):
React > Node.js (AWS Lambda) > S3 Bucket
https://medium.com/@mike_just_mike/aws-lambda-node-js-export-file-to-s3-4b35c400f484
Using node js I'm trying to list the buckets I have in my AWS S3 by following this basic examples.
http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-examples.html
My code looks like this, and is run from localhost.
// Credentials come from a local config module rather than the shared
// credentials file.
var AWS = require("aws-sdk"),
con = require('./../lib/config.js');
var s3 = new AWS.S3({
accessKeyId: con.fig.AWSAccessKeyId,
secretAccessKey: con.fig.AWSSecretKey,
});
// NOTE(review): `err` is ignored here — when the call fails, `data`
// is null and the actual cause is silently dropped; log `err` too.
s3.listBuckets(function(err, data) {
console.log(data);
});
But data is null.
What have I missed?
Is there some permission to set? I have set the permission AmazonS3FullAccess on the user.
I want to be able to upload files from a website to a S3 bucket.
Try this. The documentation says if err is null then the request was successful.
// Inspect `err` first: the SDK reports failures through the first
// callback argument, and `data` is only populated when `err` is null.
s3.listBuckets(function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#listBuckets-property
ok load your config using the following
// Load the SDK and the local credentials module, push the credentials
// into the global config, then create the client afterwards.
var AWS = require("aws-sdk");
var con = require('./../lib/config.js');

AWS.config.update({
accessKeyId: con.fig.AWSAccessKeyId,
secretAccessKey: con.fig.AWSSecretKey
})

var s3 = new AWS.S3();

// List every bucket owned by these credentials, logging errors too.
s3.listBuckets(function (err, data) {
if (err) {
console.log(err);
} else {
console.log (data)
}
});
I'm using AWS SDK for Node.js to create a folder or key on s3. I searched on google, but I got nothing. Does anybody know how can I create a folder under my bucket with AWS SDK for Node.js?
and how can you check if this folder exists in your bucket already?
If you use console.aws.amazon.com, you can create a folder in your bucket easily. It seems I can't figure out how to create one with the AWS SDK for Node.js.
S3 is not your typical file system. It's an object store. It has buckets and objects. Buckets are used to store objects, and objects comprise data (basically a file) and metadata (information about the file). When compared to a traditional file system, it's more natural to think of an S3 bucket as a drive rather than as a folder.
You don't need to pre-create a folder structure in an S3 bucket. You can simply put an object with the key cars/ford/focus.png even if cars/ford/ does not exist.
It's valuable to understand what happens at the API level in this case:
the putObject call will create an object at cars/ford/focus.png but it will not create anything representing the intermediate folder structure of cars/ or cars/ford/.
the actual folder structure does not exist, but is implied through delimiter=/ when you call listObjects, returning folders in CommonPrefixes and files in Contents.
you will not be able to test for the ford sub-folder using headObject because cars/ford/ does not actually exist (it is not an object). Instead you have 2 options to see if it (logically) exists:
call listObjects with prefix=cars/ford/ and find it in Contents
call listObjects with prefix=cars/, delimiter=/ and find it in CommonPrefixes
It is possible to create an S3 object that represents a folder, if you really want to. The AWS S3 console does this, for example. To create myfolder in a bucket named mybucket, you can issue a putObject call with bucket=mybucket, key=myfolder/, and size 0. Note the trailing forward slash.
Here's an example of creating a folder-like object using the awscli:
aws s3api put-object --bucket mybucket --key cars/ --content-length 0
In this case:
the folder is actually a zero-sized object whose key ends in /. Note that if you leave off the trailing / then you will get a zero-sized object that appears to be a file rather than a folder.
you are now able to test for the presence of cars/ in mybucket by issuing a headObject call with bucket=mybucket and key=cars/.
Finally, note that your folder delimiter can be anything you like, for example +, because it is simply part of the key and is not actually a folder separator (there are no folders). You can vary your folder delimiter from listObjects call to call if you like.
The code from @user2837831 doesn't seem to work anymore, probably because of the new version of the JavaScript SDK. So I am adding here the version of the code that I am using to create a folder inside a bucket using Node.js. This works with the 2.1.31 SDK. What is important is the '/' at the end of the Key value in params — with that, it knows you are trying to create a folder and not a file.
var AWS = require('aws-sdk');
AWS.config.region = 'us-east-1';
var s3Client = new AWS.S3();
// The trailing '/' in Key is what makes S3 (and the console) present
// the object as a folder; the Body content itself is irrelevant.
var params = { Bucket: 'your_bucket_goes_here', Key: 'folderInBucket/', ACL: 'public-read', Body:'body does not matter' };
s3Client.upload(params, function (err, data) {
if (err) {
console.log("Error creating the folder: ", err);
} else {
console.log("Successfully created a folder on S3");
}
});
This is really straightforward you can do it by using the following, just remember the trailing slash.
var AWS = require("aws-sdk");
var s3 = new AWS.S3();

// A zero-length object whose key ends in '/' is displayed as a folder
// in the S3 console.
var params = {
Bucket: "mybucket",
Key: "mykey/"
};
// The original left this promise floating, so any failure surfaced as
// an unhandled promise rejection. Attach a handler that logs it.
s3.putObject(params).promise()
.catch(function (err) {
console.log(err, err.stack);
});
I find that we do not need an explicit directory creation call anymore.
Just the following works for me and automatically creates a directory hierarchy as I need.
// Template snippet: replace the <<...>> tokens and the
// variable-with-dir-* names with real values — as written this is
// pseudo-code and will not parse.
var userFolder = 'your_bucket_name' + '/' + variable-with-dir-1-name + '/' + variable-with-dir-2-name;
// IMPORTANT : No trailing '/' at the end of the last directory name
AWS.config.region = 'us-east-1';
AWS.config.update({
accessKeyId: 'YOUR_KEY_HERE',
secretAccessKey: 'your_secret_access_key_here'
});
// NOTE(review): the Bucket value here includes path segments
// ("bucket/dir1/dir2"); presumably the extra segments end up as a key
// prefix — verify against the SDK version in use.
var bucket = new AWS.S3({
params: {
Bucket: userFolder
}
});
var contentToPost = {
Key: <<your_filename_here>>,
Body: <<your_file_here>>,
ContentEncoding: 'base64',
ContentType: <<your_file_content_type>>,
ServerSideEncryption: 'AES256'
};
// putObject with a progress listener chained onto the request object.
bucket.putObject(contentToPost, function (error, data) {
if (error) {
console.log("Error in posting Content [" + error + "]");
return false;
} /* end if error */
else {
console.log("Successfully posted Content");
} /* end else error */
})
.on('httpUploadProgress',function (progress) {
// Log Progress Information
console.log(Math.round(progress.loaded / progress.total * 100) + '% done');
});
In console, the link generated first would be the bucket created path and second would be the folder structure.
var AWS = require("aws-sdk");
var path = require('path')

// Set the region and (test-only) credentials before creating clients.
AWS.config.update({
region: "us-east-2",
accessKeyId: "your aws acces id ",
secretAccessKey: "your secret access key"
});

// Was `s3 = new AWS.S3();` — the missing `var` created an implicit
// global (a ReferenceError in strict mode).
var s3 = new AWS.S3();

var bucketParams = {
Bucket: "imageurrllll",
ACL: "public-read"
};

// Create the bucket first, then upload an object whose key contains a
// '/' so the console displays a folder structure.
s3.createBucket(bucketParams, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Success", data.Location);
var folder_name = 'root_folder'
//this is for local folder data path
var filePath = "./public/stylesheets/user.png"
//var child_folder='child'
var date = Date.now()
var imgData = `${folder_name}_${date}/` +
path.basename(filePath);
var params = {
Bucket: 'imageurrllll',
Body: '', //here you can give image data url from your local directory
Key: imgData,
ACL: 'public-read'
};
//in this section we are creating the folder structre
// The callback was needlessly `async`: nothing in it awaits.
s3.upload(params, function(err, aws_uploaded_url) {
//handle error
if (err) {
console.log("Error", err);
}
//success
else {
console.log("Data Uploaded in:", aws_uploaded_url.Location)
}
})
}
});