Can't upload images in Node.js using aws-sdk

I've tried using aws-sdk and knox, and I get status code 301 when trying to upload images, with the message: 'The bucket you are attempting to access must be addressed using the specified endpoint. Please send all future requests to this endpoint.' The same upload works in PHP.
var AWS = require('aws-sdk');
var fs = require('fs');

AWS.config.loadFromPath(__dirname + '/config/config.json');

// 'source' and 'callback' are assumed to come from the surrounding function.
fs.readFile(source, function (err, data) {
  var s3 = new AWS.S3();
  s3.client.createBucket({Bucket: 'mystuff'}, function() {
    var d = {
      Bucket: 'mystuff',
      Key: 'img/test.jpg',
      Body: data,
      ACL: 'public-read'
    };
    s3.client.putObject(d, function(err, res) {
      if (err) {
        console.log("Error uploading data: ", err);
        callback(err);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
        callback(null, res);
      }
    });
  });
});

I actually solved this problem. You have to set a region in your config; since my bucket was in "US Standard", I left the region blank and it worked.
config.json -
{ "accessKeyId": "secretKey", "secretAccessKey": "secretAccessKey", "region": ""}
Go to the S3 management console, select one of your files, and click on Properties -> look at the file link.
US Standard:
file link: https://s3.amazonaws.com/yourbucket/
host in your console window: yourbucket.s3.amazonaws.com/

us-west-1:
file link: https://s3-us-west-1.amazonaws.com/yourbucket/
host in your console window: yourbucket.s3-us-west-1.amazonaws.com/
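For example, a minimal sketch (the region value and config path are placeholders) that points the client at the region matching the endpoint shown in the console:

var AWS = require('aws-sdk');
AWS.config.loadFromPath(__dirname + '/config/config.json');

// This example assumes a bucket in us-west-1; for a US Standard
// bucket, leave the region blank as described above.
AWS.config.update({ region: 'us-west-1' });
var s3 = new AWS.S3();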

Did you try .send()?
I can upload to S3 with the code below.
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/AWSRequest.html
var s3object = {Bucket: 'mystuff', Key: name, Body: data['data']};
s3.client.putObject(s3object)
  .done(function(resp) {
    console.log("Successfully uploaded data");
  })
  .fail(function(resp) {
    console.log(resp);
  })
  .send();

I have the same problem with the new SDK and solved it by setting the endpoint option explicitly.
Reference : http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor_details
Snippet:
var AWS = require('aws-sdk');
var s3 = new AWS.S3({ endpoint: 'https://s3-your-region-varies.amazonaws.com' }),
    myBucket = 'your-bucket-name';
var params = {Bucket: myBucket, Key: 'myUpload', Body: "Test"};
s3.putObject(params, function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log("Successfully uploaded data to " + myBucket + "/myUpload");
  }
});
Alternatively, you can solve this by setting the region in your config file; you just have to be precise about your region name, as in the sketch below.
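For example, a sketch of what config.json might look like for a bucket in us-west-1 (the credentials are placeholders):

{ "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "region": "us-west-1" }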

Related

How do I set my images uploaded to S3 with nodejs script to display instead of download?

I have a Node.js script that uploads files to AWS S3 through the command line. The problem I'm having is that when I try to view the file in the browser, it automatically downloads instead.
I have done some research, and most other posts point to the headers, but I have verified the headers are correct (image/png).
Additionally, when I upload the same file through the AWS console (logged into AWS), I am able to view the file within the browser.
var fs = require('fs');
var path = require('path');
var AWS = require('aws-sdk');

AWS.config.update({region: myRegion});
var s3 = new AWS.S3({apiVersion: '2006-03-01'});

var uploadParams = {
  Bucket: process.argv[2],
  Key: '',  // Key set below
  Body: '', // Body set below after createReadStream
  ContentType: 'image/jpeg',
  ACL: 'public-read',
  ContentDisposition: 'inline'
};

var file = process.argv[3];
var fileStream = fs.createReadStream(file);
fileStream.on('error', function(err) {
  console.log('File Error', err);
});
uploadParams.Body = fileStream;
uploadParams.Key = path.basename(file);

s3.putObject(uploadParams, function(errBucket, dataBucket) {
  if (errBucket) {
    console.log("Error uploading data: ", errBucket);
  } else {
    console.log(dataBucket);
  }
});
The upload succeeds, but I can't view the file in the browser because it downloads automatically.
You have to specify the Content-Disposition as part of the request headers; you cannot specify it as part of the request parameters. Set it in the headers explicitly, as below.
var params = {Bucket: "bucketname", Key: "keyName", Body: "actualData"};
s3.putObject(params)
  .on('build', function(req) {
    req.httpRequest.headers['Content-Type'] = 'application/pdf'; // whatever you want
    req.httpRequest.headers['Content-Disposition'] = 'inline';
  })
  .send(function(err, data) {
    if (err) {
      console.log(err);
      return res.status(400).json({success: false});
    } else {
      console.log(data);
      return res.status(200).json({success: true});
    }
  });
Code to upload objects/images to S3:
module.exports = function(app, models) {
  var fs = require('fs');
  var AWS = require('aws-sdk');
  var accessKeyId = "ACCESS KEY HERE";
  var secretAccessKey = "SECRET KEY HERE";
  AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
  });
  var s3 = new AWS.S3();
  app.post('/upload', function(req, res) {
    var params = {
      Bucket: 'bucketname',
      Key: 'keyname.png',
      Body: "GiveSomeRandomWordOraProperBodyIfYouHave"
    };
    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  });
}
The above code will make sure the object has been uploaded to S3. You can see it listed in the S3 bucket in the browser, but you can't view its contents there.
You cannot view items within S3 itself; S3 is a storage box, so you can only upload and download the elements in it. If you need to view the contents, you have to download the file and view it in the browser or any viewer of your choice. If you simply need to list the objects in S3, use the code below.
Code to list objects of S3:
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: 'mykey', secretAccessKey: 'mysecret', region: 'myregion'});
var s3 = new AWS.S3();
var params = {
  Bucket: 'bucketName',
  Delimiter: '/',
  Prefix: 's/prefix/objectPath/'
};
s3.listObjects(params, function (err, data) {
  if (err) throw err;
  console.log(data);
});
Use the S3 list to enumerate the elements of S3; this way you can view them. Create a hyperlink for each of the listed items and make it point to the S3 download URL, as in the sketch below. This way you can view a file in the browser and also download it if you need to.
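A hedged sketch of that idea, assuming SDK v2's getSignedUrl (the bucket name and prefix are placeholders): list the objects, then emit a time-limited download link for each one.

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

s3.listObjects({ Bucket: 'bucketName', Prefix: 's/prefix/objectPath/' }, function (err, data) {
  if (err) throw err;
  data.Contents.forEach(function (obj) {
    // getSignedUrl produces a URL the browser can open directly.
    var url = s3.getSignedUrl('getObject', {
      Bucket: 'bucketName',
      Key: obj.Key,
      Expires: 3600 // seconds the link stays valid
    });
    console.log('<a href="' + url + '">' + obj.Key + '</a>');
  });
});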
In case you need to view the contents via Node.js, use the code below to load the image as if you were loading it from a remote URL.
Code to download contents:
var fs = require('fs'),
    request = require('request');

var download = function(uri, filename, callback) {
  request.head(uri, function(err, res, body) {
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};

download('http://s3/URL', 'name.png', function() {
  console.log('done');
});
Code to load an image into a buffer:
const request = require('request');
let url = 'http://s3url/image.png';
request({ url, encoding: null }, (err, resp, buffer) => {
  // typeof buffer === 'object'
  // The buffer now contains the image data; use it as needed.
});
Use the above to load the image into a buffer. Once it's in a buffer, you can manipulate it the way you need. The above code won't write the image to disk, but it helps you manipulate the image from S3 using a buffer.
The linked documentation contains specific Node.js code examples for uploading and manipulating S3 objects; use it for reference.
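For example, a short sketch of such a manipulation (using Jimp here is my own choice, not part of the original code): read the buffer, transform the image, and write the result.

const Jimp = require('jimp');

// 'buffer' is the image data fetched by the request above.
Jimp.read(buffer, (err, image) => {
  if (err) throw err;
  image
    .greyscale()               // any manipulation you need
    .write('manipulated.png'); // or use getBuffer(...) to re-upload to S3
});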

Combine Jimp-module with Amazon AWS S3 in NodeJS

I want to achieve the following:
Upload a picture to my backoffice.
Resize this picture to reduce its size.
Upload this picture to Amazon AWS S3.
I couldn't figure out how to store the picture directly to Amazon AWS S3, so I first upload it to my backoffice.
My code:
router.post('/fileupload', function(req, res) {
  // Prepare data
  var file = req.files.upfile;
  var uploadpath = 'profilepicture/' + req.session.user + '.jpg';
  var AWS = require('aws-sdk');
  var fs = require('fs');
  var Jimp = require('jimp');
  AWS.config.update({
    accessKeyId: **,
    secretAccessKey: **
  });
  // Upload file
  file.mv(uploadpath, function(err) {
    if (err) throw err;
    // Read in the file, resize it, store to S3
    Jimp.read(uploadpath, function (err, data) {
      if (err) throw err;
      // Reduce size
      data.resize(400, Jimp.AUTO).quality(100).write(uploadpath);
      var s3 = new AWS.S3();
      var stream = fs.createReadStream(uploadpath);
      s3.putObject({
        Bucket: bucketAmazon,
        Key: req.session.user + '.jpg',
        ContentType: 'image/jpg',
        Body: stream,
        ContentEncoding: 'base64',
        ACL: 'public-read',
        Metadata: {
          'Content-Type': 'image/jpeg'
        }
      }, function (resp) {
        console.log(arguments);
        console.log('Successfully uploaded package.');
        return res.render('settings', {
          user: req.session.user,
          logged: true,
          wrongPicture: false
        });
      });
    });
  });
});
However, when I run this code, the file is uploaded to my backoffice and cropped correctly, but in Amazon AWS S3 it shows a size of '0 B'.
If I remove the line data.resize(400, Jimp.AUTO).quality(100).write(uploadpath), then the file is uploaded correctly to Amazon AWS S3, but of course the picture is not reduced.
You can use the write callback to guarantee that the file is written before the upload starts.
...
data.resize(400, Jimp.AUTO).quality(100).write(uploadpath, () => {
  // upload-to-S3 code goes here, once the file is fully written
});
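Alternatively, a sketch (assuming a Jimp version that supports getBuffer) that skips the intermediate file entirely and hands the resized image to S3 as a buffer:

Jimp.read(uploadpath, function (err, image) {
  if (err) throw err;
  // Resize in memory and get the result as a buffer instead of writing to disk.
  image.resize(400, Jimp.AUTO).quality(100).getBuffer(Jimp.MIME_JPEG, function (err, buffer) {
    if (err) throw err;
    s3.putObject({
      Bucket: bucketAmazon,
      Key: req.session.user + '.jpg',
      ContentType: 'image/jpeg',
      Body: buffer,
      ACL: 'public-read'
    }, function (err, data) {
      if (err) throw err;
      console.log('Successfully uploaded package.');
    });
  });
});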

Upload a file to AWS S3 using node js and Postman

I am new to AWS and trying to figure out how to upload a file using the AWS S3 API (http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html), which is incorporated into my own API.
I can create a bucket, and get a list of all the buckets- however I am struggling with a file upload.
This is my code:
router.post('/upload', function(req, res, next) {
  var params = {
    Bucket: req.body.bucketName,
    Key: req.body.key,
    Body: req.body.body
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      return next(err);
    } else {
      res.json(data);
    }
  });
});
So when I run my server, I make a POST request using Postman to localhost:8080/upload, attaching a file and setting the key and body, but I think I'm doing this part wrong.
Question is:
Do I understand the following correctly: Bucket = the bucket name I want to upload to, Key = the file name, and Body = the file contents?
If yes, how do I get this to upload to the S3 bucket? With the current code I get a file added to S3 called 'text.txt' with the contents 'heello', rather than my 'test.txt' file.
You are trying to upload a file, correct? Then you should use a multipart/form-data content type, and in the body you can point to your file buffer.
In my case, I use it with swagger:
upload: (req, res) => {
  const params = {
    Bucket: 'bucket-name',
    Key: req.swagger.params.file.value.originalname,
    ACL: 'public-read',
    Body: req.swagger.params.file.value.buffer
  };
  s3.putObject(params, function(err, data) {
    if (err) {
      console.log('Error uploading image: ', err);
      res.status(500).json().end();
    } else {
      res.status(200).json('File is uploaded').end();
    }
  });
}
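If you are not using swagger, a minimal sketch with Express and multer (the 'file' field name and bucket are assumptions) does the same thing: multer's memory storage hands you the buffer and original file name from the multipart/form-data request.

var express = require('express');
var multer = require('multer');
var AWS = require('aws-sdk');

var app = express();
var s3 = new AWS.S3();

// Keep the upload in memory so its buffer can go straight to S3.
var upload = multer({ storage: multer.memoryStorage() });

// In Postman: send form-data with a field named 'file' holding the file.
app.post('/upload', upload.single('file'), function (req, res) {
  s3.putObject({
    Bucket: 'bucket-name',      // placeholder
    Key: req.file.originalname, // the original file name becomes the key
    Body: req.file.buffer       // the raw file contents
  }, function (err, data) {
    if (err) return res.status(500).json(err);
    res.json(data);
  });
});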

NodeJS: Uploading PDF to S3 via Knox; putFile returns 505

I'm trying to upload a PDF to an S3 bucket using the Knox library, but I keep getting 505 errors and the PDFs won't save. My code:
// all of this works well
var knox = require('knox');
var client = knox.createClient(require('../path/to/config.js').knox);

client.putFile('tmp/file', '/prefix/key', function(err, res) {
  if (err) {
    console.log("Error PUTing file in S3:", err);
  }
  console.log("S3 RESPONSE:", res.statusCode); // returns 505
});
Anyone have any insight into what I'm doing wrong? I've also tried setting my own headers using client.put(..), but I got the same 505 response.
Two possible reasons:
1) If this is your complete code, then you forgot to enter the key, secret, and bucket.
var client = knox.createClient({
    key: '<api-key-here>'
  , secret: '<secret-here>'
  , bucket: 'learnboost'
});
2) There is a space in the file name that you are trying to upload.
This isn't an answer per se, and I'm still unsure about the 505 response above, but the AWS SDK that Amazon puts out works great if anyone is having similar issues with Knox. The above just becomes:
var aws = require('aws-sdk');
var fs = require('fs');
aws.config.loadFromPath('./path/to/config.json');

var s3 = new aws.S3();
var params = {
  Bucket: 'your-bucket',
  Key: 'your-key',
  Body: fs.readFileSync('/path/to/file.pdf')
};
s3.putObject(params, function(err, data) {
  if (err) {
    console.log("Error PUTing file:", err);
  }
  console.log("S3 RESPONSE:", data);
});

AWS node.js not creating S3 bucket?

I'm trying to use the basic tutorial to create an S3 bucket as follows
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./myawsconfig.json');
AWS.config.update({region: 'eu-west-1'});

var s3 = new AWS.S3();
s3.client.createBucket({Bucket: 'pBucket'}, function() {
  var data = {Bucket: 'pBucket', Key: 'myKey', Body: 'Hello!'};
  s3.client.putObject(data, function(err, data) {
    if (err) {
      console.log("Error uploading data: ", err);
    } else {
      console.log("Successfully uploaded data to myBucket/myKey");
    }
  });
});
But I'm receiving the following error
node createbucket.js
Error uploading data: { [NoSuchBucket: The specified bucket does not exist]
  message: 'The specified bucket does not exist',
  code: 'NoSuchBucket',
  name: 'NoSuchBucket',
  statusCode: 404,
  retryable: false }
I just ran into this problem; apparently the Node.js tutorial code doesn't run as written. I got an error saying the object doesn't have a createBucket method.
This worked:
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./credentials.json');

// Set your region for future requests.
AWS.config.update({region: 'us-east-1'});

// Create a bucket and put something in it.
var s3 = new AWS.S3();
s3.client.createBucket({Bucket: 'hackathon-test'}, function() {
  var data = {Bucket: 'hackathon-test', Key: 'myKey', Body: 'Hello!'};
  s3.client.putObject(data, function(err, data) {
    if (err) {
      console.log("Error uploading data: ", err);
    } else {
      console.log("Successfully uploaded data to myBucket/myKey");
    }
  });
});
I had this issue and discovered that my API user didn't have permission to create the bucket.
Slightly more thorough error checking revealed this:
s3.client.createBucket({Bucket: 'someBucket'}, function(err) {
  if (err) {
    console.log("Error creating bucket: ", err);
  } else {
    console.log("Successfully created bucket 'someBucket'");
  }
  // ...
});
According to the AWS S3 bucket name restrictions, your bucket name shouldn't contain any uppercase letters, so 'pBucket' is invalid.
http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
The rules for DNS-compliant bucket names are:
- Bucket names must be at least 3 and no more than 63 characters long.
- Bucket names must be a series of one or more labels. Adjacent labels are separated by a single period (.). Bucket names can contain lowercase letters, numbers, and hyphens. Each label must start and end with a lowercase letter or a number.
- Bucket names must not be formatted as an IP address (e.g., 192.168.5.4).
- When using virtual hosted-style buckets with SSL, the SSL wildcard certificate only matches buckets that do not contain periods. To work around this, use HTTP or write your own certificate verification logic.
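A rough sketch of those rules as a validation helper (my own approximation, not an official check):

function isDnsCompliantBucketName(name) {
  if (name.length < 3 || name.length > 63) return false; // length rule
  if (/^\d+\.\d+\.\d+\.\d+$/.test(name)) return false;   // no IP-address form
  // Labels of lowercase letters, digits, and hyphens, separated by single
  // periods; each label must start and end with a letter or digit.
  return /^[a-z0-9]([a-z0-9-]*[a-z0-9])?(\.[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/.test(name);
}

console.log(isDnsCompliantBucketName('pBucket'));        // false (uppercase letter)
console.log(isDnsCompliantBucketName('hackathon-test')); // true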
A couple of pointers that I missed and someone may find useful:
If you set the region as part of the S3 object, e.g. var s3 = new AWS.S3({region: 'us-west-1'});, then the call will fail (in my experience).
You can therefore set the region via either:
a) AWS.config.update({ region: 'eu-west-1' });
b) as part of the params on createBucket:
s3.createBucket({
  Bucket: bucketName,
  CreateBucketConfiguration: {
    LocationConstraint: "eu-west-1"
  }
}, function (err) {
  // ...
});
Also, watch out for caps or underscores in the bucket name, as that took an hour of my life too (bucket names must be DNS-compliant).
