Uploading files to S3 using node.js

I'm creating a portal where users can select and upload single files from their PC to S3 on AWS.
Below is my server.js code:
app.post('/submit_doc', function(req, res){
  var FileName = req.body.fileName,
      Filedescription = req.body.filediscrip,
      InputFileName = req.body.inputfile;
  AWS.config.region = 'eu-west-1';
  var fileStream = fs.createReadStream(FileName);
  fileStream.on('error', function (err) {
    console.log("Error reading file: ", err);
    res.send(500);
  });
  fileStream.on('open', function () {
    var s3 = new AWS.S3();
    s3.putObject({
      Bucket: 'exampleassetcare.com',
      Key: 'reports/' + FileName,
      Body: fileStream
    }, function (err) {
      if (err) {
        console.log("Error uploading data: ", err);
        res.send(500);
      }
    });
  });
});
I get the error: No such file or directory.
Can someone please help?

If I'm understanding you correctly, the code you've posted runs on the server, but the inputs are provided by the client. If so, your server is trying to find a file locally based on a file path the client gave it, so the file won't exist on the server's filesystem.
If I were a malicious user and told your server to upload the path /etc/passwd, your server would go and expose the contents of that file (assuming it was a Linux system, permissive enough permissions, etc., but you get the idea).
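A common fix is to have the client send the file itself as multipart/form-data and let the server read the uploaded bytes, for example with the multer middleware. A minimal sketch, assuming multer and a form field named inputfile (both assumptions, not part of the original post), reusing the bucket and key prefix from the question:
var express = require('express');
var multer = require('multer');
var fs = require('fs');
var AWS = require('aws-sdk');

var app = express();
var upload = multer({ dest: 'tmp/' }); // multer stores the uploaded bytes in a temp file on the server

AWS.config.region = 'eu-west-1';

app.post('/submit_doc', upload.single('inputfile'), function (req, res) {
  // req.file.path is a real path on this server, unlike the client-supplied fileName
  var fileStream = fs.createReadStream(req.file.path);
  var s3 = new AWS.S3();
  s3.putObject({
    Bucket: 'exampleassetcare.com',
    Key: 'reports/' + req.file.originalname,
    Body: fileStream
  }, function (err) {
    if (err) {
      console.log('Error uploading data: ', err);
      return res.sendStatus(500);
    }
    res.sendStatus(200);
  });
});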

Change it to
var FileName = req.body.fileName,
    Filedescription = req.body.filediscrip,
    InputFileName = req.body.inputfile;
AWS.config.region = 'eu-west-1';
console.log(FileName);
var fileStream = fs.createReadStream(FileName);
and check that the logged path actually exists on the server; it looks like something is wrong with the path to the file.
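For a quick programmatic check, you could also verify that the path is readable before opening the stream. A small sketch (the 404 response is an assumption about how you want to report the failure):
fs.access(FileName, fs.constants.R_OK, function (err) {
  if (err) {
    // the path sent by the client does not exist (or is not readable) on this server
    console.log('Cannot read file:', FileName, err.code);
    return res.sendStatus(404);
  }
  // safe to call fs.createReadStream(FileName) and upload from here
});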

Related

Return Data After S3 Upload in Node

I finally have file uploads working through Node and the AWS SDK. There's just one thing missing that I haven't been able to crack yet: I need to get the URL of the newly uploaded file on S3 and save it to my database.
const s3 = new AWS.S3();
const fileContent = Buffer.from(req.files.listPDF.data, 'binary');
const params = {
  Bucket: 'my_bucket',
  Key: filename,
  Body: fileContent
};
s3.upload(params, function(err, data){
  if (err) {
    throw err;
  }
});
I'm guessing it's promise-related, but I haven't had success with "await" yet. The data parameter in the function has a "Location" attribute, which I need. Originally, I was trying to set a previously-declared var to it. However, it wasn't doing anything since the upload was not yet completed. If anyone's grappled with this and cracked the code, I'd really appreciate your thoughts!
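In the v2 SDK, s3.upload(params) returns a managed upload whose .promise() resolves with that data object, so one way to pick up Location is to await it inside an async handler. A minimal sketch reusing the names from the snippet above (the route path is an assumption):
app.post('/upload', async function (req, res) {
  const s3 = new AWS.S3();
  const params = {
    Bucket: 'my_bucket',
    Key: filename,
    Body: Buffer.from(req.files.listPDF.data, 'binary')
  };
  try {
    const data = await s3.upload(params).promise(); // resolves once the upload has finished
    // data.Location is the URL of the uploaded object; save it to the database here
    res.json({ url: data.Location });
  } catch (err) {
    console.log('Error uploading data: ', err);
    res.sendStatus(500);
  }
});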

How do I set my images uploaded to S3 with nodejs script to display instead of download?

I have a Node.js script that uploads files to AWS S3 from the command line. The problem I'm having is that when I try to view the file in the browser, it automatically downloads it.
I have done some research, and most other posts point to the headers, but I have verified the headers are correct (image/png).
Additionally, when I upload the same file through the AWS console (logged into AWS), I am able to view the file within the browser.
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');

AWS.config.update({region: myRegion});
var s3 = new AWS.S3({apiVersion: '2006-03-01'});

var uploadParams = {
  Bucket: process.argv[2],
  Key: '', // Key set below
  Body: '', // Body set below after createReadStream
  ContentType: 'image/jpeg',
  ACL: 'public-read',
  ContentDisposition: 'inline'
};

var file = process.argv[3];
var fileStream = fs.createReadStream(file);
fileStream.on('error', function(err) {
  console.log('File Error', err);
});
uploadParams.Body = fileStream;
uploadParams.Key = path.basename(file);

s3.putObject(uploadParams, function(errBucket, dataBucket) {
  if (errBucket) {
    console.log("Error uploading data: ", errBucket);
  } else {
    console.log(dataBucket);
  }
});
The upload succeeds, but I'm unable to view the file in the browser because it automatically downloads.
You have to specify the Content-Disposition as part of the request headers; you cannot specify it as part of the request parameters. Set it in the headers explicitly, as below.
var params = { Bucket: "bucketname", Key: "keyName", Body: "actualData" };
s3.putObject(params)
  .on('build', function(req) {
    req.httpRequest.headers['Content-Type'] = 'application/pdf'; // whatever you want
    req.httpRequest.headers['Content-Disposition'] = 'inline';
  })
  .send(function(err, data) {
    if (err) {
      console.log(err);
      return res.status(400).json({ success: false });
    } else {
      console.log(data);
      return res.status(200).json({ success: true });
    }
  });
Code to upload objects/images to S3
module.exports = function(app, models) {
  var fs = require('fs');
  var AWS = require('aws-sdk');
  var accessKeyId = "ACCESS KEY HERE";
  var secretAccessKey = "SECRET KEY HERE";
  AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
  });
  var s3 = new AWS.S3();
  app.post('/upload', function(req, res){
    var params = {
      Bucket: 'bucketname',
      Key: 'keyname.png',
      Body: "GiveSomeRandomWordOraProperBodyIfYouHave"
    };
    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  });
};
The above code will make sure the object has been uploaded to S3. You can see it listed in the S3 bucket in the browser, but you can't view its contents there.
You cannot view items within S3 itself; S3 is a storage box, so you can only upload and download elements in it. If you need to view the contents, you have to download the object and view it in the browser or any viewer of your choice. If you simply need to list the objects in S3, use the code below.
Code to list objects of S3
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: 'mykey', secretAccessKey: 'mysecret', region: 'myregion'});
var s3 = new AWS.S3();
var params = {
  Bucket: 'bucketName',
  Delimiter: '/',
  Prefix: 's/prefix/objectPath/'
};
s3.listObjects(params, function (err, data) {
  if (err) throw err;
  console.log(data);
});
Use the S3 list call to list the elements in S3; this way you can see what is there. Create a hyperlink for each listed item and point it at the object's S3 download URL, so you can view it in the browser and also download it if you need to.
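If the bucket is not public, one way to build those hyperlinks is with pre-signed URLs. A minimal sketch on top of the listing above (the one-hour expiry is an assumption):
s3.listObjects({ Bucket: 'bucketName', Prefix: 's/prefix/objectPath/' }, function (err, data) {
  if (err) throw err;
  var links = data.Contents.map(function (obj) {
    // a time-limited URL that the browser can open directly
    return s3.getSignedUrl('getObject', { Bucket: 'bucketName', Key: obj.Key, Expires: 3600 });
  });
  console.log(links);
});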
If you need to view the contents via Node.js, use the code below to load the image as if you were loading it from a remote URL.
Code to download contents:
var fs = require('fs'),
    request = require('request');
var download = function(uri, filename, callback){
  request.head(uri, function(err, res, body){
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};
download('https://s3/URL', 'name.png', function(){
  console.log('done');
});
Code to load an image into a buffer:
const request = require('request');
let url = 'http://s3url/image.png';
request({ url, encoding: null }, (err, resp, buffer) => {
  // typeof buffer === 'object'
  // the buffer now contains the image data; use it here
});
Use the above to load the image into a buffer. Once it's in a buffer, you can manipulate it the way you need. The code above won't download the image to disk, but it lets you manipulate the S3 image in memory via the buffer.
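For example, if the goal is to display the image rather than link to it, the buffer can be turned into a data URI for an img tag. A sketch continuing from the request callback above (the image/png fallback is an assumption):
request({ url, encoding: null }, (err, resp, buffer) => {
  if (err) throw err;
  // embed the image inline instead of pointing at the S3 object
  const contentType = resp.headers['content-type'] || 'image/png';
  const dataUri = 'data:' + contentType + ';base64,' + buffer.toString('base64');
  console.log('<img src="' + dataUri + '">');
});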
Contains Example Code: the link has specific Node.js code examples for uploading and manipulating S3 objects; use it for reference.

Uploading Files in Node

I'm allowing users to upload files and would like to know if it is possible to allow only the user who uploaded a file to access it.
Currently I am just uploading these to a static folder (Public).
Simply put a route in front of the file (this will decrease performance a bit):
Save the file as filename.extension.userid in a non-public folder, for example a directory named upload.
Then put a route to catch requests on the upload directory:
app.get("/upload/:filename", function(req, res){
var fname = req.params.filename;
var userid = req.user.id; // RETRIEVE AUTHENTICATED USER ID
var fullname = fname+"."+userid;
fs.readFile(__dirname+"/../public/upload/"+fullname, 'base64', function(err, data){
if(err || !data) return res.status(404);
res.set('Content-Disposition', 'attachment; filename="'+fname+'"');
res.set('Content-Type', 'application/pdf'); // example for pdf
return res.end(data, 'base64');
});
});
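The answer above only shows the read side. A sketch of a matching upload route, assuming the express-fileupload middleware (so the file arrives as req.files.doc, where doc is an assumed form field name) and an authenticated req.user.id as in the route above:
var path = require('path');
var fileUpload = require('express-fileupload'); // assumption: any multipart parser would work here

app.use(fileUpload());

app.post('/upload', function (req, res) {
  var file = req.files.doc;                  // the uploaded file object
  var userid = req.user.id;                  // authenticated user id
  var storedName = file.name + '.' + userid; // filename.extension.userid
  var dest = path.join(__dirname, '..', 'upload', storedName); // the non-public upload directory
  file.mv(dest, function (err) {
    if (err) return res.sendStatus(500);
    res.sendStatus(200);
  });
});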
I couldn't find a reason why you were suggesting using another route to handle the upload. I was able to post to the same route. All that I needed to do was include the path and set it accordingly.
var path = require('path');
var fs = require('fs');
var folder = path.join(__dirname, '..', 'uploads');
var newPath = folder + '/' + callback.fileName;
fs.writeFile(newPath, data, function(err) {
  if (err) {
    console.log(err);
  } else {
    // do stuff
  }
});
After the file was uploaded I was then able to complete my task.

NodeJS: Uploading PDF to S3 via Knox; putFile returns 505

I'm trying to upload a PDF to an S3 bucket using the Knox library, but I keep getting 505 errors and the PDFs won't save. My code:
// all of this works well
var knox = require('knox');
var client = knox.createClient(require('../path/to/config.js').knox);

client.putFile('tmp/file', '/prefix/key',
  function(err, res) {
    if (err) {
      console.log("Error PUTing file in S3:", err);
    }
    console.log("S3 RESPONSE:", res.statusCode); // returns 505
  }
);
Anyone have any insight into what I'm doing wrong? I've also tried setting my own headers using client.put(..), but I got the same 505 response.
Two possible reasons:
1) If this is your complete code, then you forgot to enter the key, secret, and bucket:
var client = knox.createClient({
    key: '<api-key-here>'
  , secret: '<secret-here>'
  , bucket: 'learnboost'
});
2) There is a space in the file name you are trying to upload.
This isn't an answer per se, and I'm still unsure about the 505 response above, but the AWS SDK that Amazon puts out works great if anyone is having similar issues with Knox. The above just becomes:
var aws = require('aws-sdk');
var fs = require('fs');
aws.config.loadFromPath('./path/to/config.json');
var s3 = new aws.S3();

var params = {
  Bucket: 'your-bucket',
  Key: 'your-key',
  Body: fs.readFileSync('/path/to/file.pdf')
};
s3.putObject(params, function(err, data) {
  if (err) {
    console.log("Error PUTing file:", err);
  }
  console.log("S3 RESPONSE:", data);
});
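For larger PDFs it may be worth streaming instead of reading the whole file into memory with readFileSync. A sketch using s3.upload, which accepts a stream body (same config assumptions as above):
var params = {
  Bucket: 'your-bucket',
  Key: 'your-key',
  Body: fs.createReadStream('/path/to/file.pdf') // streamed, not buffered in memory
};
s3.upload(params, function(err, data) {
  if (err) {
    console.log("Error uploading file:", err);
  } else {
    console.log("Uploaded to", data.Location);
  }
});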

Can't upload images in nodejs using aws-sdk

I've tried using aws-sdk and knox, and I get status code 301 when trying to upload images, with the message 'The bucket you are attempting to access must be addressed using the specified endpoint. Please send all future requests to this endpoint.' The same upload works in PHP.
var AWS = require('aws-sdk');
var fs = require('fs');

AWS.config.loadFromPath(__dirname + '/config/config.json');
fs.readFile(source, function (err, data) {
  var s3 = new AWS.S3();
  s3.client.createBucket({Bucket: 'mystuff'}, function() {
    var d = {
      Bucket: 'mystuff',
      Key: 'img/test.jpg',
      Body: data,
      ACL: 'public-read'
    };
    s3.client.putObject(d, function(err, res) {
      if (err) {
        console.log("Error uploading data: ", err);
        callback(err);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
        callback(res);
      }
    });
  });
});
I actually solved this problem. In your config you have to have a region; since my bucket was "US Standard", I left my region blank and it worked.
config.json:
{ "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "region": "" }
Go to the S3 management console, select one of your files, and click on Properties, then look at the file link.
US Standard:
https://s3.amazonaws.com/yourbucket/
(host shown in your console window: yourbucket.s3.amazonaws.com/)
us-west-1:
https://s3-us-west-1.amazonaws.com/yourbucket/
(host shown in your console window: yourbucket.s3-us-west-1.amazonaws.com/)
Did you try .send()?
I can upload to S3 with the code below.
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/AWSRequest.html
var s3object = { Bucket: 'mystuff', Key: name, Body: data['data'] };
s3.client.putObject(s3object).done(function(resp){
  console.log("Successfully uploaded data");
}).fail(function(resp){
  console.log(resp);
}).send();
I had the same problem with the new SDK and solved it by setting the endpoint option explicitly.
Reference : http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor_details
Snippet:
var AWS = require('aws-sdk');
var s3 = new AWS.S3({ endpoint: 'https://s3-your-region-varies.amazonaws.com' }),
    myBucket = 'your-bucket-name';

var params = { Bucket: myBucket, Key: 'myUpload', Body: "Test" };
s3.putObject(params, function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log("Successfully uploaded data to " + myBucket + "/myUpload");
  }
});
Alternatively, you can solve this by setting the region in your config file; you just have to be precise about your region name.
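A sketch of that alternative, assuming the loadFromPath approach used earlier (the region value is an example and has to match where the bucket actually lives):
// config.json
// { "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "region": "eu-west-1" }
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./config.json'); // region now matches the bucket, so no 301 redirect

var s3 = new AWS.S3();
s3.putObject({ Bucket: 'mystuff', Key: 'img/test.jpg', Body: 'Test' }, function(err, data) {
  if (err) console.log(err);
  else console.log('Uploaded');
});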
