How to update file permission uploaded on bucket of AWS S3? - node.js

I am uploading videos to Amazon S3 using multipart upload in Node.js, following this reference.
The uploaded video by default is only accessible to owner.
I am uploading the file using the code below:
// Upload a thumbnail image to S3 with public-read access, then delete the
// local copy — but only once the upload has actually succeeded.
var params_to_upload_on_S3 = {
  ACL: 'public-read',        // object is readable by anyone once uploaded
  Bucket: imageBucket,
  Key: imgName + ".jpg",
  Body: imageBuffer,
  ContentType: 'image/jpeg'  // was 'Image/jpg' — not a valid MIME type
};
s3.putObject(params_to_upload_on_S3, function (err, res) {
  if (err) {
    // Fixed typo ("Thumnail"). Keep the local file around so the upload
    // can be retried.
    console.log('Thumbnail Upload Error' + err);
    return;
  }
  console.log('Thumbnail Successfully uploaded .' + JSON.stringify(res));
  // The original code ran fs.unlink unconditionally, deleting the local
  // file even when the upload had failed. Delete only on success.
  fs.unlink(imagePath, function () {
    console.log('File deleted from local storage');
  });
});

Related

How to upload pdf file to s3 in javascript?

I need to upload a PDF file from the UI (written in JavaScript) to Amazon S3. When I try to upload the file to S3, I get some Unicode-format text, and when I copy that text into Notepad — or any other text editor — I can see the human-readable text.
I am using pdfmake to get the content of the file and upload it using the getBuffer method.
// Build the pdfmake document definition and render it to an in-memory buffer.
var content = generatePDF(base64Img);
// NOTE(review): snippet is truncated here — the getBuffer callback body and
// its closing "});" are not shown.
pdfMake.createPdf(content).getBuffer(function (data) {//Code}
The code that i used to upload the file to S3.
// NOTE(review): this is the BROKEN version the question is demonstrating —
// data.toString() re-encodes the PDF's binary content as text, which is why
// the uploaded file renders as garbage. The corrected version below passes
// the raw Buffer instead.
var params = {
Bucket: bucketName,
Key: file_name,
Body: data.toString(),
ContentType: 'application/pdf'
}
s3.upload(params, function (err, data) {
if (err) {
// code
}else{
//code
}
// NOTE(review): snippet is truncated — the closing "});" is missing.
The file uploads successfully, but its content looks like this:
!
" #$%&!' ()*')+,
!
!
!
!
But when I paste it into another text editor, I get:
Date: 04/20/19
I solved the above problem by passing the data from getBuffer to S3.
In S3, I passed to a buffer like
// Wrap the raw payload in a Buffer so S3 stores the PDF's bytes untouched.
// Buffer.from() replaces the deprecated (and unsafe) `new Buffer()` constructor.
var data = Buffer.from(event.data, 'binary');
uploaded the data to S3.
// Corrected upload parameters: Body is the raw Buffer, so the PDF bytes
// reach S3 unmodified.
var params = {
Bucket: bucketName,
Key: file_name,
Body: data,
ContentType: 'application/pdf'
}
s3.upload(params, function (err, data) {
if (err) {
// code
}else{
//code
}
// NOTE(review): snippet is truncated — the closing "});" is missing.
To upload a file from client end directly to s3 bucket you can use multer-s3.
FROM CLIENT END:
// POST the form data to the server, reporting upload progress as a
// percentage of bytes sent, and surface the outcome via toast notifications.
const uploadConfig = {
  onUploadProgress: (progressEvent) => {
    const percentDone = progressEvent.loaded / progressEvent.total * 100;
    this.setState({ loaded: percentDone });
  },
};
axios
  .post(url, data, uploadConfig)
  .then((res) => {
    // then print response status
    toast.success('Upload Success!');
  })
  .catch((err) => {
    // then print response status
    toast.error('Upload Failed!');
  });
SERVER SIDE:
// Multer middleware that streams incoming files straight to S3 via multer-s3.
const upload = multer({
storage: multerS3({
s3: s3,
acl: 'public-read', // uploaded objects are publicly readable
bucket: BUCKET_NAME,
key: function (req, file, cb) {
// NOTE(review): UPLOADED_FILE_NAME is a shared module-level variable;
// concurrent uploads will race and overwrite each other's value before
// the route handler reads it — confirm and consider request-local state.
UPLOADED_FILE_NAME = Date.now() + '-' + file.originalname;
cb(null, UPLOADED_FILE_NAME);
}
})
}).array('file'); // accept multiple files under the "file" form field
// Route that runs the multer-s3 middleware and reports the stored file name.
app.post('/upload', function (req, res) {
  upload(req, res, function (err) {
    // A Multer error occurred when uploading.
    if (err instanceof multer.MulterError) {
      return res.status(500).json(err);
    }
    // An unknown error occurred when uploading.
    if (err) {
      return res.status(500).json(err);
    }
    // Everything went fine.
    console.log('REQUEST FILE IS', UPLOADED_FILE_NAME);
    return res.status(200).send(UPLOADED_FILE_NAME);
  });
});

I am not able to upload large files using multer-s3. It is not giving me any error as well.

I am not able to upload large files using multer-s3, and it is not giving me any error either. It simply doesn't upload the file — it doesn't even enter the callback — and the request times out. Is there any way to handle uploading large files to an S3 bucket?
I am using it like this:
// Single-file upload middleware for the "uploadFile" form field.
var uploadSingle = upload.single('uploadFile');
router.post('/uploadVideo',function(req,res,next){
uploadSingle(req,res,function(err){
// doesn't come here if the file is large
if(err){
//Error Response , Error while uploading Module PDF;
}
else{
//handling file upload
// success response
}
});
} // NOTE(review): snippet is truncated — the closing ")" of router.post is missing.
I had the same issue and after researching this page I found that I need to add contentLength as one of the parameters. Its value is for the length in bytes.
// Build the S3 client from credentials supplied via environment variables.
const s3Credentials = {
  accessKeyId: process.env.S3_ACCESS_KEY_ID,
  secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
};
const s3 = new AWS.S3(s3Credentials);
// Multer middleware backed by multer-s3. Setting contentLength is what makes
// large uploads succeed; its value is the maximum size in bytes.
var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'myBucket',
    contentType: multerS3.AUTO_CONTENT_TYPE, // detect each file's MIME type
    contentLength: 500000000,                // upload size cap, in bytes
    metadata: (req, file, cb) => {
      cb(null, { fieldName: file.fieldname });
    },
    key: (req, file, cb) => {
      cb(null, file.originalname);
    }
  })
});

// Accept up to 30 files under the "photos" field and confirm how many arrived.
router.post('/uploadToS3', upload.array('photos', 30), (req, res, next) => {
  res.send({ "message": 'Successfully uploaded ' + req.files.length + ' files!' });
});

Handle error when putting an object on amazon S3

I am very new to Node.js. I use this code to upload files to an Amazon S3 bucket.
// NOTE(review): the AWS SDK invokes this callback as (err, data); the single
// parameter named "res" here actually receives the *error* argument, so
// failures are never detected and "One file added" logs unconditionally.
s3.putObject({
Bucket: bucketName,
Key: key,
Body: content
}, (res) => {
console.log("One file added");
});
How can I handle an error if the upload of one file fails?
Everything is in the official documentation, just read it http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
// Correct callback signature: the first argument is the error. Bail out with
// a logged error on failure; otherwise report success.
s3.putObject({ Bucket: bucketName, Key: key, Body: content }, (err, res) => {
  if (err) {
    console.error(err);
    return;
  }
  console.log("One file added");
});

How to upload binary files to s3 using node js using s3 key attribute

I need to download and extract tar.gz files from an S3 bucket and upload them to another bucket. The files download properly and the result is returned as binary data by s3.getObject.
I need to pass that binary data (the files) to the S3 bucket for upload using s3.putObject.
But I don't know what name to give the "Key" param in s3.putObject when pushing the binary files. Kindly help me.
This is my code
// Download a gzipped archive from S3, gunzip it in memory, and upload the
// decompressed bytes back to S3 under a derived key.
var bucketName = "my.new.Bucket-Latest";
var fileKey = "Employee.tar.gz";
var params = { Bucket: bucketName, Key: fileKey };
s3.getObject(params, function (err, data) {
  // The SDK callback receives only (err, data); the original code declared
  // and invoked a non-existent third "callback" parameter, which would throw.
  if (err) {
    console.log(err);
    return; // stop — there is no body to decompress
  }
  zlib.gunzip(data.Body, function (err, result) {
    if (err) {
      console.log(err);
      return;
    }
    var extractedData = result;
    s3.putObject({
      Bucket: bucketName,                 // was the string literal "bucketName"
      Key: fileKey.replace(/\.gz$/, ''),  // e.g. "Employee.tar" — Key is just a
                                          // file name; any non-blank name works
      Body: extractedData,
      ContentType: 'application/x-tar'    // was the placeholder 'content-type'
    }, function (err) {
      console.log('uploaded file: ' + err);
    });
  });
});
Well, even your own code shows that the key is a file name. Just generate some file name and assign it to the Key property. It is like a filesystem: when you create a new file, you have to give it a name.

Using connect-busboy and node.js and upload to S3

I'm really struggling with uploading to S3.
I want to upload a video file to S3 using STREAMING (ie not saving temp file on the server and then uploading to s3)
I dont know how to set the s3 upload function in order for it to read from stream.
It seems like connect-busboy creates a stream that nobody ever reads.
This is my code:
// Stream an incoming multipart upload straight to S3 without writing a
// temporary file on the server.
app.post('/upload', function (req, res) {
console.log(req.body.FileBox);
req.busboy.on('file', function (fieldname, file, filename) {
//videoUpload.upload(req.body.FileBox);
var params = {
Bucket: 'videogamblerside',
Key: "chek",
Body: file, // the busboy file stream is passed directly as the upload body
ContentType: "video/mp4"
};
console.log(file);
//file.resume();
// file.resume();
s3.upload(params, function (err, data) {
if (err) throw err;
});
// NOTE(review): snippet is truncated — neither the busboy handler nor app.post is closed.
Probably you want to add ContentLength
This code sample works for me:
// Stream each incoming file part directly to S3, then respond once busboy
// has consumed the whole request.
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
  s3Client.putObject({
    Bucket: bucket,
    Key: filename,
    ACL: 'public-read',
    Body: file,
    // NOTE(review): a stream's length is not known up front; this hard-coded
    // value only suits bodies of exactly 3000 bytes — compute the real size,
    // or use s3.upload, which does not require ContentLength.
    ContentLength: 3000,
  }, function(err, data) {
    if (err) throw err;
    console.log("done", data);
    // Was `random_name`, an undefined variable (ReferenceError); the object
    // is stored under `filename`, so log that.
    console.log("https://s3-ap-southeast-1.amazonaws.com/" + bucket + '/' + filename);
  })
});
busboy.on('finish', function() {
  res.send("That's all folks!");
});

Resources