Using connect-busboy and Node.js to upload to S3

I'm really struggling with uploading to S3.
I want to upload a video file to S3 using streaming (i.e. without saving a temp file on the server and then uploading it to S3).
I don't know how to set up the S3 upload function so that it reads from the stream.
It seems like connect-busboy creates a stream that nothing reads from.
This is my code:
app.post('/upload', function (req, res) {
  console.log(req.body.FileBox);
  req.busboy.on('file', function (fieldname, file, filename) {
    //videoUpload.upload(req.body.FileBox);
    var params = {
      Bucket: 'videogamblerside',
      Key: "chek",
      Body: file,
      ContentType: "video/mp4"
    };
    console.log(file);
    //file.resume();
    s3.upload(params, function (err, data) {
      if (err) throw err;
    });
  });
});

You probably want to add ContentLength.
This code sample works for me:
busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
  s3Client.putObject({
    Bucket: bucket,
    Key: filename,
    ACL: 'public-read',
    Body: file,
    ContentLength: 3000, // hardcoded size for this sample; use the real file size
  }, function (err, data) {
    if (err) throw err;
    console.log("done", data);
    console.log("https://s3-ap-southeast-1.amazonaws.com/" + bucket + '/' + filename);
  });
});
busboy.on('finish', function () {
  res.send("That's all folks!");
});
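For the original question's streaming goal, ContentLength is only required by putObject; s3.upload() accepts a stream of unknown length, so no temp file or hardcoded size is needed. Below is a minimal sketch (AWS SDK v2; the Express wiring is an assumption) that also pipes the request into busboy, without which the 'file' event never fires:
const AWS = require('aws-sdk');
const express = require('express');
const busboy = require('connect-busboy');

const s3 = new AWS.S3();
const app = express();
app.use(busboy());

app.post('/upload', function (req, res) {
  req.busboy.on('file', function (fieldname, file, filename) {
    // s3.upload() consumes the busboy stream directly; no ContentLength needed
    s3.upload({
      Bucket: 'videogamblerside', // bucket name from the question
      Key: filename,
      Body: file,
      ContentType: 'video/mp4'
    }, function (err, data) {
      if (err) return res.status(500).send(err.message);
      res.send(data.Location); // URL of the uploaded object
    });
  });
  req.pipe(req.busboy); // without this, busboy never receives the request body
});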

Related

How to upload a PDF file to S3 in JavaScript?

I need to upload a PDF file from the UI (written in JavaScript) to Amazon S3, but when I upload the file to S3 I get some Unicode-format text, and when I copy that text to Notepad, or any other text editor, I can see the human-readable text.
I am using pdfmake to get the content of the file and upload it using the getBuffer method.
var content = generatePDF(base64Img);
pdfMake.createPdf(content).getBuffer(function (data) { /* code */ });
The code that I used to upload the file to S3:
var params = {
  Bucket: bucketName,
  Key: file_name,
  Body: data.toString(),
  ContentType: 'application/pdf'
}
s3.upload(params, function (err, data) {
  if (err) {
    // code
  } else {
    // code
  }
});
The file is getting uploaded successfully, but I am getting text like
!
" #$%&!' ()*')+,
!
!
!
!
But when I paste it into another text editor, I get
Date: 04/20/19
I solved the above problem by passing the raw data from getBuffer to S3 instead of a string; Body: data.toString() was corrupting the binary PDF bytes.
I wrapped the data in a Buffer like this:
var data = Buffer.from(event.data, 'binary'); // new Buffer() is deprecated
and uploaded the data to S3.
var params = {
  Bucket: bucketName,
  Key: file_name,
  Body: data,
  ContentType: 'application/pdf'
}
s3.upload(params, function (err, data) {
  if (err) {
    // code
  } else {
    // code
  }
});
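A minimal sketch tying the two steps together (bucketName and file_name are the question's placeholders; the s3 client is assumed to be configured as above): pass the buffer straight through and never call toString() on it, since that is what mangled the bytes:
pdfMake.createPdf(content).getBuffer(function (data) {
  s3.upload({
    Bucket: bucketName,
    Key: file_name,
    Body: Buffer.from(data), // keep it binary; toString() corrupts PDF bytes
    ContentType: 'application/pdf'
  }, function (err, res) {
    if (err) return console.error(err);
    console.log('Uploaded to', res.Location);
  });
});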
To upload a file from the client directly to an S3 bucket, you can use multer-s3.
CLIENT SIDE:
axios.post(url, data, { // data is the FormData holding the file
  onUploadProgress: ProgressEvent => {
    this.setState({
      loaded: (ProgressEvent.loaded / ProgressEvent.total * 100),
    })
  },
})
.then(res => { // print response status
  toast.success('Upload Success!')
})
.catch(err => { // print response status
  toast.error('Upload Failed!')
})
SERVER SIDE:
const multer = require('multer');
const multerS3 = require('multer-s3');

// assumes an s3 client and BUCKET_NAME are already defined
const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: BUCKET_NAME,
    key: function (req, file, cb) {
      UPLOADED_FILE_NAME = Date.now() + '-' + file.originalname;
      cb(null, UPLOADED_FILE_NAME);
    }
  })
}).array('file');
app.post('/upload', function (req, res) {
  upload(req, res, function (err) {
    if (err instanceof multer.MulterError) {
      // A Multer error occurred when uploading.
      return res.status(500).json(err)
    } else if (err) {
      // An unknown error occurred when uploading.
      return res.status(500).json(err)
    }
    // Everything went fine.
    console.log('REQUEST FILE IS', UPLOADED_FILE_NAME)
    return res.status(200).send(UPLOADED_FILE_NAME)
  })
});

Node.js uploading PDF to S3 bucket: corrupt file

I am currently using aws-sdk to upload PDF files to an S3 bucket, like this:
function uploadFile (filePath, remoteFilename, cb) {
  var fileBuffer = fs.createReadStream(filePath); // ex.: 'temp/longFileName.pdf'
  fileBuffer.on('error', function (err) {
    logger.warn('Failed reading local pdf file');
    cb(err);
  });
  s3.upload({
    Bucket: 'someBucketName',
    Key: remoteFilename,
    Body: fileBuffer
  }, function (error, response) {
    cb(error, { response, remoteFilename });
  });
}
The problem is that sometimes the file gets uploaded with 0 B size; sometimes it gets uploaded with the correct size but is corrupt when I download it; and sometimes it is uploaded correctly and opens properly.
When I read the PDF file locally from the file system, it is correct.
Could somebody help me fix this issue?
Update
I am creating the PDF using pdfkit:
function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({
    size: 'LEGAL',
    info: {
      Title: 'Title of File Here',
      Author: 'Some Author',
    }
  });
  // Write stuff into PDF
  pdf.text('Hello World');
  // Stream contents to a file
  pdf.pipe(
    fs.createWriteStream(filePath)
  )
  .on('finish', function () {
    console.log('PDF closed');
  });
  // Close PDF and write file.
  pdf.end();
  cb(null, {filePath})
}
Once the callback in this function is called, I call the uploadFile function:
function doAll (someData, cb) {
  createPdf(someData, function (err, data) {
    if (err) console.log(err)
    uploadFile(data.filePath, function (err, data) {
      if (err) console.log(err)
      console.log('finished')
      cb(null, 'done');
      return;
    })
  })
}
The problem is that you're calling the callback immediately instead of waiting for the file to be fully written. Your callback function should be inside .on('finish'):
pdf.pipe(
  fs.createWriteStream('./path/to/file.pdf')
)
.on('finish', function () {
  console.log('PDF closed');
  cb(null, 'finished'); // the callback should be in here
});
// Close PDF and write file.
pdf.end();
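Applied to the question's createPdf, a minimal sketch of the corrected function (trimmed to the essentials; the 'error' handler is an added assumption):
function createPdf (data, cb) {
  var fs = require('fs');
  var PDFDocument = require('pdfkit');
  var filePath = 'temp/longFileName.pdf';
  var pdf = new PDFDocument({ size: 'LEGAL' });
  pdf.text('Hello World');
  pdf.pipe(fs.createWriteStream(filePath))
    .on('finish', function () {
      cb(null, { filePath }); // safe: the file is fully written now
    })
    .on('error', cb); // surface write errors instead of swallowing them
  pdf.end();
}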

Handle error when putting an object on Amazon S3

I am very new to Node.js. I use this code to upload files to Amazon S3.
s3.putObject({
  Bucket: bucketName,
  Key: key,
  Body: content
}, (res) => {
  console.log("One file added");
});
How can I handle an error if the upload of one file fails?
Everything is in the official documentation, just read it: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property. The error is the first argument of the callback:
s3.putObject({
  Bucket: bucketName,
  Key: key,
  Body: content
}, (err, res) => {
  if (err) {
    return console.error(err);
  }
  console.log("One file added");
});
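Equivalently, SDK v2 request objects expose a .promise() method, so errors can be handled with try/catch. A sketch, assuming the same s3 client and an async context:
async function putFile (bucketName, key, content) {
  try {
    const res = await s3.putObject({
      Bucket: bucketName,
      Key: key,
      Body: content
    }).promise();
    console.log('One file added', res);
  } catch (err) {
    console.error('Upload failed:', err); // rejected promise carries the error
  }
}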

Uploading Image from AWS Lambda to S3 via API Gateway in Binary format

My Lambda receives the binary data of an image from my user in the request body (event.body).
I upload it to S3 with no error, but when I download it, the image is corrupted and can't be opened.
I also need to return the URL of the uploaded image to the user.
Please help!
module.exports.uploadImage = (event, context, callback) => {
  var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: Date.now() + "",
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: 'image/png',
    ACL: 'public-read'
  };
  s3Bucket.putObject(data, function (err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('successfully uploaded the image!');
    }
    callback(null, data);
  });
};
You can upload the image to S3 as a Node Buffer. The SDK does the conversion for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();

module.exports.handler = (event, context, callback) => {
  var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""), "base64");
  var data = {
    Bucket: "sample-bucket",
    Key: Date.now() + "",
    Body: buf,
    ContentType: 'image/png',
    ACL: 'public-read'
  };
  s3.putObject(data, function (err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('successfully uploaded the image!');
    }
    callback(null, data);
  });
};
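The question also asks for the URL of the uploaded image, which putObject() does not return. One option (a sketch, not part of the original answer) is to use s3.upload() with the same params, since its callback result includes a Location field:
s3.upload(data, function (err, result) {
  if (err) return callback(err);
  // result.Location is the object's URL,
  // e.g. https://sample-bucket.s3.amazonaws.com/<key>
  callback(null, { url: result.Location });
});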

How to update the permissions of a file uploaded to an AWS S3 bucket?

I am uploading videos using Multipart Upload to Amazon S3 with Node.js. I am using this reference.
By default, the uploaded video is only accessible to the owner.
I am uploading the file using the code below:
var params_to_upload_on_S3 = {
  ACL: 'public-read',
  Bucket: imageBucket,
  Key: imgName + ".jpg",
  Body: imageBuffer,
  ContentType: 'image/jpeg' // MIME types are lowercase; 'Image/jpg' is not a valid type
}
s3.putObject(params_to_upload_on_S3, function (err, res) {
  if (err) {
    console.log('Thumbnail Upload Error' + err);
  } else {
    console.log('Thumbnail Successfully uploaded.' + JSON.stringify(res));
  }
  fs.unlink(imagePath, function () {
    console.log('File deleted from local storage');
  });
});
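For an object that has already been uploaded, the ACL can be changed after the fact with putObjectAcl; a sketch reusing the question's variables:
s3.putObjectAcl({
  Bucket: imageBucket,
  Key: imgName + '.jpg',
  ACL: 'public-read' // grant public read on the existing object
}, function (err, res) {
  if (err) console.log('ACL update error: ' + err);
  else console.log('ACL updated: ' + JSON.stringify(res));
});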
