Below is the Rest API to upload the video to S3,unable to play the video by downloading as players throwing invalid file format.
app.post('/insert-video', async(req, res) => {
const {
data,
name: fileName,
size: fileSize,
type: fileType
} = req.body
// ASW code start
const base64data = new Buffer(data, 'binary');
let params = {
Bucket: "uploadvidoe",
Key: fileName,
ContentType: fileType,
Body: base64data
};
try {
let uploadPromise = await new AWS.S3().putObject(params).promise();
console.log("Successfully uploaded data to bucket");
} catch (e) {
console.log("Error uploading data: ", e);
}
});
Related
I tried to upload mp4 video file using s3 pre-signed URL. The video is getting uploaded properly but when I download the same video and play the video, it does not play.
Here is how I did it
// generating pre-signed url
const getSingedUrlforPut = async (bucketName, filename) => {
const params = {
Bucket: bucketName,
Key: filename,
Expires: 60 * 5,
ContentType: "video/mp4",
};
try {
const url = await s3.getSignedUrlPromise('putObject', params)
return url
} catch (err) {
console.log("error generating s3 url ", err)
throw err
}
}
// uploading it ot s3
const url = await getSingedUrlforPut(buckets.toConvertCoursesVideos, fileId)
try {
// file is mp4 file uploaded using form-data
const resp = await axios.put(url, file})
} catch (err){
console.log(err)
}
I'm a bit lost on the way a video is being send from React Native to the backend and having it working on S3. All help would be appreciated especially where I might be going wrong.
Initially, from React Native I use the Expo Camera to record a video. Once it has stopped recording, we use fetch to send the data as follows:
const startRecording = async () => {
setIsRecording(true);
const video = await camera.recordAsync({
maxDuration: 15
});
const data = new FormData();
data.append('video', {
name: 'mobile-video-upload',
uri: video.uri
});
try {
const res = await fetch('url/users/testing', {
method: 'post',
body: data
});
} catch (error) {
console.log('error uploading');
}
};
the type of Data we get back from Camera component through IOS is:
Object {
"uri": "...Camera/D3B7B5F5-6A17-4C45-A0BE-897956A9E637.mov",
}
On the backend I'm using a middleware known as multer. My route for the backend looks like this
const multer = require('multer');
const upload = multer({ dest: 'uploads/' });
router.post('/testing', upload.single('video'), async (req, res) => {
let buffer = null;
fs.readFile(req.file.path, (err, data) => {
if (err) {
console.log('There was an error reading file: ', err);
}
buffer = data;
});
const params = {
Bucket: bucket_name,
Key: 'testing124.mov',
ContentType: req.file.mimetype,
Body: buffer
};
try {
let uploadPr = await s3.putObject(params).promise();
console.log(uploadPr);
} catch (error) {
console.log('There was an err ', error);
}
The data we see in req.file is:
{
fieldname: 'video',
originalname: 'mobile-video-upload',
encoding: '7bit',
mimetype: 'video/quicktime',
destination: 'uploads/',
filename: 'aed58f2dfbcc8daa7964fb3df7d3b4f4',
path: 'uploads/aed58f2dfbcc8daa7964fb3df7d3b4f4',
size: 480422
}
What might I be doing wrong in order to have a valid video uploaded? I'm unable to view the video from s3 whether I download the file or try using the link and viewing the video.
Thank you for all the help.
If your already using multer, use this:
https://www.npmjs.com/package/multer-s3
I need to uplaod a pdf file from UI(written in Javascript) to Amazon S3 but I am trying to upload the file to the S3, I am getting some unicode format text and when I copy that text to notepad, or say, any other text editor I can the human readable text
I am using pdfmake to get the content of the file and upload it using getBufffer method.
var content = generatePDF(base64Img);
pdfMake.createPdf(content).getBuffer(function (data) {//Code}
The code that i used to upload the file to S3.
var params = {
Bucket: bucketName,
Key: file_name,
Body: data.toString(),
ContentType: 'application/pdf'
}
s3.upload(params, function (err, data) {
if (err) {
// code
}else{
//code
}
The file is getting uploaded successfully but I am getting the text like
!
" #$%&!' ()*')+,
!
!
!
!
But I am pasting it to other text editor, I am getting
Date: 04/20/19
I solved the above problem by passing the data from getBuffer to S3.
In S3, I passed to a buffer like
var data = new Buffer(event.data, 'binary');
uploaded the data to S3.
var params = {
Bucket: bucketName,
Key: file_name,
Body: data,
ContentType: 'application/pdf'
}
s3.upload(params, function (err, data) {
if (err) {
// code
}else{
//code
}
To upload a file from client end directly to s3 bucket you can use multer-s3.
FROM CLIENT END:
axios.post(url, data, {
onUploadProgress: ProgressEvent => {
this.setState({
loaded: (ProgressEvent.loaded / ProgressEvent.total * 100),
})
},
})
.then(res => { // then print response status
toast.success('Upload Success!')
})
.catch(err => { // then print response status
toast.error('Upload Failed!')
})
SERVER SIDE:
const upload = multer({
storage: multerS3({
s3: s3,
acl: 'public-read',
bucket: BUCKET_NAME,
key: function (req, file, cb) {
UPLOADED_FILE_NAME = Date.now() + '-' + file.originalname;
cb(null, UPLOADED_FILE_NAME);
}
})
}).array('file');
app.post('/upload', function (req, res) {
upload(req, res, function (err) {
if (err instanceof multer.MulterError) {
return res.status(500).json(err)
// A Multer error occurred when uploading.
} else if (err) {
return res.status(500).json(err)
// An unknown error occurred when uploading.
}
console.log('REQUEST FILE IS', UPLOADED_FILE_NAME)
return res.status(200).send(UPLOADED_FILE_NAME)
// Everything went fine.
})
});
const fs = require('fs');
const pdf = require('html-pdf');
const AWS = require("aws-sdk");
//Read html file
function readHTMLfile(path, callback) {
fs.readFile(path, {encoding: 'utf-8'}, function(err, html) {
if(err) {
callback(err);
}
else {
callback(null, html)
}
})
}
//importing HTML file
readHTMLfile(__dirname + '/Nda.html', function(err, html) {
if(err) {
console.log('Error: ',err)
}
else {
var handlebars = require("handlebars");
var template = handlebars.compile(html);
var replacables = {
url: 'http://politicalmemorabilia.com/wp-content/uploads/2014/10/2000px-Aaron_Burr_signature.png'
}
var HtmlToSend = template(replacables);
//pdf.create(HtmlToSend).toStream(function(err, stream){
pdf.create(HtmlToSend).toBuffer(function(err, buf){
if(err){
return console.log(err);
}
else {
console.log('This is a buffer:', buf);
//stream.pipe(fs.createWriteStream('./Nda2.pdf'));
//var readableStream = fs.readFileSync('./Nda2.pdf', 'base64')
// console.log('Readable Stream: ',readableStream)
// var buf = new Buffer('readableStream', 'base64')
// console.log('Buffer String ',buf.toString())
//console.log('Readable Stream ',readableStream)
//console.log('Pdf Content', ./Nda2.pdf);
AWS.config.update({
accessKeyId: "AKIAIRJAKT756L72NPBA",
secretAccessKey: "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"
});
var s3Bucket = new AWS.S3({ params: { Bucket: "vizitor-profilepicture" } });
var options = {
Key: `nda/Nda`,
Body: 'buf',
ContentEncoding: "buffer",
//ContentDisposition: "inline",
ContentType: "application/pdf"
};
s3Bucket.upload(options, function(err, data) {
if (err) {
console.log(err);
console.log("Error uploading data: ", data);
} else {
console.log('Data: ',data)
console.log("data: ", data.Location);
console.log("succesfully uploaded pdf!");
}
});
}
});
}
})
I am importing a HTML file and converted it into pdf using "html-pdf" module and used handlebars to embed dynamic URL in HTML file.I then converted the HTML file to buffer and want to upload to amazon s3 as pdf. For this, I have used buffer in body option. I am getting the correct buffer but when I upload to amazon s3 as pdf file a link is generated but on opening link in browser I get an error "Failed to load pdf"
I have made s3 bucket for public view so that's not the issue here.
My Lambda is receiving binary data of an image from my user in request body (event.body).
I try uploading it to S3 with no error, but when I download, the image is corrupted/ can't be opened.
I also need to return the URl of the uploaded image to the user.
Please Help!
module.exports.uploadImage = (event, context, callback) => {
var buf = new Buffer(new Buffer(event.body).toString('base64').replace(/^data:image\/\w+;base64,/, ""),'base64');
var data = {
Key: Date.now()+"",
Body: buf,
ContentEncoding: 'base64',
ContentType: 'image/png',
ACL: 'public-read'
};
s3Bucket.putObject(data, function(err, data){
if (err) {
console.log(err);
console.log('Error uploading data: ', data);
} else {
console.log('succesfully uploaded the image!');
}
callback(null,data);
});
};
You can upload the image to S3 as node Buffer. The SDK does the converting for you.
const AWS = require("aws-sdk");
var s3 = new AWS.S3();
module.exports.handler = (event, context, callback) => {
var buf = Buffer.from(event.body.replace(/^data:image\/\w+;base64,/, ""),"base64");
var data = {
Bucket: "sample-bucket",
Key: Date.now()+"",
Body: buf,
ContentType: 'image/png',
ACL: 'public-read'
};
s3.putObject(data, function(err, data){
if (err) {
console.log(err);
console.log('Error uploading data: ', data);
} else {
console.log('succesfully uploaded the image!');
}
callback(null,data);
});
};