I am trying to upload a file to AWS S3 using putObject, but it results in files of 0 bytes.
I do get a successful response back from the putObject call.
Node.js code:
const aws = require("aws-sdk");
const s3 = new aws.S3();

module.exports = {
  upload: function(req, res, next) {
    console.log("Going to upload");
    console.log(req.files);
    let uploadFile = req.files.file;
    const s3PutParams = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: uploadFile.name,
      Body: uploadFile.data,
      ACL: "public-read"
    };
    const s3GetParams = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: uploadFile.name
    };
    console.log(s3PutParams);
    s3.putObject(s3PutParams, function(err, response) {
      if (err) {
        console.error(err);
      } else {
        console.log("Response is", response);
        var url = s3.getSignedUrl("getObject", s3GetParams);
        console.log("The URL is", url);
        res.json({
          returnedUrl: url,
          publicUrl: `https://${process.env.S3_BUCKET_NAME}.s3.amazonaws.com/${uploadFile.name}`
        });
      }
    });
  }
};
Testing through Postman:
Backend console log:
Can anyone help me figure out what is wrong?
EDIT on 11/20:
@EmmanuelNK helped spot the fact that Buffer.byteLength(req.files.file.data) is 0. He had the questions below:
Are you trying to write the whole buffer into memory, or are you trying to stream it to S3?
Sorry if the answer is not to the point; I'm still getting my feet wet.
Basically, I want to upload an image to S3 and then later use that URL to show it on a webpage. In other words, something like Photobucket.
How are you using upload?
For now I am just testing my backend code (posted in the question) using Postman. Once I get that going, I will have a file-upload form on the front end calling this route.
Is that helpful? Thanks in advance for your help.
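For reference, here is the quick check that surfaced the empty buffer, as a minimal sketch (req.files.file assumes the express-fileupload shape):

// Log the buffer size before handing it to S3; 0 here means the object
// body will be empty no matter what putObject reports back.
const uploadFile = req.files.file;
console.log("Buffer size:", Buffer.byteLength(uploadFile.data));
if (Buffer.byteLength(uploadFile.data) === 0) {
  return res.status(400).json({ error: "received an empty file buffer" });
}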
If you're using express-fileupload as the file-upload middleware and you've set the useTempFiles option to true, keep in mind that the file's data buffer will be empty (see the package's usage notes), which matches the issue you're facing. To get around this, simply read the temp file once more to get the intended file buffer.
import fs from 'fs';
// OR
const fs = require('fs');

// in your route
let uploadFile = req.files.file;

// THIS: read the temp file back to get the actual contents
fs.readFile(uploadFile.tempFilePath, (err, uploadedData) => {
  if (err) { throw err; }
  const s3PutParams = {
    Bucket: process.env.S3_BUCKET_NAME,
    Key: uploadFile.name,
    Body: uploadedData, // <--- THIS
    ACL: "public-read"
  };
  const s3GetParams = {
    Bucket: process.env.S3_BUCKET_NAME,
    Key: uploadFile.name
  };
  console.log(s3PutParams);
  s3.putObject(s3PutParams, function(err, response) {
    if (err) {
      console.error(err);
      throw err;
    } else {
      console.log("Response is", response);
      var url = s3.getSignedUrl("getObject", s3GetParams);
      console.log("The URL is", url);
      res.json({
        returnedUrl: url,
        publicUrl: `https://${process.env.S3_BUCKET_NAME}.s3.amazonaws.com/${uploadFile.name}`
      });
    }
  });
});
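Alternatively, since useTempFiles already puts the upload on disk, the temp file can be streamed instead of buffered whole. A sketch under the same assumptions (s3.upload, unlike putObject, accepts a readable stream as Body):

const fs = require("fs");

// stream the temp file straight to S3 instead of reading it into memory
const s3UploadParams = {
  Bucket: process.env.S3_BUCKET_NAME,
  Key: uploadFile.name,
  Body: fs.createReadStream(uploadFile.tempFilePath),
  ACL: "public-read"
};
s3.upload(s3UploadParams, function(err, data) {
  if (err) { return console.error(err); }
  console.log("Uploaded to", data.Location);
});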
Related
I'm trying to upload an array of photos to a server, but the req.files array always shows up empty when it gets there.
req.body displays the array as expected.
The images are added through a Dropzone component. (I've tried switching this for a standard input, but they both seem to pass files the same way.)
<Dropzone
  onDrop={onDrop}
  onSubmit={uploadPhotos}
  maxFiles={20}
  inputContent="Drop 20 Images"
  inputWithFilesContent={files => `${20 - files.length} more`}
/>
The images are appended to a FormData object under the name image[] before being sent via an Axios POST request with multipart/form-data headers set.
export const uploadPhotos = (files) => {
  const formData = new FormData();
  for (let i = 0; i < files.length; i += 1) {
    formData.append("image[]", files[i]);
  }
  const config = {
    headers: {
      'Content-Type': 'multipart/form-data'
    }
  };
  return async (dispatch, getState) => {
    try {
      const response = await axios.post('/api/kite/upload', formData, config);
      console.log(response.data);
      dispatch({
        type: ORDER_CHANGE,
        payload: response.data
      });
    } catch (err) {
      console.log(err);
    } finally {
      console.log('done');
    }
  };
};
Once passed to the server, only req.body seems to contain any data; req.files is empty despite using the Multer middleware as the second parameter. Inside files.map(), the items are undefined, presumably because req.files is an empty array.
var multer = require('multer');
var AWS = require('aws-sdk');
var bluebird = require('bluebird');
var fs = require('fs');

AWS.config.setPromisesDependency(bluebird);

const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, 'upload')
  },
  filename: (req, file, cb) => {
    cb(null, file.fieldname + '-' + Date.now())
  }
});

const upload = multer({
  storage: storage
}).array('image');

router.post('/upload', upload, function (req, res) {
  const file = req.files;
  let s3bucket = new AWS.S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: 'BUCKETNAME'
  });
  s3bucket.createBucket(function () {
    let Bucket_Path = 'https://console.aws.amazon.com/s3/buckets/BUCKETNAME?region=eu-west-1';
    var ResponseData = [];
    file.map((item) => {
      // item.x are all undefined
      var fileStream = fs.createReadStream(filePath);
      var params = {
        Bucket: Bucket_Path,
        Key: item.originalname,
        Body: item.buffer,
        ACL: 'public-read'
      };
      s3bucket.upload(params, function (err, data) {
        if (err) {
          res.json({ "error": true, "Message": err });
        } else {
          ResponseData.push(data);
          if (ResponseData.length == file.length) {
            res.json({ "error": false, "Message": "File Uploaded Successfully", Data: ResponseData });
          }
        }
      });
    });
  });
});
My end goal is to pass the images to an Amazon S3 bucket. I don't think that part impacts this, since there is no data to send, but I've included it in case it is somehow affecting this.
I've been through lots of similar Stack Overflow questions and Medium posts, and the main three resolutions to this issue all seem to be included in the flow above:
Append file name to items of FormData array
Set POST request headers
Include Multer middleware in express parameter
Can anyone help me figure out why req.files is an empty array?
It might be that Dropzone isn't processing the files. Try adding this to the uploadPhotos function:
const acceptedFiles = myDropzone.getAcceptedFiles(); // "myDropzone" is just the Dropzone instance
for (let i = 0; i < acceptedFiles.length; i++) {
  myDropzone.processFile(acceptedFiles[i]);
}
I keep trying to retrieve an image from S3; there is no error message, but I cannot see the actual image on my page.
I used Express and Node.js to make a little application.
Here is my code. Please help me fix this.
Upload image to S3:
(req, res, next) => {
  const file = req.file; // to get this "file", I used multer.diskStorage in the routes
  const fileData = fs.readFileSync(file.path);
  const fileName = file.path.substring(8);
  var params = {
    Bucket: "test-s3-may",
    Key: fileName,
    Body: fileData,
    ContentType: file.mimetype,
    ACL: "public-read"
  };
  s3.upload(params, function(err, data) {
    if (err) { return next(err); }
    // ...
  });
}
Retrieve image from S3:
function viewAlbum(filename) {
  var params = { "Bucket": 'test-s3-may', "Key": filename };
  s3.getObject(params, function(err, file) {
    if (err) { return "we got an error"; }
    else {
      var url = "data:image/jpeg;base64," + encode(file.Body);
    }
    return url; // note: this returns from the callback, not from viewAlbum
  });
}
function encode(data) {
  var res = Buffer.from(data).toString('base64');
  return res;
}
and I used "url" in the view with
<img src= >
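As a side note on the retrieval snippet: return url runs inside the asynchronous getObject callback, so viewAlbum itself returns undefined before S3 responds. A minimal callback-based sketch (the done parameter is hypothetical):

function viewAlbum(filename, done) {
  var params = { Bucket: 'test-s3-may', Key: filename };
  s3.getObject(params, function(err, file) {
    if (err) { return done(err); }
    // hand the data URL back through the callback instead of returning it
    done(null, "data:image/jpeg;base64," + encode(file.Body));
  });
}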
Is there any problem with the upload?
Strangely, Windows Explorer shows the error message "This is not a supported format"
when I download the image file that I uploaded through this app.
When uploading images, if I use the data that's in req.file.buffer, which is an array of numbers (the buffer), the image uploads to AWS S3 correctly.
But I need to resize the image first, so I'm trying to use Jimp, like so:
const photo = await jimp.read(req.file.buffer)
await photo.cover(300, 300);
And then pass it to the AWS params:
const s3 = new AWS.S3();
const params = {
  Bucket: 'jamsession-images',
  Key: req.body.photo,
  // here in body is a buffer just like the one in req.file.buffer
  Body: photo.bitmap.data
};
s3.upload(params, function (err, data) {
  if (err) {
    console.log(err);
  }
  console.log('****************** success');
});
But if I do this, it uploads the image to AWS S3, but the image is corrupted.
What am I doing wrong here? I think S3 needs a buffer in the body, and I'd expect the new buffer produced after Jimp finishes scaling the image to work, but it doesn't. Any ideas?
Full code:
const jimp = require('jimp');
const uuid = require('uuid');
const AWS = require('aws-sdk');

exports.resize = async (req, res, next) => {
  // check if there is no new file to resize
  if (!req.file) {
    next(); // skip to the next middleware
    return;
  }
  const extension = req.file.mimetype.split('/')[1];
  req.body.photo = `${uuid.v4()}.${extension}`;
  // now we resize
  const photo = await jimp.read(req.file.buffer);
  await photo.cover(300, 300);
  AWS.config.update({
    secretAccessKey: process.env.SECRETACCESSKEY,
    accessKeyId: process.env.ACCESSKEYID,
    region: 'us-east-1'
  });
  const s3 = new AWS.S3();
  const params = {
    Bucket: 'jamsession-images',
    Key: req.body.photo,
    // this line seems to be the issue..
    // even though photo.bitmap.data is also a buffer
    Body: photo.bitmap.data
  };
  s3.upload(params, function (err, data) {
    if (err) {
      console.log('%%%%%%%%%%%%%%% error in callback');
      console.log(err);
    }
    console.log('****************** success');
    console.log(data);
  });
  // await photo.write(`./public/uploads/${req.body.photo}`);
  // once we have written the photo to our filesystem, keep going!
  next();
};
I have had this problem too; to get the correct buffer for the resulting image, we have to use Jimp's getBuffer function.
image.getBuffer(mime, cb);
Supported MIME types
Jimp.MIME_PNG; // "image/png"
Jimp.MIME_JPEG; // "image/jpeg"
Jimp.MIME_BMP; // "image/bmp"
With Jimp.AUTO, you can keep the MIME type of the original image and use that.
You can read more about the getBuffer function at https://www.npmjs.com/package/jimp
photo.getBuffer(Jimp.AUTO, function(error, result) {
  const params = {
    Bucket: 'jamsession-images',
    Key: req.body.photo,
    // correct buffer
    Body: result
  };
  s3.upload(...);
});
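Since the surrounding middleware already uses async/await, the promise flavour may read more naturally. A sketch, assuming a Jimp version that ships getBufferAsync:

// getBufferAsync is the promisified getBuffer; Jimp.AUTO keeps the
// MIME type of the source image
const resizedBuffer = await photo.getBufferAsync(Jimp.AUTO);
const params = {
  Bucket: 'jamsession-images',
  Key: req.body.photo,
  Body: resizedBuffer // correct buffer, same as in the callback version
};
s3.upload(params, function (err, data) { /* ... */ });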
I am trying to write a function that takes the mp3 URL of the recording and then uploads it to S3. However, I keep getting a runtime error and the callback is never reached. If I move the callback below s3.upload(...), then the statement "attempting to upload mp3" is never logged.
exports.handler = function(context, event, callback) {
  const twiml = new Twilio.twiml.VoiceResponse();
  var AWS = require('aws-sdk');
  var s3 = new AWS.S3();
  var getUri = require('get-uri');
  AWS.config.update({
    accessKeyId: "...",
    secretAccessKey: "..."
  });
  var client = context.getTwilioClient();
  const recording_id = event.RecordingSid;
  const uri = event.RecordingUrl + ".mp3";
  getUri(uri, function (err, rs) {
    if (err) {
      console.log(err.message);
      throw err;
    }
    var params = {
      ACL: "public-read",
      Body: rs,
      Bucket: "...",
      Key: "audio.mp3",
      ContentType: 'audio/mp3'
    };
    s3.upload(params, function(err, data) {
      console.log("attempting to upload mp3");
      if (err) {
        console.log("there is an error");
        console.log(err.status);
        throw err.message;
      } else {
        console.log("Your upload has been successful.");
      }
      callback(null, twiml);
    });
  });
  console.log("at the end");
};
Is there any other way to access the recordings and put them in my public S3 bucket? Why does s3.upload(...) never execute?
Any insight into this is helpful! Thanks in advance!
app.get('/uploadsong', function(req, res) {
  console.log("Hi there");
  var URI = 'http://sensongsmp3download.info/Kaala%20(2018)%20-%20Sensongsmp3.info/Thanga%20Sela%20--%20Sensongsmp3.Info.mp3';
  var buffer = [];
  request
    .get(URI)
    .on('error', function(err) {
      console.log("error");
    })
    .on('data', function(data) {
      // collect the chunks as they stream in
      buffer.push(data);
    })
    .on('end', function() {
      var completeSong = Buffer.concat(buffer);
      var data = {
        Body: completeSong,
        Key: 'sample.mp3',
        ContentType: 'audio/mp3'
      };
      s3Bucket.putObject(data, function(err, data) {
        if (err) {
          console.log('Error uploading data: ', err);
        } else {
          console.log('upload successful');
          res.send('done');
        }
      });
    });
});
Here are the modules I have used:
var request = require('request');
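The snippet assumes s3Bucket is already constructed elsewhere; since the data object above carries no Bucket key, the bucket name would have to be bound at construction time, along these lines ('your-bucket-name' is a placeholder):

var AWS = require('aws-sdk');
// binding a default Bucket here lets putObject omit it from its params
var s3Bucket = new AWS.S3({ params: { Bucket: 'your-bucket-name' } });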
I contacted Twilio regarding this; they responded that Twilio Functions have a strict 5-second timeout, and the upload from the Twilio Function to the S3 bucket takes more than 5 seconds. My workaround was sending a string with all the mp3 URLs separated by a comma and a space. The Lambda function would then parse the links and store them in an array to be used for audio playback.
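The parsing side of that workaround is a one-liner; a sketch, with the event field name being hypothetical:

// split "https://.../a.mp3, https://.../b.mp3" into an array for playback
const recordingUrls = event.recordingUrls.split(', ');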
Trying to upload an image using Node.js, Express, and the request module, but I keep getting a 415 back from the Amazon S3 instance.
fs.createReadStream(req.files.image.path).pipe(request.post(defaults.url, {
  form: {
    param_1: '',
    param_2: ''
  }
}, function(error, response, body) {
  if (error) {
    callback(error, null);
  } else {
    if (response.statusCode === 200) {
      callback({}, body);
    } else {
      callback(body, response);
    }
  }
}));
I think the image is not getting appended to the request, but I'm not 100% sure. Any advice?
pipe expects a Writable stream as its parameter. You can use the res object of Express directly as the pipe destination. But if you would like to upload to S3, you can read the file from the req stream and use putObject to write it to S3:
var fs = require('fs');
fs.readFile(req.files.image.path, function (err, data) {
  var AWS = require('./aws_config');
  var s3 = new AWS.S3();
  var bucket = '';
  var file_name = req.files.image.name; // assumption: key the object by the uploaded file's original name
  s3.putObject({
    ACL: 'public-read', // by default private access
    Bucket: bucket,
    Key: file_name,
    Body: data
  }, function (err, data) {
    if (err) {
      console.log(err);
      res.send(500, { msg: 'image upload failed', error: err });
    } else {
      console.log('S3 upload Successful');
      res.send({});
    }
  });
});
If you would like to download, you can use pipe to redirect the read object to the response directly:
app.get('/download/:file', function(req, res, next) {
  var AWS = require('./aws_config');
  var s3 = new AWS.S3();
  s3.getObject({
    Bucket: '',
    Key: req.params.file
  }, function (err, data) {
    if (err) console.log(err);
    var fs = require('fs');
    var filePath = __dirname + "/downloads/" + req.params.file;
    fs.writeFile(filePath, data.Body, function (err) {
      if (err) console.log(err);
      else {
        res.attachment(filePath);
        var filestream = fs.createReadStream(filePath);
        filestream.pipe(res);
        // TODO: delete file from server ?
      }
    });
  });
});
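If the temp file on disk is not needed for anything else, the object can also be streamed straight from S3 to the response, which sidesteps the cleanup TODO entirely. A sketch using createReadStream on the SDK's request object:

app.get('/download/:file', function (req, res) {
  var AWS = require('./aws_config');
  var s3 = new AWS.S3();
  res.attachment(req.params.file);
  // pipe the object body directly to the client; nothing touches disk
  s3.getObject({ Bucket: '', Key: req.params.file })
    .createReadStream()
    .on('error', function (err) { res.status(500).send(err.message); })
    .pipe(res);
});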