Stuck in Retrieving image from AWS S3 - node.js

I keep trying to retrieve an image from S3; there is no error message, but I cannot see the actual image on my page.
I used Express with Node.js to build a small application.
Here is my code. Please help me fix this.
Upload image to S3 -----------
(req, res, next) => {
  const file = req.file; // to get this "file", I used multer.diskStorage on the route
  const fileData = fs.readFileSync(file.path); // read the file multer saved to disk
  const fileName = file.path.substring(8); // drop the upload-folder prefix from the path
  var params = {
    Bucket: "test-s3-may",
    Key: fileName,
    Body: fileData,
    ContentType: file.mimetype,
    ACL: "public-read"
  };
  s3.upload(params, function(err, data) {
    if (err) { return next(err); }
    // ... (the rest of the success handling was cut off in the post)
  });
}
Retrieve image from S3 -----
function viewAlbum(filename) {
  var params = {
    Bucket: 'test-s3-may',
    Key: filename
  };
  s3.getObject(params, function(err, file) {
    if (err) { return "we got a error"; }
    else {
      var url = "data:image/jpeg;base64," + encode(file.Body);
    }
    return url;
  });
}

function encode(data) {
  var res = Buffer.from(data).toString('base64');
  return res;
}
and I used "url" in the view with
<img src= >
Is there any problem with the upload?
It is strange that Windows Explorer shows the error message "This is not a supported format"
when I download the image file that I uploaded through this app.
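One thing that stands out in the retrieval code above: viewAlbum returns before the s3.getObject callback has run, so url never reaches the view. Below is a minimal sketch of a callback-based version; the bucket name comes from the question, while the route path, the app object and the view name are assumptions for illustration only.

// Sketch: hand the data URL to a callback instead of returning it,
// since s3.getObject completes asynchronously.
function viewAlbum(filename, done) {
  const params = { Bucket: "test-s3-may", Key: filename };
  s3.getObject(params, function (err, file) {
    if (err) { return done(err); }
    const url = "data:" + file.ContentType + ";base64," + file.Body.toString("base64");
    done(null, url);
  });
}

// Usage in an Express route (route path and view name are hypothetical):
app.get("/image/:filename", function (req, res, next) {
  viewAlbum(req.params.filename, function (err, url) {
    if (err) { return next(err); }
    res.render("image", { url: url }); // the view then uses <img src="<url>">
  });
});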

Related

Dropzone / React / NodeJS - Upload Image Array

I'm trying to upload an array of photos to a server but the req.files array always shows up empty when it gets there.
req.body displays the array as expected.
The images are added through a Dropzone component. (I've tried switching this for a standard input but they both seem to pass files the same way)
<Dropzone
  onDrop={onDrop}
  onSubmit={uploadPhotos}
  maxFiles={20}
  inputContent="Drop 20 Images"
  inputWithFilesContent={files => `${20 - files.length} more`}
/>
The files are appended to a FormData object under the name image[] before being sent via an Axios POST request with the multipart/form-data header set.
export const uploadPhotos = (files) => {
  const formData = new FormData();
  for (let i = 0; i < files.length; i += 1) {
    formData.append("image[]", files[i]);
  }
  const config = {
    headers: {
      'Content-Type': `multipart/form-data`
    }
  };
  return async (dispatch, getState) => {
    try {
      const response = await axios.post('/api/kite/upload', formData, config)
        .then(function(response) {
          console.log(response.data);
          dispatch({
            type: ORDER_CHANGE,
            payload: response.data
          });
        });
    } catch (err) {
      console.log(err);
    } finally {
      console.log('done');
    }
  };
};
Once passed to the server, only req.body seems to contain any data and req.files is empty, despite using the Multer middleware as the second parameter. Items passed to files.map() are undefined, presumably because req.files is an empty array.
var multer = require('multer');
var AWS = require('aws-sdk');
var fs = require('fs'); // used by fs.createReadStream below
var bluebird = require('bluebird');
var router = require('express').Router();
AWS.config.setPromisesDependency(bluebird);

const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, 'upload');
  },
  filename: (req, file, cb) => {
    cb(null, file.fieldname + '-' + Date.now());
  }
});

const upload = multer({
  storage: storage
}).array('image');

router.post('/upload', upload, function (req, res) {
  const file = req.files;
  let s3bucket = new AWS.S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: 'BUCKETNAME'
  });
  s3bucket.createBucket(function () {
    let Bucket_Path = 'https://console.aws.amazon.com/s3/buckets/BUCKETNAME?region=eu-west-1';
    var ResponseData = [];
    file.map((item) => {
      // item.x are all undefined
      var fileStream = fs.createReadStream(filePath);
      var params = {
        Bucket: Bucket_Path,
        Key: item.originalname,
        Body: item.buffer,
        ACL: 'public-read'
      };
      s3bucket.upload(params, function (err, data) {
        if (err) {
          res.json({ "error": true, "Message": err });
        } else {
          ResponseData.push(data);
          if (ResponseData.length == file.length) {
            res.json({ "error": false, "Message": "File Uploaded Successfully", Data: ResponseData });
          }
        }
      });
    });
  });
});
My end goal is to pass the images to an Amazon S3 bucket. I don't think the S3 code impacts this, since there is no file data to send, but I've included it in case it is somehow affecting things.
I've been through lots of other similar Stack Overflow questions and Medium posts, and the main three resolutions to this issue seem to be included in the flow above.
Append file name to items of FormData array
Set POST request headers
Include Multer middleware in express parameter
Can anyone help me figure out why req.files is an empty array?
It might be that Dropzone isn't processing the files. Try adding this to the uploadPhotos function:
const acceptedFiles = myDropzone.getAcceptedFiles(); // "myDropzone" is just the Dropzone instance
for (let i = 0; i < acceptedFiles.length; i++) {
  myDropzone.processFile(acceptedFiles[i]);
}
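It may also be worth double-checking that the field name appended on the client matches the one multer is configured for: the question appends "image[]" while the server uses .array('image'), and multer raises an "Unexpected field" error for file fields it was not told about. A small sketch, based on the snippets above, that keeps the two in sync; whether the File sits directly in the array or under a .file property depends on the Dropzone wrapper, so both cases are handled:

// Client: append each File under the field name the server expects.
const formData = new FormData();
files.forEach((f) => formData.append("image", f.file || f)); // f.file if Dropzone wraps the File in a meta object

// Server: accept up to 20 files on the "image" field.
const upload = multer({ storage: storage }).array("image", 20);
router.post("/upload", upload, (req, res) => {
  res.json({ count: req.files.length }); // should now report the uploaded files
});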

AWS S3 uploading 0 B file - node.js

I am trying to upload a file to AWS S3 using putObject, but it results in files of 0 byte size.
I do get a successful response back from the putObject call.
Node.js code:
const aws = require("aws-sdk");
const s3 = new aws.S3();

module.exports = {
  upload: function(req, res, next) {
    console.log("Going to upload");
    console.log(req.files);
    let uploadFile = req.files.file;
    const s3PutParams = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: uploadFile.name,
      Body: uploadFile.data,
      ACL: "public-read"
    };
    const s3GetParams = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: uploadFile.name
    };
    console.log(s3PutParams);
    s3.putObject(s3PutParams, function(err, response) {
      if (err) {
        console.error(err);
      } else {
        console.log("Response is", response);
        var url = s3.getSignedUrl("getObject", s3GetParams);
        console.log("The URL is", url);
        res.json({
          returnedUrl: url,
          publicUrl: `https://${process.env.S3_BUCKET_NAME}.s3.amazonaws.com/${uploadFile.name}`
        });
      }
    });
  }
};
Testing through Postman: (screenshot omitted)
Backend console log: (screenshot omitted)
Can anyone help me in figuring out what is wrong?
EDIT on 11/20:
@EmmanuelNK helped in spotting that Buffer.byteLength(req.files.file.data) is 0. He had the below questions:
"Are you trying to write the whole buffer into memory or are you trying to stream it to S3?"
Sorry if the answer is not to the point; I'm still getting my feet wet.
Basically I want to upload an image to S3 and then later use that URL to show it on a webpage, in other words like a Photobucket.
"How are you using upload?"
For now I am just testing my backend code (posted in the question) using Postman. Once I get that going, I will have a file upload form on the front end calling this route.
Is that helpful? Thanks in advance for your help.
If you're using express-fileupload as the file-uploading middleware and you've set the useTempFiles option to true, keep in mind that the file's data buffer will be empty (check the usage docs), which matches the issue you're facing. To get around this, simply read the temp file once more to get the intended file buffer.
import fs from 'fs';
// OR
const fs = require('fs');

// in your route
let uploadFile = req.files.file;

// THIS: read the temp file written by express-fileupload
fs.readFile(uploadFile.tempFilePath, (err, uploadedData) => {
  if (err) { throw err; }
  const s3PutParams = {
    Bucket: process.env.S3_BUCKET_NAME,
    Key: uploadFile.name,
    Body: uploadedData, // <--- THIS: the buffer read from the temp file
    ACL: "public-read"
  };
  const s3GetParams = {
    Bucket: process.env.S3_BUCKET_NAME,
    Key: uploadFile.name
  };
  console.log(s3PutParams);
  s3.putObject(s3PutParams, function(err, response) {
    if (err) {
      console.error(err);
      throw err;
    } else {
      console.log("Response is", response);
      var url = s3.getSignedUrl("getObject", s3GetParams);
      console.log("The URL is", url);
      res.json({
        returnedUrl: url,
        publicUrl: `https://${process.env.S3_BUCKET_NAME}.s3.amazonaws.com/${uploadFile.name}`
      });
    }
  });
});
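On the streaming question raised in the edit: rather than reading the temp file back into memory, the temp file path can be streamed straight to S3. A sketch under the same express-fileupload assumptions (useTempFiles: true), using s3.upload, which accepts a readable stream:

const fs = require("fs");

// inside the route
let uploadFile = req.files.file;
const s3PutParams = {
  Bucket: process.env.S3_BUCKET_NAME,
  Key: uploadFile.name,
  Body: fs.createReadStream(uploadFile.tempFilePath), // stream the temp file instead of buffering it
  ACL: "public-read"
};

s3.upload(s3PutParams, function (err, data) {
  if (err) { return console.error(err); }
  console.log("Uploaded to", data.Location); // data.Location is the object URL
});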

AWS S3 Image uploaded is corrupted

When uploading images, if I use the data that's in req.file.buffer (an array of numbers, i.e. the buffer), the image uploads to AWS S3 correctly.
But I need to resize the image first, so I'm trying to use Jimp, like so:
const photo = await jimp.read(req.file.buffer)
await photo.cover(300, 300);
And then pass it to the AWS upload params:
const s3 = new AWS.S3();
const params = {
  Bucket: 'jamsession-images',
  Key: req.body.photo,
  // here the Body is a buffer just like the one in req.file.buffer
  Body: photo.bitmap.data
};
s3.upload(params, function (err, data) {
  if (err) {
    console.log(err);
  }
  console.log('****************** success');
});
But if I do this, it uploads the image to AWS S3, but the image is corrupted.
What am I doing wrong here? I think AWS S3 needs a buffer in the Body... and I thought that after Jimp finished scaling the image the new buffer would work, but it doesn't. Any ideas?
Full code:
exports.resize = async (req, res, next) => {
  // check if there is no new file to resize
  if (!req.file) {
    next(); // skip to the next middleware
    return;
  }
  const extension = req.file.mimetype.split('/')[1];
  req.body.photo = `${uuid.v4()}.${extension}`;
  // now we resize
  const photo = await jimp.read(req.file.buffer);
  await photo.cover(300, 300);
  AWS.config.update({
    secretAccessKey: process.env.SECRETACCESSKEY,
    accessKeyId: process.env.ACCESSKEYID,
    region: 'us-east-1'
  });
  const s3 = new AWS.S3();
  const params = {
    Bucket: 'jamsession-images',
    Key: req.body.photo,
    // this line seems to be the issue..
    // even though photo.bitmap.data is also a buffer
    Body: photo.bitmap.data
  };
  s3.upload(params, function (err, data) {
    if (err) {
      console.log('%%%%%%%%%%%%%%% error in callback');
      console.log(err);
    }
    console.log('****************** success');
    console.log(data);
  });
  // await photo.write(`./public/uploads/${req.body.photo}`);
  // once we have written the photo to our filesystem, keep going!
  next();
};
I had this problem too. To get the correct buffer of the resulting image we have to use Jimp's getBuffer function.
image.getBuffer(mime, cb);
Supported MIME types
Jimp.MIME_PNG; // "image/png"
Jimp.MIME_JPEG; // "image/jpeg"
Jimp.MIME_BMP; // "image/bmp"
But with Jimp.AUTO you can keep the MIME type of the original image and use that.
You can read more about the getBuffer function at https://www.npmjs.com/package/jimp
photo.getBuffer(Jimp.AUTO, function(error, result) {
  const params = {
    Bucket: 'jamsession-images',
    Key: req.body.photo,
    // correct buffer
    Body: result
  };
  s3.upload(...);
});
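Since the resize middleware in the question is already async/await based, the promise form may read more naturally. A sketch assuming a Jimp version recent enough to expose getBufferAsync; the bucket, key and mimetype come from the question's code:

const photo = await jimp.read(req.file.buffer);
await photo.cover(300, 300);

// getBufferAsync resolves with a properly encoded Buffer; jimp.AUTO keeps the original format
const body = await photo.getBufferAsync(jimp.AUTO);

const params = {
  Bucket: 'jamsession-images',
  Key: req.body.photo,
  Body: body,
  ContentType: req.file.mimetype
};
await s3.upload(params).promise();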

Link to image uploaded in S3 bucket does not display image

Hi, I'm new to AWS Lambda and S3. I'm trying to create an API that will allow me to upload an image. I have the following Lambda code to upload the file. After uploading I see that the file size is correct, but the file is corrupted.
let encodedImage = event.body;
console.log(encodedImage);
let decodedImage = Buffer.from(encodedImage, "binary");
console.log(decodedImage.length);
const filePath = `${Date.now()}.jpg`;
const params = {
  Bucket: "manufacturer-theme-assets",
  Key: filePath,
  Body: decodedImage,
  ContentType: "image/jpeg",
  ACL: "public-read"
};
s3.putObject(params, (err, data) => {
  if (err) {
    callback(err, null);
  } else {
    let response = {
      statusCode: 200,
      body: JSON.stringify(data),
      isBase64Encoded: false
    };
    callback(null, response);
  }
});
Make sure you are using the relevant content type for the image, and please share the corrupted image link from S3 or the error you get while opening the file.
Otherwise, try this first and check:
const filePath = `${Date.now()}.jpg`;
var params = {
  ACL: "public-read",
  Body: decodedImage,
  Bucket: "manufacturer-theme-assets",
  Key: filePath
};
s3.putObject(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);               // successful response
});
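One more thing worth checking in the handler above: if the request arrives through API Gateway with binary support enabled, event.body is usually delivered base64-encoded (event.isBase64Encoded is true), and decoding it as "binary" corrupts the bytes. A hedged sketch of the decode step only:

// Decode according to how API Gateway delivered the body.
const encodedImage = event.body;
const decodedImage = event.isBase64Encoded
  ? Buffer.from(encodedImage, "base64")
  : Buffer.from(encodedImage, "binary");
console.log(decodedImage.length); // should match the original file size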

how to retrieve image from s3 with nodejs

Please let me know how to retrieve an image from S3 with Node.js. Honestly, I could upload an image to S3 with Node.js as follows, but the problem is: how can I retrieve the image from S3?
router.get('/image/:imageId', function (req, res, next) {
  // ????
});

var s3 = new aws.S3({ accessKeyId: config.awsAccessId, secretAccessKey: config.awsAccessKey });

var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: config.bucket,
    key: function (req, file, cb) {
      cb(null, file.originalname);
    }
  })
});

router.post('/upload/:id', upload.array('file', 3), function(req, res, next) {
  res.send('Successfully uploaded ' + req.files.length + ' files!');
});
I've finally found this:
var params = { Bucket: config.bucket, Key: req.params.imageId };
s3.getObject(params, function(err, data) {
  if (err) { return next(err); } // pass S3 errors on to the error handler
  res.writeHead(200, { 'Content-Type': 'image/jpeg' });
  res.write(data.Body, 'binary');
  res.end(null, 'binary');
});
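A small refinement on that, assuming the same route: the ContentType stored with the object can be reused instead of hardcoding image/jpeg, and the error case can be forwarded to Express:

router.get('/image/:imageId', function (req, res, next) {
  var params = { Bucket: config.bucket, Key: req.params.imageId };
  s3.getObject(params, function (err, data) {
    if (err) { return next(err); } // e.g. NoSuchKey
    res.writeHead(200, { 'Content-Type': data.ContentType || 'image/jpeg' });
    res.end(data.Body); // data.Body is already a Buffer
  });
});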
If you use Lambda with API Gateway to retrieve images, then there is no need to use access keys in the code, as long as the Lambda's role has the appropriate permissions.
Read an image from the bucket and send it as base64 to use it directly in the src of an image tag in HTML.
const AWS = require('aws-sdk');
// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = (event, context, callback) => {
  var params = {
    Bucket: "bucket-name",
    Key: "object-name"
  };
  s3.getObject(params, function(err, data) {
    if (err) {
      callback(err, null);
    } else {
      let image = Buffer.from(data.Body).toString('base64');
      image = "data:" + data.ContentType + ";base64," + image;
      let response = {
        statusCode: 200,
        headers: {
          "Access-Control-Allow-Origin": "*",
          "Content-Type": data.ContentType
        },
        body: image,
        isBase64Encoded: true
      };
      callback(null, response);
    }
  });
};
You're looking for the getObject() method.
Assuming you are using the aws-sdk, you can use the getObject method.
Here is some sample code:
exports.getObjects = function (req, res) {
  var item = req.body;
  var params = { Bucket: req.params.bucketName, Key: 'keyname' }; // keyname can be a filename
  s3.getObject(params, function (err, data) {
    if (err) {
      return res.send({ "error": err });
    }
    res.send({ data });
  });
};
A better and faster approach is piping the stream to the response. This works with the Minio S3 client, but I believe it also works with the AWS JavaScript client.
const Minio = require('minio');

const s3Client = new Minio.Client({
  endPoint: 'ep',
  accessKey: 'ak',
  secretKey: 'sk'
});

router.get('/image/:imageId', (req, res) => {
  const { imageId } = req.params;
  s3Client.getObject('bucket', imageId, (err, stream) => {
    if (err) return res.status(500).send(err);
    const contentType = stream.headers['content-type'];
    contentType && res.setHeader('Content-Type', contentType);
    stream.pipe(res);
  });
});
This is what I use with aws-sdk
const params = { Bucket: "YOUR_BUCKET_NAME", Key: "YOUR_FILENAME"};
s3.getObject(params).createReadStream().pipe(res);
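For completeness, a sketch of that one-liner dropped into the route from the question, with a basic error handler on the stream; config.bucket is taken from the question's setup:

router.get('/image/:imageId', (req, res) => {
  const params = { Bucket: config.bucket, Key: req.params.imageId };
  s3.getObject(params)
    .createReadStream()
    .on('error', (err) => res.status(500).send(err.message)) // e.g. NoSuchKey
    .pipe(res);
});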
