Upload Image directly to bucket on s3 amazon issue - node.js

I am a newbie to Amazon S3, and I am having trouble uploading an image directly from the frontend to an S3 bucket.
I have watched many tutorials, but none of them properly explain uploading an image directly to S3; they only show how to upload an image from the local computer.
I implemented the code below, but I cannot figure out how to upload an image without first saving it to my local machine.
Here is the code I implemented, with no luck so far:
s3ImageUpload: (req, res) => {
    var fs = require('fs');
    var multer = require('multer');
    var AWS = require('aws-sdk');
    var path = require('path');

    var awsCredFile = path.join(__dirname, '.', 'key.json');
    console.log('awsCredFile is');
    console.log(awsCredFile);

    AWS.config.loadFromPath(awsCredFile);
    var s3 = new AWS.S3();
    var photoBucket = new AWS.S3({params: {Bucket: 'testbucket'}});

    var sampleFile = {
        //"_id" : 345345,
        //"fieldname" : "uploads[]",
        //"originalname" : "female.JPG",
        //"encoding" : "base64",
        //"mimetype" : "image/jpeg",
        "destination" : "./public/images",
        "filename" : "female.jpeg",
        "path" : "/images/female.jpeg",
        //"size" : 251556,
        //"user" : "579fbe61adac4a8a73b6f508"
    };
    var filePathToSend = path.join(__dirname, '../public', sampleFile.path);

    function uploadToS3(filepath, destFileName, callback) {
        photoBucket
            .upload({
                ACL: 'public-read',
                Body: fs.createReadStream(filepath),
                Key: destFileName.toString(),
                ContentType: 'image/png'
            })
            // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
            .on('httpUploadProgress', function(evt) { console.log(evt); })
            // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
            .send(callback);
    }

    // note: this multer instance is created but never used
    multer({limits: {fileSize: 10 * 1024 * 1024}});

    console.log('filePathToSend is ');
    console.log(filePathToSend);

    uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
        if (err) {
            console.error(err);
            return res.status(500).send('failed to upload to s3').end();
        }
        res.status(200)
            .send('File uploaded to S3: '
                + data.Location.replace(/</g, '&lt;')
                + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
            .end();
    });
    console.log('uploading now...');
}

Pre-signed URLs could be a good solution for you.
On your backend, you sign a URL using the AWS SDK and send that URL back to the client. The client then uploads the file directly to S3 using this URL, so nothing has to be saved on your server.
https://docs.aws.amazon.com/AmazonS3/latest/dev/PresignedUrlUploadObject.html
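A minimal sketch of the signing step with the aws-sdk v2 used elsewhere on this page (the bucket, key, and expiry below are placeholder assumptions, not from the question):

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// Backend: sign a short-lived PUT URL for a known key and content type.
var signedUrl = s3.getSignedUrl('putObject', {
    Bucket: 'testbucket',          // placeholder bucket
    Key: 'uploads/female.jpeg',    // placeholder destination key
    ContentType: 'image/jpeg',
    Expires: 300                   // URL validity in seconds
});

// Send signedUrl to the client; the browser then uploads directly, e.g.:
// fetch(signedUrl, { method: 'PUT', headers: { 'Content-Type': 'image/jpeg' }, body: file });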

Related

Amazon S3 Bucket Policy Issue with Nodejs multer-s3

I've created an API to upload images to an Amazon S3 bucket with Node.js, multer, and multer-s3.
It works fine in development, returning a response with an image URL that is downloadable and accessible. But when I host my Node app on AWS Lambda (behind API Gateway), the API returns the same response with the image URL, yet this time when I open the downloaded image it shows INVALID FILE FORMATTED.
Here is my Code
const uploadImage = async (req, res) => {
    let myFile = req.file.originalname.split(".")
    const fileType = myFile[myFile.length - 1]
    const params = {
        Bucket: 'test-bucket2601',
        Key: `${Date.now()}.${fileType}`,
        Body: req.file.buffer
    }
    s3.upload(params, (error, data) => {
        if (error) {
            return res.status(500).send(error) // return so we don't also send the JSON below
        }
        res.json({data})
    })
}
Route with the middleware:
routes.route('/upload').post(upload, uploadImage);
In the post method, the first argument is the upload middleware.
Middleware Code:
const s3 = new aws.S3({
    credentials: {
        accessKeyId: awsKeys?.accessKeyId,
        secretAccessKey: awsKeys?.secretAccessKey
    }
});

// note: multer.memoryStorage takes no options, so this destination is ignored
const storage = multer.memoryStorage({
    destination: function(req, file, callback) {
        callback(null, '')
    }
})

const upload = multer({storage}).single('imageUrl')

How to upload image to S3 bucket locally and generating a thumbnail automatically?

Good evening.
I have this task: I have to upload an image to the S3 bucket using Node JS and generate a thumbnail on the go, not via a Lambda trigger. Everything should be done from my local machine's terminal or in a local server (Postman). I tried this code:
const fs = require('fs');
const AWS = require('aws-sdk'); // needed for AWS.S3 below (missing from the original snippet)

const ACESS_ID = 'A**********KV';
const SECRET_ID = 'G***********0';
const BUCKET_NAME = 'node-image-bucket';

// Initializing s3 interface
const s3 = new AWS.S3({
    accessKeyId: ACESS_ID,
    secretAccessKey: SECRET_ID,
});

// File reading function to S3
const uploadFile = (fileName) => {
    // Read content from the file
    const fileContent = fs.readFileSync(fileName);

    // Setting up S3 upload parameters
    const params = {
        Bucket: BUCKET_NAME,
        Key: 'scene2.jpg',
        Body: fileContent
    };

    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            throw err;
        }
        console.log(data);
        console.log(`File uploaded Successfully. ${data.Location}`);
    });
};

uploadFile('./images/bg-hd.jpg');
The code above works fine with a single image; the problem is that every time I upload a file to the S3 bucket I need to change the Key string in the S3 params.
I want to upload multiple images at once, use a buffer for performance, and have thumbnails created automatically in a different folder of the same bucket.
Could anyone help me, guys? Any help appreciated!
You cannot upload multiple files with one S3 operation, but you can use the sharp module (https://www.npmjs.com/package/sharp) to resize your image before calling the S3 API.

const sharp = require('sharp');

async function resize(buffer, width, height) {
    return sharp(buffer).resize(width, height).toBuffer();
}
const thumbnailWidthSize = 200;
const thumbnailWidthHeight = 200;
const thumbnailImage = await resize(fileContent, thumbnailWidthSize, thumbnailWidthHeight)
You can then reuse your current upload function and run it once per image size with different keys, wrapping the calls in Promise.all so the whole operation fails if any upload fails.

await Promise.all([
    s3upload(image, imageKey),
    s3upload(thumbnailImage, thumbnailImageKey)
])
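The s3upload helper used above is not defined in the answer; a minimal sketch of what it might look like, assuming the same s3 client and BUCKET_NAME from the question:

// Hypothetical wrapper: upload a buffer under the given key, return a promise.
function s3upload(body, key) {
    return s3.upload({
        Bucket: BUCKET_NAME,
        Key: key,
        Body: body
    }).promise();
}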
So, there are two parts to your question -
Converting the image to a thumbnail on the fly while uploading to the s3 bucket -
You can use the thumbd npm module and create a thumbd server.
Thumbd is an image thumbnailing server built on top of Node.js, SQS, S3, and ImageMagick.
Prerequisites for the thumbd server -
Thumbd requires the following environment variables to be set:
AWS_KEY the key for your AWS account (the IAM user must have access to the appropriate SQS and S3 resources).
AWS_SECRET the AWS secret key.
BUCKET the bucket to download the original images from. The thumbnails will also be placed in this bucket
AWS_REGION the AWS Region of the bucket. Defaults to: us-east-1.
CONVERT_COMMAND the ImageMagick convert command. Defaults to convert.
REQUEST_TIMEOUT how long to wait in milliseconds before aborting a remote request. Defaults to 15000.
S3_ACL the acl to set on the uploaded images. Must be one of private, or public-read. Defaults to private.
S3_STORAGE_CLASS the storage class for the uploaded images. Must be either STANDARD or REDUCED_REDUNDANCY. Defaults to STANDARD.
SQS_QUEUE the queue name to listen for image thumbnailing.
When running locally, I set these environment variables in a .env file and execute thumbd using pm2/forever/foreman.
Setup -
apt-get install imagemagick
npm install thumbd -g
thumbd install
thumbd start // Run thumbd as a server
After the thumbd server is up and running, refer to the code below to convert an image to a thumbnail while uploading it to the s3 bucket.
var aws = require('aws-sdk');
var url = require("url");

var awsS3Config = {
    accessKeyId: ACESS_ID,
    secretAccessKey: SECRET_ID,
    region: 'us-west-2'
}

var BUCKET_NAME = 'node-image-bucket';
var sourceImageDirectory = "/tmp/"
var imageUploadDir = "/thumbnails/"
var imageName = 'image.jpg'
var uploadImageName = 'image.jpg'

aws.config.update(awsS3Config);
var s3 = new aws.S3();

var Client = require('thumbd').Client,
    client = new Client({
        awsKey: awsS3Config.accessKeyId,
        awsSecret: awsS3Config.secretAccessKey,
        s3Bucket: BUCKET_NAME,
        sqsQueue: 'ThumbnailCreator',
        awsRegion: awsS3Config.region,
        s3Acl: 'public-read'
    });

export function uploadAndResize(sourceImageDirectory, imageName, imageUploadDir, uploadImageName) {
    return new Promise((resolve, reject) => {
        client.upload(sourceImageDirectory + imageName, imageUploadDir + uploadImageName, function(err) {
            if (err) {
                reject(err);
            } else {
                client.thumbnail(imageUploadDir + uploadImageName, [{
                    "suffix": "medium",
                    "width": 360,
                    "height": 360,
                    "background": "white",
                    "strategy": "%(command)s %(localPaths[0])s -resize %(width)sX%(height)s^ -gravity north -extent %(width)sX%(height)s %(convertedPath)s"
                }, {
                    "suffix": "thumb",
                    "width": 100,
                    "height": 100,
                    "background": "white",
                    "strategy": "%(command)s %(localPaths[0])s -resize %(width)sX%(height)s^ -gravity north -extent %(width)sX%(height)s %(convertedPath)s"
                }], {
                    //notify: 'https://callback.example.com'
                });

                var response = {};
                //https://s3-ap-us-west-2.amazonaws.com/node-image-bucket/1/5825c7d0-127f-4dac-b802-ca24efba2bcd-original.jpeg
                response.url = 'https://s3-' + awsS3Config.region + '.amazonaws.com/' + BUCKET_NAME + '/' + imageUploadDir;
                response.uploadImageName = uploadImageName;
                response.sourceImageName = imageName;
                resolve(response);
            }
        })
    })
}
Second, you wanted to upload multiple images without changing the key string each time -
Loop over the method below for all the files in a local path and you are good to go; a sketch of the loop follows the code.
var fs = require('fs'); // needed for readFile/unlink below (missing from the original snippet)

export function uploadFiles(localPath, localFileName, fileUploadDir, uploadFileName) {
    return new Promise((resolve, reject) => {
        fs.readFile(localPath + localFileName, function (err, file) {
            if (err) {
                return reject(err);
            }
            var params = {
                ACL: 'public-read',
                Bucket: BUCKET_NAME,
                Key: uploadFileName,
                Body: file
            };
            s3.upload(params, function (err, data) {
                fs.unlink(localPath + localFileName, function (err) {
                    if (err) {
                        reject(err);
                    } else {
                        resolve(data)
                    }
                });
            });
        });
    })
}
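A minimal sketch of that loop, assuming the images sit in a flat local directory (the directory path and key naming below are illustrative, not from the answer):

var localPath = '/tmp/images/';

// Reuse each file's own name as its S3 key, so nothing is hardcoded per upload.
var uploads = fs.readdirSync(localPath).map(function (name) {
    return uploadFiles(localPath, name, '/thumbnails/', name);
});

Promise.all(uploads)
    .then(function (results) { console.log('Uploaded', results.length, 'files'); })
    .catch(console.error);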

How do I set my images uploaded to S3 with nodejs script to display instead of download?

I have a node js script that uploads files to AWS S3 through the command line. The problem I'm having is that when I try to view the file in the browser, it automatically downloads instead.
I have done some research, and most other posts point to the headers, but I have verified the headers are correct (image/png).
Additionally, when I upload the same file through the AWS console (logged into AWS), I am able to view the file within the browser.
var fs = require('fs');
var path = require('path');
var AWS = require('aws-sdk'); // needed for AWS.config/AWS.S3 below (missing from the original snippet)

AWS.config.update({region: myRegion});
s3 = new AWS.S3({apiVersion: '2006-03-01'});

var uploadParams = {
    Bucket: process.argv[2],
    Key: '', // Key set below
    Body: '', // Body set below after createReadStream
    ContentType: 'image/jpeg',
    ACL: 'public-read',
    ContentDisposition: 'inline'
};

var file = process.argv[3];
var fileStream = fs.createReadStream(file);
fileStream.on('error', function(err) {
    console.log('File Error', err);
});
uploadParams.Body = fileStream;
uploadParams.Key = path.basename(file);

s3.putObject(uploadParams, function(errBucket, dataBucket) {
    if (errBucket) {
        console.log("Error uploading data: ", errBucket);
    } else {
        console.log(dataBucket);
    }
});
The upload succeeds, but I am unable to view the file in the browser because it auto-downloads.
You have to specify the Content-Disposition as part of the request headers; you cannot specify it as part of the request parameters. Set it explicitly in the headers as below.
var params = {Bucket: "bucketname", Key: "keyName", Body: "actualData"};
s3.putObject(params).
    on('build', function(req) {
        req.httpRequest.headers['Content-Type'] = 'application/pdf'; // Whatever you want
        req.httpRequest.headers['Content-Disposition'] = 'inline';
    }).
    send(function(err, data) {
        if (err) {
            console.log(err);
            return res.status(400).json({success: false});
        } else {
            console.log(data);
            return res.status(200).json({success: true});
        }
    });
Code to upload objects/images to s3
module.exports = function(app, models) {
    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId = "ACCESS KEY HERE";
    var secretAccessKey = "SECRET KEY HERE";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });
    var s3 = new AWS.S3();

    app.post('/upload', function(req, res) {
        var params = {
            Bucket: 'bucketname',
            Key: 'keyname.png',
            Body: "GiveSomeRandomWordOraProperBodyIfYouHave"
        };
        s3.putObject(params, function (perr, pres) {
            if (perr) {
                console.log("Error uploading data: ", perr);
            } else {
                console.log("Successfully uploaded data to myBucket/myKey");
            }
        });
    });
}
The code above will make sure the object is uploaded to s3. You can see it listed in the s3 bucket in the browser, but you can't view its contents there.
You cannot view items within S3 itself: S3 is a storage box, so you can only upload and download elements from it. If you need to view the contents, you have to download the file and view it in the browser or any viewer of your choice. If you simply need to list the objects in s3, use the code below.
Code to list objects of s3
var AWS = require('aws-sdk');
AWS.config.update({accessKeyId: 'mykey', secretAccessKey: 'mysecret', region: 'myregion'});
var s3 = new AWS.S3();

var params = {
    Bucket: 'bucketName',
    Delimiter: '/',
    Prefix: 's/prefix/objectPath/'
}

s3.listObjects(params, function (err, data) {
    if (err) throw err;
    console.log(data);
});
Use S3 list to list the elements of S3 so you can view them. Create a hyperlink for each listed item pointing to its s3 download URL; this way you can view it in the browser and also download it if you need to. A sketch of that idea follows.
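A sketch of building browser-viewable links from a listing, assuming a public bucket (the bucket name, region, and URL shape below are placeholders):

s3.listObjects({ Bucket: 'bucketName' }, function (err, data) {
    if (err) throw err;
    data.Contents.forEach(function (obj) {
        // Virtual-hosted URL for a public object; adjust to your bucket/region.
        var href = 'https://bucketName.s3.myregion.amazonaws.com/' + encodeURI(obj.Key);
        // Render this however your app serves HTML:
        console.log('<a href="' + href + '">' + obj.Key + '</a>');
    });
});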
If you need to view the contents via node JS, use the code below to load the image as if you were loading it from a remote URL.
Code to download contents:
var fs = require('fs'),
    request = require('request');

var download = function(uri, filename, callback) {
    request.head(uri, function(err, res, body) {
        console.log('content-type:', res.headers['content-type']);
        console.log('content-length:', res.headers['content-length']);
        request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
    });
};

download('https://s3/URL', 'name.png', function() {
    console.log('done');
});
Code to load image into a buffer :
const request = require('request');

let url = 'http://s3url/image.png';
request({ url, encoding: null }, (err, resp, buffer) => {
    // typeof buffer === 'object'
    // Use the buffer
    // This buffer now contains the image data
});
Use the above to load the image into a buffer. Once it's in a buffer, you can manipulate it however you need. The above code won't download the image to disk, but it helps you manipulate the image from s3 in memory.
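For example, a small sketch of such in-memory manipulation, assuming the sharp module mentioned in an earlier answer (not part of the original):

const sharp = require('sharp');

request({ url, encoding: null }, (err, resp, buffer) => {
    if (err) throw err;
    sharp(buffer)
        .resize(200, 200)
        .toBuffer()
        .then((thumb) => {
            // `thumb` holds the resized image and could be uploaded back with s3.upload
            console.log('thumbnail bytes:', thumb.length);
        });
});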
The linked example code contains specific Node JS samples for uploading and manipulating objects in s3; use it for reference.

Combine Jimp-module with Amazon AWS S3 in NodeJS

The following is what I want to achieve:
Upload a picture to my backoffice.
Resize this picture to reduce the image size.
Upload this picture to Amazon AWS S3.
I couldn't figure out how to store the picture directly to Amazon AWS S3, and therefore I upload it first to my backoffice.
My code:
var Jimp = require('jimp'); // needed for Jimp.read below (missing from the original snippet)

router.post('/fileupload', function(req, res) {
    // Prepare data
    var file = req.files.upfile;
    var uploadpath = 'profilepicture/' + req.session.user + '.jpg';
    var AWS = require('aws-sdk');
    var fs = require('fs');
    AWS.config.update({
        accessKeyId: **,
        secretAccessKey: **
    });

    // Upload file
    file.mv(uploadpath, function(err) {
        if (err) throw err;

        // Read in the file, resize it, store to S3
        Jimp.read(uploadpath, function (err, data) {
            if (err) throw err;

            // Reduce size
            data.resize(400, Jimp.AUTO).quality(100).write(uploadpath);

            var s3 = new AWS.S3();
            var stream = fs.createReadStream(uploadpath);
            s3.putObject({
                Bucket: bucketAmazon,
                Key: req.session.user + '.jpg',
                ContentType: 'image/jpg',
                Body: stream,
                ContentEncoding: 'base64',
                ACL: 'public-read',
                Metadata: {
                    'Content-Type': 'image/jpeg'
                }
            }, function (resp) {
                console.log(arguments);
                console.log('Successfully uploaded package.');
                return res.render('settings', {
                    user: req.session.user,
                    logged: true,
                    wrongPicture: false
                });
            });
        });
    });
});
However, when I run this code, the file is uploaded to my backoffice and cropped correctly, but in Amazon AWS S3 it shows that the size is '0 B'.
If I remove the line data.resize(400, Jimp.AUTO).quality(100).write(uploadpath), the file is uploaded correctly to Amazon AWS S3, but of course the picture is not reduced.
You can use the write callback to guarantee that the file is written before the upload starts.
...
data.resize(400, Jimp.AUTO).quality(100).write(uploadpath, () => {
    // upload to s3 code here
});
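For context, a fuller sketch of the same fix wired into the question's route (reusing its variables; the exact callback wiring is an assumption, not the answer's verbatim code):

Jimp.read(uploadpath, function (err, data) {
    if (err) throw err;
    data.resize(400, Jimp.AUTO).quality(100).write(uploadpath, function () {
        // The resized file is fully flushed to disk at this point,
        // so the read stream below no longer races the write.
        var s3 = new AWS.S3();
        s3.putObject({
            Bucket: bucketAmazon,
            Key: req.session.user + '.jpg',
            ContentType: 'image/jpeg',
            Body: fs.createReadStream(uploadpath),
            ACL: 'public-read'
        }, function (err, resp) {
            if (err) throw err;
            console.log('Successfully uploaded package.');
        });
    });
});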

Firebase storage: Image file types not showing after upload. How do I use the image?

When I use an npm package like google-cloud or multer-gcs to upload a file to a Firebase storage bucket (which uses Google Cloud Storage under the hood), the file gets uploaded. However, the file does not show an image type (MIME). Also, how do I use the image in my node.js application?
Here is the code to upload an image to the Firebase bucket, using multer to handle the file upload:
var path = require('path');
var multer = require('multer');
var gcs = require('multer-gcs');

var storage = gcs({
    filename: function(req, file, cb) {
        cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname).toLowerCase());
    },
    bucket: 'p****n-bcXX3.appspot.com',       // Required : bucket name to upload
    projectId: 'p****n-bcXX3',                // Required : Google project ID
    keyFilename: './p****n-5cbc725XXXXd.json', // Required : JSON credentials file for Google Cloud Storage
    acl: 'publicread'                         // Optional : Defaults to private
});
In my routes:
router.post('/food/create', [authChecker, gcsUpload.single('food_image')], function(req, res, next) {
    Controllers.create_food(req, res);
});
When I upload a file directly to the bucket, the type shows clearly. What's the catch here?
It looks like multer-gcs doesn't support adding custom metadata to a file. If you use gcloud directly, you'd do something like this:
var options = {
    destination: 'new-image.png',
    metadata: {
        contentType: 'image/png'
    }
};

bucket.upload('local-image.png', options, function(err, file) {
    // Your bucket now contains:
    // - "new-image.png" (with the contents of `local-image.png`)
    // `file` is an instance of a File object that refers to your new file.
});
I'd file an issue with the developer of multer-gcs and ask them to support metadata uploads alongside blob uploads, especially since gcloud already supports this.
Here is what I am doing now to handle the file upload:

var path = require('path');
var format = require('util').format; // both requires were implicit in the original snippet

req.file.modifiedname = (Math.random().toString(36) + '00000000000000000').slice(2, 10) + Date.now() + path.extname(req.file.originalname);

var options = {
    metadata: {
        contentType: req.file.mimetype
    },
    predefinedAcl: 'publicread'
};

var blob = bucket.file(req.file.modifiedname);
var blobStream = blob.createWriteStream(options);

blobStream.on('error', function (err) {
    return next(err);
});

blobStream.on('finish', function() {
    var publicUrl = format(
        'https://storage.googleapis.com/%s/%s',
        bucket.name, blob.name
    );
    req.file.gcsfileurl = publicUrl;
    console.log(req.file.gcsfileurl);
    next();
});

blobStream.end(req.file.buffer);
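Once req.file.gcsfileurl is set and next() is called, a downstream handler can use the image URL. A minimal sketch (the upload and uploadToGcs middleware names are hypothetical stand-ins for the code above):

router.post('/food/create',
    [authChecker, upload.single('food_image'), uploadToGcs],
    function (req, res) {
        // Persist req.file.gcsfileurl on your record, or serve it in an <img> tag.
        res.json({ imageUrl: req.file.gcsfileurl });
    });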
