I am working on a Node.js RESTful API and trying to upload a file to S3 using multer, but it's not working and I am not getting any error either.
Here is the code from my controller:
var aws = require('aws-sdk')
var express = require('express')
var multer = require('multer')
var multerS3 = require('multer-s3')
var bodyParser = require('body-parser')
var uuid = require('uuid').v4;
aws.config.update({
secretAccessKey: '',
accessKeyId: '' ,
region: 'us-west-2'
});
var app = express();
var s3 = new aws.S3();
app.use(bodyParser.json());
var upload = multer({
storage: multerS3({
s3: s3,
bucket: 'stack',
key: function (req, file, cb) {
console.log(file);
cb(null, req.s3key)
}
})
})
var fileUpload = upload.array('attachments',1);
function uploadToS3(req, res){
req.s3key = uuid();
let downloadUrl = 'https://s3-us-west-2.amazonaws.com/stack/'+req.s3key;
return new Promise((resolve,reject) => {
return fileUpload(req, res, err=> {
if(err) return reject(err);
return resolve(downloadUrl)
})
})
}
exports.uploadImagetoS3 = (req, res) => {
uploadToS3(req,res).then(downloadUrl=> {
console.log(downloadUrl);
});
}
What am I missing here?
Can you add a catch block for error logging? If anything is failing, it will then show up in the log.
exports.uploadImagetoS3 = (req, res) => {
uploadToS3(req, res).then(downloadUrl => {
console.log(downloadUrl);
}).catch(error => {
console.log(error);
});
}
I am using Ionic v3, and Node.js for the backend.
var storage = multer.diskStorage({
destination: function(req, file, callback) {
callback(null, './uploads')
},
filename: function(req, file, callback) {
console.log(file)
callback(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname))
}
});
var upload = multer({storage: storage});
To call this method we need req and res from an API call, like below:
upload(req, res, function(err) {
res.end('File is uploaded')
});
My question is: is it possible to call this upload function without an API call (req, res)? If yes, how can we do it?
What I want to do is this: I am developing a chat application using Ionic 2 and Node.js where I can share images. Those images should be uploaded to the server side. How can I do this with socket programming?
If you want to upload the image as base64, you can use the following code:
socket.on('Upload_Image_base64', function(data)
{
var fs = require('fs');
var img = data.image64;
var imgname = new Date().getTime().toString();
imgname = data.userid + imgname + '.png';
var base64Data = img.replace(/^data:image\/\w+;base64,/, "");
var buf = Buffer.from(base64Data, 'base64'); // Buffer.from instead of the deprecated new Buffer()
fs.writeFile(__dirname + "/uploads/" + imgname, buf, function(err) { if (err) console.log(err); });
});
// here data.image64 and data.userid are the parameters we pass with the socket event request.
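For reference, a hypothetical client-side emit for the handler above could look like this (the socket.io client, the file input and the user id are assumptions, not part of the answer):
// Hypothetical client sketch: read a File from an <input type="file"> as a data URL
// and emit it in the shape the server handler above expects.
var socket = io('http://localhost:3000'); // adjust to your server URL
var reader = new FileReader();
reader.onload = function () {
    socket.emit('Upload_Image_base64', {
        userid: currentUserId,   // placeholder: your own user id
        image64: reader.result   // "data:image/png;base64,...."
    });
};
reader.readAsDataURL(fileInput.files[0]); // fileInput is assumed to be an <input type="file">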
For a multipart upload, this may help you:
socket.on('Upload_Image', function(data)
{
var app = require('express')();
var multer = require('multer')
var storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, 'uploads/')
},
filename: function (req, file, cb) {
cb(null, file.fieldname + '-' + Date.now())
}
})
var upload = multer({ storage: storage }).single(data.file);
console.log(data.file);
app.post('/', function (req, res)
{
upload(req, res, function(err)
{
if(err)
{
console.log("error uploading file");
}
else
{
console.log("uploaded");
}
});
});
});
I'm able to upload an image to S3. Now, if the selected file is a .gif, I want to convert the .gif to .mp4 and upload the converted file to S3. I can convert a .gif to .mp4 with ffmpeg, but only if I give it the path of the file. How do I access the uploaded file from Multer? Below is my code:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');
var s3 = new aws.S3();
var ffmpeg = require('fluent-ffmpeg');
var upload = multer({
storage: multerS3({
s3: s3,
bucket: 'myBucket',
key: function (req, file, cb) {
console.log(file);
var extension = file.originalname.substring(file.originalname.lastIndexOf('.')+1).toLowerCase();
if(extension=="gif"){
console.log("Uploaded a .gif file");
ffmpeg(file) //THIS IS NOT WORKING
.setFfmpegPath("C:\\ffmpeg\\bin\\ffmpeg.exe")
.output('./outputs/2.mp4') //TRYING TO UPLOAD LOCALLY, WHICH FAILS
.on('end', function() {
console.log('Finished processing');
})
.run();
}
cb(null, filename);
}
})
});
I'm trying to access the uploaded file like this: ffmpeg(file), since file is an argument passed to the multer key function.
My form :
<form action="/upload" method="post" enctype="multipart/form-data">
<input type="file" name="file"> <br />
<input type="submit" value="Upload">
</form>
In which part of the process do I convert the file?
Please help. Many thanks.
You are trying to process locally a file that's on S3. The file needs to be on your server's file system, or at the very least be publicly available on S3. So you have two options here.
a) You could first upload all the files to the server where Express is running (not to S3; first we store them temporarily). If the file is a .gif, process it and upload the resulting .mp4 file; otherwise upload it to S3 as-is. Here's a working example:
var fs = require('fs')
var path = require('path')
var express = require('express');
var bodyParser = require('body-parser');
var aws = require('aws-sdk');
var multer = require('multer');
var ffmpeg = require('fluent-ffmpeg');
var shortid = require('shortid');
aws.config.update(/* your config */);
var app = express();
var s3 = new aws.S3();
var bucket = 'myBucket';
var upload = multer({
storage: multer.diskStorage({
destination: './uploads/',
filename: function (req, file, cb){
// use shortid.generate() alone if no extension is needed
cb( null, shortid.generate() + path.parse(file.originalname).ext);
}
})
});
//----------------------------------------------------
app.post('/upload', upload.single('file'), function (req, res, next) {
var fileInfo = path.parse(req.file.filename);
if(fileInfo.ext === '.gif'){
var videoPath = 'uploads/' + fileInfo.name + '.mp4';
ffmpeg(req.file.path)
.setFfmpegPath("C:\\ffmpeg\\bin\\ffmpeg.exe")
.output(videoPath)
.on('end', function() {
console.log('[ffmpeg] processing done');
uploadFile(videoPath, fileInfo.name + '.mp4');
})
.run();
}
else {
uploadFile(req.file.path, req.file.filename);
}
res.end();
});
//----------------------------------------------------
function uploadFile(source, target){
fs.readFile(source, function (err, data) {
if (!err) {
var params = {
Bucket : bucket,
Key : target,
Body : data
};
s3.putObject(params, function(err, data) {
if (!err) {
console.log('[s3] file uploaded:');
console.log(data);
fs.unlink(source, function(err) { if (err) console.log(err); }); // optionally delete the file
}
else {
console.log(err);
}
});
}
});
}
app.listen(3000);
b) Alternatively if you're ok with making your s3 files public you could upload them all using multer-s3. Since ffmpeg also accepts network locations as input paths you can pass it the s3 location of your .gif files and then upload the converted .mp4 files:
var fs = require('fs')
var path = require('path')
var express = require('express');
var bodyParser = require('body-parser');
var aws = require('aws-sdk');
var multer = require('multer');
var ffmpeg = require('fluent-ffmpeg');
var multerS3 = require('multer-s3');
aws.config.update(/* your config */);
var app = express();
var s3 = new aws.S3();
var bucket = 'myBucket';
var upload = multer({
storage: multerS3({
s3: s3,
bucket: bucket,
key: function (req, file, cb) {
cb(null, file.originalname);
},
acl: 'public-read'
})
});
//----------------------------------------------------
app.post('/upload', upload.single('file'), function (req, res, next) {
var fileInfo = path.parse(req.file.originalname);
if(fileInfo.ext === '.gif'){
var videoPath = 'uploads/' + fileInfo.name + '.mp4';
ffmpeg(req.file.location)
.setFfmpegPath("C:\\ffmpeg\\bin\\ffmpeg.exe")
.output(videoPath)
.on('end', function() {
console.log('[ffmpeg] processing done');
uploadFile(videoPath, fileInfo.name + '.mp4');
})
.run();
}
res.end();
})
//----------------------------------------------------
function uploadFile(source, target){
fs.readFile(source, function (err, data) {
if (!err) {
var params = {
Bucket : bucket,
Key : target,
Body : data,
ContentType : 'video/mp4'
};
s3.putObject(params, function(err, data) {
if (!err) {
console.log('[s3] file uploaded:');
console.log(data);
fs.unlink(source, function(err) { if (err) console.log(err); }); // optionally delete the file
}
else {
console.log(err);
}
});
}
});
}
app.listen(3000);
For both examples, remember to create an uploads/ folder and use your own AWS configuration.
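If it helps, a minimal setup sketch for that step (credentials and region are placeholders):
// Make sure the uploads/ folder exists and the AWS SDK is configured before the code above runs.
var fs = require('fs');
var aws = require('aws-sdk');

if (!fs.existsSync('./uploads')) {
    fs.mkdirSync('./uploads');
}

aws.config.update({
    accessKeyId: 'YOUR_ACCESS_KEY_ID',         // placeholder
    secretAccessKey: 'YOUR_SECRET_ACCESS_KEY', // placeholder
    region: 'us-west-2'                        // placeholder region
});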
I am trying to upload an image to amazon s3 using multer-s3, but I am getting this error:
TypeError: Expected opts.s3 to be object
node_modules/multer-s3/index.js:69:20
This is my server code:
var upload = multer({
storage: s3({
dirname: '/',
bucket: 'bucket',
secretAccessKey: 'key',
accessKeyId: 'key',
region: 'us-west-2',
filename: function (req, file, cb) {
cb(null, file.originalname);
}
})
});
app.post('/upload', upload.array('file'), function (req, res, next) {
res.send("Uploaded!");
});
Why am I getting this error?
[Update Mar 2022] This still works fine to date, and it now also returns the uploaded file's public URL.
A complete and working Node Cheat | Upload to s3 using multer-s3 is available.
Code:
var express = require('express'),
aws = require('aws-sdk'),
bodyParser = require('body-parser'),
multer = require('multer'),
multerS3 = require('multer-s3');
aws.config.update({
secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
accessKeyId: 'XXXXXXXXXXXXXXX',
region: 'us-east-1'
});
var app = express(),
s3 = new aws.S3();
app.use(bodyParser.json());
var upload = multer({
storage: multerS3({
s3: s3,
acl: 'public-read',
bucket: 'bucket-name',
key: function (req, file, cb) {
console.log(file);
cb(null, file.originalname); //use Date.now() for unique file keys
}
})
});
//open in browser to see upload form
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');//index.html is inside node-cheat
});
//used by the upload form
app.post('/upload', upload.array('upl', 25), function (req, res, next) {
res.send({
message: "Uploaded!",
urls: req.files.map(function(file) {
return {url: file.location, name: file.key, type: file.mimetype, size: file.size};
})
});
});
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
For complete repo:
Clone node-cheat express_multer_s3, run npm install express body-parser aws-sdk multer multer-s3, then run node app.
Happy Helping!
@V31 has answered very well; still, I want to add my 2 cents.
I believe in keeping one responsibility in one file, for better code organization and easier debugging.
I have created a file for uploading, upload.js:
require('dotenv').config();
const AWS = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const s3Config = new AWS.S3({
accessKeyId: process.env.AWS_IAM_USER_KEY,
secretAccessKey: process.env.AWS_IAM_USER_SECRET,
Bucket: process.env.AWS_BUCKET_NAME
});
const fileFilter = (req, file, cb) => {
if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png') {
cb(null, true)
} else {
cb(null, false)
}
}
// this is just to test locally if multer is working fine.
const storage = multer.diskStorage({
destination: (req, res, cb) => {
cb(null, 'src/api/media/profiles')
},
filename: (req, file, cb) => {
cb(null, new Date().toISOString() + '-' + file.originalname)
}
})
const multerS3Config = multerS3({
s3: s3Config,
bucket: process.env.AWS_BUCKET_NAME,
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function (req, file, cb) {
console.log(file)
cb(null, new Date().toISOString() + '-' + file.originalname)
}
});
const upload = multer({
storage: multerS3Config,
fileFilter: fileFilter,
limits: {
fileSize: 1024 * 1024 * 5 // we are allowing only 5 MB files
}
})
exports.profileImage = upload;
It is imported inside my routes file, routes.js:
const express = require('express');
const ProfileController = require('../profile/controller');
const { profileImage } = require('../utils/upload.js');
const routes = (app) => {
const apiRoutes = express.Router();
const profileRoutes = express.Router();
apiRoutes.use('/profile', profileRoutes);
profileRoutes.post('/', profileImage.single('profileImage'), ProfileController.saveProfile);
app.use('/api', apiRoutes);
}
module.exports = routes
Postman screenshot for the POST body: form-data with a single file field named profileImage.
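For readers without the screenshot, a hypothetical browser-side equivalent of that request could look like this (the endpoint path follows routes.js above; the element id is illustrative):
// Hypothetical client sketch: one multipart file field named "profileImage",
// matching profileImage.single('profileImage') in the route above.
const input = document.querySelector('#profileImage'); // an <input type="file">
const formData = new FormData();
formData.append('profileImage', input.files[0]);

fetch('/api/profile', {
    method: 'POST',
    body: formData // the browser sets the multipart/form-data header and boundary itself
});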
I just want to add my two cents.
There are many comments on the answers asking how to get the public URL after uploading and how to get the S3 response object, so let's look at the implementation and the cases.
// INITIALIZE NPMS
var AWS = require('aws-sdk'),
multer = require('multer'),
multerS3 = require('multer-s3'),
path = require('path');
// CONFIGURATION OF S3
AWS.config.update({
secretAccessKey: '***********************************',
accessKeyId: '****************',
region: 'us-east-1'
});
// CREATE OBJECT FOR S3
const S3 = new AWS.S3();
// CREATE MULTER FUNCTION FOR UPLOAD
var upload = multer({
// CREATE MULTER-S3 FUNCTION FOR STORAGE
storage: multerS3({
s3: S3,
acl: 'public-read',
// bucket - WE CAN PASS SUB FOLDER NAME ALSO LIKE 'bucket-name/sub-folder1'
bucket: 'bucket-name',
// META DATA FOR PUTTING FIELD NAME
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
// SET / MODIFY ORIGINAL FILE NAME
key: function (req, file, cb) {
cb(null, file.originalname); // set a unique file name if you wish, e.g. using new Date().toISOString()
// EXAMPLE 1
// cb(null, Date.now() + '-' + file.originalname);
// EXAMPLE 2
// cb(null, new Date().toISOString() + '-' + file.originalname);
}
}),
// SET DEFAULT FILE SIZE UPLOAD LIMIT
limits: { fileSize: 1024 * 1024 * 50 }, // 50MB
// FILTER OPTIONS LIKE VALIDATING FILE EXTENSION
fileFilter: function(req, file, cb) {
const filetypes = /jpeg|jpg|png/;
const extname = filetypes.test(path.extname(file.originalname).toLowerCase());
const mimetype = filetypes.test(file.mimetype);
if (mimetype && extname) {
return cb(null, true);
} else {
cb("Error: Allow images only of extensions jpeg|jpg|png !");
}
}
});
There are three cases if we want to retrieve the file response object from S3 after upload:
Case 1: When we use the .single(fieldname) method, it returns the file object in req.file
app.post('/upload', upload.single('file'), function (req, res, next) {
console.log('Uploaded!');
res.send(req.file);
});
Case 2: When we use the .array(fieldname[, maxCount]) method, it returns the file objects in req.files
app.post('/upload', upload.array('file', 1), function (req, res, next) {
console.log('Uploaded!');
res.send(req.files);
});
Case 3: When we use the .fields(fields) method, it returns the file objects in req.files
app.post('/upload', upload.fields([
{ name: 'avatar', maxCount: 1 },
{ name: 'gallery', maxCount: 8 }
]), function (req, res, next) {
console.log('Uploaded!');
res.send(req.files);
});
s3 needs to be passed as an object (an aws.S3 instance). According to the docs, the options need to look like this:
var upload = multer({
storage: multerS3({
s3: s3,
bucket: 'some-bucket',
metadata: function (req, file, cb) {
cb(null, {fieldName: file.fieldname});
},
key: function (req, file, cb) {
cb(null, Date.now().toString())
}
})
})
MulterS3 Docs
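In other words, the credentials belong on the aws-sdk client, and the resulting aws.S3 instance is what you pass as s3. A minimal sketch adapting the options from the question (keys, region and bucket are placeholders):
// Sketch only: create an aws.S3 instance and hand it to multer-s3, instead of
// putting the credentials inside the storage options.
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

aws.config.update({
    accessKeyId: 'key',         // placeholder
    secretAccessKey: 'key',     // placeholder
    region: 'us-west-2'
});
var s3 = new aws.S3();

var upload = multer({
    storage: multerS3({
        s3: s3,                 // an S3 instance, which is what the error message asks for
        bucket: 'bucket',
        key: function (req, file, cb) {
            cb(null, file.originalname);
        }
    })
});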
/*** Using Multer to upload an image to local disk storage */
const fileStorage = multer.diskStorage({
destination: function(req, file, cb) {
cb(null, "./public/uploads");
},
filename: function(req, file, cb) {
cb(null, file.originalname);
}
});
/** AWS config */
aws.config.update({
secretAccessKey: process.env.SECRET_KEY,
accessKeyId: process.env.ACCESS_KEY,
region: "us-east-1"
});
const s3 = new aws.S3();
const awsStorage = multerS3({
s3: s3,
bucket: process.env.BUCKET_NAME,
key: function(req, file, cb) {
console.log(file);
cb(null, file.originalname);
}
});
const upload = multer({
storage: awsStorage,
/** in the line above, if you are using local storage in the ./public/uploads folder then use
******* storage: fileStorage,
* if you are using aws s3 bucket storage then use
******* storage: awsStorage,
*/
limits: { fileSize: 5000000 },
fileFilter: function(req, file, cb) {
checkFileType(file, cb);
}
});
app.post("/user-profile-image", upload.single("profile"), (req, res, err) => {
try {
res.send(req.file);
} catch (err) {
res.send(400);
}
});
const checkFileType = (file, cb) => {
const filetypes = /jpeg|jpg|png|gif/;
const extname = filetypes.test(path.extname(file.originalname).toLowerCase());
const mimetype = filetypes.test(file.mimetype);
if (mimetype && extname) {
return cb(null, true);
} else {
cb("Error: Images Only!");
}
};
// here is the function for uploading images to an aws s3 bucket using multer
const path = require('path');
const fs = require('fs');
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
aws.config.update({
secretAccessKey: '**************************',
accessKeyId: '********************',
region: '**********************'
});
const s3 = new aws.S3();
const storage = multerS3({
s3: s3,
bucket: 'bucket-name',
key: function(req, file, cb) {
console.log(file);
cb(null, file.originalname);
}
})
//export the created function
exports.uploadVideo = multer({ storage: storage }).single('file_name');
//================================================================================
//import the uploadVideo function whenever you need to upload a file to the aws s3 bucket
const { uploadVideo } = require('../../services/upload');
exports.videoUpload = (req, res) => {
uploadVideo(req, res, function(err) {
if (err) {
console.log(err);
res.json({ status: 401, msg: err.message });
} else {
const image = getImagePath(req.file.filename);
res.json({ status: 200, msg: 'Image uploaded successfully.', data: image });
}
});
}
//================================================================================
//here is route file
router.post('/video-upload',uploadController.videoUpload);
I was passing S3 to multer in caps, like
S3: {object}
Changing it to lowercase s3 worked for me:
s3: {object}
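For illustration, a minimal sketch of the working shape (bucket name is a placeholder):
// Sketch: the option key must be lowercase `s3`, and its value an aws.S3 instance.
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

var upload = multer({
    storage: multerS3({
        s3: new aws.S3(),   // lowercase `s3`, not `S3`
        bucket: 'your-bucket',
        key: function (req, file, cb) {
            cb(null, file.originalname);
        }
    })
});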
We can upload image/CSV/Excel files to AWS S3 using multer-s3.
I'm using the .single(fieldname) method for uploading a single file.
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const s3 = new aws.S3({
accessKeyId: process.env.AWS_ACCESS_KEY,
secretAccessKey: process.env.AWS_SECRET_KEY,
region: process.env.REGION,
});
const upload = multer({
storage: multerS3({
s3: s3,
bucket: process.env.AWS_S3_BUCKET,
metadata: function (req, file, cb) {
cb(null, {fieldName: 'Meta_Data'});
},
key: function (req, file, cb) {
cb(null, file.originalname);
}
}),
limits: {
fileSize: 1024 * 1024 * 5 // allow only 5 MB files
}
}).single('file');
exports.uploadfile = async (req, res, next) => {
try {
upload(req, res, function(err) {
if (err) {
console.log(err);
}
console.log(req.file.location);
});
} catch (err) {
res.status(400).json({
status: 'fail',
message: err.message
});
}
}
In the routes file:
router.route('/')
.post(imageController.uploadfile);
I am at a loss as to what I am doing wrong; here is what I have:
HTML
<html>
<body>
<form method="POST" action="/upload" enctype="multipart/form-data">
<div class="field">
<label for="image">Image Upload</label>
<input type="file" name="image" id="image">
</div>
<input type="submit" class="btn" value="Save">
</form>
</body>
</html>
Port 5000 is my Node.js server's port.
In this example I am using POST to /upload, and it works fine.
module.exports = function(app, models) {
var fs = require('fs');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.post('/upload', function(req, res){
var params = {
Bucket: 'makersquest',
Key: 'myKey1234.png',
Body: "Hello"
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
}
Now I want to upload the file that I am POSTing, which is where the problem arises.
module.exports = function(app, models) {
var fs = require('fs');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.post('/upload', function(req, res){
var path = req.files.image.path;
fs.readFile(path, function(err, file_buffer){
var params = {
Bucket: 'makersquest',
Key: 'myKey1234.png',
Body: file_buffer
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
});
}
The error I get is:
TypeError: Cannot read property 'path' of undefined
As a matter of fact, req.files is completely empty.
I am assuming I am missing something pretty obvious, but I can't seem to find it.
You will need something like multer to handle multipart uploading.
Here is an example streaming your file upload to s3 using aws-sdk.
var multer = require('multer');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.use(multer({ // https://github.com/expressjs/multer
dest: './public/uploads/',
limits : { fileSize:100000 },
rename: function (fieldname, filename) {
return filename.replace(/\W+/g, '-').toLowerCase();
},
onFileUploadData: function (file, data, req, res) {
// file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
var params = {
Bucket: 'makersquest',
Key: file.name,
Body: data
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
}
}));
app.post('/upload', function(req, res){
if(req.files.image !== undefined){ // `image` is the field name from your form
res.redirect("/uploads"); // success
}else{
res.send("error, no file chosen");
}
});
Simple S3 File Upload Without Multer
var express = require('express')
const fileUpload = require('express-fileupload');
const app = express();
app.use(fileUpload());
var AWS = require('aws-sdk');
app.post('/imageUpload', async (req, res) => {
AWS.config.update({
accessKeyId: "ACCESS-KEY", // Access key ID
secretAccessKey: "SECRET-ACCESS-KEY", // Secret access key
region: "us-east-1" //Region
})
const s3 = new AWS.S3();
// Binary data of the uploaded file (from req.files, provided by express-fileupload)
const fileContent = Buffer.from(req.files.uploadedFileName.data, 'binary');
// Setting up S3 upload parameters
const params = {
Bucket: 'BUCKET-NAME',
Key: "test.jpg", // File name you want to save as in S3
Body: fileContent
};
// Uploading files to the bucket
s3.upload(params, function(err, data) {
if (err) {
throw err;
}
res.send({
"response_code": 200,
"response_message": "Success",
"response_data": data
});
});
})
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
[Update Mar 2022] Supports multiple file uploads at a time, and returns the uploaded file(s)' public URL(s) too.
Latest Answer # Dec-2016 [New]
Use multer-s3 for multipart uploading to s3 without saving to local disk, as follows:
var express = require('express'),
aws = require('aws-sdk'),
bodyParser = require('body-parser'),
multer = require('multer'),
multerS3 = require('multer-s3');
aws.config.update({
secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
accessKeyId: 'XXXXXXXXXXXXXXX',
region: 'us-east-1'
});
var app = express(),
s3 = new aws.S3();
app.use(bodyParser.json());
var upload = multer({
storage: multerS3({
s3: s3,
acl: 'public-read',
bucket: 'bucket-name',
key: function (req, file, cb) {
console.log(file);
cb(null, file.originalname); //use Date.now() for unique file keys
}
})
});
//open in browser to see upload form
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
});
//used by the upload form
app.post('/upload', upload.array('upl', 25), function (req, res, next) {
res.send({
message: "Uploaded!",
urls: req.files.map(function(file) {
return {url: file.location, name: file.key, type: file.mimetype, size: file.size};
})
});
});
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
Latest Answer # Mar-2016 [Old-One]
Edit 1: use multer@1.1.0 and multer-s3@1.4.1 for the following snippet:
var express = require('express'),
bodyParser = require('body-parser'),
multer = require('multer'),
s3 = require('multer-s3');
var app = express();
app.use(bodyParser.json());
var upload = multer({
storage: s3({
dirname: '/',
bucket: 'bucket-name',
secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
accessKeyId: 'XXXXXXXXXXXXXXX',
region: 'us-east-1',
filename: function (req, file, cb) {
cb(null, file.originalname); //use Date.now() for unique file keys
}
})
});
//open in browser to see upload form
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
});
//use by upload form
app.post('/upload', upload.array('upl'), function (req, res, next) {
res.send("Uploaded!");
});
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
For a complete running example, clone the express_multer_s3 repo and run node app.
You need something like multer in your set of middleware to handle multipart/form-data for you and populate req.files. From the docs:
var express = require('express')
var multer = require('multer')
var app = express()
app.use(multer({ dest: './uploads/'}))
Now req.files.image.path should be populated in your app.post function.
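A minimal sketch of how the handler from the question could then pick up the temp file and push it to S3, reusing the fs, s3, bucket and key names already shown there:
// Sketch only: read the temp file multer wrote to ./uploads/ and upload it.
app.post('/upload', function(req, res) {
    var path = req.files.image.path; // populated by multer
    fs.readFile(path, function(err, file_buffer) {
        if (err) { return res.status(500).send(err); }
        var params = {
            Bucket: 'makersquest',
            Key: 'myKey1234.png',
            Body: file_buffer
        };
        s3.putObject(params, function(perr, pres) {
            if (perr) { return res.status(500).send(perr); }
            res.send("Successfully uploaded data to myBucket/myKey");
        });
    });
});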
One of the easy ways to upload your image is to use the NPM package Multer.
You can upload an image to S3 and then store its name in your database, so every time you want to fetch it you can generate a signed URL for that image. This is one of the ways to secure access to your S3 bucket.
For uploading an image you can do something like this:
const AWS = require("aws-sdk");
const express = require("express");
const multer = require("multer");
const crypto = require("crypto");
const cors = require("cors");
const {
S3Client,
PutObjectCommand
} = require("#aws-sdk/client-s3");
const app = express();
app.use(cors());
app.use(express.json());
const port = process.env.PORT || 3000
const storage = multer.memoryStorage();
const upload = multer({ storage: storage });
// Read the values from the .env file (dotenv is assumed to be installed)
require("dotenv").config();
const bucketName = process.env.BUCKET_NAME;
const bucketRegion = process.env.BUCKET_REGION;
const accessId = process.env.ACCESS_ID;
const secretAccessKey = process.env.SECRET_ACCESS_KEY;
// Create a client
const s3 = new S3Client({
credentials: {
accessKeyId: accessId,
secretAccessKey: secretAccessKey,
},
region: bucketRegion,
});
// This function generates unique name for our files
const generateFileName = (bytes = 32) =>
crypto.randomBytes(bytes).toString("hex");
// Notice the upload middleware.
// "image" is the same name that you will pass form your UI request
app.post('/', upload.single("image"), async (req, res) => {
// When you use multer the image can be accessed from req.file
let fileName = generateFileName()
let params = {
Bucket: bucketName,
Key: fileName,
Body: req.file.buffer ,
ContentType: req.file.mimetype,
ContentEncoding: 'base64',
};
const command = new PutObjectCommand(params);
await s3.send(command);
// before sending response you can save the 'fileName' in the DB of your choice
res.send('image uploaded')
})
app.listen(port, () => {
console.log(`app listening on port ${port}`)
})
Next, to get the signed URL for the image you can do as follows
// assuming other things are set as above snippet
const { GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
app.get('/', async (req, res) => {
// First you will get the image name that was saved in DB
// let's say it was called user_image.
let obj_params = {
Bucket: bucketName,
Key: user_image,
};
let command = new GetObjectCommand(obj_params);
const image_url = await getSignedUrl(
s3,
command,
{ expiresIn: 86400 } // seconds in a day
);
let response = {
success: true,
data: {
image_url
},
};
res.status(200).send(response);
})
Notes:
You might need to install some additional packages to make this work.
Make sure that in your API requests you set 'content-type': 'multipart/form-data' in the request headers.
In your API Gateway in front of S3, you might also need to set the Binary Media Type to multipart/form-data. More info on that in this link.
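As an illustration of the multipart/form-data point, a Node-side test request could look like this (the form-data and axios packages are assumptions, not part of the answer; URL and file path are placeholders):
// Sketch only: the Content-Type header, including the boundary, comes from form.getHeaders().
// The field name "image" matches upload.single("image") above.
const fs = require("fs");
const axios = require("axios");
const FormData = require("form-data");

const form = new FormData();
form.append("image", fs.createReadStream("./photo.png"));

axios.post("http://localhost:3000/", form, { headers: form.getHeaders() })
    .then((response) => console.log(response.data))
    .catch((error) => console.error(error.message));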
This Stack Overflow answer was the best one I found explaining exactly how to get Node to S3 working:
AWS Missing credentials when i try send something to my S3 Bucket (Node.js)
This, in addition to some more stuff I had to hack on, got it all working. In my situation I was using a MEAN stack application, so the Node file I was working with was a route file.
My aconfig.json file with the Amazon credentials looks like this:
{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }
The final contents of the route file look like the file pasted below.
router.post('/sendToS3', function(req, res) {
var fs = require('fs');
var multer = require('multer');
var AWS = require('aws-sdk');
var path = require('path');
var awsCredFile = path.join(__dirname, '.', 'aconfig.json');
console.log('awsCredFile is');
console.log(awsCredFile);
AWS.config.loadFromPath(awsCredFile);
var s3 = new AWS.S3();
var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});
var sampleFile = {
"_id" : 345345,
"fieldname" : "uploads[]",
"originalname" : "IMG_1030.JPG",
"encoding" : "7bit",
"mimetype" : "image/jpeg",
"destination" : "./public/images/uploads",
"filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
"path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
"size" : 251556,
"user" : "579fbe61adac4a8a73b6f508"
};
var filePathToSend = path.join(__dirname, '../public', sampleFile.path);
function uploadToS3(filepath, destFileName, callback) {
photoBucket
.upload({
ACL: 'public-read',
Body: fs.createReadStream(filepath),
Key: destFileName.toString(),
ContentType: 'application/octet-stream' // force download if it's accessed as a top location
})
// http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
.on('httpUploadProgress', function(evt) { console.log(evt); })
// http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
.send(callback);
}
multer({limits: {fileSize:10*1024*1024}});
console.log('filePathToSend is ');
console.log(filePathToSend);
uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
if (err) {
console.error(err);
return res.status(500).send('failed to upload to s3').end();
}
res.status(200)
.send('File uploaded to S3: '
+ data.Location.replace(/</g, '&lt;')
+ '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
.end();
});
console.log('uploading now...');
});
This took me a while to finally get working, but if you set up the route above, update the sampleFile JSON to point to a real file on your system, and hit it with Postman, it will publish a file to your S3 account.
Hope this helps