I am working on a Node.js RESTful API and trying to upload a file to S3 using multer, but it is not working, and I am not getting any error.
here is a code from my Controller
// Third-party dependencies.
var aws = require('aws-sdk');
var express = require('express');
var multer = require('multer');
var multerS3 = require('multer-s3');
var bodyParser = require('body-parser');
var uuid = require('uuid').v4;

// AWS SDK configuration. Credentials are intentionally blank in this sample;
// supply them (or rely on the environment / an IAM role) before running.
aws.config.update({
  secretAccessKey: '',
  accessKeyId: '',
  region: 'us-west-2'
});

// Express app with JSON body parsing, plus one shared S3 client.
var app = express();
var s3 = new aws.S3();
app.use(bodyParser.json());
// Multer middleware that streams each upload straight into the "stack"
// bucket. The object key comes from req.s3key, which the caller must set
// before invoking the middleware.
var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'stack',
    key: function (req, file, cb) {
      console.log(file);
      cb(null, req.s3key);
    }
  })
});

// Accept at most one file from the "attachments" form field.
var fileUpload = upload.array('attachments', 1);
// Runs the multer middleware for this request and resolves with the public
// download URL of the stored object (key is generated here via uuid()).
function uploadToS3(req, res) {
  req.s3key = uuid();
  const downloadUrl = 'https://s3-us-west-2.amazonaws.com/stack/' + req.s3key;
  return new Promise(function (resolve, reject) {
    fileUpload(req, res, function (err) {
      if (err) {
        reject(err);
      } else {
        resolve(downloadUrl);
      }
    });
  });
}
exports.uploadImagetoS3 = (req, res) => {
uploadToS3(req,res).then(downloadUrl=> {
console.log(downloadUrl);
});
}
What am i missing here??
You can add a catch block for error logging; if anything is failing, the error will then show up in the log.
`exports.uploadImagetoS3 = (req, res) => {
uploadToS3(req,res).then(downloadUrl=> {
console.log(downloadUrl);
}).catch(error=>{
console.log(error)
});`
}
When I upload a video file into an S3 bucket it takes a very long time. The file does get uploaded, so that is not the issue — but the upload time is very high even though my video file is only 30 MB. This is not an acceptable way to upload video files from my application: I have to upload some large video files into the S3 bucket that could be around 200 MB each, so I need a better way to upload these video files.
Here is my code snippet,
// Core dependencies.
const express = require('express');
const AWS = require('aws-sdk');
const fs = require('fs');
const fileType = require('file-type');
const bluebird = require('bluebird');
const multiparty = require('multiparty');
const cors = require('cors');

// Express app with CORS enabled for all routes, including preflight.
const app = express();
app.use(cors());
app.options('*', cors());

// configure the keys for accessing AWS
AWS.config.update({
  accessKeyId: "xxx",
  secretAccessKey: "yyy"
});

// configure AWS to work with promises
AWS.config.setPromisesDependency(bluebird);

// create S3 instance
const s3 = new AWS.S3();
// Uploads a buffer to S3 and returns the SDK's upload promise.
// `type` is a file-type result object ({ ext, mime }).
const uploadFile = (buffer, name, type) => {
  const params = {
    ACL: 'public-read',
    Body: buffer,
    Bucket: "bucket-name",
    ContentType: type.mime,
    // BUG FIX: interpolating the whole `type` object produced keys ending in
    // ".[object Object]"; use the extension field instead.
    Key: `${name}.${type.ext}`
  };
  return s3.upload(params).promise();
};
// Define POST route: parses the multipart body and forwards the first file
// under the "file" field to S3.
app.post('/test-upload', (request, response) => {
  const form = new multiparty.Form();
  response.header("Access-Control-Allow-Origin", "*");
  form.parse(request, async (error, fields, files) => {
    // FIX: respond with the parse error instead of throwing inside the async
    // callback, which would crash the process with nothing upstream to catch it.
    if (error) return response.status(400).send(String(error));
    try {
      const path = files.file[0].path;
      const buffer = fs.readFileSync(path);
      const type = fileType(buffer);
      const timestamp = Date.now().toString();
      const fileName = `test/${timestamp}-lg`;
      // BUG FIX: was `uploadile(...)` — a typo that made every request fail.
      const data = await uploadFile(buffer, fileName, type);
      console.log("Data", data);
      // BUG FIX: was `send(data) + " " + data`, which concatenated onto the
      // already-sent response object to no effect; send the data once.
      return response.status(200).send(data);
    } catch (error) {
      return response.status(400).send(error);
    }
  });
});
// Health-check route.
app.get('/', function(req, res){
  res.send('Hello!');
});

// FIX: log only once the server is actually listening — listener.address()
// returns null until the 'listening' event fires.
var listener = app.listen(9000, function () {
  console.log('Server up and running...' + listener.address().address);
});
Thanks in advance.
I have been able to set up a process to upload a single image at a time using NodeJS/Express/Amazon S3/ Multer. It works perfectly. I've been trying to change the code to allow users to upload more than one image at a time. So far I have been very unsuccessful. How would I change my code below to allow multiple images to be uploaded at once? Thanks!
// AWS credentials and region (redacted in this sample).
aws.config.update({
  secretAccessKey: '*****************',
  accessKeyId: '******',
  region: 'us-east-2'
});
var s3 = new aws.S3();

// Multer middleware that streams uploads to the "myfiles" bucket under
// uploads/<user id><timestamp>.<extension>.
var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'myfiles',
    key: function (req, file, cb) {
      // BUG FIX: split(".")[1] picks the wrong segment for names containing
      // more than one dot (e.g. "photo.2021.jpg" -> "2021"); pop() always
      // yields the final extension.
      var fileExtension = file.originalname.split(".").pop();
      var path = "uploads/" + req.user._id + Date.now() + "." + fileExtension;
      cb(null, path);
    }
  })
});
// NOTE(review): this excerpt is truncated ("......") — the remainder of the
// handler is not visible here.
// To accept several images, raise the multer limit: upload.array('image', N).
router.post("/", upload.array('image', 1), function(req, res, next){
var filepath = undefined;
// Only the first uploaded file's S3 key is recorded here.
if(req.files[0]) {
filepath = req.files[0].key;
}......
You have done the hard part; all you have to do is modify your HTML file input so that it accepts multiple files, like so:
<input type="file" name="img" multiple>
and change the number of files in the array to the maximum number of files you want to upload,
from
upload.array('image', 1)
to
upload.array('image', x)
where (x) is the maximum number of files per upload
EDIT1 : update
Here is kind of full example & to avoid "too large entity issue"
// App bootstrap with parsers sized to avoid "request entity too large".
var express = require("express");
var app = express();
var multer = require('multer');
var bodyParser = require('body-parser'); // BUG FIX: used below but was never required
var cookieParser = require('cookie-parser');
var path = require('path');

var router = express.Router();
app.use("/", router);

// Raise the JSON body limit for large payloads.
app.use(bodyParser.json({limit: "50mb"}));
app.use(cookieParser());

// URL-encoded parser with generous limits for large multipart forms.
var urlencodedParser = bodyParser.urlencoded({
  extended: true,
  parameterLimit: 50000
});
// Log every requested URL, then hand control to the next middleware.
router.use(function (req, res, next) {
  console.log('Request URL: ', req.originalUrl);
  next();
});
// Files are stored in the "uploads" folder; the HTML file input is named "uploadedFile".
app.post('/your/route', urlencodedParser, function (req, res) {
  var storage = multer.diskStorage({
    destination: function (req, file, callback) {
      callback(null, './uploads');
    },
    filename: function (req, file, callback) {
      // Unique on-disk name: <field>-<timestamp><original extension>.
      // FIX: use the computed name once; the original built `fname` and then
      // ignored it, calling Date.now() a second time.
      var fname = file.fieldname + '-' + Date.now() + path.extname(file.originalname);
      callback(null, fname);
    }
  });

  // Accept up to 3 files per request.
  var upload_photos = multer({
    storage: storage
  }).array('uploadedFile', 3);

  upload_photos(req, res, function (err) {
    // BUG FIX: the original callback ignored `err` and never responded,
    // leaving the client hanging on both success and failure.
    if (err) {
      return res.status(400).send(err.message || 'Upload failed');
    }
    res.send('Files uploaded');
  });
});
https://www.npmjs.com/package/multer-s3
You can use this npm package for AWS S3 uploads. I have used it myself and it saves files without any issue.
I am trying to clean up my controllers by modularizing my multer module functions, which allow me to upload multiple files with my forms. The current code presented below works, but I want put the multer function in a new file. I assumed that simply taking the function and exporting it from another file would work, but for some reason I'm hitting a time out during my form POST, despite calling the console.log at the start of the multer code. It appears that something within that code is stopping the process, but no error message is thrown. Does anyone see what might be causing the error?
Multer called within route file:
// Route-file copy of the multer/S3 configuration.
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');
var moment = require('moment');
// Upload date is computed once at module load and used as part of every S3 key.
var uploadDate = new moment().format("YYYY-MM-DD");
var s3 = new aws.S3();
var options = {
Bucket: process.env.AWS_BUCKET,
Expires: 60
};
// NOTE(review): `app` is not defined in this excerpt — presumably it comes
// from the enclosing route file; verify before extracting this block.
if(app.get('env') === 'production' || app.get('env') === 'staging'){
options.ACL = 'private'
} else {
options.ACL = 'public-read'
};
////////// Multer middleware: streams uploads straight to S3 (max 5 files).
var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: options.Bucket,
    contentType: multerS3.AUTO_CONTENT_TYPE,
    acl: options.ACL,
    key: function (req, file, cb) {
      // Key layout: <organization id>/<upload date>/<sanitized original name>.
      var fileNameFormatted = file.originalname.replace(/\s+/g, '-').toLowerCase();
      cb(null, req.session.organizationId + '/' + uploadDate + '/' + fileNameFormatted);
    }
  }),
  fileFilter: function (req, file, cb) {
    if (!file.originalname.match(/\.(jpg|jpeg|png|gif|csv|xls|xlsb|xlsm|xlsx)$/)) {
      // FIX: reject with a real Error object — multer treats the first
      // callback argument as the error, and string errors lose the stack.
      return cb(new Error('One of your selected files is not supported'), false);
    }
    cb(null, true);
  }
}).array('fileUpload', 5);
///////Post route calling multer and rest of form submission
.post(function(req, res){
upload(req, res, function(){
if(err){
console.log('Multer upload error');
req.flash('error', err);
res.redirect(req.get('referer'));
return;
}
... MORE CODE ...
})
});
/controllers/multer.js, which contains the multer code:
// /controllers/multer.js — standalone copy of the multer configuration.
var express = require('express');
var app = express();
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');
var moment = require('moment');

var s3 = new aws.S3();
var uploadDate = new moment().format("YYYY-MM-DD");

var options = {
  Bucket: process.env.AWS_BUCKET,
  Expires: 60,
  //ContentType: req.query.file_type
};

// Private ACL in deployed environments, public-read for local development.
var isDeployed = app.get('env') === 'production' || app.get('env') === 'staging';
options.ACL = isDeployed ? 'private' : 'public-read';
module.exports = {
//Annotation file upload
annotationFileUpload: function(){
multer({
storage: multerS3({
s3: s3,
bucket: options.Bucket,
contentType: multerS3.AUTO_CONTENT_TYPE,
acl: options.ACL,
key: function(req, file, cb){
console.log("key called");
var fileNameFormatted = file.originalname.replace(/\s+/g, '-').toLowerCase();
cb(null, req.session.organizationId + '/' + uploadDate + '/' + fileNameFormatted);
}
}),
fileFilter: function(req, file, cb){
console.log("file filter called");
if(!file.originalname.match(/\.(jpg|jpeg|png|gif|csv|xls|xlsb|xlsm|xlsx)$/)){
return cb('One of your selected files is not supported', false);
}
cb(null, true);
}
}).array('fileUpload', 5);
}
}
Importing /controller/multer and calling route:
// Import the modularized multer middleware and call it inside the route.
var multerFile = require('./components/multer');
// NOTE(review): this excerpt is chained onto a route definition not shown
// here, and "....MORE CODE...." stands in for the rest of the handler.
.post(function(req, res){
multerFile.annotationFileUpload(req, res, function(err){
if(err){
console.log('Multer upload error');
req.flash('error', err);
res.redirect(req.get('referer'));
return;
}
....MORE CODE....
})
});
You need to get rid of the wrapper function, since multer() returns the middleware function itself. The reason it times out is because you're calling a function that just creates the middleware and immediately discards it and then does nothing else (like responding to the request).
So change this:
// BEFORE: the wrapper builds the middleware and immediately discards it.
annotationFileUpload: function(){
multer({
// ...
}).array('fileUpload', 5);
}
to this:
// AFTER: export the middleware function that multer(...).array(...) returns.
annotationFileUpload: multer({
// ...
}).array('fileUpload', 5)
I am at a loss of what I am doing wrong, here is what I have:
HTML
<html>
<body>
<!-- enctype="multipart/form-data" is required so the file bytes are sent;
     the server must parse it with multipart middleware (e.g. multer) for
     req.files to be populated. -->
<form method="POST" action="/upload" enctype="multipart/form-data">
<div class="field">
<label for="image">Image Upload</label>
<input type="file" name="image" id="image">
</div>
<input type="submit" class="btn" value="Save">
</form>
</body>
</html>
Port 5000 is my Node.js server's port.
In this example I am using POST to /upload, and it works fine.
module.exports = function(app, models) {
var fs = require('fs');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.post('/upload', function(req, res){
var params = {
Bucket: 'makersquest',
Key: 'myKey1234.png',
Body: "Hello"
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
}
Now I want to post the file that I am POSTing, which is where the problem arises.
module.exports = function(app, models) {
var fs = require('fs');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.post('/upload', function(req, res){
var path = req.files.image.path;
fs.readFile(path, function(err, file_buffer){
var params = {
Bucket: 'makersquest',
Key: 'myKey1234.png',
Body: file_buffer
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
});
}
The error I get is:
TypeError: Cannot read property 'path' of undefined
As a matter of fact files is completely empty.
I am assuming I am missing something pretty obvious but I can't seem to find it.
You will need something like multer to handle multipart uploading.
Here is an example streaming your file upload to s3 using aws-sdk.
// NOTE(review): this uses the multer 0.x options API (dest/limits/rename/
// onFileUploadData passed straight to multer(), mounted with app.use).
// multer 1.x removed these hooks in favor of storage engines — confirm the
// installed multer version before reusing this snippet.
var multer = require('multer');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.use(multer({ // https://github.com/expressjs/multer
dest: './public/uploads/',
limits : { fileSize:100000 },
rename: function (fieldname, filename) {
return filename.replace(/\W+/g, '-').toLowerCase();
},
// Invoked with each file's buffered data once fully received; streams it to S3.
onFileUploadData: function (file, data, req, res) {
// file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
var params = {
Bucket: 'makersquest',
Key: file.name,
Body: data
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
}
}));
app.post('/upload', function(req, res){
if(req.files.image !== undefined){ // `image` is the field name from your form
res.redirect("/uploads"); // success
}else{
res.send("error, no file chosen");
}
});
Simple S3 File Upload Without Multer
// S3 file upload using express-fileupload (no multer).
var express = require('express');
const fileUpload = require('express-fileupload');
const app = express();
app.use(fileUpload());
var AWS = require('aws-sdk');

app.post('/imageUpload', async (req, res) => {
  AWS.config.update({
    accessKeyId: "ACCESS-KEY", // Access key ID
    secretAccessKey: "SECRET-ACCESS-KEY", // BUG FIX: was "secretAccesskey" (lowercase k), which the SDK silently ignores
    region: "us-east-1" //Region
  });
  const s3 = new AWS.S3();
  // Binary data base64
  const fileContent = Buffer.from(req.files.uploadedFileName.data, 'binary');
  // Setting up S3 upload parameters
  const params = {
    Bucket: 'BUKET-NAME',
    Key: "test.jpg", // File name you want to save as in S3
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params, function (err, data) {
    if (err) {
      // FIX: respond with the error instead of `throw err`, which inside a
      // callback crashes the process without ever answering the client.
      return res.status(500).send({
        "response_code": 500,
        "response_message": err.message
      });
    }
    res.send({
      "response_code": 200,
      "response_message": "Success",
      "response_data": data
    });
  });
});

app.listen(3000, function () {
  console.log('Example app listening on port 3000!');
});
[Update Mar 2022] Supports multiple file uploads at a time, and returns the uploaded file(s)' public URL(s) too.
Latest Answer # Dec-2016 [New]
Use multer-s3 for multipart uploading to s3 without saving on local disk as:
// Dependencies.
var express = require('express');
var aws = require('aws-sdk');
var bodyParser = require('body-parser');
var multer = require('multer');
var multerS3 = require('multer-s3');

// AWS credentials and region.
aws.config.update({
  secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
  accessKeyId: 'XXXXXXXXXXXXXXX',
  region: 'us-east-1'
});

var app = express();
var s3 = new aws.S3();
app.use(bodyParser.json());

// Streams each uploaded file straight to S3 (no local disk) under its
// original name, with a public-read ACL.
var upload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: 'bucket-name',
    key: function (req, file, cb) {
      console.log(file);
      cb(null, file.originalname); //use Date.now() for unique file keys
    }
  })
});
// GET / serves the upload form in the browser.
app.get('/', function (req, res) {
  res.sendFile(__dirname + '/index.html');
});

// POST /upload accepts up to 25 files from the "upl" field and echoes each
// stored object's public URL, key, mime type and size.
app.post('/upload', upload.array('upl', 25), function (req, res, next) {
  var uploaded = req.files.map(function (file) {
    return { url: file.location, name: file.key, type: file.mimetype, size: file.size };
  });
  res.send({ message: "Uploaded!", urls: uploaded });
});

app.listen(3000, function () {
  console.log('Example app listening on port 3000!');
});
Latest Answer # Mar-2016 [Old-One]
Edited 1 use multer#1.1.0 and multer-s3#1.4.1 for following snippet:
// multer#1.1.0 / multer-s3#1.4.1 era API: credentials are handed straight
// to the multer-s3 storage engine.
var express = require('express');
var bodyParser = require('body-parser');
var multer = require('multer');
var s3 = require('multer-s3');

var app = express();
app.use(bodyParser.json());

var upload = multer({
  storage: s3({
    dirname: '/',
    bucket: 'bucket-name',
    secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    accessKeyId: 'XXXXXXXXXXXXXXX',
    region: 'us-east-1',
    filename: function (req, file, cb) {
      cb(null, file.originalname); //use Date.now() for unique file keys
    }
  })
});
// Serve the upload form in the browser.
app.get('/', function (req, res) {
  res.sendFile(__dirname + '/index.html');
});

// Handle submissions from the upload form.
app.post('/upload', upload.array('upl'), function (req, res, next) {
  res.send("Uploaded!");
});

app.listen(3000, function () {
  console.log('Example app listening on port 3000!');
});
For complete running example clone express_multer_s3 repo and run node app.
You need something like multer in your set of middleware to handle multipart/form-data for you and populate req.files. From the doco:
// NOTE(review): multer 0.x usage — app.use(multer({...})) stopped working in
// multer 1.x, where multer() returns route-level middleware instead.
var express = require('express')
var multer = require('multer')
var app = express()
app.use(multer({ dest: './uploads/'}))
One of the easy ways to upload your image is to use an NPM package Multer
You can upload an image to S3 and then store its name in your database so every time you want to fetch it you can generate a signed URL for that image. This is one of the ways to secure access to your S3 bucket.
For uploading an image you can do something like this
const AWS = require("aws-sdk");
const express = require("express");
const multer = require("multer");
const crypto = require("crypto");
const cors = require("cors");
// BUG FIX: the package scope is "@aws-sdk", not "#aws-sdk" (the "@" was
// mangled by the site's markdown rendering).
const {
  S3Client,
  PutObjectCommand
} = require("@aws-sdk/client-s3");

const app = express();
app.use(cors());
app.use(express.json());
const port = process.env.PORT || 3000;

// Keep uploads in memory so req.file.buffer is available to send to S3.
const storage = multer.memoryStorage();
const upload = multer({ storage: storage });

// Read the values from .env file
const bucketName = process.env.BUCKET_NAME;
const bucketRegion = process.env.BUCKET_REGION;
const accessId = process.env.ACCESS_ID;
const secretAccessKey = process.env.SECRET_ACCESS_KEY;

// Create a client
const s3 = new S3Client({
  credentials: {
    accessKeyId: accessId,
    secretAccessKey: secretAccessKey,
  },
  region: bucketRegion,
});

// This function generates unique name for our files
const generateFileName = (bytes = 32) =>
  crypto.randomBytes(bytes).toString("hex");
// Notice the upload middleware.
// "image" is the same name that you will pass form your UI request.
// BUG FIX: the handler must be declared `async` because it awaits s3.send();
// as originally written, the bare `await` was a syntax error.
app.post('/', upload.single("image"), async (req, res) => {
  // When you use multer the image can be accessed from req.file
  // (the original used a Python-style "#" comment here, which is invalid JS).
  let fileName = generateFileName();
  let params = {
    Bucket: bucketName,
    Key: fileName,
    Body: req.file.buffer,
    ContentType: req.file.mimetype,
    // NOTE(review): req.file.buffer holds raw bytes, not base64 text —
    // confirm whether ContentEncoding: 'base64' is actually wanted here.
    ContentEncoding: 'base64',
  };
  const command = new PutObjectCommand(params);
  await s3.send(command);
  // before sending response you can save the 'fileName' in the DB of your choice
  res.send('image uploaded');
});

app.listen(port, () => {
  console.log(`app listening on port ${port}`)
});
Next, to get the signed URL for the image you can do as follows
// assuming other things are set as above snippet
// BUG FIX: "@aws-sdk" package scope (the "@" was mangled to "#"), and the
// handler must be declared `async` to use await.
const { GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

app.get('/', async (req, res) => {
  // First you will get the image name that was saved in DB
  // lets say it was called user_image.
  let obj_params = {
    Bucket: bucketName,
    Key: user_image,
  };
  let command = new GetObjectCommand(obj_params);
  // FIX: declare the variable instead of creating an implicit global.
  const image_url = await getSignedUrl(
    s3,
    command,
    { expiresIn: 86400 } // seconds in a day
  );
  let response = {
    success: true,
    data: {
      image_url
    },
  };
  res.status(200).send(response);
});
Note:
Note that you might need to install some packages to make it work.
Make sure in your API requests you are setting 'content-type': 'multipart/form-data' in request headers
In your API gateway in S3, you might also need to set the Binary Media Type as multipart/form-data. More info on that in this link
This stack overflow was the best answer I found explaining exactly how to get Node to S3 working.
AWS Missing credentials when i try send something to my S3 Bucket (Node.js)
This in addition to some more stuff I had to hack on to get it all working. In my situation I was using a MEAN stack application so my Node file I was working with was a route file.
my aconfig.json file with the amazon credentials looks like this:
{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }
The final contents of the route file look like the file pasted below.
// Demo route: streams a hard-coded local file to S3 and echoes its URL.
router.post('/sendToS3', function(req, res) {
  var fs = require('fs');
  var multer = require('multer');
  var AWS = require('aws-sdk');
  var path = require('path');

  // Load credentials from aconfig.json sitting next to this route file.
  var awsCredFile = path.join(__dirname, '.', 'aconfig.json');
  console.log('awsCredFile is');
  console.log(awsCredFile);
  AWS.config.loadFromPath(awsCredFile);

  var s3 = new AWS.S3();
  var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});

  // Hard-coded stand-in for a real multer file record; point "path" at an
  // existing file before hitting this route.
  var sampleFile = {
    "_id" : 345345,
    "fieldname" : "uploads[]",
    "originalname" : "IMG_1030.JPG",
    "encoding" : "7bit",
    "mimetype" : "image/jpeg",
    "destination" : "./public/images/uploads",
    "filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
    "path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
    "size" : 251556,
    "user" : "579fbe61adac4a8a73b6f508"
  };
  var filePathToSend = path.join(__dirname, '../public', sampleFile.path);

  // Streams the file to S3, logging progress; callback receives (err, data).
  function uploadToS3(filepath, destFileName, callback) {
    photoBucket
      .upload({
        ACL: 'public-read',
        Body: fs.createReadStream(filepath),
        Key: destFileName.toString(),
        ContentType: 'application/octet-stream' // force download if it's accessed as a top location
      })
      // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
      .on('httpUploadProgress', function(evt) { console.log(evt); })
      // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
      .send(callback);
  }

  // FIX: removed the original bare `multer({limits: {fileSize:10*1024*1024}});`
  // statement — it built a multer instance and discarded it, a no-op.

  console.log('filePathToSend is ');
  console.log(filePathToSend);

  uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
    if (err) {
      console.error(err);
      return res.status(500).send('failed to upload to s3').end();
    }
    // BUG FIX: the escape targets were themselves HTML-escaped in the paste
    // (replace(/</g, '<') is a no-op); restore the intended HTML entities.
    res.status(200)
      .send('File uploaded to S3: '
        + data.Location.replace(/</g, '&lt;')
        + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
      .end();
  });
  console.log('uploading now...');
});
This took me a while to finally get working, but if you setup the route below, update the sampleFile JSON to point to a real file on your system and hit it with Postman it will publish a file to your S3 account.
Hope this helps