I am trying to upload images to Amazon S3. However, req.files is still undefined, and I don't understand where my code is going wrong. The images are sent from an Angular frontend; 'artFileLocations' is an array of files in FormData. How do I fix this?
const router = require('express').Router()
const AWS = require('aws-sdk')
const multer = require('multer');
const multerS3 = require('multer-s3');

// Configure the AWS SDK with credentials from the environment.
AWS.config.update({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});

const s3 = new AWS.S3();

// Unique name of the AWS S3 bucket created
const myBucket = 'nish.images';

// Multer upload (use multer-s3 to stream directly to AWS instead of saving locally)
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: myBucket,
    // Set public read permissions
    acl: 'public-read',
    // Use the original uploaded file name as the S3 object key
    key: function (req, file, cb) {
      cb(null, file.originalname)
    }
  })
})

// Accept up to 3 files under the multipart field name 'artFileLocations'.
// NOTE: req.files is only populated when the request body is
// multipart/form-data and the FormData field name matches exactly.
router.post('/', upload.array('artFileLocations', 3), function (req, res) {
  console.log('Reaching addPainting')
  console.log(req.body, req.files)
  // BUG FIX: the original handler never sent a response, leaving the
  // client request hanging. Report the stored S3 locations instead.
  res.json({ files: (req.files || []).map((f) => f.location) })
});

module.exports = router;
Your code looks fine,
but can you update your HTML file like this:
<input type="file" name="artFileLocations" multiple>
If you have already updated your HTML file like this, then I don't think you will face any other errors.
Try uploading single file and check whether your req.files is working or not. I also encountered this error where the multiple file upload was not working. So, check if the req.files is working for single file upload.
Related
Im using multer-s3 to upload a PDF to S3 bucket, and it works, but now I need to display the PDF without downloading it (just like this) and I already tryied to use iframe, embed and object, each one with the src='link_to_s3_file' and when I access the page it just download the file.
That's my code to configure the upload:
const multer = require('multer')
const multerS3 = require('multer-s3')
const AWS = require('aws-sdk')

// Configure AWS credentials and region from the environment.
AWS.config.update({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION
})

const s3 = new AWS.S3()

// multer instance that streams uploads straight to S3.
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: process.env.AWS_S3_BUCKET,
    acl: 'private',
    // FIX: 'attachment' tells the browser to download the file;
    // 'inline' lets the browser render the PDF directly in the page.
    contentDisposition: 'inline',
    // Detect the MIME type from the upload so PDFs are stored as
    // application/pdf, which browsers need in order to display them inline.
    contentType: multerS3.AUTO_CONTENT_TYPE,
    metadata: function (req, file, cb) {
      cb(null, { fieldName: file.fieldname });
    },
    // Keep the original file name as the S3 object key.
    key: function (req, file, cb) {
      cb(null, file.originalname)
    }
  })
})
How can I display the PDF instead of download it?
In order to open PDF directly, without downloading it, you should change 2 things:
ContentDisposition='inline'
ContentType='application/pdf'
If you're sure that multerS3.AUTO_CONTENT_TYPE will recognize it correctly, you might not have to change this, but ContentDisposition is for sure one thing you have to change. attachment means that it will download it and inline is to open it in browser directly.
I am using the node.js as a backend and needs to upload the video on Amazon S3.
For that I am using the multer module but I need to know the efficient and standard way for uploading the video.
Generally we see that when we upload anything on the any good platfrom then there is a proper mechanism for uploading the video like:
When video is on uploading state, user get the response that how much percent is left for uploading the video
After uploading the video user get the response that video is uploaded successfully.
There is a handler which allow specific type of format to allow video.
There is a few limit size also which warn the user that maximum size is 20mb or 50 mb.
I have been struggling to find a good tutorial but have been unable to, as everyone shares tutorials about image uploads. So I thought I would raise this question, which will also help many others with the same problem.
I have implemented the process by which video will be uploaded on S3 but not getting the response after upload. It directly return the response and video will be uploaded in background.
I am sharing my implementation:
customapi.js file
const express = require('express');
const router = express.Router();
const helper = require('./file-upload');
const videoHandler = require('./videohandler');

// Post the video.
// BUG FIX: videohandler.js exports `uploadVid`, not `uploadVideo`;
// referencing the wrong property passed `undefined` as the route handler.
// (Also removed the stray space in the property access.)
router.post(
  '/uploadvideo',
  helper.single('media'),
  videoHandler.uploadVid
);
file-upload.js file
const AWS = require('aws-sdk')
const multer = require('multer')
const multerS3 = require('multer-s3')
const uuid = require('uuid/v1');

// Configure AWS credentials from the environment.
AWS.config.update({
  accessKeyId: process.env.keyId,
  secretAccessKey: process.env.accessKey,
});

const s3 = new AWS.S3();

// multer instance that streams the video straight to S3 (no local disk copy).
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: process.env.bucketname,
    acl: 'public-read',
    contentType: multerS3.AUTO_CONTENT_TYPE,
    metadata: function (req, file, cb) {
      cb(null, { fieldName: file.fieldname })
    },
    key: function (req, file, cb) {
      console.log(file) // This will print the filename which we can search in s3.
      // BUG FIX: the original called uuid() twice, so the logged key was a
      // *different* uuid from the key actually stored. Generate it once.
      const objectKey = uuid() + file.originalname
      cb(null, objectKey)
      console.log(objectKey)
    }
  })
})
videohandler.js file
// Handler invoked after the multer-s3 middleware has already streamed the
// file to S3; by the time this runs, the upload itself is complete.
const uploadVid = async (req, res) => {
  try {
    return res.send({ message: "Done" });
  } catch (error) {
    console.log(error);
    return res.send({ message: "Error" });
  }
};

module.exports = { uploadVid };
I know what is happening exactly by which I am getting the response instantly instead of after the file upload.
When the API is called -> helper.single('media') is invoked and starts performing the upload -> meanwhile videoHandler.uploadVideo is also called, which directly sends the response, as there is no callback that tracks the uploading process and returns the response accordingly.
Please share the best and efficient way as it supports many people also who is struggling for uploading the video.
Any help or suggestion is really appreciated.
Update Question
Many people sharing the way that video first upload on local disk and then it upload on S3. I need to know that is it a good behaviour. I mean for performing the task we need to do the double work instead of directly upload. It will consume the bandwidth and storage of the application on server.
I could handle it in this way. check whether is it applicable to your scenario.
App.js
// Middleware that runs the single-file upload for field 'file' and converts
// any upload failure into an Express error passed to next().
const handleFileUpload = (req, res, next) => {
  const singleUpload = UploadController.upload.single('file');
  singleUpload(req, res, (err) => {
    if (err) {
      return next(new Error('Image upload error'));
    }
    return next();
  });
};

router.post("/upload_service", handleFileUpload, SomeController.createMethod);
UploadController.js
const multer = require('multer')
const multerS3 = require('multer-s3')
const AWS = require('aws-sdk')
const upload = multer({
storage: multerS3({
s3: new AWS.S3(),
bucket: 'bucket_name',
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function (req, file, cb) {
const file_name_timestamp = Date.now().toString()
cb(null, `path/${file_name_timestamp}.${String(file.mimetype).split('/').pop()}`);
},
ContentType: "application/octet-stream",
})
})
module.exports = {
upload
}
SomeController.js
// Runs after the upload middleware has stored the file; multer-s3 populates
// req.file with the stored object's metadata (key, location, ...).
const createMethod = async function (req, res) {
  if (!req.file) {
    // BUG FIX: the original fell through after res.send and kept executing;
    // return so the handler stops here.
    return res.send('File missing')
  }
  // BUG FIX: the original never responded on success, leaving the client
  // hanging. Acknowledge with the stored file's details.
  return res.json({ key: req.file.key, location: req.file.location })
}
Once the file upload middleware succeeds, SomeController.createMethod will be triggered; the uploaded file's data is then available via req.file.
I have application on Node. API endpoint for sending files to AWS contains this part
const multer = require('multer');
const aws = require('aws-sdk');
const multerS3 = require('multer-s3');

// AWS settings shipped with the project config (require() caches the module,
// so reading it once here is equivalent to the repeated lookups).
const awsKeys = require("../config/keys").aws;

aws.config.update({
  secretAccessKey: awsKeys.key,
  accessKeyId: awsKeys.id,
  region: 'eu-central-1'
});

const s3Client = new aws.S3();

// Storage engine that streams uploads straight to S3, keyed by the
// millisecond timestamp at which the upload was received.
const s3Storage = multerS3({
  s3: s3Client,
  bucket: awsKeys.bucket,
  key: (req, file, cb) => {
    cb(null, Date.now().toString());
  }
});

module.exports = multer({ storage: s3Storage });
I call it in route this way
const upload = require('../../middleware/multer-aws');
router.post("/add", upload.any('file'), (req, res) => {
...
When I test loading files on localhost it works well, but when I do the same on EC2 instance I have 403 Forbidden response.
In my code I don't return any 403 status codes, so it definitely comes from AWS.
I can't get where is the problem and how to solve it. Any ideas?
This looks like a NACL issue — outbound HTTP access might be blocked from the EC2 instance. If the AWS access key and secret key work perfectly from your local machine and upload files to S3, it should have worked from EC2 as well, so please check the NACL configuration.
Issue solved. I checked pm2 logs, found message about InvalidAccessKeyId. Checked env variable and found, that I messed key and id on prod.
I'm facing issues for uploading local images to my google cloud storage.
I've already tried two methods. The first one is uploading with multer
// Store uploads on the local disk under ./uploads/, naming each file
// "<field name>-<timestamp>".
var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, './uploads/')
  },
  filename: (req, file, cb) => {
    cb(null, file.fieldname + '-' + Date.now())
  }
});

var upload = multer({ storage: storage }).single('image');

app.post('/upload', function (req, res, next) {
  upload(req, res, (err) => {
    if (err) {
      console.log(err)
      // BUG FIX: the original never responded on error, leaving the
      // client hanging.
      return res.status(500).send('Upload failed')
    }
    // NOTE: .single() populates req.file (not req.files); it stays
    // undefined unless the request is multipart/form-data with an
    // 'image' field.
    console.log(req.file)
    // BUG FIX: acknowledge the upload so the request completes.
    res.json({ file: req.file })
  })
})
Then, i've tried directly with GCS
// Direct-upload attempt via the Firebase Admin SDK's GCS bucket handle.
// NOTE(review): this snippet appears truncated — `file` is never defined,
// the .then() body is empty, and the app.post callback is not closed.
var bucket = admin.storage().bucket('mybucket')
app.post('/upload',function(req,res,next){
bucket
// presumably `file` should be the parsed upload payload — TODO confirm
.save(file)
.then(() => {
})
for both of these solutions , req.files is always undefined whereas req.body is a buffer like this :
<Buffer 2d 2d 2d 2d ...>
When I try to save this buffer to my GCS bucket, the .jpg/.png file is created in the bucket, but it is corrupted.
I'm browsing the web seeking for a solution but i found nothing that helped me to overcome this situation.
Any advice ?
You need multer, multer-google-storage and, of course, bodyParser if you have additional form values. You need to send the data as multipart/form-data.
In your .env file
GCS_BUCKET = <bucket name>
GCLOUD_PROJECT = <project id>
GCS_KEYFILE = <key file location>
You can download key file from GCP Console>Your Project>I AM & Admin>Service Accounts
In your route
const multer = require('multer');
const multerGoogleStorage = require("multer-google-storage");

// multer instance backed by Google Cloud Storage; the bucket, project id
// and key file are read from the GCS_* environment variables listed above.
var uploadHandler = multer({
  storage: multerGoogleStorage.storageEngine()
});

router.post('/', uploadHandler.single('image'), function (req, res, next) {
  const body = req.body;
  // res.json serialises and ends the response; the original's extra
  // res.end() afterwards was redundant.
  res.json({ fileName: req.file.filename });
// BUG FIX: the original was missing the closing ')' of router.post,
// which is a syntax error.
});
Documentation
Make sure you have added enctype="multipart/form-data" attribute to your form. A probable reason for req.files being undefined.
I have managed to upload pdf files on digital ocean spaces with a node js app as shown below.
I don't know how to then access those files and display them to the user. I got the code below from this tutorial, object storage file upload, but there isn't an example on how to then access the files.
When I just try to access them with their url, I just get white space.
I have made them public but still get absolutely nothing trying to access them from the url.
Is there a way to access the files using multer still, do I have to make a get request with the RESTFul API? How do I access files stored in a digital ocean spaces?
This is how I upload files
const aws = require("aws-sdk");
const multer = require("multer");
const multerS3 = require("multer-s3");

// Point the S3 client at the DigitalOcean Spaces endpoint (ams3 region).
const spaceEndPoint = new aws.Endpoint("ams3.digitaloceanspaces.com");
const s3 = new aws.S3({
  endpoint: spaceEndPoint
})

// Accept a single file under the multipart field name "upload" and stream
// it to the Space, keyed by its original file name.
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: "fileRepo",
    acl: "public-read",
    key: function (request, file, cb) {
      console.log(file);
      cb(null, file.originalname);
    }
  })
}).array("upload", 1);

// BUG FIX: file uploads arrive as multipart POST bodies, so this must be a
// POST route, and Express paths must start with '/' — "uploadFile" without
// the leading slash would never match a request.
router.post("/uploadFile", function (req, res) {
  upload(req, res, function (error) {
    if (error) {
      console.log(error);
      return res.redirect("/");
    }
    // BUG FIX: the original never responded on success, leaving the
    // client hanging; redirect home once the upload completes.
    res.redirect("/");
  });
})
And this is how I try to retrieve the PDFs
// Render the file viewer for a department's PDF stored on Spaces.
router.get("/contentPage", function (req, res) {
  // BUG FIX: GET requests generally carry no parsed body, so
  // req.body.department was undefined. Read the query string first and
  // fall back to the body for any caller that still sends one.
  var fileName = req.query.department || (req.body && req.body.department);
  var directory = "https://fileRepo.ams3.digitaloceanspaces.com/" + fileName + ".pdf";
  res.render("fileview", { dir: directory });
})
<!-- Embeds the PDF whose URL the route passes in as `dir`. The browser can
     only render it inline if the stored object has an appropriate
     Content-Type; otherwise it will be downloaded instead. -->
<div id="departmentListWrapper" class="container">
<embed src="<%= dir %>" width="800px" height="2100px" />
</div>
Can anyone tell me what the issue might be with retrieving these files?
Try this code snippet, works fine for me
import multer from 'multer';
import multerS3 from 'multer-s3';
// BUG FIX: the original used `aws` and `fs` without importing them,
// which throws a ReferenceError at runtime.
import aws from 'aws-sdk';
import fs from 'fs';

aws.config.update({
  accessKeyId: 'your access key',
  secretAccessKey: 'your secret key'
});

var filepath = "path of files folder"

// Create an S3 client setting the Endpoint to DigitalOcean Spaces
var spacesEndpoint = new aws.Endpoint('nyc3.digitaloceanspaces.com');
var s3 = new aws.S3({ endpoint: spacesEndpoint });

// NOTE(review): `bucketName` and `keyName` must be defined by the caller;
// they are not declared in this snippet.
var params = {
  Bucket: bucketName,
  Key: keyName,
  Body: fs.createReadStream(filepath),
  ACL: 'public-read'
};

// Upload one object; 'public-read' makes it reachable via its public URL.
s3.putObject(params, function (err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log(data);
  }
});
It turns out that digital ocean did not know what type of file was being uploaded, I had to make it so that the file type is read before it is uploaded so that the browser knew what type of file I was trying to upload by adding the line
contentType: multerS3.AUTO_CONTENT_TYPE
to the multer object so that it looks like this.
// Accept a single file under the multipart field name "upload".
const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: "fileRepo",
    // Detect the MIME type so browsers render (not just download) the file.
    // BUG FIX: the original was missing the comma after this property,
    // which is a syntax error.
    contentType: multerS3.AUTO_CONTENT_TYPE,
    acl: "public-read",
    key: function (request, file, cb) {
      console.log(file);
      cb(null, file.originalname);
    }
  })
}).array("upload", 1);