How to disable restify.plugin.bodyParser for file uploads - node.js

This question is very similar to How to disable Express BodyParser for file uploads (Node.js). But the answer provided there is for Express; I have tried the same solution with Restify 7 and it does not seem to work.
I'm using Node.js + Restify to build a RESTful application. I am using bodyParser to parse POST parameters. However, I would like to have access to multipart form-data POSTs.
I use multer, multer-s3 and aws-sdk, and I want access to the uploaded data so I can send it to a DigitalOcean Space. But all file uploads are parsed automatically by restify.plugins.bodyParser.
Is there a way for me to disable the bodyParser for multipart form-data POSTs without disabling it for everything else?
Here is some example code:
const restify = require('restify');
const errors = require('restify-errors');
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');

const server = restify.createServer();
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.bodyParser());

// Set S3 endpoint to DigitalOcean Spaces
const spacesEndpoint = new aws.Endpoint('nyc3.digitaloceanspaces.com');
const s3 = new aws.S3({
  endpoint: spacesEndpoint
});

const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'space_name',
    acl: 'public-read',
    key: function (request, file, cb) {
      console.log(file);
      cb(null, new Date().toISOString());
    }
  })
}).single('logo');

server.post('/upload', async (req, res, next) => {
  upload(req, res, async (error) => {
    if (error) {
      console.log(error);
      return next(new errors.InvalidContentError(error));
    }
    console.log('File uploaded successfully.');
    res.send(200);
    next();
  });
});

I just replaced server.use(restify.plugins.bodyParser()); with server.use(restify.plugins.jsonBodyParser()); and everything is working fine.
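A minimal sketch of that change, reusing the multer-s3 upload handler from the question (only the parser registration differs, so multipart bodies are left untouched for multer to consume):

const restify = require('restify');
const errors = require('restify-errors');

const server = restify.createServer();
server.use(restify.plugins.acceptParser(server.acceptable));
// Parse JSON bodies only; multipart/form-data requests are not consumed here,
// so multer can still read the raw stream in the /upload route.
server.use(restify.plugins.jsonBodyParser());

// `upload` is the multer({ storage: multerS3(...) }).single('logo') handler
// defined in the question.
server.post('/upload', (req, res, next) => {
  upload(req, res, (error) => {
    if (error) {
      return next(new errors.InvalidContentError(error.message));
    }
    res.send(200);
    return next();
  });
});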

Related

Why am I getting this error? CredentialsError: Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1

I have built an app in Node.js which uploads files to DigitalOcean Spaces. I am using multer for this. When I run it locally on my device, it successfully uploads to DigitalOcean, but when I try to upload after deploying the app on DigitalOcean, it gives me 404 Not Found and the above error.
I have also uploaded the .aws folder, which contains the access key, the secret key and the config file.
What do I do?
const aws = require('aws-sdk');
const express = require('express');
const multer = require('multer');
const multerS3 = require('multer-s3');

const app = express();

const spacesEndpoint = new aws.Endpoint('nyc3.digitaloceanspaces.com');
const s3 = new aws.S3({
  endpoint: spacesEndpoint,
});

const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'imagebr',
    acl: 'public-read',
    key: function (request, files, cb) {
      cb(null, files.originalname);
    }
  })
}).single("image");

app.use(express.static('public'));

app.post('/uploads', upload, (req, res) => {
  res.send('uploaded');
});

app.listen(3001, function () {
  console.log('Server listening on port 3001.');
});
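There is no accepted fix in this thread, but one common approach (a sketch only; the SPACES_KEY and SPACES_SECRET environment variable names below are placeholders, not something from the question) is to pass the credentials to the S3 client explicitly instead of relying on the deployed app finding the .aws folder:

const aws = require('aws-sdk');

const spacesEndpoint = new aws.Endpoint('nyc3.digitaloceanspaces.com');
const s3 = new aws.S3({
  endpoint: spacesEndpoint,
  // Set these in the deployment environment rather than shipping the .aws folder.
  accessKeyId: process.env.SPACES_KEY,
  secretAccessKey: process.env.SPACES_SECRET
});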

MulterS3 - Control flow to check req object

I'm trying to use Multer-S3, and it works like a charm to upload images.
My challenge is that before the Multer-S3 middleware runs, I want some kind of control flow where it's possible to check whether req.body contains numbers for some fields, whether the file is of a specific type, and so on.
As I'm using form-data, I need to use multer or similar to get access to req.body and req.file, but if I use another middleware together with multer, it no longer works.
Anyone got ideas on how to do this?
Below is some sample code to illustrate:
const express = require('express'); // "^4.13.4"
const aws = require('aws-sdk'); // "^2.2.41"
const bodyParser = require('body-parser');
const multer = require('multer'); // "^1.3.0"
const multerS3 = require('multer-s3'); // "^2.7.0"

aws.config.update({
  secretAccessKey: 'YOUR_ACCESS_SECRET_KEY',
  accessKeyId: 'YOUR_ACCESS_KEY_ID',
  region: 'us-east-1'
});

const app = express();
const s3 = new aws.S3();

app.use(bodyParser.json());

const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: 'YOUR_BUCKET_NAME',
    key: function (req, file, cb) {
      console.log(file);
      cb(null, file.originalname); // use Date.now() for unique file keys
    }
  })
});

// This is where I would like to add some control flow to check req.body and req.file
// before the upload middleware is used.
// When using another multer middleware first to get access to the req body and file,
// it seems I can't use the upload middleware to upload.
app.post('/upload', upload.single("file"), (req, res, next) => {
  res.send("Uploaded!");
});

app.listen(3000, () => {
  console.log('Example app listening on port 3000!');
});
Anyone able to help?
Best regards,
Oscar
This works for me:
let formData = new FormData();
formData.append('param', this.data);
formData.append('dataForFile', this.$refs.file.files[0]);

this.$axios.post(`/file/upload`, formData)
  .then(data => {
    // ...
  });
It's important to remember to place the file data last in the FormData list.
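On the server side, a hedged sketch of the same idea (reusing the s3 client and bucket name from the sample above; someNumericField is an illustrative field name, not from the question): multer's fileFilter option can reject files by mimetype before anything is sent to S3, and because text fields appended before the file are parsed first, they are already available on req.body inside the key callback:

const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: 'YOUR_BUCKET_NAME',
    key: function (req, file, cb) {
      // Fields sent before the file in the form-data are already parsed here.
      if (isNaN(Number(req.body.someNumericField))) {
        return cb(new Error('someNumericField must be a number'));
      }
      cb(null, file.originalname);
    }
  }),
  // Reject anything that is not an image before it reaches S3.
  fileFilter: function (req, file, cb) {
    cb(null, file.mimetype.startsWith('image/'));
  }
});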

Multer returns req.file as undefined, and req.file.location as undefined, while uploading a file to an AWS bucket

I am trying to upload images to an S3 bucket and have tried many solutions online, yet I get the above errors. I don't want to store images locally; instead I want to upload them directly to the S3 bucket. Any help would be appreciated.
This is the Upload.js file:
const AWS = require('aws-sdk');
const Keys = require('../Config/dev');
const { v4: uuidv4 } = require('uuid');
const axios = require('axios').default;
const multer = require('multer');
const multerS3 = require('multer-s3');

const s3 = new AWS.S3({
  accessKeyId: Keys.accessKeyId,
  secretAccessKey: Keys.secretAccessKey,
  region: 'ap-south-1'
});

var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'thebucketname',
    acl: "public-read",
    metadata: function (req, file, cb) {
      cb(null, { fieldName: file.fieldname });
    },
    key: function (req, file, cb) {
      cb(new Date().toISOString().replace(/[-T:\.Z]/g, "") + file.originalname);
    }
  })
});

module.exports = upload;
This is the router code:
const express = require('express');
const Router = express.Router();
const controllers = require('../controllers/controllers.js');
const uploader = require('../controllers/Upload');

const singleUpload = uploader.single('img');

Router.post('/single-image', (req, res) => {
  singleUpload(req, res, (err) => {
    if (!req.file) {
      console.log(req.file);
    } else {
      console.log(req.file);
      return res.json({ 'imageUrl': req.file.location });
    }
  });
});
This is how I am using Postman for the API request. I have also set Content-Type to multipart/form-data inside the Headers in Postman. I get the error "undefined" for req.file when I do this.
Also, if I use
app.use(multer({dest:'./public/uploads/'}).single('file'));
my file gets stored in the 'uploads' folder, but then I get the error "req.file.location undefined", and the file doesn't upload to AWS.
Firstly, if you want to upload files to S3 and not store them on your server, you can hold the uploaded file as an in-memory buffer instead of writing it to your server and then uploading to S3. NOTE: this in-memory method is not recommended for large files or a large number of small files, because you need to ensure that your server has enough memory to handle the uploads.
Then you can just pass the buffer to the S3 upload function. I don't know much about the multer-s3 package you've apparently used, so I'm not using it here. I wrote this for an array of files, but it should work for single files as well. I combined your code with some of mine and came up with the following:
// aws-sdk for node
const AWS = require('aws-sdk');
AWS.config.update({ region: <your region here> });

// S3
const S3 = new AWS.S3({});

const express = require('express');
const Router = express.Router();
const controllers = require('../controllers/controllers.js');
const uploader = require('../controllers/Upload');

// import multer
const multer = require("multer");

// make multer ready for in-memory storage of the uploaded file
const multerMemoryStorage = multer.memoryStorage();
const multerUploadInMemory = multer({
  storage: multerMemoryStorage
});

// using multer.single as a middleware is what I prefer
Router.post('/single-image', multerUploadInMemory.single("filename"), async (req, res) => {
  try {
    if (!req.file || !req.file.buffer) {
      throw new Error("File or buffer not found");
    }
    const uploadResult = await S3.upload({
      Bucket: "yourBucketName",
      Key: "WhateverKeynameYouWantToGive",
      Body: req.file.buffer,
      ACL: 'public-read'
    }).promise();
    console.log(`Upload Successful!`);
    res.send({
      message: "file uploaded"
    });
  } catch (e) {
    console.error(`ERROR: ${e.message}`);
    res.status(500).send({
      message: e.message
    });
  }
});
You can first use console.log(req.file) to check that it's not undefined (which it shouldn't be), and verify that the file has the buffer property.
Also, there is a "warning" in the multer docs that you should never add multer as a global middleware, so app.use(multer({dest:'./public/uploads/'})) is a no-no.

S3 bucket: TypeError: Cannot read property 'transfer-encoding' of undefined

I am using an AWS S3 bucket for the first time. I used Node, an Express server, multer, and multerS3, and I used Postman for testing. I wanted to upload an image to my S3 bucket. I have created the bucket and added my credentials to my backend. But when I try to upload an image using Postman (this is how I made the POST request), I get the error "TypeError: Cannot read property 'transfer-encoding' of undefined".
This is my s3 setup
const aws = require("aws-sdk");
const multer = require("multer");
const multerS3 = require("multer-s3");
aws.config.update({
secretAccessKey: "AKIAJWFJ6GS2*******",
accessKeyId: "W/2129vK2eLcwv67J******",
region: "us-east-1"
});
const s3 = new aws.S3();
const upload = multer({
storage: multerS3({
s3: s3,
bucket: "testing-alak",
metadata: function(req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function(req, file, cb) {
cb(null, Date.now().toString());
}
})
});
module.exports = upload;
This is the upload file setup:
const express = require("express");
const router = express.Router();
const upload = require("./upload-file");
const singleUpload = upload.single("image");
router.post("/", (req, res) => {
singleUpload((req, res, next) => {
return res.json({
imgUrl: req.file.location
});
});
});
module.exports = router;
This is my express server
const express = require("express");
const app = express();
const route = require("./route");
const bodyParser = require("body-parser");
app.use(express.json()); //body Parser
app.use(bodyParser.urlencoded({ extended: true }));
app.use("/img", route);
const port = process.env.PORT || 5000;
app.listen(port, () => console.log(`🚀 App is listening at port ${port}!`));
If singleUpload is multer middleware, I've always used it like this:
router.post("/", singleUpload, (req, res) => {
return res.json({
imgUrl: req.file.location // this should be path?
});
});
Also, I don't think there is a location property. Maybe path is what you are looking for?
fieldname: Field name specified in the form
originalname: Name of the file on the user's computer
encoding: Encoding type of the file
mimetype: Mime type of the file
size: Size of the file in bytes
destination: The folder to which the file has been saved (DiskStorage)
filename: The name of the file within the destination (DiskStorage)
path: The full path to the uploaded file (DiskStorage)
buffer: A Buffer of the entire file (MemoryStorage)
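Whether location or path (or neither) is set depends on the storage engine in use, so a quick way to settle it is to log the whole req.file object in the route and see which properties the engine actually attached (a minimal sketch, assuming singleUpload is mounted as route middleware as shown above):

router.post("/", singleUpload, (req, res) => {
  // Inspect what the storage engine attached before relying on a specific property.
  console.log(req.file);
  return res.json({ file: req.file });
});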

S3 uploading is taking too much time

When I try to upload a video file into an S3 bucket, it takes a very long time. The file does get uploaded, so that is not the issue, but the upload time is quite high even though my video file is only 30MB. This is not an acceptable way to upload video files from my application, because I have to upload some larger video files to the S3 bucket that could be around 200MB each. So I need a better way to upload these video files to an S3 bucket.
Here is my code snippet:
const express = require('express');
const app = express();
const AWS = require('aws-sdk');
const fs = require('fs');
const fileType = require('file-type');
const bluebird = require('bluebird');
const multiparty = require('multiparty');
const cors = require('cors');

app.use(cors());
app.options('*', cors());

// configure the keys for accessing AWS
AWS.config.update({
  accessKeyId: "xxx",
  secretAccessKey: "yyy"
});

// configure AWS to work with promises
AWS.config.setPromisesDependency(bluebird);

// create S3 instance
const s3 = new AWS.S3();

// abstracted function to upload a file, returning a promise
const uploadFile = (buffer, name, type) => {
  const params = {
    ACL: 'public-read',
    Body: buffer,
    Bucket: "bucket-name",
    ContentType: type.mime,
    Key: `${name}.${type.ext}`
  };
  return s3.upload(params).promise();
};

// Define POST route
app.post('/test-upload', (request, response) => {
  const form = new multiparty.Form();
  response.header("Access-Control-Allow-Origin", "*");
  form.parse(request, async (error, fields, files) => {
    if (error) throw new Error(error);
    try {
      const path = files.file[0].path;
      const buffer = fs.readFileSync(path);
      const type = fileType(buffer);
      const timestamp = Date.now().toString();
      const fileName = `test/${timestamp}-lg`;
      const data = await uploadFile(buffer, fileName, type);
      console.log("Data", data);
      return response.status(200).send(data);
    } catch (error) {
      return response.status(400).send(error);
    }
  });
});

app.get('/', function(req, res) {
  res.send('Hello!');
});

var listener = app.listen(9000);
console.log('Server up and running...' + listener.address().address);
Thanks in advance.
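There is no accepted answer in this thread, but one thing worth trying (a sketch under assumptions, not a definitive fix; the part size and concurrency values below are illustrative) is to stream the file to s3.upload instead of reading it fully into memory with fs.readFileSync, and to tune the managed uploader's partSize and queueSize so large videos are sent as concurrent multipart chunks:

const fs = require('fs');

// Variant of the question's uploadFile that streams from disk and tunes the
// managed uploader; `s3`, `type`, and the bucket name are as in the question.
const uploadFileStream = (path, name, type) => {
  const params = {
    ACL: 'public-read',
    Body: fs.createReadStream(path), // stream instead of buffering the whole file
    Bucket: 'bucket-name',
    ContentType: type.mime,
    Key: `${name}.${type.ext}`
  };
  // 10 MB parts, up to 4 parts uploaded in parallel (illustrative values).
  const options = { partSize: 10 * 1024 * 1024, queueSize: 4 };
  return s3.upload(params, options).promise();
};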
