I am trying to upload pdf/xls/xlsx on the file server but whenever I try to send the file on my back end using FormData, no folder is being created and the file isn't uploaded either.
Here is my code on the front end (Vue):
// Metadata describing the upload; serialized into the multipart body.
let toSubmit = {
document_title: this.document_title.toUpperCase(),
added_by: this.userInfo.employeeCode,
// Normalized target filename: spaces -> underscores, original extension kept.
FileName: `${this.document_title.replace(/ +/g, "_").toUpperCase()}.${this.$refs.files.files[0].name.split(".").pop().toLowerCase()}`
}
// NOTE(review): appended under the key "toSubmit", but the backend's multer
// filename callback reads req.body.toRequest — these key names must match.
const formData = new FormData
formData.append("toSubmit", JSON.stringify(toSubmit))
_.forEach(this.uploadFiles, file=>{
formData.append("files", file)
})
const url = `${this.api}add/template/document`
axios.post(url, formData, {
headers: {
'Content-Type': 'multipart/form-data',
// NOTE(review): 'dataType' is a jQuery ajax option, not an HTTP header —
// presumably ignored by the server; verify it is intentional.
dataType: 'json',
}
}).then(res=>{
console.log('document template', res.data)
})
And on my back-end, I used multer for uploading the file but I have no idea if the problem lies on multer itself or if I have something missing in my code. Anyway, this is my code on the back end (Node):
API
// Handles the template upload; multer's .array("files") parses the multipart
// body and populates req.files / req.body before this handler runs.
router.post("/add/template/document", uploadTemplate.array("files"), (req, res)=>{
// Text fields arrive as strings, so the JSON payload must be re-parsed.
let myData = JSON.parse(req.body.toSubmit)
res.send(myData)
})
Uploading
// NOTE(review): `storageTemplate` is referenced here but declared with const
// below — in this order the file throws a ReferenceError (temporal dead zone).
// Presumably the real file declares the storage engine first; verify ordering.
const uploadTemplate = multer({storage: storageTemplate});
const storageTemplate = multer.diskStorage({
destination: (req, file, cb) => {
var dir = `./uploads/DocumentTemplates/`;
// NOTE(review): mkdirSync without { recursive: true } throws if './uploads'
// does not already exist — likely why the folder is never created.
if (!fs.existsSync(dir)){
fs.mkdirSync(dir);
}
cb(null, dir);
},
filename: function(req, file, cb){
// NOTE(review): the frontend appends the field as "toSubmit", not
// "toRequest", and multer may invoke this callback before the text fields
// are parsed — either way req.body.toRequest is undefined here and
// JSON.parse throws.
let myData = JSON.parse(req.body.toRequest);
let fileName = myData.FileName
let new_filename = `${fileName}`
cb(
null,
new_filename
)
}
})
I still can't figure out why no folder is being created. Am I missing something?
You're creating a nested subfolder without the `recursive` flag; that's why the folder is not created.
Also, the request body is not reliably available inside multer's storage callbacks — only the file is — so you cannot attach custom data to the file that way; you need to change the upload middleware.
to create subfolders, add this flag:
// 'recursive: true' creates any missing parent directories as well (Node >= 10.12).
fs.mkdirSync(dir, {recursive: true});
there is no body in multer, use file (you can add mimetype validation, check that only certain types are uploaded):
// Names the stored file after the client-supplied original filename; the
// `file` argument (not req.body) is what multer provides at this point.
filename: function(req, file, cb){
console.log('file', file);
// validate expected file against file.mimetype
// if it fails, return error: cb(yourErrorMessage, null);
cb(
null,
file.originalname // handle on front-end
)
}
and on the frontend:
// The optional third argument to append() overrides the filename sent to the server.
formData.append("files", file, 'filename goes here');
Related
I have a "change user profile avatar" page. I'm using XHR to upload the picture, then receiving it with multer on the backend.
The problem I'm facing is that the received data is empty, because I'm sending the picture inside an object.
// Reads the avatar file chosen in an <input type="file"> and submits it.
var loadFile = function(event) {
//avatar file
let file = event.target.files[0];
let formData = new FormData();
formData.append('userAvatarFile', file);
// NOTE(review): wrapping the FormData inside a plain `data` object is why
// multer never sees the file — the body ends up JSON-stringified instead of
// being sent as a real multipart/form-data stream.
customAjax({
method: 'post',
route: '/profileEdit/userAvatar',
type: "multipart/form-data",
data: {
target: formData //the avatar file is inside this "data" object
}
})
};
The problem: as you can see, I'm putting the formData inside an object, but when I do that multer will not find the file, yet I need to send the formData this way. How can I make multer read the file from inside the 'data' object?
Backend Part:
//SET DESTINATION AND FILE NAME FOR THE UPLOADED IMAGES
//SET DESTINATION AND FILE NAME FOR THE UPLOADED IMAGES
const storage = multer.diskStorage({destination: __dirname +
'/../views/resources/images/avatar',
filename: function(req, file, cb){
// e.g. "userAvatarFile-1588888888888.png": field name + timestamp + original extension.
cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname));
}});
//init upload for multer
//init upload for multer
// Single-file middleware: expects the multipart field "userAvatarFile",
// caps the size at 1,000,000 bytes, and filters types via checkFileType.
const upload = multer({storage: storage,
limits: {
fileSize: 1000000
},
fileFilter: function(req, file, cb){
checkFileType(file, cb);
}
}).single('userAvatarFile');
//SECURITY PART OF FILE UPLOADING
//check the uploaded files
//SECURITY PART OF FILE UPLOADING
// Accepts an upload only when BOTH its extension and its MIME type look like
// an image (jpeg/jpg/png/gif); otherwise reports an error via the callback.
function checkFileType(file, cb){
const allowed = /jpeg|jpg|png|gif/;
// Extension check is case-insensitive (".PNG" passes).
const hasValidExt = allowed.test(path.extname(file.originalname).toLowerCase());
// MIME check guards against a renamed non-image file.
const hasValidMime = allowed.test(file.mimetype);
if (!(hasValidExt && hasValidMime)) {
return cb(new Error('Images Only!'))
}
return cb(null, true);
}
While working through the problem with the OP, it turned out that all XHR calls had been merged into one function to simplify requests. However, the data was transformed by JSON.stringify before sending, which is not what a "multipart/form-data" request should do.
"multipart/form-data" is used to handle blobs, which a plain HTTP body couldn't really carry without some compromise (like base64 encoding, which results in an even larger payload). "multipart/form-data" is basically a streamed upload, so the file arrives at your server in chunks. Multer is used to handle this.
You can read more about the "multipart/form-data" here
https://developer.mozilla.org/en-US/docs/Learn/Forms/Sending_and_retrieving_form_data
I'm trying to upload images to aws-s3 via a signed-url from NodeJS server (not from a browser). The image to upload has been generated by NodeJS. I'm getting the signed-url from aws and succeeding to upload it to s3.
But my image is corrupted. For some reason, S3 is adding some headers to my image (compare image attached).
What am I doing wrong?
getting the signed url:
// Requests a pre-signed PUT URL for FILE_NAME.png, valid for 60 seconds.
try {
var params = {
Bucket: bucketName,
Key: 'FILE_NAME.png',
Expires: 60
};
const url = await s3.getSignedUrlPromise('putObject', params);
return url;
} catch (err) {
// NOTE(review): catch-and-rethrow with no handling is a no-op; this
// try/catch could be removed without changing behavior.
throw err;
}
uploading to s3
var stats = fs.statSync(filePath);
var fileSizeInBytes = stats["size"];
const imageBuffer = fs.readFileSync(filePath);
// NOTE(review): a pre-signed S3 PUT expects the raw bytes as the request
// body. Wrapping the image in multipart formData stores the multipart
// boundaries and part headers inside the object — exactly the "extra
// headers" corruption shown in the comparison image.
var formData = {
'file': {
value: imageBuffer,
options: {
filename: 'FILE_NAME.png'
}
}
};
request({
method: 'put',
url,
headers: {
'Content-Length': fileSizeInBytes,
// NOTE(review): the standard header is 'Content-MD5' (base64 of the MD5
// digest), not 'Content-MD' — confirm what md5() returns here.
'Content-MD': md5(imageBuffer)
},
formData
}, function (err, res, body) {
console.log('body',body);
});
Compare between the actual image and the uploaded image to s3. S3 added some headers:
I know this is old, but I struggled with the same issue for a while. When uploading using a pre-signed URL, DO NOT use new FormData();
One thing I noticed that all of my files on s3 were exactly 2kb larger than the originals.
<input type="file" id="upload"/>
var upload = document.getElementById('upload');
var file = upload.files[0];
// Do NOT wrap the file in FormData for a pre-signed S3 PUT — the multipart
// envelope gets stored verbatim and corrupts the object (the extra ~2 kb).
//COMMENTED OUT BECAUSE IT WAS CAUSING THE ISSUE
//const formData = new FormData();
//formData.append("file", file);
// Assuming axios
const config = {
// Reports upload progress as an integer percentage.
onUploadProgress: function(progressEvent) {
var percentCompleted = Math.round(
(progressEvent.loaded * 100) / progressEvent.total
);
console.log(percentCompleted);
},
// Fixed: axios expects the key 'headers' (plural); 'header' is silently ignored,
// so the Content-Type was never actually sent.
headers: {
'Content-Type': file.type
}
};
// Send the File object itself as the request body.
axios.put(S3SignedPutURL, file, config)
.then(async res => {
callback({res, key})
})
.catch(err => {
console.log(err);
})
I followed the above solution for react js
What I was doing before uploading an image is passing through the createObject URL and then passing it to the API body.
if (e.target.files && e.target.files[0]) {
let img = e.target.files[0];
**setImage(URL.createObjectURL(img))**
Correct Way:
if (e.target.files && e.target.files[0]) {
let img = e.target.files[0];
**setImage(img)**
Work For me, Thanks Sam Munroe
Came here in 2023; I was facing the same problem using FormData, but in Postman, before handing it over to the front-end department.
To handle it in postman, use the type of request body as binary:
And don't forget to add the proper headers.
Try to specify the content type in the request as Content-Type multipart/form-data.
I've spent hours trying to find the solution for something which should be quite simple: uploading a file to the server from the client. I am using React.js on the frontend, Express on the backend, and multer for the image uploads.
When I try to upload a file, nothing happens. An uploads/ directory is created, but no file goes there. req.file and req.files are undefined. req.body.file is empty. The form data exists before it is sent.
If I set the Content-Type header to "multipart/form-data" I get a boundary error from multer.
Input
<input
onChange={this.sendFile}
name="avatar"
placeholder="Choose avatar"
type="file"
/>
sendFile
// Builds a FormData payload from the chosen file and hands it to the action.
sendFile = e => {
const data = new FormData();
const file = e.target.files[0];
// NOTE(review): field name "file" does not match the backend's
// upload.single("avatar") — multer only reads the named field.
data.append("file", file);
this.props.sendFile(data);
};
Redux action
// NOTE(review): this posts `{ file }` as JSON, discarding the FormData —
// the multipart body (and its boundary) never reaches the server, which is
// why req.file is undefined. Pass the FormData object itself to axios.post.
export default file => async dispatch => {
const res = await axios.post("/api/upload/", { file });
};
Express
const multer = require("multer");
// Files land in uploads/ with random names; the multipart field must be "avatar".
const upload = multer({ dest: "uploads/" });
router.post("/upload/", upload.single("avatar"), (req, res) => {
return res.sendStatus(200);
});
I tried to reproduce it and made it work with this method:
// Working version: the FormData field name must match upload.single("avatar").
sendFile = e => {
const data = new FormData();
const file = e.target.files[0];
data.append("avatar", file); // <-- use "avatar" instead of "file" here
axios({
method: 'post',
url: 'http://localhost:9000/api/upload',
data: data,
// Fixed: axios has no 'config' option — headers belong at the top level of
// the request options. (For FormData the browser sets the multipart
// boundary itself.)
headers: { 'Content-Type': 'multipart/form-data' }
});
};
Try to set the content-type header to multipart/form-data in the axios request and send the full FormData object as the second parameter.
Like this:
// Axios request options carrying the multipart content type.
const config = {
headers: {
'content-type': 'multipart/form-data'
}
};
// Fixed: pass the options object defined above — the original referenced an
// undefined `headers` variable (and had a stray trailing backtick).
axios.post('/api/upload/', file, config);
I am able to successfully upload a file via an upload button to my vendors API. My vendors API also returns a .png file in blob format that I need to upload to Azure Blob Storage. I have tried a few approaches, but am getting the following error in my Node console:
[Error] statusCode: 414
My front end code is in an Angular Controller which passes data back to my Node backend that contains my Azure Blob Storage calls. I have the formidable and request modules installed and required, but am not using them in my current backend code since the data I receive is already in blob format.
Here is my front end upload code. The success "result" is the blob data I am returned:
// Uploads the chosen file to the vendor API, then forwards the returned
// blob data to the Node backend for the Azure Blob Storage upload.
$scope.sendToProduction = function () {
// Derive the bare filename from the input's value (e.g. "C:\fakepath\a.png").
var parts = document.getElementById("file").value.split("\\");
var uploadedfilename = parts[parts.length - 1];
var basefilename = uploadedfilename.split(".")[0];
var fileextension = uploadedfilename.split(".")[1];
var filename = basefilename + '.' + fileextension;
var file = document.getElementById("file").files[0];
var formdata = new FormData();
formdata.append(filename, file);
$.ajax({
url: 'http://myvendorsapi/fileuploadendpoint',
type: "POST",
data: formdata,
mimeType: "multipart/form-data",
// Let the browser set the multipart Content-Type/boundary itself.
processData: false,
contentType: false,
crossDomain: true,
success: function (result) {
var filename = 'Test.png';
var file = result;
console.log(file);
// NOTE(review): posting binary blob content inside a JSON body is lossy;
// presumably this contributes to the 414/corruption issues — verify.
$http.post('/postAdvanced', {filename: filename, file: file }).success(function (data) {
console.log(data);
}, function (err) {
console.log(err);
});
},
error: function (error) {
console.log("Something went wrong!");
}
});
};
Here is my node backend for uploading to Azure Blob Storage:
// Receives { filename, file } and writes the content to Azure Blob Storage.
app.post('/postAdvanced', function (req, res, next) {
var filename = req.body.filename;
var file = req.body.file;
// NOTE(review): createBlockBlobFromText(container, blobName, text, cb) —
// the blob name must come BEFORE the content; these two arguments are
// swapped here, so the file body is used as the (length-limited) blob
// name, producing the 414 "Request-URI Too Long" error.
// NOTE(review): the handler never calls res.send()/res.end(), so the HTTP
// request will hang — respond inside the callback.
blobSvc.createBlockBlobFromText('blob5', file, filename, function (error, result, response) {
if (!error) {
console.log("Uploaded" + result);
}
else {
console.log(error);
}
});
})
How do I upload an AJAX response into Azure Blob Storage?
The problem is that in this line of code:
blobSvc.createBlockBlobFromText('blob5', file, filename, function (error, result, response) {
you have the wrong parameter order. It should be:
blobSvc.createBlockBlobFromText('blob5', filename, file, function (error, result, response) {
HTTP status code 414 means "Request-URI Too Long". Did you pass the correct blob name into blobSvc.createBlockBlobFromText?
I'm trying to save a PDF file into S3 with the AWS SDK.
I'm getting the PDF through the body of a POST request (Application/PDF).
When saving the file into the local HD with fs.writeFile, the file looks ok. But when uploading it to S3, the file is corrupted (it's just a single
page PDF).
Any help or hint would be greatly appreciated!
var data = body // body from a POST request.
var fileName = "test.pdf";
// NOTE(review): writeFile and putObject are started back-to-back; if `body`
// is a stream it is consumed by the first consumer, which would explain the
// corrupted S3 object while the on-disk copy looks fine — verify what type
// `body` actually is.
fs.writeFile(fileName, data, {encoding : "binary"}, function(err, data) {
console.log('saved'); // File is OK!
});
s3.putObject({ Bucket: "bucketName", Key: fileName, Body: data }, function(err, data) {
console.log('uploaded') // File uploads incorrectly.
});
EDIT:
It works if I write and then read the file and then upload it.
// Working version: wait for the write to finish, read the bytes back from
// disk, and upload the resulting Buffer — a stable in-memory snapshot.
fs.writeFile(fileName, data, {encoding : "binary"}, function(err, data) {
fs.readFile(fileName, function(err, fileData) {
s3.putObject({ Bucket: "bucketName", Key: fileName, Body: fileData }, function(err, data) {
console.log('uploaded') // File uploads correctly.
});
});
});
Try setting the contentType and/or ContentEncoding on your put to S3.
ContentType: 'binary', ContentEncoding: 'utf8'
See the code sample here for working example putObject makes object larger on server in Nodejs
I think it is because the data is consumed (i.e. it is a stream).
That would explain why, after writing the data, you send nothing valid to S3, while reading the data back from disk lets you send a valid PDF.
Try to see if it works by sending the data directly to S3 without writing it to disk first.
Yes, you forgot about the callback of the writeFile function, so when you started uploading to Amazon S3 your file wasn't saved completely. You shouldn't forget that Node.js is asynchronous, and the app won't wait for fs.writeFile to finish its work — it simply runs s3.putObject at the same time.
/**
* JS library: Promise.promisify from bluebirdjs
**/
My code is as below
// Replace the native Promise with bluebird so Promise.promisify is available.
global.Promise = require('bluebird');
const aws = require('aws-sdk');
// Static credentials for the S3 client (placeholders).
const aswAccessKey = {
accessKeyId: 'your-accesskey-id',
secretAccessKey: 'your-secret-access-key'
};
const fs = require('fs');
const path = require('path');
const uuidV4 = require('uuid/v4');
// Create S3 service object
// available apiVersion: '2006-03-01', '2013-04-01',
const s3 = new aws.S3(Object.assign(aswAccessKey, {
apiVersion: '2013-04-01'
}));
/**
 * Reads `file` from disk and uploads it to `bucketName` under a
 * uuid-prefixed key ("g01/<uuid>-<basename>").
 * @param {string} bucketName - target S3 bucket
 * @param {string} file - path of the local file to upload
 * @returns {Promise} resolves when the upload attempt has completed
 */
function putObject(bucketName, file) {
console.log('putObject into ', bucketName);
/**
* If we don't use versioned bucket, we must not pass VersionId
*/
const params = {
Bucket: bucketName,
Key: '',
Body: 'Plain text',
ACL: 'public-read',
ContentType: 'binary',
CacheControl: 'max-age=172800'
};
return Promise
.promisify(fs.readFile, {
context: fs
})(file)
.then((fileData) => {
console.log(fileData);
params.Body = fileData;
// Unique key per upload so repeated files never collide.
params.Key = 'g01/' + uuidV4() + '-' + path.basename(file);
return Promise
.promisify(s3.putObject, {
context: s3
})(params)
.then((data) => {
console.log('successful');
console.log(data);
})
.catch((err) => {
console.log('Error', err);
});
})
.catch((err) => {
// Fixed: the read-failure path previously swallowed the error silently.
console.log('readFile error', err);
});
}