We are trying to upload images to a NodeJS server that uses expressjs/multer to parse multipart/form-data input. Unfortunately, we encounter a TypeError like so:
TypeError: Cannot read property 'path' of undefined
at /var/www/app/services/media/multer.js:41:31
at Immediate.<anonymous> (/var/www/app/node_modules/multer/lib/make-middleware.js:52:37)
at Immediate.immediate._onImmediate (timers.js:590:18)
at tryOnImmediate (timers.js:543:15)
at processImmediate [as _immediateCallback] (timers.js:523:5)
[nodemon] app crashed - waiting for file changes before starting.
After looking into it, we figured out that this backend error means multer didn't add a file field onto req, so a later manipulation of the image that reads req.file.path throws the TypeError.
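As a stop-gap while debugging (a sketch, not our actual handler; it assumes the media.img.put controller referenced in the route further down), guarding against a missing req.file avoids the crash:

// Sketch: bail out early in the image handler when multer did not attach a file.
if (!req.file) {
  return res.status(400).json({ error: 'No file received in the "upload" field' });
}
var imagePath = req.file.path; // safe to dereference now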
Sniffing the request with Wireshark, we found that the upload method we were using sends only a file path rather than the file's content.
Here's the React Native code (using superagent):
selectPhotoTapped() {
  ImagePicker.showImagePicker({
    title: 'Select Image'
  }, (response) => {
    console.log('Response = ', response);

    if (response.didCancel) {
      console.log('User cancelled photo picker');
    } else if (response.error) {
      console.log('ImagePicker Error: ', response.error);
    } else if (response.customButton) {
      console.log('User tapped custom button: ', response.customButton);
    } else {
      var promise = new Promise(function(resolve, reject) {
        request
          .put(Environment.BASE_URL + '/media/img/')
          .attach('upload', response.path, response.fileName)
          .field('usertype', 'Barber')
          .field('name', 'profilePic')
          .field('description', 'Holla')
          .field('type', 'test:barber:haircut')
          .end(function(err, res) {
            if (err) {
              reject(err);
            } else {
              resolve(res);
            }
          });
      }).then((response) => {
        console.log(response);
        alert("HERE");
        response.json();
      }).then((responseData) => {
        alert("Succes " + responseData)
      }).catch((error) => {
        alert("ERROR " + error)
        console.log(error);
      });
    }
  });
}
and here is our backend multer code:
var storage = Multer.diskStorage({
  destination: path.join(__dirname, '../../../', config.media.img),
  filename: generate_filename
});

var multer = Multer({
  storage: storage,
  limits: { fileSize: 25 * 1024 * 1024 },
  fileFilter: filter
});
router.put('/media/img/', auth.authenticate, multer.single('upload'), media.img.put);
So the question is: why does superagent send only the path when we use the attach method? We have tested the exact same superagent code in our mocha tests of the backend and everything turned out fine. So why the difference between React Native and Node?
If you want more information, you can check this print of the request before and after the multer middleware parsed it.
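For context on what ends up working on the device: in React Native there is no local filesystem that superagent can read the path from, so the usual approach (a sketch, assuming the react-native-image-picker response shape and our field names above) is to append a { uri, name, type } descriptor to a FormData body and let the native layer stream the file:

var data = new FormData();
data.append('upload', {
  uri: response.uri,        // local file URI from the picker (assumed field)
  name: response.fileName,  // original file name
  type: 'image/jpeg'        // assumed MIME type; adjust per picked file
});
data.append('usertype', 'Barber');
data.append('name', 'profilePic');

fetch(Environment.BASE_URL + '/media/img/', { method: 'PUT', body: data })
  .then((res) => res.json())
  .then((json) => console.log('Upload response:', json))
  .catch((err) => console.log('Upload error:', err));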
Related
I have been trying to reproduce this error against the Node.js server on my local machine using Postman, but I am not able to do so.
QA was able to reproduce it once or twice by killing the client application mid-upload, which threw the uncaught exception (visible in the Kibana APM logs) and crashed the server.
My aim is to crash the server locally once so that I can add exception handlers and avoid crashes in the production environment.
I am using Multer with Multer S3
"multer": "^1.4.2",
"multer-s3": "^2.9.0",
Here is my code:
uploadToS3: (fileFilter, limits, bcktName, awsS3Path) => {
  return multer({
    storage: multerS3({
      s3: s3,
      bucket: bcktName,
      contentType: multerS3.AUTO_CONTENT_TYPE,
      key: function (req, file, cb) {
        cb(null, awsS3Path + Date.now().toString())
      }
    }),
    fileFilter: fileFilter,
    limits: { ...limits }
  })
},
Here is the complete error:
Unexpected end of multipart data

node_modules/dicer/lib/Dicer.js in global.<anonymous> at line 62
    var self = this;
    self.emit('error', new Error('Unexpected end of multipart data'));
    if (self._part && !self._ignoreData) {
      var type = (self._isPreamble ? 'Preamble' : 'Part');

internal/process/task_queues.js in processTicksAndRejections at line 75
controller code:
let upload = util.uploadToS3(fileFilter, limits, process.env.Bucket, path).single('file');

upload(req, res, function(err) {
  console.log("inside upload");
  try {
    if (err instanceof multer.MulterError || (err && MULTER_ERROR_CODES[err.code || err.message])) {
      // A Multer error occurred when uploading.
      return res
        .status(400)
        .send(util.sendFormattedErrorResponse(MULTER_ERROR_CODES[err.code || err.message]));
    } else if (err) {
      // log error in kibana
      console.log(err);
      // An unknown error occurred when uploading.
      return res.status(500).send(util.sendFormattedErrorResponse(MULTER_ERROR_CODES['UNKNOWN']));
    }
  } catch (e) {
    // log error in kibana
    console.log(e);
  }
  // ... success handling continues here
});
Thanks in advance.
PS: if you can also point me to where to add the exception handlers, that would be great!
You can add a callback function to handle these errors.
Here is a reference link
https://github.com/expressjs/multer#error-handling
const multer = require('multer')
const upload = multer().single('avatar')

app.post('/profile', function (req, res) {
  upload(req, res, function (err) {
    if (err instanceof multer.MulterError) {
      // A Multer error occurred when uploading.
    } else if (err) {
      // An unknown error occurred when uploading.
    }
    // Everything went fine.
  })
})
You can also add a global error handler; details are in this link:
Global Error Handler
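For illustration, a minimal sketch of what that could look like, assuming an Express app and that you also want a process-level safety net (the names here are generic, not from your codebase):

// Express error-handling middleware: four arguments, registered after all routes.
app.use(function (err, req, res, next) {
  console.error(err); // log it (e.g. so it shows up in Kibana, as in your controller)
  if (res.headersSent) {
    return next(err);
  }
  res.status(500).send({ error: 'Internal server error' });
});

// Last-resort process hooks so an async upload error does not kill the server.
process.on('uncaughtException', function (err) {
  console.error('uncaughtException:', err);
});
process.on('unhandledRejection', function (reason) {
  console.error('unhandledRejection:', reason);
});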
I am working on creating a zip of multiple files on the server and streaming it to the client while it is being created. Initially I was using ArchiverJs; it worked fine when I appended buffers to it, but it failed when I needed to add streams. After some discussion on GitHub I switched to Node zip-stream, which started working fine, thanks to jntesteves. But as I deploy the code on GKE (Kubernetes), I started getting "Network Failed" errors for huge files.
Here is my sample code:
const ZipStream = require("zip-stream");

/**
 * @summary Adding readable stream provided by https module into zipStreamer using entry method
 */
const handleEntryCB = ({ readableStream, zipStreamer, fileName, resolve }) => {
  readableStream.on("error", error => {
    console.error("Error while listening readableStream : ", error);
    resolve("done");
  });
  zipStreamer.entry(readableStream, { name: fileName }, error => {
    if (!error) {
      resolve("done");
    } else {
      console.error("Error while listening zipStream readableStream : ", error);
      resolve("done");
    }
  });
};
/**
 * @summary Handling downloading of files using native https, http and request modules
 */
const handleUrl = ({ elem, zipStreamer }) => {
  return new Promise((resolve, reject) => {
    let fileName = elem.fileName;
    const url = elem.url;
    // Used in most of the cases
    if (url.startsWith("https")) {
      https.get(url, readableStream => {
        handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
      });
    } else if (url.startsWith("http")) {
      http.get(url, readableStream => {
        handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
      });
    } else {
      const readableStream = request(url);
      handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
    }
  });
};
const downloadZipFile = async (data, resp) => {
  let { urls = [] } = data || {};
  if (!urls.length) {
    throw new Error("URLs are mandatory.");
  }
  // Output zip name
  const outputFileName = `Test items.zip`;
  console.log("Downloading using streams.");
  // Initialize zip-stream instance
  const zipStreamer = new ZipStream();
  // Set headers on the response
  resp.writeHead(200, {
    "Content-Type": "application/zip",
    "Content-Disposition": `attachment; filename="${outputFileName}"`,
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, OPTIONS"
  });
  // Piping zipStreamer to resp so that the client starts getting the response
  // as soon as the first chunk is added to the zipStreamer
  zipStreamer.pipe(resp);
  for (const elem of urls) {
    await handleUrl({ elem, zipStreamer });
  }
  zipStreamer.finish();
};
app.post(restPrefix + "/downloadFIle", (req, resp) => {
  try {
    const { data } = req.body || {};
    downloadZipFile(data, resp);
  } catch (error) {
    console.error("[FileBundler] unknown error : ", error);
    if (resp.headersSent) {
      resp.end("Unknown error while archiving.");
    } else {
      resp.status(500).end("Unknown error while archiving.");
    }
  }
});
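As an aside, since downloadZipFile is async, a rejection inside it escapes the synchronous try/catch above; a sketch of catching it on the returned promise instead (same handling as in the catch block):

downloadZipFile(data, resp).catch(error => {
  console.error("[FileBundler] unknown error : ", error);
  if (resp.headersSent) {
    resp.end("Unknown error while archiving.");
  } else {
    resp.status(500).end("Unknown error while archiving.");
  }
});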
I tested with 7-8 files of ~4.5 GB each locally and it works fine, but when I tried the same on Google Kubernetes Engine I got a network failed error.
After some more research I increased the server timeout on k8s to 3000 seconds, and then it started working fine, but I guess increasing the timeout is not a good solution.
Is there anything I am missing at the code level, or can you suggest a good GKE deployment configuration for a server that serves large file downloads to many concurrent users?
I have been stuck on this for the past 1.5+ months. Please help!
Edit 1: I edited the timeout on the ingress, i.e. Network services -> Load Balancing -> edit the timeout on the backend service.
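For reference, when raising the load-balancer timeout, the Node server's own socket timeouts usually need to be raised as well, or it will drop long-running responses before the load balancer does. A minimal sketch, assuming the Express app from the code above (treat the exact numbers as assumptions to tune for your setup):

const server = app.listen(process.env.PORT || 8080);

// Disable Node's per-socket inactivity timeout so long zip downloads are not cut off.
server.setTimeout(0);
// Keep idle keep-alive connections open longer than the load balancer's idle timeout.
server.keepAliveTimeout = 620 * 1000;
// headersTimeout must be greater than keepAliveTimeout.
server.headersTimeout = 630 * 1000;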
I am building backend functions with Node.js and Firebase. I used Busboy to build an image upload function.
I checked my code again and again, swapped the image for a smaller one (about 140 KB), and tried to pin down the problem. From the code, I think it has something to do with writing the path name of the uploaded file. In firebase serve it gives the error:
TypeError [ERR_INVALID_ARG_TYPE]: The "path" argument must be of type string. Received type undefined
exports.uploadImage = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');

  const busboy = new BusBoy({ headers: req.headers });

  let imageToBeUploaded = {};
  let imageFileName;

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    console.log(fieldname, file, filename, encoding, mimetype);
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png') {
      return res.status(400).json({ error: 'Wrong file type submitted' });
    }
    // my.image.png => ['my', 'image', 'png']
    const imageExtension = filename.split('.')[filename.split('.').length - 1];
    // 32756238461724837.png
    imageFileName = `${Math.round(
      Math.random() * 1000000000000
    ).toString()}.${imageExtension}`;
    const filepath = path.join(os.tmpdir(), imageFileName);
    imageToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(imageToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${
          config.storageBucket
        }/o/${imageFileName}?alt=media`;
        return db.doc(`/users/${req.user.handle}`).update({ imageUrl });
      })
      .then(() => {
        return res.json({ message: 'image uploaded successfully' });
      })
      .catch((err) => {
        console.error(err);
        return res.status(500).json({ error: 'something went wrong' });
      });
  });

  busboy.end(req.rawBody);
};
When I looked at the details, I realized that the problem is in this line:
.upload(imageToBeUploaded.filepath,
I am sending the POST request from Postman.
Is there anybody who can help me?
Full error message:
TypeError [ERR_INVALID_ARG_TYPE]: The "path" argument must be of type string. Received type undefined
> at validateString (internal/validators.js:125:11)
> at Object.basename (path.js:1289:5)
> at Bucket.upload (/Users/apple/Desktop/Oner/social_app/functions/node_modules/@google-cloud/storage/build/src/bucket.js:2237:38)
> at PromiseCtor (/Users/apple/Desktop/Oner/social_app/functions/node_modules/@google-cloud/promisify/build/src/index.js:71:28)
> at new Promise (<anonymous>)
> at Bucket.wrapper (/Users/apple/Desktop/Oner/social_app/functions/node_modules/@google-cloud/promisify/build/src/index.js:56:16)
> at Busboy.busboy.on (/Users/apple/Desktop/Oner/social_app/functions/handlers/users.js:123:8)
> at Busboy.emit (events.js:198:13)
> at Busboy.emit (/Users/apple/Desktop/Oner/social_app/functions/node_modules/busboy/lib/main.js:37:33)
> at /Users/apple/Desktop/Oner/social_app/functions/node_modules/busboy/lib/types/multipart.js:304:17
Anyone having the same issue: to fix it you have to roll back firebase-tools to a previous version.
In the functions folder:
npm rm firebase-tools
npm i -g firebase-tools@6.8.0
Then add config.storageBucket to the .bucket() call in the 'finish' event handler.
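For clarity, that change is just passing the bucket name explicitly (a sketch, reusing the config object already imported in the question's code):

admin
  .storage()
  .bucket(config.storageBucket) // pass the bucket name explicitly instead of relying on the default
  .upload(imageToBeUploaded.filepath, {
    resumable: false,
    metadata: { metadata: { contentType: imageToBeUploaded.mimetype } }
  })
  // ...the .then()/.catch() chain from the original handler stays the same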
Nothing is wrong in your code; just remove Content-Type from the Postman headers. It worked for me.
I understand you are trying to test your code in Postman. I faced the same issue.
A genuine suggestion: simply open a new tab in Postman (not a duplicate, and not one inherited from another request), add the image, add any other required headers such as the bearer token, and hit send.
I really don't know what went wrong, but I think Postman adds different headers when you play with the body type, and that causes problems.
I am really new to Node.js and a bit more experienced with Flask. I am trying to connect a Node.js backend with a Flask API. Basically, I am sending a file that was uploaded to the Node.js app over to my Flask app for processing (converting it to another format).
For sending the data I am using request, in this way:
app.post('/converttest', uploader.single('file'), function(req, res) {
  var file = req.file,
    result = {
      error: 0,
      uploaded: []
    };
  flow.exec(
    function() { // Read temp file
      fs.readFile(file.path, this);
    },
    function(err, data) { // Upload file to S3
      var formData = {
        file: data,
      };
      requestPack.post({ url: 'http://127.0.0.1:5000/api/resource/converter', formData: formData });
    },
    function(err, httpResponse, body) { // Upload callback
      if (err) {
        return console.error('upload failed:', err);
      }
      res.redirect('/console');
    });
});
Then I receive the file for processing in the Flask app, like this:
@app.route('/api/resource/converter', methods=['POST', 'GET'])
def converter_csv():
    if request.method == 'POST':
        f = request.form['file']
        if not f:
            abort(400)

        print('-----Converting-------')
        file = open("temp/converting.txt", "w")
        file.write(f)
        # ....conversion process...

        # Finish the process
        return Response(converted_file, status=200)
In the console of the Flask app running on localhost, I am getting:
127.0.0.1 - - [09/Aug/2017 15:47:59] "POST /api/resource/converter HTTP/1.1" 200 -
However, my Node.js app did not receive any response. It just froze.
I appreciate any guidance anyone can give me. Thanks.
I think your flow.exec steps are not wired in the proper order:
router.post('/converttest', uploader.single('file'), function(req, res) {
  var filePath = req.file.path;
  fs.readFile(filePath, 'utf8', function(err, data) { // change the read format as required
    try {
      var formData = { file: data };
      requestPack.post({ url: 'http://127.0.0.1:5000/api/resource/converter', formData: formData });
      res.redirect('/console');
    } catch (err) {
      return console.error('upload failed:', err);
    }
    fs.unlink(filePath);
  });
});
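If you also want to wait for the Flask response before redirecting, a sketch (assuming requestPack is the request module, whose post accepts a callback):

requestPack.post(
  { url: 'http://127.0.0.1:5000/api/resource/converter', formData: formData },
  function(err, httpResponse, body) {
    if (err) {
      console.error('upload failed:', err);
      return res.status(500).send('Conversion failed');
    }
    console.log('Flask responded with status', httpResponse.statusCode);
    res.redirect('/console');
  }
);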
I ended up using requestify. It seems like they make it a little bit easier for beginners like me:
var requestify = require('requestify');

app.get('/convertupload', function(req, res) {
  res.render('pages/convertupload');
});

app.post('/converttest', uploader.single('file'), function(req, res) {
  var file = req.file,
    result = {
      error: 0,
      uploaded: []
    };
  flow.exec(
    function() { // Read temp file
      fs.readFile(file.path, this);
    },
    function(err, data) { // Upload file to S3
      var formData = {
        file: data
      };
      requestify.post('http://127.0.0.1:5000/api/resource/converter', {
        form: formData
      })
      .then(function(response) {
        // Get the response body (JSON parsed or jQuery object for XMLs)
        console.log(response)
        response.getBody();
      });
      res.redirect('/login');
    });
});
I'm making a Node.js app that should download a file given its public share URL (one that doesn't require login).
I have created the app inside the developer section and generated the proper tokens.
In the documentation on GitHub I found this method, which should be used to download a file. The first parameter is the fileId, but I can't find anywhere what this ID is or where to find it.
https://rawgit.com/box/box-node-sdk/master/docs/jsdoc/Files.html#getReadStream
I've tried this code:
var fs = require('fs'),
  BoxSDK = require('box-node-sdk'),
  sdk = new BoxSDK({
    clientID: '...',
    clientSecret: '...'
  }),
  client = sdk.getAnonymousClient(),
  file_id = process.argv[2].replace(/\S*\/(\S+)$/, "$1");

client.files.getReadStream(file_id, null, function(error, stream) {
  if (error) {
    throw error;
  }
  // write the file to disk
  var output = fs.createWriteStream('./output/' + file_id + ".zip"); // I know for sure there will only be zip files
  stream.pipe(output);
});
But when running it with this command:
nodejs index.js https://adrime.box.com/s/s5w7lzm4xfifmxrod9r7eeniv9nhtenk
I get this error:
Error: Unexpected API Response [404 Not Found] (not_found: "Not Found")
at Object.module.exports.buildResponseError (/home/andrea/dev/node/box_api/node_modules/box-node-sdk/lib/util/errors.js:57:23)
at Object.module.exports.buildUnexpectedResponseError (/home/andrea/dev/node/box_api/node_modules/box-node-sdk/lib/util/errors.js:94:15)
at /home/andrea/dev/node/box_api/node_modules/box-node-sdk/lib/managers/files.js:148:20
at BoxClient._handleResponse (/home/andrea/dev/node/box_api/node_modules/box-node-sdk/lib/box-client.js:291:2)
at handleResponseCallback (/home/andrea/dev/node/box_api/node_modules/box-node-sdk/lib/box-client.js:233:9)
at /home/andrea/dev/node/box_api/node_modules/box-node-sdk/lib/api-request.js:285:3
at nextTickCallbackWith0Args (node.js:436:9)
at process._tickCallback (node.js:365:13)
Can anyone help me programmatically download a publicly shared file from box.com?
Thank you in advance!
For the moment, I've found the following solution.
It works pretty well for me. I hope it will be useful to others as well.
var fs = require("fs"),
request = require("request");
function downloadFile(source, target, callback)
{
var wr = fs.createWriteStream(target);
wr.on("error", function (err)
{
callback(err);
});
wr.on("close", function (ex)
{
callback();
});
request
.get(source)
.on("error",function(err)
{
callback(err);
})
.on('response', function(response)
{
if((""+response.statusCode).charAt(0) != "2")
callback({
msg: "File not found: "+response.request.href,
path: response.request.href,
toString: function(){
return this.msg;
}
});
})
.on("end", function (ex)
{
console.log("request ended");
})
.pipe(wr);
}
function onDownloadComplete(err)
{
if(err)
throw err;
console.log("DOWNLOAD COMPLETE");
}
function init()
{
var static_url = process.argv[2].replace(/\/s\//,"/shared/static/") + ".zip";
console.log(static_url);
downloadFile(static_url, "./output/template.zip", onDownloadComplete);
}
init();
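One small follow-up (a sketch, assuming the same ./output directory as above): createWriteStream fails with ENOENT if the target directory doesn't exist, so it's worth creating it before starting the download:

// Make sure the output directory exists before piping the download into it.
if (!fs.existsSync("./output")) {
  fs.mkdirSync("./output");
}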