Node file uploading using formidable not working - node.js

We want to implement a functionality to upload multiple files into ExpressJS server and return all its unique urls, by an ajax call.
Following is the sample code in my front end:
var formData = new FormData();
for (var i = 0; i < nameId.length; i++) {
if($(nameId[i])[0].files[0]){
formData.append(nameId[i], $(nameId[i])[0].files[0], $(nameId[i])[0].files[0].name);
}
}
$.ajax({
url: '/upload-files',
type: 'POST',
data: formData,
processData: false,
contentType: false,
success: function(data){
console.log('upload successful!');
console.log(data);
}
});
And in our router we have following code snippet to accept the request and store the file:
router.post('/upload-files',function(req, res, next){
var form = new formidable.IncomingForm();
form.multiples = true;
form.uploadDir = path.join(__dirname, '/uploads');
form.on('file', function(field, file) {
console.log("File incoming");
fs.rename(file.path, path.join(form.uploadDir, file.name));
});
form.on('error', function(err) {
console.log('An error has occured: \n' + err);
});
form.on('end', function() {
res.end('success');
});
});
But, nothing ever happened in the router. Request is coming inside the router but after that nothing.
Is there anything wrong here? We are not getting any error in server side and in client side after long wait request is failing.
Please suggest.
Thanks

I was able to resolve it by adding & updating following code snippets:
In the upload request processing function added code :
form.parse(req);
And in the app.js updated following code snippet to:
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended:false}));
to (To process multipart data seperatly)
app.use(bodyParser.json())
.use(bodyParser.urlencoded());
NOTE Due to the change in parser following warning message is coming while starting the server:
body-parser deprecated undefined extended: provide extended option

My problem was solved by changing my ajax sending code as follow:-
$.ajax({
url: '/postroute',
type: 'POST',
processData: false,
contentType: false,
cache: false,
data: formData,
enctype: 'multipart/form-data',
success: function(){
console.log('Uploaded sucessfully');
}
});

Related

POST file using request, gridfs-stream and multer

In my node.js-application, I need to read data from a MongoDB with GridFS and upload it to another server via POST request, I am using the modules gridfs-stream, request and multer.
According to the request docs I can simply add a stream for multipart/form-data, so I'm doing this:
var fileUuid = "myFilename";
var options = {
url: "url.to/my/target/",
method: "POST",
header: {"Content-Type": "multipart/form-data"},
formData: {
filedata: gfs.createReadStream({filename: fileUuid})
}
}
request(options, function(error, response, body) {
if (error) {
console.log("[" + getDateTime() + "] Error replicating file:");
console.log(error);
return;
}
console.log('statusCode: ', response && response.statusCode);
console.log('body: ', body);
});
On my receiving server, I have the following code:
var upload = multer({ dest: './tmp_upload/' })
app.post('/my/target', upload.single('filedata'), function(req, res) {
console.log('['+getDateTime()+'] Request received');
}
But whenever I am executing the request, I get the following error on my sending server:
[2019:07:26:16:52:00] Error replicating file:
Error: write ECONNRESET
and this on my receiving server:
Error: Unexpected end of multipart data
at c:\Users\...\node_modules\dicer\lib\Dicer.js:62:28
at process._tickCallback (internal/process/next_tick.js:61:11)
Can anybody please give me a hint how to POST my file from one server to another?
EDIT:
The issue seems to be the gfs.createReadStream({filename: fileUuid}) type of stream... When I write the file to the filesystem first and instead put an fs.createReadStream(...) into the form as formData, then it works.
But is there an option to direct the stream directly into the formData, without having to write it to disk first?
So I ended up working with a temporary file that I read from the database, write it to disk and create a new readStream that I then put into the request. After the request was successful, I delete the temporary file from my disk and continue with my code:
var fsstreamwrite = fs.createWriteStream("./tmp/" + myFile);
var readstream = gfs.createReadStream( {filename: myFile} );
readstream.pipe(fsstreamwrite);
readstream.on("close", function () {
console.log("File read successfully from database");
var options = {
url: "url.to/my/target/",
method: "POST",
enctype: "multipart/form-data",
formData: {
"filedata": fs.createReadStream('./tmp/' + myFile)
}
}
request(options, function(error, response, body) {
if (error) {
// error handling
return;
}
// delete temporary file
fs.unlinkSync('./tmp/' + myFile)
// continue with my stuff...
});
});

Unhandled promise rejection in Node server, only on Heroku, works locally

I have a node app that serves a react app as well as makes requests to Google Cloud Storage. The App works perfectly locally, but after I've deployed it to Heroku I get the following error whenever I make requests to any of my endpoints:
2017-05-26T21:53:34.426234+00:00 app[web.1]: app.post /upload_url Endpoint
2017-05-26T21:53:34.484393+00:00 app[web.1]: (node:34) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 1): RangeError: Invalid status code: 0
The first line is a console log to check if the endpoint is reached, the second line is the error. Here's the code for the /upload_url endpoint:
app.post('/upload_url', (req, res) => {
console.log("app.post /upload_url Endpoint");
var originalFileName = req.body["originalFileName"];
var buf = crypto.randomBytes(16);
var uniqueFilename = buf.toString('hex') + '_' + originalFileName;
var file = bucket.file(uniqueFilename);
file.createResumableUpload(function (err, uri) {
if (!err) {
var json = JSON.stringify({
uri: uri,
uniqueFilename: uniqueFilename,
originalFileName: originalFileName
});
res.writeHead(200)
res.end(json);
} else {
res.writeHead(err.code);
res.end();
}
});
});
This endpoint is called by the react front end with the following fetch call:
function upload(file) {
var data = new FormData();
data.append('file', file);
return fetch(`upload_url`,{
method: 'POST',
headers: new Headers({
"Content-Type": "application/json",
}),
body: JSON.stringify({
originalFileName: file.name
})
});
}
}
Again, this works fine in development but not after deploying to Heroku. I've tried Heroku's suggestion of adding concurrency to the app (detailed here) without any luck. Any thoughts or suggestions on how to solve this problem would be very much appreciated.
EDIT:
bucket is a google cloud bucket and is defined like this:
const gcs = require('#google-cloud/storage')({
projectId: 'my-project',
keyFilename: process.env.GCS_KEYFILE
});
var bucket = gcs.bucket('my-bucket');
ANSWER:
While this didn't solve my issue entirely, by handling response error codes more appropriately I was able to determine that my actual problem is related to google cloud authentication. Here's my updated upload_url endpoint:
file.createResumableUpload(function (err, uri) {
if (!err) {
var json = JSON.stringify({
uri: uri,
uniqueFilename: uniqueFilename,
originalFileName: originalFileName
});
res.writeHead(200)
res.end(json);
} else {
if (err.code >= 100 && err.code < 600){
console.error(err)
res.writeHead(err.code);
res.end();
}else{
console.error(err)
res.writeHead(500);
res.end();
}
}
});
});
Refer to this answer https://stackoverflow.com/a/38258590/4348875, make sure err.code is a valid HTTP status code.

Using Bulit-in Http Module instead of Request Module on Node.js

I want to send form data to web-server by using Node.js, So I use "request" module that is very famous in nodeland. And It's cool, There is no problem, But because of some reason (write-stream encoding non-supported), I have to change it to built-in module, "http".
I think beneath codes are same to post some data to web-server, When I using "request" module, There is no problem so can get 200 response, success to sending data.
But in "http" module, I got a 302 response that redirects to another page. and failed post data. I don't know what is problem with, maybe it is something URL trouble, http use 'host and path' on the other hand, request use 'url' . I don't know how can I solve this, I stucked 2 days, please let me know If you have some hints..
Thanks.
By Using "Request" Module
function postFormByRequestModule() {
request({
url: 'http://finance.naver.com/item/board_act.nhn',
headers: { 'Content-Type': 'text/plain' },
method: 'POST',
form: {
code:'000215',
mode: 'write',
title: 'This is Title',
body:'This is body'
}
}, function (error, response, body) {
if (error) {
console.log(error);
} else {
console.log(response.statusCode, response.body);
}
});
}
By Using "Http" Module
var postData = querystring.stringify({
code:'000215',
mode: 'write',
title: 'This is Title',
body:'This is body'
});
var options = {
host: 'finance.naver.com',
path: '/item/board_act.nhn',
method: 'POST',
headers: { 'Content-Type': 'text/plain', }
};
var req = http.request(options, function(res) {
console.log('STATUS: ' + res.statusCode);
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log('BODY: ' + chunk);
});
res.on('end', function() {
console.log('No more data in response.')
})
});
req.on('error', function(e) {
console.log('problem with request: ' + e.message);
});
function postFormByBuiltInHttpModule() {
req.write(postData);
req.end();
}
The built-in http client does not automatically follow forwards, whereas the request module does (and has many other "high level" features). So if you want to continue using the built-in client, you will need to manually check res.headers.location and retry the request at that url.

sending file name on a blank page ajax call to node

it is working fine. the only problem is after execution it keeps waiting for response after setting header.. but after a while it shows no response on page it was set also it keeps waiting.. i have tried using "res.end()" it stops the response but shows only blank page here is the code..
here is the ajax code for it
var formData = new FormData($('#cover_file_form')[0]);
$.ajax({
url: '/cropImg',
data: formData,
cache: false,
contentType: false,
processData: false,
type: 'POST',
success: function(data){
setTimeout(
function() {
$('.container-coverscreen img').attr('src','images-esct/cropped/'+data.file_name);
},
250);
}
});
and on server side
app.post('/cropImg',function(req,res){
// get the temporary location of the file
var tmp_path = req.files.file_browse_cover.path;
gm(tmp_path)
.resize(parseInt(req.body.InputWidth), parseInt(req.body.InputHeight), "!")
.crop(255, 292, parseInt(req.body.InputLeft), parseInt(req.body.InputTop))
.write(__dirname + "/public/images-esct/cropped/"+req.files.file_browse_cover.name , function (err) {
if(err) throw err;
console.log(err);
});
res.setHeader({file_name: req.files.file_browse_cover.name});
//res.end('please stop');
})
sorry for my bad english
Try to set the status code as well:
res.setHeader("Location", "/eventapp");
res.sendStatus(302);
res.end();
But it would be easier to just use the redirect-method:
res.redirect('/eventapp');

NodeJS: sending/uploading a local file to a remote server

I have used the Winston module to create a daily log file for my offline app. I now need to be able to send or upload that file to a remote server via POST (that part already exists)
I know I need to write the file in chunks so it doesn't hog the memory so I'm using fs.createReadStream however I seem to only get a 503 response, even if sending just sample text.
EDIT
I worked out that the receiver was expecting the data to be named 'data'. I have removed the createReadSteam as I could only get it to work with 'application/x-www-form-urlencoded' and a synchronous fs.readFileSync. If I change this to 'multipart/form-data' on the php server would I be able to use createReadStream again, or is that only if I change to physically uploading the json file.
I've only been learning node for the past couple of weeks so any pointers would be gratefully received.
var http = require('http'),
fs = require('fs');
var post_options = {
host: 'logger.mysite.co.uk',
path: '/',
port: 80,
timeout: 120000,
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded'
}
}
var sender = http.request(post_options, function(res) {
if (res.statusCode < 399) {
var text = ""
res.on('data', function(chunk) {
text += chunk
})
res.on('end', function(data) {
console.log(text)
})
} else {
console.log("ERROR", res.statusCode)
}
})
var POST_DATA = 'data={['
POST_DATA += fs.readFileSync('./path/file.log').toString().replace(/\,+$/,'')
POST_DATA += ']}'
console.log(POST_DATA)
sender.write(POST_DATA)
sender.end()
After gazillion of trial-failure this worked for me. Using FormData with node-fetch. Oh, and request deprecated two days ago, btw.
const FormData = require('form-data');
const fetch = require('node-fetch');
function uploadImage(imageBuffer) {
const form = new FormData();
form.append('file', imageBuffer, {
contentType: 'image/jpeg',
filename: 'dummy.jpg',
});
return fetch(`myserver.cz/upload`, { method: 'POST', body: form })
};
In place of imageBuffer there can be numerous things. I had a buffer containing the image, but you can also pass the result of fs.createReadStream('/foo/bar.jpg') to upload a file from drive.
copied from https://github.com/mikeal/request#forms
var r = request.post('http://service.com/upload', function optionalCallback (err, httpResponse, body) {
if (err) {
return console.error('upload failed:', err);
}
console.log('Upload successful! Server responded with:', body);
})
var form = r.form()
form.append('my_field1', 'my_value23_321')
form.append('my_field2', '123123sdas')
form.append('my_file', fs.createReadStream(path.join(__dirname, 'doodle.png')))
Have a look at the request module.
It will provide you the ability to stream a file to POST requests.

Resources