Dealing with metadata in client requests when receiving images - node.js

I am facing issues when trying to process an uploaded picture and write it to a file. Usually this works very well like this:
req.on('data', function (data) {
  body += data;
  wstream.write(data);
});
// end processing once the whole request body has arrived
req.on('end', function () {
  wstream.end();
});
In that case the written file's header looks like this:
PNG ... followed by binary data.
But in some cases the written file looks like this:
--72e245e8-38eb-41e8-9118-fc0405e4837c Content-Type: multipart/form-data Content-Disposition: form-data; name=image;
filename=picture.jpg; filename*=utf-8''picture.jpg
As you can imagine, those pictures aren't usable anymore until I remove that metadata (the Content-Type header and so on). After I do so, the picture is fully functional and usable.
I tried accessing the request data and calling its toString method to replace the "unwanted" parts, but then I entirely mess up the content encoding of the output file and it becomes unusable altogether.
data = data.toString().replace(/---.*/g, "");
Any ideas how to do the trick?

I solved my issue with the help of the formidable module.
var formidable = require('formidable');
var util = require('util');

var form = new formidable.IncomingForm();
form.parse(req, function (err, fields, files) {
  logger.debug("Received upload: " + util.inspect({fields: fields, files: files}));
});
form.on('fileBegin', function (name, file) {
  file.path = "MyPathToDestinationFile";
  logger.debug("File upload started for file '" + file.path + "'");
});
form.on('end', function () {
  logger.debug("File upload finished for file");
  // Send response to client
});
form.on('error', function (err) {
  logger.debug("Failed to finish upload due to '" + err + "'");
  // Send error to client
});
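For context, here is a minimal sketch of wiring formidable into a plain http server; the port, the method check, and the responses are assumptions, not part of the original answer:
var http = require('http');
var formidable = require('formidable');

http.createServer(function (req, res) {
  if (req.method === 'POST') {
    var form = new formidable.IncomingForm();
    form.uploadDir = '/tmp'; // assumed destination directory
    form.parse(req, function (err, fields, files) {
      if (err) {
        res.writeHead(500);
        return res.end('Upload failed');
      }
      res.writeHead(200, {'Content-Type': 'text/plain'});
      res.end('Upload complete');
    });
  } else {
    res.writeHead(405);
    res.end();
  }
}).listen(8080); // assumed port
formidable parses the multipart boundaries and part headers for you, which is exactly the metadata that was ending up inside the raw file.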

Upload file to servlet from node without saving it

On my node express server, I am receiving a pdf file. I am using the below code to get the pdf contents from the request
var data = new Buffer('');
request.on('data', function (chunk) {
  data = Buffer.concat([data, chunk]);
});
request.on('end', function () {
  console.log('PDF data is ' + JSON.stringify(data));
});
Now that the PDF content is available on node, I need to send it as-is to a J2EE server. To do that, I am first saving the PDF file on the node server, reading it back, and then piping it to request.post (https://github.com/request/request):
var req = require('request');
fs.writeFile('abc.pdf', data, 'binary', function (err) {
  if (err) {
    console.log('Error ' + JSON.stringify(err));
    throw err;
  }
  var source = fs.createReadStream('abc.pdf');
  // send our data via POST request
  source.pipe(req.post('http://' + j2ee_host + ':' + j2ee_port + '/myjavaapp/Upload'));
});
This works fine. However, I feel that saving the PDF file on the node server and then reading it back (before posting to the J2EE server using the request module) is completely unnecessary, as I am not making any changes to the file.
Once I have the PDF contents in the 'data' variable, I would like to post them directly to the J2EE server. However, I have not been able to find a way to use the request module to post file contents directly. The examples I have seen for POSTing with the request module all refer to formData; in my case I don't have formData, just the file contents read from the request, which I want to post straight to the J2EE server.
Is there a way to achieve this and avoid the file write and read?
EDIT
Below is my complete code
function upload(request, response) {
  var data = new Buffer('');
  request.on('data', function (chunk) {
    data = Buffer.concat([data, chunk]);
  });
  request.on('end', function () {
    fs.writeFile('abc.pdf', data, 'binary', function (err) {
      if (err) {
        console.log('Error ' + JSON.stringify(err));
        throw err;
      }
      var source = fs.createReadStream('abc.pdf');
      source.pipe(req.post('http://' + j2ee_host + ':' + j2ee_port + '/myj2eeapp/Upload'));
    });
  });
}
You can pipe directly from the incoming data request to the servlet:
var req = require('request');

function upload(request, response) {
  var target = req.post('http://' + j2ee_host + ':' + j2ee_port + '/myjavaapp/Upload');
  request.pipe(target);
  target.on('finish', function () {
    console.log('All done!');
    // send the response or make a completed callback here...
  });
}
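If you already have the body buffered in a variable (like the data Buffer from the question) and would rather not stream, the request module also accepts a Buffer as the body option; a minimal sketch, where the Content-Type header is an assumption:
var req = require('request');

function upload(request, response) {
  var data = new Buffer('');
  request.on('data', function (chunk) {
    data = Buffer.concat([data, chunk]);
  });
  request.on('end', function () {
    req.post({
      url: 'http://' + j2ee_host + ':' + j2ee_port + '/myjavaapp/Upload',
      body: data, // a Buffer is sent as-is, with no string conversion
      headers: { 'Content-Type': 'application/pdf' } // assumed content type
    }, function (err, resp, body) {
      if (err) {
        return console.log('Upload failed: ' + err);
      }
      console.log('Upload complete, status ' + resp.statusCode);
    });
  });
}
The piped version above is still preferable for large files, since it never holds the whole PDF in memory.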

Making an HTTP request on a data:application URL

I'm trying to make an HTTP request in Node to a data:application URL that gets generated, then write the result to a file:
var fileName = "csv-" + uuid() + ".csv";
var fileLocation = process.env.USERPROFILE + "\\" + fileName;
var file = fs.createWriteStream(fileLocation);
var exportCsvData = 'data:application/csv;charset=utf-8,' + encodeURIComponent(data);
var options = {
  host: location.hostname,
  port: location.port,
  path: '/' + exportCsvData
};
console.log(exportCsvData);
http.get(options, function (response) {
  // write all of our message parts to the file
  response.pipe(file);
});
The file gets created, but the only thing written to it is "Not Found". However, when I take the exportCsvData value that gets logged and paste it into Chrome, it downloads the data just fine.
I've also tried doing
http.get(options, function (res) {
  res.on('data', function (chunk) {
    file.write(chunk);
  });
  res.on('end', function () {
    file.end();
  });
});
With the same result.
Edit: I've just realized that in this specific case, I can just grab data and write it out to a CSV file, and it works just fine.
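That explains the "Not Found": a data: URL is not something an HTTP server can serve; the payload is embedded in the URL itself, and the browser decodes it locally. Hitting your own host with the data URL as a path just 404s. If you ever do need the decoding step in Node rather than writing data directly, a minimal sketch (reusing exportCsvData and fileLocation from above):
// everything after the first comma is the percent-encoded payload
var payload = exportCsvData.split(',').slice(1).join(',');
fs.writeFile(fileLocation, decodeURIComponent(payload), 'utf8', function (err) {
  if (err) throw err;
  console.log('Wrote ' + fileLocation);
});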

mongodb gridfs encoding picture base64

I am trying to read out an image saved in MongoDB via GridFS (without a temporary file).
It should be sent directly to ajax, which injects it into the HTML.
With my current functions, a large bit string is formed and sent to the client (it ends up in the ajax response variable), but by the time it reaches the client the bits aren't correct anymore.
So I am looking for a way to encode the picture (into base64) before it is sent. Or is there any other way?
Server side (JavaScript, GridFS):
exports.readFileFromDB = function (req, res, profile, filename, callback) {
  console.log('Find data from Profile ' + JSON.stringify(profile));
  var GridReader = new GridStore(db, filename, "r");
  GridReader.open(function (err, gs) {
    var streamFile = gs.stream(true);
    streamFile.on("end", function () {
    });
    // Pipe out the data
    streamFile.pipe(res);
    GridReader.close(function (err, result) {
    });
  });
};
Client side (JavaScript ajax call):
function imgUpload() {
  var thumb = $("#previewPic");
  $('#uploadForm').ajaxSubmit({
    beforeSend: function () {
      //launchpreloader();
    },
    error: function (xhr) {
      //status('Error: ' + xhr.status);
    },
    success: function (response) {
      console.log(response);
      var imageData = $.base64Encode(response);
      console.log(imageData);
      thumb.attr("src", "data:image/png;base64," + imageData);
      $("#spanFileName").html("File Uploaded");
    }
  });
}
I'm doing something similar for a current project, but when the upload is complete, I return a JSON object containing the URL for the uploaded image:
{ success : true, url : '/uploads/GRIDFSID/filename.ext' }
I have a route in Express that handles the /uploads route to retrieve the file from GridFS and stream it back to the client, so I can use the above URL in an IMG SRC. This is effectively what appears in the DOM:
<img src="/uploads/GRIDFSID/filename.ext">
The route handler looks something like this (it uses node-mime and gridfs-stream):
app.get(/^\/uploads\/([a-f0-9]+)\/(.*)$/, function (req, res) {
  var id = req.params[0];
  var filename = req.params[1];
  // Set correct content type.
  res.set('Content-type', mime.lookup(filename));
  // Find GridFS file by id and pipe it to the response stream.
  gridfs
    .createReadStream({ _id : id })
    .on('error', function (err) {
      res.send(404); // or 500
    })
    .pipe(res);
});
It obviously depends on your exact setup if my solution works for you.
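If you would rather keep the base64 approach from the question instead of serving a URL, the key is to do the encoding on the server, so no binary data travels through the ajax response; a minimal sketch using gridfs-stream, with the image/png prefix assumed:
function sendImageAsBase64(res, id) {
  var chunks = [];
  gridfs
    .createReadStream({ _id : id })
    .on('data', function (chunk) {
      chunks.push(chunk); // keep everything as Buffers until the end
    })
    .on('error', function (err) {
      res.send(404);
    })
    .on('end', function () {
      var base64 = Buffer.concat(chunks).toString('base64');
      res.send('data:image/png;base64,' + base64);
    });
}
The client can then set the response string directly as the img src, with no $.base64Encode call.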

Node.js user input from a website (not using Express.js)

I am still a beginner in Node.js and I am trying to explore as much as I can.
I know that Express.js is a framework used by many people for creating websites in Node.js.
But without using Express.js, I know that it is possible to read .html files using 'fs.readFile' and then "display" them in the browser.
Is there a way to get user input (say a button click, or filling in a box) from this web page into Node.js? So far, I have not found any examples of this.
Yes, this is possible. Study how the connect bodyParser's urlencoded function works.
When a request comes in from the browser, node is going to represent this as a readable data stream. For web forms, the pattern will be:
Use the request's data and end events to buffer the chunks of data from the stream into a single string.
Parse that string appropriately given its data format. In the case of a web form, this will normally be the urlencoded (application/x-www-form-urlencoded) MIME type.
var qs = require('qs'); // https://github.com/visionmedia/node-querystring

function handle(req, res) {
  var buf = '';
  req.setEncoding('utf8');
  req.on('data', function (chunk) {
    // assemble the request from distinct chunks into a single string
    buf += chunk;
  });
  req.on('end', function () {
    // OK, you have a usable string request body, parse it and handle it
    try {
      var formData = qs.parse(buf);
      // Yay, it parsed. Now you have your form data
      // depending on your form's html, you might have formData.email, for example
    } catch (err) {
      // oops, respond with an error
    }
  });
}
Long story short:
http.createServer(function (req, res) {
  var data = '';
  req.on('data', function (chunk) {
    console.log("Received body data:");
    console.log(chunk);
    data += chunk.toString();
  });
  req.on('end', function () {
    console.log('Received Data: ', data);
    res.end();
  });
}).listen(8080); // listening on an assumed port
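Tying this back to the fs.readFile approach mentioned in the question, here is a minimal sketch of serving an HTML page and receiving its form input without Express, using only built-in modules; the file name form.html and the port are assumptions:
var http = require('http');
var fs = require('fs');
var querystring = require('querystring');

http.createServer(function (req, res) {
  if (req.method === 'GET') {
    // serve a page containing e.g. <form method="POST"><input name="email"></form>
    fs.readFile('form.html', function (err, html) {
      if (err) {
        res.writeHead(500);
        return res.end('Could not read form.html');
      }
      res.writeHead(200, {'Content-Type': 'text/html'});
      res.end(html);
    });
  } else if (req.method === 'POST') {
    var body = '';
    req.on('data', function (chunk) { body += chunk; });
    req.on('end', function () {
      var formData = querystring.parse(body); // built-in urlencoded parser
      console.log('User submitted:', formData);
      res.end('Got it');
    });
  }
}).listen(8080);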

Node.js: serving dynamic pdf, coming up empty

I have a node service that fetches a pdf from an API and serves that pdf.
When I curl or directly open the API, I do see the correct pdf.
But when I serve it from my Node app, I get an empty pdf.
Here's the section of my code that does the pdf render.
} else if (options.type === 'pdf') {
  res.writeHead(200, {
    'content-type': 'application/pdf',
    'content-disposition': 'attachment; filename=invoice.pdf'
  });
  res.end(data.invoice);
}
I've console.log'ed data.invoice, so I know it's the right content.
typeof(data.invoice) gives string, but I've also tried res.end(new Buffer(data.invoice)), which didn't work either.
Here's the section of my code that fetches the data:
var http_options = {
  method: options.method,
  host: Config.API.host,
  path: options.path,
  port: Config.API.port,
  headers: options.headers
};

var req = http.request(http_options, function (response) {
  var raw_response = "";
  response.on('data', function (response_data) {
    raw_response += response_data.toString();
  });
  response.on('end', function () {
    if (response.statusCode !== 200) {
      cb(raw_response);
    } else {
      cb(false, raw_response);
    }
  });
});
req.setTimeout(timeout, function () {
  req.abort();
  cb("API connection timed out");
});
req.on('error', function (error) {
  cb("API error while requesting for " + options.path + '\n' + error + '\n' +
     "http options: " + JSON.stringify(http_options));
});
req.end();
It's quite likely that the toString() and concatenation when you're receiving the PDF are corrupting it. Try writing raw_response to a file (you can use writeFileSync() since this is just a one-time test) and doing a byte-for-byte comparison with the same PDF retrieved with curl.
Note that if the process of string conversion has corrupted it, trying to convert it back to a buffer before sending it won't help. You'll have to keep the whole thing as a buffer from start to finish.
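A minimal sketch of keeping the download binary end to end, collecting Buffer chunks instead of strings (same http_options and cb as in the question):
var req = http.request(http_options, function (response) {
  var chunks = [];
  response.on('data', function (chunk) {
    chunks.push(chunk); // chunk stays a Buffer; no toString(), no corruption
  });
  response.on('end', function () {
    var raw_response = Buffer.concat(chunks);
    if (response.statusCode !== 200) {
      cb(raw_response.toString()); // error bodies are usually plain text
    } else {
      cb(false, raw_response); // hand the Buffer through untouched
    }
  });
});
req.end();
Then the serving side can pass that Buffer straight to res.end().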
Since you don't intend to modify or read this data in transit, I suggest just using the pipe function to pipe all the data coming in from the API response out to your client response. this question has a good sample, but here's an excerpt.
req.on('response', function (proxy_response) {
  // forward status and headers before any body data is written
  response.writeHead(proxy_response.statusCode, proxy_response.headers);
  proxy_response.pipe(response);
});
Note that there's no reason to convert the chunks coming in from the response from Buffers to something else; just write them through as unmodified buffers and stream them (this is what pipe does for you) instead of accumulating them, for maximum efficiency (and node.js streaming hipster points).
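Applied to the invoice case above, a minimal sketch (reusing http_options from the question; res is the client response in the route handler):
var api_req = http.request(http_options, function (api_response) {
  // forward the status, set PDF headers, then stream the bytes straight through
  res.writeHead(api_response.statusCode, {
    'content-type': 'application/pdf',
    'content-disposition': 'attachment; filename=invoice.pdf'
  });
  api_response.pipe(res);
});
api_req.on('error', function (error) {
  res.writeHead(502);
  res.end('API error: ' + error.message);
});
api_req.end();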
