I'm trying to make an HTTP request in Node against a data:application URL that gets generated, then write the results to a file.
var fileName = "csv-" + uuid() + ".csv";
var fileLocation = process.env.USERPROFILE + "\\" + fileName;
var file=fs.createWriteStream(fileLocation);
var exportCsvData = 'data:application/csv;charset=utf-8,' + encodeURIComponent(data);
var options={
host: location.hostname,
port: location.port,
path: '/' + exportCsvData
}
console.log(exportCsvData);
http.get(options, function(response) {
// write all of our message parts to the file
response.pipe(file);
});
The file gets created, but the only thing written to it is "Not Found". However, when I take the exportCsvData value that gets logged, copy it, and paste it into Chrome, it downloads the data just fine.
I've also tried doing
http.get(options, function(res) {
    res.on('data', function(chunk) {
        file.write(chunk);
    });
    res.on('end', function() {
        file.end();
    });
});
With the same result.
Edit: I've just realized that in this specific case, I can just grab data and write it out to a CSV file directly, and it works just fine.
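For anyone else landing here, this is a minimal sketch of that workaround, assuming data is the raw CSV string that was being wrapped into the data: URI (no HTTP request needed at all):

var fs = require('fs');

// write the CSV text straight to disk instead of requesting a data: URI over HTTP
fs.writeFile(fileLocation, data, function (err) {
    if (err) throw err;
    console.log('CSV written to ' + fileLocation);
});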
I am facing issues when trying to process an uploaded picture and write it to a file. Usually this works fine, like this:
req.on('data', function (data) {
    body += data;
    wstream.write(data);
});

// end process data
req.on('end', function () {
    wstream.end();
});
In that case the written file's header looks like this:
PNG ... followed by binary data.
But in some cases the written file looks like this:
--72e245e8-38eb-41e8-9118-fc0405e4837c Content-Type: multipart/form-data Content-Disposition: form-data; name=image;
filename=picture.jpg; filename*=utf-8''picture.jpg
As you can imagine, those pictures aren't usable until I remove that metadata (the Content-Type header and so on).
After I do so, the picture is fully functional and usable.
I tried accessing the request data and calling toString() to strip the "unwanted" parts, but then I mess up the content encoding of the output file entirely and it becomes completely unusable.
data = data.toString().replace(/---.*/g, "");
Any ideas on how to handle this properly?
I solved my issue with the help of the formidable module.
var formidable = require('formidable');
var util = require('util');

var form = new formidable.IncomingForm();

form.parse(req, function(err, fields, files) {
    logger.debug("Received upload: " + util.inspect({fields: fields, files: files}));
});

form.on('fileBegin', function (name, file) {
    file.path = "MyPathToDestinationFile";
    logger.debug("File upload started for file '" + file.path + "'");
});

form.on('end', function() {
    logger.debug("File upload finished for file");
    // Send response to client
});

form.on('error', function(err) {
    logger.debug("Failed to finish upload due to '" + err + "'");
    // Send error to client
});
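For context, here's a rough sketch of how those handlers can be wired into a plain http server with formidable (the port, route, destination path, and response bodies are placeholder assumptions, not part of the original answer):

var http = require('http');
var formidable = require('formidable');

http.createServer(function (req, res) {
    if (req.url === '/upload' && req.method === 'POST') {
        var form = new formidable.IncomingForm();
        form.on('fileBegin', function (name, file) {
            // placeholder destination; formidable streams the upload here
            file.path = '/tmp/' + file.name;
        });
        form.parse(req, function (err, fields, files) {
            if (err) {
                res.writeHead(500);
                return res.end('Upload failed');
            }
            res.writeHead(200);
            res.end('Upload complete');
        });
    } else {
        res.writeHead(404);
        res.end();
    }
}).listen(8080);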
I'm trying to upload files to a server using Node.js as the backend and AngularJS as the frontend, with Express 4 + Busboy. I have a table in the frontend that should display all the files I'm uploading. So if I have 3 files and click on upload, Angular should POST these files to Node.js and, after getting the response back, refresh the table with those three files.
This is the function I'm using in angular:
function uploadFiles(files) {
    var fd = new FormData();
    for (var i = 0; i < files.length; i++) {
        fd.append("file", files[i]);
    }
    $http.post('http://localhost:3000/upload', fd, {
        withCredentials: false,
        headers: { 'Content-Type': undefined },
        transformRequest: angular.identity
    }).success(refreshTable()).error(function() {
        console.log("error uploading");
    });
}
and this is from node.js:
app.post('/upload', function(req, res) {
    var busboy = new Busboy({ headers: req.headers });
    busboy.on('file', function (fieldname, file, filename) {
        console.log("Uploading: " + filename);
        var fstream = fs.createWriteStream('./files/' + filename);
        file.pipe(fstream);
    });
    busboy.on('finish', function() {
        res.writeHead(200, { 'Connection': 'close' });
        res.end("");
    });
    return req.pipe(busboy);
});
The problem is that if I upload three files, Node.js sends the response as soon as the first file has been uploaded, so the table is updated with only the first file; if I refresh the page, the rest of the files appear.
I think the problem is with this line in Node: return req.pipe(busboy);. If I remove that line, the POST response stays pending for a long time and nothing happens. I think this is an async problem - does anybody know a way to send the response back only when all the files have been uploaded?
thanks
A simple and common solution to this particular problem is to use a counter variable and listen for the finish event on each fs Writable stream. For example:
app.post('/upload', function(req, res) {
    var busboy = new Busboy({ headers: req.headers });
    var files = 0, finished = false;
    busboy.on('file', function (fieldname, file, filename) {
        console.log("Uploading: " + filename);
        ++files;
        var fstream = fs.createWriteStream('./files/' + filename);
        fstream.on('finish', function() {
            if (--files === 0 && finished) {
                res.writeHead(200, { 'Connection': 'close' });
                res.end("");
            }
        });
        file.pipe(fstream);
    });
    busboy.on('finish', function() {
        finished = true;
    });
    return req.pipe(busboy);
});
The reason for this is that busboy's finish event is emitted once the entire request has been fully processed, including the files. However, there is some delay between the point when there is no more data to write to a particular file and the point when the OS/node has flushed its internal buffers to disk (and closed the file descriptor). Listening for the finish event on an fs Writable stream lets you know that the file descriptor has been closed and no more writes are going to occur.
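One edge case the snippet above doesn't cover: if a request contains no files at all (or every file write flushes before busboy's finish fires), no response is ever sent. A small, hedged tweak is to also check the counter inside busboy's own finish handler:

busboy.on('finish', function() {
    finished = true;
    // nothing left pending, so it's safe to respond right away
    if (files === 0) {
        res.writeHead(200, { 'Connection': 'close' });
        res.end("");
    }
});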
I have two node servers, one on port 5000 (call it "Face") and another on port 5001 (call it "Hands")
Both are started via a foreman procfile at the same time. Ports are fixed and the url I'm targeting works in the browser.
When the Hands server starts up, it needs to talk to the Face (Facepalm?) and register itself. However, the code below doesn't seem to be working. (This is CoffeeScript-generated JS.)
Register gets called during server initialization, after the http server has been started. In case it was a timing issue, I kick off the register function with a setTimeout() of 2 seconds. I know the page that its hitting (/home/register) is available and working.
Right now I can see it get to the "Posting to" console log line. On the Face I have put a console.log in the register code and it's never logging anything - meaning I don't think it's actually getting hit. (It DOES log if hit from the browser.) And nothing errors out - it just makes the request and then wanders off to get a sandwich.
Both servers are "roll your own" - not using any frameworks. Let me know if you see a weird typo or need more info. Thanks!
register = function() {
    var _this = this;
    console.log('Registering with Face Server');
    return post_face('/home/register', GLOBAL.data, function(rs) {
        console.log(rs);
        if (rs.registered) {
            GLOBAL.data.registered = true;
            return console.log("Registered with face at " + GLOBAL.config.face_host + ":" + GLOBAL.config.face_port);
        } else {
            throw "ERROR: Could not register with face server! " + GLOBAL.config.face_host + ":" + GLOBAL.config.face_port;
            return false;
        }
    });
};
post_face = function(path, data, cb) {
    var post_data, post_options, post_req;
    post_data = querystring.stringify({
        'registration': data
    });
    post_options = {
        host: GLOBAL.config.face_host,
        port: GLOBAL.config.face_port,
        path: path,
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': post_data.length
        }
    };
    console.log("Posting to " + post_options.host + ":" + post_options.port);
    post_req = http.request(post_options, function(res) {
        var response,
            _this = this;
        console.log(res);
        response = "";
        res.setEncoding('utf8');
        res.on('data', function(chunk) {
            return response += chunk;
        });
        return res.on('end', function() {
            return cb.call(_this, response);
        });
    });
    return true;
};
Thanks to Bill above, the answer was that I wasn't actually posting the data and ending the request! Bad copy/paste/edit from some samples I was referring to. Here are the last two lines of code I should have had:
post_req.write(post_data);
post_req.end();
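For completeness, a rough sketch of how the end of post_face looks with those two lines in place (the request setup above it is unchanged):

post_req = http.request(post_options, function(res) {
    var response = "",
        _this = this;
    res.setEncoding('utf8');
    res.on('data', function(chunk) {
        response += chunk;
    });
    res.on('end', function() {
        cb.call(_this, response);
    });
});

// the missing pieces: actually send the form body and finish the request
post_req.write(post_data);
post_req.end();
return true;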
I have a Node service that fetches a PDF from an API and serves that PDF.
When I curl or directly open the API, I do see the correct PDF.
But when I serve it from my Node app, I get an empty PDF.
Here's the section of my code that does the PDF render:
} else if (options.type === 'pdf') {
    res.writeHead(200, {'content-type' : 'application/pdf', 'content-disposition': 'attachment; filename=invoice.pdf'});
    res.end(data.invoice);
I've console.log'ed data.invoice to confirm it's the right content.
typeof(data.invoice) gives "string"; I've also tried res.end(new Buffer(data.invoice));, which didn't work either.
Here's the section of my code that fetches the data:
var http_options = {
      method  : options.method
    , host    : Config.API.host
    , path    : options.path
    , port    : Config.API.port
    , headers : options.headers
};

var req = http.request(http_options, function (response) {
    var raw_response = "";
    response.on('data', function (response_data) {
        raw_response += response_data.toString();
    });
    response.on('end', function () {
        if (response.statusCode !== 200) {
            cb(raw_response);
        } else {
            cb(false, raw_response);
        }
    });
});
req.setTimeout(timeout, function () {
    req.abort();
    cb("API connection timed out");
});

req.on('error', function (error) {
    cb("API error while requesting for " + options.path + '\n' + error + '\n' + "http options: " + JSON.stringify(http_options));
});

req.end();
It's quite likely that the toString() and concatenation when you're receiving the PDF are corrupting it. Try writing raw_response to a file (you can use writeFileSync() since this is just a one-time test) and doing a byte-for-byte comparison with the same PDF retrieved with curl.
Note that if the process of string conversion has corrupted it, trying to convert it back to a buffer before sending it won't help. You'll have to keep the whole thing as a buffer from start to finish.
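If you do want to keep buffering the whole response in memory, here's a minimal sketch of doing that without any string conversion, collecting Buffer chunks and joining them with Buffer.concat (this assumes cb is happy to receive a Buffer instead of a string):

var req = http.request(http_options, function (response) {
    var chunks = [];
    response.on('data', function (chunk) {
        chunks.push(chunk); // keep every chunk as a raw Buffer
    });
    response.on('end', function () {
        var body = Buffer.concat(chunks); // binary-safe, no toString() on the PDF bytes
        if (response.statusCode !== 200) {
            cb(body.toString());
        } else {
            cb(false, body);
        }
    });
});
req.end();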
Since you don't intend to modify or read this data in transit, I suggest just using the pipe function to pipe all the data coming in from the API response straight out to your own response. This question has a good sample, but here's an excerpt:
req.on('response', function (proxy_response) {
    proxy_response.pipe(response);
    response.writeHead(proxy_response.statusCode, proxy_response.headers);
});
Note that there's no reason to convert the chunks coming in from the response from Buffers to something else; just write them through as unmodified Buffers, and stream them (which is what pipe does for you) instead of accumulating them, to get maximum efficiency (and node.js streaming hipster points).
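Adapted to the fetching code in the question, a rough sketch of the piping approach might look like this (assuming the outgoing response object the PDF should be written to is reachable as res in that scope):

var req = http.request(http_options, function (api_response) {
    // send the PDF headers, then stream the bytes through untouched
    res.writeHead(api_response.statusCode, {
        'content-type'        : 'application/pdf',
        'content-disposition' : 'attachment; filename=invoice.pdf'
    });
    api_response.pipe(res);
});
req.end();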
I'm calling a Node.js process to scrape Twitter and create a graph from the data...
here's the code:
var http = require('http');
var url = require('url');
var exec = require('child_process').exec;
var fs = require('fs');
var filepath = 'filenamegraph.png';
http.createServer(function(req, res) {
    var urlstr = url.parse(req.url, true);
    var twit = exec("php -f twitter-scraper.php " + urlstr.query.term, function(error, stdout, stderr) {
        var graphstring = stdout;
        var stream = fs.createWriteStream("graph.txt");
        stream.once('open', function(fd) {
            stream.write(graphstring, function() {
                exec('dot -T png -o filenamegraph.png graph.txt', function() {
                    fs.stat(filepath, function(err, stat) {
                        res.writeHead(200, {'Content-Type': 'image/png', 'Content-Length': stat.size });
                        fs.readFile(filepath, function(err, file_conts) {
                            res.write(file_conts);
                            res.end();
                        });
                    });
                });
            });
        });
    });
}).listen(1337);
All that is hunky dory...
Now I call this with:
$.ajax({
    url: 'http://eavesdropper.dev:1337',
    data: { term: $.url.parse().params.term },
    success: function(data) {
        var res = $.parseJSON(data);
        console.log(res);
        $("body").append('<img src="filenamegraph.png" />');
    },
    error: function(jqXHR, textStatus, errorThrown) {
        console.log(jqXHR, 'error - ' + textStatus + " - " + errorThrown + ' ||');
    },
    complete: function(jqXHR, textStatus) {
        console.log(jqXHR, textStatus);
    }
});
The work behind the scenes is fine, BUT I get this:
Object { readyState=0, status=0, statusText="error"} error - error - ||
Not sure what the next step is, so any tips are much appreciated.
PS: when run directly in the browser, the script correctly produces the graph, so I'm sure it's something around the data being returned to the AJAX call that is getting upset.
Your server is delivering the contents of the newly-created graphic file, but all your client is doing is trying to parse those contents as if they were JSON (which they obviously aren't, causing your error message) and then inserting an <img> element that makes the browser request "filenamegraph.png" from whatever server delivered the original HTML.
It looks to me like you don't need to do an AJAX request at all; just append <img src='http://eavesdropper.dev:1337/?your_params' /> and the browser will automatically get and display the generated image from your server.
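A minimal sketch of that approach (the term parameter name mirrors the one used in the AJAX call above, and is an assumption about what the node server expects):

var term = $.url.parse().params.term;

// let the browser fetch the generated PNG straight from the node server
$("body").append(
    '<img src="http://eavesdropper.dev:1337/?term=' + encodeURIComponent(term) + '" />'
);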