pipe file directly from file system - node.js

I'm using "express": "^4.13.3" on node 6.9.0
When I try to pipe a JPEG image:
const path = config.storageRoot + '/' + req.params.originalFileName;
var mimetype = mime.lookup(req.params.originalFileName);
res.writeHead(200, { 'Content-Type': mimetype});
fs.createReadStream(path).pipe(res);
I get XML data inside the result:
<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.5-c014 79.151739, 2013/04/03-12:12:15 ">
When I use res.end with the result from fs.readFile instead, the binary content comes through correctly.
What am I doing wrong?

Take a look at how I'm piping files in the examples in this answer:
How to serve an image using nodejs
It's something like this:
// 'type' is the MIME type
var s = fs.createReadStream(file);
s.on('open', function () {
  res.set('Content-Type', type);
  s.pipe(res);
});
s.on('error', function () {
  res.set('Content-Type', 'text/plain');
  res.status(404).end('Not found');
});
So I'm just setting the header to be sent by Express instead of writing the headers explicitly. Also, I'm handling the stream events. Maybe you should try doing it similarly, because the way I did it seems to work, according to Travis:
https://travis-ci.org/rsp/node-static-http-servers
Another thing, in addition to handling the stream events and errors, would be to make sure that you have the correct encoding, permissions, etc. I don't know what result you are expecting, or what that XML means or where it comes from, but handling the stream events may tell you more about what is happening.
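If you want to see how that could be wired into an Express route like the one in your question, here is a rough sketch. The route path, port, and storageRoot value are placeholders; mime.lookup is the same call your own snippet uses.
const express = require('express');
const fs = require('fs');
const path = require('path');
const mime = require('mime'); // same module the question already uses

const app = express();

// placeholder; use your own config.storageRoot
const storageRoot = '/path/to/files';

app.get('/files/:originalFileName', function (req, res) {
  const filePath = path.join(storageRoot, req.params.originalFileName);
  const type = mime.lookup(req.params.originalFileName);
  const s = fs.createReadStream(filePath);

  // only send headers once the file has actually been opened
  s.on('open', function () {
    res.set('Content-Type', type);
    s.pipe(res);
  });

  // a missing or unreadable file becomes a 404 instead of a broken response
  s.on('error', function () {
    res.set('Content-Type', 'text/plain');
    res.status(404).end('Not found');
  });
});

app.listen(3000);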

Related

Redirect Readable object stdout process to file in node

I use an NPM library to parse markdown to HTML like this:
var Markdown = require('markdown-to-html').Markdown;
var md = new Markdown();
...
md.render('./test', opts, function(err) {
  md.pipe(process.stdout);
});
This outputs the result to my terminal as intended.
However, I need the result inside the execution of my node program. I thought about writing the output stream to file and then reading it in at a later time but I can't figure out a way to write the output to a file instead.
I tried playing around with var file = fs.createWriteStream('./test.html'); but Node.js streams give me more headaches than results.
I've also looked into the library's repo and Markdown inherits from Readable via util like this:
var util = require('util');
var Readable = require('stream').Readable;
util.inherits(Markdown, Readable);
Any resources or advice would be highly appreciated. (I would also take another library for parsing the markdown, but this gave me the best results so far)
Actually, creating a writable file stream and piping the markdown into it should work just fine. Try it with:
const writeStream = fs.createWriteStream('./output.html');

md.render('./test', opts, function(err) {
  if (err) return console.log(err);
  md.pipe(writeStream);
});

// in case of errors on the write stream you should handle them
writeStream.on('error', function(err) {
  console.log(err);
});
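And since you mentioned needing the result back inside your program, waiting for the write stream's 'finish' event before reading the file back in is one way to do that. A rough sketch building on the snippet above; './output.html' and the empty opts object are placeholders:
const fs = require('fs');
const Markdown = require('markdown-to-html').Markdown;

const md = new Markdown();
const writeStream = fs.createWriteStream('./output.html');
const opts = {}; // whatever options you already pass to render()

md.render('./test', opts, function(err) {
  if (err) return console.error(err);
  md.pipe(writeStream);
});

writeStream.on('error', function(err) {
  console.error(err);
});

// 'finish' fires once everything has been flushed to disk,
// so this is the point where the HTML can safely be read back in
writeStream.on('finish', function() {
  fs.readFile('./output.html', 'utf8', function(err, html) {
    if (err) return console.error(err);
    // `html` now holds the rendered markdown inside your program
    console.log('Got %d characters of HTML', html.length);
  });
});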

Node writeFileSync encoding options for images

I'm using fs.writeFileSync(file, data[, options]) to save a file returned from http.get(options[, callback]).
This works fine for text files, but images, PDFs, etc. end up corrupted. From the searching around I've done, it's apparently because fs.writeFileSync(file, data[, options]) defaults to UTF-8.
I've tried setting the encoding to 'binary', the MIME type, and the extension, to no avail. It feels like something really obvious that I'm overlooking; can anyone point me in the right direction?
Thank you in advance
Update
I'm running this through Electron. I didn't think it was worth mentioning, since Electron is just running Node, but I'm not a Node or Electron expert, so I'm not sure.
Create a Buffer from the image data and set its encoding to binary. Then pass that data into a stream.PassThrough and pipe that into a stream.Writable.
var fs = require('fs');
var stream = require('stream');

var imgStream = new stream.PassThrough();
imgStream.end(Buffer.from(data, 'binary'));

var wStream = fs.createWriteStream('./<dest>.<ext>');

imgStream.once('end', () => {
  console.log('Image Written');
});

imgStream.once('error', (err) => {
  console.log(err);
});

imgStream.pipe(wStream);
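For completeness, here is one way the `data` above could end up as a binary string in the first place, given that the file comes from http.get as in the question. This is only a sketch; FILE_URL and the destination filename are placeholders:
var http = require('http');
var fs = require('fs');
var stream = require('stream');

var FILE_URL = 'http://example.com/image.jpg'; // placeholder

http.get(FILE_URL, function(response) {
  // 'binary' (latin1) keeps each byte intact, so the string can be
  // turned back into the original bytes with Buffer.from(data, 'binary')
  response.setEncoding('binary');

  var data = '';
  response.on('data', function(chunk) {
    data += chunk;
  });

  response.on('end', function() {
    var imgStream = new stream.PassThrough();
    imgStream.end(Buffer.from(data, 'binary'));
    imgStream.pipe(fs.createWriteStream('./image.jpg'));
  });
});
Piping the response straight into fs.createWriteStream would also avoid the string/encoding round trip entirely.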

export array to csv Nodejs

I'm trying to export an array of data to a CSV file in Node.js. I'm using the NodeCSV module, but I just get errors every time I try to follow the examples and documentation. It prints a bunch of code and says stringifier has no method 'indexOf' when I try to use the stringifier, and it says 'object is not a function' when I try
csv().from.array(results).to('/temp/users.csv')
It doesn't work if I take the () off csv either. Does anyone know how to do this, or can anyone point me to some good documentation?
Hope this helps:
http.createServer(function(request, response) {
  response.setHeader('Content-disposition', 'attachment; filename=testing.csv');
  response.writeHead(200, {
    'Content-Type': 'text/csv'
  });
  csv().from(data).to(response);
}).listen(3000);
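If the csv module keeps misbehaving, a plain, library-free sketch can also get the array onto disk. This assumes `results` is an array of arrays, as in your csv().from.array(results) call, and the output path matches your example:
var fs = require('fs');

// naive CSV escaping: quote every field and double any embedded quotes
function toCsv(rows) {
  return rows.map(function(row) {
    return row.map(function(field) {
      return '"' + String(field).replace(/"/g, '""') + '"';
    }).join(',');
  }).join('\n');
}

fs.writeFile('/temp/users.csv', toCsv(results), function(err) {
  if (err) console.error(err);
});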

I want to pipe a readable css file to the http response

I have an issue with outputting the readable stream to the http response.
Behind the scenes there is a regular request/response pair coming from the generic http createServer. I check whether req.url ends in .css, and I create a readable stream for that file. I see the CSS contents in console.log, with the CSS code I expect. Then I try to pipe the readable CSS file stream to the response, but in Chrome the response body is blank when I inspect it. It is a 200 response, though. Any thoughts at first glance? I've tried different variations of where I have code commented out.
router.addRoute("[a-aA-z0-9]{1,50}.css$", function(matches){
  var cssFile = matches[0];
  var pathToCss = process.cwd() + "/" + cssFile;
  // takes care of os diffs regarding path delimiters and such
  pathToCss = path.normalize(pathToCss);
  console.log(matches);
  console.log("PATH TO CSS");
  console.log(pathToCss);

  var readable = fs.createReadStream(pathToCss);

  var write = function(chunk){
    this.queue(chunk.toString());
    console.log(chunk.toString());
  };
  var end = function(){
    this.queue(null);
  };
  var thru = through(write, end);

  //req.on("end", function(){
  res.pipe(readable.pipe(thru)).pipe(res);
  //res.end();
  //});
});
You need to pipe your readable stream into your through-stream, and then pipe it into the response:
readable.pipe(thru).pipe(res);
Edit: for preparing your CSS path, just use path.join instead of concatenating your path and normalizing it:
var pathToCss = path.join(process.cwd(), cssFile);
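Putting both of those changes together (and tidying the character class in the route pattern to [a-zA-Z0-9]), the route could look roughly like this; router, through, fs, path, and res are the same objects as in your code:
router.addRoute("[a-zA-Z0-9]{1,50}.css$", function(matches) {
  var cssFile = matches[0];
  var pathToCss = path.join(process.cwd(), cssFile);

  res.writeHead(200, { "Content-Type": "text/css" });

  var thru = through(function(chunk) {
    this.queue(chunk); // pass each chunk straight through
  }, function() {
    this.queue(null); // signal end of stream
  });

  // readable -> through -> response, flowing in one direction only
  fs.createReadStream(pathToCss).pipe(thru).pipe(res);
});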
I separated this route (CSS) out from my normal HTML-producing routes. The problem I had was that my normal routes in my router object returned strings, like res.end(compiled_html_str), and the CSS file readable stream was going through that same routing function. I made it separate by isolating it from my router.
var cssMatch = [];
if(cssMatch = req.url.match(/.+\/(.+\.css$)/)){
  res.writeHead(200, {"Content-Type": "text/css"});
  var cssFile = cssMatch[1];
  var pathToCss = process.cwd() + "/" + cssFile;
  // takes care of os diffs regarding path delimiters and such
  pathToCss = path.normalize(pathToCss);
  console.log(cssMatch);
  console.log("PATH TO CSS");
  console.log(pathToCss);

  var readable = fs.createReadStream(pathToCss);
  var cssStr = "";
  readable.on("data", function(chunk){
    cssStr += chunk.toString();
  });
  readable.on("end", function(){
    res.end(cssStr);
  });
}
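Note that since the Content-Type header is set up front here, you could also skip buffering into cssStr and simply do readable.pipe(res), as in the answer above.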

Downloading a Gzip'd file

I'm attempting to download a file using the http module in Node. While the file seems to download successfully, the resulting file cannot be opened using gzip. I've tried downloading the file through other methods, and that works, and I've tried multiple ways to open the resulting gzip'd file, but all of them produce the same error.
I did attempt to use the request module, but there seemed to be no way of accessing the returned HTTP headers before the file was finished downloading, which I need because I'd like to offer some sort of visual indicator as to how long this file is going to take to download.
This is (roughly) the code that I've got so far.
var http = require('http');
var fs = require('fs');

var progress = 0;

downloadFile = function() {
  http.get(FILE_URL, function(response) {
    var maxBytes = parseInt(response.headers['content-length'], 10);
    var dumpFile = fs.createWriteStream(FILENAME + '.dl');

    response.pipe(dumpFile);

    response
      .on('data', function(chunk) {
        progress += chunk.length;
        // progressbar-type code here
      })
      .on('end', function() {
        // pass
      });

    dumpFile.on('finish', function() {
      dumpFile.close();
      fs.rename(FILENAME + '.dl', FILENAME);
    });
  });
};
So my question: How would you advise I download a file, bearing in mind it's a large file and I need some sort of visual indicator for download progress? Should I give up on http? Or am I doing something monumentally stupid?
Thanks!
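As a side note on the visual indicator: the content-length you already read is enough to derive a percentage inside the 'data' handler. A rough sketch of what could go where the progressbar comment sits (response, progress, and maxBytes are the variables from the code above):
response.on('data', function(chunk) {
  progress += chunk.length;
  var percent = Math.round((progress / maxBytes) * 100);
  // overwrite the same terminal line instead of printing a new one per chunk
  process.stdout.write('\rDownloaded ' + percent + '%');
});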
