I have the following code:
var express = require('express');
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require("path");

var langs = fs.readdirSync({directory having multiple directories});
var arr = [];
for (var v in langs) {
    var filename = config.path.storage + "/" + langs[v] + "/assets/_assets.json";
    fs.readFile(filename, "utf8", function(err, data) {
        if (err) throw err;
        res.write(data);
        //res.end();
    });
}
res.end();
What I want to do is, once all the files in the directories are read (langs in this case contains [ 'ar-dz', 'en-gb', 'en-us' ]), do a res.send() to the client.
I tried to promisify the 'fs' module via the 'bluebird' module. I assume the callback in fs.readFileAsync() is causing the problem; is there a way around it?
The error "Can't set headers after they are sent." occurs because you are calling res.write() after you've called res.end(). This happens because the async response from fs.readFile() arrives later, after your for loop is done. Instead, you need to use your promises to coordinate the results of your async operations, and when they are all done, you can send your response with all the data.
You can use your Bluebird promise library to do that like this:
fs.readdirAsync(...).map(function(file) {
    return fs.readFileAsync(file, 'utf8');
}).then(function(arrayOfData) {
    res.send(arrayOfData.join(""));
}).catch(function(err) {
    // send some error response here
});
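Applied to the original code, the whole route handler might look something like the sketch below. This is only a sketch under assumptions from the question: config.path.storage is taken to be the directory holding the language folders, and an Express app.get route is assumed; the full per-language path still has to be built before reading each file.

var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));

app.get('/assets', function(req, res) {
    var storage = config.path.storage; // assumed: the directory holding the language folders
    fs.readdirAsync(storage).map(function(lang) {
        // build the full path for each language folder before reading
        return fs.readFileAsync(storage + "/" + lang + "/assets/_assets.json", 'utf8');
    }).then(function(arrayOfData) {
        // all files have been read; send a single response
        res.send(arrayOfData.join(""));
    }).catch(function(err) {
        res.status(500).send(err.message);
    });
});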
I am a newbie in Node.js and trying to get my head around functional programming.
I have the following code in a file called findinfo.js and I'm trying to pass the result to the main server.js:
const fs = require('fs');

const values = ["yes", "no", "?", "unknown", "partial"];
var cInfo = [];

function getFile (cb) {
    fs.readFile('./scripts/blahblah.json', 'utf-8', function (err, jfile) {
        if (err) {
            throw new Error (err);
        }
        console.log("Function is executing...")
        JSON.parse(jfile);
        console.log('Parsing file done');
        cb(jfile);
    });
}
Then I'm trying to call this function from server.js,
var findinfo = require('./findinfo');
console.log(getFile());
which as expected crashes the program.
So what changes should I make in order to make it work?
You need to export getFile so it can be imported using require.
const fs = require('fs');

const values = ["yes", "no", "?", "unknown", "partial"];
var cInfo = [];

function getFile (cb) {
    fs.readFile('./scripts/blahblah.json', 'utf-8', function (err, jfile) {
        if (err) {
            // throw new Error(err); // don't throw inside an async callback
            return cb(err);
        }
        console.log("Function is executing...");
        JSON.parse(jfile);
        console.log('Parsing file done');
        cb(null, jfile);
    });
}

module.exports = getFile;
server.js
var getFile = require('./findinfo');

getFile(function(err, file) {
    console.log(err, file);
});
Since getFile is an asynchronous function, you have to wait until it finishes (that is, until cb is called) before you can console.log the result.
Also, using throw inside an asynchronous callback is not recommended, since it will crash the server; pass the error to the callback instead.
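As an aside (not part of the original question): the callback currently hands back the raw file text and throws away the result of JSON.parse. If the caller wants the parsed object, a minimal sketch of the same function (fs as required above):

function getFile (cb) {
    fs.readFile('./scripts/blahblah.json', 'utf-8', function (err, jfile) {
        if (err) {
            return cb(err);
        }
        var parsed;
        try {
            parsed = JSON.parse(jfile); // keep the parsed result instead of discarding it
        } catch (parseErr) {
            return cb(parseErr); // JSON.parse throws synchronously on bad input
        }
        cb(null, parsed);
    });
}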
You should take a look at this question, so you learn more about how to handle asynchronous code.
How do I return the response from an asynchronous call?
Let's say I have the following function
'use strict';
var http = require('http');

var getLikes = function(graphId, callback) {
    // request to get the # of likes
    var req = http.get('http://graph.facebook.com/' + graphId, function(response) {
        var str = '';
        // while data is incoming, concatenate it
        response.on('data', function (chunk) {
            str += chunk;
        });
        // data is fully received, and now parsable
        response.on('end', function () {
            var likes = JSON.parse(str).likes;
            var data = {
                _id: 'likes',
                value: likes
            };
            callback(null, data);
        });
    }).on('error', function(err) {
        callback(err, null);
    });
};

module.exports = getLikes;
I would like to test it with mocha AND sinon, but I don't get how to stub the http.get.
For now I'm doing a real http.get to facebook, but I would like to avoid it.
Here is my current test:
'use strict';
/*jshint expr: true*/
var should = require('chai').should(),
    getLikes = require('getLikes');

describe('getLikes', function() {
    it('should return likes', function(done) {
        getLikes(function(err, likes) {
            should.not.exist(err);
            likes._id.should.equal('likes');
            likes.value.should.exist();
            done();
        });
    });
});
How can I achieve what I want, without relying on something else than sinon? (I don't want to use the request module to perform the get, or using another testing lib)
Thanks!
You should be able to do this with just sinon.stub(http, 'get').yields(fakeStream); but you might be better served by looking at nock and/or rewire. nock would let you fake the facebook response without mucking too much in the getLikes implementation details. rewire would let you swap in a mock http variable into the getLikes scope without monkey patching the http.get function globally.
To do it with just sinon as above, you'll need to create a mock response that properly resembles a stream. Something like:
var resumer = require('resumer');
var fakeLikes = {_id: 'likes', value: 'foo'};
var stream = resumer().queue(JSON.stringify(fakeLikes)).end();
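Putting that together, a minimal sketch of the stubbed test might look like this (assuming mocha, chai, sinon, and resumer are installed, and adjusting the getLikes require path as needed). The fake body here is { likes: 42 } so that JSON.parse(str).likes yields a value, and note that the stub must also return an object with an on method, because getLikes chains .on('error', ...) onto http.get's return value:

var sinon = require('sinon');
var http = require('http');
var resumer = require('resumer');
var should = require('chai').should();
var getLikes = require('./getLikes');

describe('getLikes (stubbed)', function() {
    beforeEach(function() {
        // a stream that will emit the fake JSON body, then 'end'
        var stream = resumer().queue(JSON.stringify({ likes: 42 })).end();
        sinon.stub(http, 'get')
            .yields(stream)                   // invokes the response callback with the fake stream
            .returns({ on: sinon.stub() });   // getLikes chains .on('error') onto the request
    });

    afterEach(function() {
        http.get.restore();
    });

    it('should return likes without hitting the network', function(done) {
        getLikes('anyGraphId', function(err, likes) {
            should.not.exist(err);
            likes._id.should.equal('likes');
            likes.value.should.equal(42);
            done();
        });
    });
});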
So I'm trying to fetch a bunch of files from a server. The current code is basically as follows.
var http = require('http');
var fs = require('fs');
var arr = [{id: 'fileOne'}, {id: 'fileTwo'}, {id: 'fileThree'}, ...];
function fetchData() {
    for (var i = 0; i < arr.length; i++) {
        var file = fs.createWriteStream("../path/file.zip");
        var request = http.get("url/AFG_adm.zip", function(response) {
            response.pipe(file);
        });
    }
}
I don't think this is the best approach; I'm trying to figure out how to handle errors and how to make sure one file finishes downloading before the next iteration starts. Any help is much appreciated.
You should use the async module to handle the async part; the request module will also save you a lot of effort.
You can handle this in many ways using either async.cargo or async.map.
The idea is to group a series of tasks and then act on them according to what you want done, but in an async way.
So a basic .map over an array of files to download would look like this:
// required modules
var fs = require('fs');
var async = require('async');
var request = require('request');

// array of urls
var URLs = ['hxxp://...ZipFile1.zip', 'hxxp://...ZipFile2.zip'];

// destination directory
var destinationDirectory = 'downloads';

// asyncDownload function
function asyncDownload(url, callback) {
    // get filename (everything after the last slash)
    var filename = url.substring(url.lastIndexOf("/") + 1);

    // create write stream
    var stream = fs.createWriteStream(destinationDirectory + "/" + filename);

    // listen for the open event to start the request and pipe
    stream.on('open', function () {
        request(url).pipe(stream);
    });

    // when finished, call the callback with the saved path
    stream.on('finish', function () {
        callback(null, destinationDirectory + "/" + filename);
    });
}

async.map(URLs, asyncDownload, function (err, results) {
    console.log(results);
});
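As a side note (not in the original answer): if, as the question suggests, each file must finish downloading before the next one starts, async.mapSeries is a drop-in replacement for async.map with the same signature but sequential execution:

async.mapSeries(URLs, asyncDownload, function (err, results) {
    if (err) {
        // a download failed; results holds whatever completed so far
        return console.error(err);
    }
    console.log(results);
});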
I'm pretty new to Node.js, and I'm trying to read a file into a variable.
Here is my code:
var fs = require("fs"),
path = require("path"),
util = require("util");
var content;
console.log(content);
fs.readFile(path.join(__dirname,"helpers","test.txt"), 'utf8',function (err,data) {
if (err) {
console.log(err);
process.exit(1);
}
content = util.format(data,"test","test","test");
});
console.log(content);
But every time I run the script I get
undefined and undefined
What am I missing? Help please!
As stated in the comments under your question, Node is asynchronous, meaning that your function has not completed execution when your second console.log call runs.
If you move the log statement inside the callback, after reading the file, you should see the contents output:
var fs = require("fs"),
path = require("path"),
util = require("util");
var content;
console.log(content);
fs.readFile(path.join(__dirname, "helpers", "test.txt"), 'utf8', function (err, data) {
if (err) {
console.log(err);
process.exit(1);
}
content = util.format(data, "test", "test", "test");
console.log(content);
});
Even though this will solve your immediate problem, without an understanding of the async nature of Node you're going to encounter a lot of issues.
This similar Stack Overflow answer goes into more detail about what other alternatives are available.
The following code snippet uses ReadStream. It reads your data in separate chunks; if your data file is small, it will arrive in a single chunk. However, this is an asynchronous task, so if you want to perform any task with your data, you need to do it inside the ReadStream handlers.
var fs = require('fs');

/* pass your file's directory and name instead of __dirname + '/readMe.txt' */
var readStream = fs.createReadStream(__dirname + '/readMe.txt', 'utf8');

var content = '';
readStream.on('data', function(chunk) {
    content += chunk; // accumulate chunks; a small file arrives as a single chunk
});
readStream.on('end', function() {
    performTask();
});

function performTask() {
    console.log(content);
}
There is also an easier way, using a synchronous call. Because it is synchronous, you do not need to worry about callbacks: the program only moves to the next line after the current line has finished executing, unlike with the asynchronous version.
A clearer and more detailed answer is provided in the following link:
Get data from fs.readFile
var fs = require('fs');

/* pass your file name instead of 'readMe.txt' and make sure the file is in the same directory */
var content = fs.readFileSync('readMe.txt', 'utf8');
Or, just as easily, as follows:
const fs = require('fs');
const doAsync = require('doasync');

doAsync(fs).readFile('./file.txt', 'utf8') // pass 'utf8' to get a string instead of a Buffer
    .then((data) => console.log(data));
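As a side note (not in the original answers): on Node 10 and later, the built-in fs.promises API gives you the same promise-based interface without an extra dependency:

const fs = require('fs').promises;

fs.readFile('./file.txt', 'utf8')
    .then((data) => console.log(data))
    .catch((err) => console.error(err));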
I'm using the Request module to download files, but I'm not quite sure how to pipe the response to an output stream when the filename must come from the 'Content-Disposition' header. So basically, I need to read the response until the header is found, and then pipe the rest to that filename.
The examples show something like:
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'));
What I want to do is more like this (pseudocode):
var req = request('http://example.com/download_latest_version?token=XXX');
var filename = req.response.headers['Content-Disposition'];
req.pipe(fs.createWriteStream(filename));
I could get the filename using the Request callback:
request(url, function(err, res, body) {
// get res headers here
});
But wouldn't that negate the benefits of using pipe and not loading the downloaded file into memory?
I'm requesting an image from Yahoo, and it isn't using the content-disposition header, but I am extracting the date and content-type headers to construct a filename. This seems close enough to what you're trying to do...
var request = require('request'),
    fs = require('fs');

var url2 = 'http://l4.yimg.com/nn/fp/rsz/112113/images/smush/aaroncarter_635x250_1385060042.jpg';

var r = request(url2);
r.on('response', function (res) {
    res.pipe(fs.createWriteStream('./' + res.headers.date + '.' + res.headers['content-type'].split('/')[1]));
});
Ignore my image choice please :)
The question has been around a while, but today I faced the same problem and solved it differently:
var Request = require('request'),
    Fs = require('fs');

// RegExp to extract the filename from Content-Disposition
var regexp = /filename=\"(.*)\"/gi;

// initiate the download
var req = Request.get('url.to/somewhere')
    .on('response', function(res) {
        // extract filename
        var filename = regexp.exec(res.headers['content-disposition'])[1];
        // create file write stream
        var fws = Fs.createWriteStream('/some/path/' + filename);
        // set up piping
        res.pipe(fws);
        res.on('end', function() {
            // go on with processing
        });
    });
Here's my solution:
var fs = require('fs');
var request = require('request');
var through2 = require('through2');

var req = request(url);

req.on('error', function (e) {
    // Handle connection errors
    console.log(e);
});

var bufferedResponse = req.pipe(through2(function (chunk, enc, callback) {
    this.push(chunk);
    callback();
}));

req.on('response', function (res) {
    if (res.statusCode === 200) {
        try {
            var contentDisposition = res.headers['content-disposition'];
            var match = contentDisposition && contentDisposition.match(/(filename=|filename\*='')(.*)$/);
            var filename = match && match[2] || 'default-filename.out';
            var dest = fs.createWriteStream(filename);
            dest.on('error', function (e) {
                // Handle write errors
                console.log(e);
            });
            dest.on('finish', function () {
                // The file has been downloaded
                console.log('Downloaded ' + filename);
            });
            bufferedResponse.pipe(dest);
        } catch (e) {
            // Handle request errors
            console.log(e);
        }
    }
    else {
        // Handle HTTP server errors
        console.log(res.statusCode);
    }
});
The other solutions posted here use res.pipe, which can fail if the content is transferred using gzip encoding, because the response stream contains the raw (compressed) HTTP data. To avoid this problem you have to use request.pipe instead. (See the second example at https://github.com/request/request#examples.)
When using request.pipe I was getting an error: "You cannot pipe after data has been emitted from the response.", because I was doing some async stuff before actually piping (creating a directory to hold the downloaded file). I also had some problems where the file was being written with no content, which might have been due to request reading the HTTP response and buffering it.
So I ended up creating an intermediate buffering stream with through2, so that I could pipe the request to it before the response handler fires, then later piping from the buffering stream into the file stream once the filename is known.
Finally, I'm parsing the Content-Disposition header so the filename is extracted whether it is encoded in plain form or in UTF-8 form using the filename*=''file.txt syntax.
I hope this helps someone else who experiences the same issues that I had.
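For reference, the two header shapes that the regular expression above is meant to accept look like this (hypothetical filenames, with the UTF-8 form written with an empty charset, as the code above expects):

Content-Disposition: attachment; filename=report-2014.zip
Content-Disposition: attachment; filename*=''report-2014.zip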