Node.js asynchronous issue?

Basically, I want to loop through the existing arrtickers array to get each symbol,
then read the content at each symbol's URL and save the content into a local directory.
In PHP, each ticker and symbol is printed step by step.
But in Node, the sequence gets mixed up:
it prints out all the url_optionable values first...
then sometimes prints console.log('path: ' + file), and sometimes console.log("The file was saved!").
And every time fs.writeFile runs, the sym value is not picked up; the saved file shows up as msn-.html.
for (var v = 0; v < arrtickers.length; v++) {
    var arrticker = arrtickers[v].split('##');
    var sym = $.trim(arrticker[1]);
    url_optionable = "http://sample.com/ns/?symbol=" + sym;
    console.log('url_optionable: ' + url_optionable);
    request({ uri: url_optionable }, function (error, response, body) {
        if (error && response.statusCode !== 200) {
            console.log('Error contacting ' + url_optionable);
        }
        jsdom.env({
            html: body,
            scripts: [
                jqlib
            ]
        }, function (err, window) {
            var $ = window.jQuery;
            var data = $('body').html();
            var file = "msn-" + sym + ".html";
            console.log('path: ' + file);
            fs.writeFile(file, data, function (err) {
                if (err) {
                    console.log(err);
                } else {
                    console.log("The file was saved!");
                }
            });
        });
    });
}

ZeissS is right. Basically, you can't use variables declared inside the for loop in a callback function to an asynchronous call, because they will all be set to the last value in the loop. So in your code, url_optionable and sym will correspond to arrtickers[arrtickers.length - 1].
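To see the effect in isolation, here is a minimal sketch (setTimeout stands in for the HTTP request):
var i;
for (i = 0; i < 3; i++) {
    setTimeout(function () { console.log(i); }, 0); // prints 3, 3, 3: every callback sees the same i
}
[0, 1, 2].forEach(function (i) {
    setTimeout(function () { console.log(i); }, 0); // prints 0, 1, 2: each iteration gets its own i
});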
Either use (as ZeissS suggests):
arrtickers.forEach(function (arrticker) {
    // Get symbol and url, make request etc
});
Or declare a function which takes sym and does the request, and call that in your loop:
function getSymbol(symbol) {
    // Request url and read DOM
}

for (var v = 0; v < arrtickers.length; v++) {
    var arrticker = arrtickers[v].split('##');
    var sym = $.trim(arrticker[1]);
    getSymbol(sym);
}
Personally, I would opt for the forEach solution.
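For completeness, here is a sketch of the question's loop rewritten that way, assuming the same request, jsdom, jqlib, fs and jQuery $ setup as above (note the error check also becomes ||, since response is undefined whenever error is set):
arrtickers.forEach(function (arrticker) {
    var sym = $.trim(arrticker.split('##')[1]);
    var url_optionable = "http://sample.com/ns/?symbol=" + sym;
    request({ uri: url_optionable }, function (error, response, body) {
        if (error || response.statusCode !== 200) {
            return console.log('Error contacting ' + url_optionable);
        }
        jsdom.env({ html: body, scripts: [jqlib] }, function (err, window) {
            var data = window.jQuery('body').html();
            fs.writeFile("msn-" + sym + ".html", data, function (err) {
                if (err) return console.log(err);
                console.log("The file was saved!"); // sym still holds this iteration's value
            });
        });
    });
});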

Related

Node.js function not running in order. Error: Unhandled stream error in pipe

I updated the function to create the CSV file but now I'm getting an error:
In upload function
internal/streams/legacy.js:57
throw er; // Unhandled stream error in pipe.
^
Error: ENOENT: no such file or directory, open 'C:\Users\shiv\WebstormProjects\slackAPIProject\billingData\CSV\1548963844106output.csv'
var csvFilePath = '';
var JSONFilePath = '';

function sendBillingData() {
    var message = '';
    axios.get(url, {
        params: {
            token: myToken
        }
    }).then(function (response) {
        message = response.data;
        fields = billingDataFields;
        // saveFiles(message, fields, 'billingData/');
        saveFilesNew(message, fields, 'billingData/');
        var file = fs.createReadStream(__dirname + '/' + csvFilePath); // <-- make sure this path is correct
        console.log(__dirname + '/' + csvFilePath);
        uploadFile(file);
    })
    .catch(function (error) {
        console.log(error);
    });
}
The saveFilesNew function is:
function saveFilesNew(message, options, folder) {
    try {
        const passedData = message;
        var relevantData = '';
        if (folder == 'accessLogs/') {
            const loginsJSON = message.logins;
            relevantData = loginsJSON;
            console.log(loginsJSON);
        }
        if (folder == 'billingData/') {
            relevantData = passedData.members;
            const profile = passedData.members[0].profile;
        }
        // Save JSON to the output folder
        var date = Date.now();
        var directoryPath = folder + 'JSON/' + date + "output";
        JSONFilePath = directoryPath + '.json';
        fs.writeFileSync(JSONFilePath, JSON.stringify(message, null, 4), function (err) {
            if (err) {
                console.log(err);
            }
        });
        // parse JSON onto the CSV
        const json2csvParser = new Json2csvParser({ fields });
        const csv = json2csvParser.parse(relevantData);
        // console.log(csv);
        // function to process the CSV onto the file
        var directoryPath = folder + 'CSV/' + date + "output";
        csvFilePath = directoryPath + '.csv';
        let data = [];
        let columns = {
            real_name: 'real_name',
            display_name: 'display_name',
            email: 'email',
            account_type: 'account_type'
        };
        var id = passedData.members[0].real_name;
        console.log(id);
        console.log("messageLength is" + Object.keys(message.members).length);
        for (var i = 0; i < Object.keys(message.members).length; i++) {
            console.log("value of i is" + i);
            var display_name = passedData.members[i].profile.display_name;
            var real_name = passedData.members[i].profile.real_name_normalized;
            var email = passedData.members[i].profile.email;
            var account_type = 'undefined';
            console.log("name: " + real_name);
            if (passedData.members[i].is_owner) {
                account_type = 'Org Owner';
            } else if (passedData.members[i].is_admin) {
                account_type = 'Org Admin';
            } else if (passedData.members[i].is_bot) {
                account_type = 'Bot';
            } else account_type = 'User';
            data.push([real_name, display_name, email, account_type]);
        }
        console.log(data);
        stringify(data, { header: true, columns: columns }, (err, output) => {
            if (err) throw err;
            fs.writeFileSync(csvFilePath, output, function (err) {
                console.log(output);
                if (err) {
                    console.log(err);
                }
                console.log('my.csv saved.');
            });
        });
    } catch (err) {
        console.error(err);
    }
}
The upload file function is:
function uploadFile(file) {
    console.log("In upload function");
    const form = new FormData();
    form.append('token', botToken);
    form.append('channels', 'testing');
    form.append('file', file);
    axios.post('https://slack.com/api/files.upload', form, {
        headers: form.getHeaders()
    }).then(function (response) {
        var serverMessage = response.data;
        console.log(serverMessage);
    });
}
So I think the error is caused by Node trying to upload the file before it has been created. I feel like this has something to do with the asynchronous nature of Node.js, but I fail to comprehend how to rectify the code. Please let me know how to correct this, and mention any improvements to the code structure/design too.
Thanks!
You don't wait for the callback provided to stringify to be executed, and that's where you create the file. (Assuming this stringify function really does accept a callback.)
Using callbacks (you can make this cleaner with promises and the neat async/await syntax, but let's just stick to callbacks here), it should look more like:
function sendBillingData() {
    ...
    // this callback we'll use to know when the file writing is done, and to get the file path
    saveFilesNew(message, fields, 'billingData/', function (err, csvFilePathArgument) {
        // this we will execute when saveFilesNew calls it, not when saveFilesNew returns, see below
        uploadFile(fs.createReadStream(__dirname + '/' + csvFilePathArgument));
    });
}

// let's name this callback... "callback".
function saveFilesNew(message, options, folder, callback) {
    ...
    var csvFilePath = ...; // local variable only instead of your global
    ...
    stringify(data, { header: true, columns: columns }, (err, output) => {
        if (err) throw err; // or return callback(err);
        fs.writeFile(csvFilePath, output, function (err) { // NOT writeFileSync, or no callback needed
            console.log(output);
            if (err) {
                console.log(err);
                // callback(err); may be a useful approach for error-handling at a higher level
            }
            console.log('my.csv saved.'); // yes, NOW the CSV is saved, not before this executes! Hence:
            callback(null, csvFilePath); // no error, clean process, pass the file path
        });
    });
    console.log("This line is executed before stringify's callback is called!");
    return; // implicit anyway, but still synchronous, and that's why your version crashes
}
Using callbacks that are called only when the expected events happen (a file is done writing, a buffer/string is done transforming...) lets JS keep executing code in the meantime. And it does keep executing code, so when you need data from an async operation, you have to tell JS to wait for it before executing the piece of code that depends on it.
Also, since you can pass data when calling back (it's just a function), here I could avoid relying on a global csvFilePath. Using higher-level variables makes things monolithic: for example, you could not move saveFilesNew to a dedicated file where you keep your toolkit of file-related functions.
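As an aside, here is a rough async/await sketch of the same flow. It assumes Node 10+ (fs.promises, util.promisify) and that stringify keeps the error-first (input, options, callback) signature used above; the ... placeholders stand for the same code as before:
const util = require('util');
const fsPromises = require('fs').promises;
const stringifyAsync = util.promisify(stringify); // works because the callback is the last, error-first argument

async function saveFilesNew(message, options, folder) {
    ... // build `data`, `columns` and a local `csvFilePath` exactly as before
    const output = await stringifyAsync(data, { header: true, columns: columns });
    await fsPromises.writeFile(csvFilePath, output); // resolves once the CSV really exists
    return csvFilePath; // no global needed: the value travels through the promise
}

async function sendBillingData() {
    ... // fetch `message` and `fields` as before
    const csvFilePath = await saveFilesNew(message, fields, 'billingData/');
    uploadFile(fs.createReadStream(__dirname + '/' + csvFilePath));
}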
Finally, if your global process is like:
function aDayAtTheOffice() {
    sendBillingData();
    getCoffee();
}
then you don't need to wait for the billing data to be processed before starting making coffee. However, if your boss told you that you could NOT get a coffee until the billing data was settled, then your process would look like:
function aDayAtTheOffice() {
    sendBillingData(function (err) {
        // if (err) let's do nothing here: you wanted a coffee anyway, right?
        getCoffee();
    });
}
(Note that callbacks having potential error as first arg and data as second arg is a convention, nothing mandatory.)
IMHO you should read about scope (the callback argument can be accessed at a time when the call to saveFilesNew is already done and forgotten!) and about the asynchronous nature of No... JavaScript. ;) (Sorry, probably not the best links, but they contain the meaningful keywords, and then Google is your buddy, your friend, your Big Brother.)

Node.js: Using async for request URLs - POST method sending response before execution of all URLs

I have a POST method whose request input is a list of newLink objects (newLink with attributes linkUrl and status).
I am using async.map to iterate over my URLs and check whether the links are active or not.
newLinks contains links like {www.google.com, www.nourl.com, www.xyz.com}. I am expecting that after all the requests are processed and the corresponding status is set to true or false, I can send the result using res.send(newLinks).
But the console gives the following results: "www.google.com is up", then res.send() is called, and only then "www.nourl.com is up" and "www.xyz.com is up" are logged.
So basically, after the first URL request, my code executes the function outside the async loop. I thought async would only allow the next piece of code to execute after all the URLs were validated.
app.post('/myposturl', function (req, res) {
    var request = require('request');
    let linkDetails = req.body.linkDetails;
    var i = 0;
    async.map(linkDetails, function (newLink, callback) {
        var Url = "url";
        var url = newLink.linkUrl;
        var proxiedRequest = request.defaults({ 'proxy': Url });
        proxiedRequest(url, function (error, response, body) {
            if (error) {
                console.log('Err: ' + error);
            }
            if (!error) {
                if (response.statusCode == 200 || response.statusCode == 201 ||
                    response.statusCode == 202) {
                    console.log(url + ' is up!!');
                    newLink.isActive = true;
                }
                if (response.statusCode == 301 || response.statusCode == 302) {
                    console.log(url + ' is redirecting us!!');
                    return false;
                }
            }
        });
        callback();
    }, function (err, linkDetails) {
        res.send(linkDetails);
    });
    // tried res.send here as well.
});
The callback of async.map should be invoked inside proxiedRequest. What your code does now is invoke the callback immediately, before proxiedRequest has finished. Also, return false; does not work in an asynchronous function; you should return the new status like this: callback(null, newLink). After all the requests are processed, newLinkDetails will be the array of all newLink objects.
Note that since this function applies the iteratee to each item in parallel, there is no guarantee that the iteratee functions will complete in order.
If you need to keep the order, use mapSeries instead.
Please read the docs of async.map for more. Hope it helps.
app.post('/myposturl', function (req, res) {
    // other code
    async.map(linkDetails, function (newLink, callback) {
        // other code
        proxiedRequest(url, function (error, response, body) {
            if (error) {
                console.log('Err: ' + error);
                callback(error);
                // ^ validation failed, return from here
            } else {
                // some validation & set newLink.isActive
                callback(null, newLink);
                // ^ return newLink's status by invoking the callback
            }
        });
    }, function (err, newLinkDetails) {
        // err = set if any validation failed
        // now all the requests are processed; newLinkDetails is the array of all newLink's
        res.send(newLinkDetails);
    });
});
Usually when using async.js, I follow these two principles:
Always call the callback at least AND at most once during your async function.
Call the callback only when the async function is complete OR if an error has occurred. If the latter occurs, call the callback passing the error AND stop further execution of the async function e.g. return callback(error)
I would revise your code as below:
var request = require('request');

app.post('/myposturl', function (req, res) {
    async.mapSeries(req.body.linkDetails || [], function (newLink, callback) {
        var Url = "url";
        var proxiedRequest = request.defaults({ 'proxy': Url });
        proxiedRequest(newLink.linkUrl, function (err, response, body) {
            if (err)
                return callback(err);
            // I'm assuming you don't want to stop checking the links for bad status codes
            if ([301, 302].indexOf(response.statusCode) > -1) {
                return callback(null, newLink.linkUrl + ' is redirecting us!!');
            }
            if ([200, 201, 202].indexOf(response.statusCode) == -1) {
                return callback(null, newLink.linkUrl + ' came back with ' + response.statusCode);
            }
            console.log(newLink.linkUrl + ' is up!!');
            newLink.isActive = true;
            callback(null, newLink);
        });
    }, function (err, linkDetails) {
        // when all links get checked, it will come down here,
        // or if an error occurs during the iteration, it will come down here
        console.log(err, linkDetails);
        res.send(linkDetails);
    });
});
If you only want to get back the active links, you may also want to check out async.filterSeries(). There, the callback would need to be passed a boolean as its second argument.
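A rough sketch of that variant, assuming async v2+ (error-first filter callback) and the same request/proxiedRequest setup as above:
async.filterSeries(req.body.linkDetails || [], function (newLink, callback) {
    proxiedRequest(newLink.linkUrl, function (err, response, body) {
        // the second argument is the boolean "keep this item?"
        callback(null, !err && [200, 201, 202].indexOf(response.statusCode) > -1);
    });
}, function (err, activeLinks) {
    res.send(activeLinks); // only the links that answered with a 2xx
});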

Mongoose Async Call Issue with Find Query

I have a var movieRecommendation which is being populated with data coming from MongoDB. The issue is that Mongoose's Movie.findOne() call is async, which is not letting me get my fully populated movieRecommendation, which I need to send back as the response.
exports.getRecommendation = function (req, res) {
    var movieRecommendation = [];
    var id = req.params.id;
    console.log('----- Get User Recommendation - ' + id);
    var url = 'http://52.8.48.113:8080/recommender-server/recommender/v1/recommendations/' + id + '.do';
    // make http get request
    request({
        url: url,
        json: true
    }, function (error, response, recommendations) {
        // res.json(recommendations);
        if (!error && response.statusCode === 200) {
            recommendations.forEach(function (entry) {
                Movie.findOne({ 'id': parseInt(entry.itemId) }, function (err, movieData) {
                    entry.movie = movieData;
                    movieRecommendation.push(entry);
                    // console.log('rec', movieRecommendation);
                    console.log(movieRecommendation.length);
                });
            });
        }
        console.log("====Final========" + movieRecommendation.length);
        // Output = 0
    });
    res.json(movieRecommendation); // Here movieRecommendation comes back as a blank array
};
Please let me know how I can get the fully populated movieRecommendation at the end, so it is available for the response.
For this type of issue we can use the Async library. To populate the data once all the operations are done, we can use async.each from the Async library.
For example:
NOTE: install Async with the command npm install async to use the library.
var async = require("async");

var recomenmendations = [{"data2": "value2"}, {"data1": "value2"}, {"data3": "value3"}, {"data4": "value4"}];
var movieRecommendation = [];

async.each(recomenmendations,
    function (recomenmendationItem, callback) {
        console.log("Here you can query the required data using the current recommendations ITEM");
        console.log(recomenmendationItem);
        callback();
        // Movie.find({'id': parseInt(recomenmendationItem.itemId)}, function (err, movieData) {
        //     recomenmendationItem.movie = movieData;
        //     movieRecommendation.push(recomenmendationItem);
        //     callback();
        // });
    },
    function (err) {
        console.log("here you can send your response");
        console.log("This section will be executed once all the recommendations are processed");
        // res.json(movieRecommendation)
    }
);
You can query MongoDB as shown in the commented section. You should call callback() once all the operations for an iteration are performed.
As I mentioned in one of my comments, use the callback passed to the iterator function and call it inside the Movie.findOne() callback. That way, async.each will know when each step has finished:
async.each(recommendations, function (recommendationItem, callback) {
    Movie.findOne({ 'id': parseInt(recommendationItem.itemId) }, function (err, movieData) {
        if (err) return callback(err); // if you have an error on your search, just pass it to the iterator callback
        recommendationItem.movie = movieData;
        movieRecommendation.push(recommendationItem);
        callback();
    });
}, function (error) {
    if (error) return res.json({ error: error }); // you should also check if an error occurred
    res.json(movieRecommendation);
});
Just to point out: you can also use async.eachSeries, which will only call the next step of your iteration when the previous one has returned (if that matters to you, though I think it's not your case), and it has the same signature.
@Vivek Panday, replace the following code inside your exports.getRecommendation function to get your expected output. We don't need the count variable if we use the callback function, and the important thing is that we call callback() once all the processing for an item is done. I think you have not used the callback function properly in the example you worked out. Use the following code; if there is any issue, please let me know.
var async = require('async');
var request = require("request");

var movieRecommendation = [];
var id = req.params.id;
console.log('----- Get User Recommendation - ' + id);
var url = 'http://52.8.48.113:8080/recommender-server/recommender/v1/recommendations/' + id + '.do';
// make http get request
request({
    url: url,
    json: true
}, function (error, response, recommendations) {
    if (!error && response.statusCode === 200) {
        console.log('recommendation length ' + recommendations.length);
        async.each(recommendations,
            function (recommendationItem, callback) {
                Movie.findOne({ 'id': parseInt(recommendationItem.itemId) }, function (err, movieData) {
                    recommendationItem.movie = movieData;
                    movieRecommendation.push(recommendationItem);
                    // you have to call callback() once all your processing is done
                    callback();
                });
            },
            function (err) {
                // this final function will execute once all the processing is done
                console.log(movieRecommendation);
                console.log("finally callback");
                res.json(movieRecommendation);
            }
        );
    }
});
I have tried as per the suggestion given above:
var async = require("async");

var recomenmendations = [{"data2": "value2"}, {"data1": "value2"}, {"data3": "value3"}, {"data4": "value4"}];
var movieRecommendation = [];

async.each(recomenmendations,
    function (recomenmendationItem, callback) {
        console.log("Here you can query the required data using the current recommendations ITEM");
        console.log(recomenmendationItem);
        Movie.find({'id': parseInt(recomenmendationItem.itemId)}, function (err, movieData) {
            recomenmendationItem.movie = movieData;
            movieRecommendation.push(recomenmendationItem);
            console.log("any data"); // line y
        });
        callback();
    },
    function (err) {
        console.log("here you can send your response"); // line x
        console.log("This section will be executed once all the recommendations are processed");
        // res.json(movieRecommendation)
    }
);
But I still face the same issue: line x prints before line y, which causes the same problem again.
However, I tried the approach below and achieved the expected result:
exports.getRecommendation = function (req, res) {
    var movieRecommendation = [];
    var id = req.params.id;
    console.log('----- Get User Recommendation - ' + id);
    var url = 'http://52.8.48.113:8080/recommender-server/recommender/v1/recommendations/' + id + '.do';
    // make http get request
    request({
        url: url,
        json: true
    }, function (error, response, recommendations) {
        // res.json(recommendations);
        if (!error && response.statusCode === 200) {
            console.log('recommendation length ' + recommendations.length);
            // recommendations.forEach(function(entry) {
            var count = 0;
            async.each(recommendations, function (recommendationItem) {
                // console.log(recommendationItem);
                Movie.findOne({ 'id': parseInt(recommendationItem.itemId) }, function (err, movieData) {
                    recommendationItem.movie = movieData;
                    movieRecommendation.push(recommendationItem);
                    count++;
                    console.log('final res length : ' + movieRecommendation.length);
                    console.log('final res length count : ' + count + ' and item recomm length ' + recommendations.length);
                    if (count === recommendations.length) {
                        console.log(' =====Final=====> here you can send your response =========' + movieRecommendation.length);
                        res.json(movieRecommendation);
                    }
                });
                // callback();
            });
        }
    });
};
Still, I am open to any feedback and suggestions.
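For what it's worth, the manual counter above is essentially what async.each does internally. If your Mongoose version returns promises from .exec(), a Promise.all sketch placed inside the request callback (where recommendations and res are in scope) avoids the counting altogether:
var promises = recommendations.map(function (recommendationItem) {
    return Movie.findOne({ 'id': parseInt(recommendationItem.itemId) }).exec()
        .then(function (movieData) {
            recommendationItem.movie = movieData;
            return recommendationItem;
        });
});
Promise.all(promises).then(function (movieRecommendation) {
    res.json(movieRecommendation); // runs only after every findOne has resolved
}, function (err) {
    res.status(500).json({ error: String(err) });
});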

Node - how to wait on async operations?

Sorry, just starting with node. This might be a very novice question.
Let's say I have some code which reads some files from a directory in the file system:
var fs = require('fs');

fs.readdir(__dirname + '/myfiles', function (err, files) {
    if (err) throw err;
    files.forEach(function (fileName) {
        fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
            if (err) throw err;
            console.log('finished reading file ' + fileName + ': ' + data);
            module.exports.files.push(data);
        });
    });
});
Note that all of this occurs asynchronously. Let's also say I have a Mocha test which executes this code:
describe('fileProvider', function () {
    describe('#files', function () {
        it.only('files array not empty', function () {
            assert(fileProvider.files.length > 0, 'files.length is zero');
        });
    });
});
The Mocha test runs before the files have finished being read. I know this because I see the console.log statement after the little dot that indicates a Mocha test being run (at least I think that is what it indicates). Also, if I surround the assert with a setTimeout, the assert passes.
How should I structure my code so that I can ensure the async file operations are completed? Note that this is not just a problem with testing - I need the files to be loaded fully before I can do real work in my app as well.
I don't think the right answer is to read files synchronously, because that will block the Node request / response loop, right?
Bonus question:
Even if I put the assert in a setTimeout with a 0 timeout value, the test still passes. Is this because just putting it in a setTimeout kicks it to the end of the processing chain or something so the filesystem work finishes first?
You can implement a complete callback after all files have been read.
exports.files = [];
exports.initialize = initialize;

function initialize(callback) {
    var fs = require('fs');
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) throw err;
        files.forEach(function (fileName) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) throw err;
                console.log('finished reading file ' + fileName + ': ' + data);
                exports.files.push(data);
                if (exports.files.length == files.length) {
                    callback();
                }
            });
        });
    });
}
You can call the file operation method by doing something like:
var f = require('./files.js');
if (f.files.length < 1) {
    console.log('initializing');
    f.initialize(function () {
        console.log('After: ' + f.files.length);
        var another = require('./files.js');
        console.log('Another module: ' + another.files.length);
    });
}
EDIT: Since you want to only have to call this once, you could initialize it once when the application loads. According to Node.js documentation, modules are cached after the first time they are loaded. The two above examples have been edited as well.
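A quick way to convince yourself of that caching behaviour (a hypothetical main.js sitting next to files.js):
var f1 = require('./files.js');
var f2 = require('./files.js');
console.log(f1 === f2); // true: both names point at the same cached module object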
To avoid being caught up in nested callbacks, you might want to use async's each, which lets you run the tasks asynchronously in a non-blocking manner:
https://github.com/caolan/async#each
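For instance, here is a sketch of the earlier initialize rewritten with async.each (same semantics, flatter nesting; assumes npm install async):
var fs = require('fs');
var async = require('async');

exports.files = [];
exports.initialize = function (callback) {
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) return callback(err);
        async.each(files, function (fileName, cb) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) return cb(err);
                exports.files.push(data);
                cb();
            });
        }, callback); // invoked once, after every file is read (or on the first error)
    });
};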
I think that's a good test; the same thing would happen in any app that used your module, i.e. its code could run before files is set. What you need to do is create a callback like @making3 suggests, or use promises. I haven't used Mocha, but there's a section on asynchronous calls. You could export the promise itself:
module.exports.getFiles = new Promise((resolve, reject) => {
    const datas = [];
    fs.readdir(__dirname + '/myfiles', function (err, files) {
        if (err) {
            reject(err);
            return;
        }
        files.forEach(function (fileName) {
            fs.readFile(__dirname + '/myfiles/' + fileName, function (err, data) {
                if (err) {
                    reject(err);
                    return;
                }
                console.log('finished reading file ' + fileName + ': ' + data);
                datas.push(data);
                if (datas.length == files.length) {
                    resolve(datas);
                }
            });
        });
    });
});
chai-as-promised lets you work directly with promises using eventually, or you can use the done callback passed to your test, I think:
describe('fileProvider', function () {
    describe('#files', function () {
        it.only('files array not empty', function (done) {
            fileProvider.getFiles.then(function (value) {
                assert(value.length > 0, 'files.length is zero');
                done();
            }, function (err) {
                done(err);
            });
        });
    });
});

Reading and returning multiple files in Node.js using fs.readFile

I'm writing a simple request handler to return a pair of CSS files. Using fs.readFileSync this was easy. However, I'm having difficulty accomplishing the same task with the async version of readFile. Below is my code. Having my response.write() calls split among two different callbacks seems to be problematic. Can someone point out what I've done wrong?
Interestingly, this code works if I put response.end() inside the first else statement. However, that creates a problem in that the second CSS file never gets returned (because response.end() has already been fired).
function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    fs.readFile('css/bootstrap.css', function (error, content) {
        if (error) {
            console.log(error);
        } else {
            response.write(content);
        }
    });
    fs.readFile('css/bootstrap-responsive.css', function (error, content) {
        if (error) {
            console.log(error);
        } else {
            response.write(content);
        }
    });
    response.end();
}
The primary issue with what you have is that response.end() gets called right away. You need to only call it after the files have done their response.write calls.
The easiest way would be to use a control flow library. Managing multiple asynchronous callbacks is generally complicated.
https://github.com/joyent/node/wiki/modules#wiki-async-flow
I'm going to use the async library because it's the one I know best.
var fs = require('fs');
var async = require('async');

function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    async.eachSeries(
        // Pass items to iterate over
        ['css/bootstrap.css', 'css/bootstrap-responsive.css'],
        // Pass iterator function that is called for each item
        function (filename, cb) {
            fs.readFile(filename, function (err, content) {
                if (!err) {
                    response.write(content);
                }
                // Calling cb makes it go to the next item.
                cb(err);
            });
        },
        // Final callback after each item has been iterated over.
        function (err) {
            response.end();
        }
    );
}
If you want to accomplish this without a library, or just want another way, this is how I would do it more directly. Basically you keep a count and call end once both file reads have finished.
function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    var count = 0;
    var handler = function (error, content) {
        count++;
        if (error) {
            console.log(error);
        } else {
            response.write(content);
        }
        if (count == 2) {
            response.end();
        }
    };
    fs.readFile('css/bootstrap.css', handler);
    fs.readFile('css/bootstrap-responsive.css', handler);
}
You can simply rely on native (ES6) Promises. The code can be as simple as follows:
var promises = ['file1.css', 'file2.css'].map(function (_path) {
    return new Promise(function (_path, resolve, reject) {
        fs.readFile(_path, 'utf8', function (err, data) {
            if (err) {
                console.log(err);
                resolve(""); // following the same code flow
            } else {
                resolve(data);
            }
        });
    }.bind(this, _path));
});

Promise.all(promises).then(function (results) {
    // Put your callback logic here
    response.writeHead(200, {"Content-Type": "text/css"});
    results.forEach(function (content) { response.write(content); });
    response.end();
});
There's a simple common solution to get them all with one callback.
You can place it anywhere in your project to reuse in many different cases.
var FS = require('fs');

/**
 * Abstract helper to asyncly read a bulk of files
 * Note that `cb` will receive an array of errors and an array of file data
 * Keys in the resulting arrays will be the same as in `paths`
 *
 * @param {Array} paths - file paths array
 * @param {Function} cb
 *   @param {Array} errors - a list of file reading errors
 *   @param {Array} data - a list of file content data
 */
function FS_readFiles(paths, cb) {
    var result = [], errors = [], l = paths.length;
    paths.forEach(function (path, k) {
        FS.readFile(path, function (err, data) {
            // decrease the number of files still pending
            --l;
            // store the error or the content under the same key as in `paths`
            err && (errors[k] = err);
            !err && (result[k] = data);
            // invoke cb once all files have been read
            !l && cb(errors.length ? errors : undefined, result);
        });
    });
}
Just feed it a bulk of files and it will return each of them to you as a buffer.
Simple example:
var cssFiles = [
    'css/bootstrap.css',
    'css/bootstrap-responsive.css'
];

function css(response) {
    FS_readFiles(cssFiles, function (errors, data) {
        response.writeHead(200, {"Content-Type": "text/css"});
        data.forEach(function (v) {
            response.write(v);
        });
        response.end();
    });
}
Off-topic: by the way, responses like this are better cached on a front-end proxy server like nginx or Varnish. They never change.
const fs = require('fs');

function readFilePromise(fileName) {
    return new Promise(function (resolve, reject) {
        fs.readFile(fileName, 'utf-8', function (err, data) {
            if (err) {
                reject(err);
            } else {
                resolve(data);
            }
        });
    });
}

Promise.all([readFilePromise("abc.txt"), readFilePromise("dec.txt")]).then(function (out) {
    console.log(out);
});
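Note that on Node 10+ the built-in fs.promises API already provides this wrapper, so the hand-rolled promisification above collapses to:
const fsp = require('fs').promises; // available on Node 10+

Promise.all([fsp.readFile('abc.txt', 'utf-8'), fsp.readFile('dec.txt', 'utf-8')])
    .then(out => console.log(out))
    .catch(err => console.error(err));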
Async is an awesome lib. However, the standard for these things is moving in the direction of promises for handling multiple asynchronous operations. In fact, in ECMAScript 6 this is a standard part of the library. There are several libraries that implement promises, including jQuery. However, for Node, I like to use 'q'.
Here is the same code using promises. One note: you might want to move the first writeHead call to coincide with the first successful read.
var Q = require('q');

function css(response) {
    response.writeHead(200, {"Content-Type": "text/css"});
    var defer = Q.defer();
    fs.readFile('css/bootstrap.css', function (error, content) {
        if (error) {
            defer.reject(error);
        } else {
            response.write(content);
            defer.resolve();
        }
    });
    defer.promise.then(function () { // this gets executed when the first read succeeds and is written
        var secondDefer = Q.defer();
        fs.readFile('css/bootstrap-responsive.css', function (error, content) {
            if (error) {
                secondDefer.reject(error);
            } else {
                response.write(content);
                secondDefer.resolve();
            }
        });
        return secondDefer.promise;
    },
    function (error) { // this gets called when the first read fails
        console.log(error);
        // other error handling
    }).done(function () {
        response.end();
    },
    function (error) { // this is the error handler for when the second read fails
        console.log(error);
        response.end(); // gotta call end anyway
    });
}
