I updated the function to create the CSV file but now I'm getting an error:
In upload function
internal/streams/legacy.js:57
throw er; // Unhandled stream error in pipe.
^
Error: ENOENT: no such file or directory, open 'C:\Users\shiv\WebstormProjects\slackAPIProject\billingData\CSV\1548963844106output.csv'
var csvFilePath = '';
var JSONFilePath = '';
function sendBillingData(){
var message = '';
axios.get(url, {
params: {
token: myToken
}
}).then(function (response) {
message = response.data;
fields = billingDataFields;
// saveFiles(message, fields, 'billingData/');
saveFilesNew(message, fields, 'billingData/');
var file = fs.createReadStream(__dirname + '/' + csvFilePath); // <--make sure this path is correct
console.log(__dirname + '/' + csvFilePath);
uploadFile(file);
})
.catch(function (error) {
console.log(error);
});
}
The saveFilesNew function is:
function saveFilesNew(message, options, folder){
try {
const passedData = message;
var relevantData='';
if (folder == 'accessLogs/'){
const loginsJSON = message.logins;
relevantData = loginsJSON;
console.log(loginsJSON);
}
if(folder == 'billingData/'){
relevantData = passedData.members;
const profile = passedData.members[0].profile;
}
//Save JSON to the output folder
var date = Date.now();
var directoryPath = folder + 'JSON/' + date + "output";
JSONFilePath = directoryPath + '.json';
fs.writeFileSync(JSONFilePath, JSON.stringify(message, null, 4), function(err) {
if (err) {
console.log(err);
}
});
//parse JSON onto the CSV
const json2csvParser = new Json2csvParser({ fields });
const csv = json2csvParser.parse(relevantData);
// console.log(csv);
//function to process the CSV onto the file
var directoryPath = folder + 'CSV/' + date + "output";
csvFilePath = directoryPath + '.csv';
let data = [];
let columns = {
real_name: 'real_name',
display_name: 'display_name',
email: 'email',
account_type: 'account_type'
};
var id = passedData.members[0].real_name;
console.log(id);
console.log("messageLength is" +Object.keys(message.members).length);
for (var i = 0; i < Object.keys(message.members).length; i++) {
console.log("value of i is" + i);
var display_name = passedData.members[i].profile.display_name;
var real_name = passedData.members[i].profile.real_name_normalized;
var email = passedData.members[i].profile.email;
var account_type = 'undefined';
console.log("name: " + real_name);
if(passedData.members[i].is_owner){
account_type = 'Org Owner';
}
else if(passedData.members[i].is_admin){
account_type = 'Org Admin';
}
else if(passedData.members[i].is_bot){
account_type = 'Bot'
}
else account_type = 'User';
data.push([real_name, display_name, email, account_type]);
}
console.log(data);
stringify(data, { header: true, columns: columns }, (err, output) => {
if (err) throw err;
fs.writeFileSync(csvFilePath, output, function(err) {
console.log(output);
if (err) {
console.log(err);
}
console.log('my.csv saved.');
});
});
} catch (err) {
console.error(err);
}
}
The uploadFile function is:
function uploadFile(file){
console.log("In upload function");
const form = new FormData();
form.append('token', botToken);
form.append('channels', 'testing');
form.append('file', file);
axios.post('https://slack.com/api/files.upload', form, {
headers: form.getHeaders()
}).then(function (response) {
var serverMessage = response.data;
console.log(serverMessage);
});
}
So I think the error is caused because Node is trying to upload the file before it has been created. I suspect this has something to do with the asynchronous nature of Node.js, but I can't see how to rectify the code. Please let me know how to correct this, and mention any improvements to the code structure/design too.
Thanks!
You don't wait for the callback provided to stringify to be executed, and that's where you create the file. (Assuming this stringify function really does accept a callback.)
Using callbacks (you could make this cleaner with promises and the neat async/await syntax, but let's stick to callbacks here), it should look more like:
function sendBillingData() {
...
// this callback we'll use to know when the file writing is done, and to get the file path
saveFilesNew(message, fields, 'billingData/', function(err, csvFilePathArgument) {
// this we will execute when saveFilesNew calls it, not when saveFilesNew returns, see below
uploadFile(fs.createReadStream(__dirname + '/' + csvFilePathArgument))
});
}
// let's name this callback... "callback".
function saveFilesNew(message, options, folder, callback) {
...
var csvFilePath = ...; // local variable only instead of your global
...
stringify(data, { header: true, columns: columns }, (err, output) => {
if (err) throw err; // or return callback(err);
fs.writeFile(csvFilePath, output, function(err) { // NOT writeFileSync, which takes no callback
console.log(output);
if (err) {
console.log(err);
// callback(err); may be a useful approach for error-handling at a higher level
}
console.log('my.csv saved.'); // yes, NOW the CSV is saved, not before this executes! Hence:
callback(null, csvFilePath); // no error, clean process, pass the file path
});
});
console.log("This line is executed before stringify's callback is called!");
return; // implicitly, yes, yet still synchronous and that's why your version crashes
}
Using callbacks that are called only when the expected events happen (a file is done writing, a buffer/string is done transforming...) allows JS to keep executing code in the meantime. And it does keep executing code, so when you need data from an async operation, you have to tell JS that it must be done before your piece of code runs.
Also, since you can pass data when calling back (it's just a function), I could avoid relying on a global csvFilePath here. Relying on higher-level variables makes things monolithic: you could not move saveFilesNew to a dedicated file where you keep your toolkit of file-related functions.
Finally, if your global process is like:
function aDayAtTheOffice() {
sendBillingData();
getCoffee();
}
then you don't need to wait for the billing data to be processed before starting making coffee. However, if your boss told you that you could NOT get a coffee until the billing data was settled, then your process would look like:
function aDayAtTheOffice() {
sendBillingData(function (err) {
// if (err) let's do nothing here: you wanted a coffee anyway, right?
getCoffee();
});
}
(Note that callbacks having potential error as first arg and data as second arg is a convention, nothing mandatory.)
IMHO you should read about scope (the callback argument can be accessed at a time when the call to saveFilesNew is already done and forgotten!), and about the asynchronous nature of No... JavaScript. ;) The meaningful keywords are there, and then Google is your buddy, your friend, your Big Brother.
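For completeness, here is a minimal sketch of the same "write, then use" flow with promises and async/await instead of callbacks. It assumes Node 10+ for fs.promises; the path and CSV content are illustrative, and uploadFile is the function from the question:
const fs = require('fs');
const fsp = fs.promises;

async function saveCsv(csvFilePath, output) {
    // resolves only once the file is fully written to disk
    await fsp.writeFile(csvFilePath, output);
    return csvFilePath;
}

async function sendBillingData() {
    const csvFilePath = await saveCsv('billingData/CSV/output.csv', 'a,b,c\n1,2,3\n'); // illustrative values
    // safe: at this point the file is guaranteed to exist
    uploadFile(fs.createReadStream(csvFilePath));
}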
The documentation for fs.rmdir is very short and doesn't explain the behavior of rmdir when the directory is not empty.
Q: What happens if I try to use this API to delete a non-empty directory?
Although it means using a third-party library for such a small thing, I could not come up with a more elegant solution. So I ended up using the npm module rimraf.
Install it
npm install rimraf
Or install it and save to 'package.json'
(other save options can be found in the npm-install docs)
npm install --save rimraf
Then you can do the following:
var rmdir = require('rimraf');
rmdir('some/directory/with/files', function(error){});
Or in CoffeeScript:
rmdir = require 'rimraf'
rmdir 'some/directory/with/files', (error)->
I wrote about this problem exactly.
My previous solution below, while simple, is not preferred. The following function is a synchronous solution, while an asynchronous one might be preferred.
var fs = require('fs');
var deleteFolderRecursive = function(path) {
var files = [];
if( fs.existsSync(path) ) {
files = fs.readdirSync(path);
files.forEach(function(file,index){
var curPath = path + "/" + file;
if(fs.lstatSync(curPath).isDirectory()) { // recurse
deleteFolderRecursive(curPath);
} else { // delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
};
[Edit] Added lstat instead of stat to prevent errors on symlinks
[Previous Solution]
My solution to this is quite easy to implement.
var exec = require('child_process').exec,child;
child = exec('rm -rf test',function(err,out) {
console.log(out); err && console.log(err);
});
This is slimmed down for this page, but the basic idea is simple: execute 'rm -rf' on the command line. If your app needs to run across different operating systems, put this in a function and have an if/else/switch to handle it.
You will want to handle all the responses; but the idea is simple enough.
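For instance, a hedged sketch of that branching, using only the built-in child_process module (rmdir /s /q being the usual Windows shell equivalent of rm -rf):
var exec = require('child_process').exec;

function removeDir(dir, callback) {
    var cmd = process.platform === 'win32'
        ? 'rmdir /s /q "' + dir + '"' // Windows shell built-in
        : 'rm -rf "' + dir + '"';     // POSIX shells
    exec(cmd, callback); // callback(err, stdout, stderr)
}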
Short answer: node.js fs.rmdir() calls the POSIX rmdir(); this will remove an empty directory, or return an error. In the given case, the call will invoke the callback function and pass it the error.
The problem here is that the node.js documentation refers to POSIX:
The Node.js API Docs File System API started out as
simple wrappers around standard POSIX functions.
This almost changes the question into a duplicate of:
Is there a listing of the POSIX API / functions?
The description for fs.rmdir is terse, but sufficient.
Asynchronous rmdir(2).
The rmdir(2) here is an implicit reference to the documentation for the rmdir() system call. The number (2) here is an old unix man page convention to indicate Section 2 of the Manual pages, containing the kernel interfaces.
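In Node terms, that POSIX behavior surfaces as an error object passed to the callback. A small sketch (the path is illustrative):
var fs = require('fs');

fs.rmdir('/tmp/not-empty', function (err) {
    if (err) console.error(err.code); // 'ENOTEMPTY' (or 'EEXIST' on some systems)
});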
Node.js v12.10.0 introduced recursive option into fs.rmdir.
As fs.mkdir supports the same option since v10.12.0, both making and removing directory can be executed recursively.
$ node --experimental-repl-await
# without recursive option -> error
> await fs.promises.mkdir('foo/bar')
Thrown:
[Error: ENOENT: no such file or directory, mkdir 'foo/bar'] {
errno: -2,
code: 'ENOENT',
syscall: 'mkdir',
path: 'foo/bar'
}
# with recursive option -> success
> await fs.promises.mkdir('foo/bar', { recursive: true })
undefined
# without recursive option -> error
> await fs.promises.rmdir('foo')
Thrown:
[Error: ENOTEMPTY: directory not empty, rmdir 'foo'] {
errno: -66,
code: 'ENOTEMPTY',
syscall: 'rmdir',
path: 'foo'
}
# with recursive option -> success
> await fs.promises.rmdir('foo', { recursive: true })
undefined
This worked for me
fs.rmdirSync(folderpath, {recursive: true});
Edit 2021:
Now it seems to have been replaced in v14 with:
fs.rmSync('./output', {recursive: true, force: true});
Just a small point among this bunch of answers, but I think it's worth making.
Personally (and generally) I would prefer to use an already existing library, if one is available, for a task like this. Adopting an existing thing means, for me and especially in the open-source world, using and improving it, which can end up with a better result than doing it on my own (I'm improving something that someone else has done).
In this case, a small search turned up the module fs-extra, which aims to be a replacement for rimraf as well and answers the need to remove directories recursively (apparently with both async and sync versions). Furthermore, it has a good number of stars on GitHub and seems currently maintained: these two conditions, in addition to the fact that it answers the need, make it the way to go for me.
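For illustration, a minimal sketch of fs-extra's remove API, as documented by the module (the path is illustrative):
const fse = require('fs-extra');

// asynchronous: remove returns a promise
fse.remove('/tmp/some/dir')
    .then(() => console.log('removed'))
    .catch(err => console.error(err));

// synchronous
fse.removeSync('/tmp/some/dir');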
fs.rmdir is not recursive.
You could instead use a recursive fs.readdir module like readdirp in order to find all files and directories .
Then remove all files, followed by all directories.
For an even simpler solution have a look at rimraf.
As of node v16, recursive fs.rmdir is now deprecated. The replacement is fs.rm.
Usage with promises:
const fs = require("fs/promises")
(async () => {
await fs.rm("directory", { recursive: true })
})()
Traditional:
const fs = require("fs")
fs.rm("directory", { recursive: true }, (err) => {
// Callback
})
The force option is also relevant to mention here, as it prevents the method from throwing an error if the folder is missing, which is useful when cleaning up temporary files.
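A short sketch of that (the folder name is illustrative):
fs.rm("temp-dir", { recursive: true, force: true }, (err) => {
    // with force: true, err stays null even if "temp-dir" never existed
})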
See the Node.js documentation for details.
Use child_process.execFile; it is faster.
From the Node.js docs:
child_process.execFile() is similar to child_process.exec() except it
does not execute a subshell but rather the specified file directly.
This works. Mimicking rm -rf DIR...
var child = require('child_process');
var rmdir = function(directories, callback) {
if(typeof directories === 'string') {
directories = [directories];
}
var args = directories;
args.unshift('-rf');
child.execFile('rm', args, {env:process.env}, function(err, stdout, stderr) {
callback.apply(this, arguments);
});
};
// USAGE
rmdir('dir');
rmdir('./dir');
rmdir('dir/*');
rmdir(['dir1', 'dir2']);
Edit: I have to admit this is not cross-platform, will not work on Windows
Here is an asynchronous recursive version that works with promises. I use the 'Q' library, but any promise library will do with a few changes (e.g. the 'fail' function).
To make use of it, we must make a few simple wrappers around some core Node functions, namely fs.stat, fs.readdir, fs.unlink and fs.rmdir to make them promise-friendly.
Here they are:
var Q = require('q');
var fs = require('fs');

function getStat(fpath) {
var def = Q.defer();
fs.stat(fpath, function(e, stat) {
if (e) { def.reject(e); } else { def.resolve(stat); }
});
return def.promise;
}
function readdir(dirpath) {
var def = Q.defer();
fs.readdir(dirpath, function(e, files) {
if (e) { def.reject(e); } else { def.resolve(files); }
});
return def.promise;
}
function rmFile(fpath) {
var def = Q.defer();
fs.unlink(fpath, function(e) { if(e) { def.reject(e); } else { def.resolve(fpath); }});
return def.promise;
}
function rmDir(fpath) {
var def = Q.defer();
fs.rmdir(fpath, function(e) { if(e) { def.reject(e); } else { def.resolve(fpath); }});
return def.promise;
}
So here is the recursive rm function:
var path = require('path');
function recursiveDelete(fpath) {
var def = Q.defer();
getStat(fpath)
.then(function(stat) {
if (stat.isDirectory()) {
return readdir(fpath)
.then(function(files) {
if (!files.length) {
return rmDir(fpath);
} else {
return Q.all(files.map(function(f) { return recursiveDelete(path.join(fpath, f)); }))
.then(function() { return rmDir(fpath); });
}
});
} else {
return rmFile(fpath);
}
})
.then(function(res) { def.resolve(res); })
.fail(function(e) { def.reject(e); })
.done();
return def.promise;
}
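Usage is then straightforward (a sketch; the path is illustrative):
recursiveDelete('/tmp/some/dir')
    .then(function () { console.log('deleted'); })
    .fail(function (e) { console.error(e); });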
Figured this was a good excuse to take a dive into the source ;)
From what I can tell, fs.rmdir is bound to the rmdir function from unistd.h. From the POSIX man page for rmdir:
The rmdir() function shall remove a directory whose name is given by
path. The directory shall be removed only if it is an empty directory.
If the directory is not an empty directory, rmdir() shall fail and set errno to [EEXIST] or [ENOTEMPTY].
In addition to the correct "no" answers, the rimraf package provides recursive delete functionality. It mimics rm -rf. It's also officially packaged by Ubuntu.
I realize this isn't exactly answering the question at hand, but I think it might be useful to someone searching here in the future (it would have been to me!): I made a little snippet that recursively deletes only empty directories. If a directory (or any of its descendant directories) has content in it, it is left alone:
var fs = require("fs");
var path = require("path");
var rmdir = function(dir) {
var empty = true, list = fs.readdirSync(dir);
for(var i = list.length - 1; i >= 0; i--) {
var filename = path.join(dir, list[i]);
var stat = fs.statSync(filename);
if(!stat.isDirectory()) {
//There are files in the directory - we can't empty it!
empty = false;
list.splice(i, 1);
}
}
//Cycle through the list of sub-directories, cleaning each as we go
for(var i = list.length - 1; i >= 0; i--) {
filename = path.join(dir, list[i]);
if (rmdir(filename)) {
list.splice(i, 1);
}
}
//Check if the directory was truly empty
if (!list.length && empty) {
console.log('delete!');
fs.rmdirSync(dir);
return true;
}
return false;
};
https://gist.github.com/azaslavsky/661020d437fa199e95ab
Most of the examples I see out there are synchronous implementations of recursively deleting a folder structure in Node.
I've also seen a few asynchronous ones that don't actually work well.
I wrote and use one that's completely asynchronous: https://gist.github.com/yoavniran/adbbe12ddf7978e070c0
Removing NON-EMPTY directories SYNCHRONOUSLY:
The file structure is as follows -
var fs = require('fs');
fs.unlink('./stuff/writeMe.txt',function(){
fs.rmdirSync('stuff');
})
First I remove the writeMe.txt file from the stuff folder using fs.unlink('./stuff/writeMe.txt'), which makes the stuff folder empty, and finally I remove the folder itself using fs.rmdirSync('stuff').
This function will recursively delete a directory or file that you specify, synchronously:
var fs = require('fs');
var path = require('path');
var _ = require('underscore'); // or lodash; provides the _.each used below
function deleteRecursiveSync(itemPath) {
if (fs.statSync(itemPath).isDirectory()) {
_.each(fs.readdirSync(itemPath), function(childItemName) {
deleteRecursiveSync(path.join(itemPath, childItemName));
});
fs.rmdirSync(itemPath);
} else {
fs.unlinkSync(itemPath);
}
}
I have not tested this function's behavior if:
the item does not exist, or
the item cannot be deleted (such as due to a permissions issue).
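If those cases matter, a hedged guard can be wrapped around it. A sketch (deleteRecursiveSafeSync is a hypothetical name):
function deleteRecursiveSafeSync(itemPath) {
    if (!fs.existsSync(itemPath)) return; // nothing to delete
    try {
        deleteRecursiveSync(itemPath);
    } catch (err) {
        // e.g. EACCES or EPERM on permission problems
        console.error('Could not delete ' + itemPath + ': ' + err.code);
    }
}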
Recursive remove directory for Node.js
It turns out that the Node.js fs module does not have a method to remove a directory and its contents recursively. Instead you should walk the directory structure and remove atomic items, i.e. individual files and empty directories. So I found a nice gist by Takuo Kihira at https://gist.github.com/2367067, made in JavaScript, and decided to make a CoffeeScript version of it:
I tried to make it fail-safe, since a synchronous removal will cause an error if a file or directory is in use at the time.
var path = require('path');
var fs = require('fs')
var dumpDirs = function (dir, name, cb) {
fs.readdir(dir, function (err, files) {
var dirs = [],
filePath, i = 0, l = files.length;
for (var i = 0; i < l; i++) {
filePath = path.join(dir, files[i]);
var stats = fs.lstatSync(filePath);
if (stats.isDirectory()) {
if (files[i].indexOf(name) != -1) {
dirs.push({
startOn: new Date(stats.ctime),
instance: files[i],
name: name
})
}
}
}
cb(dirs);
});
}
var removeDir = function (dir, callback) {
fs.readdir(dir, function (err, files) {
var c = files.length;
(function remfile(i, cb) {
if (i >= c)
return cb();
var p = path.join(dir, files[i])
fs.unlink(p, function (err) {
if (err) console.log(err);
remfile(i + 1, cb)
});
})(0, function () {
fs.rmdir(dir, function (err) {
callback()
});
});
//for (var i = 0; i < c; i++) {
// fs.unlinkSync(path.join(dir, files[i]));
//};
});
}
dumpDirs(maindir, function (dirs) {
if (dirs && dirs.length > 0) {
(function rem(i, cb) {
if (i >= dirs.length) {
return cb();
}
var folder = path.join(dump, dirs[i].instance);
removeDir(folder, function () {
rem(i + 1, cb);
});
})(0, function () {
callback();
})
}
else {
callback();
}
});
Here is the CoffeeScript prototype function I created for fluentnode that deletes a folder recursively:
String::folder_Delete_Recursive = ->
path = @toString()
if path.exists()
for file in path.files()
curPath = path.path_Combine(file)
if curPath.is_Folder()
curPath.folder_Delete_Recursive()
else
curPath.file_Delete()
fs.rmdirSync(path);
return path.not_Exists()
Here is the test:
it 'folder_Create and folder_Delete' , ->
tmpDir = "./".temp_Name_In_Folder()
expect(tmpDir.folder_Exists()).to.be.false
expect(tmpDir.folder_Create()).to.equal(tmpDir.realPath())
expect(tmpDir.folder_Exists()).to.be.true
expect(tmpDir.folder_Delete()).to.be.true
expect(tmpDir.folder_Exists()).to.be.false
it 'folder_Delete_Recursive' , ->
tmpDir = "./" .temp_Name_In_Folder().folder_Create()
tmpFile = tmpDir.temp_Name_In_Folder().file_Create()
expect(tmpDir.folder_Delete_Recursive()).to.be.true
A neat synchronous version of rmdirSync.
/**
* use with try ... catch ...
*
* If you have permission to remove all file/dir
* and no race condition and no IO exception...
* then this should work
*
* uncomment the line
* if(!fs.existsSync(p)) return
* if you care whether dir initially exists,
*
*/
var fs = require('fs')
var path = require('path')
function rmdirSync(dir,file){
var p = file? path.join(dir,file):dir;
// if(!fs.existsSync(p)) return
if(fs.lstatSync(p).isDirectory()){
fs.readdirSync(p).forEach(rmdirSync.bind(null,p))
fs.rmdirSync(p)
}
else fs.unlinkSync(p)
}
And a parallel IO, asynchronous version of rmdir. (faster)
/**
* NOTE:
*
* If there is no error, the callback will only be called once.
*
* If there are multiple errors, the callback will be called
* exactly as many times as errors occur.
*
* Sometimes this behavior may be useful, but users
* should be aware of this and handle errors in callback.
*
*/
var fs = require('fs')
var path = require('path')
function rmfile(dir, file, callback){
var p = path.join(dir, file)
fs.lstat(p, function(err, stat){
if(err) callback.call(null,err)
else if(stat.isDirectory()) rmdir(p, callback)
else fs.unlink(p, callback)
})
}
function rmdir(dir, callback){
fs.readdir(dir, function(err,files){
if(err) callback.call(null,err)
else if( files.length ){
var i,j
for(i=j=files.length; i--; ){
rmfile(dir,files[i], function(err){
if(err) callback.call(null, err)
else if(--j === 0 ) fs.rmdir(dir,callback)
})
}
}
else fs.rmdir(dir, callback)
})
}
Anyway, if you want sequential IO, with the callback called exactly once (either on success or with the first error encountered), replace the rmdir above with this one. (slower)
function rmdir(dir, callback){
fs.readdir(dir, function(err,files){
if(err) callback.call(null,err)
else if( files.length ) rmfile(dir, files[0], function(err){
if(err) callback.call(null,err)
else rmdir(dir, callback)
})
else fs.rmdir(dir, callback)
})
}
All of them depend ONLY on node.js and should be portable.
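Usage for either version looks like this (a sketch; paths are illustrative):
try {
    rmdirSync('/tmp/some/dir'); // synchronous version: throws on failure
} catch (e) {
    console.error(e);
}

rmdir('/tmp/other/dir', function (err) { // asynchronous version
    if (err) return console.error(err);
    console.log('done');
});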
This post is the top Google result for this problem, but none of the answers gives a solution that:
doesn't make use of sync functions
doesn't require external libraries
doesn't use bash directly
Here is my async solution, which assumes nothing other than a standard Node installation:
const fs = require('fs'); const Path = require('path'); // 'Path' capitalized, since the code below calls Path.join
function rm(path){
return stat(path).then((_stat) => {
if(_stat.isDirectory()){
return ls(path)
.then((files) => Promise.all(files.map(file => rm(Path.join(path, file)))))
.then(() => removeEmptyFolder(path));
}else{
return removeFileOrLink(path);
} });
function removeEmptyFolder(path){
return new Promise((done, err) => {
fs.rmdir(path, function(error){
if(error){ return err(error); }
return done("ok");
});
}); }
function removeFileOrLink(path){
return new Promise((done, err) => {
fs.unlink(path, function(error){
if(error){ return err(error); }
return done("ok");
});
}); }
function ls(path){
return new Promise((done, err) => {
fs.readdir(path, function (error, files) {
if(error) return err(error)
return done(files)
});
}); }
function stat(path){
return new Promise((done, err) => {
fs.stat(path, function (error, _stat) {
if(error){ return err(error); }
return done(_stat);
});
}); } }
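Usage (a sketch; the path is illustrative):
rm('/tmp/some/dir')
    .then(() => console.log('removed'))
    .catch((e) => console.error(e));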
Following on @geedew's answer.
Here is an asynchronous implementation of rm -r (i.e. you can pass the path of a file or a directory). I'm not an experienced Node.js developer and would appreciate any suggestions or constructive criticism.
var fs = require('fs');
function ResultsCollector (numResultsExpected, runWhenDone) {
this.numResultsExpected = numResultsExpected,
this.runWhenDone = runWhenDone;
this.numResults = 0;
this.errors = [];
this.report = function (err) {
if (err) this.errors.push(err);
this.numResults++;
if (this.numResults == this.numResultsExpected) {
if (this.errors.length > 0) return runWhenDone(this.errors);
else return runWhenDone();
}
};
}
function rmRasync(path, cb) {
fs.lstat(path, function(err, stats) {
if (err && err.code == 'ENOENT') return cb(); // doesn't exist, nothing to do
else if (err) {
return cb(err);
}
if (stats.isDirectory()) {
fs.readdir(path, function (err, files) {
if (err) return cb(err);
var resultsCollector = new ResultsCollector(files.length, function (err) {
if (err) return cb(err);
fs.rmdir(path, function (err) {
if (err) return cb(err);
return cb();
});
});
files.forEach(function (file) {
var filePath = path + '/' + file;
return rmRasync(filePath, function (err) {
return resultsCollector.report(err);
});
});
});
}
else { // file.
// delete file or link
fs.unlink(path, function (err) {
if (err) return cb(err);
return cb();
});
}
});
};
Invoke like so:
rmRasync('/path/to/some/file/or/dir', function (err) {
if (err) return console.error('Could not rm', err);
// else success
});
Surprisingly verbose and bad answers here...
To delete a non-empty directory on most systems:
import * as cp from 'child_process';
const dir = '/the/dir/to/remove';
const k = cp.spawn('bash');
k.stdin.end(`rm -rf "${dir}"`);
k.once('exit', code => {
// check the exit code
// now you are done
});
This will work on macOS and Linux, but it might not work on some versions of Windows.
I have an array which keeps URL of several files. For example:
var files = ['1.html', '2.html', '3.html'];
I need to read them asynchronously and save them in an object named cache (cache = {}).
To do this I used the code:
for(var i = 0; i < files.length; i++){
require('fs').readFile(files[i], 'utf8', function (error,data) {
cache[files[i]]=data;
});
}
In the end I have the result:
cache = { undefined : 'File 3 content' }
I do understand that readFile acts after the loop has ended and the loop variable loses its scope. Is there a way to fix this, or is there another method to read files from an array and cache them?
When your callback to readFile executes, the for loop will already have finished. So i will be files.length and files[i] will be undefined. To mitigate this, you need to wrap the variables in a closure. The simplest way to do this is to create a function which does your readFile call, and call that in the loop:
function read(file) {
require('fs').readFile(file, 'utf8', function (error,data) {
cache[file]=data;
});
}
for(var i = 0; i < files.length; i++){
read(files[i]);
}
For even better execution control, you might want to look into the async library:
function readAsync(file, callback) {
fs.readFile(file, 'utf8', callback);
}
async.map(files, readAsync, function(err, results) {
// results = ['file 1 content', 'file 2 content', ...]
});
Edit: Made use of a helper function for the async example.
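On modern Node (v10+ for fs.promises), the same in-order behavior can be had without a library; a minimal sketch under that assumption, reusing files and cache from the question:
const fsp = require('fs').promises;

Promise.all(files.map(f => fsp.readFile(f, 'utf8')))
    .then(contents => {
        // Promise.all preserves order, so contents[i] belongs to files[i]
        files.forEach((f, i) => { cache[f] = contents[i]; });
    })
    .catch(err => console.error(err));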
The existing answer didn't work for me. I found an npm package which did the job: https://www.npmjs.com/package/read-multiple-files. After npm install read-multiple-files at the command line, here's the code I used:
var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
console.log("\n");
readMultipleFiles(files, 'utf8', function(err, inputFiles) {
if(err) {
console.log("Read Error: " + err);
}
fileOne = inputFiles[0];
fileTwo = inputFiles[1];
...
console.log(fileOne);
console.log(fileTwo);
});