I have a little problem with my function. I would like to get all files in many directories. Currently, I can retrieve the files in the directory passed as a parameter, but I would also like to retrieve the HTML files of each folder inside that folder. To explain: if I pass "test" as the parameter, I retrieve the files in "test", but I would also like to retrieve "test/1/*.html", "test/2/*.html", and so on:
var srcpath2 = path.join('.', 'diapo', result);
function getDirectories(srcpath2) {
  return fs.readdirSync(srcpath2).filter(function (file) {
    return fs.statSync(path.join(srcpath2, file)).isDirectory();
  });
}
The result:
[1, 2, 3]
Thanks!
It looks like the glob npm package would help you. Here is an example of how to use it:
File hierarchy:
test
├── one.html
└── test-nested
    └── two.html
JS code:
const glob = require("glob");
var getDirectories = function (src, callback) {
  glob(src + '/**/*', callback);
};
getDirectories('test', function (err, res) {
  if (err) {
    console.log('Error', err);
  } else {
    console.log(res);
  }
});
which displays:
[ 'test/one.html',
'test/test-nested',
'test/test-nested/two.html' ]
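To match only the .html files the question asks about, the glob pattern can be narrowed; a small tweak, not part of the original answer:
glob(src + '/**/*.html', callback);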
I've seen many very long answers, and it's kinda a waste of memory space. Some also use packages like glob, but if you don't want to depend on any package, here's my solution.
const Path = require("path");
const FS = require("fs");
let Files = [];
function ThroughDirectory(Directory) {
  FS.readdirSync(Directory).forEach(File => {
    const Absolute = Path.join(Directory, File);
    if (FS.statSync(Absolute).isDirectory()) return ThroughDirectory(Absolute);
    else return Files.push(Absolute);
  });
}
ThroughDirectory("./input/directory/");
It's pretty self-explanatory. There's an input directory, and it iterates through that. If one of the items is also a directory, go through that and so on. If it's a file, add the absolute path to the array.
Hope this helped :]
Using ES6 yield
const fs = require('fs');
const path = require('path');
function *walkSync(dir) {
  const files = fs.readdirSync(dir, { withFileTypes: true });
  for (const file of files) {
    if (file.isDirectory()) {
      yield* walkSync(path.join(dir, file.name));
    } else {
      yield path.join(dir, file.name);
    }
  }
}

for (const filePath of walkSync(__dirname)) {
  console.log(filePath);
}
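Since walkSync is a generator, you can also collect all the paths into an array with the spread operator, for example:
const allFiles = [...walkSync(__dirname)]; // materialize the generator into an array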
I really liked Smally's solution but didn't like the syntax.
Same solution but slightly easier to read:
const fs = require("fs");
const path = require("path");
let files = [];
const getFilesRecursively = (directory) => {
  const filesInDirectory = fs.readdirSync(directory);
  for (const file of filesInDirectory) {
    const absolute = path.join(directory, file);
    if (fs.statSync(absolute).isDirectory()) {
      getFilesRecursively(absolute);
    } else {
      files.push(absolute);
    }
  }
};
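Usage would look something like this; note that results accumulate in the module-level files array (the 'test' folder is the hypothetical hierarchy from the glob answer above):
getFilesRecursively('test');
console.log(files); // e.g. [ 'test/one.html', 'test/test-nested/two.html' ]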
Here's mine. Like all good answers it's hard to understand:
const { statSync, readdirSync } = require('fs'); // imports the snippet assumes
const { join } = require('path');

const isDirectory = path => statSync(path).isDirectory();
const getDirectories = path =>
  readdirSync(path).map(name => join(path, name)).filter(isDirectory);
const isFile = path => statSync(path).isFile();
const getFiles = path =>
  readdirSync(path).map(name => join(path, name)).filter(isFile);
const getFilesRecursively = (path) => {
  let dirs = getDirectories(path);
  let files = dirs
    .map(dir => getFilesRecursively(dir)) // go through each directory
    .reduce((a, b) => a.concat(b), []);   // map returns a 2d array (array of file arrays) so flatten
  return files.concat(getFiles(path));
};
With modern JavaScript (Node.js 10+) you can use an async generator function and loop through the results using for-await...of:
// ES modules syntax that is included by default in NodeJS 14.
// For earlier versions, use `--experimental-modules` flag
import fs from "fs/promises"
// or, without ES modules, use this:
// const fs = require('fs').promises
async function run() {
  for await (const file of getFiles()) {
    console.log(file.path)
  }
}
async function* getFiles(path = `./`) {
  const entries = await fs.readdir(path, { withFileTypes: true })
  for (let file of entries) {
    if (file.isDirectory()) {
      yield* getFiles(`${path}${file.name}/`)
    } else {
      yield { ...file, path: path + file.name }
    }
  }
}
run()
Packed into a library:
https://www.npmjs.com/package/node-recursive-directory
https://github.com/vvmspace/node-recursive-directory
List of files:
const getFiles = require('node-recursive-directory');
(async () => {
  const files = await getFiles('/home');
  console.log(files);
})()
List of files with parsed data:
const getFiles = require('node-recursive-directory');

(async () => {
  const files = await getFiles('/home', true); // add true
  console.log(files);
})()
You will get something like this:
[
  ...,
  {
    fullpath: '/home/vvm/Downloads/images/Some/Some Image.jpg',
    filepath: '/home/vvm/Downloads/images/Some/',
    filename: 'Some Image.jpg',
    dirname: 'Some'
  },
]
You can also write your own code like the below to traverse the directory:
var fs = require('fs');
function traverseDirectory(dirname, callback) {
  var directory = [];
  fs.readdir(dirname, function(err, list) {
    dirname = fs.realpathSync(dirname);
    if (err) {
      return callback(err);
    }
    var listlength = list.length;
    list.forEach(function(file) {
      file = dirname + '\\' + file;
      fs.stat(file, function(err, stat) {
        directory.push(file);
        if (stat && stat.isDirectory()) {
          traverseDirectory(file, function(err, parsed) {
            directory = directory.concat(parsed);
            if (!--listlength) {
              callback(null, directory);
            }
          });
        } else {
          if (!--listlength) {
            callback(null, directory);
          }
        }
      });
    });
  });
}
traverseDirectory(__dirname, function(err, result) {
  if (err) {
    console.log(err);
  }
  console.log(result);
});
You can check more information about it here : http://www.codingdefined.com/2014/09/how-to-navigate-through-directories-in.html
I needed to do something similar in an Electron app: get all subfolders in a given base folder, using TypeScript, and came up with this:
import { readdirSync, statSync } from "fs";
import * as path from "path";

// recursive synchronous "walk" through a folder structure, with the given base path
getAllSubFolders = (baseFolder, folderList = []) => {
  let folders: string[] = readdirSync(baseFolder).filter(file =>
    statSync(path.join(baseFolder, file)).isDirectory()
  );
  folders.forEach(folder => {
    folderList.push(path.join(baseFolder, folder));
    this.getAllSubFolders(path.join(baseFolder, folder), folderList);
  });
  return folderList; // return added so the accumulated list is usable by the caller
}
const fs = require('fs');
const path = require('path');

var filesCollection = [];
const directoriesToSkip = ['bower_components', 'node_modules', 'www', 'platforms'];

function readDirectorySynchronously(directory) {
  var currentDirectorypath = path.join(__dirname, directory);
  var currentDirectory = fs.readdirSync(currentDirectorypath, 'utf8');
  currentDirectory.forEach(file => {
    var fileShouldBeSkipped = directoriesToSkip.indexOf(file) > -1;
    var pathOfCurrentItem = path.join(__dirname, directory, file);
    if (!fileShouldBeSkipped && fs.statSync(pathOfCurrentItem).isFile()) {
      filesCollection.push(pathOfCurrentItem);
    } else if (!fileShouldBeSkipped) {
      var directorypath = path.join(directory, file);
      readDirectorySynchronously(directorypath);
    }
  });
}

readDirectorySynchronously('');
This will fill filesCollection with all the files in the directory and its subdirectories (it's recursive). You have the option to skip some directory names in the directoriesToSkip array.
Speaking of npm packages - another short option is to use fs-readdir-recursive:
const read = require("fs-readdir-recursive");
const foundFiles = read("test");
console.log(foundFiles);
Output:
[ 'one.html', 'test-nested/some_text.txt', 'test-nested/two.html' ]
If you're interested only in files with specific extension (like .html mentioned in the question) you can filter them using .endsWith():
const filteredFiles = read("test").filter(item => item.endsWith(".html"));
The accepted answer requires installing a package.
If you want a native option that is ES6:
import { readdirSync } from 'fs'
import { join } from 'path'
function walk(dir) {
  return readdirSync(dir, { withFileTypes: true }).flatMap((file) =>
    file.isDirectory() ? walk(join(dir, file.name)) : join(dir, file.name)
  );
}
This works for me:
Read the root directory with readdirSync
Then map over it, flattening as we go
If it's a directory, recurse; else return the file name
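A usage sketch, again assuming the test hierarchy from the glob answer above:
const allPaths = walk('test');
console.log(allPaths); // e.g. [ 'test/one.html', 'test/test-nested/two.html' ]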
If you'd rather work synchronously with glob, use the glob.sync() function as mentioned in their documentation. Here's the equivalent of the example provided by @Paul Mougel, but written synchronously:
const glob = require("glob");
var getDirectories = function (src) {
  return glob.sync(src + '/**/*');
};
var res = getDirectories('test');
console.log(res);
A solution with Promises based on globby:
import { globby } from 'globby';
(async () => {
  const path = '/path/to/dir';
  const files = await globby([`${path}/**/*`]);
  console.log(files);
  // [
  //   '/path/to/dir/file1.txt',
  //   '/path/to/dir/subdir/file2.txt',
  //   ...
  // ]
})()
A synchronous method with two options; simple and effective.
const path = require('path');
const fs = require('fs');

function toHierarchie_files(pathDir, output_normalize = false, removeEmpty = true)
{
    var result = {}, enqueue = [pathDir];
    // normalize the slash separator if output_normalize is true, otherwise return the value unchanged
    output_normalize = output_normalize == false ? val => { return val } : val => { return path.normalize(val) };
    // allows an absolute or relative path with extended resolution; returns a normalized absolute path to work with, or the string 'none'
    const path_exist = (path_test) => {
        var tmpTab = fs.existsSync(path.normalize(path.resolve(path_test))) == true
            ? [path.normalize(path.resolve(path_test))]
            : ['', '../', '../../']
                .map(val => path.normalize(path.resolve(__dirname, val + path_test)))
                .filter((val, index) => fs.existsSync(path.normalize(path.resolve(__dirname, val + path_test))) == true);
        return tmpTab.length > 0 ? tmpTab[0] : 'none';
    };
    // check whether the path exists and return its type ('dir' or 'file'), or the string 'none'
    const getType = (path_test) => {
        path_test = path_exist(path_test);
        return path_test == 'none' ? 'none'
            : fs.lstatSync(path_test).isDirectory() == true ? 'dir'
            : fs.lstatSync(path_test).isFile() == true ? 'file'
            : 'none';
    };
    function recursive()
    {
        // init a new entry
        var parentDir = enqueue.pop();
        result[parentDir] = [];
        // read the dir
        fs.readdirSync(path_exist(parentDir)).forEach((file, index) => {
            switch(getType(parentDir + '/' + file))
            {
                // if a dir is detected, push it onto the queue
                case 'dir': enqueue.push(output_normalize(parentDir + '/' + file)); break;
                // if it is a file, add it to the entry
                case 'file': result[parentDir].push(file); break;
                // else done
                default: break;
            };
        });
        // if the optional removeEmpty argument is true, delete entries that contain no files
        if(result[parentDir].length == 0 && removeEmpty == true){Reflect.deleteProperty(result, parentDir);}
        // if the queue is not empty, continue processing
        if(enqueue.length > 0){recursive();}
    };
    // if the given dir exists, start the recursion
    if(getType(pathDir) == 'dir'){recursive();}
    return result;
};
Result:
{
  "public/assets": [
    "favicon.ico"
  ],
  "public/assets/js": [
    "dede.js",
    "test.js"
  ],
  "public/assets/js/css/secure": [
    "config.json",
    "index.js"
  ],
  "public/assets/css": [
    "style.css"
  ]
}
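A call that could produce output of this shape might look like the following (the public folder is hypothetical):
const tree = toHierarchie_files('public');
console.log(JSON.stringify(tree, null, 2));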
You can loop through all the files and directories of the root folder; if an item is a directory, get inside it and repeat the process.
Consider the code below:
const fs = require('fs');
const path = require('path');

const target = './'; // choose the directory to target
var result = []
var filePaths = []
var tempFolder = []
const targetPath = fs.readdirSync(target);

function hit(mainPath = targetPath) {
  mainPath.forEach((file) => {
    let check = fs.statSync(file);
    if (!check.isDirectory()) {
      filePaths.push(file)
    } else {
      if (file[0] != '.') {
        tempFolder.push(file)
      }
    }
  });
  // get files from folder
  if (tempFolder.length > 0) {
    tempFolder.forEach((dir) => {
      getFiles(dir)
    })
  }
  // filePaths contains path to every file
}

function getFiles(dir) {
  var paths = fs.readdirSync(dir);
  var files = [];
  paths.forEach(function (file) {
    var fullPath = dir + '/' + file;
    files.push(fullPath);
  });
  files.forEach((tempFile) => {
    let check = fs.statSync(tempFile);
    if (check.isDirectory()) {
      getFiles(tempFile)
    } else {
      filePaths.push(tempFile)
    }
  })
}

hit(); // main function
Although not perfect in some scenarios, it should be helpful in many.
const getAllFilePath = (path: string) => {
  const addData = (_paths: string[]) => {
    const newFoldersToScrape: string[] = [];
    _paths.forEach(_path => {
      fs.readdirSync(_path).forEach((file: string) => {
        if (file.indexOf(".") === -1) {
          newFoldersToScrape.push(`${_path}/${file}`);
        } else {
          filePaths.push(`${_path}/${file}`);
        }
      });
    });
    foldersToScrape = newFoldersToScrape;
  };
  const baseDirPath = `<YOUR BASE PATH HERE>/${path}`;
  let foldersToScrape: string[] = [];
  const filePaths: string[] = [];
  addData([baseDirPath]);
  while (foldersToScrape.length !== 0) {
    addData(foldersToScrape);
  }
  return filePaths;
};
This is how I did it; I think it is similar to, yet simpler than, most of the other answers here.
const fs = require('fs')

let files = []
const getFiles = (path) => {
  if (fs.lstatSync(path).isDirectory()) { // is this a folder?
    fs.readdirSync(path).forEach(f => {   // for everything in this folder
      getFiles(path + '/' + f)            // process it recursively
    })
  } else if (path.endsWith(".ts")) {      // is this a file we are searching for?
    files.push(path)                      // record it
  }
}

getFiles("src")
It fills the "files" array with every .ts file under the "src/" directory.
Slightly modified version of @Stephen's response (https://stackoverflow.com/a/66083078/4421370) above that returns the files' paths relative to the directory you are searching, or to any arbitrary base path you supply to the function call in place of the default. If you want the full path, just call it as walkSync(dir, dir).
Search path: c:\tmp
File path: c:\tmp\test\myfile.txt
Result: test\myfile.txt
Hopefully helpful to some.
const fs = require('fs');
const path = require('path');
function *walkSync(dir, base = "") {
  const files = fs.readdirSync(dir, { withFileTypes: true })
  for (const file of files) {
    if (file.isDirectory()) {
      yield* walkSync(path.join(dir, file.name), path.join(base, file.name));
    } else {
      yield path.join(base, file.name);
    }
  }
}

for (const filePath of walkSync(__dirname)) {
  console.log(filePath);
}
Here is a compact pure function that returns all the (relative) paths in the directory.
import fs from 'fs' // fs import added; the original snippet used it without importing
import path from 'path'

const getFilesPathsRecursively = (directory: string, origin?: string): string[] =>
  fs.readdirSync(directory).reduce((files, file) => {
    const absolute = path.join(directory, file)
    return [
      ...files,
      ...(fs.statSync(absolute).isDirectory()
        ? getFilesPathsRecursively(absolute, origin || directory)
        : [path.relative(origin || directory, absolute)]),
    ]
  }, [])
The solution is written in TypeScript.
A modern solution with async/await:
No external dependencies.
An asynchronous function (non-blocking, unlike the other solutions built on readdirSync and statSync)
It is extremely fast because the asynchronous calls run in parallel (it is not waiting for a response from every file in the list).
It also has some naive error handling (if something happens with one file or folder, it will not blow up the whole process)
import path from "path";
import fs from "fs/promises"
export default async function readDirectory(directory: string): Promise<string[]> {
  const files = await fs.readdir(directory)
  const filesPromises = files.map(async (file) => {
    try {
      const absolutePath = path.join(directory, file);
      const fileStat = await fs.stat(absolutePath)
      if (fileStat.isDirectory()) {
        return await readDirectory(absolutePath);
      } else {
        return absolutePath;
      }
    } catch (err) {
      // error handling
      return [];
    }
  });
  const filesWithArrays = await Promise.all(filesPromises)
  const flatArray = filesWithArrays.reduce<string[]>((acc, fileOrArray) => acc.concat(fileOrArray), []);
  return flatArray;
}
Usage (if this is in a separate file, remember to import it):
const results = await readDirectory('some/path');
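For example, if the function lives in its own file (hypothetical path ./readDirectory.js), the import plus call would be:
import readDirectory from "./readDirectory";

const results = await readDirectory("some/path");
console.log(results);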
I did mine with TypeScript; it works well and is fairly easy to understand:
import * as fs from 'fs';
import * as path from 'path';
export const getAllSubFolders = (
  baseFolder: string,
  folderList: string[] = []
) => {
  const folders: string[] = fs
    .readdirSync(baseFolder)
    .filter(file => fs.statSync(path.join(baseFolder, file)).isDirectory());
  folders.forEach(folder => {
    folderList.push(path.join(baseFolder, folder));
    getAllSubFolders(path.join(baseFolder, folder), folderList);
  });
  return folderList;
};

export const getFilesInFolder = (rootPath: string) => {
  return fs
    .readdirSync(rootPath)
    .filter(
      filePath => !fs.statSync(path.join(rootPath, filePath)).isDirectory()
    )
    .map(filePath => path.normalize(path.join(rootPath, filePath)));
};

export const getFilesRecursively = (rootPath: string) => {
  const subFolders: string[] = getAllSubFolders(rootPath);
  const allFiles: string[][] = subFolders.map(folder =>
    getFilesInFolder(folder)
  );
  return [].concat.apply([], allFiles);
};
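Note that, as written, files sitting directly in rootPath are skipped, because getAllSubFolders returns only subfolders. If you want them included, one option is to prepend the root yourself:
const subFolders = [rootPath, ...getAllSubFolders(rootPath)];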
I'm building out a Firebase function that uses the html-pdf package (which uses PhantomJS). The function works fine on my local machine, but whenever I deploy the function to Firebase, I get the following error:
Error: html-pdf: PDF generation timeout. Phantom.js script did not exit.
I've changed the timeout parameter for pdf.create() and keep getting the same result. Any idea what might be causing this issue only when the function is deployed to Firebase? Code is below.
const pdf = require('html-pdf');

const runtimeOpts = {
  timeoutSeconds: 540, // in seconds
  memory: '2GB'
}

exports.sendToKindle = functions.runWith(runtimeOpts).https.onRequest(async (req, res) => {
  // REMOVED A BLOCK OF CODE FOR SIMPLICITY, BUT CAN PUT BACK IN IF NEEDED//
  var options = {
    format: 'Letter',
    directory: "/tmp",
    timeout: 540000, // in milliseconds
  };
  const blookFileName = createFileName(blookData.title) + '.pdf';
  const tempFilePath = path.join(os.tmpdir(), `${blookFileName}`);
  const htmlFilePath = path.join(os.tmpdir(), 'book.html');
  const htmlFs = fs.openSync(htmlFilePath, 'w');
  await fs.promises.appendFile(htmlFilePath, bookHTML);
  const fd = fs.openSync(tempFilePath, 'w');
  var html = fs.readFileSync(htmlFilePath, 'utf8');
  let mailgunObject = null;
  pdf.create(html, options).toFile(tempFilePath, async (err, res) => {
    if (err) return console.error(err);
    mailgunObject = await sendEmail(tempFilePath, kindleEmail);
    return console.log(res);
  });
  fs.closeSync(fd);
  fs.closeSync(htmlFs);
  return cors(req, res, () => {
    res.status(200).type('application/json').send({'response': 'Success'})
  })
});
I was able to solve this issue by modifying the code so that the pdf.create().toFile() call is placed within the return of the cloud function.
const pdf = require('html-pdf');

const runtimeOpts = {
  timeoutSeconds: 300, // in seconds
  memory: '1GB'
}

exports.sendToKindle = functions.runWith(runtimeOpts).https.onRequest(async (req, res) => {
  // REMOVED A BLOCK OF CODE FOR SIMPLICITY, BUT CAN PUT BACK IN IF NEEDED//
  var options = {
    format: 'Letter',
    directory: "/tmp",
    timeout: 540000, // in milliseconds
  };
  const blookFileName = createFileName(blookData.title) + '.pdf';
  const tempFilePath = path.join(os.tmpdir(), `${blookFileName}`);
  const htmlFilePath = path.join(os.tmpdir(), 'book.html');
  const htmlFs = fs.openSync(htmlFilePath, 'w');
  await fs.promises.appendFile(htmlFilePath, bookHTML);
  const fd = fs.openSync(tempFilePath, 'w');
  var html = fs.readFileSync(htmlFilePath, 'utf8');
  return cors(req, res, () => {
    pdf.create(html, options).toFile(tempFilePath, async (err, res) => {
      if (err) return console.error(err);
      let mailgunObject = await sendEmail(tempFilePath, kindleEmail);
      fs.closeSync(fd);
      fs.closeSync(htmlFs);
      return console.log(res);
    });
    res.status(200).type('application/json').send({'response': 'Success'})
  })
});
I got the same issue. I actually realized that when I called the function using html-pdf through Postman, or simply through a request in Google Chrome, the PDF would be generated within 2 or 3 seconds, whereas it took more like 2 or 3 minutes when calling it directly from my app.
So this is what I did: I put the html-pdf logic in a separate function that I deployed, and then called it:
const https = require('https');
https.get('https://us-central1-your-project-name.cloudfunctions.net/your-function-using-html-pdf');
I have tried this code but it's not working; it displays an error saying that the file does not exist in that directory.
The system takes *.txt as a literal file name, not as a file-extension pattern.
const fs = require('fs');
var oldPath = '/abc/def/ghi/*.txt'
var newPath = '/xyz/cbi/'
fs.rename(oldPath, newPath, function (err) {
  if (err) throw err
  console.log('Successfully renamed - AKA moved!')
})
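fs.rename does not expand wildcards; it expects a single concrete source path. A minimal native sketch that lists the directory, filters on the extension, and moves each file (assuming both directories exist and sit on the same device):
const fs = require('fs');
const path = require('path');

const oldDir = '/abc/def/ghi';
const newDir = '/xyz/cbi';

fs.readdirSync(oldDir)
  .filter(file => path.extname(file) === '.txt') // keep only .txt files
  .forEach(file => {
    // renameSync fails across devices/partitions; copy-then-unlink would be needed there
    fs.renameSync(path.join(oldDir, file), path.join(newDir, file));
  });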
Try this one:
const shell = require('child_process').execSync;
const src = `/abc/def/ghi`;
const dist = `/xyz/cbi`;
shell(`mv ${src}/* ${dist}`);
This will solve your problem, using fs-extra:
const fs = require('fs-extra')
// With a callback:
fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
  if (err) return console.error(err)
  console.log('success!')
})
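Note that fs.copy copies rather than moves. fs-extra also provides a move method if you want the source removed, along the same lines:
const fs = require('fs-extra')

fs.move('/tmp/myfile', '/tmp/mynewfile', err => {
  if (err) return console.error(err)
  console.log('success!')
})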
Try this one.
For one file:
const { promisify } = require('util');
const { join } = require('path');
const mv = promisify(require('mv')); // mv is callback-based, so promisify it to use await

const moveThem = async () => {
  // Move file ./js/foo.js to ./ns/qux.js
  const original = join(__dirname, 'js/foo.js');
  const target = join(__dirname, 'ns/qux.js');
  await mv(original, target);
}
For many files:
mv('source/dir', 'dest/a/b/c/dir', { mkdirp: true }, function (err) {
  // with mkdirp: true the missing dest/a/b/c tree is created; handle err here
});
OR
var spawn = require('child_process').spawn,
    mv = spawn('mv', ['/dir1/dir2/*', 'dir1/'], { shell: true }); // a shell is needed so the * wildcard is expanded
I was hoping this would be a simple thing, but I cannot find anything out there to do so.
I just want to get all folders/directories within a given folder/directory.
So for example:
<MyFolder>
|- SomeFolder
|- SomeOtherFolder
|- SomeFile.txt
|- SomeOtherFile.txt
|- x-directory
I would expect to get an array of:
["SomeFolder", "SomeOtherFolder", "x-directory"]
Or the above with the path if that was how it was served...
So does anything already exist to do the above?
Promise
import { readdir } from 'fs/promises'
const getDirectories = async source =>
  (await readdir(source, { withFileTypes: true }))
    .filter(dirent => dirent.isDirectory())
    .map(dirent => dirent.name)
Callback
import { readdir } from 'fs'
const getDirectories = (source, callback) =>
  readdir(source, { withFileTypes: true }, (err, files) => {
    if (err) {
      callback(err)
    } else {
      callback(
        files
          .filter(dirent => dirent.isDirectory())
          .map(dirent => dirent.name)
      )
    }
  })
Synchronous
import { readdirSync } from 'fs'
const getDirectories = source =>
  readdirSync(source, { withFileTypes: true })
    .filter(dirent => dirent.isDirectory())
    .map(dirent => dirent.name)
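For example, calling the synchronous variant on the folder layout from the question:
console.log(getDirectories('MyFolder'))
// e.g. [ 'SomeFolder', 'SomeOtherFolder', 'x-directory' ]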
List directories using a path.
function getDirectories(path) {
  return fs.readdirSync(path).filter(function (file) {
    return fs.statSync(path + '/' + file).isDirectory();
  });
}
Recursive solution
I came here in search of a way to get all of the subdirectories, and all of their subdirectories, etc. Building on the accepted answer, I wrote this:
const fs = require('fs');
const path = require('path');
function flatten(lists) {
  return lists.reduce((a, b) => a.concat(b), []);
}

function getDirectories(srcpath) {
  return fs.readdirSync(srcpath)
    .map(file => path.join(srcpath, file))
    .filter(path => fs.statSync(path).isDirectory());
}

function getDirectoriesRecursive(srcpath) {
  return [srcpath, ...flatten(getDirectories(srcpath).map(getDirectoriesRecursive))];
}
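Calling it on the folder from the question would yield the root plus every nested directory, for example:
console.log(getDirectoriesRecursive('MyFolder'));
// e.g. [ 'MyFolder', 'MyFolder/SomeFolder', 'MyFolder/SomeOtherFolder', 'MyFolder/x-directory' ]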
This should do it:
CoffeeScript (sync)
fs = require 'fs'

getDirs = (rootDir) ->
  files = fs.readdirSync(rootDir)
  dirs = []
  for file in files
    if file[0] != '.'
      filePath = "#{rootDir}/#{file}"
      stat = fs.statSync(filePath)
      if stat.isDirectory()
        dirs.push(file)
  return dirs
CoffeeScript (async)
fs = require 'fs'

getDirs = (rootDir, cb) ->
  fs.readdir rootDir, (err, files) ->
    dirs = []
    for file, index in files
      if file[0] != '.'
        filePath = "#{rootDir}/#{file}"
        fs.stat filePath, (err, stat) ->
          if stat.isDirectory()
            dirs.push(file)
          if files.length == (index + 1)
            cb(dirs)
JavaScript (async)
var fs = require('fs');

var getDirs = function(rootDir, cb) {
  fs.readdir(rootDir, function(err, files) {
    var dirs = [];
    for (var index = 0; index < files.length; ++index) {
      var file = files[index];
      if (file[0] !== '.') {
        var filePath = rootDir + '/' + file;
        fs.stat(filePath, function(err, stat) {
          if (stat.isDirectory()) {
            dirs.push(this.file);
          }
          if (files.length === (this.index + 1)) {
            return cb(dirs);
          }
        }.bind({index: index, file: file}));
      }
    }
  });
}
Alternatively, if you are able to use external libraries, you can use filehound. It supports callbacks, promises and sync calls.
Using promises:
const Filehound = require('filehound');

Filehound.create()
  .path("MyFolder")
  .directory() // only search for directories
  .find()
  .then((subdirectories) => {
    console.log(subdirectories);
  });
Using callbacks:
const Filehound = require('filehound');

Filehound.create()
  .path("MyFolder")
  .directory()
  .find((err, subdirectories) => {
    if (err) return console.error(err);
    console.log(subdirectories);
  });
Sync call:
const Filehound = require('filehound');

const subdirectories = Filehound.create()
  .path("MyFolder")
  .directory()
  .findSync();

console.log(subdirectories);
For further information (and examples), check out the docs: https://github.com/nspragg/filehound
Disclaimer: I'm the author.
With Node.js version >= v10.13.0, fs.readdirSync will return an array of fs.Dirent objects if the withFileTypes option is set to true.
So you can use:
const fs = require('fs')

const directories = source => fs.readdirSync(source, {
  withFileTypes: true
}).reduce((a, c) => {
  c.isDirectory() && a.push(c.name)
  return a
}, [])
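A usage sketch, assuming the MyFolder layout from the question:
console.log(directories('MyFolder'))
// e.g. [ 'SomeFolder', 'SomeOtherFolder', 'x-directory' ]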
var getDirectories = (rootdir, cb) => {
  fs.readdir(rootdir, (err, files) => {
    if (err) throw err;
    var dirs = files
      .map(filename => path.join(rootdir, filename))
      .filter(pathname => fs.statSync(pathname).isDirectory());
    return cb(dirs);
  })
}

getDirectories('.', myDirectories => console.log(myDirectories));
Using fs-extra, which promisifies the async fs calls, and the new async/await syntax:
const fs = require("fs-extra");
async function getDirectories(path) {
  let filesAndDirectories = await fs.readdir(path);
  let directories = [];
  await Promise.all(
    filesAndDirectories.map(name => {
      return fs.stat(path + name)
        .then(stat => {
          if (stat.isDirectory()) directories.push(name)
        })
    })
  );
  return directories;
}

let directories = await getDirectories("/")
This answer does not use blocking functions like readdirSync or statSync. It does not use external dependencies, nor does it find itself in the depths of callback hell.
Instead we use modern JavaScript conveniences like Promises and async/await syntax. And asynchronous results are processed in parallel, not sequentially -
const { readdir, stat } =
  require ("fs") .promises

const { join } =
  require ("path")

const dirs = async (path = ".") =>
  (await stat (path)) .isDirectory ()
    ? Promise
        .all
          ( (await readdir (path))
              .map (p => dirs (join (path, p)))
          )
        .then
          ( results =>
              [] .concat (path, ...results)
          )
    : []
I'll install an example package, and then test our function -
$ npm install ramda
$ node
Let's see it work -
> dirs (".") .then (console.log, console.error)
[ '.'
, 'node_modules'
, 'node_modules/ramda'
, 'node_modules/ramda/dist'
, 'node_modules/ramda/es'
, 'node_modules/ramda/es/internal'
, 'node_modules/ramda/src'
, 'node_modules/ramda/src/internal'
]
Using a generalised module, Parallel, we can simplify the definition of dirs -
const Parallel =
  require ("./Parallel")

const dirs = async (path = ".") =>
  (await stat (path)) .isDirectory ()
    ? Parallel (readdir (path))
        .flatMap (f => dirs (join (path, f)))
        .then (results => [ path, ...results ])
    : []
The Parallel module used above was a pattern that was extracted from a set of functions designed to solve a similar problem. For more explanation, see this related Q&A.
And an async version of getDirectories; you need the async module for this:
var fs = require('fs');
var path = require('path');
var async = require('async'); // https://github.com/caolan/async
// Original function
function getDirsSync(srcpath) {
  return fs.readdirSync(srcpath).filter(function(file) {
    return fs.statSync(path.join(srcpath, file)).isDirectory();
  });
}

function getDirs(srcpath, cb) {
  fs.readdir(srcpath, function (err, files) {
    if (err) {
      console.error(err);
      return cb([]);
    }
    var iterator = function (file, cb) {
      fs.stat(path.join(srcpath, file), function (err, stats) {
        if (err) {
          console.error(err);
          return cb(false);
        }
        cb(stats.isDirectory());
      })
    }
    async.filter(files, iterator, cb);
  });
}
A fully async version with ES6, only native packages (fs.promises) and async/await; it does the file operations in parallel:
const fs = require('fs');
const path = require('path');
async function listDirectories(rootPath) {
  const fileNames = await fs.promises.readdir(rootPath);
  const filePaths = fileNames.map(fileName => path.join(rootPath, fileName));
  const filePathsAndIsDirectoryFlagsPromises = filePaths.map(async filePath => ({
    path: filePath,
    isDirectory: (await fs.promises.stat(filePath)).isDirectory()
  }))
  const filePathsAndIsDirectoryFlags = await Promise.all(filePathsAndIsDirectoryFlagsPromises);
  return filePathsAndIsDirectoryFlags
    .filter(filePathAndIsDirectoryFlag => filePathAndIsDirectoryFlag.isDirectory)
    .map(filePathAndIsDirectoryFlag => filePathAndIsDirectoryFlag.path);
}
Tested, it works nicely.
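A usage sketch (the MyFolder path is hypothetical):
listDirectories('MyFolder').then(dirs => console.log(dirs));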
You can use graph-fs
const {Node} = require("graph-fs");
const directory = new Node("/path/to/directory");
const subDirectories = directory.children.filter(child => child.is.directory);
Using the glob package, just add a trailing slash to find directories only:
import {promise as glob} from "glob-promise"
const firstLevelFolders = await glob("MyFolder/*/")
const recursiveFolders = await glob("MyFolder/**/")
CoffeeScript version of this answer, with proper error handling:
fs = require "fs"
{join} = require "path"
async = require "async"

get_subdirs = (root, callback)->
  fs.readdir root, (err, files)->
    return callback err if err
    subdirs = []
    async.each files,
      (file, callback)->
        fs.stat join(root, file), (err, stats)->
          return callback err if err
          subdirs.push file if stats.isDirectory()
          callback null
      (err)->
        return callback err if err
        callback null, subdirs
Depends on async
Alternatively, use a module for this!
(There are modules for everything. [citation needed])
If you need the fully async version, you can have something like this:
Record the directory length and use it as an indicator to tell whether all the async stat tasks are finished.
When the async stat tasks are all finished, every file's stat has been checked, so call the callback.
This will only work as long as Node.js is single threaded, because it assumes no two async tasks will increment the counter at the same time.
'use strict';

var fs = require("fs");
var path = require("path");

var basePath = "./";

function result_callback(results) {
  results.forEach((obj) => {
    console.log("fileName: " + obj.fileName);
    console.log("isFile: " + obj.isFile);
  });
};

fs.readdir(basePath, (err, files) => {
  var results = [];
  var total = files.length;
  var finished = 0;
  files.forEach((fileName) => {
    // console.log(fileName);
    var fullPath = path.join(basePath, fileName);
    fs.stat(fullPath, (err, stat) => {
      // this will work because Node.js is single threaded,
      // therefore the counter will not be incremented at the same time by two callbacks
      finished++;
      if (stat.isFile()) {
        results.push({
          fileName: fileName,
          isFile: stat.isFile()
        });
      }
      if (finished == total) {
        result_callback(results);
      }
    });
  });
});
As you can see, this is a "depth first" approach that could result in callback hell, and it is not quite "functional". People try to solve this problem with Promises, by wrapping the async task into a Promise object:
'use strict';

var fs = require("fs");
var path = require("path");

var basePath = "./";

function result_callback(results) {
  results.forEach((obj) => {
    console.log("fileName: " + obj.fileName);
    console.log("isFile: " + obj.isFile);
  });
};

fs.readdir(basePath, (err, files) => {
  var results = [];
  var total = files.length;
  var finished = 0;
  var promises = files.map((fileName) => {
    // console.log(fileName);
    var fullPath = path.join(basePath, fileName);
    return new Promise((resolve, reject) => {
      // try to replace fullPath with "aaa": it will reject
      fs.stat(fullPath, (err, stat) => {
        if (err) {
          reject(err);
          return;
        }
        var obj = {
          fileName: fileName,
          isFile: stat.isFile()
        };
        resolve(obj);
      });
    });
  });
  Promise.all(promises).then((values) => {
    console.log("All the promises resolved");
    console.log(values);
    console.log("Filter out folders: ");
    values
      .filter((obj) => obj.isFile)
      .forEach((obj) => {
        console.log(obj.fileName);
      });
  }, (reason) => {
    console.log("Not all the promises resolved");
    console.log(reason);
  });
});
Using the fs and path modules you can get the folders. This uses the callback style. If you want files instead of folders, change isDirectory() to isFile() (see Node.js fs.Stats). Finally, you can get the file name, extension, and so on via the Node.js path module.
var fs = require("fs"),
    path = require("path");

// your <MyFolder> path
var p = "MyFolder"

fs.readdir(p, function (err, files) {
  if (err) {
    throw err;
  }
  // this gets every folder and file directly under <MyFolder>
  files.map(function (file) {
    // return the file or folder path, such as **MyFolder/SomeFile.txt**
    return path.join(p, file);
  }).filter(function (file) {
    // use the sync stat method: keep the entry only if it is a directory
    return fs.statSync(file).isDirectory();
  }).forEach(function (files) {
    // files here is a single folder name; handle the folder
    console.log("%s", files);
  });
});
Just in case anyone else ends up here from a web search, and has Grunt already in their dependency list, the answer to this becomes trivial. Here's my solution:
/**
 * Return all the subfolders of this path
 * @param {String} parentFolderPath - valid folder path
 * @param {String} glob ['/*'] - optional glob so you can do recursive if you want
 * @returns {String[]} subfolder paths
 */
getSubfolders = (parentFolderPath, glob = '/*') => {
  return grunt.file.expand({filter: 'isDirectory'}, parentFolderPath + glob);
}
Another recursive approach.
Thanks to Mayur for telling me about withFileTypes. I wrote the following code for getting the files of a particular folder recursively; it can be easily modified to get only directories.
const { readdirSync } = require('fs') // imports the snippet assumes
const path = require('path')

const getFiles = (dir, base = '') => readdirSync(dir, { withFileTypes: true }).reduce((files, file) => {
  const filePath = path.join(dir, file.name)
  const relativePath = path.join(base, file.name)
  if (file.isDirectory()) {
    return files.concat(getFiles(filePath, relativePath))
  } else if (file.isFile()) {
    file.__fullPath = filePath
    file.__relativePath = relativePath
    return files.concat(file)
  }
  return files // skip entries that are neither plain files nor directories (e.g. symlinks)
}, [])
functional programming
const fs = require('fs')
const path = require('path')
const R = require('ramda')
const getDirectories = pathName => {
  const isDirectory = pathName => fs.lstatSync(pathName).isDirectory()
  const mapDirectories = pathName => R.map(name => path.join(pathName, name), fs.readdirSync(pathName))
  const filterDirectories = listPaths => R.filter(isDirectory, listPaths)
  return {
    paths: R.pipe(mapDirectories)(pathName),
    pathsFiltered: R.pipe(mapDirectories, filterDirectories)(pathName)
  }
}
You could use dree, if using a module is affordable
const dree = require('dree');
const options = {
  depth: 1
};

const fileCallback = function() {};

const directories = [];
const dirCallback = function(dir) {
  directories.push(dir.name);
};

dree.scan('./dir', options, fileCallback, dirCallback);
console.log(directories);
The directories which are direct children of the specified path ("./dir") will be printed.
If you do not pass the option depth: 1, you will obtain all the directories recursively, not only the direct children of the specified path.