Remove directory which is not empty - node.js
In my Node application I need to remove a directory which has some files, but fs.rmdir only works on empty directories. How can I do this?
As of Node.js 14.14.0, the recommended way is to use fs.rmSync:
fs.rmSync(dir, { recursive: true, force: true });
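If you would rather not block the event loop, the promise-based counterpart in fs/promises accepts the same options (a minimal sketch; the path below is just a placeholder):

const { rm } = require('fs/promises');

(async () => {
  // Recursively delete the directory; force suppresses errors if it does not exist
  await rm('/path/to/dir', { recursive: true, force: true });
})();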
There is a module for this called rimraf (https://npmjs.org/package/rimraf). It provides the same functionality as rm -Rf.
Async usage:
var rimraf = require("rimraf");
rimraf("/some/directory", function () { console.log("done"); });
Sync usage:
rimraf.sync("/some/directory");
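If you want a promise from the callback-style API shown above, one option is to wrap it with util.promisify (a sketch on top of the usage above, not something rimraf requires):

const { promisify } = require("util");
const rimraf = require("rimraf");
const rimrafAsync = promisify(rimraf);

// Hypothetical usage: remove a directory, then continue
rimrafAsync("/some/directory")
  .then(() => console.log("done"))
  .catch(console.error);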
To remove a folder synchronously:
const fs = require('fs');
const path = require('path');
const deleteFolderRecursive = function (directoryPath) {
if (fs.existsSync(directoryPath)) {
fs.readdirSync(directoryPath).forEach((file, index) => {
const curPath = path.join(directoryPath, file);
if (fs.lstatSync(curPath).isDirectory()) {
// recurse
deleteFolderRecursive(curPath);
} else {
// delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(directoryPath);
}
};
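A hypothetical call, assuming a ./tmp/myDir directory exists relative to the working directory:

deleteFolderRecursive('./tmp/myDir');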
Most people using fs with Node.js want functions close to the "Unix way" of dealing with files. I use fs-extra to bring all the cool stuff:
fs-extra contains methods that aren't included in the vanilla Node.js
fs package. Such as mkdir -p, cp -r, and rm -rf.
Even better, fs-extra is a drop-in replacement for the native fs. All methods in fs are unmodified and attached to it.
It means that you can replace fs with fs-extra:
// this can be replaced
const fs = require('fs')
// by this
const fs = require('fs-extra')
And then you can remove a folder this way:
fs.removeSync('/tmp/myFolder');
//or
fs.remove('/tmp/myFolder', callback);
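fs-extra also returns a promise from remove when no callback is passed (true of recent fs-extra releases; check the version you have installed), so an async/await sketch could look like this:

const fs = require('fs-extra');

(async () => {
  // Removes the folder and everything inside it; resolves even if it does not exist
  await fs.remove('/tmp/myFolder');
})();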
As of Node v14.14.0 (October 2020), the fs module has fs.rm and fs.rmSync, which support recursive, non-empty directory removal:
https://nodejs.org/docs/latest-v14.x/api/fs.html#fs_fs_rm_path_options_callback
So you can now do something like this:
const fs = require('fs');
fs.rm('/path/to/delete', { recursive: true }, () => console.log('done'));
or:
const fs = require('fs');
fs.rmSync('/path/to/delete', { recursive: true });
console.log('done');
My modified version of @oconnecp's answer (https://stackoverflow.com/a/25069828/3027390).
Uses path.join for better cross-platform experience.
So, don't forget to require it.
var fs = require('fs');
var path = require('path');
Also renamed function to rimraf ;)
/**
* Remove directory recursively
* @param {string} dir_path
* @see https://stackoverflow.com/a/42505874/3027390
*/
function rimraf(dir_path) {
if (fs.existsSync(dir_path)) {
fs.readdirSync(dir_path).forEach(function(entry) {
var entry_path = path.join(dir_path, entry);
if (fs.lstatSync(entry_path).isDirectory()) {
rimraf(entry_path);
} else {
fs.unlinkSync(entry_path);
}
});
fs.rmdirSync(dir_path);
}
}
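A hypothetical usage, assuming a build directory next to the current script:

rimraf(path.join(__dirname, 'build'));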
I don't usually resurrect old threads, but there is a lot of churn here, and aside from the rimraf answer these all seem overly complicated to me.
First, in modern Node (>= v10.10.0, which added readdir's withFileTypes option) you can do this using only Node core modules, fully asynchronously, parallelizing the unlinking of files, in a function of a handful of lines that still keeps its readability:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const readdir = promisify(fs.readdir);
const rmdir = promisify(fs.rmdir);
const unlink = promisify(fs.unlink);
exports.rmdirs = async function rmdirs(dir) {
let entries = await readdir(dir, { withFileTypes: true });
await Promise.all(entries.map(entry => {
let fullPath = path.join(dir, entry.name);
return entry.isDirectory() ? rmdirs(fullPath) : unlink(fullPath);
}));
await rmdir(dir);
};
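A hypothetical usage from another module (the require path and './build' directory are just placeholders):

const { rmdirs } = require('./rmdirs');

rmdirs('./build')
  .then(() => console.log('done'))
  .catch(console.error);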
On another note, a guard against path traversal attacks is inappropriate for this function because:
It is out of scope based on the Single Responsibility Principle.
It should be handled by the caller, not this function. This is akin to the command-line rm -rf in that it takes an argument and will happily rm -rf / if asked to. It is the responsibility of a calling script to guard, not of the rm program itself.
This function would be unable to detect such an attack since it has no frame of reference. Again, that is the responsibility of the caller, which has the context of intent and therefore a reference against which to compare the path.
Symlinks are not a concern, as .isDirectory() is false for symlinks, so they are unlinked, not recursed into.
Last but not least, there is a rare race condition: the recursion could error if one of the entries is unlinked or deleted outside this script at just the right moment while the recursion is running. Since this scenario is not typical in most environments, it can likely be overlooked. However, if required (for some edge cases), the issue can be mitigated with this slightly more complex example:
exports.rmdirs = async function rmdirs(dir) {
let entries = await readdir(dir, { withFileTypes: true });
let results = await Promise.all(entries.map(entry => {
let fullPath = path.join(dir, entry.name);
let task = entry.isDirectory() ? rmdirs(fullPath) : unlink(fullPath);
return task.catch(error => ({ error }));
}));
results.forEach(result => {
// Ignore missing files/directories; bail on other errors
if (result && result.error.code !== 'ENOENT') throw result.error;
});
await rmdir(dir);
};
EDIT: Make isDirectory() a function. Remove the actual directory at the end. Fix missing recursion.
Here is an async version of @SharpCoder's answer
const fs = require('fs');
const path = require('path');
function deleteFile(dir, file) {
return new Promise(function (resolve, reject) {
var filePath = path.join(dir, file);
fs.lstat(filePath, function (err, stats) {
if (err) {
return reject(err);
}
if (stats.isDirectory()) {
resolve(deleteDirectory(filePath));
} else {
fs.unlink(filePath, function (err) {
if (err) {
return reject(err);
}
resolve();
});
}
});
});
};
function deleteDirectory(dir) {
return new Promise(function (resolve, reject) {
fs.access(dir, function (err) {
if (err) {
return reject(err);
}
fs.readdir(dir, function (err, files) {
if (err) {
return reject(err);
}
Promise.all(files.map(function (file) {
return deleteFile(dir, file);
})).then(function () {
fs.rmdir(dir, function (err) {
if (err) {
return reject(err);
}
resolve();
});
}).catch(reject);
});
});
});
};
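A hypothetical usage of the two helpers above:

deleteDirectory('/tmp/some/dir')
  .then(() => console.log('done'))
  .catch(console.error);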
From the Node docs:
To get a behavior similar to the rm -rf Unix command, use fs.rm() with options { recursive: true, force: true }.
For example (ESM):
import { rm } from 'node:fs/promises';
await rm('/path/to', { recursive: true, force: true });
I wrote this function, removeFolder. It recursively removes all the files and folders in a location. The only package it requires is async.
var fs = require('fs');
var async = require('async');
function removeFolder(location, next) {
fs.readdir(location, function (err, files) {
async.each(files, function (file, cb) {
file = location + '/' + file
fs.stat(file, function (err, stat) {
if (err) {
return cb(err);
}
if (stat.isDirectory()) {
removeFolder(file, cb);
} else {
fs.unlink(file, function (err) {
if (err) {
return cb(err);
}
return cb();
})
}
})
}, function (err) {
if (err) return next(err)
fs.rmdir(location, function (err) {
return next(err)
})
})
})
}
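A hypothetical call (the './temp' path is only an example):

removeFolder('./temp', function (err) {
  if (err) return console.error(err);
  console.log('done');
});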
[EDIT: using node.js v15.5.0]
Having just tried using some of the solutions posted here, I encountered the following deprecation warning:
(node:13202) [DEP0147] DeprecationWarning: In future versions of
Node.js, fs.rmdir(path, { recursive: true }) will throw if path does
not exist or is a file. Use fs.rm(path, { recursive: true, force: true
}) instead
fs.rm(path, { recursive: true, force: true }) works nicely; use fs.rmSync(path, { recursive: true, force: true }) if you want the blocking version.
If you are using Node 8+, want asynchronicity, and don't want external dependencies, here is the async/await version:
const path = require('path');
const fs = require('fs');
const util = require('util');
const readdir = util.promisify(fs.readdir);
const lstat = util.promisify(fs.lstat);
const unlink = util.promisify(fs.unlink);
const rmdir = util.promisify(fs.rmdir);
const removeDir = async (dir) => {
try {
const files = await readdir(dir);
await Promise.all(files.map(async (file) => {
try {
const p = path.join(dir, file);
const stat = await lstat(p);
if (stat.isDirectory()) {
await removeDir(p);
} else {
await unlink(p);
console.log(`Removed file ${p}`);
}
} catch (err) {
console.error(err);
}
}))
await rmdir(dir);
console.log(`Removed dir ${dir}`);
} catch (err) {
console.error(err);
}
}
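A hypothetical usage (the './build' directory is just an example):

removeDir('./build').then(() => console.log('all done'));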
const fs = require("fs");
fs.rmdir("./test", { recursive: true }, (err) => {
if (err) {
console.error(err);
}
});
Provide the recursive: true option, and it will recursively delete all the files and directories under the given path (assuming test is a directory present at the root).
2020 Update
From version 12.10.0, the recursive option has been added to the options object.
Note that recursive deletion is experimental.
So you would do for sync:
fs.rmdirSync(dir, {recursive: true});
or for async:
fs.rmdir(dir, {recursive: true});
According to the fs documentation, fsPromises currently provides the recursive option on an experimental basis, which, at least in my own case on Windows, removes the directory and any files therein.
fsPromises.rmdir(path, {
recursive: true
})
Does recursive: true remove the files on Linux and MacOS?
Explanations
As of Node.js v14.14.0, we can use the require("fs").promises.rm function to remove a file or folder with a promise. The first argument is the file or folder to remove (even a non-existent one). You can use the recursive and force options in the second argument's object to mimic the behavior of the rm shell utility with the -rf options.
Example
"use strict";
require("fs").promises.rm("directory", {recursive: true, force: true}).then(() => {
console.log("removed");
}).catch(error => {
console.error(error.message);
});
See
Node.js v14 Documentation
Mozilla Developer Promises Documentation
rm command manual page
I ended up here while trying to do this with gulp, and I'm writing this for future readers.
gulp-clean was deprecated in favor of gulp-rimraf
gulp-rimraf was in turn deprecated in favor of del (delete-files-folders)
When you want to delete files and folders using del, you should append /** for recursive deletion.
const del = require('del');

gulp.task('clean', function () {
return del(['some/path/to/delete/**']);
});
In the latest version of Node.js (12.10.0 or later), the rmdir style functions fs.rmdir(), fs.rmdirSync(), and fs.promises.rmdir() have a new experimental option recursive that allows deleting non-empty directories, e.g.
fs.rmdir(path, { recursive: true });
The related PR on GitHub: https://github.com/nodejs/node/pull/29168
Async version of @SharpCoder's answer using fs.promises:
const fs = require('fs');
const afs = fs.promises;
const deleteFolderRecursive = async path => {
if (fs.existsSync(path)) {
for (let entry of await afs.readdir(path)) {
const curPath = path + "/" + entry;
if ((await afs.lstat(curPath)).isDirectory())
await deleteFolderRecursive(curPath);
else await afs.unlink(curPath);
}
await afs.rmdir(path);
}
};
const fs = require("fs");

function removeDir(dir) {
  return new Promise((resolve, reject) => {
    // delete directory recursively <------
    fs.rmdir(dir, { recursive: true }, (err) => {
      if (err) {
        return reject(err);
      }
      resolve(`${dir} is deleted!`);
    });
  });
}

// directory path
removeDir("your/dir");
If you prefer async/await, you can use the fs/promises API.
const fs = require('fs/promises');
const removeDir = async (dirPath) => {
await fs.rm(dirPath, {recursive: true});
}
If you know the path of a single file in the folder and wish to delete the folder that contains that file:
const fs = require('fs/promises');
const path = require('path');
const removeDir = async (filePath) => {
const { dir } = path.parse(filePath);
await fs.rm(dir, { recursive: true });
}
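A hypothetical call, removing the parent folder of a known file:

removeDir('/tmp/some/dir/file.txt').then(() => console.log('done'));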
Just use the rmdir module! It's easy and simple.
A quick and dirty way (maybe for testing) is to use the exec or spawn method directly to invoke an OS call that removes the directory. Read more on the Node.js child_process module.
let exec = require('child_process').exec
exec('rm -Rf /tmp/*.zip', callback)
Downsides are:
You are depending on the underlying OS, i.e. the same command would run on Unix/Linux but probably not on Windows.
You cannot intervene on conditions or errors; you just hand the task to the underlying OS and wait for the exit code to be returned.
Benefits:
These processes can run asynchronously.
You can listen for the output/errors of the command, so the command output is not lost. If the operation does not complete, you can check the exit code and retry.
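If you go this route on a POSIX system, a slightly safer variant (a sketch, not the answer's original code) is child_process.execFile, which passes the path as an argument instead of interpolating it into a shell string:

const { execFile } = require('child_process');

// Remove the directory without going through a shell; the path is just an example
execFile('rm', ['-rf', '/tmp/some-dir'], (err) => {
  if (err) return console.error(err);
  console.log('done');
});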
The de facto package is rimraf, but here is my tiny async version:
const fs = require('fs')
const path = require('path')
const Q = require('q')
function rmdir (dir) {
return Q.nfcall(fs.access, dir, fs.constants.W_OK)
.then(() => {
return Q.nfcall(fs.readdir, dir)
.then(files => files.reduce((pre, f) => pre.then(() => {
var sub = path.join(dir, f)
return Q.nfcall(fs.lstat, sub).then(stat => {
if (stat.isDirectory()) return rmdir(sub)
return Q.nfcall(fs.unlink, sub)
})
}), Q()))
})
.then(() => Q.nfcall(fs.rmdir, dir))
}
In an asynchronous approach, to delete a non-empty directory, either of the following will work:
rmdir(path, { recursive: true, force: true })
rm(path, { recursive: true, force: true })
A code snippet:
const fsp = require("fs/promises");
deleteDirRecursively("./b");
removeRecursively("./BCD/b+");
async function deleteDirRecursively(dirPath) {
try {
// fsPromises.rmdir() on a file (not a directory) results in the promise being rejected
// with an ENOENT error on Windows and an ENOTDIR error on POSIX.
// To get a behavior similar to the rm -rf Unix command,
// use fsPromises.rm() with options { recursive: true, force: true }.
//will not throw an error if dir is empty
//will throw an error if dir is not present
await fsp.rmdir(dirPath, { recursive: true, force: true });
console.log(dirPath, "deleted successfully");
} catch (err) {
console.log(err);
}
}

async function removeRecursively(path) {
try {
//has ability to remove both file and dir
//can delete dir recursively and forcefully
//will delete an empty dir.
//will remove all the contents of a dir.
// the only difference between rmdir and rm is that rmdir can only delete directories
await fsp.rm(path, { recursive: true, force: true });
console.log(path, "deleted successfully");
} catch (err) {
console.log(err);
}
}
Ultra-speed and fail-proof
You can use the lignator package (https://www.npmjs.com/package/lignator). It's faster than any async code (e.g. rimraf) and more fail-proof (especially on Windows, where file removal is not instantaneous and files might be locked by other processes).
4.36 GB of data, 28,042 files, and 4,217 folders on Windows were removed in 15 seconds, versus rimraf's 60 seconds, on an old HDD.
const lignator = require('lignator');
lignator.remove('./build/');
Synchronously remove a folder with its files, or a single file.
I am not much of a giver nor a contributor, but I couldn't find a good solution to this problem and had to find my own way... so I hope you'll like it :)
It works perfectly for me with any number of nested directories and subdirectories. Be careful about the scope of 'this' when recursing into the function; your implementation may be different. In my case this function sits inside the return of another function, which is why I am calling it with this.
const fs = require('fs');
deleteFileOrDir(path, pathTemp = false){
if (fs.existsSync(path)) {
if (fs.lstatSync(path).isDirectory()) {
var files = fs.readdirSync(path);
if (!files.length) return fs.rmdirSync(path);
for (var file in files) {
var currentPath = path + "/" + files[file];
if (!fs.existsSync(currentPath)) continue;
if (fs.lstatSync(currentPath).isFile()) {
fs.unlinkSync(currentPath);
continue;
}
if (fs.lstatSync(currentPath).isDirectory() && !fs.readdirSync(currentPath).length) {
fs.rmdirSync(currentPath);
} else {
this.deleteFileOrDir(currentPath, path);
}
}
this.deleteFileOrDir(path);
} else {
fs.unlinkSync(path);
}
}
if (pathTemp) this.deleteFileOrDir(pathTemp);
}
2020 Answer
If you want to do it in an npm script, you DON'T need to install any 3rd-party package beforehand if you use the npx command.
If, for example, you want to delete the folders dist and .cache when you run npm run clean, just add this command to your package.json:
{
"scripts": {
"clean": "npx rimraf dist .cache"
}
}
It will work on any operating system.
Another alternative is the fs-promise module, which provides promisified versions of the fs-extra methods.
You could then write something like this example:
const { remove, mkdirp, writeFile, readFile } = require('fs-promise')
const { join, dirname } = require('path')
async function createAndRemove() {
const content = 'Hello World!'
const root = join(__dirname, 'foo')
const file = join(root, 'bar', 'baz', 'hello.txt')
await mkdirp(dirname(file))
await writeFile(file, content)
console.log(await readFile(file, 'utf-8'))
await remove(join(__dirname, 'foo'))
}
createAndRemove().catch(console.error)
note: async/await requires a recent nodejs version (7.6+)
I wish there was a way to do this without additional modules for something so minuscule and common, but this is the best I could come up with.
Update:
Should now work on Windows (tested Windows 10), and should also work on Linux/Unix/BSD/Mac systems.
const
execSync = require("child_process").execSync,
fs = require("fs"),
os = require("os");
let removeDirCmd, theDir;
removeDirCmd = os.platform() === 'win32' ? "rmdir /s /q " : "rm -rf ";
theDir = __dirname + "/../web-ui/css/";
// WARNING: Do not specify a single file as the windows rmdir command will error.
if (fs.existsSync(theDir)) {
console.log(' removing the ' + theDir + ' directory.');
execSync(removeDirCmd + '"' + theDir + '"', function (err) {
console.log(err);
});
}