Asynchronously reading and caching multiple files in nodejs

I have an array which holds the URLs of several files. For example:
var files = ['1.html', '2.html', '3.html'];
I need to read them asynchronously and save them in an object named cache (cache = {}).
To do this I used the code:
for (var i = 0; i < files.length; i++) {
  require('fs').readFile(files[i], 'utf8', function (error, data) {
    cache[files[i]] = data;
  });
}
In the end I have the result:
cache = { undefined : 'File 3 content' }
I understand that readFile's callback runs after the loop has ended, so it loses the value of the loop variable. Is there a way to fix this, or another method to read files from an array and cache them?

When your callback to readFile executes, the for loop will already have finished. So i will be files.length and files[i] will be undefined. To mitigate this, you need to wrap the variables in a closure. The simplest way to do this is to create a function which does your readFile call, and call that in the loop:
function read(file) {
  require('fs').readFile(file, 'utf8', function (error, data) {
    cache[file] = data;
  });
}

for (var i = 0; i < files.length; i++) {
  read(files[i]);
}
For even better execution control, you might want to look into async:
var fs = require('fs');
var async = require('async');

function readAsync(file, callback) {
  fs.readFile(file, 'utf8', callback);
}

async.map(files, readAsync, function(err, results) {
  // results = ['file 1 content', 'file 2 content', ...]
});
Edit: Made use of helper function for async example.
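On modern Node (v10+) the same caching can also be done without the async library, using fs.promises and Promise.all. A minimal sketch, assuming the same files array and cache shape as in the question:
const fs = require('fs').promises;

// Read all files in parallel; Promise.all preserves input order,
// so contents[i] corresponds to files[i].
async function cacheFiles(files) {
  const cache = {};
  const contents = await Promise.all(
    files.map(file => fs.readFile(file, 'utf8'))
  );
  files.forEach((file, i) => { cache[file] = contents[i]; });
  return cache;
}

cacheFiles(['1.html', '2.html', '3.html'])
  .then(cache => console.log(cache))
  .catch(err => console.error(err));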

The existing answer didn't work for me. I did find an NPM package which did the job: https://www.npmjs.com/package/read-multiple-files. After npm install read-multiple-files at the command line, here's the code I used:
var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
console.log("\n");
readMultipleFiles(files, 'utf8', function(err, inputFiles) {
  if (err) {
    console.log("Read Error: " + err);
    return;
  }
  var fileOne = inputFiles[0];
  var fileTwo = inputFiles[1];
  // ...
  console.log(fileOne);
  console.log(fileTwo);
});

Related

Nodejs module export returns undefined

I am using the recursive-readdir module to list all file types in a directory. The code below works, however I get "undefined" displayed before my array.
The code below is inserted into a js file "test.js" and is run from the command line using "node test.js".
Any suggestions on how best to return an array of all the required files without the undefined string being returned?
const recursive = require('recursive-readdir');

exports.file = function() {
  recursive(__dirname, ['!*.md'], function(error, files) {
    var arr = [];
    for (var i = 0; i < files.length; i++) {
      arr.push(files[i]);
    }
    console.log(arr);
  });
};

console.log(exports.file());
As mentioned in the comments exports.file does not have a return value, which is why you get undefined when you try to log it.
If you want the caller of exports.file to have access to the files, you need to update exports.file to take a callback that can be invoked with the files as a parameter.
const recursive = require('recursive-readdir');

exports.file = function(callback) {
  recursive(__dirname, ['!*.md'], function(error, files) {
    var arr = [];
    for (var i = 0; i < files.length; i++) {
      arr.push(files[i]);
    }
    console.log(arr);
    callback(error, arr);
  });
};

exports.file(function(error, files) {
  console.log(files);
});
That will log the files with minimal changes to your original code.
Depending on how you need this code to evolve, you can probably simplify this quite a bit. First, do you really need arr in addition to files? If not, you can simply pass files to the callback:
const recursive = require('recursive-readdir');

exports.file = function(callback) {
  recursive(__dirname, ['!*.md'], function(error, files) {
    callback(error, files);
  });
};

exports.file(function(error, files) {
  console.log(files);
});
And now, since your callback to recursive just invokes the callback to exports.file, you can simply pass the exports.file callback straight to recursive:
const recursive = require('recursive-readdir');

exports.file = function(callback) {
  recursive(__dirname, ['!*.md'], callback);
};

exports.file(function(error, files) {
  console.log(files);
});
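As a side note: recent versions of recursive-readdir return a promise when called without a callback, so (assuming such a version is installed) the export could be promise-based. A sketch:
const recursive = require('recursive-readdir');

// Sketch: rely on recursive-readdir's promise form (no callback passed).
exports.file = function() {
  return recursive(__dirname, ['!*.md']);
};

exports.file().then(files => console.log(files));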

How can I upload multiple files in node.js?

I need help uploading multiple files using node.js's upload file reader.
I am using fs = require('fs').
The problem is that when I choose two files, only one is written to the upload directory.
This is my backend
var files = req.files.files[0];
for (var i = 0; i < files.length; i++) {
  file = files[i];
  fs.readFile(files[i].path, function(error, data) {
    // console.log(files[i].path) printed the same path twice here
    fs.writeFile(uploadDirectory() + newFileName, data, function(error) {
    });
  });
}
Please help me.
What is the problem in my code?
Thanks.
You should avoid using files[i] inside an asynchronous callback that is written directly inside a for loop.
The reason console.log( files[i].path ) displays the same path twice is that by the time the callbacks run, the for loop has already finished, so you always get the last element of the array.
The easiest way to fix this is to introduce a new scope (a function). Declaring it with the function keyword also means it is hoisted, so it is available by the time the loop calls it:
function readAndWriteFile(file) {
  fs.readFile(file.path, function(error, data) {
    // console.log(file.path) displays what you expect.
    fs.writeFile(/* define new file name */, data, function(error) {
    });
  });
}

for (var i = 0; i < files.length; i++) {
  readAndWriteFile(files[i]);
}
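On modern Node you can sidestep the closure problem entirely with fs.promises and a for...of loop inside an async function. A sketch only: uploadDir and the file-naming scheme below are assumed stand-ins for the question's uploadDirectory() and newFileName:
const fs = require('fs').promises;
const path = require('path');

// Sketch: `uploadDir` is an assumed stand-in for uploadDirectory().
async function saveUploads(files, uploadDir) {
  for (const file of files) {
    const data = await fs.readFile(file.path);
    // Reuse the original base name; adjust the naming scheme as needed.
    await fs.writeFile(path.join(uploadDir, path.basename(file.path)), data);
  }
}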

NodeJS + redis gives weird results

Maybe the results aren't weird, but I started using Node 1-2 months ago, so to me they are...
I have a loop which sorts out every other value of the array returned by hgetall (a Redis command), and in that loop I call a function to get all the values from another table whose keys are stored in the sorted array. This was more difficult to explain than I thought. Here's my code:
Pastebin: http://pastebin.com/tAVhSUV1 (or see below)
function getInfo (cn, callback) {
  var anArray = [];
  redis_client.hgetall('chat_info:' + cn, function (err, vals) {
    if (err) { throw err; }
    for (i in vals) {
      anArray.push(vals[i]);
    }
    return callback(anArray);
  });
}

redis_client.hgetall('chat_rooms:' + POST.chat_name, function (err, val) {
  if (err) { throw err; }
  var vars = [],
      rArr = [];
  for (i in val) {
    vars.push(i);
  }
  for (var i = 0; i < vars.length; i += 1) {
    if (i % 2 === 0) {
      getInfo(vars[i], function (hej) {
        rArr.push(hej);
      });
    }
  }
});
The callback from the call to getInfo() is executed after the entire loop. Am I missing something here? Because it can't do that, right? (When I use rArr right after the loop it's empty, but if I log it in the callback it gets logged after everything else written after the loop.)
Yes, that's probably normal.
Understand that the callbacks are executed after the hgetall call completes: when the redis client receives something, it invokes the corresponding callback. In other words, all the callbacks are executed later.
Since JavaScript runs in a single thread, the calls to hgetall would have to block for their callbacks to run in the order they appear in the for loop. But because you're using async IO, the for loop ends first, and then each callback that was queued inside the JavaScript event loop gets called.
Edit
Unfortunately, to achieve what you're trying to do, you should wrap your code inside many other callbacks. You can use this project to make it easier: https://github.com/caolan/async
You should be able to install it using npm install async.
You'd have to do something like this:
function getInfo(cn) {
  return function(callback) {
    var anArray = [];
    redis_client.hgetall('chat_info:' + cn, function (err, vals) {
      if (err) { return callback(err); }
      for (var i in vals) {
        anArray.push(vals[i]);
      }
      // async expects node-style task callbacks: callback(err, result)
      return callback(null, anArray);
    });
  };
}

redis_client.hgetall('chat_rooms:' + POST.chat_name, function (err, val) {
  if (err) { throw err; }
  var vars = [],
      callbacks = [];
  for (var key in val) {
    vars.push(key);
  }
  for (var i = 0; i < vars.length; i += 1) {
    if (i % 2 === 0) {
      callbacks.push(getInfo(vars[i]));
    }
  }
  async.series(callbacks, function (err, results) {
    // Final code here: results is an array of the per-chat arrays
  });
});
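If the order of the results doesn't matter, async.map can issue the hgetall calls concurrently instead of one after another. A minimal sketch, to be placed inside the same hgetall callback, assuming the same redis_client and vars array as above:
// Fetch chat info for every other key, concurrently.
async.map(
  vars.filter(function (v, i) { return i % 2 === 0; }),
  function (cn, done) {
    // hgetall already uses the node-style (err, result) signature.
    redis_client.hgetall('chat_info:' + cn, done);
  },
  function (err, results) {
    // results is an array of hash objects, in the same order as the keys.
  }
);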

How does node.js implement async callbacks with a single process?

I don't know how Node implements its amazing idea, and I have a question about using it.
I have to read four files, file1.js, file2.js, file3.js and file4.js, and concatenate them into one big JavaScript file, result.js. It's important to keep their order.
So it's natural for me to use readFileSync instead of readFile.
I know it's a bad solution. Does anyone have a better idea?
Q: Is it possible for node.js to read four files at the same time?
Hope someone can explain the principle of node.js and when process.nextTick will be fired.
A: Yes, it is possible for Node to read four files at the same time.
My answer would be: it depends on your situation whether to read the files synchronously or asynchronously. If it's configuration data, or the files can be cached, I would suggest just doing it synchronously; it's easy, and it's only done once, so you won't be waiting around very much. Long operations on initialization are typical and can make things more efficient in the long run. That said, reading four files in order asynchronously, so that your program can do other things in the background, isn't that hard. I will work up sync and async examples of each and add an edit.
/* jshint node:true */
var fs = require('fs');

function readFilesSync(fileNames) {
  'use strict';
  var results = '';
  for (var i = 0; i < fileNames.length; i++) {
    results += fs.readFileSync(fileNames[i]);
  }
  return results;
}

function readFiles(fileNames, callback) {
  'use strict';
  var results = '';
  function readFile(index) {
    if (index < fileNames.length) {
      fs.readFile(fileNames[index], function (err, data) {
        results += data;
        readFile(index + 1);
      });
    } else {
      callback(results);
    }
  }
  readFile(0);
}

function readAllFilesAtOnce(fileNames, callback) {
  'use strict';
  var results = {};
  var numFiles = fileNames.length;
  function callBackWrapper() {
    var resultsOrdered = '';
    for (var i = 0; i < fileNames.length; i++) {
      resultsOrdered += results[fileNames[i]];
    }
    callback(resultsOrdered);
  }
  function readFileAsync(fileName) {
    fs.readFile(fileName, function (err, data) {
      results[fileName] = data;
      numFiles--;
      if (numFiles === 0) {
        callBackWrapper();
      }
    });
  }
  for (var i = 0; i < fileNames.length; i++) {
    readFileAsync(fileNames[i]);
  }
}

function doSomethingWithTheData(data) {
  'use strict';
  console.log('Results async: ' + data);
}

function doSomethingWithTheData2(data) {
  'use strict';
  console.log('Results async all at once: ' + data);
}

var fileNamesArray = ['blah.js', 'file.js', 'hello.txt'];

console.log('The results sync: ' + readFilesSync(fileNamesArray));
readFiles(fileNamesArray, doSomethingWithTheData);
readAllFilesAtOnce(fileNamesArray, doSomethingWithTheData2);
EDIT: I have added a method above to read all of the files at once.
process.nextTick does no more than schedule the function to run on the next turn of the event loop. For example:
process.nextTick(function() {
  console.log('never printed out');
});
while (true);
Example 2:
process.nextTick(function() {
  console.log('printed last');
});
console.log('printed first');

Is node.js rmdir recursive? Will it work on non-empty directories?

The documentation for fs.rmdir is very short and doesn't explain the behavior of rmdir when the directory is not empty.
Q: What happens if I try to use this API to delete a non-empty directory?
Although it means using a third-party library for such a thing, I could not come up with a more elegant solution. So I ended up using the npm module rimraf.
Install it
npm install rimraf
Or install it and save to 'package.json'
(other save options can be found in the npm-install docs)
npm install --save rimraf
Then you can do the following:
var rmdir = require('rimraf');

rmdir('some/directory/with/files', function(error) {});
Or in CoffeeScript:
rmdir = require 'rimraf'
rmdir 'some/directory/with/files', (error)->
I wrote about this exact problem.
My previous solution below, while simple, is not preferred. The following function is a synchronous solution, though an async one might be preferred.
var fs = require('fs');

var deleteFolderRecursive = function(path) {
  var files = [];
  if (fs.existsSync(path)) {
    files = fs.readdirSync(path);
    files.forEach(function(file, index) {
      var curPath = path + "/" + file;
      if (fs.lstatSync(curPath).isDirectory()) { // recurse
        deleteFolderRecursive(curPath);
      } else { // delete file
        fs.unlinkSync(curPath);
      }
    });
    fs.rmdirSync(path);
  }
};
[Edit] Added lstat instead of stat to prevent errors on symlinks
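An async variant of the same walk-and-delete logic is straightforward with fs.promises on Node v10+. A sketch, not a drop-in for older Node versions:
const fs = require('fs').promises;
const path = require('path');

// Sketch: same recursion as above, but non-blocking.
async function deleteFolderRecursiveAsync(dir) {
  for (const entry of await fs.readdir(dir)) {
    const curPath = path.join(dir, entry);
    if ((await fs.lstat(curPath)).isDirectory()) {
      await deleteFolderRecursiveAsync(curPath); // recurse
    } else {
      await fs.unlink(curPath); // delete file
    }
  }
  await fs.rmdir(dir);
}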
[Previous Solution]
My solution to this is quite easy to implement.
var exec = require('child_process').exec, child;

child = exec('rm -rf test', function(err, out) {
  console.log(out);
  err && console.log(err);
});
This is slimmed down for this page, but the basic idea is simple: execute rm -rf on the command line. If your app needs to run across different types of OS, put this in a function and have an if/else/switch to handle it.
You will want to handle all the responses; but the idea is simple enough.
Short answer: node.js fs.rmdir() calls the POSIX rmdir(); this will remove an empty directory, or return an error. In the given case, the call will invoke the callback function and pass the error as an exception.
The problem here is that the node.js documentation refers to POSIX:
The Node.js API Docs File System API started out as
simple wrappers around standard POSIX functions.
This almost changes the question into a duplicate of:
Is there a listing of the POSIX API / functions?
The description for fs.rmdir is terse, but sufficient.
Asynchronous rmdir(2).
The rmdir(2) here is an implicit reference to the documentation for the rmdir() system call. The number (2) here is an old unix man page convention to indicate Section 2 of the Manual pages, containing the kernel interfaces.
Node.js v12.10.0 introduced the recursive option to fs.rmdir.
As fs.mkdir has supported the same option since v10.12.0, both making and removing directories can be done recursively.
$ node --experimental-repl-await

# without recursive option -> error
> await fs.promises.mkdir('foo/bar')
Thrown:
[Error: ENOENT: no such file or directory, mkdir 'foo/bar'] {
  errno: -2,
  code: 'ENOENT',
  syscall: 'mkdir',
  path: 'foo/bar'
}

# with recursive option -> success
> await fs.promises.mkdir('foo/bar', { recursive: true })
undefined

# without recursive option -> error
> await fs.promises.rmdir('foo')
Thrown:
[Error: ENOTEMPTY: directory not empty, rmdir 'foo'] {
  errno: -66,
  code: 'ENOTEMPTY',
  syscall: 'rmdir',
  path: 'foo'
}

# with recursive option -> success
> await fs.promises.rmdir('foo', { recursive: true })
undefined
This worked for me
fs.rmdirSync(folderpath, {recursive: true});
Edit 2021:
Now it seems to have been replaced in v14 with:
fs.rmSync('./output', {recursive: true, force: true});
Just a small dot among this bunch of answers, but I think it's good to point out.
Personally (and generally) I prefer to use an already existing library, if one is available, for doing the task. Using an existing thing means, for me and especially in the open-source world, using and improving something that someone else has done, which can end up with a better result than doing it on my own.
In this case, with a small search I found the module fs-extra, which also aims to be a replacement for rimraf and answers the need to remove directories recursively (apparently with async and sync versions). Furthermore, it has a good number of stars on GitHub and seems currently maintained: these two conditions, in addition to the fact that it answers the need, make it the way to go for me.
fs.rmdir is not recursive.
You could instead use a recursive fs.readdir module like readdirp in order to find all files and directories.
Then remove all files, followed by all directories.
For an even simpler solution have a look at rimraf.
As of node v16, recursive fs.rmdir is now deprecated. The replacement is fs.rm.
Usage with promises:
const fs = require("fs/promises")
(async () => {
await fs.rm("directory", { recursive: true })
})()
Traditional:
const fs = require("fs")
fs.rm("directory", { recursive: true }, (err) => {
// Callback
})
The force option is also worth mentioning here: it prevents the method from throwing if the folder is missing, which is useful when cleaning up temporary files.
nodejs documentation
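For example, a minimal cleanup sketch combining both options (the directory name is hypothetical):
const fs = require("fs");

// force: true suppresses the error if the folder is already gone,
// so this is safe to run repeatedly in cleanup code.
fs.rm("tmp-dir", { recursive: true, force: true }, (err) => {
  if (err) console.error(err);
});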
Use child_process.execFile; it is faster.
From the Node.js docs:
child_process.execFile() is similar to child_process.exec() except that it
does not execute a subshell but rather the specified file directly.
This works. Mimicking rm -rf DIR...
var child = require('child_process');

var rmdir = function(directories, callback) {
  if (typeof directories === 'string') {
    directories = [directories];
  }
  var args = directories;
  args.unshift('-rf');
  child.execFile('rm', args, { env: process.env }, function(err, stdout, stderr) {
    // Guard so the usage examples below may omit the callback.
    if (callback) callback.apply(this, arguments);
  });
};
// USAGE
rmdir('dir');
rmdir('./dir');
rmdir('dir/*');
rmdir(['dir1', 'dir2']);
Edit: I have to admit this is not cross-platform; it will not work on Windows.
Here is an asynchronous recursive version that works with promises. I use the 'Q' library, but any promise library will do with a few changes (e.g. the 'fail' function).
To make use of it, we must write a few simple wrappers around some core Node functions, namely fs.stat, fs.readdir, fs.unlink and fs.rmdir, to make them promise-friendly.
Here they are:
var fs = require('fs');
var Q = require('q');

function getStat(fpath) {
  var def = Q.defer();
  fs.stat(fpath, function(e, stat) {
    if (e) { def.reject(e); } else { def.resolve(stat); }
  });
  return def.promise;
}

function readdir(dirpath) {
  var def = Q.defer();
  fs.readdir(dirpath, function(e, files) {
    if (e) { def.reject(e); } else { def.resolve(files); }
  });
  return def.promise;
}

function rmFile(fpath) {
  var def = Q.defer();
  fs.unlink(fpath, function(e) { if (e) { def.reject(e); } else { def.resolve(fpath); } });
  return def.promise;
}

function rmDir(fpath) {
  var def = Q.defer();
  fs.rmdir(fpath, function(e) { if (e) { def.reject(e); } else { def.resolve(fpath); } });
  return def.promise;
}
So here is the recursive rm function:
var path = require('path');

function recursiveDelete(fpath) {
  var def = Q.defer();
  getStat(fpath)
    .then(function(stat) {
      if (stat.isDirectory()) {
        return readdir(fpath)
          .then(function(files) {
            if (!files.length) {
              return rmDir(fpath);
            } else {
              return Q.all(files.map(function(f) { return recursiveDelete(path.join(fpath, f)); }))
                .then(function() { return rmDir(fpath); });
            }
          });
      } else {
        return rmFile(fpath);
      }
    })
    .then(function(res) { def.resolve(res); })
    .fail(function(e) { def.reject(e); })
    .done();
  return def.promise;
}
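Usage is then an ordinary promise chain (Q exposes .fail for errors); the path here is just an example:
recursiveDelete('/tmp/some-dir')
  .then(function (removed) { console.log('removed:', removed); })
  .fail(function (e) { console.error(e); });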
Figured this was a good excuse to take a dive into the source ;)
From what I can tell, fs.rmdir is bound to the rmdir function from unistd.h. From the POSIX man page for rmdir:
The rmdir() function shall remove a directory whose name is given by
path. The directory shall be removed only if it is an empty directory.
If the directory is not an empty directory, rmdir() shall fail and set errno to [EEXIST] or [ENOTEMPTY].
In addition to the correct "no" answers, the rimraf package provides recursive delete functionality. It mimics rm -rf. It's also officially packaged by Ubuntu.
I realize this isn't exactly answering the question at hand, but I think this might be useful to someone searching here in the future (it would have been to me!): I made a little snippet that allows one to recursively delete only empty directories. If a directory (or any of its descendant directories) has content inside it, it is left alone:
var fs = require("fs");
var path = require("path");
var rmdir = function(dir) {
var empty = true, list = fs.readdirSync(dir);
for(var i = list.length - 1; i >= 0; i--) {
var filename = path.join(dir, list[i]);
var stat = fs.statSync(filename);
if(filename.indexOf('.') > -1) {
//There are files in the directory - we can't empty it!
empty = false;
list.splice(i, 1);
}
}
//Cycle through the list of sub-directories, cleaning each as we go
for(var i = list.length - 1; i >= 0; i--) {
filename = path.join(dir, list[i]);
if (rmdir(filename)) {
list.splice(i, 1);
}
}
//Check if the directory was truly empty
if (!list.length && empty) {
console.log('delete!');
fs.rmdirSync(dir);
return true;
}
return false;
};
https://gist.github.com/azaslavsky/661020d437fa199e95ab
Most of the examples I see out there are synchronous implementations of recursively deleting a folder structure in Node.
I've also seen a few asynchronous ones that don't actually work well.
I wrote and use one that's completely asynchronous: https://gist.github.com/yoavniran/adbbe12ddf7978e070c0
Removing NON-EMPTY directories SYNCHRONOUSLY:
Suppose the file structure is a stuff folder containing a single file, writeMe.txt.
var fs = require('fs');

fs.unlink('./stuff/writeMe.txt', function() {
  fs.rmdirSync('stuff');
});
I first remove the writeMe.txt file from the stuff folder using fs.unlink('./stuff/writeMe.txt'), which makes the stuff folder empty, and finally remove the folder itself using fs.rmdirSync('stuff').
This function will recursively delete a directory or file that you specify, synchronously:
var fs = require('fs');
var path = require('path');
var _ = require('underscore'); // or lodash, for _.each

function deleteRecursiveSync(itemPath) {
  if (fs.statSync(itemPath).isDirectory()) {
    _.each(fs.readdirSync(itemPath), function(childItemName) {
      deleteRecursiveSync(path.join(itemPath, childItemName));
    });
    fs.rmdirSync(itemPath);
  } else {
    fs.unlinkSync(itemPath);
  }
}
I have not tested this function's behavior if:
the item does not exist, or
the item cannot be deleted (such as due to a permissions issue).
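A guarded call covering both of those cases could look like this, a sketch using fs.existsSync plus try/catch, where itemPath is the path you want to delete:
var fs = require('fs');

try {
  if (fs.existsSync(itemPath)) {
    deleteRecursiveSync(itemPath);
  }
} catch (e) {
  // e.g. EACCES / EPERM when an item cannot be deleted
  console.error('Could not delete ' + itemPath + ': ' + e.message);
}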
Recursive remove directory for Node.js
It turns out that the Node.js fs module does not have a method to remove a directory and its contents recursively. Instead you have to walk the directory structure and remove the atomic items, i.e. individual files and empty directories. I found a nice gist by Takuo Kihira at https://gist.github.com/2367067, written in JavaScript, and decided to make a CoffeeScript version of it.
I tried to make it fail-safe, since synchronous removal raises an error if a file or directory is in use at the time.
var path = require('path');
var fs = require('fs');

// Note: maindir, dump, name and callback come from the surrounding
// application code in the original answer.
var dumpDirs = function (dir, name, cb) {
  fs.readdir(dir, function (err, files) {
    var dirs = [],
        filePath, l = files.length;
    for (var i = 0; i < l; i++) {
      filePath = path.join(dir, files[i]);
      var stats = fs.lstatSync(filePath);
      if (stats.isDirectory()) {
        if (files[i].indexOf(name) != -1) {
          dirs.push({
            startOn: new Date(stats.ctime),
            instance: files[i],
            name: name
          });
        }
      }
    }
    cb(dirs);
  });
};

var removeDir = function (dir, callback) {
  fs.readdir(dir, function (err, files) {
    var c = files.length;
    (function remfile(i, cb) {
      if (i >= c)
        return cb();
      var p = path.join(dir, files[i]);
      fs.unlink(p, function (err) {
        if (err) console.log(err);
        remfile(i + 1, cb);
      });
    })(0, function () {
      fs.rmdir(dir, function (err) {
        callback();
      });
    });
  });
};

dumpDirs(maindir, name, function (dirs) {
  if (dirs && dirs.length > 0) {
    (function rem(i, cb) {
      if (i >= dirs.length) {
        return cb();
      }
      var folder = path.join(dump, dirs[i].instance);
      removeDir(folder, function () {
        rem(i + 1, cb);
      });
    })(0, function () {
      callback();
    });
  }
  else {
    callback();
  }
});
Here is the CoffeeScript prototype function I created for fluentnode that deletes a folder recursively:
String::folder_Delete_Recursive = ->
  path = @.toString()
  if path.exists()
    for file in path.files()
      curPath = path.path_Combine(file)
      if curPath.is_Folder()
        curPath.folder_Delete_Recursive()
      else
        curPath.file_Delete()
    fs.rmdirSync(path)
  return path.not_Exists()
Here is the test:
it 'folder_Create and folder_Delete', ->
  tmpDir = "./".temp_Name_In_Folder()
  expect(tmpDir.folder_Exists()).to.be.false
  expect(tmpDir.folder_Create()).to.equal(tmpDir.realPath())
  expect(tmpDir.folder_Exists()).to.be.true
  expect(tmpDir.folder_Delete()).to.be.true
  expect(tmpDir.folder_Exists()).to.be.false

it 'folder_Delete_Recursive', ->
  tmpDir = "./".temp_Name_In_Folder().folder_Create()
  tmpFile = tmpDir.temp_Name_In_Folder().file_Create()
  expect(tmpDir.folder_Delete_Recursive()).to.be.true
A neat synchronous version of rmdirSync.
/**
 * Use with try ... catch ...
 *
 * If you have permission to remove every file/dir,
 * and there is no race condition and no IO exception,
 * then this should work.
 *
 * Uncomment the line
 *   if (!fs.existsSync(p)) return
 * if you care about the initial state of the dir.
 */
var fs = require('fs');
var path = require('path');

function rmdirSync(dir, file) {
  var p = file ? path.join(dir, file) : dir;
  // if (!fs.existsSync(p)) return;
  if (fs.lstatSync(p).isDirectory()) {
    fs.readdirSync(p).forEach(rmdirSync.bind(null, p));
    fs.rmdirSync(p);
  } else {
    fs.unlinkSync(p);
  }
}
And a parallel-IO, asynchronous version of rmdir (faster):
/**
 * NOTE:
 *
 * If there are no errors, the callback will only be called once.
 *
 * If there are multiple errors, the callback will be called
 * exactly as many times as errors occur.
 *
 * Sometimes this behavior may be useful, but users
 * should be aware of it and handle errors in the callback.
 */
var fs = require('fs');
var path = require('path');

function rmfile(dir, file, callback) {
  var p = path.join(dir, file);
  fs.lstat(p, function(err, stat) {
    if (err) callback.call(null, err);
    else if (stat.isDirectory()) rmdir(p, callback);
    else fs.unlink(p, callback);
  });
}

function rmdir(dir, callback) {
  fs.readdir(dir, function(err, files) {
    if (err) callback.call(null, err);
    else if (files.length) {
      var i, j;
      for (i = j = files.length; i--;) {
        rmfile(dir, files[i], function(err) {
          if (err) callback.call(null, err);
          else if (--j === 0) fs.rmdir(dir, callback);
        });
      }
    }
    else fs.rmdir(dir, callback);
  });
}
Anyway, if you want sequential IO, with the callback called exactly once (either on success or with the first error encountered), replace the rmdir above with this one (slower):
function rmdir(dir, callback) {
  fs.readdir(dir, function(err, files) {
    if (err) callback.call(null, err);
    else if (files.length) rmfile(dir, files[0], function(err) {
      if (err) callback.call(null, err);
      else rmdir(dir, callback);
    });
    else fs.rmdir(dir, callback);
  });
}
All of them depend ONLY on node.js and should be portable.
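Either version is invoked the same way; for example (the path is hypothetical):
// Usage sketch: remove a tree and report the result.
rmdir('/tmp/some-dir', function (err) {
  if (err) console.error('failed:', err);
  else console.log('removed');
});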
This post is at the top of the Google results, yet none of the answers gives a solution that:
doesn't make use of sync functions
doesn't require external libraries
doesn't use bash directly
Here is my async solution, which assumes nothing other than Node being installed:
const fs = require('fs');
const Path = require('path');

function rm(path) {
  return stat(path).then((_stat) => {
    if (_stat.isDirectory()) {
      return ls(path)
        .then((files) => Promise.all(files.map(file => rm(Path.join(path, file)))))
        .then(() => removeEmptyFolder(path));
    } else {
      return removeFileOrLink(path);
    }
  });

  function removeEmptyFolder(path) {
    return new Promise((done, err) => {
      fs.rmdir(path, function(error) {
        if (error) { return err(error); }
        return done("ok");
      });
    });
  }

  function removeFileOrLink(path) {
    return new Promise((done, err) => {
      fs.unlink(path, function(error) {
        if (error) { return err(error); }
        return done("ok");
      });
    });
  }

  function ls(path) {
    return new Promise((done, err) => {
      fs.readdir(path, function(error, files) {
        if (error) return err(error);
        return done(files);
      });
    });
  }

  function stat(path) {
    return new Promise((done, err) => {
      fs.stat(path, function(error, _stat) {
        if (error) { return err(error); }
        return done(_stat);
      });
    });
  }
}
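Usage is a plain promise chain; for example (the path is hypothetical):
rm('/tmp/some-dir')
  .then(function () { console.log('removed'); })
  .catch(function (err) { console.error(err); });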
Following on from @geedew's answer.
Here is an asynchronous implementation of rm -r (i.e. you can pass a path to a file or a directory). I'm not an experienced nodejs developer and would appreciate any suggestions or constructive criticism.
var fs = require('fs');

function ResultsCollector(numResultsExpected, runWhenDone) {
  this.numResultsExpected = numResultsExpected;
  this.runWhenDone = runWhenDone;
  this.numResults = 0;
  this.errors = [];
  this.report = function (err) {
    if (err) this.errors.push(err);
    this.numResults++;
    if (this.numResults == this.numResultsExpected) {
      if (this.errors.length > 0) return runWhenDone(this.errors);
      else return runWhenDone();
    }
  };
}

function rmRasync(path, cb) {
  fs.lstat(path, function(err, stats) {
    if (err && err.code == 'ENOENT') return cb(); // doesn't exist, nothing to do
    else if (err) {
      return cb(err);
    }
    if (stats.isDirectory()) {
      fs.readdir(path, function (err, files) {
        if (err) return cb(err);
        // An empty directory can be removed straight away.
        if (files.length === 0) return fs.rmdir(path, cb);
        var resultsCollector = new ResultsCollector(files.length, function (err) {
          if (err) return cb(err);
          fs.rmdir(path, function (err) {
            if (err) return cb(err);
            return cb();
          });
        });
        files.forEach(function (file) {
          var filePath = path + '/' + file;
          return rmRasync(filePath, function (err) {
            return resultsCollector.report(err);
          });
        });
      });
    }
    else { // file.
      // delete file or link
      fs.unlink(path, function (err) {
        if (err) return cb(err);
        return cb();
      });
    }
  });
}
Invoke like so:
rmRasync('/path/to/some/file/or/dir', function (err) {
  if (err) return console.error('Could not rm', err);
  // else success
});
Surprisingly verbose and bad answers here...
To delete a non-empty directory on most systems:
import * as cp from 'child_process';

const dir = '/the/dir/to/remove';
const k = cp.spawn('bash');
k.stdin.end(`rm -rf "${dir}"`);
k.once('exit', code => {
  // check the exit code
  // now you are done
});
This will work on macOS and Linux, but it might not work on some Windows versions.
