nodejs: creating directories in a sync way is not reliable

I am using the following code to create directories in a sync way. It checks the existence of the directory, deletes it if it exists, and creates it. All operations are sync. I am looping this operation 5 times, and I get different results each time: sometimes it creates only 4 directories, sometimes all 5. What is the reason for this instability in the code?
fs.readdir(dir, function(err, filenames) {
  if (err) {
    onError(err);
    return;
  }
  filenames.forEach(function(filename) {
    fs.readFile(dir + filename, 'utf-8', function(err, content) {
      if (err) {
        onError(err);
        return;
      }
      AsyncFunc(content, ....)
        .then(newContent => {
          filenames.forEach(function(filename) {
            if (fs.existsSync(currentDirName)) {
              fs.rmdirSync(currentDirName);
            }
            fs.mkdirSync(currentDirName, '0766');
          });
        });
    });
  });
});

If you are using sync functions you cannot use callbacks; they return their result (or throw) directly. Also, if you want to remove a folder you need to use rmdirSync(filename):
var fs = require('fs');

var filenames = ['1', '2', '3', '4'];
filenames.forEach(function(filename) {
  if (fs.existsSync(filename)) {
    fs.rmdirSync(filename);
  }
  fs.mkdirSync(filename, '0766');
});
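One likely source of the instability worth noting: fs.rmdirSync throws if the directory still has contents, and in the original code the sync calls run inside asynchronous readFile/then callbacks, so a thrown error can be swallowed by the promise chain and silently skip an iteration. A minimal sketch that at least surfaces such errors (assuming directory names like the ones above):

var fs = require('fs');

['1', '2', '3', '4', '5'].forEach(function(filename) {
  try {
    if (fs.existsSync(filename)) {
      fs.rmdirSync(filename); // throws if the directory is not empty
    }
    fs.mkdirSync(filename, '0766');
  } catch (err) {
    console.error('Failed on ' + filename + ':', err.message);
  }
});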

Related

find matches in files and parse out the line number

I am trying to find matches in files and parse out the line number and what the match was, along with the file name. So far I am able to read the files from the directory / sub-directories and then use indexOf(), which in this case is not very efficient. The goal would be to go through all the files and find matches for the following:
.http(
.httpContinue(
$httpUrl(
httpURL
getHttpImageURL(
getHttpURL(
The code I have so far looks like this:
var fs = require('fs');
var path = [my directory];

function readFiles(dirname) {
  fs.readdir(dirname, function(err, filenames) {
    if (err) {
      return;
    }
    filenames.forEach(function(filename) {
      if (fs.lstatSync(dirname + '/' + filename).isDirectory()) {
        readFiles(dirname + '/' + filename);
      }
      fs.readFile(dirname + '/' + filename, { encoding: 'utf8' }, function(err, content) {
        if (err) {
          return;
        }
        // This is not very effective and I need to check each line for all these possible matches
        if (content.indexOf('http(') > -1) {
          if (err) {
            return console.log(err);
          }
          console.log(filename);
        }
      });
    });
  });
}

readFiles(path);
The challenge I am facing is to read lines and parse out the line numbers where I found a match, and what the match was. Can't figure out how to accomplish that.
You could try this for your if statement:
// This should really go somewhere near the top of the file
const wantedStrings = [
  '.http(',
  '.httpContinue(',
  '$httpUrl(',
  'httpURL',
  'getHttpImageURL(',
  'getHttpURL('
];

if (content.toLowerCase().includes('http')
    && wantedStrings.filter(s => content.includes(s)).length > 0) {
  // Don't need another err check here
  console.log(filename);
}
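The original question also asks for the line number and the matched string, which the snippet above doesn't report. One way to get both is to split the file content on newlines; a sketch reusing the wantedStrings array and the content/filename variables from the readFile callback above:

content.split('\n').forEach((line, index) => {
  wantedStrings.forEach(s => {
    if (line.includes(s)) {
      // index is 0-based; report 1-based line numbers
      console.log(filename + ':' + (index + 1) + ' matched "' + s + '"');
    }
  });
});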

Listing all the directories and all the files and uploading them to my bucket (S3 Amazon) with Node.JS

Code below:
I'm using the findit walker, documentation here -> https://github.com/substack/node-findit
With this package I'm listing all the directories and files of my application, and I'm trying to send them to my bucket on Amazon S3 (with my own code).
I'm not sure if the code is right, and I don't know what I need to put in the Body, inside the params object.
This part lists all the directories of my app:
finder.on('directory', function (dir, stat, stop) {
  var base = path.basename(dir);
  if (base === '.git' || base === 'node_modules' || base === 'bower_components') {
    stop();
  } else {
    console.log(dir + '/');
  }
});
And this one lists all the files of my app:
finder.on('file', function (file, stat) {
  console.log(file);
});
I updated it to send data to my bucket, like this:
finder.on('file', function (file, stat) {
  console.log(file);
  var params = {
    Bucket: BUCKET_NAME,
    Key: file,
    //Body:
  };
  //console.log(params.body);
  s3.putObject(params, function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log("Success!");
    }
  });
});
I really don't know what I need to put inside the Body, and I don't know if the code is right. Can anyone help me?
Thanks.
To help, here is all the code:
var fs = require('fs');
var finder = require('findit')(process.argv[2] || '.');
var path = require('path');
var aws = require('aws-sdk');
var s3 = new aws.S3();
aws.config.loadFromPath('./AwsConfig.json');
var BUCKET_NAME = 'test-dev-2';

finder.on('directory', function (dir, stat, stop) {
  var base = path.basename(dir);
  if (base === '.git' || base === 'node_modules' || base === 'bower_components') {
    stop();
  } else {
    console.log(dir + '/');
  }
});

finder.on('file', function (file, stat) {
  console.log(file);
  var params = {
    Bucket: BUCKET_NAME,
    Key: file,
    //Body:
  };
  //console.log(params.body);
  s3.putObject(params, function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log("Success");
    }
  });
});

finder.on('error', function (err) {
  console.log(err);
});

finder.on('end', function () {
  console.log('Done!');
});
Based on the documentation, the Body parameter of s3.putObject can take a Buffer, Typed Array, Blob, String, or ReadableStream. The best one of those to use in most cases would be a ReadableStream. You can create a ReadableStream from any file using the createReadStream() function in the fs module.
So, that part of your code would look something like:
finder.on('file', function (file, stat) {
  console.log(file);
  var params = {
    Bucket: BUCKET_NAME,
    Key: file,
    // NOTE: You might need to adjust "file" so that it's either an absolute path, or relative to your code's directory.
    Body: fs.createReadStream(file)
  };
  s3.putObject(params, function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log("Success!");
    }
  });
});
I also want to point out that you might run into a problem with this code if you pass it a directory with a lot of files. putObject is an asynchronous function, which means it'll be called and then the code will move on to something else while it's doing its thing (OK, that's a gross simplification, but you can think of it that way). What that means in terms of this code is that you'll essentially be uploading all the files it finds at the same time; that's not good.
What I'd suggest is to use something like the async module to queue your file uploads so that only a few of them happen at a time.
Essentially you'd move the code you have in your file event handler to the queue's worker method, like so:
var async = require('async');

var uploadQueue = async.queue(function(file, callback) {
  var params = {
    Bucket: BUCKET_NAME,
    Key: file,
    // NOTE: You might need to adjust "file" so that it's either an absolute path, or relative to your code's directory.
    Body: fs.createReadStream(file)
  };
  s3.putObject(params, function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log("Success!");
    }
    callback(err); // <-- Don't forget the callback call here so that the queue knows this item is done
  });
}, 2); // <-- This "2" is the maximum number of files to upload at once
Note the 2 at the end there, that specifies your concurrency which, in this case, is how many files to upload at once.
Then, your file event handler simply becomes:
finder.on('file', function (file, stat) {
  uploadQueue.push(file);
});
That will queue up all the files it finds and upload them 2 at a time until it goes through all of them.
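If you also need to know when the entire queue has finished, the async module provides a drain hook (assigned as a property in async v1/v2, called as a method in v3):

// Runs once every queued upload has completed (async v1/v2 style)
uploadQueue.drain = function() {
  console.log('All uploads finished');
};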
An easier and arguably more efficient solution may be to just tar up the directory and upload that single tar file (gzipped too, if you want). There are tar modules on npm, but you could also just spawn a child process for it, as sketched below.
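A rough sketch of that alternative, assuming the tar binary is available on the system and reusing the s3 and BUCKET_NAME objects from above (the /tmp/app.tar.gz path is an arbitrary choice):

var child_process = require('child_process');
var fs = require('fs');

// Create a gzipped tarball of the current directory, then upload the single file
child_process.execFile('tar', ['-czf', '/tmp/app.tar.gz', '.'], function(err) {
  if (err) {
    return console.log(err);
  }
  s3.putObject({
    Bucket: BUCKET_NAME,
    Key: 'app.tar.gz',
    Body: fs.createReadStream('/tmp/app.tar.gz')
  }, function(err) {
    console.log(err || 'Tarball uploaded');
  });
});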

Node js remove old files in a Directory

I want to remove some files in a directory after reaching some limit (for example, remove files if the number of files is more than 20).
It would be great if any automation can be done to remove those files.
In details:
In my case there is an uploads directory where I'm uploading the images. For each new image, a directory is created and the image resides in that directory. So I want to keep some of the newly created or recently used directories and remove the others after a certain limit (for example, after reaching 20 directories). While creating new images, it'll check the limit and, if it exceeds the max, remove the unused directories.
Note: The directories are not empty.
How can I do that using Node.js?
Any help would be appreciated.
The most widely used technique would be to have an API that can delete files in your folder. Take a look at
fs.unlink
You can get more details in the Node.js docs.
Once you have this API, it is preferable to have a cron call it every month or so. Take a look at
crontab -e
If you're running Node on a Linux server, you can use the child_process module's exec function to execute Linux commands. For example, here is the code I use to move old log files:
var exec = require('child_process').exec;

exec('mv ' + __dirname + '/log/*.log ' + __dirname + '/log/archive',
  function(err, stdout, stderr) {
    if (err) {
      console.log('Error archiving log files: ' + stderr);
    } else {
      console.log('Log files archived to ' + __dirname + '/log/archive');
    }
  });
You can use any Linux command - so you could use this approach to remove files as well.
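For example, a sketch of the same approach adapted to deletion, removing log files older than seven days (the path and age threshold are assumptions; this relies on find(1) being available on the server):

var exec = require('child_process').exec;

// Delete *.log files older than 7 days
exec('find ' + __dirname + '/log -name "*.log" -mtime +7 -delete',
  function(err, stdout, stderr) {
    if (err) {
      console.log('Error removing old log files: ' + stderr);
    }
  });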
I created a "cronjob" function in Node.js to remove files in a folder (note: child folders will be ignored).
USAGE:
// keep only 5 newest files in `logs` folder
watchAndRemoveOldFiles('logs', 5, function (err, removedFiles) {
  console.log('These files have been removed:', removedFiles);
});
Full code (you need to npm install async to run the code):
var fs = require('fs');
var path = require('path');
var async = require('async');

function findAndRemoveOldFiles(inputDir, keepCount, callback) {
  if (!callback) {
    callback = function (err, removedFiles) {
      // default callback: doing nothing
    };
  }
  fs.readdir(inputDir, function (err, files) {
    if (err) {
      return callback(err);
    }
    var fileNames = files.map(function (fileName) {
      return path.join(inputDir, fileName);
    });
    async.map(fileNames, function (fileName, cb) {
      fs.stat(fileName, function (err, stat) {
        if (err) {
          return cb(err);
        }
        cb(null, {
          name: fileName,
          isFile: stat.isFile(),
          time: stat.mtime
        });
      });
    }, function (err, files) {
      if (err) {
        return callback(err);
      }
      files = files.filter(function (file) {
        return file.isFile;
      });
      // sort newest first; the comparator must return a number, not a boolean
      files.sort(function (filea, fileb) {
        return fileb.time - filea.time;
      });
      files = files.slice(keepCount);
      async.map(files, function (file, cb) {
        fs.unlink(file.name, function (err) {
          if (err) {
            return cb(err);
          }
          cb(null, file.name);
        });
      }, function (err, removedFiles) {
        if (err) {
          return callback(err);
        }
        callback(null, removedFiles);
      });
    });
  });
}

function watchAndRemoveOldFiles(inputDir, keepCount, callback) {
  findAndRemoveOldFiles(inputDir, keepCount, callback);
  fs.watch(inputDir, function () {
    findAndRemoveOldFiles(inputDir, keepCount, callback);
  });
}

// USAGE: watch and remove old files, keep only the 5 newest files
watchAndRemoveOldFiles('log', 5, function (err, removedFiles) {
  console.log('These files have been removed:', removedFiles);
});
You might consider setting up a kue task:
https://github.com/learnboost/kue
Kue (or a slight wrapper/mod on top of it) is likely to be what makes it into core for our scheduled jobs down the road.

Is node.js rmdir recursive? Will it work on non-empty directories?

The documentation for fs.rmdir is very short and doesn't explain the behavior of rmdir when the directory is not empty.
Q: What happens if I try to use this API to delete a non-empty directory?
Although it means using a third-party library for such a thing, I could not come up with a more elegant solution. So I ended up using the npm module rimraf.
Install it
npm install rimraf
Or install it and save to 'package.json'
(other save options can be found in the npm-install docs)
npm install --save rimraf
Then you can do the following:
var rmdir = require('rimraf');
rmdir('some/directory/with/files', function(error) {});
Or in Coffeescript:
rmdir = require 'rimraf'
rmdir 'some/directory/with/files', (error)->
I wrote about this problem exactly.
My previous solution below, while simple, is not preferred. The following function is a synchronous solution; an async one might be preferred.
var fs = require('fs');

var deleteFolderRecursive = function(path) {
  var files = [];
  if (fs.existsSync(path)) {
    files = fs.readdirSync(path);
    files.forEach(function(file, index) {
      var curPath = path + "/" + file;
      if (fs.lstatSync(curPath).isDirectory()) { // recurse
        deleteFolderRecursive(curPath);
      } else { // delete file
        fs.unlinkSync(curPath);
      }
    });
    fs.rmdirSync(path);
  }
};
[Edit] Added lstat instead of stat to prevent errors on symlinks
[Previous Solution]
My solution to this is quite easy to implement.
var exec = require('child_process').exec,
    child;

child = exec('rm -rf test', function(err, out) {
  console.log(out);
  err && console.log(err);
});
This is slimmed down for this page, but the basic idea is simple: execute 'rm -rf' on the command line. If your app needs to run across different types of OS, put this in a function and have an if/else/switch to handle it.
You will want to handle all the responses, but the idea is simple enough.
Short answer: node.js fs.rmdir() calls the POSIX rmdir(); this will remove an empty directory, or return an error. In the given case, the call will invoke the callback function and pass it the error.
The problem here is that the node.js documentation refers to POSIX:
The Node.js File System API started out as
simple wrappers around standard POSIX functions.
This almost changes the question into a duplicate of:
Is there a listing of the POSIX API / functions?
The description for fs.rmdir is terse, but sufficient.
Asynchronous rmdir(2).
The rmdir(2) here is an implicit reference to the documentation for the rmdir() system call. The number (2) here is an old unix man page convention to indicate Section 2 of the Manual pages, containing the kernel interfaces.
Node.js v12.10.0 introduced a recursive option for fs.rmdir. Since fs.mkdir has supported the same option since v10.12.0, both making and removing directories can be done recursively.
$ node --experimental-repl-await
# without recursive option -> error
> await fs.promises.mkdir('foo/bar')
Thrown:
[Error: ENOENT: no such file or directory, mkdir 'foo/bar'] {
errno: -2,
code: 'ENOENT',
syscall: 'mkdir',
path: 'foo/bar'
}
# with recursive option -> success
> await fs.promises.mkdir('foo/bar', { recursive: true })
undefined
# without recursive option -> error
> await fs.promises.rmdir('foo')
Thrown:
[Error: ENOTEMPTY: directory not empty, rmdir 'foo'] {
errno: -66,
code: 'ENOTEMPTY',
syscall: 'rmdir',
path: 'foo'
}
# with recursive option -> success
> await fs.promises.rmdir('foo', { recursive: true })
undefined
This worked for me
fs.rmdirSync(folderpath, {recursive: true});
Edit 2021:
Now it seems to have been replaced in v14 with:
fs.rmSync('./output', {recursive: true, force: true});
Just a small note among this bunch of answers, but I think it's good to point it out.
Personally (and generally) I would prefer to use an already existing library, if one is available, for doing the task. Using an already existing thing means, for me and especially in the open-source world, using and improving something that someone else has done, which can end up with a better result than doing it on my own (I'm improving something that someone else has done).
In this case, with a small search I found the module fs-extra, which aims to be a replacement for rimraf as well and answers the need to remove directories recursively (apparently with async and sync versions). Furthermore, it has a good number of stars on GitHub and seems currently maintained: these two conditions, in addition to the fact that it answers the need, make it the way to go for me.
fs.rmdir is not recursive.
You could instead use a recursive fs.readdir module like readdirp in order to find all files and directories.
Then remove all files, followed by all directories.
For an even simpler solution have a look at rimraf.
As of node v16, recursive fs.rmdir is now deprecated. The replacement is fs.rm.
Usage with promises:
const fs = require("fs/promises");

(async () => {
  await fs.rm("directory", { recursive: true });
})();
Traditional:
const fs = require("fs");

fs.rm("directory", { recursive: true }, (err) => {
  // Callback
});
The force option is also relevant to mention here as it will prevent the method from throwing errors if the folder is missing, which is useful if this is for cleaning up temporary files.
See the Node.js documentation for details.
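A small sketch combining both options (the tmp-build path is hypothetical):

const fs = require("fs/promises");

// Succeeds even if "tmp-build" never existed, thanks to force: true
fs.rm("tmp-build", { recursive: true, force: true })
  .then(() => console.log("cleaned up"));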
Use child_process.execFile, it is faster.
Node.js docs:
child_process.execFile() is similar to child_process.exec() except it
does not execute a subshell but rather the specified file directly.
This works. Mimicking rm -rf DIR...
var child = require('child_process');

var rmdir = function(directories, callback) {
  if (typeof directories === 'string') {
    directories = [directories];
  }
  var args = directories;
  args.unshift('-rf');
  child.execFile('rm', args, { env: process.env }, function(err, stdout, stderr) {
    if (callback) callback.apply(this, arguments); // callback is optional
  });
};
// USAGE
rmdir('dir');
rmdir('./dir');
rmdir('dir/*'); // NOTE: globs are not expanded without a shell
rmdir(['dir1', 'dir2']);
Edit: I have to admit this is not cross-platform; it will not work on Windows.
Here is an asynchronous recursive version that works with promises. I use the 'Q' library, but any promise library will do with a few changes (e.g. the 'fail' function).
To make use of it, we must make a few simple wrappers around some core Node functions, namely fs.stat, fs.readdir, fs.unlink and fs.rmdir, to make them promise-friendly.
Here they are:
var Q = require('q');
var fs = require('fs');

function getStat(fpath) {
  var def = Q.defer();
  fs.stat(fpath, function(e, stat) {
    if (e) { def.reject(); } else { def.resolve(stat); }
  });
  return def.promise;
}

function readdir(dirpath) {
  var def = Q.defer();
  fs.readdir(dirpath, function(e, files) {
    if (e) { def.reject(e); } else { def.resolve(files); }
  });
  return def.promise;
}

function rmFile(fpath) {
  var def = Q.defer();
  fs.unlink(fpath, function(e) { if (e) { def.reject(e); } else { def.resolve(fpath); } });
  return def.promise;
}

function rmDir(fpath) {
  var def = Q.defer();
  fs.rmdir(fpath, function(e) { if (e) { def.reject(e); } else { def.resolve(fpath); } });
  return def.promise;
}
So here is the recursive rm function:
var path = require('path');

function recursiveDelete(fpath) {
  var def = Q.defer();
  getStat(fpath)
    .then(function(stat) {
      if (stat.isDirectory()) {
        return readdir(fpath)
          .then(function(files) {
            if (!files.length) {
              return rmDir(fpath);
            } else {
              return Q.all(files.map(function(f) { return recursiveDelete(path.join(fpath, f)); }))
                .then(function() { return rmDir(fpath); });
            }
          });
      } else {
        return rmFile(fpath);
      }
    })
    .then(function(res) { def.resolve(res); })
    .fail(function(e) { def.reject(e); })
    .done();
  return def.promise;
}
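A usage sketch (the target path is hypothetical; .fail is Q's equivalent of catch):

recursiveDelete('/tmp/some-scratch-dir')
  .then(function() { console.log('deleted'); })
  .fail(function(e) { console.error('failed:', e); });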
Figured this was a good excuse to take a dive into the source ;)
From what I can tell, fs.rmdir is bound to the rmdir function from unistd.h. From the POSIX man page for rmdir:
The rmdir() function shall remove a directory whose name is given by
path. The directory shall be removed only if it is an empty directory.
If the directory is not an empty directory, rmdir() shall fail and set errno to [EEXIST] or [ENOTEMPTY].
In addition to the correct "no" answers, the rimraf package provides recursive delete functionality. It mimics rm -rf. It's also officially packaged by Ubuntu.
I realize this isn't exactly answering the question at hand, but I think this might be useful to someone searching here in the future (it would have been to me!): I made a little snippet that allows one to recursively delete only empty directories. If a directory (or any of its descendant directories) has content inside it, it is left alone:
var fs = require("fs");
var path = require("path");

var rmdir = function(dir) {
  var empty = true, list = fs.readdirSync(dir);
  for (var i = list.length - 1; i >= 0; i--) {
    var filename = path.join(dir, list[i]);
    var stat = fs.statSync(filename);
    if (filename.indexOf('.') > -1) {
      // There are files in the directory - we can't empty it!
      empty = false;
      list.splice(i, 1);
    }
  }
  // Cycle through the list of sub-directories, cleaning each as we go
  for (var i = list.length - 1; i >= 0; i--) {
    filename = path.join(dir, list[i]);
    if (rmdir(filename)) {
      list.splice(i, 1);
    }
  }
  // Check if the directory was truly empty
  if (!list.length && empty) {
    console.log('delete!');
    fs.rmdirSync(dir);
    return true;
  }
  return false;
};
https://gist.github.com/azaslavsky/661020d437fa199e95ab
Most of the examples I see out there are synchronous implementations of recursively deleting a folder structure in Node.
I've also seen a few asynchronous ones that don't actually work well.
I wrote and use one that's completely asynchronous: https://gist.github.com/yoavniran/adbbe12ddf7978e070c0
Removing NON-EMPTY directories SYNCHRONOUSLY:
Here the stuff folder contains a single file, writeMe.txt:
var fs = require('fs');

fs.unlink('./stuff/writeMe.txt', function() {
  fs.rmdirSync('stuff');
});
I am first removing the writeMe.txt file from the stuff folder with fs.unlink('./stuff/writeMe.txt'), which makes the stuff folder empty, and finally removing the folder itself with fs.rmdirSync('stuff').
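Note that fs.unlink with a callback is actually asynchronous; a fully synchronous sketch of the same idea would be:

var fs = require('fs');

// Both calls block until done, so rmdirSync always runs after the unlink
fs.unlinkSync('./stuff/writeMe.txt');
fs.rmdirSync('stuff');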
This function will recursively delete a directory or file that you specify, synchronously:
var fs = require('fs');
var path = require('path');
var _ = require('lodash'); // for _.each; Array.prototype.forEach would work too

function deleteRecursiveSync(itemPath) {
  if (fs.statSync(itemPath).isDirectory()) {
    _.each(fs.readdirSync(itemPath), function(childItemName) {
      deleteRecursiveSync(path.join(itemPath, childItemName));
    });
    fs.rmdirSync(itemPath);
  } else {
    fs.unlinkSync(itemPath);
  }
}
I have not tested this function's behavior if:
the item does not exist, or
the item cannot be deleted (such as due to a permissions issue).
A guarded wrapper covering those two cases is sketched below.
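A minimal sketch of such a guard, wrapping the function above (the name deleteRecursiveSyncSafe is hypothetical; on a permission error it just reports and moves on):

function deleteRecursiveSyncSafe(itemPath) {
  if (!fs.existsSync(itemPath)) {
    return; // nothing to delete
  }
  try {
    deleteRecursiveSync(itemPath);
  } catch (err) {
    // e.g. EACCES / EPERM when the item cannot be deleted
    console.error('Could not delete ' + itemPath + ':', err.message);
  }
}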
Recursive remove directory for Node.js
It turns out that the Node.js fs module does not have a method to remove a directory and its content recursively. Instead you should go through the directory structure and remove atomic items, i.e. individual files and empty directories. So I found a nice gist by Takuo Kihira at https://gist.github.com/2367067, made in JavaScript, and decided to make a CoffeeScript version of it:
I tried to make this fail-safe, as synchronous removal will cause an error if a file or directory is in use at the time.
var path = require('path');
var fs = require('fs');

var dumpDirs = function (dir, name, cb) {
  fs.readdir(dir, function (err, files) {
    var dirs = [],
        filePath, l = files.length;
    for (var i = 0; i < l; i++) {
      filePath = path.join(dir, files[i]);
      var stats = fs.lstatSync(filePath);
      if (stats.isDirectory()) {
        if (files[i].indexOf(name) != -1) {
          dirs.push({
            startOn: new Date(stats.ctime),
            instance: files[i],
            name: name
          });
        }
      }
    }
    cb(dirs);
  });
};

var removeDir = function (dir, callback) {
  fs.readdir(dir, function (err, files) {
    var c = files.length;
    (function remfile(i, cb) {
      if (i >= c)
        return cb();
      var p = path.join(dir, files[i]);
      fs.unlink(p, function (err) {
        if (err) console.log(err);
        remfile(i + 1, cb);
      });
    })(0, function () {
      fs.rmdir(dir, function (err) {
        callback();
      });
    });
    //for (var i = 0; i < c; i++) {
    //  fs.unlinkSync(path.join(dir, files[i]));
    //};
  });
};

// maindir, name and callback come from the surrounding context
dumpDirs(maindir, name, function (dirs) {
  if (dirs && dirs.length > 0) {
    (function rem(i, cb) {
      if (i >= dirs.length) {
        return cb();
      }
      var folder = path.join(maindir, dirs[i].instance);
      removeDir(folder, function () {
        rem(i + 1, cb);
      });
    })(0, function () {
      callback();
    });
  }
  else {
    callback();
  }
});
Here is the CoffeeScript prototype function I created for fluentnode that deletes a folder recursively:
fs = require('fs')

String::folder_Delete_Recursive = ->
  path = @.toString()
  if path.exists()
    for file in path.files()
      curPath = path.path_Combine(file)
      if curPath.is_Folder()
        curPath.folder_Delete_Recursive()
      else
        curPath.file_Delete()
    fs.rmdirSync(path)
  return path.not_Exists()
Here is the test:
it 'folder_Create and folder_Delete', ->
  tmpDir = "./".temp_Name_In_Folder()
  expect(tmpDir.folder_Exists()).to.be.false
  expect(tmpDir.folder_Create()).to.equal(tmpDir.realPath())
  expect(tmpDir.folder_Exists()).to.be.true
  expect(tmpDir.folder_Delete()).to.be.true
  expect(tmpDir.folder_Exists()).to.be.false

it 'folder_Delete_Recursive', ->
  tmpDir = "./".temp_Name_In_Folder().folder_Create()
  tmpFile = tmpDir.temp_Name_In_Folder().file_Create()
  expect(tmpDir.folder_Delete_Recursive()).to.be.true
A neat synchronous version of rmdirSync.
/**
 * use with try ... catch ...
 *
 * If you have permission to remove all files/dirs
 * and there is no race condition and no IO exception...
 * then this should work
 *
 * uncomment the line
 *   if (!fs.existsSync(p)) return
 * if you care about the initial state of the dir
 */
var fs = require('fs');
var path = require('path');

function rmdirSync(dir, file) {
  var p = file ? path.join(dir, file) : dir;
  // if (!fs.existsSync(p)) return;
  if (fs.lstatSync(p).isDirectory()) {
    fs.readdirSync(p).forEach(rmdirSync.bind(null, p));
    fs.rmdirSync(p);
  } else {
    fs.unlinkSync(p);
  }
}
And a parallel IO, asynchronous version of rmdir. (faster)
/**
 * NOTE:
 *
 * If there are no errors, the callback will only be called once.
 *
 * If there are multiple errors, the callback will be called
 * exactly as many times as errors occur.
 *
 * Sometimes this behavior may be useful, but users
 * should be aware of it and handle errors in the callback.
 */
var fs = require('fs');
var path = require('path');

function rmfile(dir, file, callback) {
  var p = path.join(dir, file);
  fs.lstat(p, function (err, stat) {
    if (err) callback.call(null, err);
    else if (stat.isDirectory()) rmdir(p, callback);
    else fs.unlink(p, callback);
  });
}

function rmdir(dir, callback) {
  fs.readdir(dir, function (err, files) {
    if (err) callback.call(null, err);
    else if (files.length) {
      var i, j;
      for (i = j = files.length; i--;) {
        rmfile(dir, files[i], function (err) {
          if (err) callback.call(null, err);
          else if (--j === 0) fs.rmdir(dir, callback);
        });
      }
    }
    else fs.rmdir(dir, callback);
  });
}
Anyway, if you want sequential IO, with the callback called exactly once (either on success or with the first error encountered), replace the rmdir above with this. (slower)
function rmdir(dir, callback) {
  fs.readdir(dir, function (err, files) {
    if (err) callback.call(null, err);
    else if (files.length) rmfile(dir, files[0], function (err) {
      if (err) callback.call(null, err);
      else rmdir(dir, callback);
    });
    else fs.rmdir(dir, callback);
  });
}
All of them depend ONLY on node.js and should be portable.
This post is the top Google result, but none of the answers gives a solution that:
doesn't make use of sync functions
doesn't require external libraries
doesn't use bash directly
Here is my async solution, which doesn't assume anything other than node being installed:
const fs = require('fs');
const Path = require('path');

function rm(path) {
  return stat(path).then((_stat) => {
    if (_stat.isDirectory()) {
      return ls(path)
        .then((files) => Promise.all(files.map(file => rm(Path.join(path, file)))))
        .then(() => removeEmptyFolder(path));
    } else {
      return removeFileOrLink(path);
    }
  });

  function removeEmptyFolder(path) {
    return new Promise((done, err) => {
      fs.rmdir(path, function(error) {
        if (error) { return err(error); }
        return done("ok");
      });
    });
  }

  function removeFileOrLink(path) {
    return new Promise((done, err) => {
      fs.unlink(path, function(error) {
        if (error) { return err(error); }
        return done("ok");
      });
    });
  }

  function ls(path) {
    return new Promise((done, err) => {
      fs.readdir(path, function(error, files) {
        if (error) return err(error);
        return done(files);
      });
    });
  }

  function stat(path) {
    return new Promise((done, err) => {
      fs.stat(path, function(error, _stat) {
        if (error) { return err(error); }
        return done(_stat);
      });
    });
  }
}
Following on @geedew's answer.
Here is an asynchronous implementation of rm -r (i.e. you can pass a path to a file or directory). I'm not an experienced nodejs developer and appreciate any suggestions or constructive criticism.
var fs = require('fs');

function ResultsCollector(numResultsExpected, runWhenDone) {
  this.numResultsExpected = numResultsExpected;
  this.runWhenDone = runWhenDone;
  this.numResults = 0;
  this.errors = [];
  this.report = function (err) {
    if (err) this.errors.push(err);
    this.numResults++;
    if (this.numResults == this.numResultsExpected) {
      if (this.errors.length > 0) return runWhenDone(this.errors);
      else return runWhenDone();
    }
  };
}

function rmRasync(path, cb) {
  fs.lstat(path, function (err, stats) {
    if (err && err.code == 'ENOENT') return cb(); // doesn't exist, nothing to do
    else if (err) {
      return cb(err);
    }
    if (stats.isDirectory()) {
      fs.readdir(path, function (err, files) {
        if (err) return cb(err);
        if (files.length === 0) {
          // empty directory: remove it directly, or the collector below
          // would never fire (it expects at least one result)
          return fs.rmdir(path, cb);
        }
        var resultsCollector = new ResultsCollector(files.length, function (err) {
          if (err) return cb(err);
          fs.rmdir(path, function (err) {
            if (err) return cb(err);
            return cb();
          });
        });
        files.forEach(function (file) {
          var filePath = path + '/' + file;
          return rmRasync(filePath, function (err) {
            return resultsCollector.report(err);
          });
        });
      });
    }
    else { // file: delete file or link
      fs.unlink(path, function (err) {
        if (err) return cb(err);
        return cb();
      });
    }
  });
}
Invoke like so:
rmRasync('/path/to/some/file/or/dir', function (err) {
  if (err) return console.error('Could not rm', err);
  // else success
});
Surprisingly verbose and bad answers here...
To delete a non-empty directory on most systems:
import * as cp from 'child_process';

const dir = '/the/dir/to/remove';
const k = cp.spawn('bash');
k.stdin.end(`rm -rf "${dir}"`);
k.once('exit', code => {
  // check the exit code
  // now you are done
});
This will work on macOS and Linux, but it might not work on Windows.
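A hedged cross-platform variant of the same idea, picking the native recursive-delete command per platform (rd /s /q on Windows, rm -rf elsewhere):

import * as cp from 'child_process';

const dir = '/the/dir/to/remove';
const k = process.platform === 'win32'
  ? cp.spawn('cmd', ['/c', 'rd', '/s', '/q', dir])
  : cp.spawn('rm', ['-rf', dir]);
k.once('exit', code => {
  // a non-zero code means the delete failed
});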

How to watch symlink'ed files in node.js using watchFile()

I am trying to monitor a file that is (soft) symlink'ed with node.js' watchFile() with the following code:
var fs = require('fs')
  , file = './somesymlink'
  , config = { persist: true, interval: 1 };

fs.watchFile(file, config, function(curr, prev) {
  if ((curr.mtime + '') != (prev.mtime + '')) {
    console.log(file + ' changed');
  }
});
In the above code, ./somesymlink is a (soft) symlink to /path/to/the/actual/file.
When changes are made to the /path/to/the/actual/file, no event is fired. I have to replace the symlink with /path/to/the/actual/file to make it work. It seems to me that watchFile is not able to watch symlink'ed files. Of course I could make this work by using spawn+tail method but I prefer not to use that path as it would introduce more overhead.
So my question is how can I watch symlink'ed files in node.js using watchFile(). Thanks folks in advance.
You could use fs.readlink:
fs.readlink(file, function(err, realFile) {
  if (!err) {
    fs.watch(realFile, ... );
  }
});
Of course, you could get fancier and write a little wrapper that can watch either the file or its link, so you don't have to think about it.
UPDATE: Here's such a wrapper, for the future:
/** Helper for watchFile, also handling symlinks */
function watchFile(path, callback) {
  // Check if it's a link
  fs.lstat(path, function(err, stats) {
    if (err) {
      // Handle errors
      return callback(err);
    } else if (stats.isSymbolicLink()) {
      // Read symlink
      fs.readlink(path, function(err, realPath) {
        // Handle errors
        if (err) return callback(err);
        // Watch the real file
        fs.watch(realPath, callback);
      });
    } else {
      // It's not a symlink, just watch it
      fs.watch(path, callback);
    }
  });
}
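A usage sketch (assuming the same ./somesymlink path as in the question; note that the callback doubles as the error handler and the fs.watch listener, so it receives either an Error or fs.watch's (eventType, filename) arguments):

var fs = require('fs');

watchFile('./somesymlink', function(errOrEvent, filename) {
  if (errOrEvent instanceof Error) return console.error(errOrEvent);
  console.log('./somesymlink changed:', errOrEvent, filename);
});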
