Run all js files sequentially with console log - node.js

I need to execute some js files one after another; each of them prints important information via console.log.
I'm currently trying the following code, but it does not show the console.log output from the child scripts.
How can I get this approach to work?
const fs = require('fs')
const exec = require('child_process').exec
const async = require('async') // npm install async
const path = require('path');
const scriptsFolder = path.resolve("./") + "\\scripts\\" // assuming a folder named "scripts"
const files = fs.readdirSync(scriptsFolder)
const targetFiles = files.filter(function (file) {
return path.extname(file).toLowerCase() === '.js';
});
const funcs = targetFiles.map(function (file) {
return exec.bind(null, `node ${scriptsFolder}${file}`)
})
function getResults(err, data) {
if (err) {
return console.log(err)
}
const results = data.map(function (lines) {
return lines.join('')
})
console.log(results)
}
async.series(funcs, getResults)

Since there is no interaction between the js files, I just use a batch script to run all of them sequentially, as below:
for /R %%f in (*.js) do if not %%f==%~dpnx0 node "%%f"

Why do you execute each js file in a separate process?
Try to require the files one after the other, with no child_process.
Check this answer for a great example of dynamically requiring a folder.
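For illustration, a minimal sketch of that approach, assuming the scripts live in a folder named "scripts": each file is required in turn, runs in the current process, and therefore logs straight to this console.
const fs = require('fs');
const path = require('path');
const scriptsFolder = path.resolve(__dirname, 'scripts'); // assumed folder name
fs.readdirSync(scriptsFolder)
.filter(file => path.extname(file).toLowerCase() === '.js')
.forEach(file => require(path.join(scriptsFolder, file)));
Keep in mind that require caches modules (each file runs at most once per process) and that asynchronous work started by one file is not awaited before the next one is required.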

You can consider running the scripts using execSync with the stdio: "inherit" option to direct output from each script to the parent process:
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
const scriptsFolder = path.resolve('./scripts') // adjust to your scripts folder
const files = fs.readdirSync(scriptsFolder)
const targetFiles = files.filter(function (file) {
return path.extname(file).toLowerCase() === '.js';
});
for (let file of targetFiles) {
execSync(`node ${scriptsFolder}/${file}`, {
stdio: 'inherit'
});
}

Related

NodeJS App with PM2 and daily changed json file

I currently have a NodeJS app running in PM2. The app displays data from a JSON file. The data is retrieved daily at 12 o'clock via a cURL command run by the system cron. For the data to load in the frontend I always have to execute a "pm2 reload ...", which is quite annoying to me.
Does anybody have an idea how I can solve this problem in the most elegant way? I have not worked with PM2 yet, hence my stupid question :)
Best
Not really PM2-related. When the JSON file changes you have to re-read the file into memory by importing/requiring it again. Use fs.watch to watch for file changes, or add a built-in timer to re-read the file after midnight.
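A minimal sketch of the fs.watch variant, with an illustrative file name; the parsed data is simply replaced whenever the file changes:
const fs = require('fs');
const path = require('path');
const filePath = path.join(__dirname, 'data.json'); // hypothetical path
let data = JSON.parse(fs.readFileSync(filePath, 'utf8'));
fs.watch(filePath, (eventType) => {
if (eventType === 'change') {
try {
data = JSON.parse(fs.readFileSync(filePath, 'utf8'));
} catch (err) {
// the file may be mid-write; keep the previous data
}
}
});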
I am sorry for not answering sooner. I have two possible solutions. I chose version 2 myself because it does not need external cronjobs.
Option-1:
I call an API with cURL on the host at midnight and write the result to a JSON file. I read the file with fs.promises and watch for file changes with chokidar.
const fs = require('fs').promises,
path = require('path'),
chokidar = require('chokidar'),
filePath = path.join(__dirname, '../<PATH_TO_FILE>/<FILE>.json'),
watcher = chokidar.watch(filePath, { persistent: true });
watcher.on('change', () => {
getJSONData();
});
const getJSONData = async () => {
try {
let getFileData = await fs.readFile(filePath, { encoding: 'utf-8' });
return JSON.parse(getFileData);
} catch (error) {
console.log('Error while reading the JSON file.');
}
};
Option-2:
I call the API inside a function with the node_module "node-schedule".
require('dotenv').config();
const { <API_SERVICE_MODULE> } = require('<API_SERVICE_MODULE>'),
schedule = require('node-schedule'),
API_KEY = process.env.API_KEY,
LABEL = process.env.LABEL;
const API = new <API_SERVICE_MODULE>({
projectId: API_KEY,
});
const getDataFromAPI = async () => {
try {
const data = await API.<METADATA>(LABEL);
return data;
} catch (err) {
return 'Error with fetching data from API';
}
};
schedule.scheduleJob('*/10 * * * *', () => {
getDataFromAPI();
});
FYI: Both approaches work with PM2.
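Side note: the '*/10 * * * *' pattern above fires every 10 minutes. For the daily refresh at midnight described in the question, a standard cron pattern like this should work with node-schedule:
// runs once a day at 00:00
schedule.scheduleJob('0 0 * * *', () => {
getDataFromAPI();
});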

Execute Multiple .js Files From Terminal In Sequence

I have about 100 JS files with each having different script.
file1.js , file2.js , file3.js , ... , file100.js
Right now, to execute each file I have to go to my terminal and do this:
node file1
node file2
node file3
.
.
node file100
That means I have to do this 100 times in the terminal. Is there a script I can write in a JS file so that executing ONLY ONE JS file runs all 100 JS files in sequence?
I also want to give 3 seconds waiting between each execution which I believe I can achieve with the following code:
var interval = 3000;
var promise = Promise.resolve();
promise = promise.then(function () {
return new Promise(function (resolve) {
setTimeout(resolve, interval);
});
});
Any suggestion how to execute all the 100 JS files without typing node file# 100 times in the terminal?
Here is a script that does that. Instead of typing
node file1
..
node filex
in the terminal, you list the file names in a text file, one per line (file1.js, file2.js, ... filex.js); the code below reads them from exec.txt. Also look into child_process before using this code.
const { readFileSync } = require('fs');
const {exec} = require("child_process");
function executeEachFile(command){
exec(command, (error, stdout, stderr) => {
if(error){
console.log(error);
}
console.log("stdout: " + stdout)
});
}
function readFile(path){
const output = readFileSync(path,"utf8").split("\n").map(file=>{
return file.replace("\r","");
});
console.log(output)
return output
};
function IntervalLoop(ArrayofFiles){
let counter = 0;
const interval = setInterval(()=>{
const commandName = "node "+ ArrayofFiles[counter];
console.log(commandName);
executeEachFile(commandName);
counter++;
console.log(counter);
if (ArrayofFiles.length == counter){
clearInterval(interval);
}
}, 3000) // 3-second gap; note exec is async, so a slow script may still be running when the next one starts
}
const fileNames = readFile("exec.txt");
IntervalLoop(fileNames);
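If the 3-second gap matters and the scripts must never overlap, a simpler sketch (assuming the same exec.txt list of file names) is to run each file to completion with execSync and then sleep:
const { execSync } = require('child_process');
const { readFileSync } = require('fs');
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
(async () => {
const files = readFileSync('exec.txt', 'utf8').split('\n').map((f) => f.trim()).filter(Boolean);
for (const file of files) {
execSync(`node ${file}`, { stdio: 'inherit' }); // runs to completion, output goes to this terminal
await sleep(3000); // 3-second gap between executions
}
})();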

Node.JS redirect eval output to string

Can I redirect the nodejs eval output to a string, or capture it some other way?
Edit: I need to return it as a string in the response of my web application (POST data).
var http = require('http');
var server = http.createServer(function(req, res) {
var script = '';
if (req.method === 'POST' && req.url === '/doeval') {
req.on('data', function(chunk) {
script += chunk;
});
req.on('end', function() {
// script = 'console.log("aaaa")';
var result = eval(script);
res.write(String(result));
res.end();
});
}
});
server.listen(80);
output:
result: undefined
expected:
result: aaaa
Note: Running untrusted code is extremely dangerous and you should be extremely careful with whatever solution you are choosing. The one proposed here has flaws as well. To be safer, consider using a container, such as docker or lxc.
Don't use eval in this case. It's the wrong tool for the job. Do you really want arbitrary code to be evaled in your webserver context? What if the sent code is throw new Error('haha') or process.exit()? It's a huge security risk! Write the code to a temporary file, spawn a node process that executes the file and read its output. See https://nodejs.org/api/child_process.html .
Example:
var http = require('http');
var fs = require('fs');
var childProcess = require('child_process');
var server = http.createServer(function(req, res) {
var script = '';
if (req.method === 'POST' && req.url === '/doeval') {
req.on('data', function(chunk) {
script += chunk;
});
req.on('end', function() {
var tempFileName = Date.now() + '.js';
fs.writeFile(tempFileName, script, function(err) {
if (err) {
// handle error
}
childProcess.execFile('node', [tempFileName], function(err, stdout, stderr) {
if (err) {
// handle err
}
// Delete tempfile...
res.write(stdout);
res.end();
});
});
});
}
});
server.listen(80);
There are existing npm packages that help creating and cleaning up temporary files. Also have a look at other methods of child_process.
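For instance, the built-in fs.mkdtemp can create a unique temporary directory (avoiding name collisions from Date.now()); a rough sketch:
const os = require('os');
const path = require('path');
const fs = require('fs');
fs.mkdtemp(path.join(os.tmpdir(), 'doeval-'), function(err, dir) {
if (err) throw err;
const tempFileName = path.join(dir, 'script.js');
// write the script here, execFile it, then remove dir when done
});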
But, this is not secure yet because the subprocess will run with the same privileges as your server (which by the looks of it runs as root :-/ ).
You should set the owner (and the group) of the file and the subprocess to nobody or some other system user that basically doesn't have any rights to access anything. Or chroot the subprocess.
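child_process.execFile accepts uid and gid options, so a sketch of dropping privileges might look like this (65534 is commonly the nobody user, but verify the ids on your system, e.g. with id nobody):
// hypothetical uid/gid values; look them up for your system
childProcess.execFile('node', [tempFileName], { uid: 65534, gid: 65534 },
function(err, stdout, stderr) {
// ...
});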
Note: It was downvoted but read the entire answer before jumping to conclusions
First of all this looks like a completely insecure functionality that can potentially open your system to countless vulnerabilities. But it's an interesting question so I'll answer how you can do what you ask for, but I strongly suggest to reconsider your requirements nonetheless.
That having been said, you could pass a fake console object to your evaluated script by wrapping it in a closure, similar to how modules are wrapped when they are required.
Instead of eval you can use the vm module to run the script in a separate V8 context with no access to the file system or even require().
Note that I don't recommend saving it in a file and running it as a child process because that way the script will have access to your file system which is a serious vulnerability. The only option I would ever consider running untrusted code as a standalone process would be inside of a container that has no access to the network or any shared storage outside of that container.
With eval
For example:
const util = require('util');
const script = 'console.log("aaaa");';
let result = '';
const cons = {
log: (...args) => result += (util.format(...args) + '\n'),
};
eval(`((console) => { ${script} })`)(cons);
console.log('result:', result);
This will work if everything is synchronous. If the console.log happens asynchronously then you will have to add some way of waiting for the changes.
So this will not work:
const util = require('util');
const script = 'setTimeout(() => console.log("aaaa"), 1000);';
let result = '';
const cons = {
log: (...args) => result += (util.format(...args) + '\n'),
};
eval(`((console) => { ${script} })`)(cons);
console.log('result:', result);
but this will:
const util = require('util');
const script = 'setTimeout(() => console.log("aaaa"), 1000);';
let result = '';
const cons = {
log: (...args) => result += (util.format(...args) + '\n'),
};
eval(`((console) => { ${script} })`)(cons);
setTimeout(() => console.log('result:', result), 1500);
because it waits before inspecting the collected output longer than it takes the evaluated code to create that output.
Without eval
You can run that code in a separate V8 context that has no access to other modules, file system, etc. For example:
const vm = require('vm');
const util = require('util');
const script = 'console.log("aaaa");';
let result = '';
const cons = {
log: (...args) => result += (util.format(...args) + '\n'),
};
const context = vm.createContext({console: cons});
vm.runInContext(script, context);
console.log('result:', result);
Handling syntax errors
You can handle syntax errors to make sure that this script will not crash your application like this:
const vm = require('vm');
const util = require('util');
const script = 'console.lo("aaaa");';
let result = '';
const cons = {
log: (...args) => result += (util.format(...args) + '\n'),
};
const context = vm.createContext({console: cons});
try {
vm.runInContext(script, context);
console.log('result:', result);
} catch (err) {
console.log('error:', err.message);
}
Now instead of crashing it will print:
error: console.lo is not a function
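Infinite loops are another failure mode worth guarding against: vm.runInContext accepts a timeout option (in milliseconds) and throws if the script runs longer than that. A minimal sketch:
const vm = require('vm');
const context = vm.createContext({});
try {
// illustrative infinite loop
vm.runInContext('while (true) {}', context, { timeout: 100 });
} catch (err) {
console.log('error:', err.message); // script execution timed out
}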

Remove directory which is not empty

In my Node application I need to remove a directory which has some files, but fs.rmdir only works on empty directories. How can I do this?
As of Node.js 14.14.0, the recommended way is to use fs.rmSync:
fs.rmSync(dir, { recursive: true, force: true });
There is a module for this called rimraf (https://npmjs.org/package/rimraf). It provides the same functionality as rm -Rf
Async usage:
var rimraf = require("rimraf");
rimraf("/some/directory", function () { console.log("done"); });
Sync usage:
rimraf.sync("/some/directory");
To remove folder synchronously
const fs = require('fs');
const path = require('path');
const deleteFolderRecursive = function (directoryPath) {
if (fs.existsSync(directoryPath)) {
fs.readdirSync(directoryPath).forEach((file, index) => {
const curPath = path.join(directoryPath, file);
if (fs.lstatSync(curPath).isDirectory()) {
// recurse
deleteFolderRecursive(curPath);
} else {
// delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(directoryPath);
}
};
Most people using fs with Node.js want functions close to the "Unix way" of dealing with files. I'm using fs-extra to bring all the cool stuff:
fs-extra contains methods that aren't included in the vanilla Node.js
fs package. Such as mkdir -p, cp -r, and rm -rf.
Even better, fs-extra is a drop in replacement for native fs. All methods in fs are unmodified and attached to it.
It means that you can replace fs with fs-extra:
// this can be replaced
const fs = require('fs')
// by this
const fs = require('fs-extra')
And then you can remove a folder this way:
fs.removeSync('/tmp/myFolder');
//or
fs.remove('/tmp/myFolder', callback);
As of Node v14 (October 2020), the fs module has fs.rm and fs.rmSync that support recursive, non-empty directory unlinking:
https://nodejs.org/docs/latest-v14.x/api/fs.html#fs_fs_rm_path_options_callback
So you can now do something like this:
const fs = require('fs');
fs.rm('/path/to/delete', { recursive: true }, () => console.log('done'));
or:
const fs = require('fs');
fs.rmSync('/path/to/delete', { recursive: true });
console.log('done');
My modified answer from @oconnecp (https://stackoverflow.com/a/25069828/3027390)
Uses path.join for a better cross-platform experience.
So, don't forget to require it (fs is needed as well):
var fs = require('fs');
var path = require('path');
Also renamed the function to rimraf ;)
/**
* Remove directory recursively
* @param {string} dir_path
* @see https://stackoverflow.com/a/42505874/3027390
*/
function rimraf(dir_path) {
if (fs.existsSync(dir_path)) {
fs.readdirSync(dir_path).forEach(function(entry) {
var entry_path = path.join(dir_path, entry);
if (fs.lstatSync(entry_path).isDirectory()) {
rimraf(entry_path);
} else {
fs.unlinkSync(entry_path);
}
});
fs.rmdirSync(dir_path);
}
}
I don't usually resurrect old threads, but there is a lot of churn here, and sans the rimraf answer these all seem overly complicated to me.
First, in modern Node (>= v8.0.0) you can simplify the process using only node core modules, fully asynchronous, parallelizing the unlinking of files, all in a function of five lines while keeping readability:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const readdir = promisify(fs.readdir);
const rmdir = promisify(fs.rmdir);
const unlink = promisify(fs.unlink);
exports.rmdirs = async function rmdirs(dir) {
let entries = await readdir(dir, { withFileTypes: true });
await Promise.all(entries.map(entry => {
let fullPath = path.join(dir, entry.name);
return entry.isDirectory() ? rmdirs(fullPath) : unlink(fullPath);
}));
await rmdir(dir);
};
On another note, a guard for path traversal attacks is inappropriate for this function because:
1. It is out of scope, based on the Single Responsibility Principle.
2. It should be handled by the caller, not this function. This is akin to the command-line rm -rf in that it takes an argument and will allow the user to rm -rf / if asked to. It would be the responsibility of a script to guard, not the rm program itself.
3. This function would be unable to determine such an attack since it has no frame of reference. Again, that is the responsibility of the caller, who has the context of intent and therefore a reference path against which to compare.
Sym-links are not a concern as .isDirectory() is false for sym-links and are unlinked not recursed into.
Last but not least, there is a rare race condition that the recursion could error if one of the entries was unlinked or deleted outside this script at just the right time while this recursion is running. Since this scenario is not typical in most environments it can likely be overlooked. However, if required (for some edge cases) this issue can be mitigated with this slightly more complex example:
exports.rmdirs = async function rmdirs(dir) {
let entries = await readdir(dir, { withFileTypes: true });
let results = await Promise.all(entries.map(entry => {
let fullPath = path.join(dir, entry.name);
let task = entry.isDirectory() ? rmdirs(fullPath) : unlink(fullPath);
return task.catch(error => ({ error }));
}));
results.forEach(result => {
// Ignore missing files/directories; bail on other errors
if (result && result.error.code !== 'ENOENT') throw result.error;
});
await rmdir(dir);
};
EDIT: Make isDirectory() a function. Remove the actual directory at the end. Fix missing recursion.
Here is an async version of @SharpCoder's answer
const fs = require('fs');
const path = require('path');
function deleteFile(dir, file) {
return new Promise(function (resolve, reject) {
var filePath = path.join(dir, file);
fs.lstat(filePath, function (err, stats) {
if (err) {
return reject(err);
}
if (stats.isDirectory()) {
resolve(deleteDirectory(filePath));
} else {
fs.unlink(filePath, function (err) {
if (err) {
return reject(err);
}
resolve();
});
}
});
});
};
function deleteDirectory(dir) {
return new Promise(function (resolve, reject) {
fs.access(dir, function (err) {
if (err) {
return reject(err);
}
fs.readdir(dir, function (err, files) {
if (err) {
return reject(err);
}
Promise.all(files.map(function (file) {
return deleteFile(dir, file);
})).then(function () {
fs.rmdir(dir, function (err) {
if (err) {
return reject(err);
}
resolve();
});
}).catch(reject);
});
});
});
};
From Node docs, as one can see here.
To get a behavior similar to the rm -rf Unix command, use fs.rm() with options { recursive: true, force: true }.
For Example (ESM)
import { rm } from 'node:fs/promises';
await rm('/path/to', { recursive: true, force: true });
I wrote this function called removeFolder. It will recursively remove all the files and folders in a location. The only package it requires is async.
var fs = require('fs');
var async = require('async');
function removeFolder(location, next) {
fs.readdir(location, function (err, files) {
async.each(files, function (file, cb) {
file = location + '/' + file
fs.stat(file, function (err, stat) {
if (err) {
return cb(err);
}
if (stat.isDirectory()) {
removeFolder(file, cb);
} else {
fs.unlink(file, function (err) {
if (err) {
return cb(err);
}
return cb();
})
}
})
}, function (err) {
if (err) return next(err)
fs.rmdir(location, function (err) {
return next(err)
})
})
})
}
[EDIT: using node.js v15.5.0]
Having just tried using some of the solutions posted here, I encountered the following deprecation warning:
(node:13202) [DEP0147] DeprecationWarning: In future versions of
Node.js, fs.rmdir(path, { recursive: true }) will throw if path does
not exist or is a file. Use fs.rm(path, { recursive: true, force: true
}) instead
fs.rm(path, { recursive: true, force: true }); works nicely, with fs.rmSync(path, { recursive: true, force: true }); if you want to use the blocking version.
If you are using node 8+, want asynchronicity, and don't want external dependencies, here is the async/await version:
const path = require('path');
const fs = require('fs');
const util = require('util');
const readdir = util.promisify(fs.readdir);
const lstat = util.promisify(fs.lstat);
const unlink = util.promisify(fs.unlink);
const rmdir = util.promisify(fs.rmdir);
const removeDir = async (dir) => {
try {
const files = await readdir(dir);
await Promise.all(files.map(async (file) => {
try {
const p = path.join(dir, file);
const stat = await lstat(p);
if (stat.isDirectory()) {
await removeDir(p);
} else {
await unlink(p);
console.log(`Removed file ${p}`);
}
} catch (err) {
console.error(err);
}
}))
await rmdir(dir);
console.log(`Removed dir ${dir}`);
} catch (err) {
console.error(err);
}
}
const fs = require("fs");
fs.rmdir("./test", { recursive: true }, (err) => {
if (err) {
console.error(err);
}
});
Provide the recursive: true option and it will recursively delete all the files and directories of the given path. (Assuming test is a directory present at the root.)
2020 Update
From version 12.10.0, a recursive option has been added.
Note that recursive deletion is experimental.
So you would do for sync:
fs.rmdirSync(dir, {recursive: true});
or for async:
fs.rmdir(dir, {recursive: true});
According to the fs documentation, fsPromises currently provides the recursive option on an experimental basis, which, at least in my own case on Windows, removes the directory and any files therein.
fsPromises.rmdir(path, {
recursive: true
})
Does recursive: true remove the files on Linux and MacOS?
Explanations
As of Node.js v14, we can now use the require("fs").promises.rm function to remove a file using a promise. The first argument will be the file or folder to remove (even non-existent ones). You can use the recursive and force options in the second argument's object to mimic the behavior of the rm shell command utility with the -rf options.
Example
"use strict";
require("fs").promises.rm("directory", {recursive: true, force: true}).then(() => {
console.log("removed");
}).catch(error => {
console.error(error.message);
});
See
Node.js v14 Documentation
Mozilla Developer Promises Documentation
rm command manual page
I reached here while trying to get over a gulp issue, and I'm writing this for future readers.
gulp-clean was deprecated in favor of gulp-rimraf
gulp-rimraf was in turn deprecated in favor of del (delete files and folders)
When you want to delete files and folders using del, you should append /** for recursive deletion.
const del = require('del'); // npm install del
gulp.task('clean', function () {
return del(['some/path/to/delete/**']);
});
In the latest version of Node.js (12.10.0 or later), the rmdir style functions fs.rmdir(), fs.rmdirSync(), and fs.promises.rmdir() have a new experimental option recursive that allows deleting non-empty directories, e.g.
fs.rmdir(path, { recursive: true });
The related PR on GitHub: https://github.com/nodejs/node/pull/29168
Async version of @SharpCoder's answer using fs.promises:
const fs = require('fs');
const afs = fs.promises;
const deleteFolderRecursive = async path => {
if (fs.existsSync(path)) {
for (let entry of await afs.readdir(path)) {
const curPath = path + "/" + entry;
if ((await afs.lstat(curPath)).isDirectory())
await deleteFolderRecursive(curPath);
else await afs.unlink(curPath);
}
await afs.rmdir(path);
}
};
const fs = require("fs");
// wrapped in a function so the top-level return is valid
function removeDir(dir) {
return new Promise((resolve, reject) => {
// delete directory recursively <------
fs.rmdir(dir, { recursive: true }, (err) => {
if (err) {
return reject(err);
}
resolve(`${dir} is deleted!`);
});
});
}
removeDir("your/dir");
If you prefer async/await, you can use the fs/promises API.
const fs = require('fs/promises');
const removeDir = async (dirPath) => {
await fs.rm(dirPath, {recursive: true});
}
If you know the path of a single file in the folder and wish to delete the folder that contains it:
const fs = require('fs/promises');
const path = require('path');
const removeDir = async (filePath) => {
const { dir } = path.parse(filePath);
await fs.rm(dir, { recursive: true });
}
Just use the rmdir module! It's easy and simple.
A quick and dirty way (maybe for testing) could be to directly use the exec or spawn method to invoke an OS call to remove the directory. Read more on the Node.js child_process docs.
let exec = require('child_process').exec
exec('rm -Rf /tmp/*.zip', callback)
Downsides are:
You are depending on the underlying OS, i.e. the same method would work on unix/linux but probably not on windows.
You cannot hijack the process on conditions or errors. You just give the task to underlying OS and wait for the exit code to be returned.
Benefits:
These processes can run asynchronously.
You can listen for the output/error of the command, hence the command output is not lost. If the operation does not complete, you can check the error code and retry.
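For instance, a rough sketch of capturing the command's output and exit status with exec's callback (the path is illustrative):
const { exec } = require('child_process');
exec('rm -rf /tmp/some-dir', (error, stdout, stderr) => {
if (error) {
console.error(`exit code ${error.code}:`, stderr);
return; // could retry here
}
console.log('removed', stdout);
});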
The de facto package is rimraf, but here is my tiny async version:
const fs = require('fs')
const path = require('path')
const Q = require('q')
function rmdir (dir) {
return Q.nfcall(fs.access, dir, fs.constants.W_OK)
.then(() => {
return Q.nfcall(fs.readdir, dir)
.then(files => files.reduce((pre, f) => pre.then(() => {
var sub = path.join(dir, f)
return Q.nfcall(fs.lstat, sub).then(stat => {
if (stat.isDirectory()) return rmdir(sub)
return Q.nfcall(fs.unlink, sub)
})
}), Q()))
})
.then(() => Q.nfcall(fs.rmdir, dir))
}
In an asynchronous approach, to delete a non-empty dir, either
rmdir(path, { recursive: true, force: true })
or
rm(path, { recursive: true, force: true })
will work.
A code snippet:
const fsp = require("fs/promises");
deleteDirRecursively("./b");
removeRecursively("./BCD/b+");
async function deleteDirRecursively(dirPath) {
try {
// fsPromises.rmdir() on a file (not a directory) results in the promise being rejected
// with an ENOENT error on Windows and an ENOTDIR error on POSIX.
// To get a behavior similar to the rm -rf Unix command,
// use fsPromises.rm() with options { recursive: true, force: true }.
// Will not throw an error if the dir is empty;
// will throw an error if the dir is not present.
await fsp.rmdir(dirPath, { recursive: true });
console.log(dirPath, "deleted successfully");
} catch (err) {
console.log(err);
}
}
async function removeRecursively(path) {
try {
// rm can remove both files and dirs;
// it can delete dirs recursively and forcefully,
// will delete an empty dir, and will remove all the contents of a dir.
// The only difference between rmdir and rm is that rmdir can only delete dirs.
await fsp.rm(path, { recursive: true, force: true });
console.log(path, "deleted successfully");
} catch (err) {
console.log(err);
}
}
Ultra-speed and fail-proof
You can use the lignator package (https://www.npmjs.com/package/lignator); it's faster than any async code (e.g. rimraf) and more fail-proof (especially on Windows, where file removal is not instantaneous and files might be locked by other processes).
4.36 GB of data, 28,042 files and 4,217 folders on Windows were removed in 15 seconds, vs rimraf's 60 seconds, on an old HDD.
const lignator = require('lignator');
lignator.remove('./build/');
Synchronously remove a folder with its files, or a single file.
I am not much of a giver nor a contributor, but I couldn't find a good solution to this problem and had to find my own way... so I hope you'll like it :)
Works perfectly for me with any number of nested directories and subdirectories. Mind the scope of 'this' when recursing the function; your implementation may differ. In my case this function lives inside the return of another function, which is why I call it with this.
const fs = require('fs');
deleteFileOrDir(path, pathTemp = false){
if (fs.existsSync(path)) {
if (fs.lstatSync(path).isDirectory()) {
var files = fs.readdirSync(path);
if (!files.length) return fs.rmdirSync(path);
for (var file in files) {
var currentPath = path + "/" + files[file];
if (!fs.existsSync(currentPath)) continue;
if (fs.lstatSync(currentPath).isFile()) {
fs.unlinkSync(currentPath);
continue;
}
if (fs.lstatSync(currentPath).isDirectory() && !fs.readdirSync(currentPath).length) {
fs.rmdirSync(currentPath);
} else {
this.deleteFileOrDir(currentPath, path);
}
}
this.deleteFileOrDir(path);
} else {
fs.unlinkSync(path);
}
}
if (pathTemp) this.deleteFileOrDir(pathTemp);
}
2020 Answer
If you want to do it via an npm script, you DON'T need to install any 3rd-party package beforehand if you use the npx command.
If, for example, you want to delete the folders dist and .cache when you run npm run clean, just add this command to your package.json:
{
"scripts": {
"clean": "npx rimraf dist .cache"
}
}
It will work on any operating system.
Another alternative is the fs-promise module, which provides promisified versions of the fs-extra methods.
you could then write like this example:
const { remove, mkdirp, writeFile, readFile } = require('fs-promise')
const { join, dirname } = require('path')
async function createAndRemove() {
const content = 'Hello World!'
const root = join(__dirname, 'foo')
const file = join(root, 'bar', 'baz', 'hello.txt')
await mkdirp(dirname(file))
await writeFile(file, content)
console.log(await readFile(file, 'utf-8'))
await remove(join(__dirname, 'foo'))
}
createAndRemove().catch(console.error)
note: async/await requires a recent nodejs version (7.6+)
I wish there was a way to do this without additional modules for something so minuscule and common, but this is the best I could come up with.
Update:
Should now work on Windows (tested Windows 10), and should also work on Linux/Unix/BSD/Mac systems.
const
execSync = require("child_process").execSync,
fs = require("fs"),
os = require("os");
let removeDirCmd, theDir;
removeDirCmd = os.platform() === 'win32' ? "rmdir /s /q " : "rm -rf ";
theDir = __dirname + "/../web-ui/css/";
// WARNING: Do not specify a single file as the windows rmdir command will error.
if (fs.existsSync(theDir)) {
console.log(' removing the ' + theDir + ' directory.');
try {
// execSync is synchronous and takes no callback; it throws on failure
execSync(removeDirCmd + '"' + theDir + '"');
} catch (err) {
console.log(err);
}
}

Piping multiple file streams using Node.js

I want to stream multiple files, one after the other, to the browser. To illustrate, think of having multiple CSS files which should be delivered concatenated as one.
The code I am using is:
var directory = path.join(__dirname, 'css');
fs.readdir(directory, function (err, files) {
async.eachSeries(files, function (file, callback) {
if (!endsWith(file, '.css')) { return callback(); } // (1)
var currentFile = path.join(directory, file);
fs.stat(currentFile, function (err, stats) {
if (stats.isDirectory()) { return callback(); } // (2)
var stream = fs.createReadStream(currentFile).on('end', function () {
callback(); // (3)
});
stream.pipe(res, { end: false }); // (4)
});
}, function () {
res.end(); // (5)
});
});
The idea is that I
filter out all files that do not have the file extension .css.
filter out all directories.
proceed with the next file once a file has been read completely.
pipe each file to the response stream without closing it.
end the response stream once all files have been piped.
The problem is that only the first .css file gets piped, and all remaining files are missing. It's as if (3) would directly jump to (5) after the first (4).
The interesting thing is that if I replace line (4) with
stream.on('data', function (data) {
console.log(data.toString('utf8'));
});
everything works as expected: I see multiple files. If I then change this code to
stream.on('data', function (data) {
res.write(data.toString('utf8'));
});
all files except the first are missing again.
What am I doing wrong?
PS: The error happens using Node.js 0.8.7 as well as using 0.8.22.
UPDATE
Okay, it works if you change the code as follows:
var directory = path.join(__dirname, 'css');
fs.readdir(directory, function (err, files) {
var concatenated = '';
async.eachSeries(files, function (file, callback) {
if (!endsWith(file, '.css')) { return callback(); }
var currentFile = path.join(directory, file);
fs.stat(currentFile, function (err, stats) {
if (stats.isDirectory()) { return callback(); }
var stream = fs.createReadStream(currentFile).on('end', function () {
callback();
}).on('data', function (data) { concatenated += data.toString('utf8'); });
});
}, function () {
res.write(concatenated);
res.end();
});
});
But: Why? Why can't I call res.write multiple times instead of first summing up all the chunks, and then write them all at once?
Consider also using multistream, which allows you to combine and emit multiple streams one after another.
The code was perfectly fine; it was the unit test that was wrong ...
Fixed that, and now it works like a charm :-)
May help someone else:
const fs = require("fs");
const pth = require("path");
let readerStream1 = fs.createReadStream(pth.join(__dirname, "a.txt"));
let readerStream2 = fs.createReadStream(pth.join(__dirname, "b.txt"));
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));
// only readable streams have a "pipe" method;
// pipe sequentially and keep the writer open when the first stream ends (end: false),
// otherwise the first 'end' closes c.txt before b.txt is piped
readerStream1.pipe(writerStream, { end: false });
readerStream1.on("end", () => readerStream2.pipe(writerStream));
I also checked Rocco's answer and it's working like a charm:
//npm i --save multistream
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");
let streams = [
fs.createReadStream(pth.join(__dirname, "a.txt")),
fs.createReadStream(pth.join(__dirname, "b.txt"))
];
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));
//new multi(streams).pipe(process.stdout);
new multi(streams).pipe(writerStream);
and to send the results to the client:
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");
const exp = require("express");
const app = exp();
app.listen(3000);
app.get("/stream", (q, r) => {
new multi([
fs.createReadStream(pth.join(__dirname, "a.txt")),
fs.createReadStream(pth.join(__dirname, "b.txt"))
]).pipe(r);
});
