Execute Multiple .js Files From Terminal In Sequence - node.js

I have about 100 JS files, each containing a different script:
file1.js, file2.js, file3.js, ..., file100.js
Right now, to execute each file I have to go to my terminal and do this:
node file1
node file2
node file3
.
.
node file100
That means I have to do this 100 times in the terminal. Is there a script I can put in a single JS file that executes all 100 JS files in sequence?
I also want to wait 3 seconds between executions, which I believe I can achieve with the following code:
var interval = 3000;
var promise = Promise.resolve();

promise = promise.then(function () {
    return new Promise(function (resolve) {
        setTimeout(resolve, interval);
    });
});
Any suggestion on how to execute all 100 JS files without typing node file# 100 times in the terminal?

Here is a script that does that. It reads a list of file names from a text file (exec.txt) and runs each one with node, so instead of listing full commands like
node file1
..
node filex
you only list the file names: file1, file2, file3, ..., filex (one per line). Also look into child_process before using this code.
const { readFileSync } = require('fs');
const { exec } = require('child_process');

// run a single "node <file>" command and print its output
function executeEachFile(command) {
    exec(command, (error, stdout, stderr) => {
        if (error) {
            console.log(error);
        }
        console.log("stdout: " + stdout);
    });
}

// read the list of file names (one per line) from a text file
function readFile(path) {
    const output = readFileSync(path, "utf8").split("\n").map(file => {
        return file.replace("\r", "");
    });
    console.log(output);
    return output;
}

// start one file every 3 seconds until the list is exhausted
function IntervalLoop(ArrayofFiles) {
    let counter = 0;
    const interval = setInterval(() => {
        const commandName = "node " + ArrayofFiles[counter];
        console.log(commandName);
        executeEachFile(commandName);
        counter++;
        console.log(counter);
        if (ArrayofFiles.length == counter) {
            clearInterval(interval);
        }
    }, 3000); // 3 seconds between executions, as requested
}

const fileNames = readFile("exec.txt");
IntervalLoop(fileNames);
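If the files really are named file1.js through file100.js and live in the current directory, a simpler alternative skips the list file entirely. A minimal sketch, assuming it is acceptable to block on each script with execSync:

const { execSync } = require('child_process');

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

(async () => {
    for (let i = 1; i <= 100; i++) {
        // stdio: 'inherit' forwards each script's console output to this terminal
        execSync(`node file${i}.js`, { stdio: 'inherit' });
        await sleep(3000); // wait 3 seconds before starting the next file
    }
})();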

Related

Listening to Mac pbcopy events in node

On Mac OS using Node, I would like to:
1. Copy a code snippet from Stack Overflow, e.g. console.log('hello world')
2. Access that code snippet console.log('hello world') in a running Node program
This is how I imagined it would work
var child_process = require('child_process');
var child = child_process.spawn('pbcopy');

child.stdin.on('data', function (data) {
    // runs every time I copy something to my clipboard
    console.log('data' + data);
});
This doesn't work, and neither does the 'readable' event.
I have seen this example of writing to the clipboard using pbcopy: https://stackoverflow.com/a/13735363/3893510
Thanks for your help.
Edit: I have a very crude way to do it using node copy-paste and a timeout
var ncp = require("copy-paste");

const listOfCopiedItems = [];

function trackAllCopiedText (prev = "") {
    const text = ncp.paste();
    if (prev !== text) {
        listOfCopiedItems.push({
            text,
            dateTime: new Date()
        });
    }
    setTimeout(() => {
        trackAllCopiedText(text);
    }, 1000);
}

trackAllCopiedText();
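A variation on the same polling idea that avoids the copy-paste dependency is to shell out to pbpaste directly. A rough sketch, assuming macOS (where pbpaste is available) and that polling once per second is acceptable:

const { execFile } = require('child_process');

let previous = '';

// poll the macOS clipboard once a second by running pbpaste
setInterval(() => {
    execFile('pbpaste', (err, stdout) => {
        if (err) return console.error(err);
        if (stdout !== previous) {
            previous = stdout;
            console.log('clipboard changed:', stdout);
        }
    });
}, 1000);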

Run all js files sequentially with console log

I need to execute some JS files one after another; those files print important information via console.log.
I'm currently trying the following code, but it does not show the console.log output from the sub js files.
How can I get this approach to work?
const fs = require('fs');
const exec = require('child_process').exec;
const async = require('async'); // npm install async
const path = require('path');

const scriptsFolder = path.resolve("./") + "\\scripts\\"; // folder named "scripts"
const files = fs.readdirSync(scriptsFolder);
const targetFiles = files.filter(function (file) {
    return path.extname(file).toLowerCase() === '.js';
});

const funcs = targetFiles.map(function (file) {
    return exec.bind(null, `node ${scriptsFolder}${file}`);
});

function getResults(err, data) {
    if (err) {
        return console.log(err);
    }
    const results = data.map(function (lines) {
        return lines.join('');
    });
    console.log(results);
}

async.series(funcs, getResults);
As there is no interaction between the js files, I just use a batch script to run all of them sequentially, as below:
for /R %%f in (*.js) do if not %%f==%~dpnx0 node "%%f"
Why do you execute each js file in a separate process?
Try requiring the files one after the other, with no child_process.
Check this answer for a great example of dynamically requiring a whole folder.
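A minimal sketch of that require approach, assuming each script does its work as soon as it is loaded and that the folder name below is adjusted to your project:

const fs = require('fs');
const path = require('path');

const scriptsFolder = path.join(__dirname, 'scripts'); // hypothetical folder name

fs.readdirSync(scriptsFolder)
    .filter((file) => path.extname(file).toLowerCase() === '.js')
    .forEach((file) => {
        // runs each script in this process, so its console.log output appears directly
        require(path.join(scriptsFolder, file));
    });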
You can consider running the scripts using execSync with the stdio: "inherit" option to direct the output from each script to the parent process:
const { execSync } = require('child_process');

// fs, path and scriptsFolder as in the question
const files = fs.readdirSync(scriptsFolder);
const targetFiles = files.filter(function (file) {
    return path.extname(file).toLowerCase() === '.js';
});

for (let file of targetFiles) {
    execSync(`node ${scriptsFolder}/${file}`, {
        stdio: 'inherit'
    });
}

Reading node child process (spawn) stdout with line breaks

I'm trying to run a child process in node.js with
function runProcess (args) {
    args = args || [];
    var proc = spawn('node', [path.join(__dirname, '..', 'bin', 'library.js')].concat(args), {cwd: __dirname});
    proc.stdout.setEncoding('utf8');
    return proc;
}
I have a test in which I use the function above:
describe('test', function () {
    it('should list installed generators', function (done) {
        var process = runProcess();
        var data = '';

        process.stdout.on('data', function (data) {
            var str = data.toString(), lines = str.split(/(\r?\n)/g);
            for (var i = 0; i < lines.length; i++) {
                console.log("chucnk ---------> " + lines[i]); // only prints one line
            }
        });

        process.on('close', function (code) {
            code.should.equal(0);
            data.should.match(/\[process\] ├── bad/);
            data.should.match(/\[process\] └── test/);
            done();
        });
    });
});
The process is using chalk and a file called log.js to display the console output:
var chalk = require('chalk');

module.exports = function () {
    'use strict';
    var sig = '[' + chalk.green('process') + ']';
    var args = Array.prototype.slice.call(arguments);
    args.unshift(sig);
    console.log.apply(console, args); // Invoked 3 times!
    return this;
};
If I run the process manually I can see the expected output:
[process] Installed generators
[process] ├── bad
[process] └── test
but when I run it in the unit test, lines 2 and 3 are missing:
chucnk ---------> [process] Installed generators
chucnk --------->
chucnk --------->
I tried the approach proposed in NodeJS spawn stdout string format, with no luck.
Do you know why I can't read the lines after the first one?
This worked for me:
child.stdout.on('data', (data) => {
    process.stdout.write(data);
});
but I think you said you tried this already
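Another option is to let the readline module do the line splitting, since the 'data' chunks of a spawned process are not guaranteed to align with line breaks. A sketch, assuming runProcess() from the question:

const readline = require('readline');

const proc = runProcess();
const rl = readline.createInterface({ input: proc.stdout });

// fires once per complete line, regardless of how the chunks arrive
rl.on('line', function (line) {
    console.log('line ---------> ' + line);
});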

Read newest csv records with Node

I'd like to watch a CSV file and get the newest records whenever it changes. I'm running the following shell command to build a very simple csv file and append a new line every second:
rm test.csv & x=0 && while true; do echo "${x},${x},${x}" >> test.csv; x=$(($x+1)); sleep 1; done
The following code prints all the records of the file until the first change and then just emits the dashed line, as if it's not re-reading the file:
'use strict';

var fs = require('fs'),
    dataFile = __dirname + '/server/data/test.csv',
    csv = require('csv');

var parser = csv.parse({delimiter: ','}, function (err, data) {
    console.log(data);
});

var watcher = fs.watch(dataFile);
watcher.on('change', fileChange);

function fileChange(e, fn) {
    if (e) console.error(e);
    fs.createReadStream(dataFile).pipe(parser);
    console.log('-------');
}
Shouldn't the fileChange function re-read the file on every change? My ultimate plan here is to get both the previous array of lines and the current one and use lodash's difference function to return only the differences. If there's a better way, I'm open to hearing it, though.
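For reference, a rough sketch of that lodash difference idea, assuming the file stays small enough to re-read in full on each change and that lines are unique (as they are in the test data above):

const fs = require('fs');
const _ = require('lodash'); // npm install lodash

let previousLines = [];

// returns only the lines that were not present on the previous read
function newLinesSince(path) {
    const currentLines = fs.readFileSync(path, 'utf8').split('\n').filter(Boolean);
    const added = _.difference(currentLines, previousLines);
    previousLines = currentLines;
    return added;
}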
My guess is that fs.createReadStream() has opened the file and it's not being closed. So on the second event fs.createReadStream() fails. No bueno.
Try using fs.readFile() instead, like this:
function fileChange(e, fn) {
    if (e) console.error(e);
    // pass an encoding so data is a string rather than a Buffer
    fs.readFile(dataFile, 'utf8', function (err, data) {
        if (err) throw err;
        console.log(data);
        console.log('-------');
    });
}
See the documentation here: http://nodejs.org/api/fs.html#fs_fs_readfile_filename_options_callback
I ended up solving the issue by stat-ing the file on each change and streaming only the bytes that were appended since the last read:
'use strict';

var fs = require('fs'),
    dataFile = __dirname + '/server/data/test.csv',
    readSize = 0,
    csv = require('csv');

var parser = csv.parse();
parser.on('readable', function (data) {
    var record;
    while (record = parser.read()) {
        console.log(record);
    }
});

var watcher = fs.watch(dataFile);
watcher.on('change', fileChange);

// fires when the watched file changes
function fileChange(e, fn) {
    // get these synchronously
    var stats = fs.statSync(dataFile);
    // if the file hasn't grown, wait half a second and try again
    if (stats.size <= readSize) {
        setTimeout(fileChange, 500);
        return;
    }
    // read the stream from the last offset to the new end of the file
    var stream = fs.createReadStream(dataFile, {start: readSize, end: stats.size});
    stream.on('data', function (chunk) {
        parser.write(chunk.toString());
    });
    readSize = stats.size;
}
Any feedback on why this may not work would be appreciated.

Piping multiple file streams using Node.js

I want to stream multiple files, one after another, to the browser. To illustrate, think of having multiple CSS files which shall be delivered concatenated as one.
The code I am using is:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
    async.eachSeries(files, function (file, callback) {
        if (!endsWith(file, '.css')) { return callback(); } // (1)

        var currentFile = path.join(directory, file);
        fs.stat(currentFile, function (err, stats) {
            if (stats.isDirectory()) { return callback(); } // (2)

            var stream = fs.createReadStream(currentFile).on('end', function () {
                callback(); // (3)
            });
            stream.pipe(res, { end: false }); // (4)
        });
    }, function () {
        res.end(); // (5)
    });
});
The idea is that I
1. filter out all files that do not have the file extension .css,
2. filter out all directories,
3. proceed with the next file once a file has been read completely,
4. pipe each file to the response stream without closing it,
5. end the response stream once all files have been piped.
The problem is that only the first .css file gets piped, and all remaining files are missing. It's as if (3) jumped directly to (5) after the first (4).
The interesting thing is that if I replace line (4) with
stream.on('data', function (data) {
    console.log(data.toString('utf8'));
});
everything works as expected: I see multiple files. If I then change this code to
stream.on('data', function (data) {
    res.write(data.toString('utf8'));
});
all files except the first are missing again.
What am I doing wrong?
PS: The error happens using Node.js 0.8.7 as well as using 0.8.22.
UPDATE
Okay, it works if you change the code as follows:
var directory = path.join(__dirname, 'css');

fs.readdir(directory, function (err, files) {
    var concatenated = '';
    async.eachSeries(files, function (file, callback) {
        if (!endsWith(file, '.css')) { return callback(); }

        var currentFile = path.join(directory, file);
        fs.stat(currentFile, function (err, stats) {
            if (stats.isDirectory()) { return callback(); }

            var stream = fs.createReadStream(currentFile).on('end', function () {
                callback();
            }).on('data', function (data) {
                concatenated += data.toString('utf8');
            });
        });
    }, function () {
        res.write(concatenated);
        res.end();
    });
});
But: Why? Why can't I call res.write multiple times instead of first summing up all the chunks, and then write them all at once?
Consider also using multistream, which allows you to combine and emit multiple streams one after another.
The code was perfectly fine; it was the unit test that was wrong ...
Fixed that, and now it works like a charm :-)
May help someone else:
const fs = require("fs");
const pth = require("path");

let readerStream1 = fs.createReadStream(pth.join(__dirname, "a.txt"));
let readerStream2 = fs.createReadStream(pth.join(__dirname, "b.txt"));
let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// only readable streams have a "pipe" method;
// keep the writer open after the first file ({ end: false }) and start the second
// pipe once the first stream ends, so a.txt and b.txt are written to c.txt in order
readerStream1.pipe(writerStream, { end: false });
readerStream1.on("end", () => readerStream2.pipe(writerStream));
I also checked Rocco's answer and it's working like a charm:
// npm i --save multistream
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");

let streams = [
    fs.createReadStream(pth.join(__dirname, "a.txt")),
    fs.createReadStream(pth.join(__dirname, "b.txt"))
];

let writerStream = fs.createWriteStream(pth.join(__dirname, "c.txt"));

// new multi(streams).pipe(process.stdout);
new multi(streams).pipe(writerStream);
and to send the results to the client:
const multi = require('multistream');
const fs = require('fs');
const pth = require("path");
const exp = require("express");

const app = exp();
app.listen(3000);

app.get("/stream", (q, r) => {
    new multi([
        fs.createReadStream(pth.join(__dirname, "a.txt")),
        fs.createReadStream(pth.join(__dirname, "b.txt"))
    ]).pipe(r);
});
