Load random file from directory - node.js

So I'm building a game and I want to choose a random file from a directory as a map. I found this old topic which gave this answer:
const randomFile = require('random-file')
const dir = '/tmp/whatever'
randomFile(dir, (err, file) => {
  console.log(`The random file is: ${file}.`)
})
but it seems that package no longer works; fs is no longer used that way, only fs.realpath.

It's pretty simple:
const fs = require("fs");
const path = require("path");
fs.readdir(path.join(process.cwd(), "maps"), (err, files) => {
console.log(err, files)
let max = files.length - 1;
let min = 0;
let index = Math.round(Math.random() * (max - min) + min);
let file = files[index];
console.log("Random file is", file);
});
In less than 20 lines.
Why do people use an external module for every simple task?!
Not to mention that the package doesn't do what you want...
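For completeness, here is a promise-based variant of the same idea (a minimal sketch, untested; the "maps" folder name is taken from the snippet above):
const fs = require('fs/promises');
const path = require('path');

// Pick a uniformly random file name from the "maps" directory.
async function randomMap() {
  const files = await fs.readdir(path.join(process.cwd(), 'maps'));
  if (files.length === 0) throw new Error('no maps found');
  return files[Math.floor(Math.random() * files.length)];
}

randomMap().then((file) => console.log('Random file is', file));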

Related

Nodejs ffmpeg "The input file path can not be empty" error, but files exist

I'm trying to merge an audio file with a video file from the same source (YouTube).
In the following code I first read the console parameters with commander, then I define the videoOutput path and download the highest-resolution video from YouTube with node-ytdl-core. After that I download the audio for the video, and in the callback of video.on("end", ...)
I call the function merge().
const path = require('path');
const fs = require('fs');
const readline = require("readline");
const ytdl = require('ytdl-core');
const { program } = require('commander');
const ffmpeg = require('ffmpeg');
program
  .option("--url, --url <url>", "Youtube video url")
  .option("--name, --name <name>", "Name of the video in hard drive");
program.parse(process.argv);
const options = program.opts();

let url = options.url;
let name = options.name;
let videoOutput = path.resolve(`./video${name}.mp4`);

let video = ytdl(url, {
  quality: "highestvideo"
});
let starttime = 0;
video.pipe(fs.createWriteStream(videoOutput));
video.once('response', () => {
  starttime = Date.now();
});
video.on('progress', (chunkLength, downloaded, total) => {
  const percent = downloaded / total;
  const downloadedMinutes = (Date.now() - starttime) / 1000 / 60;
  const estimatedDownloadTime = (downloadedMinutes / percent) - downloadedMinutes;
  readline.cursorTo(process.stdout, 0);
  process.stdout.write(`${(percent * 100).toFixed(2)}% downloaded `);
  process.stdout.write(`(${(downloaded / 1024 / 1024).toFixed(2)}MB of ${(total / 1024 / 1024).toFixed(2)}MB)\n`);
  process.stdout.write(`running for: ${downloadedMinutes.toFixed(2)}minutes`);
  process.stdout.write(`, estimated time left: ${estimatedDownloadTime.toFixed(2)}minutes `);
  readline.moveCursor(process.stdout, 0, -1);
});
video.on('end', () => {
  process.stdout.write('\n\n');
});
// repeat for audio
video = ytdl(url, {
  quality: "highestaudio"
});
starttime = 0;
let audioOutput = path.resolve(`./audio${name}.mp3`);
video.pipe(fs.createWriteStream(audioOutput));
video.once('response', () => {
  starttime = Date.now();
});
video.on('progress', (chunkLength, downloaded, total) => {
  const percent = downloaded / total;
  const downloadedMinutes = (Date.now() - starttime) / 1000 / 60;
  const estimatedDownloadTime = (downloadedMinutes / percent) - downloadedMinutes;
  readline.cursorTo(process.stdout, 0);
  process.stdout.write(`${(percent * 100).toFixed(2)}% downloaded `);
  process.stdout.write(`(${(downloaded / 1024 / 1024).toFixed(2)}MB of ${(total / 1024 / 1024).toFixed(2)}MB)\n`);
  process.stdout.write(`running for: ${downloadedMinutes.toFixed(2)}minutes`);
  process.stdout.write(`, estimated time left: ${estimatedDownloadTime.toFixed(2)}minutes `);
  readline.moveCursor(process.stdout, 0, -1);
});
function merge() {
  ffmpeg()
    .input("./videotest.mp4")  // your video file input path
    .input("./audiotest.mp3")  // your audio file input path
    .output("./finished.mp4")  // your output path
    .outputOptions(['-map 0:v', '-map 1:a', '-c:v copy', '-shortest'])
    .on('start', (command) => {
      console.log('TCL: command -> command', command)
    })
    .on('error', (error) => console.log("errrrr", error))
    .on('end', () => console.log("Completed"))
    .run()
}
video.on('end', () => {
  process.stdout.write('\n\n');
  merge();
});
But even though the files are there, ffmpeg throws the "The input file path can not be empty" error.
I also tried this in the video's end callback, because maybe the audio finishes downloading before the video; it still doesn't work. I've also tried renaming the output paths for the files and keeping the old files while rerunning the script, so the files are 100% there. Still doesn't work.
I have also tried absolute paths ("C:/...", also with backslashes "C:\...") but I still get the error message that the input file path can't be empty.
Appreciate any piece of advice or help!
OK, I have found a solution.
First I moved my code for downloading the audio into the video.on("end", ...) callback of the video download.
Then I changed from require("ffmpeg") to these 5 lines:
const ffmpeg = require('fluent-ffmpeg');
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const ffprobePath = require('@ffprobe-installer/ffprobe').path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);
Make sure to run:
npm i fluent-ffmpeg
npm i @ffmpeg-installer/ffmpeg
npm i @ffprobe-installer/ffprobe
That's my solution!
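For reference, with that setup the merge step looks roughly like this (a sketch based on the question's code, untested; videoOutput and audioOutput are the paths defined there, and the output name is the same ./finished.mp4):
const ffmpeg = require('fluent-ffmpeg');
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const ffprobePath = require('@ffprobe-installer/ffprobe').path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);

function merge() {
  ffmpeg()
    .input(videoOutput)  // video file downloaded first
    .input(audioOutput)  // audio file downloaded in the video's "end" callback
    .output("./finished.mp4")
    .outputOptions(['-map 0:v', '-map 1:a', '-c:v copy', '-shortest'])
    .on('start', (command) => console.log('ffmpeg command:', command))
    .on('error', (error) => console.log('ffmpeg error:', error))
    .on('end', () => console.log('Completed'))
    .run();
}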

How can I find the oldest folder in a node directory?

I am trying to find the oldest folder in a directory of my Node.js app.
Right now I get the folder names with fs.readdirSync, and then I try to loop over their mtime values with fs.stat in a for loop. But it does not return any values.
cron.schedule('* * * * *', () => {
  folders = fs.readdirSync(__dirname + '/uploads/');
  var oldestTime;
  var oldest;
  for (var i = 0; i < folders.length; i++) {
    let stats = fs.statSync(folders[i]);
    if (oldestTime == undefined || stats.mtime < oldestTime) {
      oldestTime = stats.mtime;
      oldest = folders[i];
    }
  }
  console.log("oldest folder name is:", oldest)
});
Is there a better way?
Thank you so much!
You are using the asynchronous version of fs.stat(). Try the synchronous (callback-free) fs.statSync() instead, and pass the full path: readdirSync returns only the entry names, so you need to prepend the uploads directory before calling statSync.
Also note that readdir returns files as well as directories; you may want to exclude non-directories from your mtime search.
And please spend some time reading up on the JavaScript asynchronous programming model.
folders = fs.readdirSync(__dirname + "/uploads");
var oldestTime;
var oldest;
for (var i = 0; i < folders.length; i++) {
  let stats = fs.statSync(__dirname + "/uploads/" + folders[i]);
  if (oldestTime == undefined || stats.mtime < oldestTime) {
    oldestTime = stats.mtime;
    oldest = folders[i];
  }
}
console.log("oldest folder name is:", oldest)
or use sort to get the oldest one:
const fs = require('fs');
const cron = require("cron");

var job = new cron.CronJob('* * * * * *', function () {
  const folders = fs.readdirSync(__dirname + "/uploads");
  var oldest = folders.sort((a, b) =>
    fs.statSync(__dirname + '/uploads/' + a).mtime - fs.statSync(__dirname + '/uploads/' + b).mtime
  )[0];
  console.log("oldest folder name is:", oldest)
});
job.start();
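If the uploads directory contains many entries, it can also be cheaper to stat each entry only once before sorting instead of inside the comparator; a rough sketch along the same lines (untested, assuming the same __dirname + '/uploads' layout):
const fs = require('fs');
const path = require('path');

const uploads = path.join(__dirname, 'uploads');

// Stat every entry once, keep only directories, then sort oldest-first by mtime.
const oldest = fs.readdirSync(uploads)
  .map((name) => ({ name, stats: fs.statSync(path.join(uploads, name)) }))
  .filter((entry) => entry.stats.isDirectory())
  .sort((a, b) => a.stats.mtime - b.stats.mtime)[0];

console.log("oldest folder name is:", oldest && oldest.name);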

How to get a folder size in nodejs with excluding certain paths with glob pattern?

I want to calculate the total archive file size before archiving, to show a progress bar.
Some folders are excluded from zipping; they are defined with a glob pattern.
How can you get a folder size with a glob filter?
It appears you can't use regular expressions; you can use https://www.npmjs.com/package/glob and loop through the files that match your glob pattern, and get the size of each. Something roughly like (I haven't tested this code):
const fs = require('fs')
const glob = require('glob')

let totalSize = 0 // bytes
const options = {} // glob options are optional
glob("**/*.js", options, function (er, files) {
  files.forEach(f => {
    totalSize += fs.statSync(f).size // add each matched file's size in bytes
  })
})
With the help of the above answer, this is the solution:
const glob = require('glob');
const fs = require('fs');

function getFolderSizeByGlob(folder, { ignorePattern: array }) {
  const filePaths = glob.sync('**', { // "**" means you search the whole folder
    cwd: folder,      // folder path
    ignore: array,    // array of glob pattern strings
    absolute: true,   // glob has to return absolute paths, not only file names
  });
  let totalSize = 0;
  filePaths.forEach((file) => {
    console.log('file', file);
    const stat = fs.statSync(file);
    totalSize += stat.size;
  });
  return totalSize;
}
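A hypothetical usage example (the folder path and ignore patterns below are made up for illustration):
// Total size in bytes of everything under ./public, skipping node_modules and *.log files.
const size = getFolderSizeByGlob('./public', {
  ignorePattern: ['node_modules/**', '**/*.log'],
});
console.log((size / 1024 / 1024).toFixed(2) + ' MB');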

Trying to download a file in a loop with node.js, but seems to get some sync problems

I'm trying to make a simple program in Node.js that downloads the same file at some interval. If the downloaded file is newer than the previous one, it will be saved under a new filename with the help of a counter.
If it's a new file, I want to save it under the name last_unique.jpg and use it to compare against the next time the file is downloaded. But it doesn't seem to work. For the test, I just have an empty last_unique.jpg that I would expect to be overwritten. But it never is, so every time the jpg file is downloaded it is treated as unique and saved as file3.jpg, file4.jpg, etc.
However, the output also looks like there may be some async issues? It skips the first couple of times.
OUTPUT:
downloading 1
downloading 2
downloading 3
Unique file spotted!
downloading 4
Unique file spotted!
downloading 5
Unique file spotted!
downloading 6
Unique file spotted!
downloading 7
Unique file spotted!
downloading 8
Unique file spotted!
Here is the code:
const http = require('http');
const fs = require('fs');
const md5File = require('md5-file');

const fileToDownload = "http://i3.ytimg.com/vi/J---aiyznGQ/mqdefault.jpg";
var counter = 0;

function request() {
  counter = counter + 1
  console.log("downloading " + counter);
  const save = fs.createWriteStream("last_download.jpg");
  http.get(fileToDownload, function(response) {
    response.pipe(save)
  });
  const hash1 = md5File.sync('last_download.jpg');
  const hash2 = md5File.sync('last_unique.jpg');
  // it is a new file
  if (hash1.localeCompare(hash2) != 0) {
    console.log('Unique file spotted!');
    fs.copyFileSync('last_download.jpg','last_unique.jpg');
    fs.copyFileSync('last_unique.jpg','file' + counter + '.jpg');
  }
}
setInterval(request, 3000);
const http = require('http');
const fs = require('fs');
const md5File = require('md5-file');

const fileToDownload = "http://i3.ytimg.com/vi/J---aiyznGQ/mqdefault.jpg";
var counter = 0;

function request() {
  counter = counter + 1;
  console.log("downloading " + counter);
  const save = fs.createWriteStream("last_download.jpg");
  http.get(fileToDownload, function(response) {
    response.pipe(save);
    response.on('end', function () {
      save.end();
    })
  });
  // Compare hashes only after the file has been completely written to disk.
  save.on('finish', function () {
    const hash1 = md5File.sync('last_download.jpg');
    const hash2 = md5File.sync('last_unique.jpg');
    console.log(hash1, hash2);
    // it is a new file
    if (hash1.localeCompare(hash2) != 0) {
      console.log('Unique file spotted!');
      fs.copyFileSync('last_download.jpg','last_unique.jpg');
      fs.copyFileSync('last_unique.jpg','file' + counter + '.jpg');
    }
  });
}
setInterval(request, 3000);
You need to listen for the finish event on the stream; otherwise it may be the case that you call the copy function before the stream has been completely written. In that case a partial image is copied from last_download.jpg to last_unique.jpg, which means the hashes would be different. This is due to the asynchronous nature of streams and the HTTP request.
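For what it's worth, on Node 15+ the same guarantee can be had with stream.pipeline from 'stream/promises', which resolves only once the response has been fully written to disk. A rough sketch reusing the question's URL and file names (untested):
const http = require('http');
const fs = require('fs');
const { pipeline } = require('stream/promises');
const md5File = require('md5-file');

const fileToDownload = "http://i3.ytimg.com/vi/J---aiyznGQ/mqdefault.jpg";
var counter = 0;

async function request() {
  counter = counter + 1;
  console.log("downloading " + counter);
  // Wait for the response, then pipe it to disk and wait until the write finishes.
  const response = await new Promise((resolve, reject) => {
    http.get(fileToDownload, resolve).on('error', reject);
  });
  await pipeline(response, fs.createWriteStream("last_download.jpg"));
  // Only now is the file guaranteed to be complete on disk.
  const hash1 = md5File.sync('last_download.jpg');
  const hash2 = md5File.sync('last_unique.jpg');
  if (hash1 !== hash2) {
    console.log('Unique file spotted!');
    fs.copyFileSync('last_download.jpg', 'last_unique.jpg');
    fs.copyFileSync('last_unique.jpg', 'file' + counter + '.jpg');
  }
}

setInterval(() => request().catch(console.error), 3000);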

Get latest files inside a folder

I have a folder structure like
var/
  testfolder1/
    myfile.rb
    data.rb
  testfolder2/
    home.rb
    sub.rb
  sample.rb
  rute.rb
The var folder contains subfolders (testfolder1, testfolder2) and some files (sample.rb, rute.rb).
The following code returns a JSON object that contains the folders and files inside the var folder, like
{
  '0': ['sample.rb', 'rute.rb'],
  testfolder1: ['myfile.rb', 'data.rb'],
  testfolder2: ['home.rb', 'sub.rb']
}
Code:
var scriptsWithGroup = {};
fs.readdir('/home/var/', function(err, subfolder) {
  if (err) return context.sendJson({}, 200);
  var scripts = [];
  for (var j = 0; j < subfolder.length; j++) {
    var scriptsInFolder = [];
    if (fs.lstatSync(scriptPath + subfolder[j]).isDirectory()) {
      fs.readdirSync(scriptPath + subfolder[j]).forEach(function(file) {
        if (file.substr(file.length - 3) == '.rb')
          scriptsInFolder.push(file);
      });
      scriptsWithGroup[subfolder[j]] = scriptsInFolder;
    } else {
      if (subfolder[j].substr(subfolder[j].length - 3) == '.rb')
        scripts.push(subfolder[j]);
    }
  }
  scriptsWithGroup["0"] = scripts;
  console.log(scriptsWithGroup)
  context.sendJson(scriptsWithGroup, 200);
});
What I need is to return the latest modified or created files. Here I only use two files inside each folder, but in reality the folders contain lots of files, so I want to return the latest created ones.
I'm going to assume here that you want only the two most recent files. If you actually want them all, just sorted, remove the slice portion of this:
scriptsInFolder = scriptsInFolder.sort(function(a, b) {
  // or mtime, if you only care about content changes and not file attribute changes
  // (note: statSync needs the full path, not just the bare file name)
  var time1 = fs.statSync(scriptPath + subfolder[j] + '/' + a).ctime;
  var time2 = fs.statSync(scriptPath + subfolder[j] + '/' + b).ctime;
  // sort newest first
  if (time1 > time2) return -1;
  if (time1 < time2) return 1;
  return 0;
}).slice(0, 2);
I'll add, however, that it's typically considered best practice not to use the synchronous fs methods (e.g. fs.statSync). If you're able to install async, that would be a good alternative approach.
const fs = require('fs');
const path = require('path');

const getMostRecentFile = (dir) => {
  const files = orderRecentFiles(dir);
  return files.length ? files[0] : undefined;
};

const orderRecentFiles = (dir) => {
  return fs.readdirSync(dir)
    .filter((file) => fs.lstatSync(path.join(dir, file)).isFile())
    .map((file) => ({ file, mtime: fs.lstatSync(path.join(dir, file)).mtime }))
    .sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
};

const dirPath = '<PATH>';
console.log(getMostRecentFile(dirPath));
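To tie this back to the question's use case (the latest .rb files per subfolder), here is a rough sketch, assuming the same /home/var/ layout; latestRubyFiles is a hypothetical helper name:
const fs = require('fs');
const path = require('path');

// Hypothetical helper: newest-first list of .rb files in one folder,
// reusing the orderRecentFiles idea from the answer above.
const latestRubyFiles = (dir, count) =>
  fs.readdirSync(dir)
    .filter((file) => file.endsWith('.rb') && fs.lstatSync(path.join(dir, file)).isFile())
    .map((file) => ({ file, mtime: fs.lstatSync(path.join(dir, file)).mtime }))
    .sort((a, b) => b.mtime.getTime() - a.mtime.getTime())
    .slice(0, count)
    .map((entry) => entry.file);

console.log(latestRubyFiles('/home/var/testfolder1', 2));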
