DELETE FILES with Node.js

I'm trying to delete some files and then show a message.
EXPECTED OUTPUT
File deleted
Folder Cleared!!!
ACTUAL OUTPUT
Folder Cleared!!!
File deleted
The current code is:
function clearConverted() {
  const resp = new Promise(async (resolve) => {
    const converted = glob.sync('./converted/*.mp4');
    if (converted.length) {
      const fs = require('fs');
      const promises = converted.map(v => {
        fs.unlink(v, () => console.log('File deleted'))
      })
      Promise.all(promises);
    } else {
      console.log('No files to delete');
    }
    resolve();
  })
  resp.then(console.log('Folder Cleared!!!'))
}
Can you help me?

Per my original comments on your question, you have heaps to fix with your code -
Don't use require inside a loop
fs.unlink does not return a Promise. You're probably looking for fsPromises.unlink
See: What is the explicit Promise constructor anti-pattern and how do I avoid it?
const { unlink } =
  require("fs").promises // <-- fsPromises

function clearConverted() {
  const converted =
    glob.sync("./converted/*.mp4")
  if (converted.length === 0)
    return Promise.resolve([])
  const promises =
    converted.map(v => unlink(v).then(_ => v))
  return Promise.all(promises)
}

clearConverted() // <-- returns a promise!
  .then(deletedFiles => console.log("done! deleted files:", deletedFiles))
  // done! deleted files: ./converted/foo.mp4, ./converted/bar.mp4
And see how we removed console.log side effects from the function? This allows our function to collect meaningful data, like the file names, and return a list of deleted files. Because the console.log effect is now outside of clearConverted we can change it, if we wish.
For example, we could simply display the count of deleted files, in a less verbose program -
clearConverted()
  .then(deletedFiles =>
    console.log("done! deleted %d files.", deletedFiles.length)
  )
  // done! deleted 9 files.
And we can do more. An obvious improvement now is to make clearConverted a generic function which accepts the path as an argument -
function unlinkFiles (path = "") { // <-- generic name and path parameter
  const files =
    glob.sync(path)
  if (files.length === 0)
    return Promise.resolve([])
  else
    return Promise.all(files.map(f => unlink(f).then(_ => f)))
}

unlinkFiles("./converted/*.mp4") // <-- supply path at call site
  .then(deletedFiles => console.log("done! deleted files:", deletedFiles))
  .catch(console.error) // <-- don't forget to catch Promises too
Modern features like async allow us to skip some of the ceremony around Promises -
async function unlinkFiles (path = "") { // <-- async keyword
  const files =
    glob.sync(path)
  return files.length === 0
    ? [] // <-- automatically wrapped in a Promise
    : Promise.all(files.map(f => unlink(f).then(_ => f)))
}
Now if you wanted, you could make the function accept a glob result instead of a path -
const unlinkFiles = async (files = []) => // <-- arrow function
  files.length === 0
    ? []
    : Promise.all(files.map(f => unlink(f).then(_ => f)))

unlinkFiles(glob.sync("./converted/*.mp4")) // <-- use glob as input
  .then(console.log, console.error)
When you detangle the wires, programming can be fun and easy. Sadly, languages like JavaScript also make it easy to shoot yourself in the foot, so there's a lot of misery before enlightenment.
I have other answers involving the fs module and Promises. These code examples may provide additional insight -
Recursively rename files
Recursively read all package.json
Recursively list all subdirectories

When you call Promise.all, add a .then method there, something like this:
Promise.all(promises).then((response) => console.log("Folder Cleared")).catch((error) => console.log(error))
Also, when you require modules or dependencies, declare them at the top of the file, not inside functions or loops.
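Putting both points together, a minimal sketch of the OP's clearConverted, assuming the glob module and Node's fs.promises API:
const glob = require('glob');               // declared at the top of the file
const { unlink } = require('fs').promises;  // promise-based unlink, not the callback API

function clearConverted() {
  // unlink every converted file; Promise.all resolves once all deletions finish
  return Promise.all(
    glob.sync('./converted/*.mp4').map(file => unlink(file))
  );
}

clearConverted()
  .then(() => console.log('Folder Cleared!!!'))
  .catch(err => console.log(err));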

Related

Node JS: How to catch the individual errors while reading files, in case multiple files are read on Promise.all?

I have 10 different files and I need to read their content and merge it into one object (in NodeJS). I am successfully doing that with the code below:
const fs = require('fs');
const path = require('path');
const { promisify } = require("util");

const readFileAsync = promisify(fs.readFile);

let filePathArray = ['path/to/file/one', ... , 'path/to/file/ten'];

Promise.all(
  filePathArray.map(filePath => {
    return readFileAsync(filePath);
  })
).then(responses => { // array of 10 responses
  let combinedFileContent = {};
  responses.forEach((itemFileContent, index) => {
    let tempContent = JSON.parse(itemFileContent);
    // merge tempContent into combinedFileContent
  });
});
But what I wonder is: how do I catch an error while trying to read one of the files? When reading a single file, this works like:
fs.readFile(singleFilePath, (singleFileErr, singleFileContent) => {
  if (singleFileErr) {
    // do something on error, while trying to read the file
  }
});
So my question is: how can I access the error in the first code snippet that corresponds to singleFileErr in the second code snippet?
The issue I am facing is: if some of the files do not exist, I want to check the error and skip those files, but since I cannot detect the error with the current implementation, the whole block crashes and I am not able to merge the other 9 files because of this one. I want to use the error check I mentioned in the second snippet.
Check out the Promise.allSettled function, which will run every Promise passed to it, and will tell you at the end which ones succeeded and which ones failed.
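A minimal sketch of how that could look with the readFileAsync helper from the question (Promise.allSettled requires Node 12.9 or later; the fulfilled-only filter is just one way you might handle the results):
Promise.allSettled(
  filePathArray.map(filePath => readFileAsync(filePath))
).then(results => {
  // each result is either { status: 'fulfilled', value } or { status: 'rejected', reason }
  let combinedFileContent = {};
  results
    .filter(result => result.status === 'fulfilled')
    .forEach(result => {
      let tempContent = JSON.parse(result.value);
      // merge tempContent into combinedFileContent
    });
});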
Maybe try something like this:
In the map() callback, return a promise that resolves to null if the file is not found.
Introduce a middle stage in the promise chain filtering out null responses.
This would look something like this:
Promise.all(
  filePathArray.map(filePath => {
    return readFileAsync(filePath).catch(function(error){
      if (isErrorFileDoesNotExist(error)) return null
      throw error;
    })
  })
).then(responses => {
  return responses.filter(response => response != null)
})
.then(filteredResponses => {
  // .. do something
});
Would that work for you? Note this presupposes you are actually able to discriminate missing-file errors from the other errors with which the promise returned by readFileAsync() may reject - presumably via the isErrorFileDoesNotExist() function in this snippet.
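As a sketch, the hypothetical isErrorFileDoesNotExist() helper could simply check the error code Node's fs sets for a missing file:
function isErrorFileDoesNotExist(error) {
  return error.code === 'ENOENT'; // "no such file or directory"
}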

Refactor code with promises to read files and convert them to json

I'm trying to do the following: read the content of a directory to find all the .xml files (I'm using glob, but I'd like to use something like fs.readdir from fs), then read every file using fs.readFile and convert each XML file to a JSON object. I'm using xml2json for this purpose.
Once I have the JSON objects, I would like to iterate over every one of them to get one property out of each and push it to an array. Eventually, all the code is wrapped in a function that logs the content of the array (once it is complete). This code currently works, but I'm getting into the famous callback hell.
const fs = require('fs');
const glob = require('glob');
const parser = require('xml2json');

let connectors = []

function getNames(){
  glob(__dirname + '/configs/*.xml', {}, (err, files) => {
    for (let j = 0; j < files.length; j++) {
      fs.readFile(files[j], function(err, data) {
        try {
          let json = parser.toJson(data, {object: true, alternateTextNode: true, sanitize: true})
          for (let i = 0; i < json.properties.length; i++){
            connectors.push(json.properties[i].name)
            if (connectors.length === files.length){ return console.log(connectors) }
          }
        }
        catch(e){
          console.log(e)
        }
      });
    }
  })
}

getNames()
However, I'd like to move to a cleaner and more elegant solution (using promises). I've been reading the community and found some ideas in similar posts here or here.
I'd like to have your opinion on how I should proceed in this kind of situation. Should I go for a sync version of readFile instead? Should I use promisifyAll to refactor my code and use promises everywhere? If so, could you please elaborate on what my code should look like?
I've also learned that there's a promise-based version of fs from Node v10.0.0 onwards. Should I go for that option? If so, how should I proceed with the parser.toJson() part? I've also seen that there's another promise-based version called xml-to-json-promise.
I'd really appreciate your insights into this, as I'm not very familiar with promises when there are several asynchronous operations and loops involved, so I end up with dirty solutions for situations like this one.
Regards,
J
I would indeed suggest that you use the promise-version of glob and fs, and then use async, await and Promise.all to get it all done.
NB: I don't see the logic behind the connectors.length === files.length check, as in theory the number of connectors (properties) can be greater than the number of files. I assume you want to collect all of them, irrespective of their number.
So here is how the code could look (untested):
const fs = require('fs').promises; // Promise-version (node 10+)
const glob = require('glob-promise'); // Promise-version
const parser = require('xml2json');

async function getNames() {
  let files = await glob(__dirname + '/configs/*.xml');
  let promises = files.map(fileName => fs.readFile(fileName).then(data =>
    parser.toJson(data, {object: true, alternateTextNode: true, sanitize: true})
      .properties.map(prop => prop.name)
  ));
  return (await Promise.all(promises)).flat();
}

getNames().then(connectors => {
  // rest of your processing that needs access to connectors...
});
As you write in the comments that you have problems accessing properties.map, perform some validation and skip the cases where there is no properties property:
const fs = require('fs').promises; // Promise-version (node 10+)
const glob = require('glob-promise'); // Promise-version
const parser = require('xml2json');

async function getNames() {
  let files = await glob(__dirname + '/configs/*.xml');
  let promises = files.map(fileName => fs.readFile(fileName).then(data =>
    (parser.toJson(data, {object: true, alternateTextNode: true, sanitize: true})
      .properties || []).map(prop => prop.name)
  ));
  return (await Promise.all(promises)).flat();
}

getNames().then(connectors => {
  // rest of your processing that needs access to connectors...
});

Fs.writeFile callback not called

Node version: 8.11.2
I have a simple CSV export function that takes an array of objects and generates the headers of the file based on the properties of the objects.
const exportCsv = (list, fileName) => {
  if (list.length > 0) {
    let headers = Object.keys(list[0]);
    let opts = { headers };
    let parser = new Parser(opts);
    let csv = parser.parse(list);

    fs.writeFile(`./output/${fileName}.csv`, csv, err => {
      if (err) {
        console.error(err);
      }
      console.log(`Wrote ${fileName} to disk.`);
    });
  } else {
    console.log('List is Empty. Nothing to export.');
  }
};
It was working great, but now the callback in the fs.writeFile call isn't firing, and there are no errors or exceptions from VS Code's debugger.
What would cause it to not run?
If the process exits before the write has completed, your callback will not be called, because the write is asynchronous.
So you have a couple of options:
make sure your process does not exit before the write is done (you can use async/await or a promise, as sketched below)
use writeFileSync instead (less efficient but less confusing)
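A minimal sketch of the first option, using util.promisify (since the question mentions Node 8, which predates fs.promises); list, Parser, and the calling code are placeholders taken from the question, not a drop-in implementation:
const fs = require('fs');
const { promisify } = require('util');

const writeFileAsync = promisify(fs.writeFile);

const exportCsv = async (list, fileName) => {
  if (list.length === 0) {
    console.log('List is Empty. Nothing to export.');
    return;
  }
  const parser = new Parser({ headers: Object.keys(list[0]) }); // Parser as in the question
  const csv = parser.parse(list);

  // awaiting keeps the work on the promise chain until the write finishes
  await writeFileAsync(`./output/${fileName}.csv`, csv);
  console.log(`Wrote ${fileName} to disk.`);
};

// the caller must also await (or .then) the returned promise so the
// process doesn't exit before the file has been written
exportCsv(list, 'export').catch(console.error);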

Read file with NodeJS returns `ENOENT no such file or directory`

I'm trying to read a file, but I always get the error Error: ENOENT: no such file or directory, open 'SB01028A.RET'. The file name is correct, and the file exists because I put it in my Home/sentbox directory.
What did I do wrong here?
Code:
function downloadFile () {
  return new Promise((resolve, reject) => {
    try {
      const testFolder = `${require('os').homedir()}/sentbox`
      fs.readdir(testFolder, (err, files) => {
        if (err) {
          return reject(err)
        }
        files.forEach(fileRetorno => {
          const retorno = fs.readFileSync(fileRetorno, 'UTF8')
          return resolve(retorno)
        })
      })
    } catch (err) {
      return reject(err)
    }
  })
}
You have a number of things wrong in your code, like:
Using synchronous file reading within a promise when you could be making your file reading asynchronous
Using try/catch in an asynchronous context without wrapping that in an async/await function
Not using the results of fs.readdir() correctly
Attempting to resolve/reject a promise that could have already been resolved or rejected
Using require() in a loop, and in an asynchronous context
fs.readdir() is going to return all the entry names within that directory, both files and directories, as an array. Before you can call fs.readFile() you'll need to check whether each entry is a file or a directory, and you'll need to join() the entry name with the path of the directory read by readdir(), in this case testFolder.
When returning using Promises (async/await wraps promises but still uses them), you can only resolve each promise once. So resolving the same promise multiple times to return different values doesn't work; the same is true for rejecting multiple times. Instead, you'll need to return your values in an Array or an Object. For the scenario in the above code, using an Object would be more ideal since you can associate the file contents with a key for reference and better access later on.
I've used async/await to clean up this code; this gives you a synchronous-looking development approach while keeping the functionality of promises. You can read more about async/await on MDN. I've also promisified all of the needed asynchronous fs functions using util.promisify()
The code below will
Read all of the entries in testFolder
Filter the entries array to only include files by calling stat() for each entry and checking if it is a file.
stat() will return an fs.Stats object that can tell us whether an entry is a file via stats.isFile(). Since Array#filter() expects a synchronous boolean (an async callback would return a Promise, which is always truthy), the stats are gathered first with Promise.all() and the filtering is done on those results
Read every file with readFile() and reduce the results with Array#reduce() into an object with each file name as a key to its contents
const fs = require('fs')
const { promisify } = require('util')
const os = require('os')
const path = require('path')

const readdir = promisify(fs.readdir)
const readFile = promisify(fs.readFile)
const stat = promisify(fs.stat)

const downloadFile = async () => {
  const testFolder = `${os.homedir()}/sentbox`

  // Get all the entries in the directory async
  const entries = await readdir(testFolder)

  // Stat every entry, then keep only the file entries
  const stats = await Promise.all(
    entries.map(entry => stat(path.join(testFolder, entry)))
  )
  const files = entries.filter((entry, index) => stats[index].isFile())

  // Read every file and reduce the results into an object
  // with each file name as a key to its contents
  const contents = await Promise.all(
    files.map(file => readFile(path.join(testFolder, file), 'utf8'))
  )
  return files.reduce((fileContents, file, index) => {
    fileContents[file] = contents[index]
    return fileContents
  }, {})
}

How to get console.log line numbers shown in Nodejs?

Got an old application that prints out quite a lot of messages using console.log, but I just cannot find in which files and on which lines console.log is called.
Is there a way to hook into the app and show file names and line numbers?
Having a full stack trace for each call is a bit noisy. I've improved noppa's solution to print only the initiator:
['log', 'warn', 'error'].forEach((methodName) => {
  const originalMethod = console[methodName];
  console[methodName] = (...args) => {
    let initiator = 'unknown place';
    try {
      throw new Error();
    } catch (e) {
      if (typeof e.stack === 'string') {
        let isFirst = true;
        for (const line of e.stack.split('\n')) {
          const matches = line.match(/^\s+at\s+(.*)/);
          if (matches) {
            if (!isFirst) { // first line - current function
              // second line - caller (what we are looking for)
              initiator = matches[1];
              break;
            }
            isFirst = false;
          }
        }
      }
    }
    originalMethod.apply(console, [...args, '\n', ` at ${initiator}`]);
  };
});
It also patches other methods (useful for Nodejs, since warn and error don't come with a stack trace as in Chrome).
So your console would look something like:
Loading settings.json
at fs.readdirSync.filter.forEach (.../settings.js:21:13)
Server is running on http://localhost:3000 or http://127.0.0.1:3000
at Server.app.listen (.../index.js:67:11)
For a temporary hack to find the log statements that you want to get rid of, it's not too difficult to override console.log yourself.
var log = console.log;
console.log = function() {
  log.apply(console, arguments);
  // Print the stack trace
  console.trace();
};

// Somewhere else...
function foo(){
  console.log('Foobar');
}
foo();
That will print something like
Foobar
Trace
at Console.console.log (index.js:4:13)
at foo (index.js:10:13)
at Object.<anonymous> (index.js:12:1)
...
A lot of noise in there but the second line in the call stack, at foo (index.js:10:13), should point you to the right place.
All solutions to this question so far rely on splitting and matching the stack trace as a string, which will break in the (unlikely) case that the format of that string changes in the future. Inspired by this gist on GitHub and the other answers here, I want to provide my own solution:
'use strict';

const path = require('path');

['debug', 'log', 'warn', 'error'].forEach((methodName) => {
  const originalLoggingMethod = console[methodName];
  console[methodName] = (firstArgument, ...otherArguments) => {
    const originalPrepareStackTrace = Error.prepareStackTrace;
    Error.prepareStackTrace = (_, stack) => stack;
    const callee = new Error().stack[1];
    Error.prepareStackTrace = originalPrepareStackTrace;
    const relativeFileName = path.relative(process.cwd(), callee.getFileName());
    const prefix = `${relativeFileName}:${callee.getLineNumber()}:`;
    if (typeof firstArgument === 'string') {
      originalLoggingMethod(prefix + ' ' + firstArgument, ...otherArguments);
    } else {
      originalLoggingMethod(prefix, firstArgument, ...otherArguments);
    }
  };
});

// Tests:
console.log('%s %d', 'hi', 42);
console.log({ a: 'foo', b: 'bar' });
Unlike the other solutions, this script
outputs no additional lines and
handles string substitutions correctly.
You can color the prefix with chalk or color.js, but I didn't want to introduce dependencies for this here.
The above script uses the V8 API to customize stack traces. The callee is a CallSite object with the following methods, in case you want to customize the prefix (a small usage sketch follows the list):
getThis: returns the value of this
getTypeName: returns the type of this as a string. This is the name of the function stored in the constructor field of this, if available, otherwise the object’s [[Class]] internal property.
getFunction: returns the current function
getFunctionName: returns the name of the current function, typically its name property. If a name property is not available an attempt is made to infer a name from the function’s context.
getMethodName: returns the name of the property of this or one of its prototypes that holds the current function
getFileName: if this function was defined in a script returns the name of the script
getLineNumber: if this function was defined in a script returns the current line number
getColumnNumber: if this function was defined in a script returns the current column number
getEvalOrigin: if this function was created using a call to eval returns a string representing the location where eval was called
isToplevel: is this a top-level invocation, that is, is this the global object?
isEval: does this call take place in code defined by a call to eval?
isNative: is this call in native V8 code?
isConstructor: is this a constructor call?
isAsync: is this an async call (i.e. await or Promise.all())?
isPromiseAll: is this an async call to Promise.all()?
getPromiseIndex: returns the index of the promise element that was followed in Promise.all() for async stack traces, or null if the CallSite is not a Promise.all() call.
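For example, inside the patched logging method above, you could extend the prefix with the caller's function name via getFunctionName(), which returns null for anonymous functions (a small sketch, with '<anonymous>' as an assumed fallback):
// inside console[methodName], after `callee` and `relativeFileName` have been computed:
const functionName = callee.getFunctionName() || '<anonymous>';
const prefix = `${functionName} ${relativeFileName}:${callee.getLineNumber()}:`;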
This answer is a cross-post of an answer I just gave to a similar question as more people might find this page.
I found Dmitry Druganov's answer really nice, but I tried it on Windows 10 (with Node 8.9.4) and it didn't work well. It was printing the full path, something like:
Loading settings.json
at fs.readdirSync.filter.forEach (D:\Users\Piyin\Projects\test\settings.js:21:13)
Server is running on http://localhost:3000 or http://127.0.0.1:3000
at Server.app.listen (D:\Users\Piyin\Projects\test\index.js:67:11)
So I took said answer and made these improvements (from my point of view):
Assume the important line of the stack trace is the third one (the first one is the word Error and the second one is where you place this script)
Remove the current script folder path (given by __dirname, which in my case is D:\Users\Piyin\Projects\test). Note: for this to work well, the script should be in the project's main JavaScript file
Remove the starting at
Place the file information before the actual log
Format the information as Class.method at path/to/file:line:column
Here it is:
['log','warn','error'].forEach((methodName) => {
  const originalMethod = console[methodName];
  console[methodName] = (...args) => {
    try {
      throw new Error();
    } catch (error) {
      originalMethod.apply(
        console,
        [
          (
            error
              .stack // Grabs the stack trace
              .split('\n')[2] // Grabs third line
              .trim() // Removes spaces
              .substring(3) // Removes three first characters ("at ")
              .replace(__dirname, '') // Removes script folder path
              .replace(/\s\(./, ' at ') // Removes first parentheses and replaces it with " at "
              .replace(/\)/, '') // Removes last parentheses
          ),
          '\n',
          ...args
        ]
      );
    }
  };
});
And here's the new output:
fs.readdirSync.filter.forEach at settings.js:21:13
Loading settings.json
Server.app.listen at index.js:67:11
Server is running on http://localhost:3000 or http://127.0.0.1:3000
Here's the minified-by-hand code (240 bytes):
['log','warn','error'].forEach(a=>{let b=console[a];console[a]=(...c)=>{try{throw new Error}catch(d){b.apply(console,[d.stack.split('\n')[2].trim().substring(3).replace(__dirname,'').replace(/\s\(./,' at ').replace(/\)/,''),'\n',...c])}}});
Slightly modified version of noppa's answer, this version will output something like:
/file/in-which/console/is/called.js:75:23
The stuff you want to log.
This is clean and convenient (especially for use in VSCode - which will turn the file path into a link).
const { log } = console;

function proxiedLog(...args) {
  const line = (((new Error('log'))
    .stack.split('\n')[2] || '…')
    .match(/\(([^)]+)\)/) || [, 'not found'])[1];
  log.call(console, `${line}\n`, ...args);
}

console.info = proxiedLog;
console.log = proxiedLog;

// test
console.log('Hello!');
The snippet will only work well in a NodeJS environment…
Appends the line number to the end of the log
const stackTrace = function () {
  let obj = {}
  Error.captureStackTrace(obj, stackTrace)
  return obj.stack
}

const getLine = function (stack) {
  let matchResult = stack.match(/\(.*?\)|\s.+/g) || []
  let arr = matchResult.map((it) => {
    return it.split(' ').pop().replace(/\(|\)/g, '')
  })
  return arr[1] ?? ''
}

const log = function (...args) {
  let stack = stackTrace() || ''
  let matchResult = getLine(stack)
  let line = matchResult
  for (var i in arguments) {
    if (typeof arguments[i] == 'object') {
      // util.inspect(arguments[i], false, 2, false)
      arguments[i] = JSON.stringify(arguments[i])
    }
  }
  arguments[i] += ' ' + line
  console.log.apply(console, arguments)
}

log("test")
Simple & exhaustive solution if you want to temporarily find the origin of logs:
{
  const logOriginal = process.stdout.write
  // @ts-ignore
  const log = (msg) => logOriginal.call(process.stdout, msg + '\n')
  ;['stdout', 'stderr'].forEach((stdName) => {
    // @ts-ignore
    var methodOriginal = process[stdName].write
    // @ts-ignore
    process[stdName].write = function (...args) {
      log('LOG:')
      // @ts-ignore
      methodOriginal.apply(process[stdName], args)
      // @ts-ignore
      log(new Error().stack.replace(/^Error/, 'LOGGED FROM:'))
    }
  })
  Error.stackTraceLimit = Infinity
}
