How to add a line at a specific row in a file's content using node.js and Promises - node.js

Situation: I wrote some code that loops through a main directory and its subdirectories looking for .htm files. Once it finds an .htm file, it is supposed to add a line after the head tag, then continue looping through all the other .htm files in the main directory and perform the same action.
The code:
var fs = require("fs");
var path = require("path");
var mainDirectory = 'directory';

function addLineInFile() { // execute the steps needed to alter the file's content in sequential order.
    readContent()              // can be found below.
        .then(lookForHead)     // can be found below.
        .then(writeNewContent) // can be found below.
}
addLineInFile();

function readContent() {
    return new Promise((resolve, reject) => {
        FileContent = [];
        fs.readFile(extendedDirectoryPath, (err, data) => {
            fileContent = data.toString('utf8').split("\n");
            console.log("Read file content")
        });
        if (err) {
            reject(err);
        } else {
            resolve(FileContent);
        }
    });
}

function lookForHead(FileContent) {
    return new Promise((resolve, reject) => {
        var string = "<head>"
        for (i = 0; i < FileContent.length; i++) {
            console.log("Looking for <head>.")
            if (FileContent[i].indexOf(string) !== -1) {
                console.log("found <head>")
                FileContent.splice(i + 1, 0, 'line to be added')
            }
        }
        if (err) {
            reject(err);
        } else {
            resolve(FileContent);
        }
    });
}

function writeNewContent(FileContent) {
    return new Promise((resolve, reject) => {
        console.log("Started Writing to the file!")
        var file = fs.createWriteStream(extendedDirectoryPath);
        file.on('error', function(err) { /* error handling */ });
        FileContent.forEach(function(v) {
            file.write(v.join(', ') + '\n');
            console.log("Wrote a line to the file.")
        });
        file.end();
        if (err) {
            reject(err);
        } else {
            resolve(FileContent);
        }
    });
}
Problem: The file is written to BEFORE the content that has to be written to it is ready (take a look at the output). In other words, writeNewContent() is executed before readContent() and lookForHead() are done producing the content to be written to the file. I tried many different things before this, like callback functions, and was convinced Promises would be my solution, but perhaps I'm using them incorrectly? Please keep in mind that I don't know all that much about node.js and Promises; most of my work is copy-pasting from the internet and changing small parts to my liking.
Output:
Got file info successfully!
file1.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file2.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file3.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file4.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
someInnerDirectory is a innerDirectory
Delving deeper!
Got file info successfully!
file5.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file6.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file7.htm This is an htm file!
Started Writing to the file!
Read file content
Read file content
Read file content
Read file content
Read file content
Read file content
Read file content

Well done on your work so far and for sticking with it. Good to see you are using recursion too. The core problem is in how the Promises are constructed: in readContent() the err check and resolve(FileContent) sit outside the fs.readFile() callback, so they run right after the read is started rather than after it completes (and err isn't even defined in that scope). The same pattern is repeated in the other two functions. Move the resolve/reject calls inside the asynchronous callbacks so each Promise only settles when its work has actually finished. I did something similar a while back.
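A minimal sketch of that restructuring, with resolve/reject moved inside the callbacks; the 'line to be added' placeholder and the log messages come from the question, and the file path argument is a hypothetical stand-in for extendedDirectoryPath:
var fs = require("fs");

function readContent(filePath) {
    return new Promise(function(resolve, reject) {
        fs.readFile(filePath, 'utf8', function(err, data) {
            if (err) {
                reject(err);                      // settle only after readFile has finished
            } else {
                console.log("Read file content");
                resolve(data.split("\n"));        // pass the lines on to the next step
            }
        });
    });
}

function lookForHead(fileContent) {
    // purely synchronous work, so a plain return is enough here
    for (var i = 0; i < fileContent.length; i++) {
        if (fileContent[i].indexOf("<head>") !== -1) {
            console.log("found <head>");
            fileContent.splice(i + 1, 0, 'line to be added');
        }
    }
    return fileContent;
}

function writeNewContent(filePath, fileContent) {
    return new Promise(function(resolve, reject) {
        console.log("Started Writing to the file!");
        fs.writeFile(filePath, fileContent.join("\n"), 'utf8', function(err) {
            if (err) {
                reject(err);
            } else {
                resolve(fileContent);             // settle only after the write has finished
            }
        });
    });
}

function addLineInFile(filePath) {
    return readContent(filePath)
        .then(lookForHead)
        .then(function(fileContent) {
            return writeNewContent(filePath, fileContent);
        })
        .catch(function(err) {
            console.error(err);
        });
}

// hypothetical usage; the real path would come from the directory-traversal code
addLineInFile('path/to/file1.htm');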

Related

Is there a way to list the file extension of all files in a directory in node.js?

When I use fs.readdir() I only get the file name, and if I use .split(".") it won't work, since if I have a folder called a.exe it will say that .exe is the extension even though it's not, because folders don't have an extension.
Here is the code for fs.readdir()
fs.readdir("./", (err,data)=>{
if (err) throw err
console.log(data)
})
Console: (a.exe is a folder and aa.exe is a file)
[ 'a.exe', 'aa.exe' ]
Is there any way to get the file extension, or to tell whether it's a folder or a file (that would help too)?
https://i.stack.imgur.com/yE0oD.png
I used the 'file-system' module. Here is an example of how to use it:
fs.recurse('path', function(filepath, relative, filename) { });

fs.recurse('path', [
    '*.css',
    '**/*.js',
    'path/*.html',
    '!**/path/*.js'
], function(filepath, relative, filename) {
    if (filename) {
        // it's a file
    } else {
        // it's a folder
    }
});

// Only process files
fs.recurse('path', function(filepath, relative, filename) {
    if (!filename) return;
});
You can find more info in the module's documentation.
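For comparison, the same file-versus-folder check can be done with only the built-in fs and path modules (withFileTypes requires Node 10.10 or newer); a minimal sketch:
const fs = require('fs');
const path = require('path');

fs.readdir('./', { withFileTypes: true }, (err, entries) => {
    if (err) throw err;
    entries.forEach((entry) => {
        if (entry.isDirectory()) {
            console.log(entry.name + ' is a folder');
        } else {
            // path.extname is only meaningful for actual files
            console.log(entry.name + ' is a file with extension ' + path.extname(entry.name));
        }
    });
});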

Bad file descriptor, read, while extracting zip file using node-stream-zip

I have a zip file that has a folder like
1234/pic1.png
1234/pic2.png
1234/data.xlsx
I am trying to extract the spreadsheet (failing that, all files) using node-stream-zip.
const StreamZip = require('node-stream-zip');
const zip = new StreamZip({
    file: path.join(downloadsDir, fileToFind),
    storeEntries: true
});
zip.on('ready', () => {
    if (!fs.existsSync('extracted')) {
        fs.mkdirSync('extracted');
    }
    zip.extract('1234/', './extracted', err => {
        console.log(err);
    });
    zip.close();
});
This produces
EBADF: bad file descriptor, read
The extracted folder contains one of the png files. But when following the guide to extract just the xlsx file, it appears that the xlsx file is the one causing this error.
zip.extract('1234/data.xlsx', './extracted.xlsx', err => {
    console.log(err);
});
Is the problem with the xlsx file? I can open it manually. Is it permissions-related? Node? This particular package?
Your problem is related to zip.close(): you're closing the archive on the same tick as you're invoking zip.extract(), so the file descriptor is gone before extraction has finished.
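A minimal sketch of that fix, reusing the zip instance and paths from the question and closing the archive only once the extract callback has fired:
zip.on('ready', () => {
    if (!fs.existsSync('extracted')) {
        fs.mkdirSync('extracted');
    }
    zip.extract('1234/', './extracted', err => {
        if (err) {
            console.log(err);
        }
        // close the archive only after extraction has completed (or failed)
        zip.close();
    });
});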

Node.js moving contents of subdirectory into current directory

I have a Node script that downloads a zip into tmp/archive.zip and extracts that to tmp/archive.
I would like to move the contents of tmp/archive into the current directory (.), but I'm having difficulty finding how to use fs.rename in a way that is equivalent to mv tmp/archive/* .
I have tried fs.rename('tmp/archive/*', '.', function(err){ but that gives me the following error: Error: ENOENT: no such file or directory, rename 'tmp/archive/*' -> '.'
I have also tried using glob to list the contents of tmp/archive and then iterate through it and move the files using fs-extra's move, as follows:
glob('tmp/archive/*', {}, function(err, files) {
    for (var i = files.length - 1; i >= 0; i--) {
        fs.move(files[i], '.', function(err) {});
    }
}.bind(this));
which results in the following error: Error: EEXIST: file already exists, link 'tmp/archive/subdirectory' -> '.'
I could just call mv tmp/archive/* . from the script, but I would like to avoid that if possible. Is there something obvious I am missing? How can I go about doing this?
Here's one way to move a directory of files from one location to another (assuming they are on the same volume and thus can be renamed rather than copied):
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require('path');

function moveFiles(srcDir, destDir) {
    return fs.readdirAsync(srcDir).map(function(file) {
        var destFile = path.join(destDir, file);
        console.log(destFile);
        return fs.renameAsync(path.join(srcDir, file), destFile).then(function() {
            return destFile;
        });
    });
}

// sample usage:
moveFiles(path.join(".", "tempSource"), path.join(".", "tempDest")).then(function(files) {
    // all done here
}).catch(function(err) {
    // error here
});
This will move both files and sub-directories in the srcDir to destDir. Since fs.rename() will move a sub-directory all at once, you don't have to traverse recursively.
When designing a function like this, you have a choice of error behavior. The above implementation aborts upon the first error. You could change the implementation to move all files possible and then just return a list of files that could not be moved.
Here's a version that renames all files that it can and if there were any errors, it rejects at the end with a list of the files that failed and their error objects:
function moveFilesAll(srcDir, destDir) {
    return fs.readdirAsync(srcDir).map(function(file) {
        var destFile = path.join(destDir, file);
        var srcFile = path.join(srcDir, file);
        return fs.renameAsync(srcFile, destFile).then(function() {
            return {file: srcFile, err: 0};
        }).catch(function(err) {
            console.log("error on " + srcFile);
            return {file: srcFile, err: err};
        });
    }).then(function(files) {
        var errors = files.filter(function(item) {
            return item.err !== 0;
        });
        if (errors.length > 0) {
            // reject with a list of error files and their corresponding errors
            throw errors;
        }
        // for success, return list of all files moved
        return files.filter(function(item) {
            return item.file;
        });
    });
}

// sample usage:
moveFilesAll(path.join(".", "tempSource"), path.join(".", "tempDest")).then(function(files) {
    // all done here
}).catch(function(errors) {
    // list of errors here
});

How to deal with missing file when adding N number of files to NodeJs Archiver

I'm adding a series of files to a zip using Archiver, but in the case that a file is missing, say if it was deleted or moved, it obviously causes a problem that I'm not able to get around.
My current code is like so:
for (var i = 0; i < receivedIds.length; i++) {
    var filePath = './public/pdf/letter-pdfs/' + receivedIds[i] + '.pdf';
    console.log(filePath);
    try {
        pdfStream = fs.createReadStream(filePath);
        archive.append(pdfStream, {name: receivedIds[i] + '.pdf'});
    } catch (e) {
        console.error(e);
    }
}
Every time I try to wrap the append in a stream event like so:
pdfStream.on('readable', function() {
    archive.append(pdfStream, {name: receivedIds[i] + '.pdf'});
});
The error is caught but the Archiver just outputs some empty file, even though some of the files do exist. How can I update this to append the files that do exist, and simply ignore the ones that don't?
Simply make sure that the file exists before reading it into the archive:
for (var i = 0; i < receivedIds.length; i++) {
    var filePath = './public/pdf/letter-pdfs/' + receivedIds[i] + '.pdf';
    console.log(filePath);
    try {
        // will throw if it does not exist
        // alternatively, use fs.statSync or an async version of the two
        fs.accessSync(filePath);
        pdfStream = fs.createReadStream(filePath);
        archive.append(pdfStream, {name: receivedIds[i] + '.pdf'});
    } catch (e) {
        console.error(e);
    }
}
This way the lack of a file is detected before the archiver attempts to read from an invalid source.
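The async check hinted at in the comment could look roughly like this sketch, using fs.promises.access (archive and receivedIds are assumed to exist as in the question):
const fs = require('fs');

async function appendExistingPdfs(archive, receivedIds) {
    for (const id of receivedIds) {
        const filePath = './public/pdf/letter-pdfs/' + id + '.pdf';
        try {
            await fs.promises.access(filePath);   // rejects if the file is missing
            archive.append(fs.createReadStream(filePath), { name: id + '.pdf' });
        } catch (e) {
            console.error('skipping missing file: ' + filePath);
        }
    }
}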

NodeJS Reading all files in a dir by each line

I am fairly new to NodeJS. I am trying to read all files in a given dir and then print out their contents line by line using the code below.
var fs = require('fs'), fsf = require('fs'), lazy = require('lazy');
var fr;
var dir = '/path/to/dir';
fs.readdir(dir, function(err, files) {
    if (err) throw err;
    files.forEach(function(file) {
        console.log(file);
        fr = fsf.createReadStream(file);
        //console.log(fr);
        new lazy(fr).lines.forEach(function(line) {
            console.log(line.toString());
        });
    });
});
I am getting the following error
Cannot call method 'toString' of undefined
Any pointers will be really appreciated!
Update: There were actually two issues:
- (main) The blank lines in the individual files were causing this exception.
- The hidden files were getting picked up by the program.
Corrected both and here is the refactored code
var fs = require('fs'), lazy = require('lazy');
var fr;
var dir = '/path/to/dir';
fs.readdir(dir, function(err, files) { // Get a listing of all the files in the dir
    if (err) throw err;
    files.forEach(function(file) {
        if (file.search('\\.md') != -1) { // Only read markdown files
            console.log(file);
            fr = fs.createReadStream(file);
            new lazy(fr).lines.forEach(function(line) {
                if (typeof line != 'undefined') { // Skip blank lines within the files
                    if ((line.toString().search('\\+') != -1) || (line.toString().search('#') != -1)) {
                        console.log(line.toString());
                    }
                }
            });
        }
    });
});
The code seems fine and is working with other directories and on other machines. After some investigation it seems to be an issue with the .DS_Store hidden files in the directory. I was trying this on a Mac with OSX 10.9.4. I am not 100% sure, but for now that seems to be the likely cause of the error.
Thanks!
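As a side note, on current Node versions the line-by-line reading can be done without the lazy module by using the built-in readline interface; a rough sketch (joining the full path so it does not depend on the current working directory):
const fs = require('fs');
const path = require('path');
const readline = require('readline');

const dir = '/path/to/dir';

fs.readdir(dir, (err, files) => {
    if (err) throw err;
    files.forEach((file) => {
        const rl = readline.createInterface({
            input: fs.createReadStream(path.join(dir, file)),
            crlfDelay: Infinity
        });
        rl.on('line', (line) => {
            console.log(line);
        });
    });
});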
