NodeJS: Reading all files in a dir line by line - node.js

I am fairly new to NodeJS. I am trying to read all files in a given dir and then print out the results line by line using the code below:
var fs = require('fs'),
    fsf = require('fs'),
    lazy = require('lazy');
var fr;
var dir = '/path/to/dir';
fs.readdir(dir, function(err, files){
    if (err) throw err;
    files.forEach(function(file){
        console.log(file);
        fr = fsf.createReadStream(file);
        //console.log(fr);
        new lazy(fr).lines.forEach(function(line){
            console.log(line.toString());
        });
    });
});
I am getting the following error:
Cannot call method 'toString' of undefined
Any pointers will be really appreciated!

Update: There were actually two issues:
1. (main) The blank lines in the individual files were causing this exception.
2. The hidden files were getting picked up by the program.
Corrected both; here is the refactored code:
var fs = require('fs'),
    lazy = require('lazy');
var fr;
var dir = '/path/to/dir';
fs.readdir(dir, function(err, files){ // Get a listing of all the files in the dir
    if (err) throw err;
    files.forEach(function(file){
        if (file.search('\\.md') != -1) { // Only read markdown files
            console.log(file);
            fr = fs.createReadStream(file);
            new lazy(fr).lines.forEach(function(line){
                if (typeof line != 'undefined'){ // Skip blank lines within the files
                    if ((line.toString().search('\\+') != -1) || (line.toString().search('#') != -1)){
                        console.log(line.toString());
                    }
                }
            });
        }
    });
});
The code itself seems fine and works with other directories and on other machines. After some investigation, it appears to be an issue with the .DS_Store hidden file in the directory. I was trying this on a Mac with OS X 10.9.4. I am not 100% sure, but for now that seems to be the likely cause of the error.
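For anyone landing here later: a minimal sketch of the same idea using Node's built-in readline module instead of lazy, which also joins the directory path and explicitly skips hidden files such as .DS_Store (the directory path and the .md filter are just placeholders carried over from the code above):
var fs = require('fs');
var path = require('path');
var readline = require('readline');

var dir = '/path/to/dir';

fs.readdir(dir, function (err, files) {
    if (err) throw err;
    files.forEach(function (file) {
        if (file.charAt(0) === '.') return;        // skip hidden files (e.g. .DS_Store)
        if (path.extname(file) !== '.md') return;  // only read markdown files
        var rl = readline.createInterface({
            input: fs.createReadStream(path.join(dir, file))
        });
        rl.on('line', function (line) {
            if (line.trim().length > 0) {          // skip blank lines
                console.log(line);
            }
        });
    });
});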
Thanks!

Related

Renaming multiple files asynchronously causing error in Node.js

I am trying to rename a bunch of pre-generated testing files (1000+) asynchronously in Node.js.
The code looks like the following:
const fs = require('fs')
const { each } = require('async')

each(files, file => {
    let newfile = 'new' + file
    fs.rename(file, newfile, err => {
        err ? console.log(err) : console.log('renamed')
    })
})
This leads to the following error:
Uncaught Error: ENOENT: no such file or directory, lstat '8d3320e35d22772f'
at fs.lstatSync (fs.js:902:18)
at Object.fs.lstatSync
It's not an async module issue, since replacing each with the native forEach leads to the same error. Also, there are no issues when using the synchronous version, fs.renameSync.
I think it's trying to move some file twice, but I can't figure out where exactly the mistake is. I made this assumption because all the files had already been renamed successfully, so the error was very likely generated afterwards. Can someone advise what is causing this behavior?
My bad. Just in case someone is curious, this error came from the following underlying function:
var fs = require('fs');
var path = require('path');

function rmDir(dir) {
    var list = fs.readdirSync(dir);
    for (var i = 0; i < list.length; i++) {
        var dirOrFile = path.join(dir, list[i]);
        var stat = fs.lstatSync(dirOrFile);
        if (dirOrFile == "." || dirOrFile == "..") {
            // pass these files
        } else if (stat.isDirectory()) {
            // rmdir recursively
            rmDir(dirOrFile);
        }
        // else { fs.unlinkSync(dirOrFile) } // rm filename
    }
    fs.rmdirSync(dir);
}
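As a side note, one way to avoid this kind of race is to let every rename report back through async's iteratee callback and only run the cleanup once the final callback fires. This is just a sketch, assuming the same files array, dir, and the rmDir helper above:
const fs = require('fs');
const { each } = require('async');

each(files, (file, done) => {
    const newfile = 'new' + file;
    fs.rename(file, newfile, done);   // pass the error (if any) straight to async
}, err => {
    if (err) return console.error(err);
    console.log('all files renamed');
    rmDir(dir);                        // safe to clean up now: no renames still in flight
});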

Creating and naming a new file according to a string - gulp

I'm a total newbie with Gulp and would really appreciate some assistance.
I am trying to create and name a new file using a string that exists in another file. This will give me the name of the white label that was deployed onto the server.
The content of the file that holds the string is (among other things) {"TITLE":"name_env"}
name_env should be the new name of the file, with the suffix .web, meaning that the new file would be named name_env.web.
What I've come up with so far is:
var gulp = require('gulp');
var file = require('gulp-file'); // assumption: file() comes from the gulp-file plugin

gulp.task('label', function () {
    var str = require('path/to/file/file.json');
    return file('label', str, {src: true})
        .pipe(gulp.dest('build/'));
});
Am I on the right track?
Hopefully I've managed to explain myself.
Here's a gulpfile which will do your task (assuming the dist folder already exists!):
var gulp = require('gulp');
var fs = require('fs');

gulp.task('label', function() {
    var buffer = JSON.parse(fs.readFileSync('path/to/file.json', 'utf8'));
    // write the parsed JSON back out as a string, into a file named after its TITLE field
    return fs.writeFile('dist/' + buffer['TITLE'] + '.web', JSON.stringify(buffer), { flag: 'wx' }, function(err) {
        if (err) throw err;
        console.log("It's saved!");
    });
});
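One small caveat with the snippet above: returning fs.writeFile does not tell gulp when the write has actually finished. A hedged variation (my assumption, not part of the original answer) is to take gulp's task callback so the task only completes once the file is on disk:
var gulp = require('gulp');
var fs = require('fs');

gulp.task('label', function (done) {
    var config = JSON.parse(fs.readFileSync('path/to/file.json', 'utf8'));
    fs.writeFile('dist/' + config.TITLE + '.web', JSON.stringify(config), { flag: 'wx' }, function (err) {
        if (err) return done(err); // let gulp report the failure
        console.log("It's saved!");
        done();                    // signal that the task has finished
    });
});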

NodeJS fs.rename always throws an error

I don't understand one thing with fs.rename(): why does this code always throw an error?
The file is correctly renamed, but each time I get this error: Error: ENOENT: no such file or directory, rename '/path/to/file/example.log' -> '/path/to/file/example_1.log'
var fs = require('fs');
var mv = fs.rename('example.log', 'example_1.log', function(err) {
    if (err instanceof Error)
        throw err;
});
When I run your exact code:
var fs = require('fs');
var mv = fs.rename('example.log', 'example_1.log', function(err) {
    if (err instanceof Error)
        throw err;
});
(the only difference is adding the fs require).
Then I get the error when I don't have the file in the current directory.
But when I create example.log and run the program then I don't get any error and the file is renamed.
Of course next time I get the error again because I no longer have the file with the original name.
Are you sure that it doesn't work for you?
By the way, instead of
if (err instanceof Error)
you may want to use:
if (err)
Just in case your error is not an instance of Error but is still defined (this can be true when using some modules).
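If you want the rename to tolerate a missing source file instead of throwing, a minimal sketch (not from the answer above) is to check the error code:
var fs = require('fs');

fs.rename('example.log', 'example_1.log', function (err) {
    if (err) {
        if (err.code === 'ENOENT') {
            console.warn('example.log does not exist, nothing to rename');
            return;
        }
        throw err; // any other error is unexpected
    }
    console.log('renamed');
});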

Node.js moving contents of subdirectory into current directory

I have a Node script that downloads a zip into tmp/archive.zip and extracts that to tmp/archive.
I would like to move the contents of tmp/archive into the current directory (.). I'm having difficulty finding how to use fs.rename in a way that is equivalent to mv tmp/archive/* .
I have tried fs.rename('tmp/archive/*', '.', function(err){ but that gives me the following error: Error: ENOENT: no such file or directory, rename 'tmp/archive/*' -> '.'
I have also tried using glob to list the contents of tmp/archive and then iterate through it and move the files using fs-extra's move, as follows:
glob('tmp/archive/*', {}, function(err, files){
    for (var i = files.length - 1; i >= 0; i--) {
        fs.move(files[i], '.', function(err){});
    }
}.bind(this));
which results in the following error: Error: EEXIST: file already exists, link 'tmp/archive/subdirectory' -> '.'
I could just call mv tmp/archive/* . from the script, but I would like to avoid that if possible. Is there something obvious I am missing? How can I go about doing this?
Here's one way to move a directory of files from one location to another (assuming they are on the same volume and thus can be renamed rather than copied):
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require('path');

function moveFiles(srcDir, destDir) {
    return fs.readdirAsync(srcDir).map(function(file) {
        var destFile = path.join(destDir, file);
        console.log(destFile);
        return fs.renameAsync(path.join(srcDir, file), destFile).then(function() {
            return destFile;
        });
    });
}

// sample usage:
moveFiles(path.join(".", "tempSource"), path.join(".", "tempDest")).then(function(files) {
    // all done here
}).catch(function(err) {
    // error here
});
This will move both files and sub-directories in the srcDir to destDir. Since fs.rename() will move a sub-directory all at once, you don't have to traverse recursively.
When designing a function like this, you have a choice of error behavior. The above implementation aborts upon the first error. You could change the implementation to move all files possible and then just return a list of files that could not be moved.
Here's a version that renames all files that it can and if there were any errors, it rejects at the end with a list of the files that failed and their error objects:
function moveFilesAll(srcDir, destDir) {
    return fs.readdirAsync(srcDir).map(function(file) {
        var destFile = path.join(destDir, file);
        var srcFile = path.join(srcDir, file);
        return fs.renameAsync(srcFile, destFile).then(function() {
            return {file: srcFile, err: 0};
        }).catch(function(err) {
            console.log("error on " + srcFile);
            return {file: srcFile, err: err};
        });
    }).then(function(files) {
        var errors = files.filter(function(item) {
            return item.err !== 0;
        });
        if (errors.length > 0) {
            // reject with a list of error files and their corresponding errors
            throw errors;
        }
        // for success, return list of all files moved
        return files.filter(function(item) {
            return item.file;
        });
    });
}

// sample usage:
moveFilesAll(path.join(".", "tempSource"), path.join(".", "tempDest")).then(function(files) {
    // all done here
}).catch(function(errors) {
    // list of errors here
});
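For newer Node versions (10+), roughly the same moveFiles can be written against the built-in fs.promises API instead of Bluebird; this is just a sketch of the equivalent, not part of the original answer:
const fsp = require('fs').promises;
const path = require('path');

async function moveFiles(srcDir, destDir) {
    const files = await fsp.readdir(srcDir);
    // rename every entry in parallel and resolve with the destination paths
    return Promise.all(files.map(async (file) => {
        const destFile = path.join(destDir, file);
        await fsp.rename(path.join(srcDir, file), destFile);
        return destFile;
    }));
}

// sample usage:
moveFiles(path.join('.', 'tempSource'), path.join('.', 'tempDest'))
    .then(files => { /* all done here */ })
    .catch(err => { /* error here */ });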

NodeJs Method Chaining of fs.mkdir and fs.rename

I'm working on a file uploader script which creates a new folder (based on the timestamp) and moves the uploaded file to the created folder.
Sometimes it works and sometimes I'm getting an ENOENT rename error (file/folder does not exist).
The following code is in my post route:
var form = new multiparty.Form({
    uploadDir: "C:" + path.sep + "files"
});

form.parse(req, function(err, fields, files) {
    var dirPath = "C:" + path.sep + "files" + path.sep + +new Date;
    fs.mkdir(dirPath, 0777, function(err, dirPath){
        if (err) console.error(err);
        console.log("Created new folder");
        fs.rename(files.file[i].path, dirPath + path.sep + files.file[i].originalFilename, function(err){
            if (err) console.error(err);
            console.log("Moved file");
        });
    }(err, dirPath));
    next();
});
I'm using express(4) and the multiparty module.
As you can see I'm using async functions.
So the question is: What is wrong with my code?
Edit
The error I'm talking about: "Error: ENOENT, rename 'C:\files\7384-1r41cry.png'"
It has something to do with a race condition. With fs.mkdirSync everything works fine.
My guess would be some sort of race condition happening here.
This kind of stuff is easy to get wrong and hard to get right.
I normally use gulp for this kind of stuff, and maybe you should too :)
Copying a whole directory tree into some other directory couldn't be easier:
gulp.src('./inputDir/**/*').pipe(gulp.dest('./outputDir'))
and all files from inputDir would be copied into outputDir.
But maybe copying is not an option. The files could be too large, right?
Let's hack it a bit to make it work the way we want.
var fs = require('fs')
  , gulp = require('gulp')
  , thr = require('through2').obj
  , SRC = './test/**/*.{css,html,js}' // it could be 'my/file/located/here'
  , OUT = './output' // it could be 'my/newer/file/located/there'
  ;

gulp.src(SRC)
    .pipe(thr(function(chunk, enc, next){
        chunk.contents = new Buffer('') // clearing the contents of the file
        chunk._originalPath = chunk.path
        next(null, chunk)
    }))
    .pipe(gulp.dest(OUT))
    .pipe(thr(function(chunk, enc, next){
        // now a placeholder file exists at our destination,
        // so we can write to it, confident that it exists
        console.log('moving file from', chunk._originalPath, 'to', chunk.path)
        fs.rename(chunk._originalPath, chunk.path, function(err){
            if (err) return next(err)
            next()
        })
    }))
This moves all css, html and js files from input to output, regardless of how many nested directories there are.
gulp is awesome :)
OK, a few things...
You really should start using promises; it makes the code easier to read and the error handling is way superior. I usually use when.js, but there are other alternatives.
You should throw or return on errors, otherwise you will keep running the function even when the previous operations have failed.
You do
if (err) console.error(err);
which should be
if (err) throw err;
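To tie this back to the question: following the promise advice, a hedged sketch of the same mkdir + rename flow where the rename only starts after mkdir has reported back and every error is propagated. It assumes a single uploaded file (files.file[0]) and Node's built-in fs.promises API; form, req, and next come from the question's code:
const fsp = require('fs').promises;
const path = require('path');

form.parse(req, (err, fields, files) => {
    if (err) return next(err);
    const dirPath = "C:" + path.sep + "files" + path.sep + Date.now();
    const upload = files.file[0]; // assumption: one uploaded file
    fsp.mkdir(dirPath)
        .then(() => fsp.rename(upload.path, path.join(dirPath, upload.originalFilename)))
        .then(() => {
            console.log("Created folder and moved file");
            next();
        })
        .catch(next); // any mkdir/rename error goes to Express' error handler
});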
