Renamed file not getting the assigned name - node.js

I'm watching a certain directory, and when a new file is added to that directory I want to rename it, but the rename isn't working as expected. The directory-watching part works fine, but when the newly added file gets renamed, the name I assign is repeated over and over. For example, if the new name I assign is thisIsName, the renamed file ends up as thisIsNamethisIsNamethisIsNamethisIsName. How can I make the rename use the assigned name without any repetition? Any help is appreciated. Thanks in advance.
const fs = require("fs");
const chokidar = require('chokidar');
const watcher = chokidar.watch('filePath', {
ignored: /(^|[\/\\])\../,
persistent: true
});
function yyyymmdd() {
var now = new moment();
return now.format("YYYYMMDD");
}
function hhmmss() {
var now = new moment();
return now.format("HHmmss");
}
const log = console.log.bind(console);
//watching a certain directory for any update to it
watcher
.on('add', path => {
const newFileName = "filePath\\" + yyyymmdd() + hhmmss() + path
//trying to rename the file, but its not working because newFileName is somehow getting looped and having multiple iterations of the DATE and TIME in the new name when getting renamed. Example of what the product looks like is included above in the question.
fs.renameSync(path, newFileName);
})
.on('change', path => {
log(`File ${path} has been changed`)
})
.on('unlink', path => {
log(`File ${path} has been removed`)
})

I've made some small changes to your code and it worked for me with any file format (including files without an extension). Use it as you like. The only thing you missed was the usage of "path":
const moment = require('moment');
const fs = require('fs');
const chokidar = require('chokidar');
const path = require('path');

const log = console.log.bind(console);

function formattedDate() {
  return moment().format('YYYYMMDDHHmmss');
}

// here I've used some folder with name "folder" in the same directory as this file
const filePath = path.join(__dirname, `./folder`);

const watcher = chokidar.watch(filePath, {
  ignored: /(^|[\/\\])\../,
  persistent: true
});

watcher
  .on('add', addedFilePath => {
    const fileExt = path.extname(addedFilePath);
    const newFilePath = path.join(__dirname, `./folder/${formattedDate()}${fileExt}`);
    fs.renameSync(addedFilePath, newFilePath);
  })
  .on('change', changedFilePath => {
    log(`File ${changedFilePath} has been changed`);
  })
  .on('unlink', removingFilePath => {
    log(`File ${removingFilePath} has been removed`);
  });
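If the files dropped into the folder are large, it may also help to let chokidar wait for the write to finish before emitting 'add', so you never rename a half-written file. A minimal sketch of that option (the threshold values are just examples):

const watcher = chokidar.watch(filePath, {
  ignored: /(^|[\/\\])\../,
  persistent: true,
  // wait until the file size has been stable for 2 seconds before emitting 'add'
  awaitWriteFinish: { stabilityThreshold: 2000, pollInterval: 100 }
});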

Related

Recursively read image files in a provided folder

I am given a folder's absolute path and I want to extract the paths of all text files in that folder (recursively).
This is what I've tried:
const fs = require('fs-extra');
const path = require('path');

const getFolderImagesRecursive = async (folderPath) => {
  const directoryChildren = await fs.readdir(folderPath);
  return directoryChildren.reduce(async (finalArray, directoryChild) => {
    const fullPath = path.join(folderPath, directoryChild);
    const pathStat = await fs.lstat(fullPath);
    if (pathStat.isDirectory()) {
      const recursiveResult = await getFolderImagesRecursive(fullPath);
      return [
        ...finalArray,
        ...recursiveResult,
      ];
    } else if (fullPath.split('.').pop() === 'txt') {
      return [
        ...finalArray,
        fullPath,
      ]
    } else {
      return finalArray;
    }
  }, []);
}
For testing purposes I've created dummy folders with text files and further folders nested inside. When I tried the function on the main test folder I got: TypeError: object is not iterable (cannot read property Symbol(Symbol.iterator)) on line 21.
Does anyone see the error and know how to fix it?
The problem is that array.reduce is a synchronous function, and when you pass it an async callback, the accumulator it hands to the next iteration is a promise, not an array. So after the first iteration finalArray is a promise that resolves to an array, and you're trying to spread that promise on line 21. You could rewrite your code using a loop instead of async/await, like this:
const fs = require('fs-extra');
const path = require('path');

const getFolderImagesRecursive = (folderPath) => {
  const directoryChildren = fs.readdirSync(folderPath);
  const finalArray = [];
  directoryChildren.forEach(directoryChild => {
    const fullPath = path.join(folderPath, directoryChild);
    const pathStat = fs.lstatSync(fullPath);
    if (pathStat.isDirectory()) {
      const recursiveResult = getFolderImagesRecursive(fullPath);
      // concat returns a new array, so push the recursive results instead
      finalArray.push(...recursiveResult);
    } else if (fullPath.split('.').pop() === 'txt') {
      finalArray.push(fullPath);
    }
  });
  return finalArray;
}
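Alternatively, if you want to keep the async fs-extra calls, you can keep reduce and simply await the accumulator inside the callback. A minimal sketch of that variant (same function name as in the question):

const fs = require('fs-extra');
const path = require('path');

const getFolderImagesRecursive = async (folderPath) => {
  const directoryChildren = await fs.readdir(folderPath);
  return directoryChildren.reduce(async (finalArrayPromise, directoryChild) => {
    // the accumulator is a promise, so resolve it before spreading it
    const finalArray = await finalArrayPromise;
    const fullPath = path.join(folderPath, directoryChild);
    const pathStat = await fs.lstat(fullPath);
    if (pathStat.isDirectory()) {
      return [...finalArray, ...(await getFolderImagesRecursive(fullPath))];
    }
    return path.extname(fullPath) === '.txt' ? [...finalArray, fullPath] : finalArray;
  }, Promise.resolve([]));
};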

Move all .txt files from one folder to another folder using Node js

I have tried this code but it's not working; it displays an error saying that the file does not exist in that directory.
The system treats .txt as a literal file name rather than as a wildcard for the extension.
const fs = require('fs');

var oldPath = '/abc/def/ghi/*.txt'
var newPath = '/xyz/cbi/'

fs.rename(oldPath, newPath, function (err) {
  if (err) throw err
  console.log('Successfully renamed - AKA moved!')
})
Try this one:
const shell = require('child_process').execSync;

const src = `/abc/def/ghi`;
const dist = `/xyz/cbi`;

shell(`mv ${src}/* ${dist}`);
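If you'd rather stay inside Node and move only the .txt files (note that the mv command above moves everything in the folder), here is a minimal sketch using the plain fs module, assuming the destination folder already exists:

const fs = require('fs');
const path = require('path');

const src = '/abc/def/ghi';
const dest = '/xyz/cbi';

// list the source folder, keep only .txt entries, and move each one
fs.readdirSync(src)
  .filter(name => path.extname(name) === '.txt')
  .forEach(name => {
    // note: fs.renameSync fails across devices/partitions (EXDEV); copy + unlink in that case
    fs.renameSync(path.join(src, name), path.join(dest, name));
  });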
This will solve your problem, using fs-extra's copy:
const fs = require('fs-extra')

// With a callback:
fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
  if (err) return console.error(err)
  console.log('success!')
})
Try this one
For One File:
const { join } = require('path');
// the 'mv' package is callback-based, so promisify it in order to await it
const mv = require('util').promisify(require('mv'));

const moveThem = async () => {
  // Move file ./js/foo.js to ./ns/qux.js
  const original = join(__dirname, 'js/foo.js');
  const target = join(__dirname, 'ns/qux.js');
  await mv(original, target);
}
For Many Files:
mv('source/dir', 'dest/a/b/c/dir', {mkdirp: true}, function(err) {
  if (err) return console.error(err);
});
OR
var spawn = require('child_process').spawn,
    mv = spawn('mv', ['/dir1/dir2/*', 'dir1/']);
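One caveat with the spawn variant above: spawn does not run the command through a shell by default, so the * wildcard will not be expanded. If you want the glob expanded for you, exec (which does use a shell) is a closer fit; a small sketch:

const { exec } = require('child_process');

// exec runs through a shell, so the * glob is expanded before mv sees it
exec('mv /dir1/dir2/* /dir1/', (err) => {
  if (err) return console.error(err);
  console.log('Moved!');
});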

Copy whole source folder rather than just the trigger file using Google Functions & Node JS 10

I need to tweak the following code so it copies the whole of sourceFolder instead of just the event file itself, but I'm not sure how to do this.
const {Storage} = require('@google-cloud/storage');
const {path} = require('path');

exports.copyRenders = (event, context) => {
  const gcsEvent = event;
  const sourcePathOnly = gcsEvent.name
  const folderToWatch = ' ... folder on source bucket... '

  // Process only if it's in the correct folder
  if (sourcePathOnly.indexOf(folderToWatch) > -1) {
    const storage = new Storage();
    const sourceFileBucket = gcsEvent.bucket
    const sourceFolder = sourcePathOnly.split('/').slice(-2)
    const destFileBucket = 'trans-test'

    storage
      .bucket(sourceFileBucket)
      .file(sourcePathOnly)
      .copy(storage.bucket(destFileBucket).file(sourceFolder[0] + '/' + sourceFolder[1]));
  }
  console.log(`Processing file: ${sourcePathOnly}`);
}
With the new code from the answer below:
const {Storage} = require('@google-cloud/storage');
const {path} = require('path');

exports.copyRenders = (event, context) => {
  const gcsEvent = event;
  const sourcePathOnly = gcsEvent.name
  const folderToWatch = ' ... folder on source bucket... '

  // Process only if it's in the correct folder
  if (sourcePathOnly.indexOf(folderToWatch) > -1) {
    const storage = new Storage();
    const sourceFileBucket = gcsEvent.bucket
    const sourceFolder = sourcePathOnly.split('/').slice(-2)
    const destFileBucket = 'trans-test'

    const options = {
      // Get the source path without the file name
      prefix: sourcePathOnly.slice(0, sourcePathOnly.lastIndexOf("/")),
    };

    const [files] = storage.bucket(sourceFileBucket).getFiles(options);
    files.forEach(file => {
      file.copy(storage.bucket(destFileBucket).file(sourceFolder[0] + '/' + sourceFolder[1]));
    });
  }
  console.log(`Processing file: ${sourcePathOnly}`);
}
You can call getFiles() to build a list of files in your directory, and then copy the files. Something like this (you may have to modify for your scenario):
const [ files ] = await storage.bucket(sourceFileBucket).getFiles({
  autoPaginate: false,
  prefix: sourceFolder
});

files.forEach(file => file.copy( ... ));
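A slightly fuller sketch of the same idea, keeping each object's original name in the destination bucket (the naming choice here is just an assumption; adapt it to your layout):

// assumes storage, sourceFileBucket and destFileBucket are defined as in the question
const [files] = await storage.bucket(sourceFileBucket).getFiles({
  autoPaginate: false,
  // prefix of the triggering object, i.e. its path without the file name
  prefix: sourcePathOnly.slice(0, sourcePathOnly.lastIndexOf('/')),
});

await Promise.all(
  files.map(file =>
    // copy each object into the destination bucket under the same name (an assumption)
    file.copy(storage.bucket(destFileBucket).file(file.name))
  )
);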
You can copy all the files under a directory prefix:
const options = {
  // Get the source path without the file name
  prefix: sourcePathOnly.slice(0, sourcePathOnly.lastIndexOf("/")),
};

// getFiles returns a promise, so await it (the enclosing function must be async)
const [files] = await storage.bucket(sourceFileBucket).getFiles(options);
files.forEach(file => {
  file.copy(storage.bucket(destFileBucket).file(sourceFolder[0] + '/' + sourceFolder[1]));
});
Be careful: each time your function is triggered, everything matching the prefix will be copied. And if you copy into the same bucket, you can create an exponential infinite loop!
Note: mathematically, I'm not sure whether something infinite can be exponential!
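If you ever do copy within the same bucket, one cheap guard (a sketch; destPrefix is a hypothetical name for whatever prefix you copy into) is to bail out when the triggering object is already inside the destination:

// hypothetical guard at the top of the function body
const destPrefix = 'copies/';
if (gcsEvent.name.startsWith(destPrefix)) {
  console.log(`Skipping ${gcsEvent.name}: already in the destination prefix`);
  return;
}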

fs-extra copy file outputs blank file

When I run the following code, a blank file gets created with the correct name. I clearly don't want a blank file.
I know the path is correct, because when I make it purposely incorrect it fails (obviously).
const path = require('path');
const fse = require('fs-extra');

const OUTPUT_PATH = 'js/libs/';
const _NODE_MODULES = 'node_modules/';

const filePath = `${_NODE_MODULES}tooltipster/dist/js/tooltipster.bundle.min.js`;

fse.copy(path.join(__dirname, filePath), path.join(__dirname, `${OUTPUT_PATH}/something.js`), err => {
  if (err) {
    console.log(err);
    process.exit(1)
  }
  console.log('Copy complete');
  process.exit(0);
})
The output of this is
Copy complete
But the file is blank as I previously stated. Any idea what I'm doing wrong here?
I've modified your code and checked it on my PC.
Here is the result: http://joxi.ru/ZrJEEJh1KXw1Aj
Check out this code:

const path = require('path');
const fs = require('fs-extra');

let sourceFile = path.join(__dirname, 'node_modules', 'tooltipster/dist/js/tooltipster.bundle.min.js');
let destinationFile = path.join(__dirname, 'js/libs', 'something.js');

fs.copy(sourceFile, destinationFile, err => {
  if (err) {
    return console.error(err);
  }
  console.log('Copy complete');
});
If it fails again, be sure there is no issue with the code itself.
Check your filesystem: maybe there are open-file limits, permission problems, or no free space.
I'd also guess that the source file might be empty, so do:
cat node_modules/tooltipster/dist/js/tooltipster.bundle.min.js
Your call to process.exit interfered with the copy and aborted it before it could finish. You don't need to call process.exit; the process will exit on its own when everything is done.
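If you still want an explicit failure exit code, one option is to use fs-extra's promise form and only set the exit code after the copy has settled. A sketch along those lines:

const path = require('path');
const fse = require('fs-extra');

(async () => {
  try {
    // fs-extra's copy returns a promise when no callback is passed
    await fse.copy(
      path.join(__dirname, 'node_modules/tooltipster/dist/js/tooltipster.bundle.min.js'),
      path.join(__dirname, 'js/libs/something.js')
    );
    console.log('Copy complete');
  } catch (err) {
    console.error(err);
    process.exitCode = 1; // set the code, but let the process exit naturally
  }
})();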

Node fs copy a folder

I am trying to copy a folder using the Node fs module. I am familiar with the readFileSync() and writeFileSync() methods, but I am wondering what method I should use to copy a specified folder?
You can use fs-extra to copy the contents of one folder to another, like this:
var fs = require("fs-extra");
fs.copy('/path/to/source', '/path/to/destination', function (err) {
if (err) return console.error(err)
console.log('success!')
});
There's also a synchronous version.
fs.copySync('/path/to/source', '/path/to/destination')
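If you're on a recent Node version (16.7 or later) and would rather avoid the dependency entirely, there is also a built-in recursive copy; a minimal sketch:

const fs = require('fs');

// fs.cpSync(src, dest, { recursive: true }) copies a whole folder (Node >= 16.7)
fs.cpSync('/path/to/source', '/path/to/destination', { recursive: true });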
Save yourself the extra dependency with just 10 lines of native node functions
Add the following copyDir function:
const { promises: fs } = require("fs")
const path = require("path")

async function copyDir(src, dest) {
  await fs.mkdir(dest, { recursive: true });
  let entries = await fs.readdir(src, { withFileTypes: true });

  for (let entry of entries) {
    let srcPath = path.join(src, entry.name);
    let destPath = path.join(dest, entry.name);
    entry.isDirectory() ?
      await copyDir(srcPath, destPath) :
      await fs.copyFile(srcPath, destPath);
  }
}
And then use it like this:
copyDir("./inputFolder", "./outputFolder")
Further Reading
Copy folder recursively in node.js
fsPromises.copyFile (added in v10.11.0)
fsPromises.readdir (added in v10.0)
fsPromises.mkdir (added in v10.0)
You might want to check out the ncp package. It does exactly what you're trying to do: recursively copy files from one path to another.
Here's something to get you started:
const fs = require("fs");
const path = require("path");
const ncp = require("ncp").ncp;
// No limit, because why not?
ncp.limit = 0;
var thePath = "./";
var folder = "testFolder";
var newFolder = "newTestFolder";
ncp(path.join(thePath, folder), path.join(thePath, newFolder), function (err) {
if (err) {
return console.error(err);
}
console.log("Done !");
});
I liked KyleMit's answer, but thought a parallel version would be preferable.
The code is in TypeScript. If you need JavaScript, just delete the : string type annotations on the line of the declaration of copyDirectory.
import { promises as fs } from "fs"
import path from "path"

export const copyDirectory = async (src: string, dest: string) => {
  const [entries] = await Promise.all([
    fs.readdir(src, { withFileTypes: true }),
    fs.mkdir(dest, { recursive: true }),
  ])

  await Promise.all(
    entries.map((entry) => {
      const srcPath = path.join(src, entry.name)
      const destPath = path.join(dest, entry.name)
      return entry.isDirectory()
        ? copyDirectory(srcPath, destPath)
        : fs.copyFile(srcPath, destPath)
    })
  )
}
Here's the synchronous version of @KyleMit's answer:
const fs = require('fs');
const path = require('path');

function copyDirectory(source, destination) {
  fs.mkdirSync(destination, { recursive: true });

  fs.readdirSync(source, { withFileTypes: true }).forEach((entry) => {
    let sourcePath = path.join(source, entry.name);
    let destinationPath = path.join(destination, entry.name);

    entry.isDirectory()
      ? copyDirectory(sourcePath, destinationPath)
      : fs.copyFileSync(sourcePath, destinationPath);
  });
}
For a more elegant syntax, you can use the pwd-fs module:
const FileSystem = require('pwd-fs');
const pfs = new FileSystem();

(async () => {
  await pfs.copy('./path', './dest');
})();
