I have used this package to recursively untar a given tar.gz file and move the extracted files to a destination directory. The program is given below:
// Recursively extracts a .tar.gz archive into `destination`, flattening the
// extracted regular files into the destination directory and re-extracting
// any nested .tar.gz archives it finds along the way.
var targz = require("tar.gz");
var fs = require("fs");
var s = "/home/user/test.tar.gz";
var d = "/home/user/destination";
function extractTar(source, destination) {
    targz().extract(source, destination).then(function() {
        console.log("extracted " + source);
        // Bug fix: these were shared module-level variables, so recursive
        // calls clobbered each other's stream and parser state. Keep them
        // local to each invocation instead.
        var readStream = fs.createReadStream(source);
        var parse = targz().createParseStream();
        readStream.pipe(parse);
        parse.on('entry', function(entry) {
            var stringpath = destination + entry.path;
            // Nested archives are extracted recursively into the same destination.
            if ((entry.path.length > 6) && entry.path.substr(entry.path.length - 6) === "tar.gz") {
                console.log(stringpath, destination);
                extractTar(stringpath, destination);
            }
            else {
                var filenameStartsAt = stringpath.lastIndexOf("/");
                // Bug fix: `filename` was an implicit global; declare it.
                var filename = stringpath.substr(filenameStartsAt + 1);
                // Directory entries end in "/" and yield an empty filename — skip those.
                if (filename.length) {
                    console.log("====" + filename);
                    fs.rename(stringpath, destination + filename, function(err) {
                        if (err) {
                            console.log(err);
                        }
                    });
                }
            }
        });
    }).catch(function(err) {
        // Bug fix: extraction failures were silently dropped before.
        console.log(err);
    });
}
extractTar(s, d);
This works fine if there is already a folder named "destination" in my directory. If the destination folder is not present, it is created, but it does not contain all of the extracted files — sometimes not even one. Can anyone tell me what I am doing wrong here, or suggest a better way to do this task?
Related
I am trying to zip a single file using the Archiver npm package located: https://www.npmjs.com/package/archiver
I have been able to use the following to zip a directory:
archive.directory(folderName, false);
But when I try to use either of these nothing seems to happen (ie: no zip is generated, file never finishes zipping):
archive.file(folderName, { name: 'file4.txt' });
archive.file(fs.createReadStream(path.resolve(file)), {name: 'File' + singleFileCheck});
Has anyone run into this issue before? Please let me know what I am doing wrong. Thank you in advance!
edit:
module.exports = async function zipper(user, pass, orgid, s4url, apiToken, newOrgName, file) {
const s4 = require('../testcli/s4');
const fs = require('fs');
const archiver = require('archiver');
const path = require('path');
var parentDirect;
if(file == "./"){
parentDirect = "..";
}else{
parentDirect = path.basename(path.dirname(file));
}
const newZipFile = parentDirect + '/s4.zip';
var folderName = file;
//Checks for existence of infinite loop
if(path.resolve(parentDirect).length > path.resolve(folderName).length){
console.log(folderName.search(parentDirect));
console.error('\x1b[36m%s\x1b[0m', 'ERROR!!!! : Please adjust where your console is pointed, this will result in an infinite loop. Exiting.');
return;
}
var P = ['\\', '|', '/', '-'];
var x = 0;
var output = fs.createWriteStream(newZipFile);
var archive = archiver('zip');
scansdisplayinterval = setInterval(function () {
twrl();
}, 250);
// listen for all archive data to be written
output.on('close', function () {
console.log('\x1b[36m%s\x1b[0m', archive.pointer() + ' total bytes');
console.log('\x1b[36m%s\x1b[0m', 'archiver has been finalized and the output file descriptor has closed.');
try {
process.stdout.write(newZipFile);
clearInterval(scansdisplayinterval);
s4(user, pass, newZipFile, orgid, s4url, apiToken, newOrgName);
} catch (e) {
console.log(e);
}
});
// good practice to catch this error explicitly
archive.on('error', function (err) {
throw err;
});
// good practice to catch warnings (ie stat failures and other non-blocking errors)
archive.on('warning', function(err) {
throw err;
});
// This event is fired when the data source is drained no matter what was the data source.
output.on('end', function() {
console.log('\x1b[36m%s\x1b[0m', 'Data has been drained');
});
// pipe archive data to the file
archive.pipe(output);
//Checks -f for file extension
let singleFileCheck = path.extname(file);
//If file has extension
if(singleFileCheck.length <= 4 && singleFileCheck != ''){
//Append single file
console.log('singleFile', path.resolve(file));
archive.file(path.resolve(file), { name: 'file4.txt' });
// archive.append(fs.createReadStream(path.resolve(file)), {name: 'File' + singleFileCheck});
//Else = folder
}else{
// append files from a sub-directory, putting its contents at the root of archive
archive.directory(folderName, false);
}
// archive.directory(folderName, false);
console.log('\x1b[36m%s\x1b[0m', "Zipping: " + folderName + " To: " + newZipFile);
console.log('\x1b[36m%s\x1b[0m', "Zipping To: " + path.resolve(newZipFile));
archive.finalize();
function twrl() {
process.stdout.write('\rZipping Folder ... ' + P[x++]);
x &= 3;
}
return(newZipFile);
};
The issue came from how I was defining the parentDirect var.
Solution:
// Decide where the zip goes: "./" and single-file inputs (short, non-empty
// extension) write to the parent directory; folders use their parent's name.
let singleFileCheck = path.extname(file);
const hasShortExtension = singleFileCheck != '' && singleFileCheck.length <= 4;
parentDirect = (file == "./" || hasShortExtension)
    ? ".."
    : path.basename(path.dirname(file));
I'm looking for a way to copy a folder's contents to another folder, or even replace the folder if it already exists, while preserving its name.
Thanks for helping.
First install fs-extra module in your project by doing npm install fs-extra then follow the steps below:
import the following
var fs = require('fs');
var fs_Extra = require('fs-extra');
var path = require('path');
// Source and destination folders, resolved relative to this script.
var sourceDir = path.join(__dirname, "../working");
var destinationDir = path.join(__dirname, "../worked");
// Create the destination folder (and any missing parents) when absent.
if (!fs.existsSync(destinationDir)) {
    fs.mkdirSync(destinationDir, { recursive: true });
}
// Recursively copy everything under sourceDir into destinationDir.
fs_Extra.copy(sourceDir, destinationDir, function (error) {
    if (error) {
        throw error;
    }
    console.log("success!");
});
NB: the source and destination folder names should not be the same.
First check if the destination path exists; if not, create it.
Then you can use fs-extra for copying the files/subdirectories.
var fs = require('fs');
var fse = require('fs-extra');
// Locations to copy between.
var sourceDir = '/tmp/mydir';
var destDir = '/tmp/mynewdir';
// Make sure the destination exists before copying into it.
if (!fs.existsSync(destDir)) {
    fs.mkdirSync(destDir, { recursive: true });
}
// Recursively copy the directory tree, subfolders included.
fse.copy(sourceDir, destDir, function (err) {
    if (err) {
        console.error(err);
        return;
    }
    console.log("success!");
});
Here is my code.
Could someone explain how to download the files as a zip folder?
exports.downloadAllFiles = function(req,res){
demodb.findOne({ guid: req.params.id }, function(err, data) {
if (err) {
console.log("Error in finding case....");
res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
} else {
if(data){
// Here multiple files are contained in the data array
//So I need to download the files into a zip folder
}
}
})
};
You can do this using ADM-ZIP
// Build a zip containing the three CSV files and write it to 123.zip.
const zip = require('adm-zip');
var zipper = new zip();
['1.csv', '2.csv', '3.csv'].forEach(function (csvFile) {
    zipper.addLocalFile(csvFile);
});
zipper.writeZip("123.zip");
Here's a small example of adm-zip on how to add files directly, from local directory and buffer:
// creating archives
var zip = new AdmZip();
// add file directly from an in-memory buffer.
// Bug fix: `new Buffer(...)` is deprecated (and removed in recent Node) —
// use Buffer.from(...) instead.
zip.addFile("test.txt", Buffer.from("inner content of the file"), "entry comment goes here");
// add local file
zip.addLocalFile("/home/me/some_picture.png");
// get everything as a buffer
var willSendthis = zip.toBuffer();
// or write everything to disk
zip.writeZip(/*target file name*/"/home/me/files.zip");
In your case you can add the files in a for loop, iterating through the array and adding one file per iteration.
// Looks up a record by GUID; each of the record's files is intended to be
// added to a zip archive inside the loop before sending the download.
exports.downloadAllFiles = function(req,res){
demodb.findOne({ guid: req.params.id }, function(err, data) {
if (err) {
// DB lookup failed: log and respond with a 500 and an empty body.
console.log("Error in finding case....");
res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
} else {
if(data){
// for loop goes here:
// NOTE(review): this assumes `data` is array-like with a `length`;
// findOne usually returns a single document — confirm whether the files
// actually live in a field such as data.files before looping.
for(var i =0; i<data.length; i++){
// add the files to zip
}
}
}
})
};
// Add each local PDF in the list to the archive, then write files.zip.
var AdmZip = require('adm-zip');
var zip = new AdmZip();
var fs = require('fs-extra');
let arr = ['AKTU.pdf', 'test1.pdf'];
for (const pdfName of arr) {
    zip.addLocalFile(`./createZip/${pdfName}`); //local path
}
zip.writeZip("./files.zip");
You can use this code to add multiple files in a zip.
I'm new to Gulp and I'm having a problem with gulp,here are some points that I want to be done
I want to lookup for a file that has an .storyboard extension
(it is already DONE)
I want to perform a task whenever a certain file's content is
changed,
I want to Watch that file and when something is being changed in
that file
I want to rewrite its content by removing all other content that was
already in the file.
When I make changes in a file with the .storyboard extension, it just keeps displaying the message: done, file has been saved.
Here is my Code:
//fs to read and write files while path is for iterating directories
// Bug fix: `fs` and `path` were assigned without `var`, making them implicit
// globals (a ReferenceError in strict mode).
var fs = require('fs'),
    path = require('path');
//DomParser to Parse Xml
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
//Gulp for detecting changes
var gulp = require('gulp');
// Path of the last *.storyboard file found by crawl(); watched below.
var mainStoryBoardFile;
// Depth-first scan of `dir`: records (in the shared mainStoryBoardFile
// variable) the last *.storyboard file encountered, with forward-slash
// separators so the path is usable on Windows too.
function crawl(dir) {
    const entries = fs.readdirSync(dir);
    for (const entry of entries) {
        const fullPath = path.join(dir, entry);
        //iterate through files to check whether next is a file or direcory
        if (fs.lstatSync(fullPath).isDirectory()) {
            //if its a directory dive into it
            crawl(fullPath);
        } else if (fullPath.indexOf('.storyboard') >= 0) {
            //if its a file just check it whether it is a .storyboard file or not
            mainStoryBoardFile = fullPath.replace(/\\/g, "/");
        }
    }
}
//calling function
// Scan the whole project tree once at startup to locate the storyboard file.
crawl(__dirname);
// NOTE(review): newFilePath is never read in the code shown — confirm it is needed.
var newFilePath = './data.xml'
// Parsed XML document and raw file contents, filled in by the 'read' task.
var document;
var dataFound;
//What to do
// 'read': load the storyboard file and parse it into an XML DOM.
gulp.task('read', function (done) {
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    done();
});
// 'write': serialize the DOM back to the storyboard file.
// Bug fix: the original started two overlapping async fs.writeFile calls on
// the same path (one truncating to '', one writing the document), so the
// final file contents depended on which write finished last, and done() was
// signalled before either write completed. One write, with done() in its
// callback, is both sufficient and correctly ordered.
gulp.task('write', function (done) {
    fs.writeFile(mainStoryBoardFile, String(document), (err) => {
        if (err) throw err;
        console.log('The file has been saved!');
        done();
    });
});
// 'watch': re-run read+write whenever the storyboard file changes.
gulp.task('watch', function (done) {
    gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to this problem. You can watch a single file for changes and run some function whenever it changes. In the XML case, when the watched file changes you can add new properties or attributes, or create new elements in the XML file.
//Dependencies
//fs to read and write files while path is for iterating directories
// xmldom parses the storyboard XML; gulp provides the file watcher.
// NOTE(review): arrayOfControls and RandExp are not used in the code shown
// here — confirm they are needed elsewhere before removing them.
var fs = require('fs'),
path = require('path'),
DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
gulp = require('gulp'),
arrayOfControls = require('./object.json'),
RandExp = require('randexp');
console.log("GulpService has been Started\n");
// Depth-first scan of `dir`: stores (in the shared mainStoryBoardFile
// variable) the last *.storyboard path found, normalized to forward slashes.
function crawl(dir) {
    var names = fs.readdirSync(dir);
    for (var i = 0; i < names.length; i++) {
        var entryPath = path.join(dir, names[i]);
        //iterate through files to check whether next is a file or direcory
        if (fs.lstatSync(entryPath).isDirectory()) {
            //if its a directory dive into it
            crawl(entryPath);
        } else if (entryPath.indexOf('.storyboard') >= 0) {
            //if its a file just check it whether it is a .storyboard file or not
            mainStoryBoardFile = entryPath.replace(/\\/g, "/");
        }
    }
}
//calling function
// Initial scan so mainStoryBoardFile is set before the watcher starts.
// (The `var mainStoryBoardFile` below is hoisted, so assigning it here is fine.)
crawl(__dirname);
// Path of the storyboard being watched, plus the parsed DOM and raw text.
var mainStoryBoardFile;
var document, dataFound;
// Re-locate the storyboard (its path may have changed), parse it into a DOM,
// and write the serialized DOM back to the same file.
function readWrite() {
    crawl(__dirname);
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    // Bug fix: fs.writeFileSync is synchronous and takes no callback — the
    // original passed callbacks (invalid as the options argument) and wrote
    // the file twice (an empty truncate, then the document object). A single
    // write of the serialized document is sufficient.
    fs.writeFileSync(mainStoryBoardFile, String(document));
    console.log('The file has been saved!');
}
// Watch the storyboard file; on every change, rewrite it via readWrite().
var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (path, stats) {
readWrite();
console.log('File ' + path + ' was changed');
// NOTE(review): presumably un-watching and re-adding the file avoids
// reacting to the write readWrite() just performed — confirm this is
// actually required with this gulp/chokidar version.
watcher.unwatch(mainStoryBoardFile);
watcher.add(mainStoryBoardFile);
});
I am using copy method of fs-extra to copy files from a source to destination. My use case is to create a copy of the file with a name like if a file of the same name exists in the destination. The copy method of fs-extra module overwrites the destination file.
You could try something like this:
const fs = require('fs-extra');
// Attempt a no-overwrite copy of src to dest.
// Resolves true on success, false when dest already exists; any other
// failure is rethrown to the caller.
async function copy(src, dest) {
    try {
        await fs.copy(src, dest, { overwrite: false, errorOnExist: true });
        return true;
    } catch (error) {
        if (!error.message.includes('already exists')) {
            throw error;
        }
        return false;
    }
}
/**
 * Copy src to dest without overwriting; if dest is taken, try
 * dest_copy1 ... dest_copyN (N = maxAttempts) until one succeeds.
 * Errors from copy() are logged rather than propagated.
 *
 * Bug fix: the original had a stray semicolon after the `if` condition
 * (`if (!await copy(src, dest)); { ... }`), which turned the guard into an
 * empty statement — the numbered-copy loop then ran even when the plain
 * copy succeeded, always creating an extra `_copy1` file.
 */
async function copyWithoutOverwrite(src, dest, maxAttempts) {
    try {
        if (!await copy(src, dest)) {
            for (let i = 1; i <= maxAttempts; i++) {
                if (await copy(src, `${dest}_copy${i}`)) {
                    return;
                }
            }
        }
    } catch (error) {
        console.error(error);
    }
}
// Example usage: copy /tmp/testfile to /tmp/mynewfile, falling back to
// numbered "_copyN" names (up to 10 attempts) if the target already exists.
const src = '/tmp/testfile';
const dest = '/tmp/mynewfile';
const maxAttempts = 10;
// NOTE(review): the returned promise is not awaited here; errors are logged
// inside copyWithoutOverwrite itself.
copyWithoutOverwrite(src, dest, maxAttempts);
So basically you need to implement a custom copy procedure that you can interrupt and alter at any moment; fs-jetpack does this very well.
const pathUtil = require("path");
const jetpack = require("fs-jetpack");
// Jetpack contexts anchored at the two folders; all paths used with
// src/dst below are resolved relative to these anchors.
const src = jetpack.cwd("path/to/source/folder");
const dst = jetpack.cwd("path/to/destination/folder");
// Keep appending "-duplicate" before the extension until the resulting
// name does not yet exist in the destination context.
const findUnusedPath = (candidatePath) => {
    const dir = pathUtil.dirname(candidatePath);
    const ext = pathUtil.extname(candidatePath);
    const base = pathUtil.basename(candidatePath, ext);
    const nextCandidate = pathUtil.join(dir, `${base}-duplicate${ext}`);
    // Recurse while taken — names grow "-duplicate-duplicate-..." until free.
    return dst.exists(nextCandidate)
        ? findUnusedPath(nextCandidate)
        : nextCandidate;
};
// Iterate over all files in source directory and manually decide
// where to put them in destination directory.
for (const filePath of src.find()) {
    const content = src.read(filePath, "buffer");
    // On a name clash, fall back to an unused "-duplicate" variant.
    const destPath = dst.exists(filePath) ? findUnusedPath(filePath) : filePath;
    dst.write(destPath, content);
}
Here I use file read/write instead of copy because it's faster in this particular case: copy must first check whether the path is a directory or a file, and if we already know it is a file there is no need for that extra work.