using archiver module with downloadable online links - node.js

In a node application, I wish to download a zip file that contains pdfs downloaded from various urls on the internet (where if I type the url into a browser, it just directs me to download a pdf). I've been using the archiver module which is documented on github at https://github.com/archiverjs/node-archiver, and the official documentation is at https://www.archiverjs.com/.
I'm stuck at the part where it gives the following examples for adding files to the zip file.
// append a file from stream
var file1 = __dirname + '/file1.txt';
archive.append(fs.createReadStream(file1), { name: 'file1.txt' });
// append a file from string
archive.append('string cheese!', { name: 'file2.txt' });
// append a file from buffer
var buffer3 = Buffer.from('buff it!');
archive.append(buffer3, { name: 'file3.txt' });
// append a file
archive.file('file1.txt', { name: 'file4.txt' });
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
// append files from a sub-directory, putting its contents at the root of archive
archive.directory('subdir/', false);
// append files from a glob pattern
archive.glob('subdir/*.txt');
Unfortunately, it seems just pasting urls into the first parameter of .append or .directory doesn't work - would anyone know how I can add downloadable files (that are online) into the zip file?

Sure. Download the PDFs first with download-pdf, then add the saved files to the archive, something like this:
var download = require('download-pdf')
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
gzip: true,
zlib: { level: 9 } // Sets the compression level.
});
var pdf = "http://www.consejoconsultivoemt.cl/wp-content/uploads/2018/12 /Presentaci%C3%B3n-Lineamientos-Estrat%C3%A9gicos-de-Corfo.pdf"
var pdf2 = "https://www.biobiochile.cl/static/tarifas.pdf"
var options = {
directory: "./files/",
filename: "first.pdf"
}
var options2 = {
directory: "./files/",
filename: "second.pdf"
}
// download-pdf saves the files asynchronously, so only build the archive once both downloads have finished
var pending = 2;
function onDownloaded() {
  if (--pending > 0) return;
  // pipe archive data to the output file
  archive.pipe(output);
  // append the downloaded files
  archive.file('./files/first.pdf', { name: 'first.pdf' });
  archive.file('./files/second.pdf', { name: 'second.pdf' });
  archive.finalize();
}
archive.on('error', function (err) {
  throw err;
});
download(pdf, options, function (err) {
  if (err) throw err;
  console.log("meow");
  onDownloaded();
});
download(pdf2, options2, function (err) {
  if (err) throw err;
  console.log("meow2");
  onDownloaded();
});
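If you'd rather not write the PDFs to disk at all, note that archive.append accepts any readable stream, so you can pipe each HTTP response straight into the archive. A minimal sketch of that approach (not part of the answer above; the URLs and entry names are placeholders, and it assumes the links serve the PDF bytes directly without redirects):
var https = require('https');
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', { zlib: { level: 9 } });
archive.on('error', function (err) { throw err; });
archive.pipe(output);
var urls = [
  { url: 'https://example.com/first.pdf', name: 'first.pdf' },
  { url: 'https://example.com/second.pdf', name: 'second.pdf' }
];
var remaining = urls.length;
urls.forEach(function (entry) {
  https.get(entry.url, function (response) {
    // an http.IncomingMessage is a readable stream, so it can be appended directly
    archive.append(response, { name: entry.name });
    if (--remaining === 0) {
      archive.finalize(); // finalize only after every entry has been queued
    }
  });
});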

Related

How to convert multiple files as zip in nodejs?

Here is my code. Could someone explain how to download these files as a zip folder?
exports.downloadAllFiles = function(req,res){
demodb.findOne({ guid: req.params.id }, function(err, data) {
if (err) {
console.log("Error in finding case....");
res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
} else {
if(data){
// Here multiple files are contained in the data array
//So I need to download the files into a zip folder
}
}
})
};
You can do this using ADM-ZIP
const zip=require('adm-zip');
var zipper = new zip();
zipper.addLocalFile('1.csv');
zipper.addLocalFile('2.csv');
zipper.addLocalFile('3.csv');
zipper.writeZip("123.zip");
Here's a small example of adm-zip on how to add files directly, from local directory and buffer:
// creating archives
var zip = new AdmZip();
// add file directly
zip.addFile("test.txt", new Buffer("inner content of the file"), "entry comment goes here");
// add local file
zip.addLocalFile("/home/me/some_picture.png");
// get everything as a buffer
var willSendthis = zip.toBuffer();
// or write everything to disk
zip.writeZip(/*target file name*/"/home/me/files.zip");
In your case you can add the files in a for loop, iterating through the array and adding one file on each iteration (a completed version is sketched after the skeleton below).
exports.downloadAllFiles = function(req,res){
demodb.findOne({ guid: req.params.id }, function(err, data) {
if (err) {
console.log("Error in finding case....");
res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
} else {
if(data){
// for loop goes here:
for(var i =0; i<data.length; i++){
// add the files to zip
}
}
}
})
};
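For illustration, here is one way that loop could be filled in with adm-zip, assuming each element of data holds a path to a file on disk (the filePath field is hypothetical; use whatever your documents actually store) and that you want to send the finished zip back in the response:
const AdmZip = require('adm-zip');
exports.downloadAllFiles = function (req, res) {
  demodb.findOne({ guid: req.params.id }, function (err, data) {
    if (err) {
      console.log("Error in finding case....");
      res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
    } else if (data) {
      const zip = new AdmZip();
      for (let i = 0; i < data.length; i++) {
        // filePath is a hypothetical field name
        zip.addLocalFile(data[i].filePath);
      }
      // send the archive back as a download
      res.set('Content-Type', 'application/zip');
      res.set('Content-Disposition', 'attachment; filename=files.zip');
      res.send(zip.toBuffer());
    }
  });
};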
var AdmZip = require('adm-zip');
var zip = new AdmZip();
var fs=require('fs-extra');
let arr = ['AKTU.pdf', 'test1.pdf']
for(let i = 0 ;i<arr.length ;i++){
zip.addLocalFile(`./createZip/${arr[i]}`); //local path
}
zip.writeZip("./files.zip");
You can use this code to add multiple files to a zip.

Gulp: Abnormal behavior of program

I'm new to Gulp and I'm having a problem with it. Here is what I want to do:
I want to look up a file that has a .storyboard extension (this part is already done).
I want to watch that file and perform a task whenever its content changes.
When something in the file changes, I want to rewrite its content, removing everything that was already in the file.
Instead, when I make changes to the .storyboard file, it just keeps displaying the messages done and The file has been saved!
Here is my Code:
//fs to read and write files while path is for iterating directories
var fs = require('fs'),
path = require('path');
//DomParser to Parse Xml
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
//Gulp for detecting changes
var gulp = require('gulp')
var mainStoryBoardFile;
function crawl(dir) {
// console.log('[+]', dir);
var files = fs.readdirSync(dir);
for (var file in files) {
var next = path.join(dir, files[file]);
//iterate through files to check whether next is a file or direcory
if (fs.lstatSync(next).isDirectory()) {
//if its a directory dive into it
crawl(next);
} else if (next.indexOf('.storyboard') >= 0) {
//if its a file just check it whether it is a .storyboard file or not
mainStoryBoardFile = next;
mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
};
}
}
//calling function
crawl(__dirname);
var newFilePath = './data.xml'
var document;
var dataFound;
//What to do
gulp.task('read', function (done) {
dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
document = DOMParser.parseFromString(
dataFound.toString()
);
done();
});
gulp.task('write', function (done) {
fs.writeFile(mainStoryBoardFile, '', function () { console.log('done') })
fs.writeFile(mainStoryBoardFile, document, (err) => {
if (err) throw err;
console.log('The file has been saved!');
});
done();
});
gulp.task('watch', function (done) {
gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to this problem. You can watch a single file for changes and run whatever function you need whenever it changes; in the XML case, when the file changes you can add new properties or attributes, or create new elements in the XML file.
//Dependencies
//fs to read and write files while path is for iterating directories
var fs = require('fs'),
path = require('path'),
DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
gulp = require('gulp'),
arrayOfControls = require('./object.json'),
RandExp = require('randexp');
console.log("GulpService has been Started\n");
function crawl(dir) {
var files = fs.readdirSync(dir);
for (var file in files) {
var next = path.join(dir, files[file]);
//iterate through files to check whether next is a file or direcory
if (fs.lstatSync(next).isDirectory()) {
//if its a directory dive into it
crawl(next);
} else if (next.indexOf('.storyboard') >= 0) {
//if its a file just check it whether it is a .storyboard file or not
mainStoryBoardFile = next;
mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
}
}
}
//calling function
crawl(__dirname);
var mainStoryBoardFile;
var document, dataFound;
function readWrite() {
crawl(__dirname);
dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
document = DOMParser.parseFromString(
dataFound.toString()
);
// writeFileSync is synchronous and takes no callback; a single write is enough
fs.writeFileSync(mainStoryBoardFile, document.toString());
console.log('The file has been saved!');
}
var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (path, stats) {
readWrite();
console.log('File ' + path + ' was changed');
watcher.unwatch(mainStoryBoardFile);
watcher.add(mainStoryBoardFile);
});

How do I unzip a .zip/.rar file in Node.js into a folder

I am currently using zlib along with fstream for zipping and sending to the client. Now I need to unzip an archive (which may contain sub-folders) into a folder while maintaining the folder structure. How do I do that?
There are plenty of node modules that can do this for you. One of them is node-unzip. You can extract a .zip file to a directory as simply as this:
var fs = require('fs'), unzip = require('unzip');
fs.createReadStream('path/to/archive.zip').pipe(unzip.Extract({ path: 'output/path' }));
Further reading: https://github.com/EvanOxfeld/node-unzip
RAR is closed-source software. The only way to handle it is to install the command-line rar utility (rar.exe on Windows, or the rar build available for most platforms) and call it like this:
var exec = require('child_process').exec;
exec("rar.exe x file.rar", function (error) {
if (error) {
// error code here
} else {
// success code here
}
});
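If you also need to control where the RAR archive is extracted, the command-line tools accept a target directory; a small sketch assuming the unrar binary is on your PATH and using placeholder paths (execFile avoids shell-quoting issues):
const { execFile } = require('child_process');
// x = extract with full paths, -o+ = overwrite existing files
execFile('unrar', ['x', '-o+', 'file.rar', 'output/folder/'], function (error, stdout) {
  if (error) {
    console.error('extraction failed:', error);
    return;
  }
  console.log(stdout);
});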
You can use this amazing module, machinepack-zip (http://node-machine.org/machinepack-zip), to uncompress a zip file including the directory structure inside it:
var Zip = require('machinepack-zip');
// Unzip the specified .zip file and write the decompressed files/directories as contents of the specified destination directory.
Zip.unzip({
source: '/Users/mikermcneil/stuff.zip',
destination: '/Users/mikermcneil/my-stuff',
}).exec(callbackSuccess, callbackFail );
To download a remote file and unzip it, you can use this code:
var fs = require('fs');
var unzip = require("unzip2");
var tar = require('tar');
var zlib = require('zlib');
var path = require('path');
var mkdirp = require('mkdirp'); // used to create directory tree
var request = require("request");
var http = require('http');
var zip = require("machinepack-zip");
for (var i = 0; i < _diff.length; i++) {
  // note: don't overwrite the `request` module; keep the http.get request in its own variable
  var req = http.get({
    host: 'localhost',
    path: '/update/patchs/' + "example.zip",
    port: 80,
    headers: { 'accept-encoding': 'gzip,deflate' }
  });
  req.on('response', (response) => {
    var output = fs.createWriteStream(__dirname + "/tmp/" + "example.zip");
    switch (response.headers['content-encoding']) {
      // or, just use zlib.createUnzip() to handle both cases
      case 'gzip':
        response.pipe(zlib.createGunzip()).pipe(unzip.Extract({ path: __dirname }));
        break;
      case 'deflate':
        response.pipe(zlib.createInflate()).pipe(unzip.Extract({ path: __dirname }));
        break;
      default:
        response.pipe(output);
        break;
    }
  });
  req.on('close', function () {
    zip.unzip({
      source: __dirname + "/tmp/" + "example.zip",
      destination: __dirname,
    }).exec({
      error: function (err) {
        console.error("error", err);
      },
      success: function () {
        // delete the temp folder content after the uncompress finishes
      },
    });
  });
}
Note: remove any modules you don't actually need.
Use the Node.js decompress-zip package. First install it with npm:
npm install decompress-zip --save
Then you have to require it:
const DecompressZip = require('decompress-zip');
Finally you can use it in the following way:
let unzipper = new DecompressZip( absolutePathFileZip );
The directory to extract to must be specified:
unzipper.extract({
path: pathToExtract
});
Additionally, you can use the following events for better control:
Handle Error:
unzipper.on('error', function (err) {
console.log('event error')
});
Notify when everything is extracted
unzipper.on('extract', function (log) {
console.log('log es', log);
});
Notify "progress" of the decompressed files:
unzipper.on('progress', function (fileIndex, fileCount) {
console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
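Putting those pieces together, a minimal end-to-end sketch could look like this (the archive path and output directory are placeholders; the handlers are attached before extract() is called so no events are missed):
const DecompressZip = require('decompress-zip');
const unzipper = new DecompressZip('/path/to/archive.zip');
unzipper.on('error', function (err) {
  console.log('Caught an error', err);
});
unzipper.on('extract', function (log) {
  console.log('Finished extracting', log);
});
unzipper.on('progress', function (fileIndex, fileCount) {
  console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
unzipper.extract({
  path: '/path/to/output-dir'
});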
If anyone is looking for async/await syntax:
const fs = require('fs');
const request = require('request');
const unzip = require('unzip');
await new Promise(resolve =>
request('url')
.pipe(fs.createWriteStream('path/zipfilename'))
.on('finish', () => {
resolve();
}));
await new Promise(resolve =>
fs.createReadStream('path/filename')
.pipe(unzip.Extract({ path: 'path/extractDir' }))
.on('close', ()=>{
resolve()
}));

how to convert multiple files to compressed zip file using node js

I want to convert multiple files to a compressed zip file on node.js.
I tried the following code:
var archiver = require('archiver');
var fs = require('fs');
var StringStream = require('string-stream');
http.createServer(function(request, response) {
var dl = archiver('data');
dl.pipe(response);
dl.append(new fs.createReadStream('test/fixtures/test.txt'), {
name: 'stream.txt', date: testDate2
});
dl.append(new StringStream("Ooh dynamic stuff!"), {
name : 'YoDog/dynamic.txt'
});
dl.finalize(function(err) {
if (err)
res.send(200000)
});
}).listen(3500);
There is a much simpler solution with the archiver module:
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
gzip: true,
zlib: { level: 9 } // Sets the compression level.
});
archive.on('error', function(err) {
throw err;
});
// pipe archive data to the output file
archive.pipe(output);
// append files
archive.file('/path/to/file0.txt', {name: 'file0-or-change-this-whatever.txt'});
archive.file('/path/to/README.md', {name: 'foobar.md'});
// wait for streams to complete
archive.finalize();
It also supports tar archives; just replace 'zip' with 'tar' in the archiver(...) call.
I get no credit for this code, it's just part of the README (you should check it out for other means of adding stuff into the archive).
Neat package, and it's probably the only one that's still being maintained and documented properly.
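For instance, a .tar.gz variant of the same snippet would look roughly like this, relying on archiver's gzip option for the tar format (treat the paths as placeholders):
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream('./example.tar.gz');
// same flow as the zip example, but emitting a gzip-compressed tarball
var archive = archiver('tar', {
  gzip: true,
  gzipOptions: { level: 9 }
});
archive.on('error', function (err) {
  throw err;
});
archive.pipe(output);
archive.file('/path/to/file0.txt', { name: 'file0.txt' });
archive.finalize();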
For zipping up multiple files, you can use this utility method I wrote with the archiver module:
var zipLogs = function(working_directory) {
var fs = require('fs');
var path = require('path');
var output = fs.createWriteStream(path.join(working_directory, 'logs.zip'));
var archiver = require('archiver');
var zipArchive = archiver('zip');
zipArchive.pipe(output);
zipArchive.bulk([{src: [path.join(working_directory, '*.log')], expand: true}]);
zipArchive.finalize(function(err, bytes) {
if (err)
throw err;
console.log('done:', bytes);
});
}
This, for example, zips up all the .log files in a particular directory.
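Archive.bulk has since been deprecated in archiver, so roughly the same helper can be written against the glob method instead; a hedged sketch:
var fs = require('fs');
var path = require('path');
var archiver = require('archiver');
var zipLogs = function (working_directory) {
  var output = fs.createWriteStream(path.join(working_directory, 'logs.zip'));
  var zipArchive = archiver('zip');
  output.on('close', function () {
    console.log('done:', zipArchive.pointer(), 'total bytes');
  });
  zipArchive.on('error', function (err) {
    throw err;
  });
  zipArchive.pipe(output);
  // match every *.log file directly inside the working directory
  zipArchive.glob('*.log', { cwd: working_directory });
  zipArchive.finalize();
};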
To compress a single text file using Node's built-in zlib (note this produces a gzip .gz file, not a .zip archive):
var fs=require('fs');
var Zlib=require('zlib');
fs.createReadStream('input.txt').pipe(Zlib.createGzip()).pipe(fs.createWriteStream('input.txt.gz'));
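For completeness, decompressing such a file is the mirror image of that pipeline; a small sketch with placeholder filenames:
var fs = require('fs');
var Zlib = require('zlib');
// reverse the gzip step: turn input.txt.gz back into a plain text file
fs.createReadStream('input.txt.gz').pipe(Zlib.createGunzip()).pipe(fs.createWriteStream('output.txt'));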

Need to ZIP an entire directory using Node.js

I need to zip an entire directory using Node.js. I'm currently using node-zip and each time the process runs it generates an invalid ZIP file (as you can see from this Github issue).
Is there another, better, Node.js option that will allow me to ZIP up a directory?
EDIT: I ended up using archiver
writeZip = function(dir,name) {
var zip = new JSZip(),
code = zip.folder(dir),
output = zip.generate(),
filename = ['jsd-',name,'.zip'].join('');
fs.writeFileSync(baseDir + filename, output);
console.log('creating ' + filename);
};
sample value for parameters:
dir = /tmp/jsd-<randomstring>/
name = <randomstring>
UPDATE: For those asking about the implementation I used, here's a link to my downloader:
I ended up using archiver lib. Works great.
Example
var file_system = require('fs');
var archiver = require('archiver');
var output = file_system.createWriteStream('target.zip');
var archive = archiver('zip');
output.on('close', function () {
console.log(archive.pointer() + ' total bytes');
console.log('archiver has been finalized and the output file descriptor has closed.');
});
archive.on('error', function(err){
throw err;
});
archive.pipe(output);
// append files from a sub-directory, putting its contents at the root of archive
archive.directory(source_dir, false);
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
archive.finalize();
I'm not going to show something new, just wanted to summarise the solutions above for those who like Promises as much as I do 😉.
const fs = require('fs');
const archiver = require('archiver');
/**
* @param {String} sourceDir: /some/folder/to/compress
* @param {String} outPath: /path/to/created.zip
* @returns {Promise}
*/
function zipDirectory(sourceDir, outPath) {
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(outPath);
return new Promise((resolve, reject) => {
archive
.directory(sourceDir, false)
.on('error', err => reject(err))
.pipe(stream)
;
stream.on('close', () => resolve());
archive.finalize();
});
}
Hope it will help someone 🤞
Use Node's native child_process api to accomplish this.
No need for third party libs. Two lines of code.
const child_process = require("child_process");
child_process.execSync(`zip -r <DESIRED_NAME_OF_ZIP_FILE_HERE> *`, {
cwd: <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
});
The example above showcases the synchronous API. You can also use child_process.exec(command, options, callback) if you want async behavior. There are a lot more options you can specify other than cwd to further fine-tune your request.
If you don't have the ZIP utility:
This question specifically asks about the zip utility for archiving/compression, so this example assumes you have the zip utility installed on your system. For completeness' sake, some operating systems may not have the utility installed by default. In that case you have at least three options:
Work with the archiving/compression utility that is native to your platform
Replace the shell command in the Node.js code above with the equivalent from your system. For example, Linux distros usually come with tar/gzip utilities:
tar -czf <DESIRED_NAME_OF_ZIP_FILE_HERE> <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
This is a nice option as you don't need to install anything new onto your operating system or manage another dependency (kind of the whole point for this answer).
Obtain the zip binary for your OS/distribution.
For example on Ubuntu: apt install zip.
The ZIP utility is tried and tested for decades, it's fairly ubiquitous and it's a safe choice. Do a quick google search or go to the creator, Info-ZIP's, website for downloadable binaries.
Use a third party library/module (of which there are plenty on NPM).
I don't prefer this option. However, if you don't really care to understand the native methods and introducing a new dependency is a non-issue, this is also a valid option.
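If you prefer the asynchronous flavour mentioned above, here is a small sketch using the promisified exec (the folder and archive names are placeholders):
const util = require('util');
const exec = util.promisify(require('child_process').exec);
async function zipFolder(folderPath, zipName) {
  // run the system zip utility from inside the folder being archived
  await exec(`zip -r ${zipName} *`, { cwd: folderPath });
  console.log(`created ${zipName} inside ${folderPath}`);
}
// usage: zipFolder('/path/to/folder', 'archive.zip').catch(console.error);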
This is another library which zips a folder in one line:
zip-local
var zipper = require('zip-local');
zipper.sync.zip("./hello/world/").compress().save("pack.zip");
archive.bulk is now deprecated; the method to use instead is glob:
var fs = require('fs');
var archiver = require('archiver');
var archive = archiver('zip');
var fileName = 'zipOutput.zip';
var fileOutput = fs.createWriteStream(fileName);
fileOutput.on('close', function () {
console.log(archive.pointer() + ' total bytes');
console.log('archiver has been finalized and the output file descriptor has closed.');
});
archive.pipe(fileOutput);
archive.glob("../dist/**/*"); //some glob pattern here
archive.glob("../dist/.htaccess"); //another glob pattern
// add as many as you like
archive.on('error', function(err){
throw err;
});
archive.finalize();
To include all files and directories:
archive.bulk([
{
expand: true,
cwd: "temp/freewheel-bvi-120",
src: ["**/*"],
dot: true
}
]);
It uses node-glob (https://github.com/isaacs/node-glob) underneath, so any matching expression compatible with that will work.
To pipe the result to the response object (scenarios where there is a need to download the zip rather than store locally)
archive.pipe(res);
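For example, assuming an Express app and the non-deprecated glob API, a download route might look roughly like this (the route, folder, and file name are placeholders):
const express = require('express');
const archiver = require('archiver');
const app = express();
app.get('/download-zip', function (req, res) {
  // tell the browser a zip attachment is coming
  res.attachment('output.zip');
  const archive = archiver('zip');
  archive.on('error', function (err) {
    res.status(500).send({ error: err.message });
  });
  // stream the archive straight into the HTTP response
  archive.pipe(res);
  archive.glob('**/*', { cwd: 'some-folder' });
  archive.finalize();
});
app.listen(3000);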
Sam's hints for accessing the content of the directory worked for me.
src: ["**/*"]
I have found this small library that encapsulates what you need.
npm install zip-a-folder
const zipAFolder = require('zip-a-folder');
await zipAFolder.zip('/path/to/the/folder', '/path/to/archive.zip');
https://www.npmjs.com/package/zip-a-folder
Adm-zip has problems just compressing an existing archive (https://github.com/cthackers/adm-zip/issues/64), as well as corruption when compressing binary files.
I've also run into compression corruption issues with node-zip (https://github.com/daraosn/node-zip/issues/4).
node-archiver is the only one that seems to work well for compressing, but it doesn't have any uncompress functionality.
Since archiver has not been compatible with newer versions of webpack for a long time, I recommend using zip-lib.
var zl = require("zip-lib");
zl.archiveFolder("path/to/folder", "path/to/target.zip").then(function () {
console.log("done");
}, function (err) {
console.log(err);
});
As of today, I'm using AdmZip and it works great:
import AdmZip = require('adm-zip');
export async function archiveFile() {
try {
const zip = new AdmZip();
const outputDir = "/output_file_dir.zip";
zip.addLocalFolder("./yourFolder")
zip.writeZip(outputDir);
} catch (e) {
console.log(`Something went wrong ${e}`);
}
}
An ES module (import/export) version of the answer based on https://stackoverflow.com/a/51518100:
To zip single directory
import archiver from 'archiver';
import fs from 'fs';
export default zipDirectory;
/**
* From: https://stackoverflow.com/a/51518100
* @param {String} sourceDir: /some/folder/to/compress
* @param {String} outPath: /path/to/created.zip
* @returns {Promise}
*/
function zipDirectory(sourceDir, outPath) {
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(outPath);
return new Promise((resolve, reject) => {
archive
.directory(sourceDir, false)
.on('error', err => reject(err))
.pipe(stream)
;
stream.on('close', () => resolve());
archive.finalize();
});
}
To zip multiple directories:
import archiver from 'archiver';
import fs from 'fs';
export default zipDirectories;
/**
* Adapted from: https://stackoverflow.com/a/51518100
* @param {String} sourceDir: /some/folder/to/compress
* @param {String} outPath: /path/to/created.zip
* @returns {Promise}
*/
function zipDirectories(sourceDirs, outPath) {
const archive = archiver('zip', { zlib: { level: 9 }});
const stream = fs.createWriteStream(outPath);
return new Promise((resolve, reject) => {
var result = archive;
sourceDirs.forEach(sourceDir => {
result = result.directory(sourceDir, false);
});
result
.on('error', err => reject(err))
.pipe(stream)
;
stream.on('close', () => resolve());
archive.finalize();
});
}
You can try it in a simple way.
Install zip-dir:
npm install zip-dir
and use it
var zipdir = require('zip-dir');
let foldername = src_path.split('/').pop()
zipdir(<<src_path>>, { saveTo: 'demo.zip' }, function (err, buffer) {
});
I ended up wrapping archiver to emulate JSZip, as refactoring my whole project would take too much effort. I understand Archiver might not be the best choice, but here you go.
// USAGE:
const zip=JSZipStream.to(myFileLocation)
.onDone(()=>{})
.onError(()=>{});
zip.file('something.txt','My content');
zip.folder('myfolder').file('something-inFolder.txt','My content');
zip.finalize();
// NodeJS file content:
var fs = require('fs');
var path = require('path');
var archiver = require('archiver');
function zipper(archive, settings) {
return {
output: null,
streamToFile(dir) {
const output = fs.createWriteStream(dir);
this.output = output;
archive.pipe(output);
return this;
},
file(location, content) {
if (settings.location) {
location = path.join(settings.location, location);
}
archive.append(content, { name: location });
return this;
},
folder(location) {
if (settings.location) {
location = path.join(settings.location, location);
}
return zipper(archive, { location: location });
},
finalize() {
archive.finalize();
return this;
},
onDone(method) {
this.output.on('close', method);
return this;
},
onError(method) {
this.output.on('error', method);
return this;
}
};
}
exports.JSZipStream = {
to(destination) {
console.log('stream to',destination)
const archive = archiver('zip', {
zlib: { level: 9 } // Sets the compression level.
});
return zipper(archive, {}).streamToFile(destination);
}
};
