How to zip multiple files in Node.js?

Here is my code; can someone help me download the files as a zip folder?
exports.downloadAllFiles = function(req, res) {
    demodb.findOne({ guid: req.params.id }, function(err, data) {
        if (err) {
            console.log("Error in finding case....");
            res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
        } else {
            if (data) {
                // Here multiple files are contained in the data array,
                // so I need to download the files into a zip folder
            }
        }
    });
};

You can do this using ADM-ZIP:
const AdmZip = require('adm-zip');

const zipper = new AdmZip();
zipper.addLocalFile('1.csv');
zipper.addLocalFile('2.csv');
zipper.addLocalFile('3.csv');
zipper.writeZip('123.zip');

Here's a small example of adm-zip showing how to add files directly, from the local disk, and from a buffer:
// creating archives
var zip = new AdmZip();
// add file directly (Buffer.from replaces the deprecated new Buffer)
zip.addFile("test.txt", Buffer.from("inner content of the file"), "entry comment goes here");
// add local file
zip.addLocalFile("/home/me/some_picture.png");
// get everything as a buffer
var willSendthis = zip.toBuffer();
// or write everything to disk
zip.writeZip(/*target file name*/ "/home/me/files.zip");
In your case you can add the files in a for loop, iterating through the array and adding one file on each iteration.
exports.downloadAllFiles = function(req, res) {
    demodb.findOne({ guid: req.params.id }, function(err, data) {
        if (err) {
            console.log("Error in finding case....");
            res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
        } else {
            if (data) {
                // for loop goes here:
                for (var i = 0; i < data.length; i++) {
                    // add the files to zip
                }
            }
        }
    });
};
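Putting it together, here is a minimal sketch of the handler. It assumes each entry in the record's array is a path to a file on disk (the data.files property name is an assumption; adjust it to your schema), and the response header calls assume Express. It builds the zip in memory and sends it back as the download:
var AdmZip = require('adm-zip');

exports.downloadAllFiles = function(req, res) {
    demodb.findOne({ guid: req.params.id }, function(err, data) {
        if (err) {
            console.log("Error in finding case....");
            return res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
        }
        if (data) {
            var zip = new AdmZip();
            // assumption: data.files holds local file paths
            for (var i = 0; i < data.files.length; i++) {
                zip.addLocalFile(data.files[i]);
            }
            var buffer = zip.toBuffer(); // the whole archive as a Buffer
            res.set('Content-Type', 'application/zip');
            res.set('Content-Disposition', 'attachment; filename=files.zip');
            res.send(buffer);
        }
    });
};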

You can use this code to add multiple files to a zip:
var AdmZip = require('adm-zip');
var fs = require('fs-extra');

var zip = new AdmZip();
let arr = ['AKTU.pdf', 'test1.pdf'];
for (let i = 0; i < arr.length; i++) {
    zip.addLocalFile(`./createZip/${arr[i]}`); // local path
}
zip.writeZip("./files.zip");

Related

How to delete files with NodeJS? [duplicate]

I have two async functions:
One creates an object in PostgreSQL and uploads files whose names are stored in that object.
The other deletes the entity and should delete its files from the folder.
I don't know how to extract the filenames from the PostgreSQL entity and delete those files from my 'static' folder.
The entity in PostgreSQL looks like:
Car:
{
    'name': 'Nissan 350Z',
    'description': 'new',
    'image': '123wejdsefberfkj.jpg',
    'video': '23rusdjf8ioysdfs.mp4'
}
Create function:
Here I get the files from form-data, create unique names for them, save those names in PostgreSQL, then save the files in the "static" folder.
let videoName = uuid.v4() + '.mp4';
let imageName = uuid.v4() + '.jpg';
let { name, description } = req.body;
const { video, image } = req.files;
const car = await Car.create({ name, description, video: videoName, image: imageName })
    .then(() => video.mv(path.resolve(__dirname, '..', 'static', videoName)))
    .then(() => image.mv(path.resolve(__dirname, '..', 'static', imageName)));
Delete function:
Here I need to extract the file names from the database and delete the files from the folder.
A bit of pseudocode:
async delete(req, res) {
    try {
        const { id } = req.params;
        await Car.findOne({ where: { id } })
            .then(async data => {
                if (data) {
                    // pseudocode: somehow read the stored video name and remove the file
                    await let videoName = Car.#extract_video_name# ({ where: { id } })
                        .then(mv(path.delete(__dirname, '..', 'static', videoName)))
                    await Car.destroy({ where: { id } }).then(() => {
                        return res.json("Car deleted");
                    });
                } else {
                    return res.json("This Car doesn't exist in database");
                }
            });
    } catch (e) {
        console.error(e);
    }
}
You can use fs; require it with const fs = require('fs');
There is no npm package to install: it's built into Node.js.
This code saves your file into a directory, creating the folder first if it doesn't exist:
let path = String(`./directoryName/${fileName}`);
fs.mkdirSync('./directoryName', { recursive: true });
fs.writeFileSync(path, data);
And you can delete a file using:
fs.unlink(path, (err) => {
    if (err) throw err; // or handle the error the way you want to
    console.log('path/file.txt was deleted'); // runs once the file has been deleted
});
Reference: https://nodejs.org/api/fs.html
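Applied to the question's delete handler, a minimal sketch might look like this (assuming the Sequelize Car model above, that the row's video and image columns hold the stored file names, and Node's fs.promises API):
const fs = require('fs');
const path = require('path');

async function deleteCar(req, res) {
    try {
        const { id } = req.params;
        const data = await Car.findOne({ where: { id } });
        if (!data) {
            return res.json("This Car doesn't exist in database");
        }
        // the row itself already carries the file names
        await fs.promises.unlink(path.resolve(__dirname, '..', 'static', data.video));
        await fs.promises.unlink(path.resolve(__dirname, '..', 'static', data.image));
        await Car.destroy({ where: { id } });
        return res.json("Car deleted");
    } catch (e) {
        console.error(e);
    }
}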

Using the archiver module with downloadable online links

In a node application, I wish to download a zip file that contains pdfs downloaded from various urls on the internet (where if I type the url into a browser, it just directs me to download a pdf). I've been using the archiver module which is documented on github at https://github.com/archiverjs/node-archiver, and the official documentation is at https://www.archiverjs.com/.
I'm stuck at the part where it gives the following examples for adding files to the zip file.
// append a file from stream
var file1 = __dirname + '/file1.txt';
archive.append(fs.createReadStream(file1), { name: 'file1.txt' });
// append a file from string
archive.append('string cheese!', { name: 'file2.txt' });
// append a file from buffer
var buffer3 = Buffer.from('buff it!');
archive.append(buffer3, { name: 'file3.txt' });
// append a file
archive.file('file1.txt', { name: 'file4.txt' });
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
// append files from a sub-directory, putting its contents at the root of archive
archive.directory('subdir/', false);
// append files from a glob pattern
archive.glob('subdir/*.txt');
Unfortunately, it seems just pasting urls into the first parameter of .append or .directory doesn't work - would anyone know how I can add downloadable files (that are online) into the zip file?
Sure: download the PDFs first using download-pdf, with something like this:
var download = require('download-pdf');
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
    gzip: true,
    zlib: { level: 9 } // sets the compression level
});
archive.on('error', function (err) {
    throw err;
});
// pipe archive data to the output file
archive.pipe(output);

var pdf = "http://www.consejoconsultivoemt.cl/wp-content/uploads/2018/12/Presentaci%C3%B3n-Lineamientos-Estrat%C3%A9gicos-de-Corfo.pdf";
var pdf2 = "https://www.biobiochile.cl/static/tarifas.pdf";
var options = {
    directory: "./files/",
    filename: "first.pdf"
};
var options2 = {
    directory: "./files/",
    filename: "second.pdf"
};

// only archive once both downloads have finished, otherwise the
// files may not yet exist on disk when archive.file() reads them
var pending = 2;
function addToArchive() {
    if (--pending === 0) {
        archive.file('./files/first.pdf', { name: 'first.pdf' });
        archive.file('./files/second.pdf', { name: 'second.pdf' });
        archive.finalize();
    }
}
download(pdf, options, function (err) {
    if (err) throw err;
    console.log("meow");
    addToArchive();
});
download(pdf2, options2, function (err) {
    if (err) throw err;
    console.log("meow2");
    addToArchive();
});
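As an alternative that avoids temp files entirely, archiver accepts any readable stream as an entry source, so you can append the HTTP response stream directly. A sketch, assuming the URL is a plain https download (pdfUrl is a hypothetical variable):
var https = require('https');
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', { zlib: { level: 9 } });
archive.on('error', function (err) { throw err; });
archive.pipe(output);

https.get(pdfUrl, function (response) {
    // the response is a readable stream; archiver consumes it as the entry body
    archive.append(response, { name: 'first.pdf' });
    archive.finalize(); // call once, after the last entry has been appended
});
With several URLs you would append each response stream and only call finalize() after the last one has been queued.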

Gulp: Abnormal behavior of program

I'm new to Gulp and I'm having a problem with it. Here is what I want done:
- Look up a file that has a .storyboard extension (this is already DONE).
- Perform a task whenever that file's content changes.
- Watch that file, and when something in it changes, rewrite its content, removing everything that was already in the file.
When I make changes in the .storyboard file, it just keeps displaying the done / "The file has been saved!" messages.
Here is my Code:
// fs to read and write files, path for iterating directories
var fs = require('fs'),
    path = require('path');
// DOMParser to parse XML
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
// Gulp for detecting changes
var gulp = require('gulp');

var mainStoryBoardFile;
function crawl(dir) {
    // console.log('[+]', dir);
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        // iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            // if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            // if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        }
    }
}
// calling the function
crawl(__dirname);

var newFilePath = './data.xml';
var document;
var dataFound;
// What to do
gulp.task('read', function (done) {
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(dataFound.toString());
    done();
});
gulp.task('write', function (done) {
    fs.writeFile(mainStoryBoardFile, '', function () { console.log('done') });
    fs.writeFile(mainStoryBoardFile, document, (err) => {
        if (err) throw err;
        console.log('The file has been saved!');
    });
    done();
});
gulp.task('watch', function (done) {
    gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to this problem. You can watch a single file for changes and run some function whenever it changes; in the XML case, when the file changes you can add new properties or attributes, or create new elements in the XML file.
// Dependencies
// fs to read and write files, path for iterating directories
var fs = require('fs'),
    path = require('path'),
    DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
    gulp = require('gulp'),
    arrayOfControls = require('./object.json'),
    RandExp = require('randexp');

console.log("GulpService has been Started\n");

function crawl(dir) {
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        // iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            // if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            // if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        }
    }
}
// calling the function
crawl(__dirname);

var mainStoryBoardFile;
var document, dataFound;
function readWrite() {
    crawl(__dirname);
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(dataFound.toString());
    // synchronous writes (writeFileSync takes no callback), so the
    // truncate and the rewrite cannot race each other
    fs.writeFileSync(mainStoryBoardFile, '');
    fs.writeFileSync(mainStoryBoardFile, document.toString());
    console.log('The file has been saved!');
}

var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (path, stats) {
    readWrite();
    console.log('File ' + path + ' was changed');
    // re-arm the watcher so our own write does not retrigger it
    watcher.unwatch(mainStoryBoardFile);
    watcher.add(mainStoryBoardFile);
});

Problems when uploading images and showing them in Angular and Node using GraphicsMagick

I use this code to upload images in Node:
req.file('image[]')
    .upload({
        maxBytes: 5000000, // file size limit (in bytes)
        dirname: path.resolve(sails.config.appPath, 'assets/images/user') // path to copy the files to
    }, function whenDone(err, uploadedFiles) {
        if (err) {
            // here I must delete the created user before the error
            console.log(err);
        }
        var image_real_name = '';
        var json = [];
        for (var i = 0; i < uploadedFiles.length; i++) {
            image_real_name = 'images/user/' + path.basename(uploadedFiles[i].fd);
            json.push(image_real_name);
        }
        res(json);
    });
but I needed to compress the images to save space on the server, so I used gm (GraphicsMagick):
var receiver = new Writable({ objectMode: true });
receiver._write = function(file, enc, cb) {
    // the output stream to pipe to
    var output = require('fs').createWriteStream('assets/images/user/' + file.fd);
    gm(file).resize('200', '200').stream().pipe(output);
    cb();
};
req.file('image[]').upload(receiver, function(err, files) {
    if (err) {
        // here I must delete the created user before the error
        console.log(err);
    }
    var image_real_name = '';
    var json = [];
    for (var i = 0; i < files.length; i++) {
        image_real_name = 'images/user/' + path.basename(files[i].fd);
        json.push(image_real_name);
    }
    res(json);
});
I am using Angular, and when the image URL changes via binding, the new image should be shown, but it does not load; with the first code everything worked, but using gm it no longer does. I have to reload the page for the image to be displayed.
Update:
Apparently the image has not been fully processed yet; if a setTimeout is added, the image is displayed correctly:
setTimeout((dataService: DataService, image: any) => {
    dataService.getUser().image = image;
}, 1000, this.data, dataI.json().url);
Any idea?
The writable stream emits a finish event once the file has been fully written; I solved the problem by deferring the callback until then:
output.on("finish", function() { return cb(); });

Issue with extracting tar.gz files using the tar.gz npm module

I have used this package to recursively untar a given tar.gz file and move the contents to a destination. The program is given below:
var targz = require("tar.gz");
var fs = require("fs");
var s = "/home/user/test.tar.gz";
var d = "/home/user/destination";
var parse;
var readStream;

function extractTar(source, destination) {
    targz().extract(source, destination).then(function() {
        console.log("extracted " + source);
        readStream = fs.createReadStream(source);
        parse = targz().createParseStream();
        readStream.pipe(parse);
        parse.on('entry', function(entry) {
            var stringpath = destination + entry.path;
            if ((entry.path.length > 6) && entry.path.substr(entry.path.length - 6) === "tar.gz") {
                console.log(stringpath, destination);
                extractTar(stringpath, destination);
            } else {
                var filenameStartsAt = stringpath.lastIndexOf("/");
                filename = stringpath.substr(filenameStartsAt + 1);
                if (filename.length) {
                    console.log("====" + filename);
                    fs.rename(stringpath, destination + filename, function(err) {
                        if (err) {
                            console.log(err);
                        }
                    });
                }
            }
        });
    });
}
extractTar(s, d);
This works fine if a folder named "destination" already exists in my directory. If it is not present, the "destination" folder is created, but it does not contain all the extracted files; sometimes there is not even one file in it. Can anyone tell me what I am doing wrong here, or a better way to do this task?
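One thing worth checking (an assumption based on the symptom, not a confirmed fix): since it works when the folder already exists, create the destination directory yourself before the first extraction starts, so the library never has to create it mid-stream:
// ensure the destination exists before extracting into it
if (!fs.existsSync(d)) {
    fs.mkdirSync(d, { recursive: true });
}
extractTar(s, d);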
