How do I password protect a zip file in Node.js?

I am creating a zip file using archiver; below is my code. I need to password protect it. How can I do that?
var head = { 'Content-Type': 'application/octet-stream', 'Content-disposition': 'attachment; filename=' + zipName, 'Transfer-Encoding': 'chunked' };
res.writeHead(200,head);
var archive = archiver('zip');
archive.pipe(res);
archive.append(result, { name: attachment.aliasFileName });
archive.finalize();
return res.send("thanks");

If you are working on Linux you can do something like this:
// create a password-protected zip with the system zip binary
var spawn = require('child_process').spawn;
var zip = spawn('zip', ['-P', 'password', 'archive.zip', 'complete path to archive file']);
zip.on('exit', function (code) {
  // Do something with the zip file archive.zip,
  // which will be in the same location as the file/folder given
});
Refer to https://nodejs.org/api/child_process.html
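If the goal is still to send the protected archive back to the HTTP client as in the question, a minimal sketch (assuming the zip CLI is available; archive.zip, 'password', and sendProtectedZip are placeholders, not names from the original post) is to wait for the child process to exit and then stream the resulting file onto the response:
var spawn = require('child_process').spawn;
var fs = require('fs');
function sendProtectedZip(res, sourcePath) {
  // write a temporary password-protected archive, then stream it to the client
  var zip = spawn('zip', ['-P', 'password', 'archive.zip', sourcePath]);
  zip.on('exit', function (code) {
    if (code !== 0) {
      res.statusCode = 500;
      return res.end('zip failed');
    }
    res.writeHead(200, {
      'Content-Type': 'application/octet-stream',
      'Content-disposition': 'attachment; filename=archive.zip'
    });
    fs.createReadStream('archive.zip').pipe(res);
  });
}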

Related

Unzip a MacOS .app file in Electron using Node.js

I am trying to unzip a file called Restart.Manager.zip which contains a single item, Restart Manager.app. This code seems to unzip the file correctly but upon launching the outputted .app folder, I get an error "The application “Restart Manager” can’t be opened."
const JSZip = require('jszip');
const fs = require('fs');
const jetpack = require('fs-jetpack');
const originalFs = require('original-fs');
async function extractZip(filePath, destination) {
  fs.readFile(filePath, function (err, data) {
    if (!err) {
      var zip = new JSZip();
      zip.loadAsync(data).then(function (contents) {
        Object.keys(contents.files).forEach(function (filename) {
          const file = zip.file(filename);
          if (file) {
            file.async('nodebuffer').then(function (content) {
              var dest = destination + '/' + filename;
              if (filename.endsWith('.asar')) {
                originalFs.writeFileSync(dest, content);
              } else {
                jetpack.write(dest, content);
              }
            });
          }
        });
      });
    }
  });
}
extractZip('/Users/me/Desktop/Restart.Manager.zip', '/Users/me/Desktop')
Manually unzipping the .zip file creates a working .app so I'm not sure where the code is messing up.
Here is the file on GitHub releases for testing: https://github.com/itw-creative-works/restart-manager-download-server/releases/download/installer/Restart.Manager.zip, but feel free to use your own zipped .app file (although it should probably be an Electron app, in which case you can find one here: https://www.electronjs.org/apps).
I have tried zipping things like a .png and it unzips fine, which makes me think it is having problems with .app files, or possibly with the fact that the .app contains a .asar file, which Electron supposedly has problems handling when it comes to the fs module: https://github.com/electron/electron/issues/1658
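One possible explanation, not from the original thread: a .app bundle relies on executable bits and symlinks that are not restored when each entry is simply written out as a buffer, so macOS may refuse to launch the result even though every file is present. A minimal sketch of a workaround, assuming the system unzip binary is available (it preserves modes and symlinks); extractWithSystemUnzip is a hypothetical helper name:
const { spawn } = require('child_process');
function extractWithSystemUnzip(filePath, destination, callback) {
  // -o: overwrite existing files without prompting; -d: extraction directory
  const unzip = spawn('unzip', ['-o', filePath, '-d', destination]);
  unzip.on('exit', function (code) {
    callback(code === 0 ? null : new Error('unzip exited with code ' + code));
  });
}
extractWithSystemUnzip('/Users/me/Desktop/Restart.Manager.zip', '/Users/me/Desktop', function (err) {
  if (err) console.error(err);
});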

How can you archive with tar in NodeJS while only storing the subdirectory you want?

Basically I want to do the equivalent of "How to strip path while archiving with TAR", but with the tar commands imported into NodeJS, so currently I'm doing this:
const fs = require('fs');
const zlib = require('zlib');
const tar = require('tar');
const { Readable, pipeline } = require('stream');

const gzip = zlib.createGzip();
const pack = new tar.Pack({ prefix: "" });
const source = Readable.from('public/images/');
const destination = fs.createWriteStream('public/archive.tar.gz');
pipeline(source, pack, gzip, destination, (err) => {
  if (err) {
    console.error('An error occurred:', err);
    process.exitCode = 1;
  }
});
But doing so leaves me with files like: "/public/images/a.png" and "public/images/b.png", when what I want is files like "/a.png" and "/b.png". I want to know how I can add to this process to strip out the unneeded directories, while keeping the files where they are.
You need to change the working directory:
// cwd: The current working directory for creating the archive. Defaults to process.cwd().
new tar.Pack({ cwd: "./public/images" });
const source = Readable.from('');
Source: the node-tar documentation
Example: https://github.com/npm/node-tar/blob/main/test/pack.js#L93
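For a fuller picture, here is a minimal sketch using node-tar's high-level tar.create instead of the Pack class (this is an adaptation, not the answer's exact code): with cwd pointed at public/images, entry names are stored relative to that folder, so the archive holds a.png rather than public/images/a.png.
const tar = require('tar');
tar.create(
  {
    gzip: true,                    // gzip the tarball
    cwd: 'public/images',          // entry paths are stored relative to cwd
    file: 'public/archive.tar.gz'  // write the result to this file
  },
  ['.']                            // archive everything inside cwd
).then(() => console.log('archive written'));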

how to create a zip file in node given multiple downloadable links

I have a node application that contains several downloadable links (when you click on the link a pdf file is downloaded), and these links are dynamically created/populated. I want to implement a feature where we can somehow download all files from these links in one go. I presume for this I will somehow need to create a zip file from all these links - would anyone know how to go about this?
You could use the fs and archiver modules:
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
  gzip: true,
  zlib: { level: 9 } // Sets the compression level.
});
archive.on('error', function (err) {
  throw err;
});
// pipe archive data to the output file
archive.pipe(output);
// append files
archive.file('/path/to/file0.txt', { name: 'file0-or-change-this-whatever.txt' });
archive.file('/path/to/README.md', { name: 'foobar.md' });
// finalize the archive once everything has been appended
archive.finalize();
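Since the question is about links rather than local files, here is a minimal sketch of the same idea that fetches each URL and appends the HTTP response stream to the archive, so nothing has to be saved to disk first (the URLs, file names, and output path are placeholders, not from the original post):
const https = require('https');
const fs = require('fs');
const archiver = require('archiver');

const links = [
  { url: 'https://example.com/a.pdf', name: 'a.pdf' },
  { url: 'https://example.com/b.pdf', name: 'b.pdf' }
];

const output = fs.createWriteStream('./all-files.zip');
const archive = archiver('zip', { zlib: { level: 9 } });
archive.on('error', function (err) { throw err; });
archive.pipe(output);

let pending = links.length;
links.forEach(function (link) {
  https.get(link.url, function (res) {
    // archiver accepts readable streams, so the response can be appended directly
    archive.append(res, { name: link.name });
    if (--pending === 0) archive.finalize(); // finalize after the last link is queued
  });
});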

How do you get a list of the names of all files present in a web server directory using Node.js?

When using fs.readdir it gives me the file names present in the given path, but how can I get the file names stored at a specific path on a web server?
I believe you are using this function
fs.readdir('../', function (err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data); // successful response
});
To access the root (or C: drive), use /.
To access the current directory, use ./.
To access the parent directory, use ../.
To access the parent of the parent directory, use ../../.
To access a directory in the parent directory, use ../sibling_name.
Now you can navigate through directories; navigate to the one you want and list the files and folders it contains. I think this will help you.
const fs = require('fs');
const path = require('path');
function getFile(dirPath) {
  const files = fs.readdirSync(dirPath);
  files.forEach(function (item) {
    const currentPath = path.join(dirPath, item),
      isFile = fs.statSync(currentPath).isFile(),
      isDir = fs.statSync(currentPath).isDirectory();
    if (isFile) {
      console.log(currentPath); // log file paths
    } else if (isDir) {
      console.log(currentPath); // log directory paths
      getFile(currentPath); // recurse into subdirectories
    }
  });
}
getFile('./'); // this is your server path
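If "on a web server" means the listing has to be reachable over HTTP, a small sketch (assuming Express, which is not mentioned in the original question; the route and query parameter are placeholders) could expose the same fs.readdir result as a JSON endpoint:
const express = require('express');
const fs = require('fs');

const app = express();

// GET /files?dir=./uploads -> JSON array of the names in that directory
app.get('/files', function (req, res) {
  const dir = req.query.dir || './';
  fs.readdir(dir, function (err, names) {
    if (err) return res.status(500).json({ error: err.message });
    res.json(names);
  });
});

app.listen(3000);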

Compress an uncompressed xlsx file using node.js (Electron)

I have an unzipped xlsx file; in it I edit some files in order to generate a new xlsx file containing new data.
On Linux, to recompress the files back into an xlsx I just need to go into the terminal and type
find . -type f | xargs zip ../newfile.xlsx
in the folder where the xlsx files are.
The question now is: how can I do this using node.js?
The solution is to compress a direct list of the files contained in the xlsx; for some reason, if we try to compress the folder, the resulting file is corrupted.
The code looks like this if you use JSZip:
var fs = require('fs');
var JSZip = require("jszip");
var zip = new JSZip();
var file = [];
file.push("_rels/.rels");
file.push("docProps/core.xml");
file.push("docProps/app.xml");
file.push("docProps/custom.xml");
file.push("[Content_Types].xml");
file.push("xl/_rels/workbook.xml.rels");
file.push("xl/styles.xml");
file.push("xl/pivotTables/_rels/pivotTable3.xml.rels");
file.push("xl/pivotTables/_rels/pivotTable1.xml.rels");
file.push("xl/pivotTables/_rels/pivotTable2.xml.rels");
file.push("xl/pivotTables/pivotTable3.xml");
file.push("xl/pivotTables/pivotTable1.xml");
file.push("xl/pivotTables/pivotTable2.xml");
file.push("xl/workbook.xml");
file.push("xl/worksheets/_rels/sheet2.xml.rels");
file.push("xl/worksheets/_rels/sheet1.xml.rels");
file.push("xl/worksheets/_rels/sheet3.xml.rels");
file.push("xl/worksheets/sheet4.xml");
file.push("xl/worksheets/sheet1.xml");
file.push("xl/worksheets/sheet3.xml");
file.push("xl/worksheets/sheet2.xml");
file.push("xl/sharedStrings.xml");
file.push("xl/pivotCache/_rels/pivotCacheDefinition1.xml.rels");
file.push("xl/pivotCache/pivotCacheDefinition1.xml");
file.push("xl/pivotCache/pivotCacheRecords1.xml");
for (var i = 0; i < file.length; i++) {
  zip.file(file[i], fs.readFileSync("/home/user/xlsx_FILES/" + file[i]));
}
zip.generateAsync({ type: "blob" }).then(function (content) {
  // see FileSaver.js
  saveAs(content, "yourfile.xlsx");
});
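The blob/saveAs combination relies on FileSaver.js in a browser or Electron renderer context. If the code runs in plain Node.js instead, a small sketch of the same idea is to generate a Buffer and write it with fs (the output path here is a placeholder):
// Node-only variant: generate a Buffer instead of a Blob and write it to disk.
zip.generateAsync({ type: "nodebuffer" }).then(function (content) {
  fs.writeFileSync("/home/user/yourfile.xlsx", content);
});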
Take a look at archiver, a compression library for Node.js. Its documentation looks comprehensive, and the library also lets you append to archives and take advantage of streaming APIs for appending to and creating new archives.
Here is an example snippet from their docs which shows how to use the library.
// require modules
var fs = require('fs');
var archiver = require('archiver');
// create a file to stream archive data to.
var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
  store: true // Sets the compression method to STORE.
});
// listen for all archive data to be written
output.on('close', function () {
  console.log(archive.pointer() + ' total bytes');
  console.log('archiver has been finalized and the output file descriptor has closed.');
});
// good practice to catch this error explicitly
archive.on('error', function (err) {
  throw err;
});
// pipe archive data to the file
archive.pipe(output);
// append a file and finalize the archive (the full docs example appends several entries)
archive.file('file1.txt', { name: 'file1.txt' });
archive.finalize();
