I have an unzipped xlsx file; I edit some of the files inside it in order to generate a new xlsx file containing new data.
On Linux, to recompress everything into an xlsx, I just cd into the folder holding the unzipped xlsx contents and type
find . -type f | xargs zip ../newfile.xlsx
in the terminal.
The question now is: how can I do this using Node.js?
The solution is to compress an explicit list of the files contained in the xlsx; for some reason, if we try to compress the folder itself, the resulting file comes out corrupted (most likely because the entries get prefixed with the folder name, so Excel no longer finds [Content_Types].xml and the other parts at the archive root).
The code looks like this if you use JSZip:
var fs = require('fs');
var JSZip = require("jszip");

var zip = new JSZip();

// every file that makes up the workbook, relative to the unzipped folder
var files = [
  "_rels/.rels",
  "docProps/core.xml",
  "docProps/app.xml",
  "docProps/custom.xml",
  "[Content_Types].xml",
  "xl/_rels/workbook.xml.rels",
  "xl/styles.xml",
  "xl/pivotTables/_rels/pivotTable3.xml.rels",
  "xl/pivotTables/_rels/pivotTable1.xml.rels",
  "xl/pivotTables/_rels/pivotTable2.xml.rels",
  "xl/pivotTables/pivotTable3.xml",
  "xl/pivotTables/pivotTable1.xml",
  "xl/pivotTables/pivotTable2.xml",
  "xl/workbook.xml",
  "xl/worksheets/_rels/sheet2.xml.rels",
  "xl/worksheets/_rels/sheet1.xml.rels",
  "xl/worksheets/_rels/sheet3.xml.rels",
  "xl/worksheets/sheet4.xml",
  "xl/worksheets/sheet1.xml",
  "xl/worksheets/sheet3.xml",
  "xl/worksheets/sheet2.xml",
  "xl/sharedStrings.xml",
  "xl/pivotCache/_rels/pivotCacheDefinition1.xml.rels",
  "xl/pivotCache/pivotCacheDefinition1.xml",
  "xl/pivotCache/pivotCacheRecords1.xml"
];

files.forEach(function (name) {
  zip.file(name, fs.readFileSync("/home/user/xlsx_FILES/" + name));
});

// in Node.js, generate a Buffer and write it to disk; "blob" and
// FileSaver's saveAs only exist in the browser
zip.generateAsync({type: "nodebuffer"}).then(function (content) {
  fs.writeFileSync("yourfile.xlsx", content);
});
Take a look at archiver, a compression library for Node.js. The library's docs look comprehensive. It also lets you append to archives and take advantage of streaming APIs for appending to and creating new archives.
Here is an example, adapted from their docs, that shows how to use the library.
// require modules
var fs = require('fs');
var archiver = require('archiver');

// create a file to stream archive data to
var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
  store: true // sets the compression method to STORE
});

// listen for all archive data to be written
output.on('close', function() {
  console.log(archive.pointer() + ' total bytes');
  console.log('archiver has been finalized and the output file descriptor has closed.');
});

// good practice to catch this error explicitly
archive.on('error', function(err) {
  throw err;
});

// pipe archive data to the file
archive.pipe(output);

// append the unzipped xlsx contents; passing false as the second argument
// keeps the entries at the archive root rather than under a folder prefix
archive.directory('/home/user/xlsx_FILES/', false);

// finalize the archive; without this the 'close' event above never fires
archive.finalize();
I have an array of files that I have to pack into a gzipped tar archive and send through the HTTP response on the fly. That means I can't hold the whole archive in memory, yet I have to pipe the files into tar.entry one at a time or everything is going to break.
const fs = require('fs');
const tar = require('tar-stream'); // lib for tar stream
const { createGzip } = require('zlib'); // lib for gzip stream

// large list of huge files
const files = [ 'file1', 'file2', 'file3', ..., 'file99999' ];
...
// http request handler:
const pack = tar.pack(); // tar stream, creates the .tar
const gzipStream = createGzip(); // gzip stream so we can reduce the size

// pipe archive data through the gzip stream
// and send it to the client on the fly
pack.pipe(gzipStream).pipe(response);

// the issue comes here, when I need to pass multiple files to pack.entry
files.forEach(name => {
  const src = fs.createReadStream(name); // create stream from file
  const size = fs.statSync(name).size; // determine its size
  const entry = pack.entry({ name, size }); // create tar entry
  // and this ruins everything, because if two different streams
  // write something into the entry at once, it fails and throws an error
  src.pipe(entry);
});
Basically I need the pipe to finish sending its data (something like await src.pipe(entry);), but pipes in Node.js don't work that way. So is there any way I could get around it?
Never mind: the fix is simply not to use forEach here. Write the entries sequentially in an async loop and wait for each pipe to finish before opening the next one, as in the sketch below.
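A minimal sketch of the sequential version, assuming a Node version that ships stream/promises (v15+); packSequentially and its destination argument are illustrative names, not part of tar-stream:
const fs = require('fs');
const tar = require('tar-stream');
const { pipeline } = require('stream/promises');

// write entries one at a time so only one stream ever writes to the pack
async function packSequentially(files, destination) {
  const pack = tar.pack();
  pack.pipe(destination); // e.g. pack.pipe(gzipStream).pipe(response)
  for (const name of files) {
    const size = fs.statSync(name).size;
    const entry = pack.entry({ name, size });
    // pipeline resolves only once the file has fully drained into the entry
    await pipeline(fs.createReadStream(name), entry);
  }
  pack.finalize();
}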
I have a Node application that contains several downloadable links (clicking a link downloads a pdf file), and these links are created/populated dynamically. I want to implement a feature to download the files from all of these links in one go. I presume for this I will somehow need to create a zip file from all these links; would anyone know how to go about this?
You could use the fs and archiver modules:
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
  zlib: { level: 9 } // sets the compression level
});

archive.on('error', function(err) {
  throw err;
});

// pipe archive data to the output file
archive.pipe(output);

// append files
archive.file('/path/to/file0.txt', {name: 'file0-or-change-this-whatever.txt'});
archive.file('/path/to/README.md', {name: 'foobar.md'});

// finalize the archive; no more files can be appended afterwards
archive.finalize();
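Since the links point at remote files, you don't have to save each PDF to disk first: archive.append also accepts a readable stream, so (as a rough sketch, with placeholder URLs and entry names) you can pipe each HTTP response straight into the zip:
var fs = require('fs');
var https = require('https');
var archiver = require('archiver');

var output = fs.createWriteStream('./all-pdfs.zip');
var archive = archiver('zip', { zlib: { level: 9 } });
archive.on('error', function(err) { throw err; });
archive.pipe(output);

// placeholder list; in the real app this would come from the
// dynamically populated links
var links = [
  { url: 'https://example.com/a.pdf', name: 'a.pdf' },
  { url: 'https://example.com/b.pdf', name: 'b.pdf' }
];

var pending = links.length;
links.forEach(function(link) {
  https.get(link.url, function(res) {
    archive.append(res, { name: link.name }); // stream the response into the zip
    // finalize only after the last response has been appended; archiver
    // drains any queued entry streams before closing the archive
    if (--pending === 0) archive.finalize();
  });
});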
I need to be able to tar a directory, and send this to a remote endpoint via HTTP PUT.
I could of course create the tar, save it to disk, then read it again and send it.
But I'd rather create the tar, then pipe it into some buffer and send it immediately. I haven't been able to achieve this.
Code so far:
var tar = require('tar');
var fs = require("fs");

var path = "/home/me/uploaddir";

function getTar(path, cb) {
  var buf = new Buffer('');
  // fs.createWriteStream expects a file path, not a Buffer;
  // this is what throws the TypeError below
  var wbuf = fs.createWriteStream(buf);
  wbuf.on("finish", function() {
    cb(buf);
  });
  tar.c({file: ""}, [path]).pipe(wbuf);
}

getTar(path, function(tar) {
  // send the tar over http
});
This code results in:
fs.js:575
binding.open(pathModule._makeLong(path),
^
TypeError: path must be a string
at TypeError (native)
at Object.fs.open (fs.js:575:11)
I've also tried using an array as buffer, no joy.
The following solution creates the tar, then pipes it to a buffer and sends it immediately, and with great speed, thanks to the tar-fs library.
First install the libraries: request, for simplified requests, and tar-fs, which provides filesystem bindings for tar-stream: npm i -S tar-fs request
var tar = require('tar-fs')
var request = require('request')
var fs = require('fs')

// pack specific files in the directory
function packTar (folderName, pathsArr) {
  return tar.pack(folderName, {
    entries: pathsArr
  })
}

// return put stream
function makePutReq (url) {
  return request.put(url)
}

packTar('./testFolder', ['test.txt', 'test1.txt'])
  .pipe(makePutReq('https://www.example.com/put'))
I have given the functions deliberately verbose names.
I have a file named mytext.txt and I'd like to compress it to archive.rar. How can I do this in Node.js? I've found nothing for rar, only zip.
Find a rar command-line utility that you can execute, for example
$ rar a compressed.rar myfile.dat
Node.js can make command-line calls (see child_process.exec).
Pass the normal command to the exec function and it should get the job done, as in the sketch below.
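A minimal sketch, assuming the rar binary is installed and on your PATH (rar's a subcommand adds files to an archive):
const { exec } = require('child_process');

// `rar a <archive> <files...>` adds mytext.txt to archive.rar
exec('rar a archive.rar mytext.txt', (err, stdout, stderr) => {
  if (err) {
    console.error('rar failed:', stderr);
    return;
  }
  console.log('archive.rar created');
});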
For compressing a single file, the zlib module can be very useful (note that it produces a gzip .gz file, not a rar archive).
(function () {
  'use strict';
  var zlib = require('zlib');
  var fs = require('fs');

  var gzip = zlib.createGzip();
  var inp = fs.createReadStream('mytext.txt');
  var out = fs.createWriteStream('mytext.txt.gz');

  inp.pipe(gzip).pipe(out);
}());
Unfortunately, Node.js doesn't natively support rar compression/decompression. I was frustrated with this too, so I created a module called "super-winrar", which makes it super easy to deal with rar files in Node.js :)
check it out: https://github.com/KiyotakaAyanokojiDev/super-winrar
Example creating a file "archive.rar" and appending the "mytext.txt" file:
const Rar = require('super-winrar');

// create a Rar handle for the file path (created if it doesn't exist)
const rar = new Rar('archive.rar');

// handle errors, otherwise an exception will be thrown
rar.on('error', err => console.log(err.message));

rar.once('ready', async () => {
  await rar.append(['mytext.txt']);
});
I'm attempting to download a file using the http module in Node. While the file seems to download successfully, the resulting file cannot be opened using gzip. I've tried downloading the file through other methods, and that works, and I've tried multiple ways to open the resulting gzipped file, but all of them produce the same error.
I did attempt to use the request module, but there seemed to be no way of accessing the returned HTTP headers before the file finished downloading, which I need because I'd like to offer some sort of visual indication of how long the download is going to take.
This is (roughly) the code that I've got so far.
var http = require('http');
var fs = require('fs');

var progress = 0;

var downloadFile = function() {
  http.get(FILE_URL, function(response) {
    var maxBytes = parseInt(response.headers['content-length'], 10);
    var dumpFile = fs.createWriteStream(FILENAME + '.dl');

    response.pipe(dumpFile);
    response
      .on('data', function(chunk) {
        progress += chunk.length;
        // progressbar-type code here, e.g. something like:
        // console.log(Math.round(100 * progress / maxBytes) + '%');
      })
      .on('end', function() {
        // pass
      });

    dumpFile.on('finish', function() {
      dumpFile.close();
      fs.rename(FILENAME + '.dl', FILENAME, function(err) {
        if (err) throw err;
      });
    });
  });
};
So my question: How would you advise I download a file, bearing in mind it's a large file and I need some sort of visual indicator for download progress? Should I give up on http? Or am I doing something monumentally stupid?
Thanks!