Unable to unzip the zipped files using zlib module - node.js

I am trying to unzip files from zipped data using Node's built-in zlib module, but for some reason I am not able to unzip it. I am getting the following error:
Error: incorrect header check
test.js:53
No debug adapter, can not send 'variables'
The code I am trying is as follows:
var zlib = require('zlib');
var fs = require('fs');

var filename = './Divvy_Trips_2019_Q2.zip';
var str1 = fs.createReadStream(filename);
var gzip = zlib.createGunzip();

str1.pipe(gzip).on('data', function (data) {
    console.log(data.toString());
}).on('error', function (err) {
    console.log(err);
});
The URL to the zipped data is as follows: Divvy_Trips_2019_Q2.zip

GZip (.gz) and ZIP (.zip) are different formats. You need a library that handles ZIP files, like yauzl.
// adapted from https://github.com/thejoshwolfe/yauzl/blob/master/examples/dump.js
const yauzl = require("yauzl");

const path = "./Divvy_Trips_2019_Q2.zip";

yauzl.open(path, function (err, zipfile) {
    if (err) throw err;
    zipfile.on("error", function (err) {
        throw err;
    });
    zipfile.on("entry", function (entry) {
        console.log(entry);
        console.log(entry.getLastModDate());
        // skip directory entries (their names end with "/")
        if (/\/$/.test(entry.fileName)) return;
        zipfile.openReadStream(entry, function (err, readStream) {
            if (err) throw err;
            readStream.pipe(process.stdout);
        });
    });
});
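If you want to write each entry to disk instead of dumping it to stdout, a sketch along these lines should work. It uses yauzl's lazyEntries option and readEntry() from its README; the output directory name and the flattening of entry paths with path.basename are just assumptions for illustration.
const yauzl = require("yauzl");
const fs = require("fs");
const path = require("path");

const outDir = "./extracted"; // hypothetical output directory
fs.mkdirSync(outDir, { recursive: true });

yauzl.open("./Divvy_Trips_2019_Q2.zip", { lazyEntries: true }, function (err, zipfile) {
    if (err) throw err;
    zipfile.readEntry(); // ask for the first entry
    zipfile.on("entry", function (entry) {
        if (/\/$/.test(entry.fileName)) {
            // directory entry: nothing to extract, move on
            zipfile.readEntry();
            return;
        }
        zipfile.openReadStream(entry, function (err, readStream) {
            if (err) throw err;
            // flatten nested paths for simplicity; adjust if you need the full hierarchy
            const dest = fs.createWriteStream(path.join(outDir, path.basename(entry.fileName)));
            readStream.pipe(dest).on("finish", () => zipfile.readEntry());
        });
    });
    zipfile.on("end", () => console.log("done"));
});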


Is it possible to zip data as we write the data in nodejs?

I am able to append data (row is the data I will receive) to a file like this:
fs.appendFile('./file.txt', JSON.stringify(row) + '\n', (err) => {
    if (err) throw err;
});
But how can I "append" the data while zipping it at the same time? I am unsure if this is possible; if it is, any pointers would be extremely helpful.
Can I achieve it through piping? If yes, how?
zip.appendData('./file.zip', JSON.stringify(row) + '\n', (err) => {
    if (err) throw err;
});
Something like the above?
I don't know if it's possible to append to a .zip archive without rewriting it.
If a .gz file is acceptable, you can use the built-in zlib module and append compressed chunks directly to the .gz file:
const fs = require('fs');
const zlib = require('zlib');

zlib.gzip(JSON.stringify(row), (err, data) => {
    if (err) throw err;
    fs.appendFile('file.txt.gz', data, err => {
        if (err) throw err;
    });
});
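This works because each appended chunk is a complete gzip member, and as far as I know Node's zlib.createGunzip() will decompress multiple concatenated members in one pass. A minimal sketch for reading the file back, assuming it was built as above:
const fs = require('fs');
const zlib = require('zlib');

// Stream the multi-member .gz file through gunzip and print the decompressed data
fs.createReadStream('file.txt.gz')
    .pipe(zlib.createGunzip())
    .on('data', chunk => process.stdout.write(chunk))
    .on('error', err => console.error(err));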
I'm not sure this is going to work, but maybe it helps you.
You need the zlib module, which is built into Node, so there is nothing to npm install.
Code:
const fs = require('fs');
const zlib = require('zlib');

function WriteAndZip(filename, row) {
    return new Promise(function (resolve, reject) {
        // Append first, then gzip the whole file once the append has finished
        fs.appendFile(`./your_path/${filename}`, JSON.stringify(row) + '\n', (err) => {
            if (err) return reject(err);
            const fileContents = fs.createReadStream(`./your_path/${filename}`);
            const writeStream = fs.createWriteStream(`./your_path/${filename}.gz`);
            const zip = zlib.createGzip();
            fileContents.pipe(zip).pipe(writeStream)
                .on('finish', () => resolve())
                .on('error', (err) => reject(err));
        });
    });
}
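Usage might then look like this (the path and row values are placeholders). Note that, unlike the zlib.gzip + appendFile approach above, this rewrites the whole .gz file on every call:
// hypothetical example values
const row = { id: 1, name: 'example' };

WriteAndZip('file.txt', row)
    .then(() => console.log('appended and re-zipped'))
    .catch(err => console.error(err));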

How to add a new line using fs.writeFile?

I tried to write my project's file names to a new file, each one on a new line, using Node.js. How can I do that? Below is my code:
var fs = require('fs');
var colors = require('colors');

fs.readdir('./', 'utf-8', function(err, data) {
    if (err) throw err;
    console.log(data);
    fs.writeFile('./new.txt', data, function(err) {
        if (err) throw err;
        console.log('Saved!'.blue);
    });
});
You can collect all filenames first and then write them to a file.
const fs = require('fs');
const colors = require('colors');

let fileNames = '';

fs.readdir('./', 'utf-8', function(err, data) {
    if (err) throw err;
    data.forEach(filename => fileNames += filename + '\n');
    fs.writeFile('./new.txt', fileNames, function(err) {
        if (err) throw err;
        console.log('Saved!'.blue);
    });
});
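The same result can be written a bit more compactly with Array.prototype.join and the promise-based fs API. A sketch, assuming a Node version that exposes require('fs').promises (10+):
const fsp = require('fs').promises;

async function writeFileList() {
    const names = await fsp.readdir('./');
    // one filename per line
    await fsp.writeFile('./new.txt', names.join('\n') + '\n');
    console.log('Saved!');
}

writeFileList().catch(err => console.error(err));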

How to pipe a WriteStream concatenating an extension?

I am new to Node.js. I know we can stream data to the client by using the pipe() method.
Here is a snippet of the code:
router.get('/archive/*', function (req, res) {
    var decodedURI = decodeURI(req.url);
    var dirarr = decodedURI.split('/');
    var dirpath = path.join(dir, dirarr.slice(2).join("/"));
    console.log("dirpath: " + dirpath);

    var archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    });
    archive.directory(dirpath, 'new-subdir');
    archive.on('error', function (err) {
        throw err;
    });
    archive.pipe(res);
    archive.on('finish', function () {
        console.log("finished zipping");
    });
    archive.finalize();
});
When I make a GET request, the zipped file downloads but without any extension. I know it's because I am piping a write stream into the response. Is there any way to pipe it with a .zip extension? Or how can I send the zip file without building it on disk first?
You can use res.attachment() to set both the filename of the download and its MIME type:
router.get('/archive/*', function (req, res) {
    res.attachment('archive.zip');
    ...
});
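Putting that together with the archiver setup from the question (reusing its dir, path, and archiver variables), a sketch might look like this; res.attachment derives the Content-Type from the extension, so no manual headers are needed, and nothing is written to disk:
router.get('/archive/*', function (req, res) {
    var decodedURI = decodeURI(req.url);
    var dirpath = path.join(dir, decodedURI.split('/').slice(2).join('/'));

    var archive = archiver('zip', { zlib: { level: 9 } });
    archive.on('error', function (err) {
        console.error(err);
        res.end();
    });

    res.attachment('archive.zip'); // sets filename and Content-Type
    archive.pipe(res);             // stream straight to the response
    archive.directory(dirpath, 'new-subdir');
    archive.finalize();
});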
Another way is to set the headers before piping:
res.setHeader('Content-Type', 'application/zip');
res.setHeader('Content-Disposition', 'attachment; filename=download.zip');
For the given code:
router.get('/archive/*', function (req, res) {
    var decodedURI = decodeURI(req.url);
    var dirarr = decodedURI.split('/');
    var dirpath = path.join(dir, dirarr.slice(2).join("/"));

    var archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    });
    archive.directory(dirpath, 'new-subdir');
    archive.on('error', function (err) {
        throw err;
    });

    res.setHeader('Content-Type', 'application/zip');
    res.setHeader('Content-Disposition', 'attachment; filename=download.zip');

    archive.pipe(res);
    archive.finalize();
});

Using tinify in Node.js. How can we download the compressed file using Node.js?

I installed tinify with npm install --save tinify and I am uploading my image files using Node.js, but I need the compressed file saved on my system.
When I upload my image files through the browser at https://tinypng.com, it shows a download option after uploading, which is perfect. But how can we do the same through Node.js?
Here is my code:
var tinify = require("tinify");
tinify.key = "myRightApiCode";
var fs = require("fs");

fs.readFile("C:/Users/sourav/images/pgL_NA-10005_5.jpg", function(err, sourceData) {
    if (err) throw err;
    tinify.fromBuffer(sourceData).toBuffer(function(err, resultData) {
        if (err) throw err;
        // ...
        console.log(resultData);
        // need the compressed file on my system
    });
});
You can use tinify's method that converts and writes the compressed image at once:
var sourceFile = tinify.fromFile("uncompressed.jpg");
sourceFile.toFile("compressed.jpg");
Alternatively, in your approach, try:
var tinify = require("tinify");
tinify.key = "myRightApiCode";
var fs = require("fs");

fs.readFile("C:/Users/sourav/images/pgL_NA-10005_5.jpg", function(err, sourceData) {
    if (err) throw err;
    tinify.fromBuffer(sourceData).toBuffer(function(err, resultData) {
        if (err) throw err;
        fs.writeFile('C:/Users/sourav/images/optimized.jpg', resultData, function (err) {
            if (err) throw err;
            console.log('It\'s saved!');
        });
    });
});
Hope this solves your query :)

Why is this readFile operation in Node throwing an error?

I have this code using socket.io on a Node server:
io.sockets.on('connection', function (socket) {
    reader = require('fs');
    fileContents = reader.readFile(__dirname + '/textCharacters.txt', 'utf8',
        function(data, err) {
            if (err) throw err;
            console.log(data);
        }
    );
    socket.emit('retrievedFileContent', {content: fileContents});
});
When I check the Node server debug output, the error shows the contents of the file, so I know the file is being read. But why isn't it being returned to the fileContents variable?
Because the readFile(filename, encoding, callback) function doesn't return the file contents; it passes them as the second argument to the given callback. Note also that the callback's parameters are (err, data), not (data, err) as in your code. Try modifying your code like this:
var fs = require('fs');

io.sockets.on('connection', function (socket) {
    var filename = __dirname + '/textCharacters.txt';
    fs.readFile(filename, 'utf8', function(err, data) {
        if (err) throw err;
        socket.emit('retrievedFileContent', {content: data});
    });
});
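If you prefer promises, a roughly equivalent sketch using the promise-based fs API (available as require('fs').promises in newer Node versions) would be:
const fsp = require('fs').promises;
const path = require('path');

io.sockets.on('connection', async function (socket) {
    try {
        const filename = path.join(__dirname, 'textCharacters.txt');
        const data = await fsp.readFile(filename, 'utf8');
        socket.emit('retrievedFileContent', { content: data });
    } catch (err) {
        console.error(err);
    }
});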
