I'm trying to zip a few audio files from a bucket and then download the zip from a Firebase HTTPS function. Everything works well on the local emulator, and I've already raised memory to 1 GB and the timeout to 300 seconds, but the deployed function fails with the generic message "Couldn't handle response".
Here's the code:
const workingDir = join(tmpdir(), 'zip' + +new Date());
await fs.ensureDir(workingDir);

const archive = archiver('zip', { zlib: { level: 9 } });

archive.on('warning', function (err) {
  if (err.code === 'ENOENT') {
    console.log('Warning', err);
  } else {
    throw err;
  }
});

archive.on('error', function (err) {
  console.log('error', err);
  throw err;
});

res.attachment(+new Date() + '.zip').type('zip');
archive.pipe(res);
archive.on('end', () => res.end());
archive.directory(workingDir, false);
archive.finalize();
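One thing the snippet doesn't show is the step that populates workingDir before archive.directory(workingDir, false) runs. For context, a minimal sketch of that step, assuming the Firebase Admin SDK's default bucket and a hypothetical filePaths array of object paths (neither appears in the original post):

// Hypothetical: download each audio file into workingDir before archiving.
// `filePaths` and the default bucket are assumptions, not from the original post.
const { getStorage } = require('firebase-admin/storage');
const { basename } = require('path');

const bucket = getStorage().bucket();
await Promise.all(
  filePaths.map((p) =>
    bucket.file(p).download({ destination: join(workingDir, basename(p)) })
  )
);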
I've been trying to add a use case to my code in which I unzip an archive that is too large to fit in the available disk space, and I expect the code to throw ENOSPC. I've tried multiple libraries, but none of them throws an error; they fail silently without completing the extraction. Each package logs the first info statement, saying that unzipping has started, but nothing after that. Most of them leave behind incomplete folders containing whatever they could write before the disk ran out of space. Here is how my code looks for each of the libraries.
My code using adm-zip:
const AdmZip = require('adm-zip');

exports.unzip = function (source, destination) {
  console.info("Started un-zipping from source: %s to destination: %s", source, destination);
  try {
    const zip = new AdmZip(source);
    zip.extractAllTo(destination, true);
    console.info("done unzipping");
  } catch (error) {
    console.error("Unzipping failed. Reason: %s", error);
    throw new Error(error);
  }
};
Code using yauzl:
const yauzl = require('yauzl');
const mkdirp = require('mkdirp');
const path = require('path');
const fs = require('fs');

exports.extractZip = function (source, destination) {
  return new Promise(function (resolve, reject) {
    console.log("Extracting zip: '" + source + "' to '" + destination + "'");
    yauzl.open(source, {
      lazyEntries: true
    }, function (err, zipfile) {
      if (err) throw err;
      zipfile.readEntry();
      zipfile.on("error", function (err) {
        console.error("Something went wrong while extracting!");
        reject(new Error(err));
      });
      zipfile.on("end", function () {
        console.log("Completed extracting zip!");
        resolve();
      });
      zipfile.on("entry", function (entry) {
        if (/\/$/.test(entry.fileName)) {
          // directory file names end with '/'
          mkdirp(destination + '/' + entry.fileName, function (err) {
            if (err) {
              console.error("Something went wrong while extracting!");
              throw err;
            }
            zipfile.readEntry();
          });
        } else {
          // file entry
          zipfile.openReadStream(entry, function (err, readStream) {
            if (err) {
              console.error("Something went wrong while extracting!");
              throw err;
            }
            // ensure parent directory exists
            mkdirp(destination + '/' + path.dirname(entry.fileName), function (err) {
              if (err) throw err;
              readStream.pipe(fs.createWriteStream(destination + '/' + entry.fileName));
              readStream.on("end", function () {
                zipfile.readEntry();
              });
            });
          });
        }
      });
    });
  });
};
Code using unzipper:
const unzipper = require('unzipper');

exports.unzip2 = function (source, destination) {
  console.info("Started un-zipping from source: %s to destination: %s", source, destination);
  try {
    fs.createReadStream(source)
      .pipe(unzipper.Extract({ path: destination }))
      .on('error', function (err) {
        console.error("something went wrong", err.code);
        throw err;
      });
  } catch (error) {
    console.error("Unzipping failed. Reason: %s", error);
    throw new Error(error);
  }
};
Code using extract-zip:
const extract = require('extract-zip');

exports.extractArchive = async function (source, destination) {
  try {
    extract(source, { dir: destination }, function (err) {
      if (err) {
        console.error("Something went wrong!", err.code);
        throw err;
      }
    });
    console.log('Extraction complete');
  } catch (err) {
    // handle any errors
  }
};
Is there something wrong with my code? Is there any special event that I need to listen for?
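One general observation about all four snippets: a try/catch only catches synchronous throws. A stream's 'error' callback fires on a later tick, so an exception thrown inside it escapes the surrounding try block entirely, which is why wrappers like these never surface ENOSPC. A minimal demonstration (the file name is hypothetical):

try {
  fs.createReadStream('missing.txt').on('error', function (err) {
    throw err; // thrown asynchronously: this crashes the process instead of reaching the catch below
  });
} catch (e) {
  console.error('never reached', e); // does not run for stream errors
}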
After some trial and error with both yauzl and unzipper, unzipper turned out to work (it threw ENOSPC when the disk ran out of space during unzipping) with the following code.
exports.unzip2 = function (source, destination) {
  return new Promise(function (resolve, reject) {
    console.info("Started un-zipping from source: %s to destination: %s", source, destination);
    try {
      var sourceStream = fs.createReadStream(source);
      sourceStream.on('error', function (err) {
        console.error("something went wrong", err.code);
        reject(err); // pass the original error through so callers can inspect err.code
      });
      var destinationStream = unzipper.Extract({ path: destination });
      destinationStream.on('error', function (err) {
        console.error("something went wrong", err.code);
        reject(err);
      });
      destinationStream.on('close', function () {
        console.log("Completed extract!");
        resolve();
      });
      sourceStream.pipe(destinationStream).on('error', function (err) {
        console.error("something went wrong", err.code);
        reject(err);
      });
    } catch (error) {
      // note: the caught variable is `error`, not `err`
      console.error("something went wrong", error.code);
      reject(error);
    }
  });
};
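A usage sketch (paths are hypothetical): because the promise now rejects with the original error, the caller can inspect err.code directly.

unzip2('/data/huge.zip', '/small-disk/out')
  .then(function () {
    console.log('extracted');
  })
  .catch(function (err) {
    if (err.code === 'ENOSPC') {
      console.error('Ran out of disk space while extracting');
    } else {
      console.error('Extraction failed', err);
    }
  });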
I am using the Node.js archiver package for creating my zip file. I need to download this zip file via axios.
This is my Node.js code:
const ar = require('archiver');

let zipFile = fs.createWriteStream(zipFilePath);
var archive = ar('zip');

zipFile.on('close', () => {
  console.log(archive.pointer() + ' total bytes');
  console.log('archiver has been finalized and the output file descriptor has closed.');
});

zipFile.on('end', () => {
  console.log('Data has been drained');
});

archive.on('warning', (err) => {
  if (err.code === 'ENOENT') {
    console.log('warning');
  } else {
    throw err;
  }
});

archive.on('error', (err) => {
  // send the error response; the original threw first, which made this line unreachable
  res.status(500).send({ error: err.message });
});

archive.on('finish', () => {
  console.log("zip file generated");
});

archive.pipe(zipFile);
archive.pipe(res);

res.on('close', () => {
  // the 'close' event does not receive an error argument
  console.log('Archive wrote %d bytes', archive.pointer());
});

archive.append(fs.createReadStream(englishFilePath), { name: englishFileName });
archive.append(fs.createReadStream(spanishFilePath), { name: spanishFileName });
archive.finalize();
This is my Vue.js code for downloading the zip file:
const url = window.URL.createObjectURL(new Blob([res.body]));
const link = document.createElement("a");
link.href = url;
link.setAttribute("download", zipfileName);
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
Now I am able to download a zip file, but it is corrupted.
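The post doesn't show the axios request itself, but client-side zip corruption usually means the HTTP client decoded binary data as text. A hedged sketch of the request side, assuming axios (the endpoint is hypothetical): note that axios exposes the payload as res.data, not res.body, and needs responseType set for binary downloads.

// Hypothetical request: ask axios for binary data explicitly.
const res = await axios.get('/api/download-zip', { responseType: 'blob' });
// res.data is already a Blob, so no new Blob([...]) wrapping is needed.
const url = window.URL.createObjectURL(res.data);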
// `unzip` appears to be the yauzl module (the open/openReadStream/readEntry API matches)
const unzip = require('yauzl');
const mkdirp = require('mkdirp');
const path = require('path');
const fs = require('fs');

function unzipCode() {
  console.log('Unzipping contents...');
  return new Promise((resolve, reject) => {
    const files = [];
    unzip.open(filePath, { autoclose: false, lazyEntries: true }, (err, zipfile) => {
      if (err) return reject(err); // `reject;` alone never calls the function
      zipfile.readEntry();
      zipfile.on('entry', (entry) => {
        if (!sourceDirectoryRegEx.test(entry.fileName)) {
          console.log(" [X] Skipping: " + entry.fileName);
          zipfile.readEntry();
        } else {
          console.log(" [+] Unzipping: " + entry.fileName);
          if (/\/$/.test(entry.fileName)) {
            // directory file names end with '/'
            mkdirp(path.join(cwd, entry.fileName), (err) => {
              if (err) return reject(err);
              zipfile.readEntry();
            });
          } else {
            zipfile.openReadStream(entry, (err, readStream) => {
              if (err) return reject(err);
              // ensure parent directory exists
              mkdirp(path.join(cwd, path.dirname(entry.fileName)), (err) => {
                if (err) return reject(err);
                let stream = fs.createWriteStream(path.join(cwd, entry.fileName));
                readStream.pipe(stream, { end: false });
                readStream.on('end', () => {
                  console.log('After write', entry.fileName);
                  // add file details to files array
                  files.push({
                    key: entry.fileName,
                    body: stream,
                  });
                  zipfile.readEntry();
                  readStream.unpipe(stream);
                  stream.end();
                });
              });
            });
          }
        }
      });
      zipfile.once('end', () => {
        zipfile.close();
        resolve(files);
      });
    });
  });
}
I am trying to unzip some files in the AWS Lambda console using this function. The environment I've chosen is Node.js 12.x; it previously ran on Node.js 8.10.
The exact error I'm encountering is Cannot pipe, not readable.
How do I solve it?
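For comparison, the extraction loop in yauzl's README lets each entry stream end on its own, with the default pipe behavior closing the write stream, rather than piping with { end: false } and manually unpiping. A minimal sketch of that pattern, reusing the names from the snippet above:

zipfile.openReadStream(entry, (err, readStream) => {
  if (err) return reject(err);
  const writeStream = fs.createWriteStream(path.join(cwd, entry.fileName));
  // advance to the next entry only once the file has been fully flushed to disk
  writeStream.on('close', () => zipfile.readEntry());
  readStream.pipe(writeStream); // default end: true closes writeStream when the entry ends
});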
I am trying to download files from Google Cloud Storage and zip them.
async function makeZippedFiles(destination, all_file_links) {
  console.log("In the zip file function");
  for (let i in all_file_links) {
    let name = all_file_links[i]['name'];
    let archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });
    archive.on('error', function (err) {
      throw err;
    });
    let output = fs.createWriteStream(__dirname + `/${name}.zip`);
    console.log("loop number", i);
    let sourceFile = all_file_links[i]['source'];
    console.log(sourceFile, name);
    let remoteFile = bucket.file(sourceFile);
    let read_file = remoteFile.createReadStream();
    await archive.append(read_file, { name: name });
    read_file
      .on('error', function (err) {
        console.log(err);
      })
      .on('response', function (response) {
        console.log("writing file", name);
        // Server connected and responded with the specified status and headers.
      })
      .on('end', function () {
        console.log("file downloaded", name);
        // The file is fully downloaded.
      });
    archive.pipe(output);
    archive.finalize();
  }
}
In the example above, I am looping through all the files and creating individual archives; that is, if I download two files, I create two separate archives. This works.
However, if I want to zip all the files into one archive, I get the following error:
start of central directory not found; zipfile corrupt. (please check that you have transferred or created the zipfile in the appropriate BINARY mode and that you have compiled UnZip properly)
(This is the unzip utility reporting that it cannot find the end-of-central-directory record, which typically means the zip file was truncated or garbled while being written.)
The code I used is:
async function makeZippedFiles(destination, all_file_links) {
  console.log("In the zip file function");
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  });
  archive.on('error', function (err) {
    throw err;
  });
  let output = fs.createWriteStream(__dirname + `/${destination}.zip`);
  for (let i in all_file_links) {
    let name = all_file_links[i]['name'];
    console.log("loop number", i);
    let sourceFile = all_file_links[i]['source'];
    console.log(sourceFile, name);
    let remoteFile = bucket.file(sourceFile);
    let read_file = remoteFile.createReadStream();
    await archive.append(read_file, { name: name });
    read_file
      .on('error', function (err) {
        console.log(err);
      })
      .on('response', function (response) {
        console.log("writing file", name);
        // Server connected and responded with the specified status and headers.
      })
      .on('end', function () {
        console.log("file downloaded", name);
        // The file is fully downloaded.
      });
    archive.pipe(output);
  }
  archive.finalize();
}
Found the solution; it was carelessness, actually.
async function makeZippedFiles(destination, all_file_links) {
  console.log("In the zip file function");
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  });
  archive.on('error', function (err) {
    throw err;
  });
  let output = fs.createWriteStream(__dirname + `/${destination}.zip`);
  archive.pipe(output); // pipe once, before the loop
  for (let i in all_file_links) {
    let name = all_file_links[i]['name'];
    console.log("loop number", i);
    let sourceFile = all_file_links[i]['source'];
    console.log(sourceFile, name);
    let remoteFile = bucket.file(sourceFile);
    let read_file = remoteFile.createReadStream();
    await archive.append(read_file, { name: name });
    read_file
      .on('error', function (err) {
        console.log(err);
      })
      .on('response', function (response) {
        console.log("writing file", name);
        // Server connected and responded with the specified status and headers.
      })
      .on('end', function () {
        console.log("file downloaded", name);
        // The file is fully downloaded.
      });
  }
  archive.finalize();
}
I moved the archive.pipe(output) above the for loop and it works. Presumably the earlier version corrupted the zip because calling archive.pipe(output) on every iteration attached the same destination stream repeatedly, duplicating the archive's bytes in the output file.
I download a zip file from an S3 bucket, then extract the zip file, and finally upload one file back to an S3 bucket in a Lambda function using Node.js. But I am getting this error:
Error: EROFS: read-only file system, open './tmp/test.zip'
Process exited before completing request
const AWS = require('aws-sdk');
const fs = require('fs');
const targz = require('targz');

const s3 = new AWS.S3();

// Lambda invokes the handler with (event, context, callback), not (callback)
exports.handler = function (event, context, callback) {
  downloadZipFile(params, downloadPath, function (err) {
    if (err) {
      callback(err);
    } else {
      processZipFile(downloadPath, function (err) {
        if (err) {
          callback(err);
        } else {
          callback(null);
        }
      });
    }
  });
};

function downloadZipFile(params, downloadPath, callback) {
  const file = fs.createWriteStream(downloadPath);

  s3.getObject(params)
    .on('httpData', function (chunk) {
      file.write(chunk);
    })
    .on('success', function () {
      callback(null);
    })
    .on('error', function (err) {
      callback(err);
    })
    .on('complete', function () {
      file.end();
    })
    .send();
}

// accept the callback the handler passes in, so the function can report completion
function processZipFile(filePath, callback) {
  const stats = fs.statSync(filePath);
  const fileSizeInBytes = stats.size;
  if (fileSizeInBytes > 0) {
    var srcPath = filePath;
    var destPath = "./tmp";
    targz.decompress({
      src: srcPath,
      dest: destPath
    }, function (err) {
      if (err) {
        console.log(err);
        callback(err);
      } else {
        console.log("Done!");
        UploadFile();
        callback(null);
      }
    });
  } else {
    callback(null);
  }
}

function UploadFile() {
  var body = fs.createReadStream('./tmp/SampleFile.txt');
  var srcfileKey = "SampleFile.txt";
  // Upload the stream
  var s3obj = new AWS.S3({ params: { Bucket: bucketName, Key: srcfileKey } });
  s3obj.upload({ Body: body }, function (err, data) {
    if (err) {
      console.log("An error occurred", err);
    }
    console.log("Uploaded the file at", data.Location);
  });
}
You need to change the file path to just /tmp instead of ./tmp. Lambda only allows you to write to the /tmp directory.
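Applied to the code above, only the paths need to change; a minimal sketch:

const path = require('path');

const downloadPath = path.join('/tmp', 'test.zip'); // was './tmp/test.zip'
const destPath = '/tmp';                            // was './tmp'
// and in UploadFile():
var body = fs.createReadStream(path.join('/tmp', 'SampleFile.txt'));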