I've been trying to add a use case to my code in which I unzip a zip archive that is too large to fit in the available disk space, and I expect my code to throw ENOSPC. I've tried multiple libraries, but none of them throws an error; they fail silently without completing the extraction. I'd expect them to throw an ENOSPC error, but all of the packages just log the first info statement saying that the unzipping has started, and nothing after that. Most of them create incomplete folders containing whatever they could write before the disk ran out of space. Here is what my code looks like for each of the libraries.
My code using adm-zip:
exports.unzip = function(source, destination) {
console.info("Started un-zipping from source: %s to destination: %s", source, destination);
try {
const zip = new AdmZip(source);
zip.extractAllTo(destination, true);
console.info("done unzipping");
} catch (error) {
console.error("Unzipping failed. Reason: %s", error)
throw new Error(error)
}
};
Code using yauzl:
exports.extractZip = function(source, destination) {
return new Promise(function(resolve, reject) {
console.log("Extracting zip: '" + source + "' to '" + destination + "'");
yauzl.open(source, {
lazyEntries: true
}, function(err, zipfile) {
if (err) throw err;
zipfile.readEntry();
zipfile.on("error", function (err) {
console.error("Something went wrong while extracting!");
reject(new Error(err));
});
zipfile.on("end", function () {
console.log("Completed extracting zip!");
resolve();
});
zipfile.on("entry", function(entry) {
if (/\/$/.test(entry.fileName)) {
// directory file names end with '/'
mkdirp(destination + '/' + entry.fileName, function(err) {
if (err) {
console.error("Something went wrong while extracting!");
throw err;
}
zipfile.readEntry();
});
} else {
// file entry
zipfile.openReadStream(entry, function(err, readStream) {
if (err) {
console.error("Something went wrong while extracting!");
throw err;
}
// ensure parent directory exists
mkdirp(destination + '/' + path.dirname(entry.fileName), function(err) {
if (err) throw err;
readStream.pipe(fs.createWriteStream(destination + '/' + entry.fileName));
readStream.on("end", function() {
zipfile.readEntry();
});
});
});
}
});
});
});
}
Code using Unzipper:
exports.unzip2 = function(source, destination) {
console.info("Started un-zipping from source: %s to destination: %s", source, destination);
try {
fs.createReadStream(source)
.pipe(unzipper.Extract({ path: destination }))
.on('error',function (err){
console.error("something went wrong", err.code);
throw err;
});
} catch (error) {
console.error("Unzipping failed. Reason: %s", error)
throw new Error(error)
}
};
Code Using extract-zip:
exports.extractArchive = async function(source, destination) {
try {
extract(source, { dir: destination }, function (err) {
if (err) {
console.error("Something went wrong!", err.code);
throw err;
}
});
console.log('Extraction complete')
} catch (err) {
// handle any errors
}
};
Is there something wrong with my code? Is there any special event that I need to listen for?
After some trial and error with both yauzl and unzipper, unzipper seemed to work (it throws ENOSPC when the disk runs out of space during unzipping) with the following code.
exports.unzip2 = function(source, destination) {
return new Promise(function(resolve, reject) {
console.info("Started un-zipping from source: %s to destination: %s", source, destination);
try {
var sourceStream = fs.createReadStream(source);
sourceStream.on('error',function (err){
console.error("something went wrong", err.code);
reject(new Error(err));
});
var destinationStream = unzipper.Extract({ path: destination });
destinationStream.on('error',function (err){
console.error("something went wrong", err.code);
reject(new Error(err));
});
destinationStream.on('close',function (){
console.log("Completed extract!");
resolve();
});
sourceStream.pipe(destinationStream).on('error',function (err){
console.error("something went wrong", err.code);
reject(new Error(err));
});
} catch (error) {
console.error("something went wrong", error.code);
reject(error);
}
});
};
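In hindsight, a plausible reason the yauzl attempt above stayed silent is that it only listens for 'error' on the zipfile and the read stream, never on the write stream created by fs.createWriteStream, and pipe() does not forward destination errors back to the source. A minimal sketch of just that part, reusing the variables from the code above (not a full rewrite):
// inside the "entry" handler, after openReadStream and mkdirp succeed
var writeStream = fs.createWriteStream(destination + '/' + entry.fileName);
writeStream.on('error', function(err) {
  // disk-level failures such as ENOSPC surface here, not on readStream
  reject(err);
});
writeStream.on('finish', function() {
  // only request the next entry once the data has been flushed to disk
  zipfile.readEntry();
});
readStream.pipe(writeStream);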
I'm trying to zip a few audio files from a bucket and then download the zip in a Firebase function. Everything works well in the local emulator, and I also raised memory to 1 GB and the timeout to 300 seconds, but the HTTPS function fails with a generic message, "Couldn't handle response".
Here's the code.
const workingDir = join(tmpdir(), 'zip' + +new Date());
await fs.ensureDir(workingDir);
const archive = archiver('zip', { zlib: { level: 9 } });
archive.on('warning', function (err) {
if (err.code === 'ENOENT') {
console.log('Warning', err);
} else {
throw err;
}
});
archive.on('error', function (err) {
console.log('error', err);
throw err;
});
res.attachment(+new Date() + '.zip').type('zip');
archive.pipe(res);
archive.on('end', () => res.end());
archive.directory(workingDir, false);
archive.finalize();
Can't really understand what I'm doing wrong here. Everything loops fine in the console.log, but in the file I only get the last one. I'm guessing it is overwriting the file every time?
readFiles('/Volumes/Staccs_Sled_001/Eagle Rock 2 Oktober - Exported', (filepath, name, ext, stat) => {
console.log('file path:', filepath);
console.log('file name:', name);
console.log('file extension:', ext);
console.log('file information:', stat);
const infotext = [
ext,
filepath,
]
fs.writeFileSync('./exportedTitles.json', JSON.stringify(infotext, null, 2), err => {
if (err) {
console.log(err);
} else {
console.log('files made');
}
})
})
Any suggestions what I doing wrong?
fs.writeFileSync overwrites the file.
You will have to use fs.appendFile instead:
fs.appendFile('./exportedTitles.json', 'data to append', err => {
if (err) {
console.log(err);
} else {
console.log('files made');
}
});
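Also note that appending one JSON fragment per file will not leave you with a single valid JSON document. If that is the goal, here is a sketch of an alternative, assuming readFiles invokes the callback synchronously for each file as in the question: collect the entries first and write the file once at the end.
const results = [];
readFiles('/Volumes/Staccs_Sled_001/Eagle Rock 2 Oktober - Exported', (filepath, name, ext, stat) => {
  results.push({ ext, filepath });
});
// a single write after the loop; fs.writeFileSync takes no callback and throws on failure
fs.writeFileSync('./exportedTitles.json', JSON.stringify(results, null, 2));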
I am using Multer to upload images to my file system. Multer doesn't let you set the destination dynamically, so I always upload into the same folder and then rename the folder with fs.renameSync.
I am using the sync version because I think I have to wait for the task to finish; otherwise I will have problems in the following tasks that need to be synchronized with the renaming.
However, I get intermittent errors and I don't know how to solve them.
Here is my code:
router.post("/changeprofile", ensureAuthenticated, (req, res) => {
upload(req, res, err => {
if (err) {
res.render("changeprofile", { msg: err });
} else {
if (req.file == undefined) {
res.render("changeprofile", { msg: "Error: No file Selected!" });
} else {
res.render("changeprofile", {
msg: "File Uploaded!",
file: `uploads/${req.file.filename}`
});
fsextra.removeSync("./public/profile" + id);
if (err) console.log(err);
else console.log("Deleted old folder");
glob("./public/uploads/profile.*", (err, matches) => {
if (err) console.log(err);
else {
console.log("renaming folder...");
fs.renameSync("./public/uploads", "./public/profile/" + id);
if (err) console.log(err);
else {
fs.mkdir("./public/uploads", err => {
if (err) console.log(err);
else
console.log(
"------------------FOLDER RECREATED---------------------------"
);
});
}
}
});
}
}
});
});
Here is the error that I get:
Error: EPERM: operation not permitted, rename './public/uploads' -> './public/profile/21'
at Object.renameSync (fs.js:593:3)
at glob (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\routes\users.js:558:19)
at f (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\once\once.js:25:25)
at Glob.<anonymous> (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:151:7)
at Glob.emit (events.js:189:13)
at Glob._finish (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:197:8)
at done (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:182:14)
at Glob._processReaddir2 (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:434:12)
at C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:371:17
at RES (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\inflight\inflight.js:31:16)
at f (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\once\once.js:25:25)
at Glob._readdirEntries (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:578:10)
at C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\glob\glob.js:555:12
at go$readdir$cb (C:\Users\Andrea\Desktop\LinkedinLikeSocialNetwork\node_modules\graceful-fs\graceful-fs.js:162:14)
at FSReqWrap.args [as oncomplete] (fs.js:140:20)
Even though I didn't get an answer, I think I arrived at a solution:
I think the problem is this. When you use a sync method, you might assume that Node will also be synchronized with the operating system when deleting and renaming your folder in the file system. But it seems it is not: Node calls into the OS with some kernel function, but the management of the file system is then handled by the OS, and Node does not wait for that to complete, even in synchronous mode. It could happen, for example, that the I/O operations are buffered and then executed together to improve OS performance.
The right idea would be to flush the OS caches after executing the fs operation. In any case, using fs-extra and graceful-fs, I think it is now working:
var fs = require("graceful-fs");
const fsextra = require("fs-extra");
fs.gracefulify(realfs);
router.post("/changeprofile", ensureAuthenticated, (req, res) => {
upload(req, res, err => {
if (err) {
res.render("changeprofile", { msg: err });
} else {
if (req.file == undefined) {
res.render("changeprofile", { msg: "Error: No file Selected!" });
} else {
res.render("changeprofile", {
msg: "File Uploaded!",
file: `uploads/${req.file.filename}`
});
var path = "./public/profile/" + id;
if (fsextra.existsSync(path)){
fsextra.removeSync(path);
}
if (err) console.log(err);
else console.log("Deleted old folder");
glob("./public/uploads/profile.*", (err, matches) => {
if (err) console.log(err);
else {
console.log("renaming folder...");
fs.renameSync("./public/uploads", "./public/profile/" + id);
if (err) console.log(err);
else {
fs.mkdir("./public/uploads", err => {
if (err) console.log(err);
else
console.log(
"------------------FOLDER RECREATED---------------------------"
);
});
}
}
});
}
}
});
});
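A different way to serialize the same remove → rename → recreate sequence (not what the answer above settled on) is fs-extra's promise-returning methods remove, move and ensureDir, so each step only starts after the previous one has finished. A sketch with a hypothetical helper rotateProfileFolder:
async function rotateProfileFolder(id) {
  const target = "./public/profile/" + id;
  await fsextra.remove(target);                   // delete the old folder if it exists
  await fsextra.move("./public/uploads", target); // same effect as the renameSync call
  await fsextra.ensureDir("./public/uploads");    // recreate the empty uploads folder
}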
I have mocha tests. I will simplify them as much as I can.
I wonder how I should handle errors in mocha tests. For example, if there is an error in getName, what should I do? How can I throw an error? Or should I use done(error), as in:
it('trial', function(done) {
getName(function (name, err) {
if(err){
done(err); //what should I do here?
}else{
console.log(name);
}
});
});
If done is called with an argument other than undefined, the test will fail and be reported as such. The other tests will still be executed.
This allows you to test for success, but also for errors:
it('succeed', (done) => {
myFunc('success', (err, res) => {
if(err) {
done(err);
} else if(res !== 'expected') {
done('Wrong result ' + res);
} else {
done();
}
});
});
it('fail with error 404', (done) => {
myFunc('fail', (err, res) => {
if(err) {
if(err === 404) {
done();
} else {
done('Error was expected to be 404, got ' + err);
}
} else {
done('Was expected to fail, got result ' + res + ' and no error');
}
});
});
For synchronous functions, you can wrap the call in try/catch and call done from there:
it('succeed', (done) => {
try {
var res = myFuncSync('succeed');
} catch(err) {
done(err);
return;
}
done();
});
it('fail with error 404', (done) => {
try {
var res = myFuncSync('fail');
} catch(err) {
if(err === 404) {
done();
} else {
done('Error was expected to be 404, got ' + err);
}
return;
}
done('Was expected to fail, got result ' + res + ' and no error');
});
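For promise-based code, mocha will also accept a returned promise (or an async test function): if the promise rejects, the test fails without done being involved. A short sketch, assuming a hypothetical promise-returning myFuncPromise:
it('succeed (promise)', () => {
  // returning the promise lets mocha fail the test on rejection
  return myFuncPromise('success').then((res) => {
    if (res !== 'expected') {
      throw new Error('Wrong result ' + res);
    }
  });
});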
When using busboy to handle file uploads in Node.js:
var uploadStream = fs.createWriteStream(imagePath);
uploadStream.on('error', function(err) {
if (err.code === 'ENOENT') {
// unknown dir
mkdirp(imageDir, function(err) {
if (err) {
throw new Error('mkdirp error in upload')
} else {
// how to continue pipe from here?
}
})
}
});
file.on('data', function(data) {
console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
});
file.on('end', function() {
console.log('File [' + fieldname + '] Finished');
});
// save uploaded file
file.pipe(uploadStream);
If createWriteStream is called with a path whose directory doesn't exist, an 'error' event is fired. I tried to handle this error by creating that directory and continuing the upload pipe, but I don't know how to continue the upload. I tried something like:
uploadStream.on('error', function(err) {
if (err.code === 'ENOENT') {
// unknown dir
mkdirp(imageDir, function(err) {
if (err) {
throw new Error('mkdirp error in upload')
} else {
file.pipe(uploadStream) // does not work
file.pipe(fs.createWriteStream(imagePath)) // only gets part of the uploaded image
}
})
}
// uploadStream.end();
});
please help!!!
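For what it's worth, re-piping only yields part of the image because by the time 'error' fires, the chunks busboy has already pushed into the failed write stream are gone; a new pipe only receives whatever arrives afterwards. A commonly suggested workaround (not from the original post) is to make sure the directory exists before opening the write stream, so the pipe never has to be restarted. A minimal sketch reusing imageDir, imagePath and file from above:
mkdirp(imageDir, function(err) {
  if (err) {
    console.error('mkdirp error in upload', err);
    file.resume(); // drain the upload so the request can still finish
    return;
  }
  var uploadStream = fs.createWriteStream(imagePath);
  uploadStream.on('error', function(err) {
    console.error('write error in upload', err);
  });
  file.pipe(uploadStream);
});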