Need to ZIP an entire directory using Node.js - node.js

I need to zip an entire directory using Node.js. I'm currently using node-zip and each time the process runs it generates an invalid ZIP file (as you can see from this Github issue).
Is there another, better, Node.js option that will allow me to ZIP up a directory?
EDIT: I ended up using archiver
// Original (broken) attempt using node-zip/JSZip.
// NOTE(review): zip.folder(dir) only creates an empty folder *entry* named
// after `dir` inside the archive — it does not read the directory's files
// from disk, which is presumably why the generated ZIP was invalid.
writeZip = function(dir,name) { // implicit global — assumed intentional in this snippet
var zip = new JSZip(),
code = zip.folder(dir), // folder entry only; `code` is never used afterwards
output = zip.generate(), // serializes the (effectively empty) archive
filename = ['jsd-',name,'.zip'].join(''); // e.g. 'jsd-<randomstring>.zip'
fs.writeFileSync(baseDir + filename, output); // baseDir comes from the enclosing scope
console.log('creating ' + filename);
};
sample value for parameters:
dir = /tmp/jsd-<randomstring>/
name = <randomstring>
UPDATE: For those asking about the implementation I used, here's a link to my downloader:

I ended up using archiver lib. Works great.
Example
// Zip a whole directory with the `archiver` package, streaming the result
// to target.zip in the current working directory.
var file_system = require('fs');
var archiver = require('archiver');
var output = file_system.createWriteStream('target.zip');
var archive = archiver('zip'); // default compression settings
// 'close' fires once archiver has finished and the file descriptor is closed.
output.on('close', function () {
console.log(archive.pointer() + ' total bytes'); // pointer() = bytes written so far
console.log('archiver has been finalized and the output file descriptor has closed.');
});
// Surface archiver errors instead of failing silently.
archive.on('error', function(err){
throw err;
});
archive.pipe(output);
// append files from a sub-directory, putting its contents at the root of archive
archive.directory(source_dir, false); // NOTE(review): source_dir must be defined by the caller
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
archive.finalize(); // no more entries may be appended after this

I'm not going to show something new, just wanted to summarise the solutions above for those who like Promises as much as I do 😉.
const archiver = require('archiver');
const fs = require('fs'); // fix: fs is used below but was never required in this snippet
/**
 * Zip the contents of a directory into a single .zip file.
 * @param {String} sourceDir - e.g. /some/folder/to/compress
 * @param {String} outPath - e.g. /path/to/created.zip
 * @returns {Promise<void>} resolves when the output stream closes, rejects on archiver error
 */
function zipDirectory(sourceDir, outPath) {
  // level 9 = maximum zlib compression
  const archive = archiver('zip', { zlib: { level: 9 }});
  const stream = fs.createWriteStream(outPath);
  return new Promise((resolve, reject) => {
    archive
      .directory(sourceDir, false) // false = put contents at the archive root
      .on('error', err => reject(err))
      .pipe(stream);
    // Resolve only once the file descriptor is closed, not when finalize() returns.
    stream.on('close', () => resolve());
    archive.finalize();
  });
}
Hope it will help someone 🤞

Use Node's native child_process api to accomplish this.
No need for third party libs. Two lines of code.
const child_process = require("child_process");
child_process.execSync(`zip -r <DESIRED_NAME_OF_ZIP_FILE_HERE> *`, {
cwd: <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
});
The example above showcases the synchronous API. You can also use child_process.exec(path, options, callback) if you want async behavior. There are a lot more options you can specify other than cwd to further fine-tune your request.
If you don't have the ZIP utility:
This question specifically asks about the zip utility for archiving/compression purposes. Therefore, this example assumes you have the zip utility installed on your system. For completeness' sake, some operating systems may not have the utility installed by default. In that case you have at least three options:
Work with the archiving/compression utility that is native to your platform
Replace the shell command in the above Node.js code with code from your system. For example, linux distros usually come with tar/gzip utilities:
tar -czf <DESIRED_NAME_OF_ARCHIVE_FILE_HERE> <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE> (note: the -f flag must come immediately before the archive name).
This is a nice option as you don't need to install anything new onto your operating system or manage another dependency (kind of the whole point for this answer).
Obtain the zip binary for your OS/distribution.
For example on Ubuntu: apt install zip.
The ZIP utility is tried and tested for decades, it's fairly ubiquitous and it's a safe choice. Do a quick google search or go to the creator, Info-ZIP's, website for downloadable binaries.
Use a third party library/module (of which there are plenty on NPM).
I don't prefer this option. However, if you don't really care to understand the native methods and introducing a new dependency is a non-issue, this is also a valid option.

This is another library which zips the folder in one line :
zip-local
var zipper = require('zip-local');
zipper.sync.zip("./hello/world/").compress().save("pack.zip");

Archive.bulk is now deprecated, the new method to be used for this is glob:
// Archiver's old Archive.bulk API is deprecated — use glob() instead.
var fileName = 'zipOutput.zip';
var fileOutput = fs.createWriteStream(fileName);
var archive = archiver('zip'); // fix: `archive` was used below but never created in the snippet
fileOutput.on('close', function () {
console.log(archive.pointer() + ' total bytes');
console.log('archiver has been finalized and the output file descriptor has closed.');
});
// Attach the error handler before piping/appending so early failures are caught.
archive.on('error', function(err){
throw err;
});
archive.pipe(fileOutput);
archive.glob("../dist/**/*"); //some glob pattern here
archive.glob("../dist/.htaccess"); //another glob pattern
// add as many as you like
archive.finalize();

To include all files and directories:
archive.bulk([
{
expand: true,
cwd: "temp/freewheel-bvi-120",
src: ["**/*"],
dot: true
}
]);
It uses node-glob(https://github.com/isaacs/node-glob) underneath, so any matching expression compatible with that will work.

To pipe the result to the response object (scenarios where there is a need to download the zip rather than store locally)
archive.pipe(res);
Sam's hints for accessing the content of the directory worked for me.
src: ["**/*"]

I have found this small library that encapsulates what you need.
npm install zip-a-folder
// Fix: a hyphenated name is not a valid JavaScript identifier, so the
// package must be bound to a camelCase variable.
const zipAFolder = require('zip-a-folder');
await zipAFolder.zip('/path/to/the/folder', '/path/to/archive.zip');
https://www.npmjs.com/package/zip-a-folder

Adm-zip has problems just compressing an existing archive https://github.com/cthackers/adm-zip/issues/64 as well as corruption with compressing binary files.
I've also ran into compression corruption issues with node-zip https://github.com/daraosn/node-zip/issues/4
node-archiver is the only one that seems to work well to compress but it doesn't have any uncompress functionality.

Since archiver has long been incompatible with newer versions of webpack, I recommend using zip-lib.
// zip-lib exposes a promise-based API, so no manual stream wiring is needed.
var zl = require("zip-lib");
zl.archiveFolder("path/to/folder", "path/to/target.zip").then(function () {
console.log("done"); // archive written successfully
}, function (err) {
console.log(err); // archiving failed
});

As of today, I'm using AdmZip and it works great:
// TypeScript-style import (`import x = require(...)`); in plain Node use
// `const AdmZip = require('adm-zip')` instead.
import AdmZip = require('adm-zip');
/**
 * Zip ./yourFolder into /output_file_dir.zip using adm-zip.
 * NOTE(review): the output path is absolute (filesystem root) — confirm this
 * is intended, since writing to "/" usually requires elevated permissions.
 */
export async function archiveFile() {
try {
const zip = new AdmZip();
const outputDir = "/output_file_dir.zip";
zip.addLocalFolder("./yourFolder")
zip.writeZip(outputDir);
} catch (e) {
console.log(`Something went wrong ${e}`);
}
}

import ... from answer based on https://stackoverflow.com/a/51518100
To zip single directory
import archiver from 'archiver';
import fs from 'fs';
export default zipDirectory;
/**
 * Zip the contents of a single directory into outPath.
 * From: https://stackoverflow.com/a/51518100
 * @param {String} sourceDir - e.g. /some/folder/to/compress
 * @param {String} outPath - e.g. /path/to/created.zip
 * @returns {Promise<void>} resolves when the output stream closes
 */
function zipDirectory(sourceDir, outPath) {
  const archive = archiver('zip', { zlib: { level: 9 }}); // maximum compression
  const stream = fs.createWriteStream(outPath);
  return new Promise((resolve, reject) => {
    archive
      .directory(sourceDir, false) // false = contents at the archive root
      .on('error', err => reject(err))
      .pipe(stream);
    stream.on('close', () => resolve());
    archive.finalize();
  });
}
To zip multiple directories:
import archiver from 'archiver';
import fs from 'fs';
export default zipDirectories;
/**
 * Zip the contents of several directories into one archive.
 * Adapted from: https://stackoverflow.com/a/51518100
 * @param {String[]} sourceDirs - directories whose contents are merged at the archive root
 * @param {String} outPath - e.g. /path/to/created.zip
 * @returns {Promise<void>} resolves when the output stream closes
 */
function zipDirectories(sourceDirs, outPath) {
  const archive = archiver('zip', { zlib: { level: 9 }});
  const stream = fs.createWriteStream(outPath);
  return new Promise((resolve, reject) => {
    // directory() returns the archive itself, so it can be folded over the inputs.
    const result = sourceDirs.reduce(
      (acc, sourceDir) => acc.directory(sourceDir, false),
      archive
    );
    result
      .on('error', err => reject(err))
      .pipe(stream);
    stream.on('close', () => resolve());
    archive.finalize();
  });
}

You can try in a simple way:
Install zip-dir :
npm install zip-dir
and use it
var zipdir = require('zip-dir');
let foldername = src_path.split('/').pop() // NOTE(review): computed but never used below
// Zip everything under src_path into demo.zip; the callback receives
// any error plus the zip buffer.
zipdir(<<src_path>>, { saveTo: 'demo.zip' }, function (err, buffer) {
});

I ended up wrapping archiver to emulate JSZip, as refactoring through my project would take too much effort. I understand Archiver might not be the best choice, but here you go.
// USAGE:
const zip=JSZipStream.to(myFileLocation)
.onDone(()=>{})
.onError(()=>{});
zip.file('something.txt','My content');
zip.folder('myfolder').file('something-inFolder.txt','My content');
zip.finalize();
// NodeJS file content:
var fs = require('fs');
var path = require('path');
var archiver = require('archiver');
/**
 * Wrap an archiver instance in a small JSZip-like fluent API.
 * @param {Object} archive - an archiver('zip', ...) instance shared by all zippers
 * @param {Object} settings - { location } = path prefix applied inside the archive
 * @returns {Object} fluent zipper
 */
function zipper(archive, settings) {
return {
output: null, // the fs write stream once streamToFile() has been called
streamToFile(dir) {
const output = fs.createWriteStream(dir);
this.output = output;
archive.pipe(output);
return this;
},
// Append `content` as a file at `location` (prefixed by this zipper's folder).
file(location, content) {
if (settings.location) {
location = path.join(settings.location, location);
}
archive.append(content, { name: location });
return this;
},
// Return a child zipper whose entries are prefixed with `location`.
folder(location) {
if (settings.location) {
location = path.join(settings.location, location);
}
const child = zipper(archive, { location: location });
child.output = this.output; // fix: share the stream so onDone/onError work on child zippers too
return child;
},
finalize() {
archive.finalize();
return this;
},
onDone(method) {
this.output.on('close', method);
return this;
},
onError(method) {
this.output.on('error', method);
archive.on('error', method); // fix: surface archiver errors, not just stream errors
return this;
}
};
}
exports.JSzipStream = {
to(destination) {
console.log('stream to',destination)
const archive = archiver('zip', {
zlib: { level: 9 } // Sets the compression level.
});
return zipper(archive, {}).streamToFile(destination);
}
};

Related

How to use fs.createReadStream() in sync without call back

I am planning to copy a file content from the zip and place the binary contents in another file. which will be used when I require a package.
example:-
func_name(CURRENT_DIR);
const MainController = require('./src/controllers/mainController');
// This mainController file will require the binary file which is create the func_name function
controller = new MainController(
context,
db2ConnectOutputChannel,
undefined /*vscodeWrapper*/
);
context.subscriptions.push(controller);
controller.activate();
func_name defination
// Extract one native binding (.node) from a zip by streaming its entries.
// NOTE(review): CURRENT_DIR, electron_version, BUILD_FILE, unzipper and
// fstream all come from the surrounding scope — confirm they are defined.
var odbcBindingsNode;
var ODBC_BINDINGS = path.resolve(CURRENT_DIR, 'node_modules\/ibm_db\/build\/Release\/odbc_bindings.node');
// Name of the electron-specific binding entry inside the archive.
odbcBindingsNode = 'build\/Release\/odbc_bindings_e' + electron_version + '.node';
readStream = fs.createReadStream(BUILD_FILE); // implicit global — presumably declared elsewhere
readStream.pipe(unzipper.Parse())
.on('entry', function (entry) {
// Only the matching binding is written out; all other entries are drained.
if(entry.path === odbcBindingsNode) {
entry.pipe(fstream.Writer(ODBC_BINDINGS));
} else {
entry.autodrain();
}
})
.on('error', function(e) {
console.log('Installation Failed! \n',e);
})
.on('finish', function() {
console.log("\n" +
"===================================\n"+
"installed successfully!\n"+
"===================================\n");
})
The problem is the first function will not wait till the second function completes. It moves to the next line and tries to require maincontroller file which requires this .node file and returns .node is not found.
But .node is created after the require is called. Is there a way to make it in sync?
I tried callback which returns that require cannot be used in callback.
CallBack Code:-
// Extract the electron-specific .node binding from build.zip, then load the
// controller that depends on it. NOTE(review): `electron_version`, `context`
// and `db2ConnectOutputChannel` come from the enclosing scope — confirm.
// The function returns 1 immediately (see last line) while the extraction
// runs asynchronously — this is exactly the ordering problem the question
// describes.
function akhil(CURRENT_DIR){
var BUILD_FILE = path.resolve(CURRENT_DIR, 'folder\/build.zip');
var odbcBindingsNode;
var ODBC_BINDINGS = path.resolve(CURRENT_DIR, 'folder\/build\/Release\/odbc_bindings.node');
odbcBindingsNode = 'build\/Release\/odbc_bindings_e' + electron_version + '.node'
readStream = fs.createReadStream(BUILD_FILE); // implicit global — presumably declared elsewhere
/*
* unzipper will parse the build.zip file content and
* then it will check for the odbcBindingsNode
* (node Binary), when it gets that binary file,
* fstream.Writer will write the same node binary
* but the name will be odbc_bindings.node, and the other
* binary files and build.zip will be discarded.
*/
readStream.pipe(unzipper.Parse())
.on('entry', function (entry) {
if(entry.path === odbcBindingsNode) {
entry.pipe(fstream.Writer(ODBC_BINDINGS));
} else {
entry.autodrain(); // skip every non-matching entry
}
})
.on('error', function(e) {
console.log('Installation Failed! \n',e);
})
.on('finish', function() {
// Requiring the controller here (after extraction) is the workaround
// attempted in the question; `require` resolves relative to this module.
console.log("\n" +
"===================================\n"+
"installed successfully!\n"+
"===================================\n");
console.log("This is rebuild");
const MainController = require('./src/controllers/mainController');
controller = new MainController(
context,
db2ConnectOutputChannel,
undefined /*vscodeWrapper*/
);
context.subscriptions.push(controller);
controller.activate();
})
// Runs BEFORE the 'finish' handler above — the stream is still extracting.
return 1;
}
Using the ADM-zip package I was able to solve this issue.

using archiver module with downloadable online links

In a node application, I wish to download a zip file that contains pdfs downloaded from various urls on the internet (where if I type the url into a browser, it just directs me to download a pdf). I've been using the archiver module which is documented on github at https://github.com/archiverjs/node-archiver, and the official documentation is at https://www.archiverjs.com/.
I'm stuck at the part where it gives the following examples for adding files to the zip file.
// append a file from stream
var file1 = __dirname + '/file1.txt';
archive.append(fs.createReadStream(file1), { name: 'file1.txt' });
// append a file from string
archive.append('string cheese!', { name: 'file2.txt' });
// append a file from buffer
var buffer3 = Buffer.from('buff it!');
archive.append(buffer3, { name: 'file3.txt' });
// append a file
archive.file('file1.txt', { name: 'file4.txt' });
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
// append files from a sub-directory, putting its contents at the root of archive
archive.directory('subdir/', false);
// append files from a glob pattern
archive.glob('subdir/*.txt');
Unfortunately, it seems just pasting urls into the first parameter of .append or .directory doesn't work - would anyone know how I can add downloadable files (that are online) into the zip file?
sure, using download-pdf first something like that
var download = require('download-pdf')
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
gzip: true,
zlib: { level: 9 } // Sets the compression level.
});
var pdf = "http://www.consejoconsultivoemt.cl/wp-content/uploads/2018/12 /Presentaci%C3%B3n-Lineamientos-Estrat%C3%A9gicos-de-Corfo.pdf"
var pdf2 = "https://www.biobiochile.cl/static/tarifas.pdf"
var options = {
directory: "./files/",
filename: "first.pdf"
}
var options2 = {
directory: "./files/",
filename: "second.pdf"
}
archive.on('error', function (err) {
throw err;
});
// pipe archive data to the output file
archive.pipe(output);
// Fix: only append the files once BOTH downloads have finished — the
// original appended (and finalized) immediately, racing the asynchronous
// downloads and zipping files that might not exist yet.
var pending = 2;
function appendWhenReady() {
pending -= 1;
if (pending === 0) {
archive.file('./files/first.pdf', { name: 'first.pdf' });
archive.file('./files/second.pdf', { name: 'second.pdf' });
archive.finalize();
}
}
download(pdf, options, function (err) {
if (err) throw err
console.log("meow")
appendWhenReady();
})
download(pdf2, options2, function (err) {
if (err) throw err
console.log("meow2")
appendWhenReady();
})

NodeJS: Merge two PDF files into one using the buffer obtained by reading them

I am using fill-pdf npm module for filling template pdf's and it creates new file which is read from the disk and returned as buffer to callback. I have two files for which i do the same operation. I want to combine the two buffers there by to form a single pdf file which i can send back to the client. I tried different methods of buffer concatenation. The buffer can be concatenated using Buffer.concat, like,
var newBuffer = Buffer.concat([result_pdf.output, result_pdf_new.output]);
The size of new buffer is also the sum of the size of the input buffers. But still when the newBuffer is sent to client as response, it shows only the file mentioned last in the array.
res.type("application/pdf");
return res.send(buffer);
Any idea ?
As mentioned by @MechaCode, the creator has ended support for HummusJS.
So I would like to give you 2 solutions.
Using node-pdftk npm module
The Following sample code uses node-pdftk npm module to combine
two pdf buffers seamlessly.
const pdftk = require('node-pdftk');
// NOTE(review): fs is used below but never required in this snippet —
// `const fs = require('fs');` is assumed from the surrounding context.
var pdfBuffer1 = fs.readFileSync("./pdf1.pdf");
var pdfBuffer2 = fs.readFileSync("./pdf2.pdf");
// Feed both buffers to pdftk; .output() resolves with the merged PDF buffer.
pdftk
.input([pdfBuffer1, pdfBuffer2])
.output()
.then(buf => {
let path = 'merged.pdf';
// Write the merged buffer to disk via the low-level fd API.
fs.open(path, 'w', function (err, fd) {
fs.write(fd, buf, 0, buf.length, null, function (err) {
fs.close(fd, function () {
console.log('wrote the file successfully');
});
});
});
});
The requirement for node-pdftk npm module is you need to install the
PDFtk library. Some of you may find this overhead / tedious. So I have another solution using pdf-lib library.
Using pdf-lib npm module
const PDFDocument = require('pdf-lib').PDFDocument
// NOTE(review): this snippet uses top-level await — wrap it in an async
// function (or run it as an ES module with top-level await support).
var pdfBuffer1 = fs.readFileSync("./pdf1.pdf");
var pdfBuffer2 = fs.readFileSync("./pdf2.pdf");
var pdfsToMerge = [pdfBuffer1, pdfBuffer2]
// Copy every page of every input PDF into a fresh output document.
const mergedPdf = await PDFDocument.create();
for (const pdfBytes of pdfsToMerge) {
const pdf = await PDFDocument.load(pdfBytes);
const copiedPages = await mergedPdf.copyPages(pdf, pdf.getPageIndices());
copiedPages.forEach((page) => {
mergedPdf.addPage(page);
});
}
const buf = await mergedPdf.save(); // Uint8Array
let path = 'merged.pdf';
fs.open(path, 'w', function (err, fd) {
fs.write(fd, buf, 0, buf.length, null, function (err) {
fs.close(fd, function () {
console.log('wrote the file successfully');
});
});
});
Personally I prefer to use pdf-lib npm module.
HummusJS supports combining PDFs using its appendPDFPagesFromPDF method
Example using streams to work with buffers:
const hummus = require('hummus');
const memoryStreams = require('memory-streams');
/**
 * Concatenate two PDFs held in Buffers into a single PDF Buffer.
 * @param {Buffer} firstBuffer
 * @param {Buffer} secondBuffer
 * @returns {Buffer} - a Buffer containing the concatenated PDFs
 */
const combinePDFBuffers = (firstBuffer, secondBuffer) => {
var outStream = new memoryStreams.WritableStream();
try {
var firstPDFStream = new hummus.PDFRStreamForBuffer(firstBuffer);
var secondPDFStream = new hummus.PDFRStreamForBuffer(secondBuffer);
// Open the first PDF for modification, writing the combined result to outStream.
var pdfWriter = hummus.createWriterToModify(firstPDFStream, new hummus.PDFStreamForResponse(outStream));
pdfWriter.appendPDFPagesFromPDF(secondPDFStream);
pdfWriter.end();
var newBuffer = outStream.toBuffer();
outStream.end();
return newBuffer;
}
catch(e){
// Always close the stream, then rethrow with context.
outStream.end();
throw new Error('Error during PDF combination: ' + e.message);
}
};
combinePDFBuffers(PDFBuffer1, PDFBuffer2);
Here's what we use in our Express server to merge a list of PDF blobs.
const { PDFRStreamForBuffer, createWriterToModify, PDFStreamForResponse } = require('hummus');
const { WritableStream } = require('memory-streams');
// Merge the pages of the pdfBlobs (Javascript buffers) into a single PDF blob
const mergePdfs = pdfBlobs => {
if (pdfBlobs.length === 0) throw new Error('mergePdfs called with empty list of PDF blobs');
// This optimization is not necessary, but it avoids the churn down below
if (pdfBlobs.length === 1) return pdfBlobs[0];
// Adapted from: https://stackoverflow.com/questions/36766234/nodejs-merge-two-pdf-files-into-one-using-the-buffer-obtained-by-reading-them?answertab=active#tab-top
// Hummus is useful, but with poor interfaces -- E.g. createWriterToModify shouldn't require any PDF stream
// And Hummus has many Issues: https://github.com/galkahana/HummusJS/issues
const [firstPdfRStream, ...restPdfRStreams] = pdfBlobs.map(pdfBlob => new PDFRStreamForBuffer(pdfBlob));
const outStream = new WritableStream();
const pdfWriter = createWriterToModify(firstPdfRStream, new PDFStreamForResponse(outStream));
restPdfRStreams.forEach(pdfRStream => pdfWriter.appendPDFPagesFromPDF(pdfRStream));
pdfWriter.end();
outStream.end();
return outStream.toBuffer();
};
module.exports = exports = {
mergePdfs,
};

How do I unzip a .zip/.rar file in Node.js into a folder

I am using zlib along with fstream now for zipping and sending to the client, Now I need to unzip an archive(which may contains sub folders) into a folder maintaining the folder structure. How do I do that?
There are plenty of node modules that can do this for you. One of them is node-unzip. You can extract a .zip file to a directory as simple as this.
fs.createReadStream('path/to/archive.zip').pipe(unzip.Extract({ path: 'output/path' }));
Further reading: https://github.com/EvanOxfeld/node-unzip
Rar is a closed-source software. The only way you can do it -- install command-line rar (rar.exe or linux version of rar, which is available on most platforms) and call it by means of this:
var exec = require('child_process').exec;
exec("rar.exe x file.rar", function (error) {
if (error) {
// error code here
} else {
// success code here
}
});
you can use this amazing module http://node-machine.org/machinepack-zip
for uncompress a zip file with directory structure inside zip
var Zip = require('machinepack-zip');
// Unzip the specified .zip file and write the decompressed files/directories as contents of the specified destination directory.
Zip.unzip({
source: '/Users/mikermcneil/stuff.zip',
destination: '/Users/mikermcneil/my-stuff',
}).exec(callbackSuccess, callbackFail );
for download remote file and unzip you can use this code:
var fs = require('fs');
var unzip = require("unzip2");
var tar = require('tar');
var zlib = require('zlib');
var path = require('path');
var mkdirp = require('mkdirp'); // used to create directory tree
var request = require("request");
var http = require('http');
var zip = require("machinepack-zip");
// NOTE(review): `_diff` and `constants` come from the surrounding scope.
for (var i = 0; i < _diff.length; i++) {
// BUG(review): this request's result is discarded, and on the next line the
// `request` *module* binding is overwritten with an http.ClientRequest —
// after the first iteration the module can no longer be called.
request(constants.base_patch +"example.zip")
request = http.get({ host: 'localhost',
path: '/update/patchs/' + "example.zip",
port: 80,
headers: { 'accept-encoding': 'gzip,deflate' } });
request.on('response', (response) => {
// All iterations write to the same temp file name — confirm intended.
var output = fs.createWriteStream(__dirname + "/tmp/" +"example.zip");
switch (response.headers['content-encoding']) {
// or, just use zlib.createUnzip() to handle both cases
case 'gzip':
response.pipe(zlib.createGunzip()).pipe(unzip.Extract({ path: __dirname }));
break;
case 'deflate':
response.pipe(zlib.createInflate()).pipe(unzip.Extract({ path: __dirname }));
break;
default:
response.pipe(output);
break;
}
})
request.on('close', function(){
zip.unzip({
source: __dirname + "/tmp/" + "example.zip",
destination: __dirname,
}).exec({
error: function (err){
// BUG(review): alert() does not exist in Node.js — use console.error.
alert("error")
},
success: function (){
//delete temp folder content after finish uncompress
},
});
})
}
Note: remove unnecessary modules.
Use node js decompress-zip, first install it with npm:
npm install decompress-zip --save
Then you have to require it:
const DecompressZip = require('decompress-zip');
Finally you can use it in the following way:
let unzipper = new DecompressZip( absolutePathFileZip );
The directory to be extracted must be specified:
unzipper.extract({
path: pathToExtract
});
Additional you can use the following for better control:
Handle Error:
unzipper.on('error', function (err) {
console.log('event error')
});
Notify when everything is extracted
unzipper.on('extract', function (log) {
console.log('log es', log);
});
Notify "progress" of the decompressed files:
unzipper.on('progress', function (fileIndex, fileCount) {
console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
If anyone looking for async-await way syntax:
const request = require('request');
const unzip = require('unzip');
// Download the zip, then extract it — awaiting each stage in turn.
await new Promise((resolve, reject) =>
request('url')
.pipe(fs.createWriteStream('path/zipfilename'))
.on('error', reject) // fix: propagate download/write failures instead of hanging
.on('finish', () => {
resolve();
}));
await new Promise((resolve, reject) =>
fs.createReadStream('path/filename')
.pipe(unzip.Extract({ path: 'path/extractDir' })) // fix: closing quote was missing
.on('error', reject) // fix: propagate extraction failures
.on('close', ()=>{
resolve()
}));

How to create a directory if it doesn't exist using Node.js

Is the following the right way to create a directory if it doesn't exist?
It should have full permission for the script and readable by others.
var dir = __dirname + '/upload';
if (!path.existsSync(dir)) {
fs.mkdirSync(dir, 0744);
}
For individual dirs:
var fs = require('fs');
var dir = './tmp';
if (!fs.existsSync(dir)){
fs.mkdirSync(dir);
}
Or, for nested dirs:
var fs = require('fs');
var dir = './tmp/but/then/nested';
if (!fs.existsSync(dir)){
fs.mkdirSync(dir, { recursive: true });
}
No, for multiple reasons.
The path module does not have an exists/existsSync method. It is in the fs module. (Perhaps you just made a typo in your question?)
The documentation explicitly discourage you from using exists.
fs.exists() is an anachronism and exists only for historical reasons. There should almost never be a reason to use it in your own code.
In particular, checking if a file exists before opening it is an anti-pattern that leaves you vulnerable to race conditions: another process may remove the file between the calls to fs.exists() and fs.open(). Just open the file and handle the error when it's not there.
Since we're talking about a directory rather than a file, this advice implies you should just unconditionally call mkdir and ignore EEXIST.
In general, you should avoid the *Sync methods. They're blocking, which means absolutely nothing else in your program can happen while you go to the disk. This is a very expensive operation, and the time it takes breaks the core assumption of node's event loop.
The *Sync methods are usually fine in single-purpose quick scripts (those that do one thing and then exit), but should almost never be used when you're writing a server: your server will be unable to respond to anyone for the entire duration of the I/O requests. If multiple client requests require I/O operations, your server will very quickly grind to a halt.
The only time I'd consider using *Sync methods in a server application is in an operation that happens once (and only once), at startup. For example, require actually uses readFileSync to load modules.
Even then, you still have to be careful because lots of synchronous I/O can unnecessarily slow down your server's startup time.
Instead, you should use the asynchronous I/O methods.
So if we put together those pieces of advice, we get something like this:
// Create a directory (if needed) and report the outcome via callback.
// `mask` is optional and defaults to 0o744; a folder that already exists
// is treated as success.
function ensureExists(path, mask, cb) {
  // Support the two-argument form: ensureExists(path, cb)
  if (typeof mask === 'function') {
    cb = mask;
    mask = 0o744;
  }
  fs.mkdir(path, mask, (err) => {
    // EEXIST means the folder was already there — that's fine.
    if (!err || err.code === 'EEXIST') {
      cb(null);
      return;
    }
    cb(err); // Something else went wrong
  });
}
And we can use it like this:
// Fix: the original used comment-only if/else branches
// (`if (err) // ...` followed by `else`), which is a syntax error.
ensureExists(__dirname + '/upload', 0o744, function(err) {
if (err) {
// Handle folder creation error
} else {
// We're all good
}
});
Of course, this doesn't account for edge cases like
What happens if the folder gets deleted while your program is running? (assuming you only check that it exists once during startup)
What happens if the folder already exists, but with the wrong permissions?
The mkdir method has the ability to recursively create any directories in a path that don't exist, and ignore the ones that do.
From the Node.js v10/11 documentation:
// Creates /tmp/a/apple, regardless of whether `/tmp` and /tmp/a exist.
fs.mkdir('/tmp/a/apple', { recursive: true }, (err) => {
if (err) throw err;
});
NOTE: You'll need to import the built-in fs module first.
Now here's a little more robust example that leverages native ECMAScript Modules (with flag enabled and .mjs extension), handles non-root paths, and accounts for full pathnames:
import fs from 'fs';
import path from 'path';
// Create every missing directory along `pathname` (relative to the process
// cwd), ignoring any trailing /file-name.extension component.
function createDirectories(pathname) {
  const root = path.resolve();
  // Strip leading "./"-style markers and any trailing /file-name.extension.
  const dirsOnly = pathname.replace(/^\.*\/|\/?[^\/]+\.[a-z]+|\/$/g, '');
  fs.mkdir(path.resolve(root, dirsOnly), { recursive: true }, (err) => {
    if (err) {
      console.error(err);
      return;
    }
    console.log('Success');
  });
}
You can use it like createDirectories('/components/widget/widget.js');.
And of course, you'd probably want to get more fancy by using promises with async/await to leverage file creation in a more readable synchronous-looking way when the directories are created; but, that's beyond the question's scope.
With the fs-extra package you can do this with a one-liner:
const fs = require('fs-extra');
const dir = '/tmp/this/path/does/not/exist';
fs.ensureDirSync(dir);
I have found an npm module that works like a charm for this.
It simply does a recursive mkdir when needed, like a "mkdir -p ".
The one line version:
// Or in TypeScript: import * as fs from 'fs';
const fs = require('fs');
!fs.existsSync(dir) && fs.mkdirSync(dir);
You can just use mkdir and catch the error if the folder exists.
This is async (so best practice) and safe.
fs.mkdir('/path', err => {
if (err && err.code != 'EEXIST') throw 'up'
.. safely do your stuff here
})
(Optionally add a second argument with the mode.)
Other thoughts:
You could use then or await by using native promisify.
const util = require('util'), fs = require('fs');
const mkdir = util.promisify(fs.mkdir);
var myFunc = () => { ..do something.. }
mkdir('/path')
.then(myFunc)
.catch(err => { if (err.code != 'EEXIST') throw err; myFunc() })
You can make your own promise method, something like (untested):
// Promise wrapper around fs.mkdir; resolves when the directory exists,
// treating EEXIST (already present) as success.
// Fix: the original called a bare `mkdir`, which is undefined — it must
// be fs.mkdir.
let mkdirAsync = (path, mode) => new Promise(
(resolve, reject) => fs.mkdir(path, mode,
err => (err && err.code !== 'EEXIST') ? reject(err) : resolve()
)
)
For synchronous checking, you can use:
fs.existsSync(path) || fs.mkdirSync(path)
Or you can use a library, the two most popular being
mkdirp (just does folders)
fsextra (supersets fs, adds lots of useful stuff)
solutions
CommonJS
const fs = require('fs');
const path = require('path');
// Fix: the original line was missing a closing parenthesis.
const dir = path.resolve(path.join(__dirname, 'upload'));
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir);
}
// OR
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, {
mode: 0o744, // Not supported on Windows. Default: 0o777
});
}
ESM
update your package.json file config
{
// declare using ECMAScript modules(ESM)
"type": "module",
//...
}
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// create one custom `__dirname`, because it does not exist in es-module env ⚠️
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Fix: the original line was missing a closing parenthesis.
const dir = path.resolve(path.join(__dirname, 'upload'));
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir);
}
// OR
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, {
mode: 0o744, // Not supported on Windows. Default: 0o777
});
}
update 2022
import { existsSync } from 'node:fs';
refs
NodeJS Version: v18.2.0
https://nodejs.org/api/fs.html#fsexistssyncpath
https://nodejs.org/api/fs.html#fsmkdirsyncpath-options
https://nodejs.org/api/url.html#urlfileurltopathurl
https://github.com/nodejs/help/issues/2907#issuecomment-757446568
ESM: ECMAScript modules
https://nodejs.org/api/esm.html#introduction
One-line solution: Creates the directory if it does not exist
// import
const fs = require('fs') // In JavaScript
import * as fs from "fs" // in TypeScript
import fs from "fs" // in Typescript
// Use
!fs.existsSync(`./assets/`) && fs.mkdirSync(`./assets/`, { recursive: true })
The best solution would be to use the npm module called node-fs-extra. It has a method called mkdir which creates the directory you mentioned. If you give a long directory path, it will create the parent folders automatically. The module is a superset of npm module fs, so you can use all the functions in fs also if you add this module.
var dir = 'path/to/dir';
try {
fs.mkdirSync(dir);
} catch(e) {
if (e.code != 'EEXIST') throw e;
}
Use:
var filessystem = require('fs');
var dir = './path/subpath/';
if (!filessystem.existsSync(dir))
{
filessystem.mkdirSync(dir);
}
else
{
console.log("Directory already exist");
}
For node v10 and above
As some answers pointed out, since node 10 you can use recursive:true for mkdir
What is not pointed out yet, is that when using recursive:true, mkdir does not return an error if the directory already existed.
So you can do:
// mkdir with recursive:true never reports EEXIST, so any error here is real.
fsNative.mkdir(dirPath, { recursive: true }, (mkdirError) => {
  if (!mkdirError) {
    // directory now exists
    return;
  }
  // note: this does NOT get triggered if the directory already existed
  console.warn(mkdirError);
});
Using promises
Also since node 10, you can get Promise versions of all fs functions by requiring from fs/promises
So putting those two things together, you get this simple solution:
import * as fs from 'fs/promises';

try {
  await fs.mkdir(dirPath, { recursive: true });
} catch (err) {
  // decide what you want to do if this failed
  console.error(err);
}
// directory now exists
fs.exists() is deprecated. So I have used fs.stat() to check the directory status. If the directory does not exist, fs.stat() rejects with an error whose message reads like 'no such file or directory'. Then I have created a directory.
const fs = require('fs').promises;
const dir = './dir';

// Probe the path with fs.stat(); when the probe rejects because the
// path is missing, create the directory.
fs.stat(dir).catch(async (err) => {
  // BUG FIX: match on the stable error code rather than the
  // human-readable message text, whose exact wording varies across
  // platforms and Node versions.
  if (err.code === 'ENOENT') {
    await fs.mkdir(dir);
  }
});
With Node.js 10 + ES6:
import path from 'path';
import fs from 'fs';

(async () => {
  const uploadDir = path.join(__dirname, 'upload');
  try {
    await fs.promises.mkdir(uploadDir);
  } catch (error) {
    // Any failure other than "already exists" is fatal.
    if (error.code !== 'EEXIST') {
      throw error;
    }
    // Something already exists, but is it a file or directory?
    const stats = await fs.promises.lstat(uploadDir);
    if (!stats.isDirectory()) {
      throw error;
    }
  }
})();
I'd like to add a TypeScript Promise refactor of josh3736's answer.
It does the same thing and has the same edge cases. It just happens to use Promises, TypeScript typedefs, and works with "use strict".
// https://en.wikipedia.org/wiki/File_system_permissions#Numeric_notation
const allRWEPermissions = parseInt("0777", 8);

/**
 * Ensures the folder at `path` exists, creating it with `mask`
 * permissions when missing. Resolves on success or when the folder
 * already exists; rejects on any other error.
 */
function ensureFilePathExists(path: string, mask: number = allRWEPermissions): Promise<void> {
    return new Promise<void>(
        function(resolve: (value?: void | PromiseLike<void>) => void,
            reject: (reason?: any) => void): void {
            mkdir(path, mask, function(err: NodeJS.ErrnoException): void {
                if (err) {
                    if (err.code === "EEXIST") {
                        // BUG FIX: resolve(null) is rejected under strictNullChecks,
                        // because null is not assignable to void | PromiseLike<void> | undefined.
                        resolve(); // Ignore the error if the folder already exists
                    } else {
                        reject(err); // Something else went wrong
                    }
                } else {
                    resolve(); // Successfully created folder
                }
            });
        });
}
I had to create sub-directories if they didn't exist. I used this:
const path = require('path');
const fs = require('fs');
// Ensure every parent directory of the given path exists,
// creating each missing level on the way down.
function ensureDirectoryExists(p) {
  const parent = path.dirname(p);
  if (parent && parent !== p) {
    ensureDirectoryExists(parent);
  }
  if (!fs.existsSync(parent)) {
    fs.mkdirSync(parent);
  }
}
You can use the Node.js File System command fs.stat to check if a directory exists and fs.mkdir to create a directory with callback, or fs.mkdirSync to create a directory without callback, like this example:
// First require fs
const fs = require('fs');
// Create directory if not exist (function)
const createDir = (path) => {
  // Check if dir exist
  fs.stat(path, (err, stats) => {
    // BUG FIX: when the path does not exist, fs.stat reports an error
    // and `stats` is undefined — the original dereferenced it and threw
    // a TypeError. Treat a stat failure (typically ENOENT) as "missing".
    if (err) {
      fs.mkdirSync(path);
    } else if (stats.isDirectory()) {
      // Do nothing
    } else {
      // If the given path is not a directory, create a directory
      // (mkdirSync will then raise EEXIST, as the original did)
      fs.mkdirSync(path);
    }
  });
};
From the documentation this is how you do it asynchronously (and recursively):
const fs = require('fs');
const fsPromises = fs.promises;

// Create `dir` if fs.access says it is not reachable.
// BUG FIX: the original awaited the callback-style fs.mkdir (which
// returns undefined) while also passing a callback — mixing the two
// APIs. Use the promise-based mkdir and handle its rejection.
fsPromises.access(dir, fs.constants.F_OK)
  .catch(async () => {
    try {
      await fsPromises.mkdir(dir, { recursive: true });
    } catch (err) {
      console.log(err);
    }
  });
Here is a little function to recursively create directories:
const createDir = (dir) => {
  // This will create a dir given a path such as './folder/subfolder'
  const segments = dir.split('/');
  segments.reduce((builtPath, segment) => {
    // a leading '.' segment is kept as-is and never mkdir'ed
    if (segment === '.') {
      return segment;
    }
    const nextPath = builtPath + '/' + segment;
    if (!fs.existsSync(nextPath)) {
      fs.mkdirSync(nextPath);
    }
    return nextPath;
  }, '');
};
my solutions
CommonJS
var fs = require("fs");
var dir = __dirname + '/upload';

// Create the upload folder with rwxr--r-- permissions unless it already
// exists. (Note: mkdirSync's default mode is 0o777, not 0o744.)
if (!fs.existsSync(dir)) {
  fs.mkdirSync(dir, {
    mode: 0o744,
  });
}
ESM
update package.json config
{
//...
"type": "module",
//...
}
import fs from "fs";
import path from "path";

// Emulate `__dirname`, which does not exist in the es-module env ⚠️
// NOTE(review): path.resolve() actually yields the current working
// directory, not this module's directory — they only coincide when the
// script is run from its own folder.
const __dirname = path.resolve();
const dir = __dirname + '/upload';

if (!fs.existsSync(dir)) {
  fs.mkdirSync(dir);
}

// OR — with explicit permissions:
if (!fs.existsSync(dir)) {
  fs.mkdirSync(dir, {
    mode: 0o744, // mkdirSync's default mode is 0o777
  });
}
refs
https://nodejs.org/api/fs.html#fsexistssyncpath
https://github.com/nodejs/help/issues/2907#issuecomment-671782092
Using async / await:
// Create `directory`, resolving with mkdirAsync's result, or undefined
// when the directory already exists. Any other error is rethrown.
const mkdirP = async (directory) => {
  try {
    return await fs.mkdirAsync(directory);
  } catch (error) {
    // BUG FIX: the original did `throw e;` but the catch binding is
    // `error`, so a non-EEXIST failure raised ReferenceError instead
    // of the real error. Rethrow the caught binding.
    if (error.code !== 'EEXIST') {
      throw error;
    }
  }
};
You will need to promisify fs:
import nodeFs from 'fs';
import bluebird from 'bluebird';
const fs = bluebird.promisifyAll(nodeFs);
A function to do this asynchronously (adjusted from a similar answer on SO that used sync functions, that I can't find now)
// ensure-directory.js
import { mkdir, access } from 'fs'

/**
 * Recursively creates every missing folder along directoryPath
 * (a path to a directory — no trailing file!). Accepts POSIX-absolute
 * ('/a/b'), Windows-drive ('c:\a\b'), and relative paths.
 * @throws {Error} when a folder cannot be created (original error in `cause`)
 */
const ensureDirectory = async directoryPath => {
  directoryPath = directoryPath.replace(/\\/g, '/')
  // -- preparation to allow absolute paths as well
  let root = ''
  if (directoryPath[0] === '/') {
    root = '/'
    directoryPath = directoryPath.slice(1)
  } else if (directoryPath[1] === ':') {
    root = directoryPath.slice(0, 3) // c:\
    directoryPath = directoryPath.slice(3)
  }
  // -- create folders all the way down
  const folders = directoryPath.split('/')
  let folderPath = `${root}`
  for (const folder of folders) {
    folderPath = `${folderPath}${folder}/`
    const folderExists = await new Promise(resolve =>
      access(folderPath, error => {
        // BUG FIX: the original called resolve(true) even after
        // resolve(false); the second call was silently ignored, but the
        // intent is an either/or branch.
        if (error) {
          resolve(false)
        } else {
          resolve(true)
        }
      })
    )
    if (!folderExists) {
      await new Promise((resolve, reject) =>
        mkdir(folderPath, error => {
          if (error) {
            // BUG FIX: the path was meant to be interpolated — the
            // original rejected with the literal text 'folderPath'.
            // Also reject a real Error (carrying the cause), not a string.
            reject(new Error(`Error creating ${folderPath}`, { cause: error }))
          } else {
            resolve(folderPath)
          }
        })
      )
    }
  }
}

export default ensureDirectory

Resources