How to use the brotli npm module to compress files - node.js

I would like to use this npm module to compress files, but I'm a bit stuck on the documentation.
In a Linux shell:
npm install brotli   # npm 4.1.2, brotli 1.3.1
node                 # v6.9.4
Then inside node:
var fs = require('fs');
var brotli = require('brotli');
brotli.compress(fs.readFileSync('myfile.txt')); // output is numbers
fs.writeFile('myfile.txt.br', brotli.compress(fs.readFileSync('bin.tar')), function (err) {
  if (!err) { console.log('It works!'); }
});
"It works!"
But the file is full of numbers too...
I've never used streams and fs like that in Node; can someone explain how to deal with this? Thanks!

With this simple JS code you can compress each *.html, *.css and *.js file inside the folder you choose (in this case dist/):
const fs = require('fs');
const compress = require('brotli/compress');
const brotliSettings = {
  extension: 'br',
  skipLarger: true,
  mode: 1,      // 0 = generic, 1 = text, 2 = font (WOFF2)
  quality: 10,  // 0 - 11
  lgwin: 12     // default
};
fs.readdirSync('dist/').forEach(file => {
  if (file.endsWith('.js') || file.endsWith('.css') || file.endsWith('.html')) {
    const result = compress(fs.readFileSync('dist/' + file), brotliSettings);
    fs.writeFileSync('dist/' + file + '.br', result);
  }
});
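As for why the original attempt produced a file full of numbers: compress() returns a Uint8Array, not a Buffer, and older fs.writeFile implementations stringify anything that is neither a string nor a Buffer, which yields the comma-separated decimal values you saw. Wrapping the result in Buffer.from makes the write binary; a minimal sketch:
const fs = require('fs');
const brotli = require('brotli');

// compress() returns a Uint8Array; wrap it in a Buffer so
// fs.writeFileSync writes the raw bytes instead of their
// stringified decimal values
const compressed = brotli.compress(fs.readFileSync('myfile.txt'));
fs.writeFileSync('myfile.txt.br', Buffer.from(compressed));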

Related

How can you archive with tar in NodeJS while only storing the subdirectory you want?

Basically I want to do the equivalent of this: How to strip path while archiving with TAR, but with the tar commands imported into NodeJS, so currently I'm doing this:
const fs = require('fs');
const zlib = require('zlib');
const tar = require('tar');
const { Readable, pipeline } = require('stream');

const gzip = zlib.createGzip();
const pack = new tar.Pack({ prefix: "" });
const source = Readable.from('public/images/');
const destination = fs.createWriteStream('public/archive.tar.gz');
pipeline(source, pack, gzip, destination, (err) => {
  if (err) {
    console.error('An error occurred:', err);
    process.exitCode = 1;
  }
});
But doing so leaves me with entries like "/public/images/a.png" and "public/images/b.png", when what I want is entries like "/a.png" and "/b.png". I want to know how I can adjust this process to strip out the unneeded directories while keeping the files where they are on disk.
You need to change the working directory:
// cwd: the current working directory for creating the archive. Defaults to process.cwd().
new tar.Pack({ cwd: "./public/images" });
const source = Readable.from('');
Source: the node-tar documentation
Example: https://github.com/npm/node-tar/blob/main/test/pack.js#L93
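Putting that together with the pipeline from the question, a sketch (untested; it assumes, as in the linked test, that paths written into Pack are resolved against cwd and that directories are added recursively):
const fs = require('fs');
const zlib = require('zlib');
const tar = require('tar');
const { Readable, pipeline } = require('stream');

// Entries are recorded relative to cwd, so the archive contains
// "a.png" rather than "public/images/a.png"
const pack = new tar.Pack({ cwd: './public/images' });
const source = Readable.from('./'); // add everything under cwd
const gzip = zlib.createGzip();
const destination = fs.createWriteStream('public/archive.tar.gz');

pipeline(source, pack, gzip, destination, (err) => {
  if (err) {
    console.error('An error occurred:', err);
    process.exitCode = 1;
  }
});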

Node search for file upwards

I'm looking for a function in node.js to search upwards in the filesystem to check if a given file exists and if so get its content.
For example, if I have the following folder structure:
* root
|--dir1
| |-dir2
| |-dir3
|...
If I'm in dir3 I want to search for a given file that could be in any folder above me, but not in their subfolders. So what I want is a simple way to check the current folder for the file; if it isn't there, go up one folder and search there, until you either find the file or reach the root folder.
I think it's easy to write yourself. You can use fs.readdir or fs.readdirSync.
A synchronous solution might look like this (not tested):
var fs = require('fs');
var path = require('path');

function findFile(filename, startdir) {
    if (!startdir) startdir = process.cwd();
    while (true) {
        var list = fs.readdirSync(startdir);
        if (list.indexOf(filename) !== -1) {
            // found
            return fs.readFileSync(path.join(startdir, filename));
        } else if (startdir === '/') {
            // root dir, file not found
            return null;
        } else {
            startdir = path.normalize(path.join(startdir, '..'));
        }
    }
}
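Usage, with a hypothetical config.json as the target file:
var content = findFile('config.json'); // starts from process.cwd()
if (content !== null) {
    console.log(content.toString('utf8'));
}
Note that the '/' root check is POSIX-specific; on Windows you would compare startdir against path.parse(startdir).root instead.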
There is also an npm package, find-up, that does this.
From its readme:
const path = require('path');
const findUp = require('find-up');

(async () => {
    console.log(await findUp('unicorn.png'));
    //=> '/Users/sindresorhus/unicorn.png'

    console.log(await findUp(['rainbow.png', 'unicorn.png']));
    //=> '/Users/sindresorhus/unicorn.png'

    console.log(await findUp(async directory => {
        const hasUnicorns = await findUp.exists(path.join(directory, 'unicorn.png'));
        return hasUnicorns && directory;
    }, {type: 'directory'}));
    //=> '/Users/sindresorhus'
})();

Exclude SVG from gulp-svg-sprite build process

I'm currently building an SVG Icon system using gulp-svg-sprite and have run into a situation where I need to exclude some icons from the build process.
Is there a way to EXCLUDE an SVG from running through these 2 pipes? Somehow I need to get the src filename, compare it to the SVG I want to exclude, and do something like:
if src != svgToExclude then run the pipes.
I don't want those one-off SVGs that require two styleable paths being optimized via SVGO and the other plugins.
Here is the code I'm working with:
const gulp = require('gulp');
const svgo = require('gulp-svgo');
const rsp = require('remove-svg-properties').stream;
const dom = require('gulp-dom');
const xmlEdit = require('gulp-edit-xml');
const gulpIf = require('gulp-if');
const gulpIgnore = require('gulp-ignore');
const { toPath } = require('svg-points');

var excludeIcon = './utilities/checkbox-checked/checkbox-checked--s.svg';

const svgBuild = src => {
    return gulp
        .src(src)
        .pipe(
            rsp.remove({
                properties: ['fill', rsp.PROPS_STROKE],
                log: false,
            })
        )
        .pipe(
            svgo({
                js2svg: {
                    indent: 2,
                    pretty: true,
                },
                plugins: [{ removeTitle: true }],
            })
        );
};

module.exports = svgBuild;
I'm new to gulp & node so any help would be appreciated!
Thanks,
- Ryan
Using gulp-filter:
const filter = require('gulp-filter');
something like
var excludeIconArray = ["iconToExclude1.svg", "iconToExclude2.svg" /* , ... */];
const svgBuild = src => {
    const svgFilter = filter(file => {
        // will probably need file.path string manipulations here
        // file.path is the full path but you can select portions of it
        // so the below is just pseudocode
        return !excludeIconArray.includes(file.path);
    });
    return gulp
        .src(src)
        // put the next pipe wherever you want to exclude certain files
        // either right after source or just before the svgo pipe
        .pipe(svgFilter)
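A complete version of svgBuild with the filter wired in might look like this (a sketch; the excluded file names and the basename comparison are assumptions to adapt to your tree):
const gulp = require('gulp');
const path = require('path');
const filter = require('gulp-filter');
const svgo = require('gulp-svgo');
const rsp = require('remove-svg-properties').stream;

const excludeIconArray = ['checkbox-checked--s.svg']; // hypothetical names

const svgBuild = src => {
    // keep every file whose basename is NOT in the exclude list
    const svgFilter = filter(file => !excludeIconArray.includes(path.basename(file.path)));
    return gulp
        .src(src)
        .pipe(svgFilter) // excluded icons skip everything below
        .pipe(rsp.remove({ properties: ['fill', rsp.PROPS_STROKE], log: false }))
        .pipe(svgo({ js2svg: { indent: 2, pretty: true }, plugins: [{ removeTitle: true }] }));
};

module.exports = svgBuild;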

nodejs get file character encoding

How can I find out what character encoding a given text file has?
var inputFile = "filename.txt";
var file = fs.readFileSync(inputFile);
var data = new Buffer(file, "ascii");
var fileEncoding = some_clever_function(file);
if (fileEncoding !== "utf8") {
    // do something
}
Thanks
You can try an external module, such as https://www.npmjs.com/package/detect-character-encoding
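A minimal sketch of its usage, following that module's readme (the confidence value is illustrative):
const fs = require('fs');
const detectCharacterEncoding = require('detect-character-encoding');

const fileBuffer = fs.readFileSync('filename.txt');
const charsetMatch = detectCharacterEncoding(fileBuffer);
console.log(charsetMatch);
// e.g. { encoding: 'UTF-8', confidence: 60 }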
The previously mentioned module works for me too. Alternatively, you could have a look at detect-file-encoding-and-language, which I'm using at the moment.
Installation:
$ npm install detect-file-encoding-and-language
Usage:
// index.js
const languageEncoding = require("detect-file-encoding-and-language");
const pathToFile = "/home/username/documents/my-text-file.txt";

languageEncoding(pathToFile).then(fileInfo => console.log(fileInfo));
// Possible result: { language: 'japanese', encoding: 'Shift-JIS', confidence: { language: 0.97, encoding: 1 } }

Check package version at runtime in nodejs?

I have some of my entries in package.json defined as "*"
"dependencies": {
"express": "4.*",
"passport": "*",
"body-parser": "*",
"express-error-handler": "*"
},
I want to freeze those values at the currently installed versions. How can I know what version my packages are at run time? I don't mind checking one by one, since I don't have many of them :)
BTW: I cannot do npm list --depth=0 because I cannot access the vm directly (PaaS restriction), just the logs.
You can use the fs module to read the directories in the node_modules directory and then read package.json in each of them.
var fs = require('fs');
var dirs = fs.readdirSync('node_modules');
var data = {};

dirs.forEach(function (dir) {
    try {
        var file = './node_modules/' + dir + '/package.json';
        var json = require(file);
        var name = json.name;
        var version = json.version;
        data[name] = version;
    } catch (err) {}
});

console.debug(data['express']); //= 4.11.2
Just in case you need the version on the front end, there is an npm package just for this, and it can be used both client-side and server-side.
global-package-version
You can use it in your code like this
import globalPackageVersion from 'global-package-version';
// package name is 'lodash'
globalPackageVersion(require('lodash/package.json'));
// You can type 'packageVersion' in browser console to check lodash version
// => packageVersion = { lodash: '4.7.2'}
packageVersion becomes a global object when used on the server side and a window property when used on the client side. It works well with webpack and all other bundling tools.
Disclaimer: I am the author of this package :)
I've 'modernised' @laggingreflex's answer a bit; this works on ES6+ / Node 10, tested on a Lambda running in AWS. It's an endpoint from an Express app.
const fs = require("fs");
module.exports.dependencies = async (_req, res) => {
const dirs = fs.readdirSync("./node_modules");
const modulesInfo = dirs.reduce((acc, dir) => {
try {
const file = `${dir}/package.json`;
const { name, version } = require(file);
return { ...acc, [name]: version };
} catch (err) {}
}, {});
res.status(200).json(modulesInfo);
};
The accepted solution can be improved upon in terms of both performance and stability:
1: The package name IS the directory name. In the typical case where you are looking for a specific package, you do not need to load every module.
2: The code above will not run on all OSes, due to the way the paths are formed.
3: Using require means the path needs to be relative to the current file (this would only work if your file is located at the top of your project folder, alongside node_modules). In most cases, using readFile or readFileSync is an easier approach.
const fs = require('fs');
const path = require('path');

const dirs = fs.readdirSync('node_modules');
const data = {};

// add the ones you care about
const trackedPackages = ['express', 'passport', 'body-parser'];

dirs.forEach(function (dir) {
    if (trackedPackages.indexOf(dir) > -1) {
        try {
            const json = JSON.parse(
                fs.readFileSync(path.join('node_modules', dir, 'package.json'), 'utf8')
            );
            data[dir] = json.version;
        } catch (e) {
            console.log(`failed to read/parse package.json for ${dir}`, e);
        }
    }
});

console.debug(data['express']); //= 4.11.2
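If you only need one known dependency's version, you can also lean on Node's module resolution instead of building paths by hand (a sketch; it works as long as the package still exposes its package.json, which packages with a restrictive "exports" map may not):
// resolved through node_modules by require() itself
const { version } = require('express/package.json');
console.log(version); //= e.g. 4.11.2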
