I'm using gulp and I'm trying to create a gulp task that combines files into a single JavaScript file.
For example, imagine I have this:
File template\template1.html:
<h2>some html content</h2>
<p>blah blah blah</p>
File template\template2.html:
<h2>some other html content</h2>
<img src='cat.png'>
I'd like to read and merge these files into a single JavaScript file like this:
const templates = {
"template1" : "<h2>some html content</h2>\n<p>blah blah blah</p>",
"template2" : "<h2>some other html content</h2>\n<img src='cat.png'>"
}
export default templates
However, I'm failing when dealing with gulp plumbing (I'm quite new to gulp, I admit).
How can I reach my goal?
Right now I'm trying to play with gulp-through, but it fails at execution:
const gulp = require('gulp');
const through = require('gulp-through');
gulp.task('templates', function () {
var result = {}
gulp.src('src/templates/**/*.html')
.pipe(through('readFile', function(){
console.log(arguments); // not reached!
}, defaults));
})
gulp.task('default', ['templates'])
It shouldn't be hard to write your own plugin using the through2 module (as explained in the official docs).
// gulpfile.js
const gulp = require('gulp');
const path = require('path');
const through = require('through2'); // npm install --save-dev through2
const toTemplateModule = obj => {
  return [
    `const templates = ${JSON.stringify(obj, null, 2)};`,
    '',
    'export default templates;'
  ].join('\n');
};

const mergeTemplate = fileName => {
  const results = {};
  let latestFile;
  return through.obj(
    function(file, encoding, callback) {
      latestFile = file;
      // Key by base name without the extension: "template1", not "template1.html".
      results[path.basename(file.path, path.extname(file.path))] = file.contents.toString();
      // Don't pass the individual .html files downstream; only the joined file is emitted.
      callback();
    },
    function(callback) {
      const joinedFile = latestFile.clone({ contents: false });
      joinedFile.path = path.join(latestFile.base, fileName);
      joinedFile.contents = Buffer.from(toTemplateModule(results), 'utf-8');
      this.push(joinedFile);
      callback();
    });
};
gulp.task('templates', () => {
  return gulp
    .src('./src/templates/**/*.html')
    .pipe(mergeTemplate('templates.js'))
    .pipe(gulp.dest('./build'));
});

gulp.task('default', ['templates'])
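With the two template files from the question, running gulp templates should then emit a single build/templates.js along these lines:

const templates = {
  "template1": "<h2>some html content</h2>\n<p>blah blah blah</p>",
  "template2": "<h2>some other html content</h2>\n<img src='cat.png'>"
};

export default templates;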
I need help with my gulp project. It drops an error in the terminal: TypeError: parallel is not a function. I've already searched the whole internet; the answer is to update Gulp to 4.0. I already updated, and it still shows the "parallel is not a function" issue.
My file:
const { src, dest, parallel, series, watch } = import('gulp');
const twig = import('gulp-twig');
const sass = import('gulp-sass');
const prefix = import('gulp-autoprefixer');
const data = import('gulp-data');
const sourcemaps = import('gulp-sourcemaps');
const concat = import('gulp-concat');
const plumber = import('gulp-plumber');
const browsersync = import('browser-sync');
const gulpcopy = import('gulp-copy');
const fs = import('fs');
const del = import('del');
const path = import('path');
var paths = {
build: {
html: 'dist/',
js: 'dist/assets/js/',
css: 'dist/assets/css/',
img: 'dist/assets/img/',
fonts: 'dist/assets/fonts/',
icons: 'dist/assets/icons/',
json: 'dist/assets/'
},
src: {
html: 'src/*.{htm,html,php}',
js: 'src/assets/js/*.js',
css: 'src/assets/sass/style.scss',
img: 'src/assets/img/**/*.*',
fonts: 'src/assets/fonts/**/*.*',
icons: 'src/assets/icons/**/*.*',
json: 'src/assets/*.json'
},
watch: {
html: 'src/**/*.{htm,html,php}',
js: 'src/assets/js/**/*.js',
css: 'src/assets/sass/**/*.scss',
img: 'src/assets/img/**/*.*',
fonts: 'src/assets/fonts/**/*.*',
icons: 'src/assets/icons/**/*.*',
json: 'src/assets/*.json'
},
clean: './dist'
};
// SCSS bundled into CSS task
function css() {
return src('client/scss/vendors/*.scss')
.pipe(sourcemaps.init())
// Stay live and reload on error
.pipe(plumber({
handleError: function (err) {
console.log(err);
this.emit('end');
}
}))
.pipe(sass({
includePaths: [paths.src.css + 'vendors/'],
outputStyle: 'compressed'
}).on('error', function (err) {
console.log(err.message);
// sass.logError
this.emit('end');
}))
.pipe(prefix(['last 15 versions','> 1%','ie 8','ie 7','iOS >= 9','Safari >= 9','Android >= 4.4','Opera >= 30'], {
cascade: true
}))
//.pipe(minifyCSS())
.pipe(concat('bootstrap.min.css'))
.pipe(sourcemaps.write('.'))
.pipe(dest('build/assets/css'));
}
// JS bundled into min.js task
function js() {
return src('dist/js/*.js')
.pipe(sourcemaps.init())
.pipe(concat('scripts.min.js'))
.pipe(sourcemaps.write('.'))
.pipe(dest('build/assets/js'));
}
function twigTpl () {
return src(['./dist/templates/*.twig'])
// Stay live and reload on error
.pipe(plumber({
handleError: function (err) {
console.log(err);
this.emit('end');
}
}))
// Load template pages json data
.pipe(data(function (file) {
return JSON.parse(fs.readFileSync(paths.data + path.basename(file.path) + '.json'));
}).on('error', function (err) {
process.stderr.write(err.message + '\n');
this.emit('end');
})
)
// Load default json data
.pipe(data(function () {
return JSON.parse(fs.readFileSync(paths.data + path.basename('default.twig.json')));
}).on('error', function (err) {
process.stderr.write(err.message + '\n');
this.emit('end');
})
)
// Twig compiled
.pipe(twig()
.on('error', function (err) {
process.stderr.write(err.message + '\n');
this.emit('end');
})
)
.pipe(dest(paths.build));
}
function copyAssets() {
// Copy assets
return src(['./dist/assets/**/*.*','!./dist/assets/**/*.psd','!./dist/assets/**/*.*.map'],
del(paths.build + 'assets/**/*')
)
.pipe(gulpcopy(paths.build + 'assets', { prefix: 2 }));
}
// BrowserSync
function browserSync() {
browsersync({
server: {
baseDir: paths.build
},
notify: false,
browser: "google chrome",
// proxy: "0.0.0.0:5000"
});
}
// BrowserSync reload
function browserReload () {
return browsersync.reload;
}
// Watch files
function watchFiles() {
// Watch SCSS changes
watch(paths.scss + '**/*.scss', parallel(css))
.on('change', browserReload());
// Watch javascripts changes
watch(paths.js + '*.js', parallel(js))
.on('change', browserReload());
// Watch template changes
watch(['dist/templates/**/*.twig','dist/data/*.twig.json'], parallel(twigTpl))
.on('change', browserReload());
// Assets Watch and copy to build in some file changes
watch('dist/assets/**/*')
.on('change', series(copyAssets, css, css_vendors, js, browserReload()));
}
const watching = parallel(watchFiles, browserSync);
exports.js = js;
exports.css = css;
exports.default = parallel(copyAssets, css, js, twigTpl);
exports.watch = watching;
My gulp version:
"node": "18.12.1"
"gulp": "^4.0.2",
When I run gulp -v in the terminal, it shows:
CLI version: 2.3.0
Local version: 4.0.2
Expecting professional help
Since your gulpfile is a CommonJS module, use require instead of import:
const { src, dest, parallel, series, watch } = require('gulp');
const twig = require('gulp-twig');
const sass = require('gulp-sass');
const prefix = require('gulp-autoprefixer');
const data = require('gulp-data');
const sourcemaps = require('gulp-sourcemaps');
const concat = require('gulp-concat');
const plumber = require('gulp-plumber');
const browsersync = require('browser-sync');
const gulpcopy = require('gulp-copy');
const fs = require('fs');
const del = require('del');
const path = require('path');
Using an ESM-style gulpfile is also an option, but it's not as simple as replacing require with import (by the way, the syntax is import sass from 'gulp-sass'; and so on); you would also need to rename the file to "gulpfile.mjs", change the style of your exports, and probably more.
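For illustration, a minimal ESM-style gulpfile might look like this (an untested sketch; only one of your tasks is shown):

// gulpfile.mjs
import gulp from 'gulp';        // CommonJS packages are imported via their default export
import twig from 'gulp-twig';

const { src, dest, parallel } = gulp;

function twigTpl() {
  return src('./dist/templates/*.twig')
    .pipe(twig())
    .pipe(dest('dist/'));
}

export default parallel(twigTpl);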
I just wrote a script to release a build of one of the products I'm working on. The script does the job, but I don't really like the code itself; it looks like spaghetti code and callback hell combined.
Is there a cleaner way to do this? I'd like to be able to run commands in series, log their output (stdout.on('data')), and log when each task is finished. (That makes further debugging easier, and while waiting for a task to finish it's reassuring to know what's happening in the background.)
Maybe using Promises would help clean up the mess a bit, but still, I feel like there should be a cleaner way to deal with multiple commands.
Some explanation about what the code does:
Create a tag with the commit you want and the tag version you want, e.g. git tag 1.2.5.
Build the release file with gulp build.
Create a folder doc/<tag>.
Convert doc/doc_reader.odt to doc/<tag>/documentation.pdf. (Open it and export as PDF)
Copy build/reader.js and doc/changelog.txt in the created folder.
Zip the 3 files.
Commit everything with the commit message Release 1.2.11 (for example).
Push.
Create a new release on GitHub using the commit you just pushed and the same tag.
Here is the code, as an example. (ES5, Node 4.6.0+)
var mkdirp = require('mkdirp');
var fs = require('fs-extra');
var path = require('path');
var spawn = require('child_process').spawn;
var zip = new require('node-zip')();
var package = require('../package.json');
var version = package.version;
var releaseDirectory = 'doc'
var gitTagProcess = spawn('git', ['tag', version]);
var gulpBuildProcess = spawn('gulp', ['build']);
console.log(`Running "git tag ${version}"...`);
gitTagProcess.stdout.on('data', function (chunk) {
console.log(chunk.toString('utf8'));
});
gitTagProcess.on('close', function () {
console.log('Tag created.')
console.log('Running "gulp build"...');
gulpBuildProcess.stdout.on('data', function (chunk) {
console.log(chunk.toString('utf8'));
});
gulpBuildProcess.on('close', function () {
console.log('"gulp build" done.')
console.log(`Creating "${releaseDirectory}/${version}" directory.`)
mkdirp(`${releaseDirectory}/${version}`, function () {
console.log('Directory created.');
var docFile = `${releaseDirectory}/doc_reader.md`;
console.log(`Converting ${docFile} to pdf ...`);
var docBuildProcess = spawn('npm', ['run', 'build:doc']);
docBuildProcess.stdout.on('data', function (chunk) {
console.log(chunk.toString('utf8'));
});
docBuildProcess.on('close', function () {
console.log('Doc created.');
console.log('Copying changelog.txt ...');
fs.copySync('doc/changelog.txt', `doc/${version}/changelog.txt`);
console.log('changelog.txt copied.');
console.log(`Copying "build/reader.js" to "doc/reader-${version}.js" and "doc/reader.js" ...`);
fs.copySync('build/reader.js', `doc/${version}/reader.js`);
fs.copySync('build/reader.js', `doc/${version}/reader-${version}.js`);
console.log('reader.js copied.');
console.log('Zipping all files ...');
zip.file('changelog.txt', fs.readFileSync(`doc/${version}/changelog.txt`));
zip.file('doc_reader.pdf', fs.readFileSync(`doc/${version}/doc_reader.pdf`));
zip.file('reader.js', fs.readFileSync(`doc/${version}/reader.js`));
zip.file(`reader-${version}.js`, fs.readFileSync(`doc/${version}/reader-${version}.js`));
var data = zip.generate({ base64: false, compression: 'DEFLATE' });
var zipFilename = `doc/${version}/HTML5Reader_${version}.zip`;
fs.writeFileSync(zipFilename, data, 'binary'); // it's important to use the *binary* encoding here
console.log(`${zipFilename} created.`);
console.log(`\nRelease ${version} done. Please add generated files and commit using:`);
console.log(`\n\tgit add * && git commit -m "Release ${version}"`);
console.log(`\n\nDon't forget to push and create a new release on GitHub at https://github.com/$domain/$product/releases/new?tag=${version}`);
});
});
});
});
Edit:
I used the async-mkdirp and mz exec modules, which can be awaited directly, but I couldn't find an equivalent for spawn.
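(For what it's worth, an await-able spawn that still streams output can be hand-rolled in a few lines; a minimal sketch, with a helper name of my own choosing:)

const { spawn } = require('child_process');

// Resolves when the child exits cleanly; stdio: 'inherit' streams output live.
function spawnAsync(command, args) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, { stdio: 'inherit' });
    child.on('error', reject);
    child.on('close', code => code === 0
      ? resolve()
      : reject(new Error(`${command} exited with code ${code}`)));
  });
}

Here is the implementation using async/await (Node 7.8.0):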
const mkdirp = require('async-mkdirp');
const fs = require('fs-extra');
const spawn = require('child-process-promise').spawn;
const exec = require('mz/child_process').exec;
const zip = new require('node-zip')();
const c = require('chalk');
const error = c.bold.red;
const warn = c.yellow;
const info = c.cyan;
const info2 = c.magenta;
const version = require('../package.json').version;
const releaseDirectory = 'doc'
async function git_tag() {
async function exec_git_tag() {
return await exec(`git tag ${version}`);
}
console.log(info(`Creating git tag ${version}`));
return exec_git_tag()
.then(() => {
console.log(info(`Git tag created for ${version}`))
})
.catch((err) => {
console.log(warn('warn', err));
})
// Finally
.then(() => {
console.log(info(`"git tag ${version}" - Completed`))
});
};
async function gulp_build() {
async function exec_gulp_build() {
const promise = spawn('gulp', ['build'])
const childProcess = promise.childProcess;
childProcess.stdout.on('data', (data) => {
console.log(info2(data.toString()));
});
childProcess.stderr.on('data', (data) => {
console.log(error(data.toString()));
});
return promise
.catch((err) => {
console.error(error(err));
})
// Finally
.then(() => {
console.log(info('"gulp build" - Completed'))
});
}
console.log(info('Running "gulp build"...'))
return exec_gulp_build()
}
async function create_dir() {
const dirPath = `${releaseDirectory}/${version}`;
console.log(info(`Creating "${dirPath}" directory.`))
await mkdirp(`${dirPath}`);
console.log(info(`Directory ${dirPath} created.`));
}
async function build_doc() {
const docFile = `${releaseDirectory}/doc_reader.md`;
console.log(info(`Converting ${docFile} to pdf ...`));
async function exec_build_doc() {
return await exec(`npm run build:doc`);
}
return exec_build_doc()
.catch((err) => {
console.error(error(err));
})
.then(() => {
console.log(info(`Doc "${docFile}" created.`));
})
}
function copy_files() {
console.log(info('Copying changelog.txt ...'));
fs.copySync('doc/changelog.txt', `doc/${version}/changelog.txt`);
console.log(info('changelog.txt copied.'));
console.log(info(`Copying "build/reader.js" to "doc/reader-${version}.js" and "doc/reader.js" ...`));
fs.copySync('build/reader.js', `doc/${version}/reader.js`);
fs.copySync('build/reader.js', `doc/${version}/reader-${version}.js`);
console.log(info('reader.js copied.'));
}
function zip_files() {
console.log(info('Zipping all files ...'));
zip.file('changelog.txt', fs.readFileSync(`doc/${version}/changelog.txt`));
zip.file('doc_reader.pdf', fs.readFileSync(`doc/${version}/doc_reader.pdf`));
zip.file('reader.js', fs.readFileSync(`doc/${version}/reader.js`));
zip.file(`reader-${version}.js`, fs.readFileSync(`doc/${version}/reader-${version}.js`));
const data = zip.generate({ base64: false, compression: 'DEFLATE' });
const zipFilename = `doc/${version}/HTML5Reader_${version}.zip`;
fs.writeFileSync(zipFilename, data, 'binary'); // it's important to use the *binary* encoding here
console.log(info(`${zipFilename} created.`));
}
async function release() {
await git_tag();
await gulp_build();
await create_dir();
await build_doc();
copy_files();
zip_files();
console.log(`\nRelease ${version} done. Please add generated files and commit using:`);
console.log(`\n\tgit add . && git commit -m "Release ${version}"`);
}
release();
There is an mz module that can be very helpful here. See:
https://www.npmjs.com/package/mz
This, combined with async/await will allow you to write code like this:
let exec = require('mz/child_process').exec;
(async () => {
let version = await exec('node --version');
console.log(version);
let result = await exec('some other command');
console.log(result);
// ...
})();
This is a simple example but you can use all functions from the child_process, fs and many other modules that way.
What's important here is that this code is still asynchronous and non-blocking.
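Applied to the release script from the question, the whole sequence flattens into a single async function. A sketch, reusing the commands and paths from the original script (and assuming fs-extra ≥ 5 for its promise-returning methods):

const exec = require('mz/child_process').exec;
const fs = require('fs-extra');
const version = require('../package.json').version;

(async () => {
  await exec(`git tag ${version}`);
  await exec('gulp build');
  await fs.ensureDir(`doc/${version}`);   // promise-based mkdir -p
  await exec('npm run build:doc');
  fs.copySync('doc/changelog.txt', `doc/${version}/changelog.txt`);
  fs.copySync('build/reader.js', `doc/${version}/reader.js`);
  console.log(`Release ${version} done.`);
})().catch(err => {
  console.error(err);
  process.exit(1);
});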
Note that you can only use await inside of a function created with the async keyword. For more info, see:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await
For support in browsers, see:
http://caniuse.com/async-functions
For support in Node, see:
http://node.green/#ES2017-features-async-functions
In places where you don't have native support for async and await you can use Babel:
https://babeljs.io/docs/plugins/transform-async-to-generator/
or, with a slightly different syntax, a generator-based approach like co or Bluebird coroutines:
https://www.npmjs.com/package/co
http://bluebirdjs.com/docs/api/promise.coroutine.html
In my package.json, I'm running a node module that can only handle one file at a time (I didn't write it). For example:
cleancss somefile.css -o somefile-min.css
I want to be able to do something like:
printfilelist -dir /public/css -files *.css | cleancss {filepath}.{fileext} -o {filename}-min.css
Is there any way to do this?
I recently encountered a similar requirement to run cleancss across multiple files, and eventually opted for a solution similar to the one discussed in the comments, i.e. using the clean-css API from a utility Node.js script.
npm-script
"scripts": {
"cleancss": "glob \"public/css/**/*.css\" | node .scripts/cleancss.js"
},
Note the initial use of cli-glob (added to package.json) for obtaining the paths before piping them to cleancss.js.
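For example, with two stylesheets in the project (hypothetical paths), the glob command prints one path per line, which the script reads from stdin:

public/css/main.css
public/css/vendor/grid.css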
cleancss.js
The utility Node script is as follows (albeit rather rudimentary!):
#!/usr/bin/env node
var fs = require('fs');
var path = require('path');
var readline = require('readline');
var CleanCSS = require('clean-css');
var mkdirp = require('mkdirp');
var rl = readline.createInterface({
input: process.stdin,
output: null,
terminal: false
});
var options = {
// ... https://www.npmjs.com/package/clean-css#constructor-options
// ... https://www.npmjs.com/package/clean-css#formatting-options
};
function saveFile(outputPath, minified) {
fs.writeFile(outputPath, minified, function(err) {
if (err) {
return console.log(err);
}
});
}
function pathParts(srcPath) {
var ext = path.extname(srcPath),
name = path.basename(srcPath, ext),
dirPath = path.dirname(srcPath),
pathParts = dirPath.split(path.sep),
pathNoRoot;
pathParts.shift();
pathNoRoot = pathParts.join(path.sep);
return {
ext: ext,
name: name,
pathNoRoot: pathNoRoot
};
}
function minify(srcPath, outputPath) {
var output = new CleanCSS(options).minify([srcPath]);
saveFile(outputPath, output.styles);
}
function processPath(srcPath) {
var outDir = process.env.npm_package_config_css_outdir || path.dirname(srcPath),
p = pathParts(srcPath),
newName = p.name + '.min' + p.ext,
mkDirPath,
outputPath;
if (process.env.npm_package_config_css_outdir) {
mkDirPath = outDir + path.sep + p.pathNoRoot + path.sep;
mkdirp(mkDirPath, function() {
outputPath = mkDirPath + newName;
minify(srcPath, outputPath);
});
} else {
outputPath = outDir + path.sep + newName;
minify(srcPath, outputPath);
}
}
rl.on('line', function(srcPath) {
processPath(srcPath);
});
Additional info
By default the .min.css files are output to the same path as the .css source file.
However, if you need to change the output destination, you can use the config object in package.json. The following example will output all .min.css files under the ./dist/ path:
"config": {
"css_outdir": "./dist"
},
"scripts": {
"cleancss": "glob \"public/css/**/*.css\" | node .scripts/cleancss.js"
},
cleancss.js utilizes mkdirp to replicate/mirror the .css source paths/subfolders in the destination folder when using config.css_outdir.
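So with css_outdir set to ./dist as above, sources map to mirrored destinations, e.g. (hypothetical paths):

public/css/main.css -> dist/css/main.min.css
public/css/vendor/grid.css -> dist/css/vendor/grid.min.css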
I am using the browser-sync proxy and want to load search-and-replace terms from external files in order to amend the page as it is loaded into the browser.
The reason I want to load the search and replace terms from separate files is that I want to make use of gulp-watch and reload browser-sync as the terms are updated.
My Project folder:
regex/search.txt <- search term is stored in this file
regex/replace.txt <- replace term is stored in this file
gulpfile.js
Contents of gulpfile.js:
var gulp = require('gulp'),
fs = require("fs"),
browserSync = require('browser-sync');
var proj_url = "http://www.example.com";
var search_text = "";
var replace_text = "";
gulp.task('readRegEx', function() {
return gulp.src('regex/*.txt')
.pipe(fs.readFile("regex/search.txt", "utf-8", function(err, data) {
search_text = data;
}))
.pipe(fs.readFile("regex/replace.txt", "utf-8", function(err, data) {
replace_text = data;
}))
});
gulp.task('browser-sync', function() {
browserSync({
proxy: {
target: proj_url
},
rewriteRules: [
{
match: search_text,
fn: function (match) {
return replace_text;
}
}
]
});
});
gulp.task('default', ['readRegEx','browser-sync'], function() {
gulp.watch(['regex/*.txt'], [browserSync.reload]);
});
This doesn't work. I get the following error:
TypeError: Cannot call method 'on' of undefined ...
For that to work you need to make the browser-sync task depend on readRegEx:
gulp.task('browser-sync', ['readRegEx'], function() {
This guarantees the proper execution order.
You can then make readRegEx synchronous (and simpler) this way:
gulp.task('readRegEx', function() {
search_text = fs.readFileSync("regex/search.txt", "utf-8").toString();
replace_text = fs.readFileSync("regex/replace.txt", "utf-8").toString();
});
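Put together, the relevant part of the gulpfile would look like this (a sketch, using the gulp 3.x task syntax from the question):

var gulp = require('gulp'),
    fs = require('fs'),
    browserSync = require('browser-sync');

var proj_url = "http://www.example.com";
var search_text = "";
var replace_text = "";

gulp.task('readRegEx', function() {
  search_text = fs.readFileSync("regex/search.txt", "utf-8").toString();
  replace_text = fs.readFileSync("regex/replace.txt", "utf-8").toString();
});

// ['readRegEx'] makes this task run only after the terms have been read.
gulp.task('browser-sync', ['readRegEx'], function() {
  browserSync({
    proxy: { target: proj_url },
    rewriteRules: [{
      match: search_text,
      fn: function(match) { return replace_text; }
    }]
  });
});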
I need to send a PDF file from an AngularJS client to a Node.js service.
I wrote the AngularJS service, and when I receive the file it's a string like this:
%PDF-1.3
3 0 obj
<</Type /Page
/Parent 1 0 R
/Reso
How can I convert this string back into a PDF in Node.js?
This is the client code:
var sendByEmail = function () {
$scope.generatingPdf = true;
$('#budget').show();
var pdf = new JsPDF('p', 'pt', 'letter');
var source = $('#budget')[0];
pdf.addHTML(source, 0, 0, function () {
var resultPdf = pdf.output();
BillService.sendByEmail("rbrlnx@gmail.com", resultPdf).then(function () {
});
$('#budget').hide();
});
};
var sendByEmail = function (email, file) {
var deferred = $q.defer();
var data = {
email: email,
file: file
};
BillService.sendByEmail(data, function (result) {
deferred.resolve(result);
}, function () {
deferred.reject();
});
return deferred.promise;
};
The server-side controller is empty:
var sendByEmail = function (req, res, next) {
var file = req.body.file;
};
I experimented with this a while ago, and I came up with this. It's not production ready by a long shot, but maybe you'll find it useful. It's free of front-end libraries (except Angular, of course), but assumes you're using Express 4.x and body-parser.
The result:
(Screenshots omitted: the file input in the browser, and the saved PDF on the server.)
What you're seeing:
A tiny Node server serving a static index.html and the Angular files, plus a POST route that receives a PDF in base64 (as delivered by the HTML FileReader API) and saves it to disk.
Instead of saving to disk, you can send it as an email attachment. See for instance here or here for some info on that.
The example below assumes a user uploading a PDF through a file input, but the idea is the same for any other way of sending a document to your back end. The most important thing is to send the PDF data as base64, because that is the format most file writers and email packages expect (as opposed to straight-up binary, for instance). The same goes for images, documents, etc.
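On the Node side, decoding such a base64 data URL back into binary is a one-liner with Buffer; a minimal sketch (the helper name is mine):

var fs = require('fs');

function savePdf(dataUrl, outputPath) {
  // Strip the data-URL prefix, then decode the base64 payload to raw bytes.
  var base64 = dataUrl.replace('data:application/pdf;base64,', '');
  fs.writeFileSync(outputPath, Buffer.from(base64, 'base64'));
}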
How did I do that:
In your HTML:
<div pdfs>Your browser doesn't support File API.</div>
A directive called pdfs:
myApp.directive('pdfs', ['upload', function(upload) {
return {
replace: true,
// 'scope' should not be a function; a plain child scope suffices,
// since scope.files is assigned in the link function below.
scope: true,
template: '<input id="files" type="file">',
link: function(scope,element) {
element.bind('change', function(evt) {
scope.$apply(function() {
scope.files = evt.target.files;
});
});
},
controller: function($scope, $attrs) {
$scope.$watch('files', function(files) {
//upload.put(files)
if(typeof files !== 'undefined' && files.length > 0) {
for(var i = 0; i<files.length;i++) {
readFile(files[i])
}
}
}, true);
function readFile(file) {
var reader = new FileReader();
reader.addEventListener("loadend", function(evt) {
upload.post({name: file.name, data: reader.result})
})
if (file.type === 'application/pdf') { // check the File's MIME type (===, not =)
reader.readAsDataURL(file);
}
}
}
}
}]);
A tiny service:
myApp.service('upload', function($http) {
this.post = function(file) {
$http.post('/pdf', file);
}
});
And a node server:
var express = require('express');
var bodyParser = require('body-parser')
var fs = require("fs");
var app = express();
app.use(express.static('.'));
app.use( bodyParser.json({limit: '1mb'}) );
app.post('/pdf', function(req, res) {
  var name = req.body.name;
  // strip the data-URL prefix before decoding the base64 payload
  var pdf = req.body.data.replace('data:application/pdf;base64,', '');
  fs.writeFile(name, pdf, 'base64', function(err) {
    if (err) {
      console.log(err);
      return res.status(500).send('error');
    }
    res.send('received');
  });
});

var server = app.listen(3000, function() {
  console.log('Listening on port %d', server.address().port);
});