I'm trying to use gulp-if and gulp-is-binary in order to skip over binary files in my HTML task, but I'm having a lot of trouble.
I've got this task:
// html task, converts includes & variables in HTML
gulp.task("html", function () {
"use strict";
// development HTML directory
var htmlDirectory = dev;
// production HTML directory (if --dist is passed)
if (argv.dist) htmlDirectory = dist;
// clean directory if --dist is passed
if (argv.dist) del([htmlDirectory + "/**/*", "!" + htmlDirectory + "{/assets,/assets/**}"]);
// process HTML
return gulp.src([src + "/**/*", "!" + src + "{/assets,/assets/**}"])
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// check if source is newer than destination
.pipe(gulpif(!argv.dist, newer({dest: htmlDirectory, extra: [src + "{/partials,/partials/**}"]})))
// check if a file is a binary
.pipe(gulpif(isBinary(), function () { /* somehow skip? */ } ))
// replace variables
.pipe(fileinclude({
prefix: "##",
basepath: "#file",
context: {
name: name,
description: description,
version: version,
repository: repository,
license: license,
}
}))
// replace FontAwesome placeholders
.pipe(replace(/(?:<icon:)([A-Za-z0-9\-\_]+)[^>]*(?:>)/g, "<i class='fa fa-$1' aria-hidden='true'><\/i>"))
// output to the compiled directory
.pipe(gulp.dest(htmlDirectory))
// reload the files
.pipe(browserSync.reload({stream: true}))
// notify that the task is complete, if not part of default or watch
.pipe(gulpif(gulp.seq.indexOf("html") > gulp.seq.indexOf("default"), notify({title: "Success!", message: "HTML task complete!", onLast: true})))
// push the task to the ranTasks array
.on("data", function() {
if (ranTasks.indexOf("html") < 0) ranTasks.push("html");
});
});
This is the line I'm having trouble with:
.pipe(gulpif(isBinary(), function () { /* somehow skip? */ } ))
I can't figure out how to tell Gulp to skip that file and continue the rest of the task. I feel like I'm missing something simple.
After a ton of research, experimenting, and some help from the developer of gulp-is-binary, I figured this out. My task is below:
// html task, copies binaries, converts includes & variables in HTML
gulp.task("html", function () {
"use strict";
// development HTML directory
var htmlDirectory = dev;
// production HTML directory (if --dist is passed)
if (argv.dist) htmlDirectory = dist;
// clean directory if --dist is passed
if (argv.dist) del([htmlDirectory + "/**/*", "!" + htmlDirectory + "{/assets,/assets/**}"]);
// copy binaries
var binaries = gulp.src([src + "/**/*", "!" + src + "{/assets,/assets/**}"])
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// check if source is newer than destination
.pipe(gulpif(!argv.dist, newer({dest: htmlDirectory, extra: [src + "/**/*", "!" + src + "{/assets,/assets/**}"]})))
// check if a file is a binary
.pipe(isBinary())
// skip the file if it's not a binary
.pipe(through.obj(function(file, enc, next) {
if (!file.isBinary()) {
next();
return;
}
next(null, file);
}))
// output to the compiled directory
.pipe(gulp.dest(htmlDirectory));
// process HTML
var html = gulp.src([src + "/**/*", "!" + src + "{/assets,/assets/**}"])
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// check if source is newer than destination
.pipe(gulpif(!argv.dist, newer({dest: htmlDirectory, extra: [src + "/**/*", "!" + src + "{/assets,/assets/**}"]})))
// check if a file is a binary
.pipe(isBinary())
// skip the file if it's a binary
.pipe(through.obj(function(file, enc, next) {
if (file.isBinary()) {
next();
return;
}
next(null, file);
}))
// replace variables
.pipe(fileinclude({
prefix: "##",
basepath: "#file",
context: {
name: name,
description: description,
version: version,
repository: repository,
license: license,
}
}))
// replace icon placeholders
.pipe(replace(/(?:<icon:)([A-Za-z0-9\-\_][^:>]+)(?:\:([A-Za-z0-9\-\_\ ][^:>]*))?(?:>)/g, "<i class='icon'><svg class='icon_svg $2' aria-hidden='true'><use xlink:href='#$1' \/><\/svg></i>"))
// output to the compiled directory
.pipe(gulp.dest(htmlDirectory));
// merge both streams back into one
return merge(binaries, html)
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// reload the files
.pipe(browserSync.reload({stream: true}))
// notify that the task is complete, if not part of default or watch
.pipe(gulpif(gulp.seq.indexOf("html") > gulp.seq.indexOf("default"), notify({title: "Success!", message: "HTML task complete!", onLast: true})))
// push the task to the ranTasks array
.on("data", function() {
if (ranTasks.indexOf("html") < 0) ranTasks.push("html");
});
});
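As an aside, the two through.obj filters could be pulled out into a small shared helper so both streams reuse the same logic. A minimal sketch, assuming the same through2 and gulp-is-binary setup as above (the helper name is my own):
// hypothetical helper: pass through only binary files (true) or only text files (false)
function onlyBinary(keepBinary) {
    return through.obj(function (file, enc, next) {
        // file.isBinary() is added by the isBinary() pipe earlier in the stream
        if (file.isBinary() === keepBinary) return next(null, file);
        next(); // drop the file and keep the stream flowing
    });
}
The binaries stream would then use .pipe(onlyBinary(true)) and the HTML stream .pipe(onlyBinary(false)).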
The full gulpfile can be found here:
https://github.com/JacobDB/new-site/blob/2d510e33863d25a99de4fe350bf9a181aefa3761/gulpfile.js
Related
I have the following two gulp tasks:
gulp.task("merge-json", () => {
return gulp
.src(
[
src_folder + "/modules/**/*.json",
src_folder + "/organisms/**/*.json",
src_folder + "/pages/**/*.json",
],
{
since: gulp.lastRun("merge-json"),
}
)
.pipe(plumber())
.pipe(merge({fileName: "data.json"}))
.pipe(gulp.dest(src_folder + "/datas/dist/"))
.pipe(browserSync.stream());
});
gulp.task("nunjucks", () => {
return gulp
.src([src_folder + "pages/**/*.njk"], {
base: src_folder + "pages",
since: gulp.lastRun("nunjucks"),
})
.pipe(plumber())
.pipe(data(() => JSON.parse(fs.readFileSync(src_folder + "datas/dist/data.json"))))
.pipe(nunjucks({path: src_folder}))
.pipe(gulp.dest(dist_folder))
.pipe(browserSync.stream());
});
The first task uses the merge-json plugin to merge all the JSON files into a single file called data.json, and this data.json is then used by the gulp-nunjucks-render plugin to generate the HTML pages.
The problem is that a data.json file gets generated inside my project src even though it is only ever used by the nunjucks plugin.
My question is: isn't there any way to use the output of merge-json (data.json) directly inside the nunjucks plugin?
This is not necessarily an ideal solution, but I will leave it here nonetheless.
Using map-stream, introduce a new stage in the pipeline that intercepts the contents of the merged JSON file buffer and stores it in a shared variable.
const map = require('map-stream');
// a variable to store the merged JSON string
let mergedJsonString;
gulp.task("merge-json", () => {
return gulp
.src(
[
src_folder + "/modules/**/*.json",
src_folder + "/organisms/**/*.json",
src_folder + "/pages/**/*.json",
],
{
since: gulp.lastRun("merge-json"),
}
)
.pipe(plumber())
.pipe(merge({fileName: "data.json"}))
// only one file should ever be passed - data.json
.pipe(map((file, callback) => {
// capture the file buffer as a string
mergedJsonString = file.contents.toString()
// pass on the file to the next pipe, unmodified
callback(null, file)
}))
// can optionally be removed if not necessary to save to a file anymore
.pipe(gulp.dest(src_folder + "/datas/dist/"))
.pipe(browserSync.stream());
});
Now the nunjucks task will need to be modified to use a different data source and specify a gulp task dependency. The data source could instead be generated conditionally, falling back to an old cached version if the merge-json task has not run first; e.g. JSON.parse(typeof mergedJsonString !== "undefined" ? mergedJsonString : fs.readFileSync(src_folder + "datas/dist/data.json")) (a sketch of this is shown after the task below).
// optionally use gulp.series to create a task dependency on "merge-json", ensuring it runs prior to the "nunjucks" task
gulp.task("nunjucks", gulp.series(["merge-json"], () => {
return gulp
.src([src_folder + "pages/**/*.njk"], {
base: src_folder + "pages",
since: gulp.lastRun("nunjucks"),
})
.pipe(plumber())
// source the JSON from the variable in memory; or, if it's undefined, maybe change the implementation to use an old cached "datas/dist/data.json" file
.pipe(data(() => JSON.parse(mergedJsonString)))
.pipe(nunjucks({path: src_folder}))
.pipe(gulp.dest(dist_folder))
.pipe(browserSync.stream());
}));
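If you want the cached-file fallback mentioned above, the data() call could be written like this instead (just a sketch, using the same variables and paths as before):
// use the in-memory JSON if available, otherwise fall back to the data.json written on a previous run
.pipe(data(() => JSON.parse(
    typeof mergedJsonString !== "undefined"
        ? mergedJsonString
        : fs.readFileSync(src_folder + "datas/dist/data.json")
)))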
Might need a few minor tweaks for your specific use case, but this does meet the use case, even if it is hacky.
I am trying to copy my vendor files to my dev folder using gulp. In development mode, I want to copy only the unminified files; if an unminified file is not present, copy the minified one. In production mode, I want to copy the minified files; if a minified file is not present, minify the normal file.
my folder structure
js
  app.js
jquery
  jquery.min.js
  jquery.js
fontawesome
  fontawesome.min.js
  fontawesome.min.css
  fonts.ttf...
Here is the basic task I have written.
var scriptsPath = '../vendor/';
function getFolders(dir) {
return fs.readdirSync(dir)
.filter(function(file) {
return fs.statSync(path.join(dir, file)).isDirectory();
});
}
gulp.task('vendor', function() {
var folders = getFolders(scriptsPath);
var cssFilter = $.filter('**/*.css')
var tasks = folders.map(function(folder) {
var jsFilter;
if (isProduction) {
jsFilter = $.filter('**/*.min.js');
} else {
jsFilter = $.filter(['**/*.js', '!**/*.min.js']);
}
return gulp.src(path.join(scriptsPath, '**/'))
.pipe(jsFilter)
.pipe($.if(useSourceMaps, $.sourcemaps.init()))
.pipe($.if(isProduction, $.uglify({preserveComments: 'some'})))
.on('error', handleError)
.pipe(jsFilter.restore())
.pipe(cssFilter)
.pipe($.if( isProduction, $.minifyCss() ))
.on('error', handleError)
.pipe(cssFilter.restore())
.on('error', handleError)
.pipe(gulp.dest(build.vendor.js));
});
return es.concat.apply(null, tasks);
});
I have been trying for the last two days with gulp-if and some other approaches, but I have not found a solution yet. Thanks in advance.
You are trying to cram way too much into your vendor task. The stuff you do with your JS files is completely unrelated to the stuff you do with your CSS files, and that makes it hard to read.
Instead of using gulp-filter, try splitting vendor up into smaller tasks like vendor-js, vendor-css, etc., and then declare them as dependencies of your vendor task:
gulp.task('vendor', ['vendor-js', 'vendor-css' /* etc ... */]);
Your vendor-js task could then look like this:
var glob = require('glob');
gulp.task('vendor-js', function () {
var js = glob.sync('../vendor/**/*.js');
if (isProduction) {
// use <file>.min.js, unless there is only <file>.js
js = js.filter(function(file) {
return file.match(/\.min\.js$/) ||
js.indexOf(file.replace(/\.js$/, '.min.js')) < 0;
});
} else {
// use <file>.js, unless there is only <file>.min.js
js = js.filter(function(file) {
return !file.match(/\.min\.js$/) ||
js.indexOf(file.replace(/\.min\.js$/, '.js')) < 0;
});
}
return gulp.src(js, { base: '../vendor' })
.pipe($.if(isProduction, // only minify for prod and when
$.if("!**/*.min.js", $.uglify()))) // the file isn't minified already
.pipe(gulp.dest('build'));
});
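A matching vendor-css task would follow the same pattern. A rough sketch, assuming $.minifyCss as in your original task and a build.vendor.css destination alongside build.vendor.js:
gulp.task('vendor-css', function () {
    var css = glob.sync('../vendor/**/*.css');
    if (isProduction) {
        // use <file>.min.css, unless there is only <file>.css
        css = css.filter(function(file) {
            return file.match(/\.min\.css$/) ||
                css.indexOf(file.replace(/\.css$/, '.min.css')) < 0;
        });
    } else {
        // use <file>.css, unless there is only <file>.min.css
        css = css.filter(function(file) {
            return !file.match(/\.min\.css$/) ||
                css.indexOf(file.replace(/\.min\.css$/, '.css')) < 0;
        });
    }
    return gulp.src(css, { base: '../vendor' })
        .pipe($.if(isProduction, $.if('!**/*.min.css', $.minifyCss()))) // only minify unminified files for prod
        .pipe(gulp.dest(build.vendor.css));
});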
Adapting this to your specific needs should be fairly trivial from here on.
I've just started using Gulp (and Node.js)... Obviously I ran into my first wall.
Here it is:
I have a large project that uses themes. Each theme has its own assets (scss and js files). Here is my gulpfile.js:
// < require block here (not included, to keep this short)
var themes = ["theme1", "theme2", "theme3"];
// Since I can have up to 20 different themes, I use the 'themes' array so I can create tasks dynamically, like this:
themes.forEach(function (theme) {
gulp.task('css:' + theme, function () {
setVersion([theme], 'css'); // write asset version into a json file
gulp.src('../themes/frontend/' + theme + '/assets/css/style.scss')
.pipe(sourcemaps.init())
.pipe(sass({outputStyle: 'compressed'}).on('error', sass.logError))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest('../themes/frontend/' + theme + '/assets/css'))
});
});
// Of course, I need an "all" task to build all CSS on the rare occasions I need to do so:
gulp.task('css:all', ("css:" + themes.join(",css:")).split(","));
// ("css:" + themes.join(",css:")).split(",") => results in the needed ['css:theme1', 'css:theme2'] tasks array
// The same logic as above for JS files
// but added the use of gulp-concat and gulp-uglify
// Having scripts = { "theme1" : ['script1', 'script2'], "theme2": ['script1', 'script2'] }
// ...
// And "per theme" both "css and js"
themes.forEach(function (theme) {
gulp.task('theme:' + theme, ['css:' + theme, 'js:' + theme]);
});
// Next I need to set versions for each asset
// I'm writing all the versions into a json file
assetsVersion = someRandomGeneratedNumber;
function setVersion(themes, assetType) {
/**
* themes: array
* assetType: 'all', 'css' or 'js'
*/
var fs = require('fs'),
path = require("path");
var versionsFilePath = path.normalize(__dirname + '/../protected/config/theme/frontend/');
var versionsFileName = '_assets-version.json';
if (!fs.existsSync(versionsFilePath + versionsFileName)) {
// Create file if it doesn't exist
fs.writeFile(versionsFilePath + versionsFileName, "{}", function (err) {
if (err) {
return console.log(err);
}
});
}
gulp.src(versionsFilePath + versionsFileName)
.pipe(jeditor(function (json) {
themes.forEach(function(theme) {
if ("undefined" == typeof (json[theme])) {
json[theme] = {};
}
if ('css' == assetType) {
json[theme]['css'] = assetsVersion;
} else if ('js' == assetType) {
json[theme]['js'] = assetsVersion;
} else {
json[theme] = {"css": assetsVersion, "js": assetsVersion};
}
if ("undefined" == typeof(json[theme]['css'])) {
// if we're missing the 'css' key (i.e. we've just created the json file), add that too
json[theme]['css'] = assetsVersion;
}
if ("undefined" == typeof(json[theme]['js'])) {
// if we're missing the 'js' key (i.e. we've just created the json file), add that too
json[theme]['js'] = assetsVersion;
}
});
return json;
}))
.pipe(gulp.dest(versionsFilePath));
}
The asset-versioning JSON should look like this:
{
"theme1": {
"css": "20150928163236",
"js": "20150928163236"
},
"theme2": {
"css": "20150928163236",
"js": "20150928163236"
},
"theme3": {
"css": "20150928163236",
"js": "20150928163236"
}
}
Running 'gulp css:theme#' works fine...
BUT running 'gulp css:all' makes a messy JSON file.
Of course, this happens because all the css:theme# (or js:theme#) tasks run asynchronously, and more often than not several of them write to my JSON file at the same time, so whichever write finishes last wipes out the entries added by the others.
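Here is a minimal stand-alone illustration of the effect outside of gulp (a hypothetical versions.json, not my actual setVersion code):
var fs = require("fs");
fs.writeFileSync("versions.json", "{}");
function addKey(key) {
    // read-modify-write: both calls below read the same initial "{}" ...
    var json = JSON.parse(fs.readFileSync("versions.json", "utf8"));
    json[key] = Date.now();
    // ... so the later asynchronous write overwrites the other key
    fs.writeFile("versions.json", JSON.stringify(json), function () {});
}
addKey("theme1");
addKey("theme2"); // versions.json ends up with only one of the two keys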
I've read about tasks depending on other tasks, but that doesn't really fit into my whole "dynamic tasks" flow (or I don't know how to fit it in).
I mean I don't think that this:
gulp.task('css:theme1', ['versioning'], function() {
//do stuff after 'versioning' task is done.
});
would help me. So what if it waits for the version to be written? Multiple tasks would still write to the file at the same time. Also, for this to work, I would need to pass parameters to the task, which I also don't know how to do... something like:
gulp.task('css:'+theme, ['versioning --theme ' + theme], function() {
//do stuff after 'versioning' task is done.
});
I know this isn't valid syntax, BUT it would be really useful in some cases if it were somehow possible to send parameters to a task through the task name, the way I can on the command line.
runSequence() { ... done(); } doesn't seem to help either; I really don't see how I could make it work within my flow...
Please, anybody... help a newb...
How can I solve this, while:
Having tasks created dynamically;
Having one versioning json file for all themes.
Basically I'm looking for a Gulp plugin to turn a directory like this:
/app
  - htmlfile1.html
  - htmlfile2.html
  - htmlfile3.html
  - /css
    - cssmain.css
  - /js
    - global.js
And turn that into this:
/dist
  - /htmlfile1
    - htmlfile1.html
    - /css
      - cssmain.css
    - /js
      - global.js
  - /htmlfile2
    - htmlfile2.html
    - /css
      - cssmain.css
    - /js
      - global.js
  - /htmlfile3
    - htmlfile3.html
    - /css
      - cssmain.css
    - /js
      - global.js
Any thoughts on how to accomplish a build system like this?
The code below allows common files to be added to every page distribution, as well as unique dependencies defined as an array in the pages object.
The following Gulp file relies on gulp-foreach, parse-filepath, and event-stream: npm install gulp gulp-foreach parse-filepath event-stream --save-dev
gulpfile.js:
// Command:
// npm install gulp gulp-foreach parse-filepath event-stream --save-dev
// Include gulp
var gulp = require('gulp');
var foreach = require('gulp-foreach'); // https://www.npmjs.org/package/gulp-foreach
var parsePath = require('parse-filepath'); // https://www.npmjs.org/package/parse-filepath
var es = require('event-stream'); // https://www.npmjs.org/package/event-stream
// The pages that each make a distribution
// Unique dependencies are defined as an array value for each page.
var pages = {
'./app/htmlfile1.html': [
'./app/images/1.png',
'./app/images/1-another.png',
],
'./app/htmlfile2.html': [],
'./app/htmlfile3.html': []
};
// Files added to each page distribution
var common = [
'./app/css/cssmain.css',
'./app/js/global.js',
];
function makeDistributionStream(page)
{
var gulpStream = gulp.src(page)
.pipe(foreach(function(stream, file) {
var pathParts = parsePath(file.path);
// Assemble the distribution path
var destinationPath = './dist/' + pathParts.name + '/';
// Pipe the html into the distribution folder
stream.pipe(gulp.dest(destinationPath));
// Move all of the unique and common files into the distribution
var uniqueDependencies = pages[page];
// Merge the common files to the unique ones
var distFiles = uniqueDependencies.concat(common);
gulp.src(distFiles, {base: './app/'})
.pipe(gulp.dest(destinationPath));
}));
return gulpStream;
}
// Assemble the distribution directories for each page
gulp.task('make-distributions', function() {
var mergedStream = null;
for(var page in pages)
{
var stream = makeDistributionStream(page);
// Merge the streams, if there is already one
if(mergedStream)
{
mergedStream = es.merge(mergedStream, stream);
}
// Otherwise, just make it this one
else
{
mergedStream = stream;
}
}
return mergedStream;
});
// Rerun the task when a file changes
gulp.task('watch', function() {
// If the html pages change, re-make the distributions
gulp.watch(Object.keys(pages), ['make-distributions']);
});
// Default Task
gulp.task('default', ['make-distributions', 'watch']);
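If changes to the shared css/js should also trigger a rebuild, the watch task could additionally watch the common globs; this extra line would go inside the 'watch' task above (an optional addition, not in the original):
// re-make the distributions when any of the common files change
gulp.watch(common, ['make-distributions']);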
Background
I've been using grunt.js with a hogan.js task to build the static HTML for our internal docs. I'm learning JavaScript as I go, and I've gotten the task to work well enough for layouts and pages, but it would really help our workflow to have the hogan task render mustache partials to HTML, as in the example in this gist: https://gist.github.com/4132781
Current Setup and what I want to accomplish
All of our mustache partials are in a folder called "partials". Ideally, when the grunt build is run, the hogan task will grab any partials from the partials folder and insert them into the HTML wherever they are referenced (also shown in the gist).
What I DON'T want
I don't want to have to define each partial in the task or task configuration. That won't work; we have ~200 partials and growing, so we need the task to scan a folder and pick up partials based on file name or something similar. I also don't want to use a different language or build tool. We've tried Jade, some markdown-based docs builders, and a number of others. If we can just get partials to render as described, we'll be in great shape!
Is it possible to accomplish this? Thanks in advance for any feedback
I was looking at your code in the gist and some of the options don't match the filenames you're referencing.
Here is my stab at updating the code you provided to allow rendering partials:
grunt.js
The src is the list of pages you're building that might contain partials
In this case, components.mustache would be located at 'docs/components/templates/pages/components.mustache'
Updating the layout option to layout.mustache which is used for all the pages (including components.mustache)
Adding a paths object to options which has a path to the partials folder. All these partials will be read and compiled and stored in options.partials for later use in the grunt task.
module.exports = function(grunt) {
'use strict';
// Project configuration
grunt.initConfig({
pkg: '<json:package.json>',
meta: {
banner:
'/**\n' +
'* <%= pkg.name %>.js v<%= pkg.version %> by #fat & #mdo\n' +
'* Copyright <%= grunt.template.today("yyyy") %> <%= pkg.author %>\n' +
'* http://www.apache.org/licenses/LICENSE-2.0.txt\n' +
'*/'
},
// Build HTML docs from .mustache files
hogan: {
production: {
src: 'docs/components/templates/pages/*.mustache',
dest: 'docs/components/FILE.html',
options: {
title: 'Sellside',
url: 'docs',
setAccount: 'NA',
setSiteId: 'NA',
layout: 'docs/components/templates/layout.mustache',
dev: true,
docs: true,
app: false,
website: false,
paths: {
partials: 'docs/components/templates/partials/*.mustache'
}
}
}
}
});
// Load npm tasks.
grunt.loadNpmTasks('grunt-contrib');
// Load local tasks.
grunt.loadTasks('tasks');
grunt.registerTask('default', 'hogan');
};
hogan.js
Updating this task to read in all the partials and compile them.
The helper is being updated to add the 'body' partial (which is the compiled page) to the options.partials list.
The options.partials is then passed into the hogan.render method so all the partials are available to all the pages.
/*
* Build HTML from mustache files
* https://github.com/sellside/ui/grunt.js
*
* Copyright (c) 2012 Sellside
* Authored by Jon Schlinkert
*/
module.exports = function(grunt) {
// Grunt utilities.
var task = grunt.task,
file = grunt.file,
utils = grunt.util,
log = grunt.log,
verbose = grunt.verbose,
fail = grunt.fail,
option = grunt.option,
config = grunt.config,
template = grunt.template,
_ = utils._
// external dependencies
var fs = require('fs'),
hogan = require('hogan');
// ==========================================================================
// TASKS
// ==========================================================================
grunt.registerMultiTask('hogan', 'Compile mustache files to HTML with hogan.js', function() {
var data = this.data,
src = grunt.file.expandFiles(this.file.src),
dest = grunt.template.process(data.dest),
// Options are set in gruntfile
defaults = {
production: false,
docs: false,
title: 'Sellside',
setAccount: 'NA',
setSiteId: 'NA',
layout: 'docs/templates/layout.mustache',
paths: {},
partials: {}
},
options = _.extend(defaults, this.data.options || {})
!src && grunt.warn('Missing src property.')
if(!src) return false
!dest && grunt.warn('Missing dest property')
if(!dest) return false
var done = this.async()
var srcFiles = file.expandFiles(src)
if(options.paths.partials) {
var partials = grunt.file.expandFiles(options.paths.partials);
log.writeln('Compiling Partials...');
partials.forEach(function(filepath) {
var filename = _.first(filepath.match(/[^\\\/:*?"<>|\r\n]+$/i)).replace(/\.mustache$/, '');
log.writeln(filename.magenta);
var partial = fs.readFileSync(filepath, 'utf8');
options.partials[filename] = hogan.compile(partial);
});
log.writeln();
}
try {
options.layout = fs.readFileSync(options.layout, 'utf8')
options.layout = hogan.compile(options.layout, {
sectionTags: [{
o: '_i',
c: 'i'
}]
})
} catch(err) {
grunt.warn(err) && done(false)
return
}
srcFiles.forEach(function(filepath) {
var filename = _.first(filepath.match(/[^\\\/:*?"<>|\r\n]+$/i)).replace(/\.mustache$/, '')
grunt.helper('hogan', filepath, filename, options, function(err, result) {
err && grunt.warn(err) && done(false)
if(err) return
file.write(dest.replace('FILE', filename), result)
})
})
done()
})
// ==========================================================================
// HELPERS
// ==========================================================================
grunt.registerHelper('hogan', function(src, filename, options, callback) {
log.writeln('Compiling ' + filename.magenta);
var page = fs.readFileSync(src, 'utf8'),
html = null,
layout = options.layout,
context = {};
context[filename] = 'active';
context._i = true;
context.production = options.production;
context.docs = options.docs;
context.setAccount = options.setAccount;
context.setSiteId = options.setSiteId;
var title = _.template("<%= page == 'Index' ? site : page + ' · ' + site %>")
context.title = title({
page: _(filename).humanize().replace('css', 'CSS'),
site: options.title
})
try {
page = hogan.compile(page, {
sectionTags: [{
o: '_i',
c: 'i'
}]
})
options.partials.body = page;
page = layout.render(context, options.partials)
callback(null, page)
} catch(err) {
callback(err)
return
}
})
};
One thing to note: if you're going to pass data into the partials, you'll need to add that data to the context object that is passed to the layout.render call.
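For example (a small sketch; siteVersion is just a made-up key to illustrate), anything added to context before the render is visible inside the layout, the page body, and every compiled partial:
// in the 'hogan' helper, before layout.render(context, options.partials) is called:
context.siteVersion = '1.0.0'; // hypothetical value; partials can then reference {{siteVersion}}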
Hope this all makes sense and helps you out.