Using a gulp task output inside another task - node.js

I have two gulp tasks as following:
gulp.task("merge-json", () => {
return gulp
.src(
[
src_folder + "/modules/**/*.json",
src_folder + "/organisms/**/*.json",
src_folder + "/pages/**/*.json",
],
{
since: gulp.lastRun("merge-json"),
}
)
.pipe(plumber())
.pipe(merge({fileName: "data.json"}))
.pipe(gulp.dest(src_folder + "/datas/dist/"))
.pipe(browserSync.stream());
});
gulp.task("nunjucks", () => {
return gulp
.src([src_folder + "pages/**/*.njk"], {
base: src_folder + "pages",
since: gulp.lastRun("nunjucks"),
})
.pipe(plumber())
.pipe(data(() => JSON.parse(fs.readFileSync(src_folder + "datas/dist/data.json"))))
.pipe(nunjucks({path: src_folder}))
.pipe(gulp.dest(dist_folder))
.pipe(browserSync.stream());
});
The first task uses the merge-json plugin to merge all the JSON files into a single file called data.json, and this data.json is then used by the gulp-nunjucks-render plugin to generate HTML pages.
The problem I have here is that a file called data.json is generated in my project src, even though it is only ever used by the nunjucks plugin.
My question is: is there any way to use the output of the merge-json plugin (data.json) directly inside the nunjucks plugin?

This is not necessarily an ideal solution, but I will leave it here nonetheless.
Using map-stream, introduce a new stage in the pipeline that intercepts the contents of the merged JSON file buffer and stores them in a shared variable.
const map = require('map-stream');
// a variable to store the merged JSON string
let mergedJsonString;
gulp.task("merge-json", () => {
return gulp
.src(
[
src_folder + "/modules/**/*.json",
src_folder + "/organisms/**/*.json",
src_folder + "/pages/**/*.json",
],
{
since: gulp.lastRun("merge-json"),
}
)
.pipe(plumber())
.pipe(merge({fileName: "data.json"}))
// only one file should ever be passed - data.json
.pipe(map((file, callback) => {
// capture the file buffer as a string
mergedJsonString = file.contents.toString()
// pass on the file to the next pipe, unmodified
callback(null, file)
}))
// can optionally be removed if not necessary to save to a file anymore
.pipe(gulp.dest(src_folder + "/datas/dist/"))
.pipe(browserSync.stream());
});
Now the nunjucks task needs to be modified to use a different data source and to declare a task dependency. The data source could also fall back to an old cached version on disk if the merge-json task has not run first; e.g. JSON.parse(typeof mergedJsonString !== "undefined" ? mergedJsonString : fs.readFileSync(src_folder + "datas/dist/data.json"))
// optionally use gulp.series to create a task dependency on "merge-json", ensuring it runs prior to the "nunjucks" task
gulp.task("nunjucks", gulp.series(["merge-json"], () => {
return gulp
.src([src_folder + "pages/**/*.njk"], {
base: src_folder + "pages",
since: gulp.lastRun("nunjucks"),
})
.pipe(plumber())
// source the JSON from a variable in memory; or if undefined, maybe change the implementation to use an old cached "/data/dists/data.json" file
.pipe(data(() => JSON.parse(mergedJsonString)))
.pipe(nunjucks({path: src_folder}))
.pipe(gulp.dest(dist_folder))
.pipe(browserSync.stream());
});
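If "merge-json" has not run in the current process, mergedJsonString will be undefined. The fallback described above can be wired straight into the data() pipe; a sketch, using the same cached path the merge-json task writes to:
// use the in-memory string when available, otherwise read the last data.json from disk
.pipe(data(() => JSON.parse(
    typeof mergedJsonString !== "undefined"
        ? mergedJsonString
        : fs.readFileSync(src_folder + "datas/dist/data.json")
)))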
This might need a few minor tweaks for your specific setup, but it does satisfy the requirement, even if it is a bit hacky.

Related

Skip binary files in a gulp task

I'm trying to use gulp-if and gulp-is-binary in order to skip over binary files in my HTML task, but I'm having a lot of trouble.
I've got this task:
// html task, converts includes & variables in HTML
gulp.task("html", function () {
"use strict";
// development HTML directory
var htmlDirectory = dev;
// production HTML directory (if --dist is passed)
if (argv.dist) htmlDirectory = dist;
// clean directory if --dist is passed
if (argv.dist) del([htmlDirectory + "/**/*", "!" + htmlDirectory + "{/assets,/assets/**}"]);
// process HTML
return gulp.src([src + "/**/*", "!" + src + "{/assets,/assets/**}"])
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// check if source is newer than destination
.pipe(gulpif(!argv.dist, newer({dest: htmlDirectory, extra: [src + "{/partials,/partials/**}"]})))
// check if a file is a binary
.pipe(gulpif(isBinary(), function () { /* somehow skip? */ } ))
// replace variables
.pipe(fileinclude({
prefix: "##",
basepath: "#file",
context: {
name: name,
description: description,
version: version,
repository: repository,
license: license,
}
}))
// replace FontAwesome placeholders
.pipe(replace(/(?:<icon:)([A-Za-z0-9\-\_]+)[^>]*(?:>)/g, "<i class='fa fa-$1' aria-hidden='true'><\/i>"))
// output to the compiled directory
.pipe(gulp.dest(htmlDirectory))
// reload the files
.pipe(browserSync.reload({stream: true}))
// notify that the task is complete, if not part of default or watch
.pipe(gulpif(gulp.seq.indexOf("html") > gulp.seq.indexOf("default"), notify({title: "Success!", message: "HTML task complete!", onLast: true})))
// push the task to the ranTasks array
.on("data", function() {
if (ranTasks.indexOf("html") < 0) ranTasks.push("html");
});
});
This is the line I'm having trouble with:
.pipe(gulpif(isBinary(), function () { /* somehow skip? */ } ))
I can't figure out how to tell Gulp to skip that file and continue the rest of the task. I feel like I'm missing something simple.
After a ton of research, experimenting, and some help from the developer of gulp-is-binary, I figured this out. My task is below:
// html task, copies binaries, converts includes & variables in HTML
gulp.task("html", function () {
"use strict";
// development HTML directory
var htmlDirectory = dev;
// production HTML directory (if --dist is passed)
if (argv.dist) htmlDirectory = dist;
// clean directory if --dist is passed
if (argv.dist) del([htmlDirectory + "/**/*", "!" + htmlDirectory + "{/assets,/assets/**}"]);
// copy binaries
var binaries = gulp.src([src + "/**/*", "!" + src + "{/assets,/assets/**}"])
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// check if source is newer than destination
.pipe(gulpif(!argv.dist, newer({dest: htmlDirectory, extra: [src + "/**/*", "!" + src + "{/assets,/assets/**}"]})))
// check if a file is a binary
.pipe(isBinary())
// skip the file if it's not a binary
.pipe(through.obj(function(file, enc, next) {
if (!file.isBinary()) {
next();
return;
}
next(null, file);
}))
// output to the compiled directory
.pipe(gulp.dest(htmlDirectory));
// process HTML
var html = gulp.src([src + "/**/*", "!" + src + "{/assets,/assets/**}"])
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// check if source is newer than destination
.pipe(gulpif(!argv.dist, newer({dest: htmlDirectory, extra: [src + "/**/*", "!" + src + "{/assets,/assets/**}"]})))
// check if a file is a binary
.pipe(isBinary())
// skip the file if it's a binary
.pipe(through.obj(function(file, enc, next) {
if (file.isBinary()) {
next();
return;
}
next(null, file);
}))
// replace variables
.pipe(fileinclude({
prefix: "##",
basepath: "#file",
context: {
name: name,
description: description,
version: version,
repository: repository,
license: license,
}
}))
// replace icon placeholders
.pipe(replace(/(?:<icon:)([A-Za-z0-9\-\_][^:>]+)(?:\:([A-Za-z0-9\-\_\ ][^:>]*))?(?:>)/g, "<i class='icon'><svg class='icon_svg $2' aria-hidden='true'><use xlink:href='#$1' \/><\/svg></i>"))
// output to the compiled directory
.pipe(gulp.dest(htmlDirectory));
// merge both streams back into one
return merge(binaries, html)
// prevent breaking on error
.pipe(plumber({errorHandler: onError}))
// reload the files
.pipe(browserSync.reload({stream: true}))
// notify that the task is complete, if not part of default or watch
.pipe(gulpif(gulp.seq.indexOf("html") > gulp.seq.indexOf("default"), notify({title: "Success!", message: "HTML task complete!", onLast: true})))
// push the task to the ranTasks array
.on("data", function() {
if (ranTasks.indexOf("html") < 0) ranTasks.push("html");
});
});
The full gulpfile can be found here:
https://github.com/JacobDB/new-site/blob/2d510e33863d25a99de4fe350bf9a181aefa3761/gulpfile.js
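If the two nearly identical through.obj filters feel repetitive, they could be pulled into a small helper. This is only a sketch (not part of the original gulpfile), assuming the same through2 and gulp-is-binary setup as above:
// returns a stream that keeps only binary files (keepBinary = true)
// or only text files (keepBinary = false)
function binaryFilter(keepBinary) {
    return through.obj(function (file, enc, next) {
        if (file.isBinary() === keepBinary) return next(null, file);
        // drop the file from the stream
        next();
    });
}
// usage: .pipe(isBinary()).pipe(binaryFilter(true)) in the binaries stream,
// and .pipe(isBinary()).pipe(binaryFilter(false)) in the HTML stream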

Copy specified files with condition using gulp

I am trying to copy my vendor files to my dev folder using gulp. In development mode, I want to copy only the unminified files; if an unminified file is not present, copy the minified one. In production mode, I want to copy the minified files; if a minified file is not present, minify the normal file.
my folder structure
js
  app.js
jquery
  jquery.min.js
  jquery.js
fontawesome
  fontawesome.min.js
  fontawesome.min.css
  fonts.ttf...
Here is the basic task I have written.
var scriptsPath = '../vendor/';
function getFolders(dir) {
return fs.readdirSync(dir)
.filter(function(file) {
return fs.statSync(path.join(dir, file)).isDirectory();
});
}
gulp.task('vendor', function() {
var folders = getFolders(scriptsPath);
var cssFilter = $.filter('**/*.css')
var tasks = folders.map(function(folder) {
var jsFilter;
if (isProduction) {
jsFilter = $.filter('**/*.min.js');
} else {
jsFilter = $.filter(['**/*.js', '!**/*.min.js']);
}
return gulp.src(path.join(scriptsPath, '**/'))
.pipe(jsFilter)
.pipe($.if(useSourceMaps, $.sourcemaps.init()))
.pipe($.if(isProduction, $.uglify({preserveComments: 'some'})))
.on('error', handleError)
.pipe(jsFilter.restore())
.pipe(cssFilter)
.pipe($.if( isProduction, $.minifyCss() ))
.on('error', handleError)
.pipe(cssFilter.restore())
.on('error', handleError)
.pipe(gulp.dest(build.vendor.js));
});
return es.concat.apply(null, tasks);
});
I have been trying for the last two days using gulp-if and some other methods, but have not yet found a solution. Thanks in advance.
You are trying to cram way too much into your vendor task. The stuff you do with your JS files is completely unrelated to the stuff you do with your CSS files. That's hard to read.
Instead of using gulp-filter try splitting vendor up into smaller tasks like vendor-js, vendor-css, etc... and then declare them as dependencies for your vendor task:
gulp.task('vendor', ['vendor-js', 'vendor-css' /* etc ... */]);
Your vendor-js task could then look like this:
var glob = require('glob');
gulp.task('vendor-js', function () {
    var js = glob.sync('../vendor/**/*.js');
    if (isProduction) {
        // use <file>.min.js, unless there is only <file>.js
        js = js.filter(function(file) {
            return file.match(/\.min\.js$/) ||
                js.indexOf(file.replace(/\.js$/, '.min.js')) < 0;
        });
    } else {
        // use <file>.js, unless there is only <file>.min.js
        js = js.filter(function(file) {
            return !file.match(/\.min\.js$/) ||
                js.indexOf(file.replace(/\.min\.js$/, '.js')) < 0;
        });
    }
    return gulp.src(js, { base: '../vendor' })
        .pipe($.if(isProduction,                 // only minify for prod and when
            $.if("!**/*.min.js", uglify())))     // the file isn't minified already
        .pipe(gulp.dest('build'));
});
Adapting this to your specific needs should be fairly trivial from here on.
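For example, a matching vendor-css task could look roughly like this. It is a sketch along the same lines; the CSS glob, the $.minifyCss() call and the 'build' destination are assumptions carried over from the code above:
gulp.task('vendor-css', function () {
    var css = glob.sync('../vendor/**/*.css');
    if (isProduction) {
        // use <file>.min.css, unless there is only <file>.css
        css = css.filter(function (file) {
            return file.match(/\.min\.css$/) ||
                css.indexOf(file.replace(/\.css$/, '.min.css')) < 0;
        });
    } else {
        // use <file>.css, unless there is only <file>.min.css
        css = css.filter(function (file) {
            return !file.match(/\.min\.css$/) ||
                css.indexOf(file.replace(/\.min\.css$/, '.css')) < 0;
        });
    }
    return gulp.src(css, { base: '../vendor' })
        // only minify for prod and when the file isn't minified already
        .pipe($.if(isProduction, $.if('!**/*.min.css', $.minifyCss())))
        .pipe(gulp.dest('build'));
});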

Write to same file from multiple tasks (Gulp, Node, gulp-json)

I've just started using Gulp (and NodeJs)... Obviously I ran into my first wall.
Here it is:
I have a large project that uses themes. Each theme has its own assets (scss and js files). Here is my gulpfile.js:
// < require block here (not included, to keep this short)
var themes = ["theme1", "theme2", "theme3"];
// Since I can have up to 20 different themes, I use the 'themes' array so I can create tasks dynamically, like this:
themes.forEach(function (theme) {
gulp.task('css:' + theme, function () {
setVersion([theme], 'css'); // write asset version into a json file
gulp.src('../themes/frontend/' + theme + '/assets/css/style.scss')
.pipe(sourcemaps.init())
.pipe(sass({outputStyle: 'compressed'}).on('error', sass.logError))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest('../themes/frontend/' + theme + '/assets/css'))
});
});
// Of course, I need an "all" task to build all CSS in rare ocasions I need to do so:
gulp.task('css:all', ("css:" + themes.join(",css:")).split(","));
// ("css:" + themes.join(",css:")).split(",") => results in the needed ['css:theme1', 'css:theme2'] tasks array
// The same logic as above for JS files
// but added the use of gulp-concat and gulp-uglify
// Having scripts = { "theme1" : ['script1', 'script2'], "theme2": ['script1', 'script2'] }
// ...
// And "per theme" both "css and js"
themes.forEach(function (theme) {
gulp.task('theme:' + theme, ['css:' + theme, 'js:' + theme]);
});
// Next I need to set versions for each asset
// I'm writing all the versions into a json file
assetsVersion = someRandomGeneratedNumber;
function setVersion(themes, assetType) {
/**
* themes: array
* assetType: 'all', 'css' or 'js'
*/
var fs = require('fs'),
path = require("path");
var versionsFilePath = path.normalize(__dirname + '/../protected/config/theme/frontend/');
var versionsFileName = '_assets-version.json';
if (!fs.existsSync(versionsFilePath + versionsFileName)) {
// Create file if it doesn't exist
fs.writeFile(versionsFilePath + versionsFileName, "{}", function (err) {
if (err) {
return console.log(err);
}
});
}
gulp.src(versionsFilePath + versionsFileName)
.pipe(jeditor(function (json) {
themes.forEach(function(theme) {
if ("undefined" == typeof (json[theme])) {
json[theme] = {};
}
if ('css' == assetType) {
json[theme]['css'] = assetsVersion;
} else if ('js' == assetType) {
json[theme]['js'] = assetsVersion;
} else {
json[theme] = {"css": assetsVersion, "js": assetsVersion};
}
if ("undefined" == typeof(json[theme]['css'])) {
// if we're missing the 'css' key (i.e. we've just created the json file), add that too
json[theme]['css'] = assetsVersion;
}
if ("undefined" == typeof(json[theme]['js'])) {
// if we're missing the 'js' key (i.e. we've just created the json file), add that too
json[theme]['js'] = assetsVersion;
}
});
return json;
}))
.pipe(gulp.dest(versionsFilePath));
}
The assets versioning json should look like this:
{
"theme1": {
"css": "20150928163236",
"js": "20150928163236"
},
"theme2": {
"css": "20150928163236",
"js": "20150928163236"
},
"theme3": {
"css": "20150928163236",
"js": "20150928163236"
}
}
Running 'gulp css:theme#' works fine...
BUT running 'gulp css:all' produces a messy json.
Of course, this happens because all the css:theme# (or js:theme#) tasks run asynchronously, and more often than not there are multiple tasks writing to my json file simultaneously.
I've read about tasks depending on other tasks, but that doesn't really fit into my whole "dynamic tasks" flow (or I don't know how to fit it in).
I mean I don't think that this:
gulp.task('css:theme1', ['versioning'], function() {
//do stuff after 'versioning' task is done.
});
would help me. So what if it waits for the version to be written? Multiple tasks would still write to the file at the same time. Also, for this to work, I would need to pass parameters, which I also don't know how to do... like:
gulp.task('css:'+theme, ['versioning --theme ' + theme], function() {
//do stuff after 'versioning' task is done.
});
As if I could make it work from the console. I know this isn't working, but it would be really useful in some cases if it were somehow possible to send parameters to the task in the task name.
Neither would runSequence() { ... done(); } help; I really don't see how I could make it work within my flow...
Please, anybody... help a newb...
How can I solve this, while:
Having tasks created dynamically;
Having one versioning json file for all themes.

jasmine with gulp, run tests on test file changed

Hi, what I am trying to do is make a watcher task with gulp that will run my jasmine tests. What I have done so far:
var watch = require("gulp-watch");
var jasmine = require("gulp-jasmine");
gulp.task('tests.run.change-watcher', function (cb) {
gulp.src(testsFiles)
.pipe(watch(testsFiles))
.pipe(jasmine({ verbose: true }));
});
But when I run that task and try to change any file that matches the testsFiles globs, nothing shows up in the console.
However, when I run the following task:
gulp.task('tests.run', function (cb) {
gulp.src(testsFiles)
.pipe(jasmine({verbose:true}));
});
It works and shows:
8 specs, 0 failures Finished in 0 seconds
Maybe I'm missing something?
Do it in two steps
1) Declare the test-unit task (like you did)
gulp.task('tests.run', function () {
return gulp.src(testsFiles)
.pipe(jasmine({verbose:true}));
});
2) Declare the watch task that will run this test-unit task when those testsFiles change
gulp.task('tests.watch', function () {
gulp.watch(testsFiles, ['tests.run']);
});
Then, you run gulp tests.watch
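If you are on gulp 4, where the array-of-task-names syntax has been removed, the same watch task could be written with gulp.series (a sketch of the equivalent):
gulp.task('tests.watch', function () {
    gulp.watch(testsFiles, gulp.series('tests.run'));
});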
To run only needed specs, try something like this:
/** Watches file changes in source or spec files and executes specs automatically */
gulp.task("specs-watcher", function() {
return watch(["src/**/*.ts", "spec/**/*.ts"], { events: ["add", "change"] }, function(vinyl, event) {
if (!vinyl.isDirectory()) {
if (vinyl.basename.endsWith(".spec.ts")) {
// We are dealing with a spec file here, so call jasmine!
runJasmine(vinyl.path);
} else {
// Try to find out specs file
const specFilePath = findSpecsFile(vinyl);
if (typeof specFilePath === "string") {
runJasmine(specFilePath);
}
}
}
});
});
This watcher uses two functions. One derives the spec file name from the changed file's name; in my case, it's:
/**
 * For your specs-watcher: this function is called every time a changed file does not end with '.spec.ts'.
 * The function's task is to return the fitting spec path for this file, for example by looking for a corresponding file in the "/spec/" folder.
 * @param {vinyl} changedFile Vinyl object of the changed file (see https://github.com/gulpjs/vinyl)
 * @return {string|undefined} Path to the specs file to execute, or undefined if your watcher shouldn't do anything.
*/
function findSpecsFile(changedFile) {
return changedFile.path.replace(__dirname, `${__dirname}/spec`).replace(".ts", ".spec.ts");
}
The other function is runJasmine, which runs jasmine with a given test file.
Just make everything fit to your setup and it should work. :-)
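runJasmine itself isn't shown above; a minimal sketch, assuming gulp-jasmine as in the question, could look like this:
// run the spec file at the given path through gulp-jasmine
function runJasmine(specFilePath) {
    return gulp.src(specFilePath)
        .pipe(jasmine({ verbose: true }))
        .on("error", function (err) {
            console.log(err.stack);
        });
}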
You can listen to file changes for both tests and source code folders with this:
"use strict";
var gulp = require('gulp');
var mocha = require('gulp-mocha');
var batch = require('gulp-batch');
gulp.watch(['tests/**', 'src/**'], batch(function (events, cb) {
return gulp.src(['tests/*.js'])
.pipe(jasmine({ verbose: true }))
.on('error', function (err) {
console.log(err.stack);
});
}));
gulp.task('default', () => {
console.log('Gulp is watching file changes...');
});

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
saveFile: function(blob, name, path, encoding) {
var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
name = cleanName(name || 'file'), encoding = encoding || 'binary',
chroot = Meteor.chroot || 'public';
// Clean up the path. Remove any initial and final '/' -we prefix them-,
// any sort of attempt to go to the parent directory '..' and any empty directories in
// between '/////' - which may happen after removing '..'
path = chroot + (path ? '/' + path + '/' : '/');
// TODO Add file existance checks, etc...
fs.writeFile(path + name, blob, encoding, function(err) {
if (err) {
throw (new Meteor.Error(500, 'Failed to save file.', err));
} else {
console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
}
});
function cleanPath(str) {
if (str) {
return str.replace(/\.\./g,'').replace(/\/+/g,'').
replace(/^\/+/,'').replace(/\/+$/,'');
}
}
function cleanName(str) {
return str.replace(/\.\./g,'').replace(/\//g,'');
}
}
});
Which I took from this project
https://gist.github.com/dariocravero/3922137
The code works fine, and it saves the file; however, it repeats the call several times, and each time it causes Meteor to reset (using Windows version 0.5.4). The F12 console ends up looking like this: [screenshot omitted]. The Meteor console loops over the startup code each time the 503 happens and repeats the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then displays as broken, then as a valid thumbnail again, as if the fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor has built-in automatic directory scanning for file changes, in order to automatically relaunch the application with the newest code base, the file you are creating is actually causing the server reset.
Meteor doesn't scan directories beginning with a dot (so called "hidden" directories) such as .git for example, so you could use this behaviour to your advantage by setting the path of your files to a .directory of your own.
You should also consider using writeFileSync, since Meteor methods are intended to run synchronously (inside node fibers), contrary to the usual node style of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback.
asynchronousCall(function(error, result){
    if (error) {
        // handle error
    }
    else {
        // do something with result
        Collection.update(id, result); // error ! Meteor code must run inside fiber
    }
});
var result = synchronousCall();
Collection.update(id, result); // good to go !
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/futures, but that's beyond the scope of this question: I recommend the EventedMind episode on node Futures to understand this specific area.
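Applied to the method above, the synchronous version could look roughly like this. It is only a sketch; wrapping the call in try/catch is an assumption, not part of the original code:
// inside the saveFile method, replace the asynchronous fs.writeFile call with:
try {
    fs.writeFileSync(path + name, blob, encoding);
    console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
} catch (err) {
    throw new Meteor.Error(500, 'Failed to save file.', err);
}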
