Gulp clean gives inconsistent results - node.js

I have the following basic Gulp tasks:
// Question code: 'clean' returns the value of del(), relying on gulp 3.x
// to treat the return value as an async-completion hint.
// NOTE(review): gulp only awaits the task if del() returns a promise or
// stream here — the flaky behavior described below suggests the installed
// del version did not, so gulp considered the task finished immediately.
gulp.task('clean', function(){
return del('./build/public/js');
});
// 'doSomething' declares a dependency on 'clean', then logs whether the
// cleaned directory still exists (expected: false every time).
// NOTE: the `.....` below is an elision from the original question, not
// valid JavaScript — the real task streams sources from gulp.src().
gulp.task('doSomething', ['clean'], function(){
console.log(fs.existsSync('./build/public/js'));
return gulp.src(..... // Do some stuff here
});
According to this, the directory ./build/public/js is first deleted by the clean task. This runs synchronously since I am using return in that task.
Next the first thing my doSomething task does is prints out whether or not the ./build/public/js directory exists or not. This should be false EVERY TIME.
But sometimes I get true. I have no idea why. Here is my output when it is true:
[13:57:13] Using gulpfile /vhosts/website/gulpfile.js
[13:57:13] Starting 'clean'...
[13:57:13] Finished 'clean' after 30 ms
[13:57:13] Starting 'doSomething'...
true
[13:57:13] 'doSomething' errored after 196 ms
[13:57:13] Error: EEXIST, mkdir '/vhosts/website/build/public/js'
What is going on here? Why are my results inconsistent? clean finishes before doSomething starts, so the directory should definitely be completely deleted at that point. Why would it ever return true, saying the directory still exists?

You need to help gulp understand when the task is finished:
// Help gulp understand when the task is finished by taking a callback
// and invoking it only after the deletion has completed.
gulp.task('clean', function(cb) {
  // del v0/v1 accepted a node-style callback as the second argument;
  // del >= 2 returns a promise instead (a second argument is treated as
  // an options object), so bridge the promise to gulp's callback.
  del('./build/public/js').then(function() {
    cb();
  }, cb);
});
https://github.com/gulpjs/gulp/blob/master/docs/API.md#deps
Note: Are your tasks running before the dependencies are complete? Make sure your dependency tasks are correctly using the async run hints: take in a callback or return a promise or event stream.

Related

How to execute the same task with gulp, synchronously, once per each folder (in a monorepo with subpackages)?

I have the following project structure (monorepo with many packages)
/pkgA
gulpfile.js
/pkgB
gulpfile.js
/pkgC
gulpfile.js
Each package has a gulpfile.js which just loads /shared/shared-gulp-tasks.js
Important: we want to keep this independence (so that I can run gulp whatever only for a given package, if wanted) - i.e. I don't want to remove the existing tasks from the shared gulpfile, I want to reuse them.
If we want to build everything at once, we run a task synchronously like this:
bash for-each-package.sh "gulp package"
Which does something like
cd pkgA
gulp package
cd pkgB
gulp package
cd pkgC
gulp package
However this is slow, because I start gulp executable from scratch for each package, and it takes ~3 seconds every time to load gulp and all the needed deps. (we have 20+ subpackages).
What I want is to have a task defined in gulpfile.js in the root which would let me do
gulp package-all
The code would look like this:
// Desired root-level task: run the shared 'package' task once per
// package folder, synchronously, and signal completion via done().
gulp.task('package-all', function(done) {
  ['pkgA', 'pkgB', 'pkgC'].forEach(function(pkgName) {
    process.chdir(path.join(__dirname, pkgName));
    // need to run 'package' task here, synchronously
    // gulp.start('package'); is async
  });
  done();
}); // the original snippet was missing this closing ')'
Note that folder-specific package task is already declared in shared gulpfile and I don't want to rewrite it.
The problem is that I want to do this synchronously, and when all folders are finished processing, call done().
Options explored so far:
gulp.run is deprecated, gulp.start is undocumented, generally not advised
and they don't work in this case (they are async)
runSequence looks promising, but how would I run same task many times, per-folder, with cd to that folder before
I am aware that what I ask is kind-of orthogonal to "the gulp way" but I don't want to rewrite all my tasks.
What can be a good way to achieve my goals?
Finally I solved the issue by using run-sequence and creating fake tasks (not sure if there's an easier way — incidentally, it seems that gulp tasks cannot be anonymous: you can't just pass functions to run-sequence, you have to pass the string names of registered gulp tasks), then building a sequence out of those tasks and passing done at the end of the sequence.
gulpfile.js
var runSequence = require('run-sequence');
var gulp = require('gulp');
var path = require('path'); // used below but missing from the original snippet
require('./shared-gulp-tasks')(gulp);

var folders = ['pkgA', 'pkgB', 'pkgC']; // this array comes from external helper method which reads it from disk

// For a given shared task name, registers:
//   - one wrapper task per folder ('<name>_<folder>') that chdirs into
//     the folder and runs the shared task via run-sequence, and
//   - a '<name>-all' task that runs every wrapper in order.
function registerTaskForAllFolders(wrappedTaskName) {
  var tasksToExecute = [];
  folders.forEach(function(folderName) {
    var taskName = wrappedTaskName + '_' + folderName;
    gulp.task(taskName, function(done) {
      console.log(folderName);
      process.chdir(path.join(__dirname, folderName));
      // the shared task sees the new cwd; done fires when it finishes
      runSequence(wrappedTaskName, done);
    });
    tasksToExecute.push(taskName);
  });
  gulp.task(wrappedTaskName + '-all', function(done) {
    // Run on a copy: pushing `done` into the shared array (as the
    // original did) would leave a stale callback behind and grow the
    // sequence every time the '-all' task runs.
    runSequence.apply(null, tasksToExecute.concat(done));
  });
}

// this registers a task called 'nothing-all'
registerTaskForAllFolders('nothing');
// this registers a task called 'clean-all'
registerTaskForAllFolders('clean');
// this registers a task called 'package-all'
registerTaskForAllFolders('package');
shared-gulp-tasks.js
module.exports = function(gulp) {
gulp.task('nothing', function(done) {
console.log('doing nothing in ' + process.cwd());
done();
});
}
terminal
gulp nothing-all
output
[17:08:51] Starting 'nothing-all'...
[17:08:52] Starting 'nothing_pkgA'...
[17:08:52] Starting 'nothing'...
doing nothing in d:\git\myproject\pkgA
[17:08:52] Finished 'nothing' after 171 μs
[17:08:52] Finished 'nothing_pkgA' after 2.23 ms
[17:08:52] Starting 'nothing_pkgB'...
[17:08:52] Starting 'nothing'...
doing nothing in d:\git\myproject\pkgB
[17:08:52] Finished 'nothing' after 2.03 ms
[17:08:52] Finished 'nothing_pkgB' after 11 ms
[17:08:52] Starting 'nothing_pkgC'...
[17:08:52] Starting 'nothing'...
doing nothing in d:\git\myproject\pkgC
[17:08:52] Finished 'nothing' after 1.93 ms
[17:08:52] Finished 'nothing_pkgC' after 11 ms
[17:08:52] Finished 'nothing-all' after 345 ms

gulp-less task exits with zero on invalid styles

Firstly, here is a repo with very simple example of my problem: https://github.com/nkoder/example-of-gulp-less-zero-exit-on-error
I want to define gulp build pipeline which fails on any error. This pipeline would be used in automated process (eg. Continuous Integration) so I need the build command to return non-zero exit code on failure. It works for me with invalid Jade templates or non-passing tests run with Karma.
The problem is when I use gulp-less to prepare CSS from LESS. Whenever I add any pipeline step after gulp-less execution, all errors are "consumed" and the whole task exits with 0. But it shouldn't.
The first question is: How to force wrong task to fail with non-zero exit code? (solved, see answer below)
The second question is: Why error event emitted in less() call followed by piped stream is not making a whole task to return non-zero exit code? Non-zero is returned in some other similar cases, eg. in Jade or ESLint failure. (not solved yet)
Versions:
node 5.11.0
npm 3.8.6
gulp 3.9.1
gulp-less 3.1.0
Listen for the error event and then exit the process explicitly:
// Make a failing less() compile abort the gulp process with a non-zero
// exit code instead of the error being swallowed by later pipe steps.
gulp.task('wrong', function () {
  var lessStep = less().on('error', function (err) {
    console.log(err.message);
    process.exit(1);
  });
  return gulp
    .src('invalid-styles.less')
    .pipe(lessStep)
    .pipe(gulp.dest('generated-styles.css'));
});

pages:reset error gulp.run() has been deprecated. Use task dependencies or gulp.watch task triggering instead

As delivered, Foundation for sites 6 CLI generates an error when the gulp task pages:reset is called. This happens anytime you edit files down in src/layouts or src/partials.
Apparently, gulp.run() has been deprecated.
Using run-sequence repairs the problem.
Around line 80 of gulpfile.js:
// First fix: replace the deprecated gulp.run() with run-sequence.
// NOTE(review): browser.reload() here runs synchronously, i.e. before
// the 'pages' sequence has actually finished — the reload can therefore
// show stale output.
gulp.task('pages:reset', function(cb) {
panini.refresh();
//gulp.run('pages');
sequence('pages',cb);
browser.reload();
});
Thanks for this. I had to do it a little differently in order to ensure the change and copy happens before the browser reload.
// Run the 'pages' sequence first, then signal task completion and
// reload the browser only after the rebuilt pages have been written.
gulp.task('pages:reset', function(cb) {
  panini.refresh();
  // gulp.run('pages', cb);
  function afterPages() {
    cb();
    browser.reload();
  }
  sequence('pages', afterPages);
});

How to abort/kill grunt-contrib-watch on purpose?

Sorry for the weird naming, but I wanted to clearly distinguish my question from all the questions, which are about grunt aborting without the user wanting it to.
I'm on the other hand trying to kill the current instance of grunt via a task to prevent grunt from messing up, if there are accidentally more than one instance running at the same time (e.g. build and watch). I tried to use grunt.fail.fatal but the watch task still continues running.
// Custom 'KILL' task: grunt.fail.fatal aborts the current task run,
// but (per the question) the running watch task continues anyway.
grunt.task.registerTask('KILL', 'KILL KILL KILL.', function() {
grunt.fail.fatal('Aborted grunt to prevent conflicts in _config.js.');
});
...
// grunt-contrib-watch config fragment: whenever the config file
// changes, run the 'KILL' task above in an attempt to stop grunt.
watch:{
preventConfigConflicts:{
files: [appConfig.configFilePath],
tasks:[
'KILL'
]
}
}
Any ideas how to achieve that?
I also tried Node's process.exit, without any effect:
// Attempted hard stop via Node's process.exit.
grunt.task.registerTask('KILL', 'KILL KILL KILL.', function() {
  // was: process.exit(code=1) — `code=1` assigned an implicit global
  // as a side effect; pass the exit code directly instead.
  process.exit(1);
});

Gulp.js: "gulp-chug" only runs one file even when set to watching many

I've started working with Gulp and the problem I'm having is getting gulp-chug to work properly.
I've followed everything in the documentation, telling my gulpfile to watch all gulpfiles within certain directories, whereas it only watches one file.
This is the code I have used following the documentation...
var gulp = require('gulp');
var chug = require('gulp-chug');

gulp.task('default', function () {
  // Return the stream so gulp can track completion; without the return
  // the task "finishes" in milliseconds while chug's spawned child
  // gulpfiles are still running (matching the output shown below).
  return gulp.src('**/task_runner/gulpfile.js')
    .pipe(chug());
});
I even tried to see if it makes a difference if I put the filepath in an array...
...
gulp.src(
[ '**/task_runner/gulpfile.js' ]
)
...
I also tried this (and a version without the array in gulp.src())...
...
gulp.src(
[ 'Project_01/task_runner/gulpfile.js', 'Project_02/task_runner/gulpfile.js' ]
)
...
...and it still does the same thing.
My file structure looks like this,
*root*
node_modules
gulpfile.js
package.json
Project_01
css
scss
task_runner
Project_02
css
scss
task_runner
All the gulpfiles work when running them individually, but I want them all to run at the same time within one cmd window with gulp-chug.
This is what my cmd looks like, which is showing that it's only watching Project_02,
C:\Users\WaheedJ\Desktop\UniServer\www\Practice\gulp>gulp
[14:19:40] Using gulpfile ~\Desktop\UniServer\www\Practice\gulp\gulpfile.js
[14:19:40] Starting 'default'...
[14:19:40] Finished 'default' after 6.37 ms
[gulp-chug] File is a buffer. Need to write buffer to temp file...
[gulp-chug] Writing buffer to Project_02\task_runner\gulpfile.tmp.1411996780120.
js...
[gulp-chug] Spawning process C:\Users\WaheedJ\Desktop\UniServer\www\Practice\gul
p\Project_02\task_runner\node_modules\gulp\bin\gulp.js with args C:\Users\Waheed
J\Desktop\UniServer\www\Practice\gulp\Project_02\task_runner\node_modules\gulp\b
in\gulp.js --gulpfile gulpfile.tmp.1411996780120.js default from directory C:\Us
ers\WaheedJ\Desktop\UniServer\www\Practice\gulp\Project_02\task_runner...
[gulp-chug](Project_02\task_runner\gulpfile.tmp.1411996780120.js) [14:19:42] Usi
ng gulpfile ~\Desktop\UniServer\www\Practice\gulp\Project_02\task_runner\gulpfil
e.tmp.1411996780120.js
[gulp-chug](Project_02\task_runner\gulpfile.tmp.1411996780120.js) [14:19:42] Sta
rting 'watch'...
[gulp-chug](Project_02\task_runner\gulpfile.tmp.1411996780120.js) [14:19:43] Fin
ished 'watch' after 18 ms
[14:19:43] Starting 'default'...
[14:19:43] Finished 'default' after 7.13 µs
What can I do to fix this?
I have the same thing happening. For now i employed this workaround :
// Empty default task that depends on one hard-coded task per child
// gulpfile; each dependency returns its chug() stream so gulp waits
// for the spawned gulpfile to finish.
gulp.task('default', ['one-gulpfile', 'another-gulpfile'], function () {});

gulp.task('one-gulpfile', function () {
  return gulp.src('./project-one/gulpfile.js').pipe(chug());
});

gulp.task('another-gulpfile', function () {
  return gulp.src('./project-another/gulpfile.js').pipe(chug());
});
Basically an empty default task, with dependencies on hard coded tasks that each, run one gulp file.
Of course not dynamic, and needs maintenance, but I got it going which is what i needed most at this point in time. I hope to see chug mature a bit more.

Resources