I have this script that watches over files in a directory
// the_script.js
var fs = require('fs');

// Report every change observed inside someDir.
var onDirChange = function() {
  console.log('I see you');
};

fs.watch('someDir', onDirChange);
I run it with
node the_script.js
and it correctly keeps running forever.
Now I'd like to make it a grunt task, but if I write the task as
// Gruntfile.js
module.exports = function(grunt) {
grunt.registerTask('default', function(grunt) {
console.log('loading task');
var fs = require('fs');
fs.watch('someDir', function() {
console.log('I see you');
});
});
};
when I run
grunt
I just see "loading task" and then the process exits.
I want to know how to make the grunt task run forever and understand what's happening.
I only have gulp experience, but as I recall gulp executes its tasks and then terminates. If grunt behaves similarly and you need to force the process to stay open, try reading from the console (for example, via the readline npm module):
https://www.npmjs.com/package/readline
Related
I checked out my project on a new machine and get an unsatisfying console output.
Console Output
gulp default
Using gulpfile ...\gulpfile.js
Task never defined: default
Please check the documentation for proper gulpfile formatting
gulp -v
CLI version 1.2.2
Local version 4.0.0-alpha.2
Settings
Node interpreter: nodejs-6.9.2\node.exe
Gulp package: node_modules\gulp-4.0.build
Gulp File Content
'use strict';
var gulp = require('gulp');
var del = require('del');
var path = require('path');
var sass = require('gulp-sass');
var ts = require('gulp-typescript');
var gulpPromise = require("gulp-promise");
var gulpPromiseAll = require('gulp-all');
var merge = require('merge-stream');
var through = require('through2');
var runSequence = require('run-sequence');
var async = require("async");
var Rx = require('rx');
var chok = require('chokidar');
var deleteEmpty = require('delete-empty');
var browserSync = require('browser-sync').create();
var webserver = require('gulp-webserver');
var historyApiFallback = require('connect-history-api-fallback');
/* A few functions like this */
// Placeholder helper illustrating the shape of the real functions.
var yadda1 = function() {
  var result = yadda9("yadda", yaddayadda);
  return result;
};
/* A few tasks like this */
gulp.task('build', function(){
  // Run all build steps concurrently and settle once every one finishes.
  var steps = [yadda1(), yadda2(), yadda3()];
  var onSuccess = function() {
    console.log("yadda");
  };
  var onFailure = function(err) {
    console.error("yadda:", err);
  };
  return gulpPromiseAll.apply(null, steps).then(onSuccess, onFailure);
});
// gulp 4: 'build' then 'serve' run in order, followed by a final logger.
gulp.task('default', gulp.series('build', 'serve', function announceDone(done) {
  console.log("Default task that cleans, builds and runs the application [END]");
  done();
}));
What am I doing wrong?
I just had this problem.
We both are using browser-sync, and a task related to browser-sync is where the problem was in my code, so there's a good chance that you have the same problem (and possibly in your other gulp tasks as well), but can't say for sure because you didn't share the tasks' code.
The problem:
In gulp 3.x you could write code like this:
gulp.task('watch', () => {
  // gulp 3.x accepted an array of task names as the watch handler.
  // (The original snippet was missing the closing ")" here, and the bare
  // "..." placeholder was not valid JS — both fixed below.)
  gulp.watch('./dev/styles/**/*.scss', ['styles']);
  // ...
});
You can't in gulp 4.x.
The solution:
You have to pass a function instead of the name of a task in these cases. You can do this by passing a gulp.series() function. The above code becomes:
gulp.task('watch', () => {
  // gulp 4.x: pass a function — gulp.series() wraps the task name.
  // (The bare "..." placeholder in the original was a syntax error; it is
  // a comment now so the snippet parses.)
  gulp.watch('./dev/styles/**/*.scss', gulp.series('styles'));
  // ...
});
Try making this change wherever applicable. Worked for me.
I'm pretty new to node. I built a sample express app and wrote some BDD tests for it using cucumber.js. I would like to automate the tests using gulp so that when I run my tests it will first start the express app and, once the server is up, run through the tests. However, currently the tests run before the server is up and therefore all fail. Is there a way to wait (using gulp) for the server to start and only then run the tests?
I'm using gulp-cucumber and gulp-express here (somewhat hidden by gulp-load-plugins).
My gulpfile (partial):
var gulp = require('gulp'),
plugins = require('gulp-load-plugins')();
gulp.task('server', function () {
  // Boot the express app via gulp-express. Note: nothing is returned,
  // so gulp gets no signal for when the server is actually listening.
  var entryPoints = ['bin/www'];
  plugins.express.run(entryPoints);
});
gulp.task('cucumber', ['server'], function() {
  // Feed every feature file through gulp-cucumber with the project's
  // step definitions and support code.
  var options = {
    'steps': 'features/steps/*.steps.js',
    'support': 'features/support/*.js'
  };
  return gulp.src('features/*').pipe(plugins.cucumber(options));
});
gulp.task('test', ['server', 'cucumber']);
I have to use this solution - https://www.npmjs.com/package/run-sequence
var gulp = require('gulp'),
plugins = require('gulp-load-plugins')(),
runSequence = require('run-sequence');
gulp.task('server', function () {
  // Launch the express app under gulp-express.
  var entryPoint = 'bin/www';
  plugins.express.run([entryPoint]);
});
gulp.task('cucumber', function() {
  // Run the cucumber features; returning the stream lets gulp (and
  // run-sequence) know when the tests finish.
  var cucumberConfig = {
    'steps': 'features/steps/*.steps.js',
    'support': 'features/support/*.js'
  };
  return gulp.src('features/*').pipe(plugins.cucumber(cucumberConfig));
});
gulp.task('test', function(cb){
  // run-sequence reports completion through a final callback; wiring it to
  // gulp's task callback tells gulp when 'test' is really finished (the
  // original fired the sequence and returned immediately, so gulp marked
  // the task done right away).
  runSequence('server', 'cucumber', cb);
});
I am new to the Gulp build tool. I recently started using Gulp in my project and have correctly set up minification, compression, and JSX-to-JS compilation. However, my requirement is to send my build folder to a remote server located elsewhere. For this I am using the gulp-scp npm package to copy the folder to the remote server, but it is not copying the folder.
The below code is gulpfile.js
var gulp = require('gulp'),
scp = require('gulp-scp2');
gulp.task('default', function() {
  // Connection details for the target machine.
  // NOTE: committing a plaintext password is risky — prefer SSH keys.
  var connection = {
    host: '192.50.10.31',
    username: 'applmgr',
    password:'123456',
    dest: '/Data/project'
  };
  var logError = function(err) {
    console.log(err);
  };
  return gulp.src('src/*.js')
    .pipe(scp(connection))
    .on('error', logError);
});
In this code I am copying all the .js files of my project into the Data folder on the remote server.
I run my gulpfile.js:
c:\gulpproject>gulp
[10:19:36] Using gulpfile c:\gulpproject\gulpfile.js
[10:19:36] Starting 'default'...
The output stops here and nothing else is shown.
Can anyone help me understand why this happens and where my code is wrong?
OK, I solved this problem using the gulp-rsync npm plugin. Note that on Windows, if you want to copy files from your local machine to a remote system, your system must support rsync and have the ssh command available. I tried this on Linux after installing rsync.
var gulp = require('gulp');
var uglify = require('gulp-uglify');
var react = require('gulp-react');
var rsync = require('gulp-rsync');
gulp.task('scripts', function() {
  // Compile JSX to JS, minify, and write the result to build/js
  // (vendor scripts are not covered by this glob).
  var sources = gulp.src("src/*.jsx");
  return sources
    .pipe(react())
    .pipe(uglify())
    .pipe(gulp.dest('build/js'));
});
gulp.task('deploy',['scripts'], function() {
  // gulp-rsync passes files straight through, so one stream can feed both
  // destinations. Returning the stream lets gulp know when the deploy has
  // actually finished — the original returned nothing, so the task was
  // considered complete before either rsync was done.
  // NOTE(review): the hostnames read user#host — presumably user@host;
  // verify before deploying.
  return gulp.src('build/**')
    .pipe(rsync({
      root: 'build',
      hostname: 'ramprasad#10.60.48.11',
      destination: '/Data/rrv'
    }))
    .pipe(rsync({
      root: 'build',
      hostname: 'appl#10.30.10.42',
      destination: '/Data/rrv'
    }));
});
// The default task (called when you run `gulp` from cli).
// Running `gulp` triggers 'deploy', which in turn depends on 'scripts'.
gulp.task('default', ['deploy']);
you can use powershell or cmd or batch:
var spawn = require("child_process").spawn;

// Launch the script under powershell.exe and relay its output streams.
var child = spawn("powershell.exe", ["c:\\temp\\helloworld.ps1"]);

child.stdout.on("data", function(data) {
  console.log("Powershell Data: " + data);
});

child.stderr.on("data", function(data) {
  console.log("Powershell Errors: " + data);
});

child.on("exit", function() {
  console.log("Powershell Script finished");
});

child.stdin.end(); //end input
I am trying to set up a Node.js job queue on Heroku. I am using RabbitMQ with the jackrabbit npm module for this task.
After following the example in the repository, I have the following files:
server.js. Node simple web server.
worker.js. Worker.
As I understand it, on Heroku I start a web process and a worker process in the Procfile:
// Procfile
web: node server.js
worker: node worker.js
In the worker.js file, I do the following:
// worker.js
// Background job consumer: connects to RabbitMQ via jackrabbit and
// processes 'myJob' messages pushed by the web process.
var http = require('http');
var throng = require('throng');
var jackrabbit = require('jackrabbit')
// want to use "worker: node worker.js" in heroku Procfile
// CLOUDAMQP_URL is provided by the Heroku CloudAMQP add-on; fall back to
// a local broker for development.
var queue = jackrabbit(process.env.CLOUDAMQP_URL || 'amqp://localhost')
// Lift the default outbound socket cap for this process.
http.globalAgent.maxSockets = Infinity;
var start = function() {
  // Once connected, declare the queue (at most 5 unacked messages at a
  // time per the prefetch option) and attach the message handler, which
  // must call ack() to confirm each job.
  // NOTE(review): this relies on the jackrabbit API shown in the example
  // repo — newer jackrabbit releases expose a different surface
  // (queue.default().queue(...).consume(...)); verify the installed
  // version matches, as a mismatch here would explain jobs never arriving.
  queue.on('connected', function() {
    queue.create('myJob', { prefetch: 5 }, function() {
      queue.handle('myJob', function(data, ack) {
        console.log(data)
        console.log("Job completed!")
        ack()
      })
    })
  })
}
// Run a single worker under throng; lifetime/grace control restart and
// shutdown behavior.
throng(start, {
  workers: 1,
  lifetime: Infinity,
  grace: 4000
})
Now I want a situation where I can push data to a job from the web application. So I in a middleware function I have,
// middleware.js
// Web-side producer: publishes a 'myJob' message for the worker on each
// request to /example, then responds immediately.
// NOTE(review): this opens its own jackrabbit connection; publishing
// before the connection is established may buffer or drop the message
// depending on the jackrabbit version, which could explain the hanging
// request — confirm against the installed version and its 'connected'
// event semantics.
var jackrabbit = require('jackrabbit')
var queue = jackrabbit(process.env.CLOUDAMQP_URL || 'amqp://localhost')
app.get('/example', function(req, res, next) {
  queue.publish('myJob', { data: 'my_data' })
  res.send(200, { data: 'my_data' })
})
On development, I start worker.js and app.js in separate terminal windows. When I call the /example method, I expect to see the worker terminal window show the relevant console logs. However, the request just hangs. The same thing happens on Heroku.
I feel like there is something really fundamental I am missing here in terms of my understanding of jackrabbit and ampq, and how to set up a job queuing system using that.
Any help on this would be greatly appreciated as this is obviously new to me.
I have a server.js module that exports a start() function to start my server.
I require this module and start the server from index.js.
I'm trying to unit test the server.js module in isolation (with Mocha) by starting the server in a child_process.fork call but I don't see how to call the exported start function.
It's currently working by passing 'index.js' to the fork call but then I don't see how to pass options through to the server.js module (sending a port number for example).
Here's my server.js and the unit test that uses index.js (which only requires and calls server.start()).
I'd like to test server.js directly so I can pass environment variables to it.
====EDIT====
I'm not sure what I thought I would gain by starting the server in a separate process.
I've changed the test to just start the server in the before block.
Suggestions welcome.
var assert = require("assert");
var request = require("request");

describe("Server", function(){
  var server;
  var port = 4008;

  before(function(done){
    // Start the app in-process so the port can be passed directly.
    server = require("../server");
    server.start(port);
    done();
  });

  it('listens on specified port (' + port + ')', function(done){
    request('http://localhost:' + port, function(err, res, body){
      // Fail loudly on connection errors instead of crashing on an
      // undefined `res` (the original dereferenced res.statusCode
      // without checking err first).
      assert.ifError(err);
      // Strict equality with a real assertion message; the original's
      // bare assert(res.statusCode == 200) gives no detail on failure.
      assert.strictEqual(res.statusCode, 200);
      done();
    });
  });
});
You may want to use the cluster module for this, which makes handling processes a little simpler. The following may be along the lines of what you need.
var cluster = require('cluster');

// Runs only on the master process.
if (cluster.isMaster) {
  // Values passed to fork() surface in the worker as process.env entries
  // (they arrive as strings, so parseInt(process.env.PORT, 10) there).
  var environmentVariables = { PORT: 2020 };
  var worker = cluster.fork(environmentVariables);

  // Catch a message from the worker, and then destroy it.
  worker.on('message', function(message) {
    // BUG FIX: the worker sends { testResults: ... } (plural) but the
    // original master checked hasOwnProperty('testResult'), so the
    // result was never recognized and the worker was never destroyed.
    if (message.hasOwnProperty('testResults')) {
      // Kill the worker.
      worker.destroy();
      // Then do something with the result.
    }
  });
}

// Runs only on a worker process.
if (cluster.isWorker) {
  var server = require('./server');
  server.start();
  // Do some stuff for tests.
  // Send the test result.
  process.send({ testResults: 'pass', failReason: null });
}
I haven't tested this, but hopefully the gist is clear. You can pass in custom environment variables when the worker process is spawned, and then have the worker process message the master with the result. You probably need to handle exit events and a time out for when the worker crashes or hangs up.
As an aside, you should probably be wrapping the process.env.PORT in a parseInt.