I am using the browser-sync proxy and want to load search and replace terms from an external file in order to amend the page as it is loaded into the browser.
The reason I want to load the search and replace terms from a separate file is that I want to use gulp-watch and reload browser-sync whenever the terms are updated.
My Project folder:
regex/search.txt <- search term is stored in this file
regex/replace.txt <- replace term is stored in this file
gulpfile.js
Contents of gulpfile.js:
var gulp = require('gulp'),
fs = require("fs"),
browserSync = require('browser-sync');
var proj_url = "http://www.example.com";
var search_text = "";
var replace_text = "";
gulp.task('readRegEx', function() {
return gulp.src('regex/*.txt')
.pipe(fs.readFile("regex/search.txt", "utf-8", function(err, data) {
search_text = data;
}))
.pipe(fs.readFile("regex/replace.txt", "utf-8", function(err, data) {
replace_text = data;
}))
});
gulp.task('browser-sync', function() {
browserSync({
proxy: {
target: proj_url
},
rewriteRules: [
{
match: search_text,
fn: function (match) {
return replace_text;
}
}
]
});
});
gulp.task('default', ['readRegEx','browser-sync'], function() {
gulp.watch(['regex/*.txt'], [browserSync.reload]);
});
This doesn't work. I get the following error:
TypeError: Cannot call method 'on' of undefined ...
For that to work you need to make browser-sync dependent on readRegEx:
gulp.task('browser-sync', ['readRegEx'], function() {
This guarantees the proper execution order.
Then you can make readRegEx synchronous (and simpler) this way:
gulp.task('readRegEx', function() {
search_text = fs.readFileSync("regex/search.txt", "utf-8").toString();
replace_text = fs.readFileSync("regex/replace.txt", "utf-8").toString();
});
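Putting the two changes together, a minimal version of the whole gulpfile might look like this (a sketch assuming gulp 3.x task-dependency syntax and the same regex/ folder layout; note that browser-sync only reads rewriteRules once at startup, so edited terms take effect after restarting the task, not on a plain reload):

var gulp = require('gulp'),
    fs = require('fs'),
    browserSync = require('browser-sync');

var proj_url = 'http://www.example.com';
var search_text = '';
var replace_text = '';

// Read both terms synchronously so they are set before browser-sync starts
gulp.task('readRegEx', function() {
    search_text = fs.readFileSync('regex/search.txt', 'utf-8').toString();
    replace_text = fs.readFileSync('regex/replace.txt', 'utf-8').toString();
});

// Declaring 'readRegEx' as a dependency guarantees the execution order
gulp.task('browser-sync', ['readRegEx'], function() {
    browserSync({
        proxy: {
            target: proj_url
        },
        rewriteRules: [{
            match: search_text,
            fn: function (match) {
                return replace_text;
            }
        }]
    });
});

gulp.task('default', ['browser-sync'], function() {
    gulp.watch('regex/*.txt').on('change', browserSync.reload);
});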
I'm new to Gulp and I'm having a problem with it. Here are the points I want covered:
- I want to look up a file that has a .storyboard extension (this is already done).
- I want to perform a task whenever that file's content changes.
- I want to watch that file and, when something in it changes, rewrite its content, removing all the content that was already in the file.
When I make changes in the file with the .storyboard extension, it just keeps displaying the messages "done" and "The file has been saved!".
Here is my Code:
//fs to read and write files while path is for iterating directories
var fs = require('fs'),
    path = require('path');
//DomParser to Parse Xml
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
//Gulp for detecting changes
var gulp = require('gulp')
var mainStoryBoardFile;
function crawl(dir) {
// console.log('[+]', dir);
var files = fs.readdirSync(dir);
for (var file in files) {
var next = path.join(dir, files[file]);
//iterate through files to check whether next is a file or a directory
if (fs.lstatSync(next).isDirectory()) {
//if it's a directory, dive into it
crawl(next);
} else if (next.indexOf('.storyboard') >= 0) {
//if it's a file, check whether it is a .storyboard file
mainStoryBoardFile = next;
mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
};
}
}
//calling function
crawl(__dirname);
var newFilePath = './data.xml'
var document;
var dataFound;
//What to do
gulp.task('read', function (done) {
dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
document = DOMParser.parseFromString(
dataFound.toString()
);
done();
});
gulp.task('write', function (done) {
fs.writeFile(mainStoryBoardFile, '', function () { console.log('done') })
fs.writeFile(mainStoryBoardFile, document, (err) => {
if (err) throw err;
console.log('The file has been saved!');
});
done();
});
gulp.task('watch', function (done) {
gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to this problem. You can watch changes on a single file and perform some function whenever the file changes. In the XML case, you can watch a file and, when it changes, add new properties or attributes, or create new elements in the XML file.
//Dependencies
//fs to read and write files while path is for iterating directories
var fs = require('fs'),
path = require('path'),
DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
gulp = require('gulp'),
arrayOfControls = require('./object.json'),
RandExp = require('randexp');
console.log("GulpService has been Started\n");
function crawl(dir) {
var files = fs.readdirSync(dir);
for (var file in files) {
var next = path.join(dir, files[file]);
//iterate through files to check whether next is a file or a directory
if (fs.lstatSync(next).isDirectory()) {
//if it's a directory, dive into it
crawl(next);
} else if (next.indexOf('.storyboard') >= 0) {
//if it's a file, check whether it is a .storyboard file
mainStoryBoardFile = next;
mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
}
}
}
//calling function
crawl(__dirname);
var mainStoryBoardFile;
var document, dataFound;
function readWrite() {
crawl(__dirname);
dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
document = DOMParser.parseFromString(
dataFound.toString()
);
// writeFileSync takes no callback; clear the file, then write the serialized document back
fs.writeFileSync(mainStoryBoardFile, '');
fs.writeFileSync(mainStoryBoardFile, document.toString());
console.log('The file has been saved!');
}
var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (path, stats) {
readWrite();
console.log('File ' + path + ' was changed');
watcher.unwatch(mainStoryBoardFile);
watcher.add(mainStoryBoardFile);
});
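If you also want to modify the parsed storyboard rather than writing it back verbatim, readWrite() could be extended along these lines (a sketch assuming the xmldom API; the data-processed attribute is made up purely for illustration):

//Sketch: edit the parsed XML before writing it back to the storyboard file
var XMLSerializer = new (require('xmldom')).XMLSerializer();

function readWriteWithEdit() {
    var xml = fs.readFileSync(mainStoryBoardFile, "utf-8");
    var doc = DOMParser.parseFromString(xml.toString());

    // Hypothetical change: tag the root element so the edit is visible in the output
    doc.documentElement.setAttribute('data-processed', 'true');

    fs.writeFileSync(mainStoryBoardFile, XMLSerializer.serializeToString(doc));
    console.log('The file has been saved!');
}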
I'm using gulp and I'm trying to create a gulp task that combines files into a single JavaScript file.
For example, imagine I have this:
File template\template1.html :
<h2>some html content</h2>
<p>blah blah blah</p>
File template\template2.html :
<h2>some other html content</h2>
<img src='cat.png'>
I'd like to read and merge these files into a single javascript file like this :
const templates = {
"template1" : "<h2>some html content</h2>\n<p>blah blah blah</p>",
"template2" : "<h2>some other html content</h2>\n<img src='cat.png'>"
}
export default templates
However, I'm failing when dealing with the gulp plumbing (I'm quite new to gulp, I admit).
How can I reach my goal?
Right now I'm trying to play with gulp-through, but it fails at execution:
const gulp = require('gulp');
const through = require('gulp-through');
gulp.task('templates', function () {
var result = {}
gulp.src('src/templates/**/*.html')
.pipe(through('readFile', function(){
console.log(arguments); // not reached!
}, defaults));
})
gulp.task('default', ['templates'])
It shouldn't be hard to write your own plugin using the through2 module (as explained in the official docs).
// gulpfile.js
const gulp = require('gulp');
const path = require('path');
const through = require('through2'); // npm install --save-dev through2
const toTemplateModule = obj => {
return [
`const template = ${JSON.stringify(obj, null, 2)};`,
'',
'export default template;'
].join('\n');
};
const mergeTemplate = file => {
const results = {};
let latestFile;
return through.obj(
function(file, encoding, callback) {
latestFile = file;
results[path.basename(file.path)] = file.contents.toString(encoding);
callback(null, file);
},
function(callback) {
const joinedFile = latestFile.clone({
contents: false
});
joinedFile.path = path.join(latestFile.base, file);
joinedFile.contents = Buffer.from(toTemplateModule(results), 'utf-8');
this.push(joinedFile);
callback();
});
};
gulp.task('templates', () => {
return gulp
.src('./src/templates/**/*.html')
.pipe(mergeTemplate('templates.js'))
.pipe(gulp.dest('./build'))
});
gulp.task('default', ['templates'])
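With the two templates from the question, the generated build/templates.js should come out roughly like this (note the keys keep their .html extension because path.basename(file.path) is used as-is; pass the extension as a second argument to path.basename if you want it stripped):

// build/templates.js (generated output, approximately)
const template = {
  "template1.html": "<h2>some html content</h2>\n<p>blah blah blah</p>",
  "template2.html": "<h2>some other html content</h2>\n<img src='cat.png'>"
};

export default template;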
I am working on a Meteor application.
Currently, I have a few PDFs on my server. To serve these already existing PDFs directly to the client, I do it this way and it works very well:
Router.route("/file/:fileName", function() {
var fileName = this.params.fileName;
// console.log(process.env.PWD);
var filePath = process.env.PWD + '/' + fileName;
var fs = Meteor.npmRequire('fs');
var data = fs.readFileSync(filePath);
this.response.writeHead(200, {
"Content-Type": "application/pdf",
"Content-Length": data.length
});
this.response.write(data);
this.response.end();
}, {
where: "server"
});
I save these PDFs to Mongo using CollectionFS. (Later I will generate PDFs and save them; for now, I am just saving these already existing PDFs to Mongo, as I first want to get the Mongo part working.)
testCollection = new FS.Collection("testCollection", {
stores: [new FS.Store.GridFS("testCollection")]
});
testCollection.allow({
'insert': function () {
return true;
}
});
var file = new FS.File(process.env.PWD + '/PDFKitExampleServerSide.pdf');
file.encoding = 'binary';
file.name('myPDF.pdf');
var document = testCollection.insert(file);
console.log(document._id);
My question is, after I save these PDFs to Mongo using CollectionFS (like I do above), how do I retrieve and serve these PDFs?
Router.route("/database/:pdfId", function() {
//need help here
}, { where: "server"});
After a lot of searching and trying, I've finally gotten it to work.
Router.route("/database/:pdfId", function() {
var pdfId = this.params.pdfId;
var file = testCollection.findOne({_id: pdfId});
var readable = file.createReadStream("tmp");
var buffer = new Buffer(0);
readable.on("data", function(b) {
buffer = Buffer.concat([buffer, b]);
});
var response = this.response;
readable.on("end", function() {
response.writeHead(200, {
"Content-Type": "application/pdf",
"Content-Length": buffer.length
});
response.write(buffer);
response.end();
});
}, {
where: "server"
});
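Since this.response is a writable Node stream, you can also skip the manual buffering and pipe the CollectionFS read stream straight into the response (a sketch along the same lines as the route above; you give up the Content-Length header this way):

Router.route("/database/:pdfId", function() {
    var file = testCollection.findOne({_id: this.params.pdfId});
    this.response.writeHead(200, {
        "Content-Type": "application/pdf"
    });
    // Pipe the stored file directly into the HTTP response
    file.createReadStream("tmp").pipe(this.response);
}, {
    where: "server"
});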
I know that this question is old, but I found an easier way to store and retrieve PDFs. Apparently, if you store your PDFs in the database and they are smaller than 16MB (which is likely for this type of file), performance is much slower than if you store the files in your server's file system.
For doing that, you can use FS.Store.FileSystem instead of FS.Store.GridFS. The following code works for me:
// Client
var pdfStore = new FS.Store.FileSystem("uploadedFiles");
UploadedFiles = new FS.Collection("uploadedFiles", {
stores: [pdfStore],
filter: {
allow: {
extensions: ['pdf','doc','docx','xls','xlsx','ppt','pptx','jpg','png','jpeg']
}
}
});
// Server
var pdfStore = new FS.Store.FileSystem("uploadedFiles", {path: uploadFilesPath});
UploadedFiles = new FS.Collection("uploadedFiles", {
stores: [pdfStore],
filter: {
maxSize: 5242880, // 5MB in bytes
allow: {
extensions: ['pdf','doc','docx','xls','xlsx','ppt','pptx','jpg','png','jpeg']
},
deny: {
extensions: ['exe']
},
onInvalid: function (message) {
if (Meteor.isClient) {
alert(message);
} else {
console.log(message);
}
}
}
});
And then just use this little helper to retrieve the url to the file:
get_uploaded_link: function(id){
    console.log(id);
    var file = UploadedFiles.findOne({_id: id});
    return file.url();
}
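For context, a helper like that would normally be registered in a client-side helpers block, for example (the template name fileViewer is hypothetical):

// Client
Template.fileViewer.helpers({
    get_uploaded_link: function (id) {
        console.log(id);
        var file = UploadedFiles.findOne({_id: id});
        return file && file.url();
    }
});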
I'm trying to get some HTML from a page online and place it inside my jade template, so I can style it without copying and pasting every time I need it.
var request = require("request");
var cheerio = require("cheerio");
var loadContent = function() {
request({
uri: "http://www.mywebsite.com.br/test"
}, function(error, response, body) {
var $ = cheerio.load(body);
var result;
$('.content').each(function(){
result={"content":$(this).html()};
});
placeContent(result);
return true;
});
};
var placeContent = function(content) {
return content;
};
module.exports = loadContent;
Inside my gulpfile.js, besides the right requirements, I have:
gulp.task('jadeBuild', function() {
var options = {
pretty: true
};
return gulp.src(src+'/*.jade')
.pipe(data(function(){
return loadContent();
}))
.pipe(jade(options))
.pipe(gulp.dest(build))
.pipe(connect.reload());
});
And my jade file:
.mycontent
#{content}
What am I missing?
Try changing #{content} to !{content} in your jade file. This will tell jade not to escape any of the characters (which can be dangerous depending on where the input is coming from!).
See http://jade-lang.com/reference/interpolation/
Also, when you loop over each .content you are overwriting result every time. You need to append to result if you want to aggregate all the content together. Something like:
var result = {content: ''};
$('.content').each(function(){
result.content += $(this).html();
});
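Applied to the scraping code from the question, loadContent would then aggregate the blocks like this (a sketch; the done callback parameter is made up to illustrate that request is asynchronous, so the aggregated result is only available inside its callback):

var loadContent = function(done) {
    request({
        uri: "http://www.mywebsite.com.br/test"
    }, function(error, response, body) {
        var $ = cheerio.load(body);
        var result = {content: ''};
        $('.content').each(function(){
            // Append each matched block instead of overwriting the previous one
            result.content += $(this).html();
        });
        done(result);
    });
};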
Below is a Gulp ES6 transpilation task. It works fine, but I'm trying to replace gulp.watch with the gulp-watch plugin so new files will be caught. The problem is that gulp-watch isn't giving me what gulp.watch does in the callback, and I'm not sure what to do about it.
Here's my original working task:
var gulp = require('gulp'),
rename = require('gulp-rename'),
plumber = require('gulp-plumber'),
gprint = require('gulp-print'),
notify = require('gulp-notify'),
babel = require('gulp-babel');
gulp.task('default', function() {
return gulp.watch('../**/**-es6.js', function(obj){
if (obj.type === 'changed') {
gulp.src(obj.path, { base: './' })
.pipe(plumber({
errorHandler: function (error) { /* elided */ }
}))
.pipe(babel())
.pipe(rename(function (path) {
path.basename = path.basename.replace(/-es6$/, '');
}))
.pipe(gulp.dest(''))
.pipe(gprint(function(filePath){ return "File processed: " + filePath; }));
}
});
});
And here's all that I have so far with gulp-watch:
var gulp = require('gulp'),
rename = require('gulp-rename'),
plumber = require('gulp-plumber'),
gprint = require('gulp-print'),
notify = require('gulp-notify'),
babel = require('gulp-babel'),
gWatch = require('gulp-watch');
gulp.task('default', function() {
return gWatch('../**/**-es6.js', function(obj){
console.log('watch event - ', Object.keys(obj).join(','));
console.log('watch event - ', obj.event);
console.log('watch event - ', obj.base);
return;
if (obj.type === 'changed') {
gulp.src(obj.path, { base: './' })
.pipe(plumber({
errorHandler: function (error) { /* elided */ }
}))
.pipe(babel())
.pipe(rename(function (path) {
path.basename = path.basename.replace(/-es6$/, '');
}))
.pipe(gulp.dest(''))
.pipe(gprint(function(filePath){ return "File processed: " + filePath; }));
}
});
});
The output of the logging is this:
watch event - history,cwd,base,stat,_contents,event
watch event - change
watch event - ..
How do I get gulp-watch to give me the info I had before, or, how can I change my task's code to get this working again with gulp-watch?
According to the tests, obj.relative should contain the relative filename, and obj.path will still hold the absolute file path, just as it did in your original code. Also, the callback receives a Vinyl object, which is documented here: https://github.com/wearefractal/vinyl
You probably can't see them in your logs since Object.keys doesn't enumerate properties in the prototype chain.
Using a for..in loop, you should be able to see all the properties.
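So the original task only needs a small change: check obj.event (your log shows 'change' for an edited file) instead of obj.type, and keep using obj.path. A sketch along those lines:

gulp.task('default', function() {
    return gWatch('../**/**-es6.js', function(obj){
        // gulp-watch passes a Vinyl file with an extra 'event' property
        if (obj.event === 'change') {
            gulp.src(obj.path, { base: './' })
                .pipe(plumber({
                    errorHandler: function (error) { /* elided */ }
                }))
                .pipe(babel())
                .pipe(rename(function (path) {
                    path.basename = path.basename.replace(/-es6$/, '');
                }))
                .pipe(gulp.dest(''))
                .pipe(gprint(function(filePath){ return "File processed: " + filePath; }));
        }
    });
});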