Creating and naming a new file according to a string - gulp - node.js

Total newbie in Gulp; I would really appreciate some assistance.
I am trying to create and name a new file using a string that exists in another file. This string gives me the name of the white label that was deployed onto the server.
The content of the file that holds the string is (among other things): {"TITLE":"name_env"}
name_env should be the name of the new file, with the suffix .web,
meaning the new file would be named name_env.web.
What I've come up with so far is:
gulp.task('label', function () {
    var file = require('gulp-file'); // assuming the gulp-file plugin; missing from the original snippet
    var str = require('path/to/file/file.json');
    return file('label', str, {src: true})
        .pipe(gulp.dest('build/'));
});
Am I on the right track?
Hopefully I've managed to explain myself.

Here's a gulpfile which will do your task (assuming the dist folder already exists!):
var gulp = require('gulp');
var fs = require('fs');

gulp.task('label', function(done) {
    var config = JSON.parse(fs.readFileSync('path/to/file.json', 'utf8'));
    // 'wx' makes the write fail if the file already exists
    fs.writeFile('dist/' + config['TITLE'] + '.web', JSON.stringify(config), { flag: 'wx' }, function(err) {
        if (err) return done(err);
        console.log("It's saved!");
        done();
    });
});
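Running gulp label should then create dist/name_env.web; the file name comes from the TITLE value in the JSON, the wx flag makes the task fail if that file already exists, and the done callback tells gulp when the asynchronous write has finished.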

Related

gulp - wrap plugin (which uses through2) output with string

I would like to know how exactly I can manipulate the output of my Gulp plugin so that, no matter how many files are passed to the plugin, it will wrap the output with a string. Currently I cannot tell when the last file is done.
The super-simplified example below iterates over 3 files and creates a new file named output.js containing the string xxx three times (xxxxxxxxx).
I would like the plugin itself to wrap the contents so the output will be: +xxxxxxxxx+.
How can I do this?
Thanks!
Gulpfile
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');

gulp.task('default', function() {
    gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo())
        .pipe(concat('output.js'))
        .pipe(gulp.dest('./test/output'));
});
The most basic gulp plugin (index.js):
var through2 = require('through2'),
    gutil = require('gulp-util');

var PLUGIN_NAME = 'foo';

module.exports = function( options ){
    // through2.obj(fn) is a convenience wrapper around
    // through2({ objectMode: true }, fn)
    return through2.obj(function( file, enc, callback ){
        file.contents = new Buffer( 'xxx' );
        this.push(file);
        callback();
    });
}
I understand the files are currently simply returned modified, but what I don't understand is how to append text and return the concatenated result that I want, while keeping it in line with gulp's working standards.
The "real" plugin should actually wrap the file results with:
var foo = { FILES_CONTENT }
where FILES_CONTENT will actually be a concatenated string of all the files:
"file_name" : "file_content",
"file_name" : "file_content",
...
I would make the following changes to your gulpfile.js:
var gulp = require('gulp');
var foo = require('./index.js');

gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo({fileName:'output.js', varName:'bar'}))
        .pipe(gulp.dest('./test/output'));
});
Since your foo() plugin itself will concatenate all the files, there's no need to use gulp-concat at all. Instead your plugin should accept an option fileName that provides the name of the generated file. I've also added another option varName that will provide the name of the var in the output file.
I'll assume that a.html, b.html and c.html are simple HTML files, something like this:
<h1 class="header">a</h1>
As you've already realized, you need to concat all the files in the plugin itself. That's not really difficult, however, and doesn't require a lot of code. Here's an index.js which does exactly that:
var through2 = require('through2'),
    gutil = require('gulp-util'),
    path = require('path'),
    File = require('vinyl');

var PLUGIN_NAME = 'foo';

module.exports = function(options) {
    var files = { };
    var outputFile = null;

    return through2.obj(function(file, enc, callback) {
        outputFile = outputFile || file;
        var filePath = path.relative(file.base, file.path);
        files[filePath] = file.contents.toString();
        callback();
    }, function(callback) {
        outputFile = outputFile ? outputFile.clone() : new File();
        outputFile.path = path.resolve(outputFile.base, options.fileName);
        outputFile.contents = new Buffer(
            'var ' + options.varName + ' = ' +
            JSON.stringify(files, null, 2) + ';'
        );
        this.push(outputFile);
        callback();
    });
}
Since you want to output a key/value mapping from file names to file contents, our transformFunction just stores both of those things in a regular JavaScript object, files. None of the input files themselves are emitted; their names and contents are just stored until we have all of them.
The only tricky part is making sure that we respect the .base property of each file as is customary for gulp plugins. This allows the user to provide a custom base folder using the base option in gulp.src().
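For illustration (the glob and folder names here are made up, not from the question), a custom base changes the keys that end up in the generated object:
// With base set to 'src', file.base is 'src', so
// path.relative(file.base, file.path) yields keys like 'pages/a.html'
gulp.src('src/pages/**/*.html', { base: 'src' })
    .pipe(foo({fileName: 'output.js', varName: 'bar'}))
    .pipe(gulp.dest('./test/output'));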
Once all files have been processed, through2 calls the flushFunction. In there we create our output file with the provided fileName (once again making sure we respect the .base property).
Creating the output file contents is then just a matter of serializing our files object using JSON.stringify() (which automatically takes care of any escaping that has to be done).
The resulting ./test/output/output.js will then look like this:
var bar = {
  "a.html": "<h1 class=\"header\">a</h1>\n",
  "b.html": "<h1 class=\"header\">b</h1>\n",
  "c.html": "<h1 class=\"header\">c</h1>\n"
};
You should stick to the standard gulp pipeline technique.
This means that you can use the gulp-insert package in order to add the string xxx:
var insert = require('gulp-insert');
.pipe(insert.append('xxx')); // Appends 'xxx' to the contents of every file
You can also prepend, append and wrap with this package, and of course it supports the gulp standards.
So the full example will be:
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');
var insert = require('gulp-insert');

gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo())
        .pipe(insert.append('xxx'))
        .pipe(concat('output.js'))
        .pipe(gulp.dest('./test/output'));
});
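To get the +xxxxxxxxx+ output asked for in the question, a sketch using gulp-insert's wrap() helper; applied after concat(), it wraps the single combined file rather than each input:
gulp.src(['a.html', 'b.html', 'c.html'])
    .pipe(foo())
    .pipe(concat('output.js'))
    .pipe(insert.wrap('+', '+')) // output.js now contains +xxxxxxxxx+
    .pipe(gulp.dest('./test/output'));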

Create an archive and add files in it before sending to client

I'd like to create an archive with archiver and put some files in it. On the client side, a user will click a button, and the archive will be generated on the server side.
I'm using Express.js. This is my server-side code where the archive is generated. I did something like this:
app.get('/export/siteoccupancy', function(req, res) {
    if (_.isEmpty(req.query)) {
        res.status(404).send('requires a start and end date');
    } else {
        // getting params
        var sDate = req.query.startDate;
        var eDate = req.query.endDate;
    }
    var fs = require('fs');
    var archiver = require('archiver');
    var archive = archiver('zip');
    archive.on('err', function(err) {
        res.status(500).send({error: err.message});
    });
    res.on('close', function() {
        console.log('Archive size : %d b', archive.pointer());
        return res.status(200).send('OK').end();
    });
    res.attachment('data-export.zip');
    archive.pipe(res);
    var stream = fs.createWriteStream('data-report.txt');
    stream.once('open', function(fd) {
        stream.write('test1');
        stream.write('\n test2');
        stream.write('\n test3');
        stream.end();
    });
    archive.append(stream);
    archive.finalize();
});
This is totally new for me, and I'd like to understand why the console gives me this error:
Error: append: entry name must be a non-empty string value
at Archiver.append
Regards
You can only append a read stream, because an archive can only take data from read streams. You can use the archive.append method, but you should pass a second argument with a name property to name the entry, like so:
archive.append(myReadStream,{ name : 'myTest.txt'});
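Applied to the code from the question, a minimal sketch of the fix (keeping the same file name): either append a read stream once the file has been written, or skip the temp file entirely, since archiver also accepts strings and Buffers as sources.
// Option 1: append a read stream (the file must exist on disk by now)
archive.append(fs.createReadStream('data-report.txt'), { name: 'data-report.txt' });

// Option 2: append the content directly as a string
archive.append('test1\n test2\n test3', { name: 'data-report.txt' });

archive.finalize();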

Check uploaded file extension in Sails js

How can we check an uploaded file's extension in Sails.js?
I tried skipper and multer but got no result.
Any suggestions?
You should use the saveAs option for each file before saving.
var md5 = require('md5');

module.exports = {
    testUpload: function(req, res) {
        // setting allowed file types
        var allowedTypes = ['image/jpeg', 'image/png'];
        // skipper's default upload directory is .tmp/uploads/
        var allowedDir = "../../assets/images";
        // do not define dirname; use the default path
        req.file("uploadFiles").upload({
            saveAs: function(file, cb) {
                var d = new Date();
                var extension = file.filename.split('.').pop();
                // generating a unique filename with extension
                var uuid = md5(d.getMilliseconds()) + "." + extension;
                // separate allowed and disallowed file types
                if (allowedTypes.indexOf(file.headers['content-type']) === -1) {
                    // save disallowed files to the default upload path
                    cb(null, uuid);
                } else {
                    // save allowed files to the images directory
                    cb(null, allowedDir + "/" + uuid);
                }
            }
        }, function whenDone(err, files) {
            return res.json({
                files: files,
                err: err
            });
        });
    }
}
Just get the uploaded files array and check the last chunk of the string after the dot.
req.file('file').upload({
    maxBytes: 2000000,
    dirname: 'uploadFolder'
}, function (error, files) {
    if (error) return sails.log.error(error);
    // You have a files array, so you can do this:
    files[0].fd.split('.').pop(); // You get the extension
});
What is going on here? When the upload is finished you will get an array of files with their filenames. You can get data from that array and see where each file is located (its full path).
The last step is splitting the string by dots and getting the last item of the resulting array with the pop() method.
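Building on that, a minimal sketch (the whitelist and response handling are made up for illustration) that rejects uploads whose extension is not allowed:
var allowedExtensions = ['jpg', 'jpeg', 'png']; // hypothetical whitelist

req.file('file').upload({ dirname: 'uploadFolder' }, function (error, files) {
    if (error) return res.serverError(error);
    // extract the extension from the file descriptor path
    var extension = files[0].fd.split('.').pop().toLowerCase();
    if (allowedExtensions.indexOf(extension) === -1) {
        return res.badRequest('File type not allowed: ' + extension);
    }
    return res.json({ file: files[0] });
});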

Using Grunt to Replace Text in a File

I'm trying to get Grunt to replace a path reference, and I'm not sure what I'm doing wrong; this looks like it should work. Essentially, I'm copying a Bootstrap file up a directory and changing the @import paths, so I'm just trying to replace 'bootstrap/' with the new destination path 'MY/NEW/DEST/PATH/bootstrap'. I don't want to use a module for something as straightforward as this; it seems needless. Everything works but the replace.
var destFilePath = path.join(basePath, file);

// Does the file already exist?
if (!grunt.file.exists(destFilePath)) {
    // Copy Bootstrap source @import file to destination
    grunt.file.copy(
        // Node API join to keep this cross-platform
        path.join(basePath, 'bootstrap/_bootstrap.scss'),
        destFilePath
    );
    // Require node filesystem module, since not a global
    var fs = require('fs');
    // Replace @import paths to be relative to new destination
    fs.readFile(destFilePath, 'utf8', function(err, data) {
        // Check for any errs during read
        if (err) {
            return grunt.log.write(err);
        }
        var result = data.replace('/bootstrap\//g', 'bootstrap/bootstrap/');
        fs.writeFile(destFilePath, result, 'utf8', function(err) {
            return grunt.log.write(err);
        });
    });
}
You wrapped your regex in quotes - don't do that and it should work fine. A quoted pattern is treated as a plain string and matched literally (and String.replace with a string only replaces the first occurrence anyway), so nothing in your file matches '/bootstrap\//g':
var result = data.replace(/bootstrap\//g, 'bootstrap/bootstrap/');
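To illustrate the difference (sample input made up):
// String pattern: matched literally, replaces only the first occurrence
'bootstrap/a bootstrap/b'.replace('bootstrap/', 'x/');   // 'x/a bootstrap/b'
// Regex literal with the g flag: replaces every occurrence
'bootstrap/a bootstrap/b'.replace(/bootstrap\//g, 'x/'); // 'x/a x/b'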

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
    saveFile: function(blob, name, path, encoding) {
        var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
            name = cleanName(name || 'file'), encoding = encoding || 'binary',
            chroot = Meteor.chroot || 'public';
        // Clean up the path. Remove any initial and final '/' -we prefix them-,
        // any sort of attempt to go to the parent directory '..' and any empty directories in
        // between '/////' - which may happen after removing '..'
        path = chroot + (path ? '/' + path + '/' : '/');
        // TODO Add file existence checks, etc...
        fs.writeFile(path + name, blob, encoding, function(err) {
            if (err) {
                throw (new Meteor.Error(500, 'Failed to save file.', err));
            } else {
                console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
            }
        });
        function cleanPath(str) {
            if (str) {
                return str.replace(/\.\./g, '').replace(/\/+/g, '').
                    replace(/^\/+/, '').replace(/\/+$/, '');
            }
        }
        function cleanName(str) {
            return str.replace(/\.\./g, '').replace(/\//g, '');
        }
    }
});
Which I took from this project
https://gist.github.com/dariocravero/3922137
The code works fine and it saves the file; however, it repeats the call several times, and each time it causes Meteor (Windows version 0.5.4) to reset. The F12 console ends up full of failed (503) requests. The Meteor console loops over the startup code each time the 503 happens and repeats the console logs from the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then shows as broken, then as a valid thumbnail again, as if fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that the file you are creating is what causes the server reset: Meteor automatically scans directories for file changes in order to relaunch the application with the newest code base.
Meteor doesn't scan directories beginning with a dot (so-called "hidden" directories), such as .git for example, so you could use this behaviour to your advantage by setting the path of your files to a .directory of your own.
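For instance (a one-line sketch, directory name hypothetical), changing the chroot in the method above keeps the written files out of Meteor's watcher:
chroot = Meteor.chroot || '.uploads'; // hidden directory, not scanned by Meteor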
You should also consider using writeFileSync, insofar as Meteor methods are intended to run synchronously (inside node fibers), contrary to the usual Node way of asynchronous calls. In this code it's no big deal, but you couldn't, for example, use any Meteor mechanics inside the writeFile callback:
// asynchronousCall and synchronousCall are placeholders for any node-style API
asynchronousCall(function(error, result) {
    if (error) {
        // handle error
    } else {
        // do something with result
        Collection.update(id, result); // error! Meteor code must run inside a fiber
    }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/future, but that's beyond the scope of this question; I recommend watching the EventedMind episode on node Futures to understand this specific area.
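For completeness, a minimal sketch of that fibers/future pattern (not from the original answer; it assumes the fibers package is reachable through the same require mechanism used above), wrapping fs.writeFile so it can be called synchronously inside a Meteor method:
var Future = __meteor_bootstrap__.require('fibers/future');
var fs = __meteor_bootstrap__.require('fs');

function writeFileFiberSync(path, data, encoding) {
    var future = new Future();
    fs.writeFile(path, data, encoding, function(err) {
        if (err) future.throw(err);
        else future.return();
    });
    return future.wait(); // blocks this fiber only, not the whole event loop
}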
