Passing a variable between pipes in Gulp 3.9.1 - node.js

Using gulp 3.9.1
I am attempting to return a bunch of files and perform a task that requires a var to be passed between two pipes.
I'm using node uuid to create a v3 UUID for each file path to
ultimately end up with a uuid for each page. I'm grabbing the file path with gulp-print.
I want to store that uuid value as a var. In the next pipe I'm using gulp-inject-string to write it into the page during the build.
Help: either I need a way to get the file path inside the gulp-inject-string pipe, or I need to pass the var between the two different pipes. If I globally set a var with a default value outside the src, it gets passed easily to pipe(inject).
Super simplified code below:
// test code
var gulp = require('gulp');
var print = require('gulp-print');
var inject = require('gulp-inject-string');
var browserSync = require('browser-sync');
var reload = browserSync.reload;
const uuidv3 = require('uuid/v3');
var uuid;
gulp.task('uuid', function() {
return gulp.src('**/*.html')
// create uuid
.pipe(print(function(filepath) {
uuid = uuidv3(filepath, uuidv3.URL);
return "compiled: " + filepath + ' uuid: ' + uuid;
}))
// need to add the UUIDv3 to each page
.pipe(inject.before('</head>', '<meta name="dc.identifier" content="' + uuid + '">'))
.pipe(gulp.dest('/prod/./'))
.pipe(reload({ stream: true }));
});
It's worth noting that I need a cross-platform way to get the file path, starting at the root of the project and using forward slashes. gulp-print does this perfectly, starting at the root of the project and ignoring anything upstream of that point. The format of the path is important because it's one half of the equation in creating the uuid, and the uuids must match on Mac and PC platforms.
examples:
/index.html
/dir1/file.html
/dir1/dir2/dir3/file.html

var gulp = require('gulp');
var print = require('gulp-print');
var inject = require('gulp-inject-string');
const uuidv3 = require('uuid/v3');
var tap = require('gulp-tap');
var browserSync = require('browser-sync');
var reload = browserSync.reload;
// you can declare here
var uuid;
gulp.task('pages', function() {
// or you can declare here
var uuid;
return gulp.src('**/*.html')
// bunch of stuff happens here involving templating/minifying
// create uuid
.pipe(print(function(filepath) {
// then set it here and use it further below
// it will be available
uuid = uuidv3(filepath, uuidv3.URL);
return "compiled: " + filepath + ' uuid: ' + uuid;
}))
// need to add the UUIDv3 to each page
//.pipe(inject.before('</head>', '<meta name="dc.identifier" content="' + uuid + '">\n'))
.pipe(tap(function(file, t) {
return t.through(inject.before('</head>', '<meta name="dc.identifier" content="' + uuid + '">\n'));
}))
.pipe(gulp.dest('/prod/./'))
.pipe(reload({stream:true}));
});
You are just creating a variable at a higher scope that you can set and refer to later. If you need a bunch of them, create an array with filepath as an index (i.e. an object keyed by filepath), as sketched below. But I would try it first as just a simple value.
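For the "bunch of them" case, here is a minimal sketch (reusing the requires from the snippet above; the task name and the uuids object are just illustrative) that keeps one uuid per printed filepath, so any later pipe that knows the filepath can look the value up by the same key:
var uuids = {}; // filepath -> uuid, shared across pipes
gulp.task('uuid-map', function() {
return gulp.src('**/*.html')
.pipe(print(function(filepath) {
uuids[filepath] = uuidv3(filepath, uuidv3.URL);
return 'compiled: ' + filepath + ' uuid: ' + uuids[filepath];
}))
.pipe(gulp.dest('/prod/./'));
});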

I solved the problem. It was an amateur mistake: I was returning the statement where the var was set, so the var was essentially killed. The updated code below allows the var to pass through the pipes.
var gulp = require('gulp');
var print = require('gulp-print');
var replace = require('gulp-replace');
const uuidv3 = require('uuid/v3');
var uuid;
gulp.task('build', function() {
return gulp.src('**/*.html')
// get a cross-platform filepath and create a uuid
.pipe(print(function(filepath) {
uuid = uuidv3(filepath, uuidv3.URL);
}))
// inject uuid
.pipe(replace('dc.identifier" content=""', function() {
return 'dc.identifier" content="' + uuid + '"';
}))
.pipe(gulp.dest('/prod/./'));
});
The var uuid passes through the pipes just fine now. This code creates a UUID based on a cross-platform file path and injects it into an empty dc.identifier meta tag.
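For reference, a per-file variant that avoids the shared variable entirely is also possible: gulp-replace accepts a function as the replacement and, per its docs, binds this.file to the vinyl file being processed, so the uuid can be derived right where it is injected. A rough sketch (the leading-slash, forward-slash path normalization here is my assumption; verify it matches gulp-print's format so the uuids stay identical across platforms):
var path = require('path');
gulp.task('build-per-file', function() {
return gulp.src('**/*.html')
.pipe(replace('dc.identifier" content=""', function() {
// this.file is the vinyl file being processed (per gulp-replace's docs)
var filepath = '/' + this.file.relative.split(path.sep).join('/');
return 'dc.identifier" content="' + uuidv3(filepath, uuidv3.URL) + '"';
}))
.pipe(gulp.dest('/prod/./'));
});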


NodeJS/Express share function between multiple routes files [duplicate]

Let's say I have a file called app.js. Pretty simple:
var express = require('express');
var app = express.createServer();
app.set('views', __dirname + '/views');
app.set('view engine', 'ejs');
app.get('/', function(req, res){
res.render('index', {locals: {
title: 'NowJS + Express Example'
}});
});
app.listen(8080);
What if I have functions inside "tools.js"? How would I import them to use in app.js?
Or am I supposed to turn "tools" into a module and then require it? That seems hard; I'd rather do the basic import of the tools.js file.
You can require any js file, you just need to declare what you want to expose.
// tools.js
// ========
module.exports = {
foo: function () {
// whatever
},
bar: function () {
// whatever
}
};
var zemba = function () {
}
And in your app file:
// app.js
// ======
var tools = require('./tools');
console.log(typeof tools.foo); // => 'function'
console.log(typeof tools.bar); // => 'function'
console.log(typeof tools.zemba); // => undefined
If, despite all the other answers, you still want to traditionally include a file in a node.js source file, you can use this:
var fs = require('fs');
// file is included here:
eval(fs.readFileSync('tools.js')+'');
The empty string concatenation +'' is necessary to get the file content as a string and not an object (you can also use .toString() if you prefer).
The eval() can't be used inside a function and must be called inside the global scope, otherwise no functions or variables will be accessible (i.e. you can't create an include() utility function or something like that).
Please note that in most cases this is bad practice and you should instead write a module. However, there are rare situations, where pollution of your local context/namespace is what you really want.
Update 2015-08-06
Please also note this won't work with "use strict"; (when you are in "strict mode") because functions and variables defined in the "imported" file can't be accessed by the code that does the import. Strict mode enforces some rules defined by newer versions of the language standard. This may be another reason to avoid the solution described here.
You need no new functions or new modules.
You simply need to execute the module you are requiring if you don't want to use a namespace.
in tools.js
module.exports = function() {
this.sum = function(a,b) { return a+b };
this.multiply = function(a,b) { return a*b };
//etc
}
in app.js
or in any other .js like myController.js :
instead of
var tools = require('./tools.js'), which forces us to use a namespace and call tools like tools.sum(1,2);
we can simply call
require('./tools.js')();
and then
sum(1,2);
in my case I have a file with controllers ctrls.js
module.exports = function() {
this.Categories = require('./categories.js');
}
and I can use Categories in every context as a public class after require('./ctrls.js')()
Create two js files
// File cal.js
module.exports = {
sum: function(a,b) {
return a+b
},
multiply: function(a,b) {
return a*b
}
};
Main js file
// File app.js
var tools = require("./cal.js");
var value = tools.sum(10,20);
console.log("Value: "+value);
Console Output
Value: 30
Create two files, e.g. app.js and tools.js
app.js
const tools= require("./tools.js")
var x = tools.add(4,2) ;
var y = tools.subtract(4,2);
console.log(x);
console.log(y);
tools.js
const add = function(x, y){
return x+y;
}
const subtract = function(x, y){
return x-y;
}
module.exports ={
add,subtract
}
output
6
2
Here is a plain and simple explanation:
Server.js content:
// Include the public functions from 'helpers.js'
var helpers = require('./helpers');
// Let's assume this is the data which comes from the database or somewhere else
var databaseName = 'Walter';
var databaseSurname = 'Heisenberg';
// Use the function from 'helpers.js' in the main file, which is server.js
var fullname = helpers.concatenateNames(databaseName, databaseSurname);
Helpers.js content:
// 'module.exports' is a Node.js-specific feature; it is not part of plain browser JavaScript
module.exports =
{
// This is the function which will be called in the main file, which is server.js
// The parameters 'name' and 'surname' will be provided inside the function
// when the function is called in the main file.
// Example: concatenateNames('John', 'Doe');
concatenateNames: function (name, surname)
{
var wholeName = name + " " + surname;
return wholeName;
},
sampleFunctionTwo: function ()
{
}
};
// Private variables and functions which will not be accessible outside this file
var privateFunction = function ()
{
};
I was also looking for a NodeJS 'include' function and I checked the solution proposed by Udo G - see message https://stackoverflow.com/a/8744519/2979590. His code doesn't work with my included JS files.
Finally I solved the problem like that:
var fs = require("fs");
function read(f) {
return fs.readFileSync(f).toString();
}
function include(f) {
eval.apply(global, [read(f)]);
}
include('somefile_with_some_declarations.js');
Sure, that helps.
Create two JavaScript files. E.g. import_functions.js and main.js
1.) import_functions.js
// Declaration --------------------------------------
module.exports =
{
add,
subtract
// ...
}
// Implementation ----------------------------------
function add(x, y)
{
return x + y;
}
function subtract(x, y)
{
return x - y;
}
// ...
2.) main.js
// include ---------------------------------------
const sf= require("./import_functions.js")
// use -------------------------------------------
var x = sf.add(4,2);
console.log(x);
var y = sf.subtract(4,2);
console.log(y);
output
6
2
The vm module in Node.js provides the ability to execute JavaScript code within the current context (including global object). See http://nodejs.org/docs/latest/api/vm.html#vm_vm_runinthiscontext_code_filename
Note that, as of today, there's a bug in the vm module that prevents runInThisContext from doing the right thing when invoked from a new context. This only matters if your main program executes code within a new context and then that code calls runInThisContext. See https://github.com/joyent/node/issues/898
Sadly, the with(global) approach that Fernando suggested doesn't work for named functions like "function foo() {}"
In short, here's an include() function that works for me:
var fs = require('fs');
var vm = require('vm');
function include(path) {
var code = fs.readFileSync(path, 'utf-8');
vm.runInThisContext(code, path);
}
Say we want to call the functions ping() and add(30,20), which live in the lib.js file, from main.js.
main.js
lib = require("./lib.js")
output = lib.ping();
console.log(output);
//Passing Parameters
console.log("Sum of A and B = " + lib.add(20,30))
lib.js
this.ping=function ()
{
return "Ping Success"
}
//Functions with parameters
this.add=function(a,b)
{
return a+b
}
Udo G. said:
The eval() can't be used inside a function and must be called inside
the global scope otherwise no functions or variables will be
accessible (i.e. you can't create a include() utility function or
something like that).
He's right, but there's a way to affect the global scope from a function. Improving his example:
var fs = require('fs');
function include(file_) {
with (global) {
eval(fs.readFileSync(file_) + '');
}
}
include('somefile_with_some_declarations.js');
// the declarations are now accessible here.
Hope, that helps.
app.js
let { func_name } = require('path_to_tools.js');
func_name(); //function calling
tools.js
let func_name = function() {
...
//function body
...
};
module.exports = { func_name };
It worked for me like the following....
Lib1.js
//Any other private code here
// Code you want to export
exports.function1 = function(params) { /* ... */ };
exports.function2 = function(params) { /* ... */ };
// Again any private code
now in the Main.js file you need to include Lib1.js
var mylib = require('lib1.js');
mylib.function1(params);
mylib.function2(params);
Please remember to put Lib1.js in the node_modules folder.
Another way to do this, in my opinion, is to execute everything in the lib file when you call the require() function, by wrapping everything in (function(/* things here */){})();. Doing this makes all these functions global scope, exactly like the eval() solution.
src/lib.js
(function () {
funcOne = function() {
console.log('mlt funcOne here');
}
funcThree = function(firstName) {
console.log(firstName, 'calls funcThree here');
}
name = "Mulatinho";
myobject = {
title: 'Node.JS is cool',
funcFour: function() {
return console.log('internal funcFour() called here');
}
}
})();
And then in your main code you can call your functions by name like:
main.js
require('./src/lib')
funcOne();
funcThree('Alex');
console.log(name);
console.log(myobject);
console.log(myobject.funcFour());
Will make this output
bash-3.2$ node -v
v7.2.1
bash-3.2$ node main.js
mlt funcOne here
Alex calls funcThree here
Mulatinho
{ title: 'Node.JS is cool', funcFour: [Function: funcFour] }
internal funcFour() called here
undefined
Pay attention to the undefined when you call myobject.funcFour(); it will be the same if you load with eval(). Hope it helps :)
You can put your functions in global variables, but it's better practice to just turn your tools script into a module. It's really not too hard – just attach your public API to the exports object. Take a look at Understanding Node.js' exports module for some more detail.
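A minimal sketch of that (file and function names are just for illustration):
// tools.js
exports.shout = function (msg) {
return msg.toUpperCase() + '!';
};
// app.js
var tools = require('./tools');
console.log(tools.shout('hello')); // HELLO!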
I just want to add, in case you need just certain functions imported from your tools.js, then you can use a destructuring assignment which is supported in node.js since version 6.4 - see node.green.
Example:
(both files are in the same folder)
tools.js
module.exports = {
sum: function(a,b) {
return a + b;
},
isEven: function(a) {
return a % 2 == 0;
}
};
main.js
const { isEven } = require('./tools.js');
console.log(isEven(10));
output: true
This also avoids assigning those functions as properties of another object, as is the case in the following (common) assignment:
const tools = require('./tools.js');
where you need to call tools.isEven(10).
NOTE:
Don't forget to prefix your file name with the correct path - even if both files are in the same folder, you need to prefix with ./
From Node.js docs:
Without a leading '/', './', or '../' to indicate a file, the module
must either be a core module or is loaded from a node_modules folder.
Include a file and run it in a given (non-global) context
fileToInclude.js
define({
"data": "XYZ"
});
main.js
var fs = require("fs");
var vm = require("vm");
function include(path, context) {
var code = fs.readFileSync(path, 'utf-8');
vm.runInContext(code, vm.createContext(context));
}
// Include file
var customContext = {
"define": function (data) {
console.log(data);
}
};
include('./fileToInclude.js', customContext);
Using the ESM module system:
a.js:
export default function foo() {};
export function bar() {};
b.js:
import foo, {bar} from './a.js';
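To run this with plain Node (no transpiler), recent Node versions also need either the .mjs extension or a "type": "module" entry in the nearest package.json; with that in place the imports can be used directly:
// package.json (relevant part)
// { "type": "module" }
// b.js
import foo, { bar } from './a.js';
foo(); // default export
bar(); // named export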
This is the best way I have created so far.
var fs = require('fs'),
includedFiles_ = {};
global.include = function (fileName) {
console.log('Loading file: ' + fileName);
var ev = require(fileName);
for (var prop in ev) {
global[prop] = ev[prop];
}
includedFiles_[fileName] = true;
};
global.includeOnce = function (fileName) {
if (!includedFiles_[fileName]) {
include(fileName);
}
};
global.includeFolderOnce = function (folder) {
var file, fileName,
files = fs.readdirSync(folder);
var getFileName = function(str) {
var splited = str.split('.');
splited.pop();
return splited.join('.');
},
getExtension = function(str) {
var splited = str.split('.');
return splited[splited.length - 1];
};
for (var i = 0; i < files.length; i++) {
file = files[i];
if (getExtension(file) === 'js') {
fileName = getFileName(file);
try {
includeOnce(folder + '/' + file);
} catch (err) {
// if (ext.vars) {
// console.log(ext.vars.dump(err));
// } else {
console.log(err);
// }
}
}
}
};
includeFolderOnce('./extensions');
includeOnce('./bin/Lara.js');
var lara = new Lara();
You still need to declare what you want to export
includeOnce('./bin/WebServer.js');
function Lara() {
this.webServer = new WebServer();
this.webServer.start();
}
Lara.prototype.webServer = null;
module.exports.Lara = Lara;
You can simply require('./filename').
E.g.
// file: index.js
var express = require('express');
var app = express();
var child = require('./child');
app.use('/child', child);
app.get('/', function (req, res) {
res.send('parent');
});
app.listen(process.env.PORT, function () {
console.log('Example app listening on port '+process.env.PORT+'!');
});
// file: child.js
var express = require('express'),
child = express.Router();
console.log('child');
child.get('/child', function(req, res){
res.send('Child2');
});
child.get('/', function(req, res){
res.send('Child');
});
module.exports = child;
Please note that:
you can't listen on a PORT in the child file; only the parent express module has the PORT listener
the child is using 'Router', not the parent Express module.
Node works based on CommonJS modules and, more recently, ESM modules. Basically, you should create modules in separate .js files and make use of imports/exports (module.exports and require).
JavaScript in the browser works differently, based on scope. There is the global scope, and through closures (functions inside other functions) you have private scopes.
So, in node, export functions and objects that you will consume in other modules.
The cleanest way IMO is the following, In tools.js:
function A(){
// ...
}
function B(){
// ...
}
module.exports = {
A,
B
}
Then, in app.js, just require tools.js as follows: const tools = require("./tools");
I was also searching for an option to include code without writing modules, or rather to reuse the same tested standalone sources from a different project for a Node.js service - and jmparatte's answer did it for me.
The benefit is that you don't pollute the namespace, I don't have trouble with "use strict";, and it works well.
Here a full sample:
Script to load - /lib/foo.js
"use strict";
(function(){
var Foo = function(e){
this.foo = e;
}
Foo.prototype.x = 1;
return Foo;
}())
SampleModule - index.js
"use strict";
const fs = require('fs');
const path = require('path');
var SampleModule = module.exports = {
instAFoo: function(){
var Foo = eval.apply(
this, [fs.readFileSync(path.join(__dirname, '/lib/foo.js')).toString()]
);
var instance = new Foo('bar');
console.log(instance.foo); // 'bar'
console.log(instance.x); // '1'
}
}
Hope this was helpful somehow.
Say you have a file abc.txt, and many more.
Create 2 files: fileread.js and fetchingfile.js, then in fileread.js write this code:
var fs = require("fs"); // file system
function fileread(filename) {
var contents = fs.readFileSync(filename);
return contents;
}
module.exports.fileread = fileread;
//var data = fileread("abc.txt");
//data.say();
//console.log(data.toString());
In fetchingfile.js write this code:
function myerror(){
console.log("Hey need some help");
console.log("type file=abc.txt");
}
var ags = require("minimist")(process.argv.slice(2), { string: "file" });
if(ags.help || !ags.file) {
myerror();
process.exit(1);
}
var hello = require("./fileread.js");
var data = hello.fileread(ags.file); // importing module here
console.log(data.toString());
Now, in a terminal:
$ node fetchingfile.js --file=abc.txt
You are passing the file name as an argument; moreover, you could include all the files in fileread.js instead of passing them as arguments.
Thanks
Another method when using node.js and express.js framework
var f1 = function(){
console.log("f1");
}
var f2 = function(){
console.log("f2");
}
module.exports = {
f1 : f1,
f2 : f2
}
Store this in a js file named s.js in the folder statics.
Now, to use the functions:
var s = require('../statics/s');
s.f1();
s.f2();
To turn "tools" into a module, I don't see hard at all. Despite all the other answers I would still recommend use of module.exports:
//util.js
module.exports = {
myFunction: function () {
// your logic in here
let message = "I am message from myFunction";
return message;
}
}
Now we need to require these exports (in your app|index|server.js):
var util = require('./util');
Now you can refer to and call the function as:
//util.myFunction();
console.log(util.myFunction()); // prints in console: I am message from myFunction
To interactively test the module ./test.js in a Unix environment, something like this could be used:
>> node -e "eval(''+require('fs').readFileSync('./test.js'))" -i
...
In tools.js:
module.exports.<your function> = function () {
<what should the function do>
}
Then, in app.js, use:
var mymodule = require("./tools.js")

node.js module export how to use data amoung all modules?

I would like to use updated (and only then) globals among all node modules. How do I do that? Questions are in the code.
app.js
var data = 'data';
var global = require('glob.js')(data);
// here we are require your globals variables and we corectly 'set them'
console.log(globals.glob1);
// we can use them here
glob.js
module.exports = function(data)
{
var globs = {
glob1 : data.toLowerCase(),
glob2 : data.toUpperCase()
}
return globs;
}
mod.js
var global = require('glob.js'); // I require globals but they are not set...
function funct(someOtherData, someMoreData)
{
var test = global.glob1;
console.log(test);
// why I can't use globals here ? How can I use corectly set globals (globals need to be updated first - app.js, then ALL other modules should be able to use correctly set globals)?
}
module.export = funct;
For the answer scroll down to the TLDR section below but do read on to understand why.
Part1 - the difference between a function and a function call
Your first mistake is that you are exporting a function, not an object:
module.exports = function(data) // <---- this is a function
{
var globs = {
glob1 : data.toLowerCase(),
glob2 : data.toUpperCase()
}
return globs;
}
and in app.js you do this:
console.log(globs.glob1); <--- globs is a function, not an object
when you should be doing this:
console.log(globs().glob1);
Why is this? OK, let's forget about your module for a moment. Consider the following code:
var a = function(){ return 2 };
console.log(a); // do you expect this to print a function or 2?
console.log(a()); // what do you expect this to print?
This is a very basic rule about functions in all programming languages, not just javascript: to get the return value you need to call the function. So in your code:
function myExportedFunction (data) {
// some logic here...
return globs;
}
console.log(myExportedFunction); // prints a function
console.log(myExportedFunction()); // prints the globs object
console.log(myExportedFunction().glob1); // prints value of glob1
So it's simple really. There is no magic syntax going on. You've just forgotten to return the glob object and are using the function pointer instead. Obviously the function has no glob1 property so it's correct for it to be undefined.
Part2 - function local variables
OK. So let's say you made the changes I recommended above. There's an obvious problem with the way the function was written. What happens when you do this:
var glob = require('glob.js')();
console.log(glob.glob1); // <--- prints "undefined"
So the first problem is that you're not checking whether you're passing data or nothing, so every time you call the function you will overwrite the stored value.
There's another problem: you are returning a different object every time you call the function. Let's look at how local variables work when returned:
function a () {
var data = {}
return data;
}
var x = a();
var y = a();
x.testing = 1;
y.testing = 2;
console.log(x.testing); // prints 1
console.log(y.testing); // prints 2
So, every time you call a function that creates a local variable you are returning a different object. Actually what's doing this is not really the variable but the object literal syntax:
var a = {};
// is basically the same as
var a = new Object();
If we change the above example to:
function a () {
return {};
}
it would still behave the same.
TLDR
So, how do we fix it? Simple, create the object outside of the function and check if we pass data to initialize:
var globs = {
glob1 : "",
glob2 : ""
}
module.exports = function(data)
{
if (data !== undefined) {
globs.glob1 = data.toLowerCase();
globs.glob2 = data.toUpperCase();
}
return globs;
}
Now everything should work:
In app.js
var global = require('glob.js')(data);
In mod.js
var global = require('glob.js')();
Epilogue - modules are singletons
It may or may not be obvious to you why the above should work. In case it isn't, I'm writing this as a reference for future readers.
In node.js modules are implemented as proper singletons. Therefore, in node, if you want a singleton all you need to do is write a module; you don't need to implement any special code for it.
What this means is that all module globals (module scoped variables) are shared amongst all requires. Here's a very simple module to share one variable amongst all modules:
shared.js
var x = "";
module.exports = {
set: function (val) {x=val},
get: function () {return x}
}
a.js
var shared = require('./shared');
shared.set("hello world");
b.js
var shared = require('./shared');
console.log(shared.get()); // prints "hello world"
We're using this feature to declare a shared glob variable in the code above.
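A quick way to see this caching in action (reusing shared.js from above): Node caches the module by its resolved filename, so every require hands back the very same object.
// anywhere else in the project
var s1 = require('./shared');
var s2 = require('./shared');
console.log(s1 === s2); // true - one cached module instance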
You can use the global. prefix to set global variables in NodeJS instead of var. Example:
app.js
var data = 'data';
var glob = require('./glob.js');
glob(data);
// here we are require your globals variables and we corectly 'set them'
console.log(global.gl.glob1);
var mod = require('./mod.js');
mod();
// we can use them here
glob.js
module.exports = function(data)
{
console.log("setting globals");
global.gl = {
glob1 : '1' + data,
glob2 : '2' + data
}
// return global.gl; // can be removed
}
mod.js
function funct(someOtherData, someMoreData)
{
var test = global.gl.glob1;
console.log(test);
test = global.gl.glob2;
console.log(test);
// why I can't use globals here ? How can I use corectly set globals (globals need to be updated first - app.js, then ALL other modules should be able to use correctly set globals)?
}
module.exports = funct;
As you can see in glob.js, I switched from var globs = to global.gl =, and then in mod.js used it as global.gl.
Running app.js outputs:
setting globals
1data // From app.js
1data // From mod.js imported in app.js
2data // From mod.js imported in app.js
There are 2 options:
Use the nodejs global variable (not recommended)
Create a shared module
You chose the 2nd option, but did it in a slightly wrong way by exporting a function. When you import the package and call the function, it always creates a new globs object and fills it with your data. Instead you can export an object. Simple example:
glob.js
Global object is defined here
module.exports = {
glob1: '1',
glob2: '2'
};
mod.js
You can change the global object here, for example:
var globs = require('./glob');
module.exports.updateGlob1 = function(data) {
globs.glob1 = data;
};
app.js
Here, if you access the global variable, you can see it has been updated
var globs = require('./glob');
var mod = require('./mod');
mod.updateGlob1('1 plus 2');
console.log(globs.glob1); // Output: '1 plus 2'
There can be more complex examples; for the module design pattern, an IIFE is often used.
UPDATE
Another example using IIFE.
glob.js
module.exports = (function() {
var glob1 = 'initial value';
return {
// Getter method
getGlob1() {
return glob1;
},
// Setter method
setGlob1(value) {
glob1 = value;
}
}
})();
mod.js
var shared = require('./glob');
module.exports.testFn = function() {
// Access global variable with getter method
console.log('In mod.js', shared.getGlob1());
};
app.js
var shared = require('./glob');
var mod = require('./mod');
// Print initial value
console.log('Initial', shared.getGlob1());
// Set new value to global variable
shared.setGlob1('new value');
// Print updated value
console.log('In app.js', shared.getGlob1());
// Use global variable in mod.js file
mod.testFn();

Get cordova package name from javascript hook

I am writing a cordova plugin with a node hook that runs after_prepare.
This is for Android only.
From within this hook I need to get the cordova package name, so I can copy a file to the src/com/example/myproject folder (if the package is com.example.myproject).
If I know the package name I can make this path. I have it working hardcoded now but I need this to work with two different package names.
Is there a way to get the package name from within this code in the plugin hook?
module.exports = function(ctx){
var fs = ctx.requireCordovaModule('fs');
var path = ctx.requireCordovaModule('path');
var deferral = ctx.requireCordovaModule('q').defer();
//get package name here
//do other stuff
}
I have done a lot of research but have not been able to find this.
Thanks.
It doesn't look like it is available off of the context object, but you could try to parse the config.xml.
module.exports = function(context) {
var fs = require('fs');
var path = require('path');
var config_xml = path.join(context.opts.projectRoot, 'config.xml');
var et = context.requireCordovaModule('elementtree');
var data = fs.readFileSync(config_xml).toString();
var etree = et.parse(data);
console.log(etree.getroot().attrib.id);
};
The local-webserver plugin uses a similar strategy for reading config properties.
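Once you have the id, getting to the Android source folder is mostly string work. A hedged sketch building on the snippet above (the platforms layout below assumes an older cordova-android where Java sources live under platforms/android/src; newer versions generate platforms/android/app/src/main/java instead, so check which one your project uses):
var packageName = etree.getroot().attrib.id; // e.g. com.example.myproject
var javaDir = path.join(context.opts.projectRoot,
'platforms', 'android', 'src',
packageName.split('.').join(path.sep));
// copy your file into javaDir here, e.g. with fs.createReadStream(...).pipe(fs.createWriteStream(...))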
Here is my compilation of different answers that works in 2021.
I use it to update some parameters in the Xcode project for plugin compilation.
You can see that I am getting the app id and name from config.xml here.
And you can add it to after_prepare hook:
<hook src="scripts/addBuildSettingsToXcode.js" type="after_prepare" />
#!/usr/bin/env node
let fs = require('fs');
let xcode = require('xcode');
let path = require('path');
let et = require('elementtree');
module.exports = function (context) {
//console.log(context);
function addBuildPropertyToDebugAndRelease(prop, value) {
console.log('Xcode Adding ' + prop + '=' + value);
myProj.addBuildProperty(prop, value, 'Debug');
myProj.addBuildProperty(prop, value, 'Release');
}
function updateBuildPropertyToDebugAndRelease(prop, value) {
console.log('Xcode Updating ' + prop + '=' + value );
myProj.updateBuildProperty(prop, value, 'Debug');
myProj.updateBuildProperty(prop, value, 'Release');
}
// Getting app id and name from config.xml
let config_xml = path.join(context.opts.projectRoot, 'config.xml');
let data = fs.readFileSync(config_xml).toString();
let etree = et.parse(data);
let appId = etree.getroot().attrib.id ;
let appName = etree.getroot().find('name')['text'];
// Building project path
let projectPath = 'platforms/ios/' + appName + '.xcodeproj/project.pbxproj';
// Opening Xcode project and parsing it
let myProj = xcode.project(projectPath);
myProj = myProj.parseSync();
// Common properties
addBuildPropertyToDebugAndRelease('DEVELOPMENT_TEAM', 'CGXXXXXXX');
addBuildPropertyToDebugAndRelease('CODE_SIGN_IDENTITY', '"Apple Development"');
// Compilation properties
addBuildPropertyToDebugAndRelease('ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES', 'YES');
// Save project file
fs.writeFileSync(projectPath, myProj.writeSync());
};

gulp - wrap plugin (which uses through2) output with string

I would like to know how exactly I can manipulate the output of my Gulp plugin so that, for example, no matter how many files are passed to the plugin, it will wrap the output with a string. Currently I cannot tell when the last file is done.
The super simplified example below will iterate over 3 files and will create a new file named output.js in which the string xxx appears three times (xxxxxxxxx).
I would like the plugin itself to wrap the contents so the output will
be: +xxxxxxxxx+.
How can I do this?
Thanks!
Gulpfile
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');
gulp.task('default', function() {
gulp.src(['a.html', 'b.html', 'c.html'])
.pipe(foo())
.pipe(concat('output.js'))
.pipe(gulp.dest('./test/output'))
});
The most basic gulp plugin (index.js):
var through2 = require('through2'),
gutil = require('gulp-util');
var PLUGIN_NAME = 'foo';
module.exports = function( options ){
// through2.obj(fn) is a convenience wrapper around
// through2({ objectMode: true }, fn)
return through2.obj(function( file, enc, callback ){
file.contents = new Buffer( 'xxx' );
this.push(file);
callback();
});
}
I understand the files are currently simply returned modified, but what I don't understand is how to append text and return the concatenated result that I want, while keeping it in line with Gulp's working standards.
The "real" plugin should actually wrap the files results with:
var foo = { FILES_CONTENT }
where FILES_CONTENT will actually be a concatenated string of all the files:
"file_name" : "file_content",
"file_name" : "file_content",
...
I would make the following changes to your gulpfile.js:
var gulp = require('gulp');
var foo = require('./index.js');
gulp.task('default', function() {
return gulp.src(['a.html', 'b.html', 'c.html'])
.pipe(foo({fileName:'output.js', varName:'bar'}))
.pipe(gulp.dest('./test/output'))
});
Since your foo() plugin itself will concatenate all the files, there's no need to use gulp-concat at all. Instead your plugin should accept an option fileName that provides the name of the generated file. I've also added another option varName that will provide the name of the var in the output file.
I'll assume that a.html, b.html and c.html are simple HTML files, something like this:
<h1 class="header">a</h1>
As you've already realized, you need to concat all the files in the plugin itself. That's not really difficult, however, and doesn't require a lot of code. Here's an index.js which does exactly that:
var through2 = require('through2'),
gutil = require('gulp-util'),
path = require('path'),
File = require('vinyl');
var PLUGIN_NAME = 'foo';
module.exports = function(options) {
var files = { };
var outputFile = null;
return through2.obj(function(file, enc, callback){
outputFile = outputFile || file;
var filePath = path.relative(file.base, file.path);
files[filePath] = file.contents.toString();
callback();
}, function(callback) {
outputFile = outputFile ? outputFile.clone() : new File();
outputFile.path = path.resolve(outputFile.base, options.fileName);
outputFile.contents = new Buffer(
'var ' + options.varName + ' = ' +
JSON.stringify(files, null, 2) + ';'
);
this.push(outputFile);
callback();
});
}
Since you want to output a key/value mapping from file names to file contents, our transformFunction just stores both of those things in a regular JavaScript object, files. None of the input files themselves are emitted. Their names and contents are just stored until we have all of them.
The only tricky part is making sure that we respect the .base property of each file as is customary for gulp plugins. This allows the user to provide a custom base folder using the base option in gulp.src().
Once all files have been processed through2 calls the flushFunction. In there we create our output file with the provided fileName (once again making sure we respect the .base property).
Creating the output file contents is then just a matter of serializing our files object using JSON.stringify() (which automatically takes care of any escaping that has to be done).
The resulting ./test/output/output.js will then look like this:
var bar = {
"a.html": "<h1 class=\"header\">a</h1>\n",
"b.html": "<h1 class=\"header\">b</h1>\n",
"c.html": "<h1 class=\"header\">c</h1>\n"
};
You should use the gulp pipeline technique (standard).
This means that you can use the gulp-insert package in order
to add the string xxx.
var insert = require('gulp-insert');
.pipe(insert.append('xxx')); // Appends 'xxx' to the contents of every file
You can also prepend, append and wrap with this package, and it supports the gulp standards, of course.
So the full example will be:
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');
var insert = require('gulp-insert');
gulp.task('default', function() {
return gulp.src(['a.html', 'b.html', 'c.html'])
.pipe(foo())
.pipe(insert.append('xxx'))
.pipe(concat('output.js'))
.pipe(gulp.dest('./test/output'));
});

node.js require all files in a folder?

How do I require all files in a folder in node.js?
need something like:
files.forEach(function (v,k){
// require routes
require('./routes/'+v);
});
When require is given the path of a folder, it'll look for an index.js file in that folder; if there is one, it uses that, and if there isn't, it fails.
It would probably make most sense (if you have control over the folder) to create an index.js file and then assign all the "modules" and then simply require that.
yourfile.js
var routes = require("./routes");
index.js
exports.something = require("./routes/something.js");
exports.others = require("./routes/others.js");
If you don't know the filenames you should write some kind of loader.
Working example of a loader:
var normalizedPath = require("path").join(__dirname, "routes");
require("fs").readdirSync(normalizedPath).forEach(function(file) {
require("./routes/" + file);
});
// Continue application logic here
I recommend using glob to accomplish that task.
var glob = require( 'glob' )
, path = require( 'path' );
glob.sync( './routes/**/*.js' ).forEach( function( file ) {
require( path.resolve( file ) );
});
Based on @tbranyen's solution, I create an index.js file that loads arbitrary javascripts under the current folder as part of the exports.
// Load `*.js` under current directory as properties
// i.e., `User.js` will become `exports['User']` or `exports.User`
require('fs').readdirSync(__dirname + '/').forEach(function(file) {
if (file.match(/\.js$/) !== null && file !== 'index.js') {
var name = file.replace('.js', '');
exports[name] = require('./' + file);
}
});
Then you can require this directory from anywhere else, as shown below.
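Usage from elsewhere then looks like this (the folder name routes and the listed keys are hypothetical; the keys are whatever .js files actually sit next to that index.js):
var routes = require('./routes'); // picks up routes/index.js
console.log(Object.keys(routes)); // e.g. [ 'users', 'posts' ] - one key per loaded file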
Another option is to use the package require-dir, which lets you do the following. It supports recursion as well.
var requireDir = require('require-dir');
var dir = requireDir('./path/to/dir');
I have a folder /fields full of files with a single class each, ex:
fields/Text.js -> Text class
fields/Checkbox.js -> Checkbox class
Drop this in fields/index.js to export each class:
var collectExports, fs, path,
__hasProp = {}.hasOwnProperty;
fs = require('fs');
path = require('path');
collectExports = function(file) {
var func, include, _results;
if (path.extname(file) === '.js' && file !== 'index.js') {
include = require('./' + file);
_results = [];
for (func in include) {
if (!__hasProp.call(include, func)) continue;
_results.push(exports[func] = include[func]);
}
return _results;
}
};
fs.readdirSync('./fields/').forEach(collectExports);
This makes the modules act more like they would in Python:
var text = new Fields.Text()
var checkbox = new Fields.Checkbox()
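For completeness, the consuming side first requires the folder, which resolves to fields/index.js:
var Fields = require('./fields'); // loads fields/index.js, which re-exports each class
var text = new Fields.Text();
var checkbox = new Fields.Checkbox();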
One more option is require-dir-all, which combines features from the most popular packages.
The most popular, require-dir, does not have options to filter the files/dirs and does not have a map function (see below), but uses a small trick to find the module's current path.
Second by popularity, require-all, has regexp filtering and preprocessing, but lacks relative paths, so you need to use __dirname (this has pros and cons) like:
var libs = require('require-all')(__dirname + '/lib');
require-index, mentioned here, is quite minimalistic.
With map you may do some preprocessing, like create objects and pass config values (assuming modules below exports constructors):
// Store config for each module in config object properties
// with property names corresponding to module names
var config = {
module1: { value: 'config1' },
module2: { value: 'config2' }
};
// Require all files in modules subdirectory
var modules = require('require-dir-all')(
'modules', // Directory to require
{ // Options
// function to be post-processed over exported object for each require'd module
map: function(reqModule) {
// create new object with corresponding config passed to constructor
reqModule.exports = new reqModule.exports( config[reqModule.name] );
}
}
);
// Now `modules` object holds not exported constructors,
// but objects constructed using values provided in `config`.
I know this question is 5+ years old, and the given answers are good, but I wanted something a bit more powerful for express, so I created the express-map2 package for npm. I was going to name it simply express-map, however the people at yahoo already have a package with that name, so I had to rename my package.
1. basic usage:
app.js (or whatever you call it)
var express = require('express'); // 1. include express
var app = express();
app.set('controllers',__dirname+'/controllers/');// 2. set path to your controllers.
require('express-map2')(app); // 3. patch map() into express
app.map({
'GET /':'test',
'GET /foo':'middleware.foo,test',
'GET /bar':'middleware.bar,test' // separate your handlers with a comma.
});
controller usage:
//single function
module.exports = function(req,res){
};
//export an object with multiple functions.
module.exports = {
foo: function(req,res){
},
bar: function(req,res){
}
};
2. advanced usage, with prefixes:
app.map('/api/v1/books',{
'GET /': 'books.list', // GET /api/v1/books
'GET /:id': 'books.loadOne', // GET /api/v1/books/5
'DELETE /:id': 'books.delete', // DELETE /api/v1/books/5
'PUT /:id': 'books.update', // PUT /api/v1/books/5
'POST /': 'books.create' // POST /api/v1/books
});
As you can see, this saves a ton of time and makes the routing of your application dead simple to write, maintain, and understand. It supports all of the http verbs that express supports, as well as the special .all() method.
npm package: https://www.npmjs.com/package/express-map2
github repo: https://github.com/r3wt/express-map
Expanding on this glob solution. Do this if you want to import all modules from a directory into index.js and then import that index.js in another part of the application.
const glob = require("glob");
let allOfThem = {};
glob.sync(`${__dirname}/*.js`).forEach((file) => {
/* see note about this in example below */
allOfThem = { ...allOfThem, ...require(file) };
});
module.exports = allOfThem;
Full Example
Directory structure
globExample/example.js
globExample/foobars/index.js
globExample/foobars/unexpected.js
globExample/foobars/barit.js
globExample/foobars/fooit.js
globExample/example.js
const { foo, bar, keepit } = require('./foobars/index');
const longStyle = require('./foobars/index');
console.log(foo()); // foo ran
console.log(bar()); // bar ran
console.log(keepit()); // keepit ran unexpected
console.log(longStyle.foo()); // foo ran
console.log(longStyle.bar()); // bar ran
console.log(longStyle.keepit()); // keepit ran unexpected
globExample/foobars/index.js
const glob = require("glob");
/*
Note the following style also works with multiple exports per file (barit.js example)
but will overwrite if you have 2 exports with the same
name (unexpected.js and barit.js have a keepit function) in the files being imported. As a result, this method is best used when
your exporting one module per file and use the filename to easily identify what is in it.
Also Note: This ignores itself (index.js) by default to prevent infinite loop.
*/
let allOfThem = {};
glob.sync(`${__dirname}/*.js`).forEach((file) => {
allOfThem = { ...allOfThem, ...require(file) };
});
module.exports = allOfThem;
globExample/foobars/unexpected.js
exports.keepit = () => 'keepit ran unexpected';
globExample/foobars/barit.js
exports.bar = () => 'bar run';
exports.keepit = () => 'keepit ran';
globExample/foobars/fooit.js
exports.foo = () => 'foo ran';
From inside project with glob installed, run node example.js
$ node example.js
foo ran
bar run
keepit ran unexpected
foo ran
bar run
keepit ran unexpected
One module that I have been using for this exact use case is require-all.
It recursively requires all files in a given directory and its sub directories as long as they don't match the excludeDirs property.
It also allows specifying a file filter and how to derive the keys of the returned hash from the filenames.
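A typical call looks roughly like this (options quoted from memory of the package's README, so double-check the names; the controllers folder and the Controller.js suffix are just an example):
var controllers = require('require-all')({
dirname: __dirname + '/controllers',
filter: /(.+Controller)\.js$/, // only pick up *Controller.js files
excludeDirs: /^\.(git|svn)$/, // skip these sub directories
recursive: true
});
// controllers.MainController, controllers.AuthController, ... keyed by the filter's capture group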
Require all files from the routes folder and apply them as middleware. No external modules needed.
// require
const { readdirSync } = require("fs");
// apply as middleware
readdirSync("./routes").map((r) => app.use("/api", require("./routes/" + r)));
I'm using the node module copy-to to create a single file that requires all the files in our NodeJS-based system.
The code for our utility file looks like this:
/**
* Module dependencies.
*/
var copy = require('copy-to');
copy(require('./module1'))
.and(require('./module2'))
.and(require('./module3'))
.to(module.exports);
In all of the files, most functions are written as exports, like so:
exports.function1 = function () { /* function contents */ };
exports.function2 = function () { /* function contents */ };
exports.function3 = function () { /* function contents */ };
So, then to use any function from a file, you just call:
var utility = require('./utility');
var response = utility.function2(); // or whatever the name of the function is
You can use: https://www.npmjs.com/package/require-file-directory
Require selected files by name only, or all files.
No need for absolute paths.
Easy to understand and use.
Using this function you can require a whole dir.
const PATH = require( "path" );
const GetAllModules = ( dirname ) => {
if ( dirname ) {
let dirItems = require( "fs" ).readdirSync( dirname );
return dirItems.reduce( ( acc, value, index ) => {
if ( PATH.extname( value ) == ".js" && value.toLowerCase() != "index.js" ) {
let moduleName = value.replace( /\.js$/, '' );
acc[ moduleName ] = require( `${dirname}/${moduleName}` );
}
return acc;
}, {} );
}
}
// calling this function.
let dirModules = GetAllModules(__dirname);
Create an index.js file in your routes folder with this code:
const fs = require('fs')
const files = fs.readdirSync(__dirname)
for (const file of files) {
if (file !== 'index.js') require('./' + file)
}
And after that you can simply load the whole folder with require("./routes")
If you want to include all *.js files in a directory (for example "app/lib/*.js"):
In directory app/lib
example.js:
module.exports = function (example) { }
example-2.js:
module.exports = function (example2) { }
In directory app create index.js
index.js:
module.exports = require('./lib');
