require path works in root but not elsewhere - node.js

Working in Node.js, I am splitting one working snippet into two snippets that together should produce the same result. The working snippet has a require statement and lives in the root directory. The two split segments work at the first stage in the root directory, but the second stage fails and reports that the eTrade library cannot be found. Here is the require statement.
var etrade = require('./lib/etrade');
When executed from server.js in the root, everything works, but in the split project the code that captures the variable now lives in index.js in the routes folder and does not work. The client side reports that the eTrade library cannot be found. There is something here I'm not understanding, probably about how the path in the require statement is resolved.
In the meantime, I have made it work using a global variable that I pass from app.js in the root to index.js in the routes folder. I can continue development this way, but it would be much better if I understood how to make the first variant work.
Please edit the question to show your file hierarchy, then show us how you merged the files, which tools you used, and where the output was.
server.js runs in the root and works very well.
app.js runs in the root and is part A of the merged system and works.
index.js runs in the root/routes folder and is part B of the merged system and fails.
root/lib/eTrade contains the eTrade modules.
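In tree form (assuming the layout described above):
root/
├── server.js
├── app.js
├── lib/
│   └── etrade/
└── routes/
    └── index.js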
The working snippet (server.js) opens a confirmation window on the eTrade site; the user copy/pastes back a confirmation code, which is echoed to the console on the server side. The failing snippet, which is a combination of A and B, refuses to pass the variable (the confirmation code) to B, and because it reports a failure to find the eTrade library, I think the problem is the path in the require statement, which is
var etrade = require('./lib/etrade');
The merged system performs the first part of the task but does not pass the variable to the second part of the task, and I think that's because the variable in B is out of scope.
Here is the working snippet:
/*
 * Module dependencies
 */
const port = 3000
var express = require('express')
  , stylus = require('stylus')
  , nib = require('nib')
  , logger = require('morgan')
  , routes = require('./routes/index')
  , users = require('./routes/users')
  , app = express()

function compile(str, path) {
  return stylus(str)
    .set('filename', path)
    .use(nib());
}

app.set('views', __dirname + '/views');
app.set('view engine', 'jade');
//app.use('/', routes);
//app.use('/users', users);
app.use(logger('dev'));
app.use(stylus.middleware(
  { src: __dirname + '/public'
  , compile: compile
  }
));
app.use(express.static(__dirname + '/public'))

//from expressSite
// from readme
var etrade = require('./lib/etrade');
var configuration =
{
  useSandbox : true, // true if not provided
  key : '',          // actual value deleted
  secret : ''        // actual value deleted
}
var et = new etrade(configuration);

// here we send the user a credentials link
et.getRequestToken(
  function(authorizationUrl) {
    // Your service requires users, who will need to visit
    // the following URL and, after logging in and
    // authorizing your service to access their account
    // data, paste the E*TRADE provided verification
    // code back into your application.
    app.get('/', function (req, res) {
      res.render('AuthApp',
        { authLink : authorizationUrl }
      )
    });
    console.log("AuthorizationURL " + authorizationUrl + " ");
  },
  function(error) {
    console.log("Error encountered while attempting " +
                "to retrieve a request token: " +
                error);
  }
); //end getRequestToken

// user sends confirmation code and we get access token
app.get('/users/sendcode', function (req, res) {
  console.log('verification code is ' + req.query.vCode);
  // end get verification code
  et.getAccessToken(req.query.vCode,
    function() {
      // Your app can start using other E*TRADE API now
      // begin main interaction
      // this is where we should land first after oauth
      // hand it over to the db page?
      //et.listAccounts();
      //console.log(a);
      res.render('ETQuery');
      console.log('thread entered getAccessToken function')
      // console.log(AccessToken)
    },
    function(error) {
      console.log("Error encountered while attempting " +
                  "to exchange request token for access token: " +
                  error);
    }
  );
})

app.listen(port, (err) => {
  if (err) {
    return console.log('something bad happened', err)
  }
  console.log(`CIA is listening to the FSB on ${port}`)
})
Here is the new part A code in app.js in the root. You can see how I've patched it to use a global.
var etrade = require('./lib/etrade');
var configuration =
{
  useSandbox : true, // true if not provided
  key : '',          // actual value deleted
  secret : ''        // actual value deleted
}
var et = new etrade(configuration);

// here we send the user a credentials link
et.getRequestToken(
  function(authorizationUrl) {
    // Your service requires users, who will need to visit
    // the following URL and, after logging in and
    // authorizing your service to access their account
    // data, paste the E*TRADE provided verification
    // code back into your application.
    // app.get('/', function (req, res) {
    //   res.render('index',
    //     { authLink : authorizationUrl }
    //   )
    // });
    console.log("AuthorizationURL is " + authorizationUrl + " ");
    global.ETauthUrl = authorizationUrl;
  },
  function(error) {
    console.log("Error encountered while attempting " +
                "to retrieve a request token: " +
                error);
  }
); //end getRequestToken
But part B does not capture the token. What is odd here is that the failure reported is a failure to find the eTrade library, even though that library is nowhere referenced in B. In the working snippet I cannot yet pass the token beyond server.js, but I can report it on the server side after the user pastes it in and hits the send button on the client side. Here is B.
var express = require('express'); // needed here as well, or this file fails to load
var router = express.Router();

/* GET home page. */
router.get('/', function(req, res) {
  res.render('index',
    { authLink : ETauthUrl }
  )
});

module.exports = router;
This works only because of the global variable.

The ./... tells Node.js to resolve the path relative to the directory of the file that contains the require call. This works in server.js at the root of the app; however, to reference the same file from a module in another directory, you'll need to adjust its relative path accordingly.
E.g. given:
bootstrap/
├── lib/
│   └── etrade.js
├── modules/
│   ├── foo.js
│   └── bar/
│       └── bar.js
└── server.js
You'll need to reference etrade in server.js as:
var etrade = require('./lib/etrade');
And in modules/foo.js as:
var etrade = require('../lib/etrade');
And in modules/bar/bar.js as:
var etrade = require('../../lib/etrade');
More on how Node.js require resolves files:
require(X) from module at path Y
1. If X is a core module,
   a. return the core module
   b. STOP
2. If X begins with './' or '/' or '../'
   a. LOAD_AS_FILE(Y + X)
   b. LOAD_AS_DIRECTORY(Y + X)
3. LOAD_NODE_MODULES(X, dirname(Y))
4. THROW "not found"

LOAD_AS_FILE(X)
1. If X is a file, load X as JavaScript text. STOP
2. If X.js is a file, load X.js as JavaScript text. STOP
3. If X.json is a file, parse X.json to a JavaScript Object. STOP
4. If X.node is a file, load X.node as binary addon. STOP

LOAD_AS_DIRECTORY(X)
1. If X/package.json is a file,
   a. Parse X/package.json, and look for "main" field.
   b. let M = X + (json main field)
   c. LOAD_AS_FILE(M)
2. If X/index.js is a file, load X/index.js as JavaScript text. STOP
3. If X/index.json is a file, parse X/index.json to a JavaScript object. STOP
4. If X/index.node is a file, load X/index.node as binary addon. STOP

LOAD_NODE_MODULES(X, START)
1. let DIRS = NODE_MODULES_PATHS(START)
2. for each DIR in DIRS:
   a. LOAD_AS_FILE(DIR/X)
   b. LOAD_AS_DIRECTORY(DIR/X)

NODE_MODULES_PATHS(START)
1. let PARTS = path split(START)
2. let I = count of PARTS - 1
3. let DIRS = []
4. while I >= 0,
   a. if PARTS[I] = "node_modules" CONTINUE
   b. DIR = path join(PARTS[0 .. I] + "node_modules")
   c. DIRS = DIRS + DIR
   d. let I = I - 1
5. return DIRS
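Applied to the question above: index.js lives in root/routes, so it has to climb one level before descending into lib. A minimal sketch (assuming the layout from the question; the render call mirrors part B):
// root/routes/index.js — one '../' climbs out of routes/ into the root
var express = require('express');
var etrade = require('../lib/etrade'); // instead of './lib/etrade'
var router = express.Router();

/* GET home page. */
router.get('/', function(req, res) {
  res.render('index', { authLink: global.ETauthUrl });
});

module.exports = router;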

Related

Add All Routes in ./routes to Middleware Stack

Right now I am using app.use() and require() for each route in my routes directory to add them to the middleware stack (I am using Express).
app.use('/', require('./routes/index'));
app.use('/users', require('./routes/users'));
app.use('/post', require('./routes/post'));
app.use('/submitPost', require('./routes/submitPost'));
...
Instead of doing this manually for each file, I would like to use a for-loop to iterate through the route files in ./routes and add each file to the middleware stack. This is what I have, but it isn't working:
require('fs').readdir('/routes', function (err, files) {
  if (!err) {
    for (var i = 0; i < files.length; i++) {
      var file = files[i].substr(files[i].lastIndexOf('.'));
      app.use('/' + file, require('./routes/' + file));
    }
  }
});
Could someone help me correct this bit of code? On another note, are there any disadvantages to automatically adding all routes in ./routes to the middleware stack?
Thanks in advance.
The main issue here is probably when you are adding the middleware. You are using readdir - the asynchronous method. You likely have a catch-all 404 handler declared after your code, and as the routes you are requiring are added asynchronously, they will probably be added after the catch-all. When the request propagates through the middleware, this would terminate it before it even got to the route.
One other issue is the path you are using: /routes will attempt to look at the root of your filesystem. ./routes or __dirname + '/routes' is probably what you want.
The following code sample works for me:
var files = require('fs').readdirSync('./routes')
for (var i = 0; i < files.length; i++) {
  var file = files[i].substr(0, files[i].lastIndexOf('.'));
  app.use('/' + file, require('./routes/' + file));
}
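If you'd rather not depend on the working directory at all, the same loop can be anchored to __dirname (a small variation on the sample above; the stripped extension is reused as both mount path and module name):
var path = require('path');
var routesDir = path.join(__dirname, 'routes');

require('fs').readdirSync(routesDir).forEach(function (file) {
  var name = file.substr(0, file.lastIndexOf('.'));
  // require() accepts absolute paths, so this works from any cwd
  app.use('/' + name, require(path.join(routesDir, name)));
});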
By the way, you can use file-manifest for this. It was actually created specifically for this use case, although it still expects you to call app.use yourself, since order matters for express routes.
So you can do something like:
var fm = require('file-manifest');
var routes = fm.generate('./routes');
app.use('/', routes.home);
app.use('/foo', routes.foo);
// etc.
If you really want it all to happen magically, you could make that work with a custom reduce function, but this is much more explicit and ensures that routes are set up in the right order (so you don't end up with /foo registered before /foo/bar and preventing it from being reached).
I believe I am supposed to qualify that I wrote this library.
There are a few ways to do this. Here's a clean implementation using the basic fs and path modules.
var fs = require("fs"),
path = require("path");
var root = "./routes/"
fs.readdir(root, function (err, files) {
if (err) {
throw err;
}
files.forEach(function (file) {
var filename = file.slice(0, -3);
var routePath = '/' + ((filename === 'index') ? '' : filename); //filter index to use just '/'
app.use(routepath, require(root + filename));
});
});

Gulp, livereload, jade

Need help.
I use gulp-connect and its livereload method, but if I build several templates at a time I get a lot of page refreshes. Is there a solution? I want to build several templates with a single page refresh.
So, I reproduced the problem you have and came across this working solution.
First, let's check the gulp plugins you need:
gulp-jade
gulp-livereload
optional: gulp-load-plugins
In case you need some of them go to:
http://gulpjs.com/plugins/
Search for them and install them.
Strategy: I created a gulp task called live that will watch your *.jade files; as you work on a certain file and save it, gulp will compile it into html and refresh the browser.
In order to accomplish that, we define a function called compileAndRefresh that takes the file returned by the watcher, compiles it into html, and then refreshes the browser (tested with the livereload plugin for Chrome).
Notes:
I always use gulp-load-plugins to load plugins, so that's why I use plugins.jade and plugins.livereload.
This will only compile files that are saved while you have the live task executing on the command line; it will not compile other files that are not in use. To compile everything, you need a task that compiles all files, not only the ones that have changed (see the sketch after the gulpfile below).
Assume .jade files in /jade and html output to /html
So, here is the gulpfile.js:
var gulp = require('gulp'),
    gulpLoadPlugins = require('gulp-load-plugins'),
    plugins = gulpLoadPlugins();

gulp.task('webserver', function() {
  gulp.src('./html')
    .pipe(plugins.webserver({
      livereload: true
    }));

  gulp.watch('./jade/*.jade', function(event) {
    compileAndRefresh(event.path);
  });
});

function compileAndRefresh(file) {
  gulp.src(file)
    .pipe(plugins.jade({
    }))
    .pipe(gulp.dest('./html'))
}
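For completeness, a task that rebuilds every template at once might look like this (a minimal sketch, assuming the same /jade source and /html output directories as above; the task name build-all is arbitrary):
// compile every .jade file in one pass, e.g. before starting the webserver
gulp.task('build-all', function() {
  return gulp.src('./jade/*.jade')
    .pipe(plugins.jade({}))
    .pipe(gulp.dest('./html'));
});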
Post edit notes:
Removed liveReload call from compileAndRefresh (webserver will do that).
Use the gulp-webserver plugin instead of gulp-connect, as they suggest on their repository: "New plugin based on connect 3 using the gulp.src() API. Written in plain javascript. https://github.com/schickling/gulp-webserver"
Something you can do is watch only files that change, and then apply a function only to those files, something like this:
gulp.task('live', function() {
  gulp.watch('templates/folder', function(event) {
    refresh_templates(event.path);
  });
});

function refresh_templates(file) {
  return gulp.src(file)            // keep gulp.src on the same line as return,
    .pipe(plugins.embedlr())       // otherwise ASI makes the function return undefined
    .pipe(plugins.livereload());
}
PS: this is not a working example, and I don't know if you are using embedlr, but the point is that you can watch, and use a callback to call another function with the files that are changing, and then manipulate only those files. Also, I assumed that your goal is to refresh the templates for your browser, but you can manipulate them as you like, save them to dest, or do whatever you want.
The key point here is showing how to manipulate the files that change: the watch callback plus a custom function.
// Excerpt: assumes `loc` holds source/output paths, `json_array` holds template
// locals, and that gulp-changed, gulp-jade and gulp-connect are required above.
var jadeTask = function(path) {
  path = path || loc.jade + '/*.jade';
  if (/source/.test(path)) {
    path = loc.jade + '/**/*.jade';
  }
  return gulp.src(path)
    .pipe(changed(loc.markup, {extension: '.html'}))
    .pipe(jade({
      locals : json_array,
      pretty : true
    }))
    .pipe(gulp.dest(loc.markup))
    .pipe(connect.reload());
}
First, install the required plugins:
gulp
express
gulp-jade
connect-livereload
tiny-lr
connect
Then write the code:
var gulp = require('gulp');
var express = require('express');
var path = require('path');
var connect = require("connect");
var jade = require('gulp-jade');
var app = express();

gulp.task('express', function() {
  app.use(require('connect-livereload')({port: 8002}));
  app.use(express.static(path.join(__dirname, '/dist')));
  app.listen(8000);
});

var tinylr;
gulp.task('livereload', function() {
  tinylr = require('tiny-lr')();
  tinylr.listen(8002);
});

function notifyLiveReload(event) {
  var fileName = require('path').relative(__dirname, event.path);
  tinylr.changed({
    body: {
      files: [fileName]
    }
  });
}

gulp.task('jade', function(){
  gulp.src('src/*.jade')
    .pipe(jade())
    .pipe(gulp.dest('dist'))
});

gulp.task('watch', function() {
  gulp.watch('dist/*.html', notifyLiveReload);
  gulp.watch('src/*.jade', ['jade']);
});

gulp.task('default', ['livereload', 'express', 'watch', 'jade'], function() {
});
Find the example here at GitHub.

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
  saveFile: function(blob, name, path, encoding) {
    var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
        name = cleanName(name || 'file'), encoding = encoding || 'binary',
        chroot = Meteor.chroot || 'public';
    // Clean up the path. Remove any initial and final '/' -we prefix them-,
    // any sort of attempt to go to the parent directory '..' and any empty directories in
    // between '/////' - which may happen after removing '..'
    path = chroot + (path ? '/' + path + '/' : '/');

    // TODO Add file existence checks, etc...
    fs.writeFile(path + name, blob, encoding, function(err) {
      if (err) {
        throw (new Meteor.Error(500, 'Failed to save file.', err));
      } else {
        console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
      }
    });

    function cleanPath(str) {
      if (str) {
        return str.replace(/\.\./g,'').replace(/\/+/g,'').
               replace(/^\/+/,'').replace(/\/+$/,'');
      }
    }
    function cleanName(str) {
      return str.replace(/\.\./g,'').replace(/\//g,'');
    }
  }
});
Which I took from this project
https://gist.github.com/dariocravero/3922137
The code works fine and it saves the file; however, the call repeats several times, and each time it causes Meteor to reset (using Windows version 0.5.4). The F12 console fills with repeated 503 errors (screenshot omitted), and the Meteor console loops over the startup code each time the 503 happens, repeating the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then shows as broken, then shows a valid thumbnail again, as if fs is writing the file multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor has built-in automatic directory scanning that watches for file changes (in order to relaunch the application on the newest code base), the file you are creating is actually causing the server reset.
Meteor doesn't scan directories beginning with a dot (so called "hidden" directories) such as .git for example, so you could use this behaviour to your advantage by setting the path of your files to a .directory of your own.
You should also consider using writeFileSync, insofar as Meteor methods are intended to run synchronously (inside node fibers), contrary to the usual node way of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback.
asynchronousCall(function(error, result){
  if (error) {
    // handle error
  }
  else {
    // do something with result
    Collection.update(id, result); // error! Meteor code must run inside a fiber
  }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/future, but that's beyond the point of this question: I recommend reading the EventedMind episode on node futures to understand this specific area.
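Applied to the method above, the synchronous write could look like this (a minimal sketch; error handling kept deliberately simple):
// inside the saveFile method, replacing the fs.writeFile call
try {
  fs.writeFileSync(path + name, blob, encoding);
  console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
} catch (err) {
  throw new Meteor.Error(500, 'Failed to save file.', err);
}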

Multiple View paths on Node.js + Express

I'm writing a CMS on Node.js with the Express framework. In my CMS I have several modules for users, pages, etc.
I want each module to have its files in a separate folder, including the view files.
Anyone know how I can achieve that?
I'm using swig as my template engine, but I can replace it with something else if that helps.
Last Update
The multiple view folders feature is supported by the framework since Express 4.10
Just pass an array of locations to the views property, like so.
app.set('views', [__dirname + '/viewsFolder1', __dirname + '/viewsFolder2']);
Express 2.0
As far as I know, express doesn't support multiple view paths or namespaces at the moment (like the static middleware does).
But you can modify the lookup logic yourself so that it works the way you want, for example:
function enableMultipleViewFolders(express) {
  // proxy function to the default view lookup
  var lookupProxy = express.view.lookup;

  express.view.lookup = function (view, options) {
    if (options.root instanceof Array) {
      // clone the options object
      var opts = {};
      for (var key in options) opts[key] = options[key];

      // loop through the paths and try to match the view
      var matchedView = null,
          roots = opts.root;
      for (var i = 0; i < roots.length; i++) {
        opts.root = roots[i];
        matchedView = lookupProxy.call(this, view, opts);
        if (matchedView.exists) break;
      }
      return matchedView;
    }
    return lookupProxy.call(express.view, view, options)
  };
}
You will enable the new logic by calling the function above and passing express as a parameter, and then you will be able to specify an array of views to the configuration:
var express = require('express');
enableMultipleViewFolders(express);
app.set('views', [__dirname + '/viewsFolder1', __dirname + '/viewsFolder2']);
Or, if you prefer, you can patch the framework directly (updating the view.js file inside it)
This should work in Express 2.x, not sure if it will with the new version (3.x)
UPDATE
Unfortunately, the above solution won't work in Express 3.x, since express.view is undefined there.
Another possible solution would be to proxy the response.render function and swap the views folder config until it gets a match:
var renderProxy = express.response.render;
express.response.render = function(){
  app.set('views', 'path/to/custom/views');
  try {
    return renderProxy.apply(this, arguments);
  }
  catch (e) {}
  app.set('views', 'path/to/default/views');
  return renderProxy.apply(this, arguments);
};
I've not tested it, and it feels very hacky to me anyway; unfortunately, this feature has been pushed back again:
https://github.com/visionmedia/express/pull/1186
UPDATE 2
This feature has been added in Express 4.10, since the following pull request has been merged:
https://github.com/strongloop/express/pull/2320
In addition to @user85461's answer: the require-view part did not work for me.
What I did: removed the path stuff and moved it all into a module I could require,
patch.ViewEnableMultiFolders.js (works with current express):
function ViewEnableMultiFolders(app) {
  // Monkey-patch express to accept multiple paths for looking up views.
  // this path may change depending on your setup.
  var lookup_proxy = app.get('view').prototype.lookup;

  app.get('view').prototype.lookup = function(viewName) {
    var context, match;
    if (this.root instanceof Array) {
      for (var i = 0; i < this.root.length; i++) {
        context = {root: this.root[i]};
        match = lookup_proxy.call(context, viewName);
        if (match) {
          return match;
        }
      }
      return null;
    }
    return lookup_proxy.call(this, viewName);
  };
}

module.exports.ViewEnableMultiFolders = ViewEnableMultiFolders;
and used:
var Patch = require('./patch.ViewEnableMultiFolders.js');
Patch.ViewEnableMultiFolders(app);
app.set('views', ['./htdocs/views', './htdocs/tpls']);
Here's a solution for Express 3.x. It monkey-patches express 3.x's "View" object to do the same lookup trick as #ShadowCloud's solution above. Unfortunately, the path lookup for the View object is less clean, since 3.x doesn't expose it to express -- so you have to dig into the bowels of node_modules.
function enable_multiple_view_folders() {
  // Monkey-patch express to accept multiple paths for looking up views.
  // this path may change depending on your setup.
  var View = require("./node_modules/express/lib/view"),
      lookup_proxy = View.prototype.lookup;

  View.prototype.lookup = function(viewName) {
    var context, match;
    if (this.root instanceof Array) {
      for (var i = 0; i < this.root.length; i++) {
        context = {root: this.root[i]};
        match = lookup_proxy.call(context, viewName);
        if (match) {
          return match;
        }
      }
      return null;
    }
    return lookup_proxy.call(this, viewName);
  };
}

enable_multiple_view_folders();
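With the patch applied, usage mirrors ShadowCloud's 2.x version (a sketch; the folder names are placeholders):
// after calling enable_multiple_view_folders():
app.set('views', [__dirname + '/viewsFolder1', __dirname + '/viewsFolder2']);

app.get('/', function(req, res) {
  res.render('home'); // 'home' is looked up in each folder, in order
});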
You can, however, put all the view files inside the 'views' folder, but separate each module's views into its own folder inside 'views'. So the structure is something like this:
views
--moduleA
--moduleB
----submoduleB1
----submoduleB2
--moduleC
Set the views directory as usual:
app.set('views', './views');
And when rendering for each module, include the module's name:
res.render('moduleA/index', ...);
or even the submodule's name:
res.render('moduleB/submoduleB1/index', ...);
This solution also works in express before version 4.x.
Install glob: npm install glob
If you have a views directory that looks something like:
views
├── 404.ejs
├── home.ejs
├── includes
│   ├── header.ejs
│   └── footer.ejs
├── post
│   ├── create.ejs
│   └── edit.ejs
└── profile.ejs
You can use this glob function to return an array of subdirectories in the views directory (the path.substring call removes the trailing /):
let viewPaths = glob.sync('views/**/').map(path => {
  return path.substring(0, path.length - 1)
})

console.log(viewPaths)
>> ['views', 'views/post', 'views/includes']
So now you can set
app.set('views', viewPaths)
and now you can use
res.render('404')
res.render('home')
res.render('post/edit')
res.render('post/create')

node.js require all files in a folder?

How do I require all files in a folder in node.js?
I need something like:
files.forEach(function (v, k) {
  // require routes
  require('./routes/' + v);
});
When require is given the path of a folder, it'll first check that folder's package.json for a "main" field and, failing that, look for an index.js file; if neither is present, it fails.
It would probably make most sense (if you have control over the folder) to create an index.js file and then assign all the "modules" and then simply require that.
yourfile.js
var routes = require("./routes");
index.js
exports.something = require("./routes/something.js");
exports.others = require("./routes/others.js");
If you don't know the filenames you should write some kind of loader.
Working example of a loader:
var normalizedPath = require("path").join(__dirname, "routes");
require("fs").readdirSync(normalizedPath).forEach(function(file) {
require("./routes/" + file);
});
// Continue application logic here
I recommend using glob to accomplish that task.
var glob = require( 'glob' )
  , path = require( 'path' );

glob.sync( './routes/**/*.js' ).forEach( function( file ) {
  require( path.resolve( file ) );
});
Based on @tbranyen's solution, I create an index.js file that loads arbitrary JavaScript files under the current folder as part of the exports.
// Load `*.js` under current directory as properties
// i.e., `User.js` will become `exports['User']` or `exports.User`
require('fs').readdirSync(__dirname + '/').forEach(function(file) {
  if (file.match(/\.js$/) !== null && file !== 'index.js') {
    var name = file.replace('.js', '');
    exports[name] = require('./' + file);
  }
});
Then you can require this directory from anywhere else.
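For example, if the folder above is routes/, usage from a sibling file might look like this (a sketch; the User name assumes a routes/User.js exists):
var routes = require('./routes'); // picks up routes/index.js
var user = routes.User;           // the exports of routes/User.js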
Another option is to use the package require-dir, which lets you do the following. It supports recursion as well.
var requireDir = require('require-dir');
var dir = requireDir('./path/to/dir');
I have a folder /fields full of files each containing a single class, e.g.:
fields/Text.js -> Text class
fields/Checkbox.js -> Checkbox class
Drop this in fields/index.js to export each class:
var collectExports, fs, path,
    __hasProp = {}.hasOwnProperty;

fs = require('fs');
path = require('path');

collectExports = function(file) {
  var func, include, _results;
  if (path.extname(file) === '.js' && file !== 'index.js') {
    include = require('./' + file);
    _results = [];
    for (func in include) {
      if (!__hasProp.call(include, func)) continue;
      _results.push(exports[func] = include[func]);
    }
    return _results;
  }
};

fs.readdirSync('./fields/').forEach(collectExports);
This makes the modules act more like they would in Python:
var text = new Fields.Text()
var checkbox = new Fields.Checkbox()
One more option is require-dir-all, combining features from the most popular packages.
The most popular, require-dir, does not have options to filter the files/dirs and does not have a map function (see below), but uses a small trick to find the module's current path.
Second by popularity, require-all, has regexp filtering and preprocessing, but lacks relative paths, so you need to use __dirname (this has pros and cons) like:
var libs = require('require-all')(__dirname + '/lib');
Also mentioned here, require-index is quite minimalistic.
With map you may do some preprocessing, like creating objects and passing config values (assuming the modules below export constructors):
// Store config for each module in config object properties
// with property names corresponding to module names
var config = {
  module1: { value: 'config1' },
  module2: { value: 'config2' }
};

// Require all files in modules subdirectory
var modules = require('require-dir-all')(
  'modules', // Directory to require
  {          // Options
    // function to be post-processed over exported object for each require'd module
    map: function(reqModule) {
      // create new object with corresponding config passed to constructor
      reqModule.exports = new reqModule.exports( config[reqModule.name] );
    }
  }
);

// Now `modules` object holds not exported constructors,
// but objects constructed using values provided in `config`.
I know this question is 5+ years old, and the given answers are good, but I wanted something a bit more powerful for express, so I created the express-map2 package for npm. I was going to name it simply express-map, however the people at yahoo already have a package with that name, so I had to rename my package.
1. basic usage:
app.js (or whatever you call it)
var app = require('express')(); // 1. include express and create the app
app.set('controllers', __dirname + '/controllers/'); // 2. set path to your controllers.
require('express-map2')(app); // 3. patch map() into express

app.map({
  'GET /': 'test',
  'GET /foo': 'middleware.foo,test',
  'GET /bar': 'middleware.bar,test' // separate your handlers with a comma.
});
controller usage:
// single function
module.exports = function(req, res) {
};

// export an object with multiple functions.
module.exports = {
  foo: function(req, res) {
  },
  bar: function(req, res) {
  }
};
2. advanced usage, with prefixes:
app.map('/api/v1/books', {
  'GET /': 'books.list',         // GET /api/v1/books
  'GET /:id': 'books.loadOne',   // GET /api/v1/books/5
  'DELETE /:id': 'books.delete', // DELETE /api/v1/books/5
  'PUT /:id': 'books.update',    // PUT /api/v1/books/5
  'POST /': 'books.create'       // POST /api/v1/books
});
As you can see, this saves a ton of time and makes the routing of your application dead simple to write, maintain, and understand. It supports all of the http verbs that express supports, as well as the special .all() method.
npm package: https://www.npmjs.com/package/express-map2
github repo: https://github.com/r3wt/express-map
Expanding on the glob solution: do this if you want to import all modules from a directory into index.js and then import that index.js in another part of the application. Note that template literals aren't supported by the highlighting engine used by Stack Overflow, so the code might look strange here.
const glob = require("glob");
let allOfThem = {};
glob.sync(`${__dirname}/*.js`).forEach((file) => {
/* see note about this in example below */
allOfThem = { ...allOfThem, ...require(file) };
});
module.exports = allOfThem;
Full Example
Directory structure
globExample/example.js
globExample/foobars/index.js
globExample/foobars/unexpected.js
globExample/foobars/barit.js
globExample/foobars/fooit.js
globExample/example.js
const { foo, bar, keepit } = require('./foobars/index');
const longStyle = require('./foobars/index');
console.log(foo()); // foo ran
console.log(bar()); // bar ran
console.log(keepit()); // keepit ran unexpected
console.log(longStyle.foo()); // foo ran
console.log(longStyle.bar()); // bar ran
console.log(longStyle.keepit()); // keepit ran unexpected
globExample/foobars/index.js
const glob = require("glob");
/*
Note the following style also works with multiple exports per file (barit.js example)
but will overwrite if you have 2 exports with the same
name (unexpected.js and barit.js have a keepit function) in the files being imported. As a result, this method is best used when
your exporting one module per file and use the filename to easily identify what is in it.
Also Note: This ignores itself (index.js) by default to prevent infinite loop.
*/
let allOfThem = {};
glob.sync(`${__dirname}/*.js`).forEach((file) => {
allOfThem = { ...allOfThem, ...require(file) };
});
module.exports = allOfThem;
globExample/foobars/unexpected.js
exports.keepit = () => 'keepit ran unexpected';
globExample/foobars/barit.js
exports.bar = () => 'bar run';
exports.keepit = () => 'keepit ran';
globExample/foobars/fooit.js
exports.foo = () => 'foo ran';
From inside the project, with glob installed, run node example.js:
$ node example.js
foo ran
bar run
keepit ran unexpected
foo ran
bar run
keepit ran unexpected
One module that I have been using for this exact use case is require-all.
It recursively requires all files in a given directory and its subdirectories, as long as they don't match the excludeDirs property.
It also allows specifying a file filter and how to derive the keys of the returned hash from the filenames.
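A typical invocation might look like this (a sketch based on require-all's documented options; the directory name and filter regexp are assumptions for illustration):
var controllers = require('require-all')({
  dirname: __dirname + '/controllers', // directory to walk recursively
  filter: /(.+Controller)\.js$/,       // only files ending in Controller.js;
                                       // the capture group becomes the hash key
  excludeDirs: /^\.(git|svn)$/         // skip VCS folders
});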
Require all files from the routes folder and apply them as middleware. No external modules needed.
// require
const { readdirSync } = require("fs");
// apply as middleware
readdirSync("./routes").map((r) => app.use("/api", require("./routes/" + r)));
I'm using the copy-to module to create a single file that requires all the files in our NodeJS-based system.
The code for our utility file looks like this:
/**
 * Module dependencies.
 */
var copy = require('copy-to');

copy(require('./module1'))
  .and(require('./module2'))
  .and(require('./module3'))
  .to(module.exports);
In all of the files, most functions are written as exports, like so:
exports.function1 = function () { /* function contents */ };
exports.function2 = function () { /* function contents */ };
exports.function3 = function () { /* function contents */ };
So, then to use any function from a file, you just call:
var utility = require('./utility');
var response = utility.function2(); // or whatever the name of the function is
You can use: https://www.npmjs.com/package/require-file-directory
Require selected files by name only, or all files.
No need for absolute paths.
Easy to understand and use.
Using this function you can require a whole directory.
const PATH = require("path"); // needed for PATH.extname below

const GetAllModules = (dirname) => {
  if (dirname) {
    let dirItems = require("fs").readdirSync(dirname);
    return dirItems.reduce((acc, value, index) => {
      if (PATH.extname(value) == ".js" && value.toLowerCase() != "index.js") {
        let moduleName = value.replace(/\.js$/, ''); // escape the dot, anchor at the end
        acc[moduleName] = require(`${dirname}/${moduleName}`);
      }
      return acc;
    }, {});
  }
}

// calling this function.
let dirModules = GetAllModules(__dirname);
Create an index.js file in your folder with this code:
const fs = require('fs')

// read the folder this index.js lives in, skipping index.js itself
const files = fs.readdirSync(__dirname)
for (const file of files) {
  if (file !== 'index.js') require('./' + file)
}
And after that you can simply load all the folder with require("./routes")
To include all *.js files in a directory (for example, app/lib/*.js):
In directory app/lib:
example.js:
module.exports = function (example) { }
example-2.js:
module.exports = function (example2) { }
In directory app, create index.js:
index.js:
module.exports = require('./lib'); // relative to app/; lib needs its own index.js re-exporting its files