What is the best way to expose methods from Node.js?

Suppose I want to expose a method called printNow.
Binding the method as a prototype method:
File saved as Printer.js:
var printerObj = function(isPrinted) {
this.printed = isPrinted;
}
printerObj.prototype.printNow = function(printData) {
console.log('= Print Started =');
};
module.exports = printerObj;
Then access printNow() by putting require('./Printer.js').printNow() in any external Node.js file.
Exporting the method itself using module.exports:
File Saved as Printer2.js
var printed = false;
function printNow() {
console.log('= Print Started =');
}
module.exports.printNow = printNow;
Then access printNow() by putting require('./Printer2.js').printNow() in any external Node.js file.
Can anyone tell me what the difference is, and which is the better way of doing it in Node.js?

Definitely the first way. It is called the substack pattern and you can read about it on Twitter and on Mikeal Rogers' blog. Some code examples can be found at the jade github repo in the parser:
var Parser = exports = module.exports = function Parser(str, filename, options){
    this.input = str;
    this.lexer = new Lexer(str, options);
    ...
};

Parser.prototype = {
    context: function(parser){
        if (parser) {
            this.contexts.push(parser);
        } else {
            return this.contexts.pop();
        }
    },

    advance: function(){
        return this.lexer.advance();
    }
};

In the first example you are creating a class; ideally you should use it with "new" in your calling program:
var PrinterObj = require('./Printer.js');
var printer = new PrinterObj(false);
printer.printNow();
This is a good read on the subject: http://www.2ality.com/2012/01/js-inheritance-by-example.html
In the second example you are exporting a plain function on the exports object.
The difference is that you can have multiple instances of the first approach (provided you use new as indicated), but only one shared instance of the second approach, because require caches the module.
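To see the difference concretely, here is a short sketch (assuming the Printer.js and Printer2.js files from the question):

var PrinterObj = require('./Printer.js');
var a = new PrinterObj(true);
var b = new PrinterObj(false);   // two independent instances, each with its own 'printed' state

var printer2 = require('./Printer2.js');
var samePrinter2 = require('./Printer2.js');
console.log(printer2 === samePrinter2); // true - the module is cached, so there is effectively one shared instance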


NodeJS/Express share function between multiple routes files [duplicate]

Let's say I have a file called app.js. Pretty simple:
var express = require('express');
var app = express.createServer();
app.set('views', __dirname + '/views');
app.set('view engine', 'ejs');
app.get('/', function(req, res){
res.render('index', {locals: {
title: 'NowJS + Express Example'
}});
});
app.listen(8080);
What if I have some functions inside "tools.js"? How would I import them to use in app.js?
Or... am I supposed to turn "tools" into a module and then require it? That seems hard; I'd rather do a basic import of the tools.js file.
You can require any JS file; you just need to declare what you want to expose.
// tools.js
// ========
module.exports = {
foo: function () {
// whatever
},
bar: function () {
// whatever
}
};
var zemba = function () {
}
And in your app file:
// app.js
// ======
var tools = require('./tools');
console.log(typeof tools.foo); // => 'function'
console.log(typeof tools.bar); // => 'function'
console.log(typeof tools.zemba); // => undefined
If, despite all the other answers, you still want to traditionally include a file in a node.js source file, you can use this:
var fs = require('fs');
// file is included here:
eval(fs.readFileSync('tools.js')+'');
The empty string concatenation +'' is necessary to get the file content as a string and not an object (you can also use .toString() if you prefer).
The eval() can't be used inside a function and must be called inside the global scope otherwise no functions or variables will be accessible (i.e. you can't create a include() utility function or something like that).
Please note that in most cases this is bad practice and you should instead write a module. However, there are rare situations, where pollution of your local context/namespace is what you really want.
Update 2015-08-06
Please also note this won't work with "use strict"; (when you are in "strict mode") because functions and variables defined in the "imported" file can't be accessed by the code that does the import. Strict mode enforces some rules defined by newer versions of the language standard. This may be another reason to avoid the solution described here.
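A minimal sketch of that limitation, assuming tools.js defines a function sum(a, b):

"use strict";
var fs = require('fs');
eval(fs.readFileSync('tools.js') + '');
sum(1, 2); // ReferenceError: sum is not defined - in strict mode the eval'd declarations stay in their own scope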
You need no new functions and no new modules.
You simply need to execute the module you're requiring, if you don't want to use a namespace.
in tools.js
module.exports = function() {
this.sum = function(a,b) { return a+b };
this.multiply = function(a,b) { return a*b };
//etc
}
In app.js, or in any other .js file like myController.js:
Instead of
var tools = require('./tools.js'); which forces us to use a namespace and call tools like tools.sum(1,2);
we can simply call
require('./tools.js')();
and then
sum(1,2);
In my case I have a file with controllers, ctrls.js:
module.exports = function() {
    this.Categories = require('./categories.js');
};
and I can use Categories in every context as a public class after require('./ctrls.js')().
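For example (a sketch of the idea above; categories.js is assumed to export an object with, say, a list() method, which is hypothetical):

// anywhere, e.g. myController.js
require('./ctrls.js')();  // attaches Categories to the global object via 'this' (non-strict mode)
Categories.list();        // hypothetical method exported by categories.js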
Create two js files
// File cal.js
module.exports = {
sum: function(a,b) {
return a+b
},
multiply: function(a,b) {
return a*b
}
};
Main js file
// File app.js
var tools = require("./cal.js");
var value = tools.sum(10,20);
console.log("Value: "+value);
Console Output
Value: 30
Create two files, e.g. app.js and tools.js
app.js
const tools= require("./tools.js")
var x = tools.add(4,2) ;
var y = tools.subtract(4,2);
console.log(x);
console.log(y);
tools.js
const add = function(x, y){
return x+y;
}
const subtract = function(x, y){
return x-y;
}
module.exports ={
add,subtract
}
output
6
2
Here is a plain and simple explanation:
Server.js content:
// Include the public functions from 'helpers.js'
var helpers = require('./helpers');
// Let's assume this is the data which comes from the database or somewhere else
var databaseName = 'Walter';
var databaseSurname = 'Heisenberg';
// Use the function from 'helpers.js' in the main file, which is server.js
var fullname = helpers.concatenateNames(databaseName, databaseSurname);
Helpers.js content:
// 'module.exports' is a node.JS specific feature, it does not work with regular JavaScript
module.exports =
{
// This is the function which will be called in the main file, which is server.js
// The parameters 'name' and 'surname' will be provided inside the function
// when the function is called in the main file.
// Example: concatenateNames('John', 'Doe');
concatenateNames: function (name, surname)
{
var wholeName = name + " " + surname;
return wholeName;
},
sampleFunctionTwo: function ()
{
}
};
// Private variables and functions which will not be accessible outside this file
var privateFunction = function ()
{
};
I was also looking for a NodeJS 'include' function and I checked the solution proposed by Udo G - see message https://stackoverflow.com/a/8744519/2979590. His code doesn't work with my included JS files.
Finally, I solved the problem like this:
var fs = require("fs");
function read(f) {
return fs.readFileSync(f).toString();
}
function include(f) {
eval.apply(global, [read(f)]);
}
include('somefile_with_some_declarations.js');
Sure, that helps.
Create two JavaScript files. E.g. import_functions.js and main.js
1.) import_functions.js
// Declaration --------------------------------------
module.exports =
{
add,
subtract
// ...
}
// Implementation ----------------------------------
function add(x, y)
{
return x + y;
}
function subtract(x, y)
{
return x - y;
}
// ...
2.) main.js
// include ---------------------------------------
const sf= require("./import_functions.js")
// use -------------------------------------------
var x = sf.add(4,2);
console.log(x);
var y = sf.subtract(4,2);
console.log(y);
output
6
2
The vm module in Node.js provides the ability to execute JavaScript code within the current context (including global object). See http://nodejs.org/docs/latest/api/vm.html#vm_vm_runinthiscontext_code_filename
Note that, as of today, there's a bug in the vm module that prevents runInThisContext from doing the right thing when invoked from a new context. This only matters if your main program executes code within a new context and then that code calls runInThisContext. See https://github.com/joyent/node/issues/898
Sadly, the with(global) approach that Fernando suggested doesn't work for named functions like "function foo() {}"
In short, here's an include() function that works for me:
var fs = require('fs');
var vm = require('vm');

function include(path) {
    var code = fs.readFileSync(path, 'utf-8');
    vm.runInThisContext(code, path);
}
Say we want to call the functions ping() and add(30, 20), which are in the lib.js file, from main.js.
main.js
var lib = require("./lib.js");
output = lib.ping();
console.log(output);
//Passing Parameters
console.log("Sum of A and B = " + lib.add(20,30))
lib.js
this.ping=function ()
{
return "Ping Success"
}
//Functions with parameters
this.add=function(a,b)
{
return a+b
}
Udo G. said:
The eval() can't be used inside a function and must be called inside
the global scope otherwise no functions or variables will be
accessible (i.e. you can't create a include() utility function or
something like that).
He's right, but there's a way to affect the global scope from a function. Improving his example:
function include(file_) {
with (global) {
eval(fs.readFileSync(file_) + '');
};
};
include('somefile_with_some_declarations.js');
// the declarations are now accessible here.
Hope that helps.
app.js
let { func_name } = require('path_to_tools.js');
func_name(); //function calling
tools.js
let func_name = function() {
...
//function body
...
};
module.exports = { func_name };
It worked for me like the following:
Lib1.js
//Any other private code here
// Code you want to export
exports.function1 = function(params) {.......};
exports.function2 = function(params) {.......};
// Again any private code
Now in the Main.js file you need to include Lib1.js:
var mylib = require('lib1.js');
mylib.function1(params);
mylib.function2(params);
Please remember to put Lib1.js in the node_modules folder.
Another way to do this, in my opinion, is to execute everything in the lib file when you call require(), by wrapping the file's contents in (function(/* things here */){})();. Doing this puts all these functions in the global scope, exactly like the eval() solution.
src/lib.js
(function () {
    funcOne = function() {
        console.log('mlt funcOne here');
    }
    funcThree = function(firstName) {
        console.log(firstName, 'calls funcThree here');
    }
    name = "Mulatinho";
    myobject = {
        title: 'Node.JS is cool',
        funcFour: function() {
            return console.log('internal funcFour() called here');
        }
    }
})();
And then in your main code you can call your functions by name like:
main.js
require('./src/lib')
funcOne();
funcThree('Alex');
console.log(name);
console.log(myobject);
console.log(myobject.funcFour());
This will produce the following output:
bash-3.2$ node -v
v7.2.1
bash-3.2$ node main.js
mlt funcOne here
Alex calls funcThree here
Mulatinho
{ title: 'Node.JS is cool', funcFour: [Function: funcFour] }
internal funcFour() called here
undefined
Pay attention to the undefined when you call myobject.funcFour(); it will be the same if you load with eval(). Hope it helps :)
You can put your functions in global variables, but it's better practice to just turn your tools script into a module. It's really not too hard – just attach your public API to the exports object. Take a look at Understanding Node.js' exports module for some more detail.
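A minimal sketch of that module approach (hypothetical file and function names):

// tools.js
exports.shout = function (s) { return s.toUpperCase(); };

// app.js
var tools = require('./tools');
console.log(tools.shout('hello')); // 'HELLO'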
I just want to add: in case you need only certain functions imported from your tools.js, you can use a destructuring assignment, which is supported in Node.js since version 6.4 - see node.green.
Example:
(both files are in the same folder)
tools.js
module.exports = {
sum: function(a,b) {
return a + b;
},
isEven: function(a) {
return a % 2 == 0;
}
};
main.js
const { isEven } = require('./tools.js');
console.log(isEven(10));
output: true
This also avoids assigning those functions as properties of another object, as is the case in the following (common) assignment:
const tools = require('./tools.js');
where you need to call tools.isEven(10).
NOTE:
Don't forget to prefix your file name with the correct path - even if both files are in the same folder, you need to prefix with ./
From Node.js docs:
Without a leading '/', './', or '../' to indicate a file, the module
must either be a core module or is loaded from a node_modules folder.
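In other words (a quick sketch of the difference):

require('./tools.js');  // resolved relative to the file doing the require
require('tools.js');    // looked up as a core module or inside node_modules - typically throws "Cannot find module 'tools.js'"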
Include a file and run it in a given (non-global) context
fileToInclude.js
define({
"data": "XYZ"
});
main.js
var fs = require("fs");
var vm = require("vm");
function include(path, context) {
var code = fs.readFileSync(path, 'utf-8');
vm.runInContext(code, vm.createContext(context));
}
// Include file
var customContext = {
"define": function (data) {
console.log(data);
}
};
include('./fileToInclude.js', customContext);
Using the ESM module system:
a.js:
export default function foo() {};
export function bar() {};
b.js:
import foo, {bar} from './a.js';
This is the best way I have come up with so far.
var fs = require('fs'),
    includedFiles_ = {};

global.include = function (fileName) {
    var sys = require('sys');
    sys.puts('Loading file: ' + fileName);
    var ev = require(fileName);
    for (var prop in ev) {
        global[prop] = ev[prop];
    }
    includedFiles_[fileName] = true;
};

global.includeOnce = function (fileName) {
    if (!includedFiles_[fileName]) {
        include(fileName);
    }
};

global.includeFolderOnce = function (folder) {
    var file, fileName,
        sys = require('sys'),
        files = fs.readdirSync(folder);

    var getFileName = function(str) {
            var splited = str.split('.');
            splited.pop();
            return splited.join('.');
        },
        getExtension = function(str) {
            var splited = str.split('.');
            return splited[splited.length - 1];
        };

    for (var i = 0; i < files.length; i++) {
        file = files[i];
        if (getExtension(file) === 'js') {
            fileName = getFileName(file);
            try {
                includeOnce(folder + '/' + file);
            } catch (err) {
                // if (ext.vars) {
                //     console.log(ext.vars.dump(err));
                // } else {
                sys.puts(err);
                // }
            }
        }
    }
};
includeFolderOnce('./extensions');
includeOnce('./bin/Lara.js');
var lara = new Lara();
You still need to declare what you want to export:
includeOnce('./bin/WebServer.js');
function Lara() {
this.webServer = new WebServer();
this.webServer.start();
}
Lara.prototype.webServer = null;
module.exports.Lara = Lara;
You can simply just require('./filename').
E.g.:
// file: index.js
var express = require('express');
var app = express();
var child = require('./child');
app.use('/child', child);
app.get('/', function (req, res) {
res.send('parent');
});
app.listen(process.env.PORT, function () {
console.log('Example app listening on port '+process.env.PORT+'!');
});
// file: child.js
var express = require('express'),
child = express.Router();
console.log('child');
child.get('/child', function(req, res){
res.send('Child2');
});
child.get('/', function(req, res){
res.send('Child');
});
module.exports = child;
Please note that:
you can't listen on a PORT in the child file; only the parent express module has the PORT listener
the child is using 'Router', not the parent Express module.
Node works with CommonJS modules and, more recently, ESM modules. Basically, you should create modules in separate .js files and make use of exports and imports (module.exports and require).
JavaScript in the browser works differently, based on scope. There is the global scope, and through closures (functions inside other functions) you have private scopes.
So, in Node, export the functions and objects that you will consume in other modules.
The cleanest way IMO is the following. In tools.js:
function A(){
.
.
.
}
function B(){
.
.
.
}
module.exports = {
A,
B
}
Then, in app.js, just require tools.js as follows: const tools = require("./tools");
I was also searching for an option to include code without writing modules, or rather to reuse the same tested standalone sources from a different project for a Node.js service - and jmparatte's answer did it for me.
The benefit is that you don't pollute the namespace, I have no trouble with "use strict";, and it works well.
Here is a full sample:
Script to load - /lib/foo.js
"use strict";
(function(){
    var Foo = function(e){
        this.foo = e;
    }
    Foo.prototype.x = 1;
    return Foo;
}())
SampleModule - index.js
"use strict";
const fs = require('fs');
const path = require('path');
var SampleModule = module.exports = {
    instAFoo: function(){
        var Foo = eval.apply(
            this, [fs.readFileSync(path.join(__dirname, '/lib/foo.js')).toString()]
        );
        var instance = new Foo('bar');
        console.log(instance.foo); // 'bar'
        console.log(instance.x);   // '1'
    }
}
Hope this was helpful somehow.
Say you have a file abc.txt, and many more. Create two files, fileread.js and fetchingfile.js, then in fileread.js write this code:
var fs = require("fs"); // file system

function fileread(filename) {
    var contents = fs.readFileSync(filename);
    return contents;
}

module.exports.fileread = fileread;

//var data = fileread("abc.txt");
//data.say();
//console.log(data.toString());
In fetchingfile.js write this code:
function myerror(){
console.log("Hey need some help");
console.log("type file=abc.txt");
}
var ags = require("minimist")(process.argv.slice(2), { string: "file" });
if(ags.help || !ags.file) {
myerror();
process.exit(1);
}
var hello = require("./fileread.js");
var data = hello.fileread(ags.file); // importing module here
console.log(data.toString());
Now, in a terminal:
$ node fetchingfile.js --file=abc.txt
You are passing the file name as an argument; moreover, you could include all the files in fileread.js instead of passing the name around.
Thanks
Another method, when using the Node.js and Express.js frameworks:
var f1 = function(){
console.log("f1");
}
var f2 = function(){
console.log("f2");
}
module.exports = {
f1 : f1,
f2 : f2
}
Store this in a JS file named s.js in the statics folder.
Now to use the functions:
var s = require('../statics/s');
s.f1();
s.f2();
Turning "tools" into a module isn't hard at all. Despite all the other answers, I would still recommend using module.exports:
//util.js
module.exports = {
myFunction: function () {
// your logic in here
let message = "I am message from myFunction";
return message;
}
}
Now we need to require this module wherever we use it (in your app.js, index.js, or server.js):
var util = require('./util');
Now you can refer to and call the function as:
//util.myFunction();
console.log(util.myFunction()); // prints in console :I am message from myFunction
To interactively test the module ./test.js in a Unix environment, something like this could be used:
>> node -e "eval(''+require('fs').readFileSync('./test.js'))" -i
...
In app.js, use:
var mymodule = require("./tools.js")
and in tools.js:
module.exports.<your function> = function () {
    <what should the function do>
}

node.js module export how to use data among all modules?

I would like to use globals among all node modules, but only after they have been updated. How do I do that? The questions are in the code.
app.js
var data = 'data';
var global = require('glob.js')(data);
// here we are require your globals variables and we corectly 'set them'
console.log(globals.glob1);
// we can use them here
glob.js
module.exports = function(data)
{
var globs = {
glob1 : data.toLowerCase(),
glob2 : data.toUpperCase()
}
return globs;
}
mod.js
var global = require('glob.js'); // I require globals but they are not set...
function funct(someOtherData, someMoreData)
{
var test = global.glob1;
console.log(test);
// why I can't use globals here ? How can I use corectly set globals (globals need to be updated first - app.js, then ALL other modules should be able to use correctly set globals)?
}
module.export = funct;
For the answer scroll down to the TLDR section below but do read on to understand why.
Part1 - the difference between a function and a function call
Your first mistake is that you are exporting a function, not an object:
module.exports = function(data) // <---- this is a function
{
var globs = {
glob1 : data.toLowerCase(),
glob2 : data.toUpperCase()
}
return globs;
}
and in app.js you do this:
console.log(globs.glob1); <--- globs is a function, not an object
when you should be doing this:
console.log(globs().glob1);
Why is this? OK, let's forget about your module for a moment. Consider the following code:
var a = function(){ return 2 };
console.log(a); // do you expect this to print a function or 2?
console.log(a()); // what do you expect this to print?
This is a very basic rule about functions in all programming languages, not just javascript: to get the return value you need to call the function. So in your code:
function myExportedFunction (data) {
// some logic here...
return globs;
}
console.log(myExportedFunction); // prints a function
console.log(myExportedFunction()); // prints the globs object
console.log(myExportedFunction().glob1); // prints value of glob1
So it's really simple. There is no magic syntax going on. You've just forgotten to call the function and are using the function reference instead. Obviously the function has no glob1 property, so it's correct for it to be undefined.
Part2 - function local variables
OK. So let's say you made the changes I recommended above. There's an obvious problem with the way the function was written. What happens when you do this:
var glob = require('glob.js')();
console.log(glob.glob1); // <--- throws, because data is undefined
So the first problem is that you're not checking whether data was passed in, and every time you call the function you overwrite the stored values.
There's another problem: you return a different object every time you call the function. Let's look at how local variables work when returned:
function a () {
var data = {}
return data;
}
var x = a();
var y = a();
x.testing = 1;
y.testing = 2;
console.log(x.testing); // prints 1
console.log(y.testing); // prints 2
So, every time you call a function that creates a local variable you are returning a different object. Actually what's doing this is not really the variable but the object literal syntax:
var a = {};
// is basically the same as
var a = new Object();
If we change the above example to:
function a () {
return {};
}
it would still behave the same.
TLDR
So, how do we fix it? Simple: create the object outside of the function and check whether data was passed before overwriting it:
var globs = {
    glob1: "",
    glob2: ""
};

module.exports = function (data) {
    if (data !== undefined) {
        globs.glob1 = data.toLowerCase();
        globs.glob2 = data.toUpperCase();
    }
    return globs;
};
Now everything should work:
In app.js
var global = require('./glob.js')(data);
In mod.js
var global = require('./glob.js')();
Epilogue - modules are singletons
It may or may not be obvious to you why the above works; in case you already know why, this section is a reference for future readers.
In node.js modules are implemented as proper singletons. Therefore in node if you want a singleton all you need to do is write a module, you don't need to implement any special code for it.
What this means is that all module globals (module scoped variables) are shared amongst all requires. Here's a very simple module to share one variable amongst all modules:
shared.js
var x = "";
module.exports = {
set: function (val) {x=val},
get: function () {return x}
}
a.js
var shared = require('./shared');
shared.set("hello world");
b.js
var shared = require('./shared');
console.log(shared.get()); // prints "hello world"
We're using this feature to declare a shared glob variable in the code above.
You can use the global object to set global variables in Node.js, instead of var, for example:
app.js
var data = 'data';
var glob = require('./glob.js');
glob(data);
// here we require the globals module and correctly set the globals
console.log(global.gl.glob1);
var mod = require('./mod.js');
mod();
// we can use them here
glob.js
module.exports = function(data)
{
console.log("setting globals");
global.gl = {
glob1 : '1' + data,
glob2 : '2' + data
}
// return global.gl; // can be removed
}
mod.js
function funct(someOtherData, someMoreData)
{
var test = global.gl.glob1;
console.log(test);
test = global.gl.glob2;
console.log(test);
// why can't I use globals here? How can I use correctly set globals (globals need to be updated first in app.js, then ALL other modules should be able to use the correctly set globals)?
}
module.exports = funct;
As you can see in glob.js, I switched from var globs = to global.gl =, and then in mod.js used it as global.gl.
Running app.js outputs:
setting globals
1data // From app.js
1data // From mod.js imported in app.js
2data // From mod.js imported in app.js
There are 2 options:
Use nodejs global variable (not recommended)
Create shared module
You chose the 2nd option, but did it in a slightly wrong way by exporting a function. When you import the module and call the function, it always creates a new globs object and fills it with your data. Instead, you can export an object. Simple example:
glob.js
Global object is defined here
module.exports = {
glob1: '1',
glob2: '2'
};
mod.js
You can change global object here, like
var globs = require('./glob');
module.exports.updateGlob1 = function(data) {
globs.glob1 = data;
};
app.js
Here if you access global variable you can see it updated
var globs = require('./glob');
var mod = require('./mod');
mod.updateGlob1('1 plus 2');
console.log(globs.glob1); // Output: '1 plus 2'
There can be more complex examples; for the module design pattern, an IIFE is often used.
UPDATE
Another example using IIFE.
glob.js
module.exports = (function() {
    var glob1 = 'initial value';
    return {
        // Getter method
        getGlob1() {
            return glob1;
        },
        // Setter method
        setGlob1(value) {
            glob1 = value;
        }
    }
})();
mod.js
var shared = require('./glob');
module.exports.testFn = function() {
// Access global variable with getter method
console.log('In mod.js', shared.getGlob1());
};
app.js
var shared = require('./glob');
var mod = require('./mod');
// Print initial value
console.log('Initial', shared.getGlob1());
// Set new value to global variable
shared.setGlob1('new value');
// Print updated value
console.log('In app.js', shared.getGlob1());
// Use global variable in mod.js file
mod.testFn();

NodeJS requiring modules, module.exports.X vs module.exports={x}

I'm having trouble understanding the difference between exporting modules like:
module.exports.getUserIP = function getUserIP(req) {
var ip = req.headers['x-forwarded-for'];
return ip;
}
Or just declaring it:
function getUserIP(req) {
// retrieve user IP from req object
// Build this function to be more accurate/use more sources.
var ip = req.headers['x-forwarded-for'];
return ip;
}
and exporting at the bottom:
module.exports = { getUserIP }
or even:
module.exports = {getUserIP:getUserIP}
or
module.exports = {'getUserIP':getUserIP}
My problem is: when I call the function getUserIP from another file:
var mainbody = require('./app.js');//getUserIP is in here.
const gl = require('geoip-lite');
var ax = require('axios');
module.exports.getloc = function getloc(req, ip, property) {
//return location from IP.
if (req) {
var ipGuest = mainbody.getUserIP(req); //HERE
} else {
var ipGuest = ip;
}....
I get an error message.
However, when I use the FIRST method to export the function:
module.exports.getUserIP = function getUserIP(req) {
var ip = req.headers['x-forwarded-for'];
return ip;
}
Then it works perfectly.
What's the difference?
A better way is to use
module.exports = { getUserIP: getUserIP }
This way you can just look at the export statement at the end of your file and know which functions are being exported from it.
module.exports = {getUserIP}; is nothing but shorthand for the above syntax (ES6 shorthand property names). It lets you write { getUserIP } when the key name is the same as the variable name, i.e. { getUserIP: getUserIP }, where getUserIP can be a variable, a function, or an ES6 class.
All the examples you show will work properly, but they do have some different effects.
By default module.exports is already initialized to an empty object. So, when you do something like this:
module.exports.getUserIP = function() {...}
You are assigning a new property to the existing object that module.exports already pointed to. One advantage of this scheme is that you can easily add more properties the same way.
module.exports.getUserRegion = function() {}
This will add one more property to that same object without disturbing the first one you already added.
On the other hand, all of these are identical:
module.exports = {getUserIP: getUserIP}
module.exports = {'getUserIP':getUserIP}
module.exports = { getUserIP } // ES6 shorthand for the previous syntax
and, they all end up with the same result as each other, but they all replace module.exports with a new object that has your one new property in it.
If you then tried to add another property:
module.exports = {getUserRegion};
That would again assign a whole new object to module.exports and you would have just wiped out the object that previously had getUserIP on it. When assigning a new object, you would typically assign an object that had all your properties on it:
module.exports = {getUserIP: function() {...}, getUserRegion: function() {...}};
Thus, not wiping out something you had already put there.
All of your schemes should work fine as long as you aren't overwriting module.exports with a new object and thus overwriting the object that already had some of your methods on it.
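A short sketch of that pitfall (hypothetical file and property names):

// lib.js
module.exports.getUserIP = function (req) { return req.headers['x-forwarded-for']; };
module.exports = { getUserRegion: function (req) { return 'unknown'; } }; // replaces the whole exports object

// consumer.js
var lib = require('./lib');
console.log(typeof lib.getUserRegion); // 'function'
console.log(typeof lib.getUserIP);     // 'undefined' - wiped out by the reassignment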
To understand this module-exporting concept, just think of module.exports as a simple object. You can attach anything to that object, as you would with a normal JavaScript object.
Finally, when you require that module with require('path to js'), you get that exported object back. If you export a number of items from your module, you can access each of them by its property name.
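As a tiny sketch of that idea (hypothetical file and names):

// helpers.js
module.exports.version = 1;
module.exports.greet = function (name) { return 'hi ' + name; };

// consumer.js
var helpers = require('./helpers');
console.log(helpers.version);       // 1
console.log(helpers.greet('node')); // 'hi node'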

Node.JS - fs.exists not working?

I'm a beginner in Node.js, and was having trouble with this piece of code.
var fs = require('fs');

Framework.Router = function() {
    this.run = function(req, res) {
        fs.exists(global.info.controller_file, function(exists) {
            if (exists) {
                // Here's the problem
                res.writeHead(200, {'Content-Type':'text/html'});
                var cname = App.ucfirst(global.info.controller)+'Controller';
                var c = require(global.info.controller_file);
                var c = new App[cname]();
                var action = global.info.action;
                c[action].apply(global.info.action, global.info.params);
                res.end();
            } else {
                App.notFound();
                return false;
            }
        });
    }
};
The problem lies in the part after checking whether 'global.info.controller_file' exists; I can't seem to get the code to work properly inside the if (exists) { ... NOT WORKING } block.
I tried logging the values of all the variables in that section, and they have their expected values; however, the line c[action].apply(global.info.action, global.info.params); is not running as expected. It is supposed to call a function in the controller_file and do a simple res.write('hello world');. I wasn't having this problem before I started checking for the file using fs.exists. Everything inside the if statement worked perfectly fine before this check.
Why is the code not running as expected? Why does the request just time out?
Does it have something to do with the whole synchronous vs asynchronous thing? (Sorry, I'm a complete beginner)
Thank you
Like others have commented, I would suggest you rewrite your code to bring it more in-line with the Node.js design patterns, then see if your problem still exists. In the meantime, here's something which may help:
The advice about not using require dynamically at "run time" should be heeded, and calling fs.exists() on every request is tremendously wasteful. However, say you want to load all *.js files in a directory (perhaps a "controllers" directory). This is best accomplished using an index.js file.
For example, save the following as app/controllers/index.js
var fs = require('fs');
var files = fs.readdirSync(__dirname);
var dotJs = /\.js$/;
for (var i in files) {
if (files[i] !== 'index.js' && dotJs.test(files[i]))
exports[files[i].replace(dotJs, '')] = require('./' + files[i]);
}
Then, at the start of app/router.js, add:
var controllers = require('./controllers');
Now you can access the app/controllers/test.js module by using controllers.test. So, instead of:
fs.exists(controllerFile, function (exists) {
if (exists) {
...
}
});
simply:
if (controllers[controllerName]) {
...
}
This way you can retain the dynamic functionality you desire without unnecessary disk IO.
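For illustration, the run() method from the question could then look something like this (a sketch only, reusing the App, Framework, and global.info names from the question; it assumes each controller file name matches global.info.controller):

var controllers = require('./controllers'); // as added at the top of app/router.js

Framework.Router = function() {
    this.run = function(req, res) {
        if (!controllers[global.info.controller]) {
            return App.notFound();
        }
        res.writeHead(200, {'Content-Type': 'text/html'});
        var cname = App.ucfirst(global.info.controller) + 'Controller';
        var c = new App[cname]();
        c[global.info.action].apply(c, global.info.params);
        res.end();
    };
};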

Can I load multiple files with one require statement?

Maybe this question is a little silly, but is it possible to load multiple .js files with one require statement? Like this:
var mylib = require('./lib/mylibfiles');
and use:
mylib.foo(); //return "hello from one"
mylib.bar(); //return "hello from two"
And in the folder mylibfiles will have two files:
One.js
exports.foo= function(){return "hello from one";}
Two.js
exports.bar= function(){return "hello from two";}
I was thinking of putting a package.json in the folder that says to load all the files, but I don't know how. Another approach I was thinking of is to have an index.js that exports everything again, but then I would be duplicating work.
Thanks!!
P.S.: I'm working with Node.js v0.6.11 on a Windows 7 machine
First of all, using require does not duplicate anything. It loads the module and caches it, so calling require again will get it from memory (thus you can modify the module on the fly without touching its source code - this is sometimes desirable, for example when you want to store a db connection inside the module).
Also, package.json does not load anything by itself; apart from its "main" field (used when you require a directory), it is only used by npm.
Now, you cannot require multiple modules at once. For example, what would happen if both One.js and Two.js defined a function with the same name? There are more problems.
But what you can do is write an additional file, say modules.js, with the following content:
module.exports = {
one : require('./one.js'),
two : require('./two.js'),
/* some other modules you want */
}
and then you can simply use
var modules = require('./modules.js');
modules.one.foo();
modules.two.bar();
I have a snippet of code that requires more than one module, but it doesn't clump them together as your post suggests. However, that can be overcome with a trick that I found.
function requireMany () {
    return Array.prototype.slice.call(arguments).map(function (value) {
        try {
            return require(value)
        }
        catch (event) {
            return console.log(event)
        }
    })
}
And you use it as such
requireMany("fs", "socket.io", "path")
Which will return
[ fs {}, socketio {}, path {} ]
If a module is not found, an error will be logged to the console, but it won't break the program; the module that failed to load simply shows up in the array as undefined, so the array will not be shorter because one of the modules failed to load.
Then you can bind each of those array elements to a variable name, like so:
var [fs, socketio, path] = requireMany("fs", "socket.io", "path")
It essentially works like destructuring an object, but it assigns the values to variable names in the enclosing scope. So, in your case, you could do:
var [foo, bar] = requireMany("./foo.js", "./bar.js")
foo() //return "hello from one"
bar() //return "hello from two"
And if you do want it to break the program on error, just use this modified version, which is smaller:
function requireMany () {
return Array.prototype.slice.call(arguments).map(require)
}
Yes, you may require a folder as a module, according to the node docs. Let's say you want to require() a folder called ./mypack/.
Inside ./mypack/, create a package.json file with the name of the folder and a main javascript file with the same name, inside a ./lib/ directory.
{
"name" : "mypack",
"main" : "./lib/mypack.js"
}
Now you can use require('./mypack') and node will load ./mypack/lib/mypack.js.
However if you do not include this package.json file, it may still work. Without the file, node will attempt to load ./mypack/index.js, or if that's not there, ./mypack/index.node.
My understanding is that this could be beneficial if you have split your program into many javascript files but do not want to concatenate them for deployment.
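For the One.js/Two.js files from the question, such an index.js could simply re-export both (one possible sketch):

// ./mypack/index.js
exports.foo = require('./One.js').foo;
exports.bar = require('./Two.js').bar;

// caller
var mylib = require('./mypack');
mylib.foo(); // "hello from one"
mylib.bar(); // "hello from two"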
You can use destructuring assignment to map an array of exported modules from require statements in one line:
const requires = (...modules) => modules.map(module => require(module));
const [fs, path] = requires('fs', 'path');
I was doing something similar to what #freakish suggests in his answer, with a project where I have a list of test scripts that are pulled into a Puppeteer + Jest testing setup. My test files follow the naming convention testname1.js - testnameN.js, and I was able to use a generator function to require N files from a particular directory with the approach below:
const fs = require('fs');
const path = require('path');

module.exports = class FilesInDirectory {
    constructor(directory) {
        this.fid = fs.readdirSync(path.resolve(directory));
        this.requiredFiles = (this.fid.map((fileId) => {
            let resolvedPath = path.resolve(directory, fileId);
            return require(resolvedPath);
        })).filter(file => !!file);
    }

    printRetrievedFiles() {
        console.log(this.requiredFiles);
    }

    nextFileGenerator() {
        const parent = this;
        const fidLength = parent.requiredFiles.length;
        function* iterate(index) {
            while (index < fidLength) {
                yield parent.requiredFiles[index++];
            }
        }
        return iterate(0);
    }
}
Then use like so:
//Use in test
const FilesInDirectory = require('./utilities/getfilesindirectory');
const StepsCollection = new FilesInDirectory('./test-steps');
const StepsGenerator = StepsCollection.nextFileGenerator();
//Assuming we're in an async function
await StepsGenerator.next().value.FUNCTION_REQUIRED_FROM_FILE(someArg);
