I have no experience at all with Node.js and I am learning. I have some code that I am testing, and it gives me the following errors, which I don't understand:
1) Part 3 Assessment Tests "before all" hook:
ReferenceError: wagner is not defined
at Object.<anonymous> (fx.js:2:31)
at Object.<anonymous> (dependencies.js:3:10)
at Context.<anonymous> (test.js:29:20)
2) Part 3 Assessment Tests "after all" hook:
TypeError: Cannot read property 'close' of undefined
at Context.<anonymous> (test.js:65:11)
These are the programs involved in the issue:
test.js --------------
var assert = require('assert');
var express = require('express');
var fs = require('fs');
var status = require('http-status');
var superagent = require('superagent');
var wagner = require('wagner-core');
var URL_ROOT = 'http://localhost:3000';
var PRODUCT_ID = '000000000000000000000001';
describe('Part 3 Assessment Tests', function() {
var server;
var app;
var succeeded = 0;
var finalCharge;
var Category;
var Config;
var fx;
var Product;
var Stripe;
var User;
before(function() {
app = express();
// Bootstrap server
var models = require('./models')(wagner);
var dependencies = require('./dependencies')(wagner);
// Make models available in tests
var deps = wagner.invoke(function(Category, fx, Product, Stripe, User, Config) {
return {
Category: Category,
fx: fx,
Product: Product,
Stripe: Stripe,
User: User,
Config: Config
};
});
...
dependencies.js -------------
var wagner = require('wagner-core');
var fs = require('fs');
var fx = require('./fx')(wagner);
var Stripe = require('stripe');
module.exports = function(wagner) {
var stripe =
// TODO: Make Stripe depend on the Config service and use its `stripeKey`
// property to get the Stripe API key.
wagner.factory('Stripe', function() {
return Stripe(Config.stripeKey);
});
wagner.factory('fx', fx);
wagner.factory('Config', function() {
return JSON.parse(fs.readFileSync('./config.json').toString());
});
var Config = wagner.invoke(function(Config) {
return Config;
});
};
fx.js --------------------------
var superagent = require('superagent');
var _ = require('underscore')(wagner);
module.exports = function(Config) {
...
};
I don't think I should have any problem, because wagner is defined in test.js and passed as a parameter to dependencies.js, which passes it on to fx.js.
1. Could you tell me what is wrong with the code?
2. As for the second error, I have no clue why it is happening.
Please help.
In your dependencies.js, in the fx definition, you don't need to pass wagner as a parameter, because that variable doesn't exist there:
var fx = require('./fx');
As I see it, you have already defined the Config factory, and you don't need to assign it to a variable to use it, because wagner will auto-inject your "Config" factory, giving you access to it:
wagner.factory('Stripe', function(Config) {
return Stripe(Config.stripeKey);
});
...
wagner.factory('Config', function() {
return JSON.parse(fs.readFileSync('./config.json').toString());
});
Then in your fx.js, when you want to use the Config service, you just declare it as a parameter:
module.exports = function(Config) {
var url = 'http://openexchangerates.org/api/latest.json?app_id=' +
Config.openExchangeRatesKey;
...
}
You should do exactly the same if you want to use the "Config" factory in any other file:
[auth.js]
function setupAuth(User, Config, app) {
...
}
You're trying to use wagner in fx.js before the exported function is executed, and outside of the scope that wagner is passed to. Also, naming the parameter Config seems a bit misleading if you're passing wagner to that function instead.
If you can defer loading underscore, you could do something like:
var superagent = require('superagent');
var _;
module.exports = function(Config) {
if (!_)
_ = require('underscore')(Config);
// ...
};
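Putting both answers together, a minimal sketch of the corrected wiring (assuming the rest of your setup stays the same) could look like this:
dependencies.js:
var fs = require('fs');
var Stripe = require('stripe');
var fx = require('./fx'); // just the factory function; no call, no wagner here
module.exports = function(wagner) {
  wagner.factory('Config', function() {
    return JSON.parse(fs.readFileSync('./config.json').toString());
  });
  // Config is injected by wagner rather than read from an outer variable
  wagner.factory('Stripe', function(Config) {
    return Stripe(Config.stripeKey);
  });
  wagner.factory('fx', fx);
};
fx.js:
var superagent = require('superagent');
var _ = require('underscore');
module.exports = function(Config) {
  // wagner supplies Config when it invokes this factory, so there is no
  // top-level reference to wagner here
  // ...
};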
I am using Node.js with Express and MySQL. When I submit a form, I get the error "uniqid is not a function".
var express = require('express');
var router = express.Router();
var uniqid = require('uniqid');
var db = require('../db');
router.post('/create',function(req,res,next){
var full_name = req.body.fullname;
var email = req.body.email;
var password= req.body.password;
var mobile_no = req.body.mobile_no;
var uniqid = uniqid();
var insert_query = `INSERT INTO users (user_uid,full_name,email,mobile_no,password) VALUES ("${uniqid}","${full_name}","${email}","${mobile_no}","${password}") `;
db.query(insert_query,function(err,result){
if(err) throw err;
res.redirect('/signup');
});
});
module.exports = router;
You're overriding the uniqid module binding with the value from uniqid():
var uniqid = require('uniqid');
...
var uniqid = uniqid();
While it might work on the first execution¹, subsequent calls will fail with the error "uniqid is not a function".
Change the name and use ES6 let/const instead of var:
const uniqid = require('uniqid');
...
const newId = uniqid();
const insert_query = `INSERT INTO ... VALUES ("${newId}"...) `;
¹ It doesn't, see Klaycon's comment below.
Let's take this code for example:
var foo = () => "test"
function test() {
var foo = foo()
}
test()
As you can see, it produces the same error you have. This is because you are naming your local variable the same as your global variable.
What happens here is called hoisting: when the function is executed, foo is declared as a variable local to the current function execution context before any value is assigned to it.
Then the engine tries to set its value by calling foo(), but at that point foo is the local variable, not the global one, and its value is undefined.
And obviously, undefined is not a function, hence the error "foo is not a function".
tl;dr: be careful when naming your variables (and use let/const).
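As a side note, here is a minimal sketch of the same bug written with const (let behaves the same): block-scoped bindings sit in the "temporal dead zone" until their declaration runs, so the engine reports a clearer error at exactly the broken line:
const foo = () => "test"
function test() {
  // Throws "ReferenceError: Cannot access 'foo' before initialization",
  // which points straight at the shadowing bug.
  const foo = foo()
}
test()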
I have a Node.js application, built on the Express.js framework.
const app = express();
require('./config')(app);
require('./services')(app);
In ./config/config.js we instantiate the config:
module.exports = function (app) {
const conf = {APIKey: 1234567890, url: '<someurl>'};
app.set('config', conf);
};
In ./services/APIService.js we create the service instance (a singleton):
module.exports = (app) => {
app.set('apiService', new APIService(app));
};
function APIService(app) {
const config = app.get('config');
this.key = config.APIKey;
};
APIService.prototype.sendRequest = function () {
  const config = app.get('config');
  this._send(config.url, 'some text');
};
Or, service2
module.exports = function(app) {
const config = app.get('config');
const myMod = require('myMod')(config.APIKey);
}
Cool, everything works correctly. But at some point an administrator will change some config data. So we create a new config and set it:
newConf = {APIKey: 1234000000, url: '<some_new_url>'};
app.set('config', newConf);
APIService.sendRequest will now send requests to the CHANGED url, but APIService.key is still unchanged. And myMod was already instantiated with the old config data.
We would need to write setter methods, like this:
//for APIService
APIService.prototype.setConfig = function () {
  const config = app.get('config');
  this.key = config.APIKey;
};
//for service 2
/* change const myMod to let myMod and create a method for overriding it */
or, bang! kill and restart the Node.js server process. Bad idea. Is there some method for this, something like app.restart(), for safely reinitializing the application (or parts of it)?
Did you try calling app.set('apiService', new APIService(app)); again? Or just have a getter and setter on the prototype for your params.
A better way would be to create a new APIService object for each request with a middleware, something like:
app.use(function (req, res, next){
req.api = new APIService(app);
next();
});
And use req.api.
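If you'd rather keep the singleton, a minimal sketch (reusing the question's hypothetical _send method) is to store app on the instance and look the config up at call time instead of caching its values, so an updated config is picked up automatically:
function APIService(app) {
  this.app = app; // keep a reference instead of copying config values
}
// Resolve the key lazily so app.set('config', newConf) takes effect
Object.defineProperty(APIService.prototype, 'key', {
  get: function () {
    return this.app.get('config').APIKey;
  }
});
APIService.prototype.sendRequest = function () {
  const config = this.app.get('config'); // always the latest config
  this._send(config.url, 'some text');
};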
I'm writing a Loopback script that is supposed to be called by cron.
In order to obtain the app object, I do
var app = require('./server/server');
// Script logic
console.log('done');
However, the script does not exit once it finishes execution. How do I make it exit?
Reference: http://docs.strongloop.com/display/public/LB/Working+with+LoopBack+objects
I finally found the cause of this issue.
The problem is that the database connection (in my case, MongoDB via loopback-connector-mongodb) is still open.
To disconnect the database connection, and subsequently exit the console script:
var app = require('./server/server');
app.dataSources.DATASOURCENAME.disconnect();
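If you don't want to hard-code the datasource name, a minimal sketch (assuming every configured datasource should be released) is to loop over them instead:
Object.keys(app.dataSources).forEach(function (name) {
  app.dataSources[name].disconnect(); // let the event loop drain
});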
In some places I've read that the issue is the HTTP server preventing the script from shutting down.
I ended up with a module that does not even start an HTTP server. I named it loopback-init.js, and I usually import it from migrations and scripts (the important part is the custom callback passed to boot()):
'use strict';
const Promise = require('bluebird');
const loopback = require('loopback');
const boot = require('loopback-boot');
const logger = require('logger');
const app = loopback();
boot(app, __dirname + '/../server', err => {
  if (err) throw err;
  logger.debug('Loopback initialized.');
  // Stub out start() so no HTTP server is ever started, and provide a
  // close() that detaches listeners so the process can exit cleanly.
  app.start = function() {};
  app.close = function(cb) {
    app.removeAllListeners('started');
    app.removeAllListeners('loaded');
    if (cb) cb();
  };
});
const autoMigrate = Promise.promisify(
app.dataSources.db.automigrate,
{context: app.dataSources.db}
);
app.autoMigrate = autoMigrate;
module.exports = app;
and my db-migrate scripts look like this:
'use strict';
var dbm;
var type;
var seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function(options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function(db) {
const lb = require('loopback-init');
return lb.autoMigrate('Item')
.then(lb.close, lb.close);
};
exports.down = function(db) {
return db.dropTable('item');
};
exports._meta = {
"version": 1
};
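For a plain cron script the same module works directly. Here is a minimal sketch (assuming a model named Item and the db datasource from above) that does its work, then closes and disconnects so the process can exit:
'use strict';
const app = require('loopback-init');
app.models.Item.find({})
  .then(items => console.log('found', items.length, 'items'))
  .catch(err => console.error(err))
  .then(() => {
    app.close();
    app.dataSources.db.disconnect(); // release the connection so Node exits
  });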
I'm looking for the easiest and most performant way to build a multitenant Express.js app for managing projects.
Reading several blogs and articles, I figured out that a database-per-tenant architecture would be a good fit for my application.
My first attempt was to use subdomains to detect the tenant, and then map the subdomain to a MongoDB database.
I came up with these Express middlewares:
var mongoose = require('mongoose');
var debug = require('debug')('app:middleware:mongooseInstance');
var conns = [];
function mongooseInstance (req, res, next) {
var sub = req.sub = req.subdomains[0] || 'app';
// if the connection is cached on the array, reuse it
if (conns[sub]) {
debug('reusing connection', sub, '...');
req.db = conns[sub];
} else {
debug('creating new connection to', sub, '...');
conns[sub] = mongoose.createConnection('mongodb://localhost:27017/' + sub);
req.db = conns[sub];
}
next();
}
module.exports = mongooseInstance;
Then I register the models inside another middleware:
var fs = require('fs');
var debug = require('debug')('app:middleware:registerModels');
module.exports = registerModels;
var models = [];
var path = __dirname + '/../schemas';
function registerModels (req, res, next) {
if(models[req.sub]) {
debug('reusing models');
req.m = models[req.sub];
} else {
var instanceModels = [];
var schemas = fs.readdirSync(path);
debug('registering models');
schemas.forEach(function(schema) {
var model = schema.split('.').shift();
instanceModels[model] = req.db.model(model, require([path, schema].join('/')));
});
models[req.sub] = instanceModels;
req.m = models[req.sub];
}
next();
}
Then I can proceed normally, as in any other Express.js app:
var express = require('express');
var app = express();
var mongooseInstance = require('./lib/middleware/mongooseInstance');
var registerModels = require('./lib/middleware/registerModels');
app.use(mongooseInstance);
app.use(registerModels);
app.get('/', function(req, res, next) {
req.m.Project.find({},function(err, pets) {
if(err) {
  return next(err);
}
res.json({ count: pets.length, data: pets });
});
});
app.get('/create', function (req, res) {
var p = new req.m.Project({ name: 'Collin', description: 'Sad' });
p.save(function(err, pet) {
res.json(pet);
});
});
app.listen(8000);
The app is working fine. I don't have more than this right now, and I'd like to get some feedback before I go on, so my questions are:
Is this approach efficient? Take into account that a lot will be happening here: multiple tenants with several users each, and I plan to set up webhooks to trigger actions on each instance, emails, etc.
Are there any bottlenecks/pitfalls I'm missing? I'm trying to make this scalable from the start.
What about the model registering? I couldn't find any other way to accomplish this.
Thanks!
Is this approach efficient?
Are there any bottlenecks/pitfalls I'm missing?
This all seems generally correct to me.
What about the model registering?
I agree with #narc88 that you don't need to register models in middleware.
For lack of a better term, I would use a factory pattern. This "factory function" would take in your subdomain, or however you decide to detect tenants, and return a Models object. If a given middleware wants to use the available Models, you just do:
var Models = require(/* path to your Model factory */);
...
// later on inside a route, or wherever
var models = Models(req.sub/* or req.tenant ?? */);
models.Project.find(...);
For an example "factory" (excuse the copy/paste):
var mongoose = require('mongoose');
var fs = require('fs');
var debug = require('debug')('app:middleware:registerModels');
var models = [];
var conns = [];
var path = __dirname + '/../schemas';
function factory(tenant) {
// if the connection is cached on the array, reuse it
if (conns[tenant]) {
debug('reusing connection', tenant, '...');
} else {
debug('creating new connection to', tenant, '...');
conns[tenant] = mongoose.createConnection('mongodb://localhost:27017/' + tenant);
}
if(models[tenant]) {
debug('reusing models');
} else {
var instanceModels = [];
var schemas = fs.readdirSync(path);
debug('registering models');
schemas.forEach(function(schema) {
var model = schema.split('.').shift();
instanceModels[model] = conns[tenant].model(model, require([path, schema].join('/')));
});
models[tenant] = instanceModels;
}
return models[tenant];
}
module.exports = factory;
Aside from a potential (albeit probably small) performance gain, I think it also has these advantages:
doesn't clutter up the request object as much
you don't have to worry as much about middleware ordering
allows more easily abstracting permissions for a given set of models, i.e. the models aren't sitting on the request for all middleware to see
This approach doesn't tie your models to http requests, so you might have flexibility to use the same factory in a job queue, or whatever.
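To illustrate, here is a minimal sketch of the question's routes rewritten against such a factory (the require path is a placeholder for wherever you put it):
var express = require('express');
var Models = require('./lib/models-factory'); // hypothetical path to the factory above
var app = express();
app.get('/', function (req, res, next) {
  // Detect the tenant and fetch its models on demand
  var models = Models(req.subdomains[0] || 'app');
  models.Project.find({}, function (err, projects) {
    if (err) return next(err);
    res.json({ count: projects.length, data: projects });
  });
});
app.listen(8000);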
I'm testing my Node.js project using Mocha, and I have a file, index.js, that is the main file. It has no module.exports and is run like a CLI tool.
index.js
// CLI tools
var bluebird = require('bluebird');
var gigatool = require('./lib/gigatool');
var debug = require('debug')('index');
var size = 20;
var page = process.env.BATCH;
var startDate = process.env.START;
var dataDir = process.env.DATADIR;
debug(page, startDate, dataDir);
// requires parameters
if (!process.env.BATCH) {
throw new Error('BATCH environment variable is needed');
}
var tool = gigatool(size, page, dataDir);
bluebird.all([tool.clean(startDate), tool.continuous()])
.finally(function(){
process.exit(0);
});
test.js
'use strict';
var chai = require('chai');
var fs = require('fs');
var noop = require('lodash.noop');
var rimraf = require('rimraf');
var async = require('async');
var expect = chai.expect;
describe.only('Integration', function() {
var dataDir = './countries';
var path = dataDir + '/Albania';
describe('clean run', function() {
this.timeout(10000);
before(function() {
process.env.BATCH = 1;
process.env.DEBUG = '*';
require('../../index');
});
after(function(done) {
// rimraf(dataDir, done);
});
});
});
If I run require('./index'), it will run the module and then continue to move forward. How can I wait for it to end before I run the test cases?
Note: it is calling some APIs.
You need to test your whole application at once. This is still testing, but it's hardly "unit" testing unless your code is a unit ("the Unix way"). For this reason, your code should start with:
var Promise= require("bluebird");
var exec= Promise.promisify(require("child_process").exec);
var run = function(args){
return exec("node", ["../../index.js"].concat(args)).get("stdout");
};
This makes your tests exercise the file with its actual inputs:
describe('your code', function() {
it('should work with params a,b', function(){
return run(['a','b']).then(function(out){ // note the Mocha promise syntax
assert.equal(out, 'your expected stdout');
});
});
});
Unfortunately, there is no way to unit test individual aspects of a CLI Node script as you have it. Instead, what I've done in the past is have conditional execution based on whether the script was used via require or called from the command line:
// index.js
var foo = require('foo');
var bar = require('bar');
// ...
// determine if this script is being required as a module or is CLI
var IS_EXECUTING = (require.main === module);
var methods = {
init: function(args) {
  methods.auditArgs(args);
  methods.doSomeStuff(args[2], args[3]); // first two CLI args after node and the script path
  methods.doOtherStuff();
},
auditArgs: function(args) {/* ... */},
doSomeStuff: function(arg1, arg2) {/* ... */},
// ...
};
// At the bottom we either begin execution or return a function which can
// be called in a test harness when ready...
if (IS_EXECUTING) {
methods.init(process.argv);
} else {
module.exports = function (mockMethods) {
// you could have some code here to mock out provided methods
// for example:
methods.auditArgs = mockMethods.auditArgs || methods.auditArgs;
// then return the "API" for this script...
return methods;
};
}
In your test harness, you would then simply require the file and, when ready, use it like any other module. But when called from the command line, the code will just execute normally:
// in test.js
'use strict';
var chai = require('chai');
// ...
var appFactory = require('./index');
var expect = chai.expect;
describe('initialization', function() {
var app;
beforeEach(function() {
app = appFactory({
  auditArgs: chai.spy(function() { }) // chai.spy comes from the chai-spies plugin
  // other mock method implementations, spies, etc.
});
});
it('should call necessary methods on init', function() {
expect(app.auditArgs).to.have.been.called(1);
// ...
});
});