ObjectionJS - Group models in a data layer file - node.js

I have a NodeJS app running fastify with fastify-objectionjs.
For tidiness, I'd like to group all models in a single file called _main.js, where I export an array of the models inside the models folder.
Since the fastify-objectionjs registration requires an array of models, I thought I could just import the array from my _main.js and feed it as it is to the registration function.
But ObjectionJS is telling me that "The supplied models are invalid".
/app.js (node entry point)
const fastify = require('fastify')({
  logger: true
})
const knexConfig = require('./knexfile')
const dataLayer = require('./models/_main')
fastify.register(require('fastify-objectionjs'), {
  knexConfig: knexConfig,
  models: dataLayer
})
// Also tried:
// fastify.register(require('fastify-objectionjs'), {
//   knexConfig: knexConfig,
//   models: [dataLayer]
// })
/models/_main.js
const User = require('./user.model')
var dataLayer = [User]
module.exports = dataLayer
// Also tried without var:
// module.exports = {
//   dataLayer: [
//     User
//   ]
// }
/models/user.model.js
const Knex = require('knex')
const connection = require('../knexfile')
const { Model } = require('objection')
const knexConnection = Knex(connection)
Model.knex(knexConnection)
class User extends Model {
  static get tableName () {
    return 'users'
  }
}
module.exports = { User }
I can't seem to find a problem in the file flow, but if I create the models array on the fly, the app starts smoothly:
/app.js (node entry point)
const fastify = require('fastify')({
  logger: true
})
const knexConfig = require('./knexfile')
const User = require('./models/user.model') // changed
fastify.register(require('fastify-objectionjs'), {
  knexConfig: knexConfig,
  models: [User] // changed
})
Any idea why this isn't working?
Thanks in advance for your time.

Found the gotcha: I just needed to use destructuring in the require of User, like this:
/models/_main.js
// BAD
// const User = require('./user.model')
// GOOD
const { User } = require('./user.model')
module.exports = [User]
Works like a charm.
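To make the difference concrete, here is a minimal sketch reusing the file names above:
// user.model.js ends with: module.exports = { User }
// Without destructuring you get the wrapper object, not the model class:
const wrapper = require('./user.model') // -> { User: [class User] }
// With destructuring you pull the class out of that wrapper:
const { User } = require('./user.model') // -> [class User]
// fastify-objectionjs expects an array of Objection model classes,
// so only the destructured form produces a valid entry:
module.exports = [User]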
Useful question that explains the difference:
Curly brackets (braces) in node require statement

Related

Mongoose not resolving callback queries?

I have been working on this project for two years now, and I suspect this was caused by a recent update. I'm hoping some kind Mongoose/NoSQL soul out there can help me track down and/or resolve this issue.
As you can see below, this is a simple Mongoose find query sent over Express to MongoDB: a standard Mongo/Express/Node stack using Mongoose, so the interactions should feel natural to most devs.
The issue is that, regardless of environment (this is a production project), the query does not resolve.
The "data" seems to get lost somewhere, and therefore the query simply never resolves.
It's a simple setup, really just a test endpoint, so please run it through and send some feedback.
Greatly appreciated!
Model.js
const mongoose = require('mongoose');
const mongoosePaginate = require('mongoose-paginate');
const Schema = mongoose.Schema;
const TestSchema = new Schema({
  data: {
    type: String,
    unique: false,
    required: true
  },
}, {
  timestamps: true
});
TestSchema.plugin(mongoosePaginate);
module.exports = mongoose.model('Test', TestSchema);
Constructor.js
class Constructor {
  constructor() {}
  getAll() {
    return TestSchema.find({}, function (err, tests) {})
  }
}
module.exports = Constructor
db.js
let mongoose = require('mongoose')
// Connect to db
mongoose.connect('mongodb://localhost:27017/test', { useNewUrlParser: true, useUnifiedTopology: true }, err => {
  if (err)
    return console.log("Cannot connect to DB")
  connectionCallback()
  console.log("DB Connected")
});
let connectionCallback = () => {}
module.exports.onConnect = cb => {
  connectionCallback = cb
}
App.js
const express = require('express');
const app = express();
const ip = require('ip');
let db = require('./db')
const router = express.Router();
const port = 8888;
const http = require('http').createServer(app);
let ipAddress = 'localhost'; // only works to the local host
try {
  // will enable the server to be accessed from the network
  ipAddress = ip.address();
} catch (err) {
  console.error(err); // was console.err, which does not exist
}
http.listen(port, ipAddress, () => {
  let message = [
    `Server is running at ${ipAddress}:${port}`,
  ];
  console.log(...message)
});
db.onConnect(() => {
  let Constructor = require("./pathTo/Constructor")
  let construct = new Constructor()
  app.use('/api', router.get('/test', function (req, res) { construct.getAll() }))
})
Your problem is with the getAll function in Constructor.js: you are both returning the query and passing a callback, so the promise is never resolved. You should either resolve the promise or return the result from the callback.
Resolve Promise:
class Constructor {
  constructor() {}
  async getAll() {
    return await TestSchema.find({})
  }
}
module.exports = Constructor
Return from callback:
class Constructor {
  constructor() {}
  getAll() {
    TestSchema.find({}, function (err, tests) {
      return tests;
    })
  }
}
module.exports = Constructor
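Either way, the route in App.js also has to do something with the result; as posted, the return value of construct.getAll() is discarded and res is never used. A minimal sketch of wiring the promise-based getAll into the /api/test endpoint (assuming the async version above):
db.onConnect(() => {
  const Constructor = require("./pathTo/Constructor")
  const construct = new Constructor()
  router.get('/test', async function (req, res) {
    try {
      const tests = await construct.getAll() // resolves to an array of documents
      res.json(tests)                        // send the result back to the caller
    } catch (err) {
      res.status(500).json({ error: err.message })
    }
  })
  app.use('/api', router)
})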
I ended up just scaling the project for production. I put the connectionCallback in a class and called it with the createConnection mongoose function.
Looks like this:
mongoose.Promise = global.Promise;
const url = 'mongodb://localhost/db'
const connection = mongoose.createConnection(url, options);
//load models
require('/models').connectionCallback();
module.exports = connectionInstance;
Please note, I am no longer using express!
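For anyone going the same way, one common gotcha with createConnection is that models must be registered on the returned connection via connection.model(...) rather than via mongoose.model(...); otherwise queries go to the default, never-opened connection and appear to hang. A rough sketch reusing the TestSchema idea from above (file layout and names are illustrative, not the poster's exact code):
const mongoose = require('mongoose');
const { Schema } = mongoose;
const connection = mongoose.createConnection('mongodb://localhost:27017/test', {
  useNewUrlParser: true,
  useUnifiedTopology: true
});
const TestSchema = new Schema({ data: { type: String, required: true } }, { timestamps: true });
// Register the model on this connection, not on the global mongoose object
const Test = connection.model('Test', TestSchema);
module.exports = { connection, Test };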

Loopback get model from datasource with discoverSchemas

I've been testing LoopBack for a couple of hours now, and everything works fine when I create models manually and modify the generated model.json to match my Oracle DB column names.
But I'm getting stuck when I want to generate a model from my Oracle DB, to avoid writing the 50 columns manually.
I made a test with a table called "atest" that contains the columns "name" and "id".
It creates atest.json and adds this to model-config.json:
"Atest": {
"dataSource": "oracledb",
"public": true
}
But my atest.json contains just "undefined".
My discover-models.js file:
'use strict';
const loopback = require('loopback');
const promisify = require('util').promisify;
const fs = require('fs');
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdirp = promisify(require('mkdirp'));
const DATASOURCE_NAME = 'oracledb';
const dataSourceConfig = require('./server/datasources.json');
const db = new loopback.DataSource(dataSourceConfig[DATASOURCE_NAME]);
discover().then(
  success => process.exit(),
  error => { console.error('UNHANDLED ERROR:\n', error); process.exit(1); },
);
async function discover() {
  // It's important to pass the same "options" object to all calls
  // of dataSource.discoverSchemas(), it allows the method to cache
  // discovered related models
  const options = { relations: false };
  // Discover models and relations
  const atestSchemas = await db.discoverSchemas('ATEST', options);
  // Create model definition files
  await mkdirp('common/models');
  var response = await writeFile(
    'common/models/atest.json',
    JSON.stringify(atestSchemas['ATEST'], null, 2)
  );
  console.log(response);
  // Expose models via REST API
  const configJson = await readFile('server/model-config.json', 'utf-8');
  console.log('MODEL CONFIG', configJson);
  const config = JSON.parse(configJson);
  config.Atest = { dataSource: DATASOURCE_NAME, public: true };
  await writeFile(
    'server/model-config.json',
    JSON.stringify(config, null, 2)
  );
}
My Oracle connection is working fine, so I don't get it. Any idea?
Add a console.log after you invoke discoverSchemas:
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(atestSchemas);
You should see that the key is not just 'ATEST', as referenced later with atestSchemas['ATEST']. The key is 'SCHEMA_NAME.ATEST' (SCHEMA_NAME will vary as per your environment).
If you target the appropriate key, you should get what you're looking for.
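For example, a minimal sketch of targeting the qualified key inside discover() (looking the key up by suffix is just one way to avoid hard-coding the schema name):
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(Object.keys(atestSchemas)); // e.g. [ 'MYSCHEMA.ATEST' ]
// Find the fully qualified key instead of assuming it is 'ATEST'
const key = Object.keys(atestSchemas).find(k => k.endsWith('.ATEST')) || 'ATEST';
await writeFile(
  'common/models/atest.json',
  JSON.stringify(atestSchemas[key], null, 2)
);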

Custom service in feathersjs

I'm trying to write a custom service, but it doesn't work at all. I post a request that should make two update queries on the collection, but it never works.
This is my code:
// Initializes the `bedrijven` service on path `/bedrijven`
const createService = require('feathers-mongoose');
const createModel = require('../../models/bedrijven.model');
const hooks = require('./bedrijven.hooks');
const filters = require('./bedrijven.filters');
module.exports = function () {
  const app = this;
  const Model = createModel(app);
  const paginate = app.get('paginate');
  const options = {
    name: 'bedrijven',
    Model,
    paginate
  };
  // Initialize our service with any options it requires
  app.post('/bedrijven/setfavo', function (req, res) {
    Promise.all([
      app.service('bedrijven').update({
        owner: req.body.userid
      }, {
        favo: false
      }),
      app.service('bedrijven').update(req.body._id, {
        favo: true
      })
    ]);
  });
  app.use('/bedrijven', createService(options));
  // Get our initialized service so that we can register hooks and filters
  const service = app.service('bedrijven');
  service.hooks(hooks);
  if (service.filter) {
    service.filter(filters);
  }
};
Make sure this file is included in your main app.js file.
Something like:
const bedrijven = require('./bedrijven/bedrijven.service.js');
app.configure(bedrijven);
Is there a reason you don't want to use feathers generate service? It would take care of these questions for you.
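As an aside, the custom POST handler in the question never sends a response, so the request will hang even once the file is registered. A rough sketch of the same handler that waits for both writes and replies (switching to patch with a query for the multi-record update is my assumption, not something from the original answer; multi-record patching may also need to be enabled on the service):
app.post('/bedrijven/setfavo', async function (req, res) {
  try {
    await Promise.all([
      // clear the favorite flag on all of this user's records
      app.service('bedrijven').patch(null, { favo: false }, { query: { owner: req.body.userid } }),
      // then mark the selected record as favorite
      app.service('bedrijven').patch(req.body._id, { favo: true })
    ]);
    res.json({ ok: true });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});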

How to pass DB connection to a sub-resolver when the root resolver returns an iterable

In short, the resolver getAllArticles() returns an array of Articles, and each article has an Author field and a Tags field, so each article can fire a sub-resolver to fetch that data; I was having trouble finding the best way to get the DB connections to those sub-resolvers.
You have to know some backstory:
app.js
I am passing the DB connections into the top-level resolvers as a map in the root value.
const db = new Map()
db.set('Neo4J', Neo4J.getDriver())
db.set('MongoDB', MongoDB.getDB())
// GraphQL Endpoint
app.use('/graphql', bodyParser.json(), graphqlExpress((req) => {
  // ...
  return {
    schema,
    context,
    rootValue: {
      db
    }
  }
}))
getArticle.js
I am passing the db connections to the sub-resolvers by assigning them onto the response object.
const getArticle = async (root, args, context) => {
  const db = root.db
  const Neo4J = db.get('Neo4J')
  const MongoDB = db.get('MongoDB')
  // ...
  const article = { /* ... */ }
  return Object.assign({}, article, { db })
}
This worked excellently (the code became extremely clean) until I moved to the getAllArticles() resolver, which returns an array of articles. I could not see how to attach the db Map.
getAllArticles.js
Here's what was immediately intuitive to add:
const getAllArticles = async (root, args, context) => {
  const db = root.db
  const Neo4J = db.get('Neo4J')
  const MongoDB = db.get('MongoDB')
  // ...
  const articles = [{ /* ... */ }, { /* ... */ }, { /* ... */ }]
  return Object.assign({}, articles, { db })
}
That didn't work, and after looking at it, why would it? Sub-resolvers take the data from the parent object, which is each Article in this case.
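One way to keep the root-value pattern would presumably be to attach the Map to each element of the array instead of to the array itself, along these lines, though the context turned out to be a cleaner home for the connections:
const getAllArticles = async (root, args, context) => {
  const db = root.db
  // ...
  const articles = [{ /* ... */ }, { /* ... */ }]
  // attach the Map to every article so each parent object carries the connections
  return articles.map(article => Object.assign({}, article, { db }))
}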
After some iterations, here is the viable solution:
app.js
import Neo4J from './connectors/neo4j'
import MongoDB from './connectors/mongodb'
const db = new Map([
  ['Neo4J', Neo4J.getDriver()],
  ['MongoDB', MongoDB.getDB()]
])
app.use('/graphql', bodyParser.json(), graphqlExpress((req) => {
  const context = {
    settings: { SECRET },
    person: req.person,
    db
  }
  return {
    schema,
    context,
    rootValue: null
  }
}))
everyResolver.js
const getSomething = async (root, args, context, info) => {
  const db = context.db
  const Neo4J = db.get('Neo4J')
  const MongoDB = db.get('MongoDB')
  const session = Neo4J.session()
  session.run(query) // etc
  const users = MongoDB.collection('users')
  users.findOne(ObjectID(id)) // etc
  return objectOrIterable
}
Hopefully, this can help someone else in the future. I really like the approach of passing the DB driver connections into the resolvers. It tightened up the overall architecture and allows me to spin up additional resolvers easily because they come with batteries included.
If you pass DB connections into the GraphQL context parameter, just make sure you pass in a Map containing the DB connections, not an Object. Some values in the DB connections are functions. Maps are able to handle that. Objects are not. You may see horribly ambiguous errors related to the DB connections in your sub-resolvers unless you pass around a Map.

REST data source in NodeJS/ Express MVC Pattern

What are the best practice to include external REST data sources in an Express MVC application?
Should we create a Model for the entities that we retrieve from external REST sources?
Let's take this practical example:
Our starting point is a user.js model that uses mongoose as the ODM.
var mongoose = require('mongoose');
var userModel = function () {
  // Define a simple schema for our user.
  var userSchema = mongoose.Schema({
    name: String,
    twitterId: Number
  });
  return mongoose.model('User', userSchema);
};
module.exports = new userModel();
Our objective is to show all tweets for a specific user, so we create a controller controller/userTweets.js where we prepare the data for our View.
How should we include the Twitter REST API in our application to handle this use case? (let's say we are using a nodejs client for twitter apis)
I'm more comfortable using a specific model for the tweet entity and retrieving a user's tweets from the controller through that model, but what should our tweet.js model look like?
Or should we design our REST API integration in a different way?
I would create a class called Tweet and a corresponding repository for it.
Assuming you are using es6, because why not.
Let's call it tweets.js:
'use strict';
let _ = require('lodash');
module.exports = function (cfg) {
  class Tweet {
    constructor() {
      this.userId = null;
      this.text = null;
    }
  }
  class Repo {
    static getTweetsForUser(userId) {
      // make a call to the twitter api, use https://www.npmjs.com/package/request
      // pseudo code
      return new Promise(function (resolve, reject) {
        getTweets(userId, function (err, tweets) {
          if (err) {
            return reject(err);
          }
          if (!tweets.length) {
            return resolve([]);
          }
          resolve(_.map(tweets, function (t) {
            let tweet = new Tweet();
            tweet.userId = userId;
            tweet.text = t.getTheTweet;
            return tweet;
          }));
        });
      });
    }
  }
  return {
    'tweet': Tweet,
    'repo': Repo
  };
};
// export whatever modules, like above, lets call it index.js
'use strict';
let _ = require('lodash');
let modules = [
  'tweets',
];
// cfg = any app configs that you might need in modules
function init(cfg) {
  let core = {};
  _.forEach(modules, function (m) {
    core[m] = require('./' + m)(cfg);
  });
  return core;
}
module.exports = init;
Example - https://github.com/swarajgiri/express-bootstrap/blob/master/core/index.js
Now, on the routing side, in your main server.js (or whatever yours is called), inject the modules into an instance of express():
app.set('core', require('path/to/core/index')(whateverConfigYouMightNeed))
Once that is done, your route can look something like
'use strict'
let wrap = require('co-wrap');
route.get('/:userId/tweets', wrap(function* (req, res, next) {
  let tweets = [];
  try {
    tweets = yield req.app.get('core').tweets.repo.getTweetsForUser(req.params.userId);
  } catch (e) {
    // let the common error handler do its job.
    return next(e);
  }
  // render whatever view you want.
}));
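If your Node version supports async/await, the same route can be written without co-wrap (a sketch, using the same core wiring as above):
route.get('/:userId/tweets', async function (req, res, next) {
  try {
    const tweets = await req.app.get('core').tweets.repo.getTweetsForUser(req.params.userId);
    res.json(tweets); // or render whatever view you want
  } catch (e) {
    return next(e); // let the common error handler do its job
  }
});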
