Custom service in feathersjs - node.js

I am trying to write a custom service, but it doesn't work at all. I post a request and want to run two update queries on the collection, but nothing works.
This is my code:
// Initializes the `bedrijven` service on path `/bedrijven`
const createService = require('feathers-mongoose');
const createModel = require('../../models/bedrijven.model');
const hooks = require('./bedrijven.hooks');
const filters = require('./bedrijven.filters');
module.exports = function() {
  const app = this;
  const Model = createModel(app);
  const paginate = app.get('paginate');

  const options = {
    name: 'bedrijven',
    Model,
    paginate
  };

  // Initialize our service with any options it requires
  app.post('/bedrijven/setfavo', function(req, res) {
    Promise.all([
      app.service('bedrijven').update({
        owner: req.body.userid
      }, {
        favo: false
      }),
      app.service('bedrijven').update(req.body._id, {
        favo: true
      })
    ]);
  });

  app.use('/bedrijven', createService(options));

  // Get our initialized service so that we can register hooks and filters
  const service = app.service('bedrijven');

  service.hooks(hooks);

  if (service.filter) {
    service.filter(filters);
  }
};

Make sure this file is included in your main app.js file.
Something like:
const bedrijven = require('./bedrijven/bedrijven.service.js');
app.configure(bedrijven);
Is there a reason you don't want to use feathers generate service? It would take care of these questions for you.
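On the custom route itself, note that the handler never awaits the updates and never sends a response, so the request will hang even when the queries succeed. A minimal sketch of what it could look like, assuming the intent is "clear favo on all of this user's records, then set it on the record with req.body._id". It uses patch rather than update, since update in Feathers replaces the whole record, and depending on your feathers-mongoose version you may need to enable multi-record patching on the service:
app.post('/bedrijven/setfavo', function(req, res, next) {
  Promise.all([
    // patch(null, data, params) patches every record matching params.query
    app.service('bedrijven').patch(null, { favo: false }, {
      query: { owner: req.body.userid }
    }),
    // patch(id, data) patches the single record with that id
    app.service('bedrijven').patch(req.body._id, { favo: true })
  ])
    .then(results => res.json(results))
    .catch(next); // don't swallow errors from either query
});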

Related

Writing unit tests for services in feathers without using a database

I would like to write some unit tests for Feathers services.
I want these tests to run completely independently, which means I do not want to use the database.
This is an example snippet of my service, which is using Sequelize:
src/services/messages/messages.service.js
// Initializes the `messages` service on path `/messages`
const createService = require('feathers-sequelize');
const createModel = require('../../models/messages.model');
const hooks = require('./messages.hooks');
const filters = require('./messages.filter');
module.exports = function (app) {
  const Model = createModel(app);
  const paginate = app.get('paginate');

  const options = {
    name: 'messages',
    Model,
    paginate
  };

  // Initialize our service with any options it requires
  app.use('/messages', createService(options));

  // Get our initialized service so that we can register hooks
  const service = app.service('messages');

  service.hooks(hooks);

  if (service.filter) {
    service.filter(filters);
  }
};
I would maybe try to mock the database with the library sequelize-test-helpers but I am not sure how this would work in combination with feathers.
This is what my current test in TypeScript for this service looks like:
src/test/services/messages.test.ts
import assert from 'assert';
import { should } from 'chai';
import { app } from '../../src/app';

describe('\'messages\' service', () => {
  before(() => {
    // maybe add an entry to the mocked database
  });

  after(() => {
    // maybe delete that entry
  });

  it('registered the service', () => {
    const service = app.service('messages');

    assert.ok(service, 'Registered the service');
  });

  it('returns a single record', async () => {
    const service = app.service('messages');

    // get the result with id 1 (maybe the item added in the before hook)
    const res = await service.get(1);

    should().exist(res);
    res.should.be.a('object');
    // more checks...
  });
});
The first 'it(...)' was generated by Feathers itself and the second 'it(...)' shows the functionality I want the test to have.
The problem is that I am not sure how to write this test so that the service does not use the original database.
Does anybody have an idea how I could write a test for a Feathers service without using the actual database?
Thanks in advance!
Set the environment to test and, in the config, set the database in test.json, as seen here: https://docs.feathersjs.com/guides/basics/testing.html#test-database-setup
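Concretely, @feathersjs/configuration is built on node-config, so anything in config/test.json is merged over config/default.json whenever NODE_ENV=test. A minimal sketch, assuming the app was generated with a connection-string key named mysql (the key name depends on the dialect you picked) and that a throwaway SQLite file is acceptable for tests:
config/test.json
{
  "mysql": "sqlite://test.sqlite"
}
Then run the suite with the environment set, e.g. NODE_ENV=test npm test (cross-env helps on Windows). The service code stays untouched; only the connection it resolves at startup changes.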

ObjectionJS - Group models in a data layer file

I have a NodeJS app running fastify with fastify-objectionjs.
For tidiness, I'd like to group all models in a single file called _main.js, where I export an array of the models inside the models folder.
Since the fastify-objectionjs registration requires an array of models, I thought I could just import the array from my _main.js and feed it as it is to the registration function.
But ObjectionJS is telling me that "The supplied models are invalid".
/app.js (node entry point)
const fastify = require('fastify')({
  logger: true
})

const knexConfig = require('./knexfile')
const dataLayer = require('./models/_main')

fastify.register(require('fastify-objectionjs'), {
  knexConfig: knexConfig,
  models: dataLayer
})

// Also tried:
// fastify.register(require('fastify-objectionjs'), {
//   knexConfig: knexConfig,
//   models: [dataLayer]
// })
/models/_main.js
const User = require('./user.model')

var dataLayer = [User]

module.exports = dataLayer

// Also tried without var:
// module.exports = {
//   dataLayer: [
//     User
//   ]
// }
/models/user.model.js
const Knex = require('knex')
const connection = require('../knexfile')
const { Model } = require('objection')

const knexConnection = Knex(connection)

Model.knex(knexConnection)

class User extends Model {
  static get tableName () {
    return 'users'
  }
}
module.exports = { User }
I can't seem to find a problem in the file flow, but if I create the models array on the fly, the app starts smoothly:
/app.js (node entry point)
const fastify = require('fastify')({
  logger: true
})

const knexConfig = require('./knexfile')
const User = require('./models/user.model') // changed

fastify.register(require('fastify-objectionjs'), {
  knexConfig: knexConfig,
  models: [User] // changed
})
Any idea why this isn't working?
Thanks in advance for your time.
Found the gotcha: I just needed to use destructuring in the require of User, like this:
/models/_main.js
// BAD
// const User = require('./user.model')
// GOOD
const { User } = require('./user.model')
module.exports = [User]
Works like a charm.
Useful question that explains the difference:
Curly brackets (braces) in node require statement
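Alternatively (just a sketch of the other direction), user.model.js could export the class itself rather than wrapping it in an object, in which case the original non-destructured require would have worked:
/models/user.model.js
// export the class directly instead of module.exports = { User }
module.exports = User
/models/_main.js
// then no destructuring is needed
const User = require('./user.model')
module.exports = [User]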

Loopback get model from datasource with discoverSchemas

I've been testing LoopBack for a couple of hours now and everything works fine when I create models manually and modify the generated model.json to match my Oracle DB column names.
But I'm getting stuck when I want to generate a model from my Oracle DB, to avoid writing the 50 columns manually.
I made a test with a table called "atest" that contains the columns "name" and "id".
It creates atest.json and adds this in model-config.json:
"Atest": {
"dataSource": "oracledb",
"public": true
}
But in my atest.json there is just "undefined".
My discover-models.js file:
'use strict';

const loopback = require('loopback');
const promisify = require('util').promisify;
const fs = require('fs');
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdirp = promisify(require('mkdirp'));

const DATASOURCE_NAME = 'oracledb';
const dataSourceConfig = require('./server/datasources.json');
const db = new loopback.DataSource(dataSourceConfig[DATASOURCE_NAME]);

discover().then(
  success => process.exit(),
  error => { console.error('UNHANDLED ERROR:\n', error); process.exit(1); },
);

async function discover() {
  // It's important to pass the same "options" object to all calls
  // of dataSource.discoverSchemas(); it allows the method to cache
  // discovered related models
  const options = { relations: false };

  // Discover models and relations
  const atestSchemas = await db.discoverSchemas('ATEST', options);

  // Create model definition files
  await mkdirp('common/models');

  var response = await writeFile(
    'common/models/atest.json',
    JSON.stringify(atestSchemas['ATEST'], null, 2)
  );

  console.log(response);

  // Expose models via REST API
  const configJson = await readFile('server/model-config.json', 'utf-8');
  console.log('MODEL CONFIG', configJson);
  const config = JSON.parse(configJson);
  config.Atest = { dataSource: DATASOURCE_NAME, public: true };
  await writeFile(
    'server/model-config.json',
    JSON.stringify(config, null, 2)
  );
}
My Oracle connection is working fine; I don't get it. Any idea?
Add a console.log after you invoke discoverSchemas:
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(atestSchemas);
You should see that the key is not just 'ATEST', as referenced later with atestSchemas['ATEST']. The key is 'SCHEMA_NAME.ATEST' (SCHEMA_NAME will vary as per your environment).
If you target the appropriate key, you should get what you're looking for.
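If you'd rather not hard-code the schema prefix, one option (plain JavaScript over the returned object, not a dedicated LoopBack API) is to look the key up by its table-name suffix before writing the file:
const atestSchemas = await db.discoverSchemas('ATEST', options);

// Keys look like 'SCHEMA_NAME.ATEST', so pick the one for our table
const key = Object.keys(atestSchemas).find(k => k.endsWith('.ATEST')) || 'ATEST';

await writeFile(
  'common/models/atest.json',
  JSON.stringify(atestSchemas[key], null, 2)
);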

Re-initializing an Express application

I have a Node.js application, which was built on the Express.js framework.
const app = express();
require('./config')(app);
require('./services')(app);
In ./config/config.js we instantiate the config:
module.exports = function (app) {
  const conf = {APIKey: 1234567890, url: '<someurl>'};
  app.set('config', conf);
};
In ./services/APIService.js we create the service instance (a singleton):
module.exports = (app) => {
  app.set('apiService', new APIService(app));
};

function APIService(app) {
  const config = app.get('config');
  this.key = config.APIKey;
}

APIService.prototype.sendRequest = function () {
  const config = app.get('config');
  this._send(config.url, 'some text');
};
Or, service 2:
module.exports = function(app) {
  const config = app.get('config');
  const myMod = require('myMod')(config.APIKey);
}
Cool, everything works correctly. But at some point the administrator will change some config data, so we create a new config and set it:
newConf = {APIKey: 1234000000, url: '<some_new_url>'};
app.set('config', newConf);
APIService.sendRequest will now send requests to the CHANGED url, but APIService.key is still unchanged, and myMod was already instantiated with the old config data.
We need to write some setter methods, like this:
//for APIService
APIService.prototype.setConfig = function () {
  const config = app.get('config');
  this.key = config.APIKey;
};

//for service 2
/* change const myMod to let myMod and create a method for overriding it */
Or, bang! Kill and restart the Node.js server process. Bad idea. Is there some method for this goal, something like app.restart(), for safely re-initializing the application (or parts of it)?
Did you try to call app.set('apiService', new APIService(app)); again? Or just have a getter and setter on the prototype for your params.
A better way would be to create a new APIService object for each request with a middleware, something like:
app.use(function (req, res, next) {
  req.api = new APIService(app);
  next();
});
And use req.api.
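For the first suggestion, the idea is just to rebuild the singleton at the moment the config is swapped, so no consumer keeps a reference to stale values (a sketch reusing the app.set/app.get pattern from the question):
// wherever the administrator applies the new configuration
const newConf = {APIKey: 1234000000, url: '<some_new_url>'};
app.set('config', newConf);

// re-create the dependents so they pick up the new values
app.set('apiService', new APIService(app));

// consumers should always resolve the service through the app
// instead of caching it in a local variable
app.get('apiService').sendRequest();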

REST data source in NodeJS/ Express MVC Pattern

What are the best practices for including external REST data sources in an Express MVC application?
Should we create a Model for the entities that we retrieve from external REST sources?
Let's take this practical example:
Our starting point is a user.js model that uses Mongoose as the ODM.
var mongoose = require('mongoose');

var userModel = function () {
  //Define a simple schema for our user.
  var userSchema = mongoose.Schema({
    name: String,
    twitterId: Number
  });

  return mongoose.model('User', userSchema);
};

module.exports = new userModel();
Our objective is to show all tweets for a specific user, so we create a controller, controller/userTweets.js, where we prepare the data for our view.
How should we include the Twitter REST API in our application to handle this use case? (Let's say we are using a Node.js client for the Twitter APIs.)
I'm more comfortable using a specific model for the tweet entity and then retrieving the user's tweets from the controller through that model, but what should our tweet.js model look like?
Or should we design our REST API integration in a different way?
I would create a class called Tweet and a corresponding repository for it.
Assuming you are using ES6, because why not.
Let's call it tweets.js:
'use strict';

module.exports = function (cfg) {
  class Tweet {
    constructor() {
      this.userId = null;
      this.text = null;
    }
  }

  class Repo {
    static getTweetsForUser(userId) {
      // make a call to the twitter api, use https://www.npmjs.com/package/request
      // getTweets() below is pseudo code for that call
      let _ = require('lodash');

      return new Promise(function (resolve, reject) {
        getTweets(userId, function (err, tweets) {
          if (err) {
            return reject(err);
          }

          if (!tweets.length) {
            return resolve([]);
          }

          resolve(_.map(tweets, function (t) {
            let tweet = new Tweet();
            tweet.userId = userId;
            tweet.text = t.getTheTweet;
            return tweet;
          }));
        });
      });
    }
  }

  return {
    'tweet': Tweet,
    'repo': Repo
  };
};
// export whatever modules, like above; let's call it index.js
'use strict';

let _ = require('lodash');

let modules = [
  'tweets',
];

// cfg = any app configs that you might need in modules
function init(cfg) {
  let core = {};

  _.forEach(modules, function (m) {
    core[m] = require('./' + m)(cfg);
  });

  return core;
}

module.exports = init;
Example - https://github.com/swarajgiri/express-bootstrap/blob/master/core/index.js
Now on the routing side, in your main server.js (or whatever it is), inject the modules into an instance of express():
app.set('core', require('path/to/core/index')(whateverConfigYouMightNeed))
Once that is done, your route can look something like:
'use strict';

let wrap = require('co-wrap');

route.get(':userId/tweets', wrap(function* (req, res, next) {
  let tweets = [];

  try {
    tweets = yield req.app.get('core').tweets.repo.getTweetsForUser(req.params.userId);
  } catch (e) {
    // let the common error handler do its job.
    return next(e);
  }

  // render whatever view you want.
}));
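co-wrap here only turns the generator into a promise-returning request handler; on newer Node versions the same route can be written with async/await directly (a sketch, with the view name being an assumption):
route.get(':userId/tweets', async function (req, res, next) {
  try {
    const tweets = await req.app.get('core').tweets.repo.getTweetsForUser(req.params.userId);
    res.render('tweets', { tweets }); // render whatever view you want
  } catch (e) {
    // let the common error handler do its job
    next(e);
  }
});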
