AVA test fails when accessing a global variable - Node.js

We've recently started migrating tests for our database models.
I'm facing an issue while trying to split different types of tests into different files.
I am writing some AVA unit tests.
In one file, test_1.js, I have:
const test = require('ava');
const sDB = require('../services/serviceDB');
const config = require('../../config').production;
const { CONFIG_RDS } = config;
let x = 1;

test.before(async (t) => {
  t.context.log = console.log;
  // the following line connects to the database and sets global.db
  await sDB.connect(CONFIG_RDS);
});

test('Test 1 - to access database', async (t) => {
  // test some functionality that accesses global.db
  // it passes
});
In the other file, test_2.js:
const test = require('ava');
const sDB = require('../services/serviceDB');
const config = require('../../config').production;
const { CONFIG_RDS } = config;
let x = 1;

test.before(async (t) => {
  t.context.log = console.log;
  // wait for the db to be connected (in the other file)
  await timeout(4000); // custom helper that waits 4 seconds
});

test('Test 2 - to access database', async (t) => {
  // test some functionality that accesses global.db
  // it FAILS with an exception that global.db is undefined
});
Thanks for your help in advance.

AVA runs each test file in its own worker process, so global state such as global.db set in one file is never visible to another. You need to connect to the database in every test file, typically in that file's own test.before hook, instead of waiting for a connection made in a different file.
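A minimal sketch of what test_2.js could look like with that change (the connect call and config names are taken from the question):
const test = require('ava');
const sDB = require('../services/serviceDB');
const { CONFIG_RDS } = require('../../config').production;

test.before(async (t) => {
  // each AVA file runs in a separate worker process, so connect here as well
  await sDB.connect(CONFIG_RDS);
});

test('Test 2 - to access database', async (t) => {
  t.truthy(global.db); // the connection made above sets global.db in this process
});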

Related

Send message to channel in a function

I have an event handler for the ready event.
ready.js:
const { Events } = require('discord.js');
const db = require("../config/database");
const itemdb = require("../config/itemdb");
const items = require("../models/items");
const AHItems = require('../models/ahitems.js');
const RSS = require('../models/regionserversettings.js');
module.exports = {
  name: Events.ClientReady,
  once: true,
  execute(client) {
    console.log(`Ready! Logged in as ${client.user.tag}`);
    db.authenticate()
      .then(() => {
        console.log('Logged in to DB!');
        AHItems.init(db);
        AHItems.sync();
        RSS.init(db);
        RSS.sync();
      })
      .catch(err => console.log(err));
    itemdb.authenticate()
      .then(() => {
        console.log('Logged in to Item DB!');
        items.init(itemdb);
        items.sync();
      })
      .catch(err => console.log(err));
  },
};
From inside the execute block I can use client.channels.cache.get('xxxxxx').send('Hello');
I want to use the send method in another file:
const AHItems = require("../models/ahitems");
const RSS = require("../models/regionserversettings");
const getprice = require("../api/getcurrentPrice");
const client = require("../events/ready");
const pricealarm = async function () {
  let ahitems = await AHItems.findAll({ attributes: ['guildID', 'itemID'] });
  for (let i = 0; i < ahitems.length; i++) {
    const guild = ahitems[i].dataValues.guildID;
    const RSSData = await RSS.findOne({ where: { guildID: guild } });
    const item = ahitems[i].dataValues.itemID;
    const access_token = RSSData.AccessToken;
    const server = RSSData.serverID;
    const price = await getprice(access_token, item, server);
    const channel = client.channels.cache.get('x').send('test');
    console.log(channel);
  }
}
module.exports = pricealarm;
But if I try to do this, it tells me 'Unresolved function or method send()'.
I think I am requiring the wrong file, but I am unsure which one I need to require.
The issue is that you are calling send() on a client object that is never defined in the file where you use it. In ready.js the client is handed to execute(client), so you can call client.channels.cache.get('xxxxxx').send('Hello') inside that block. In the other file, however, requiring ready.js only gives you that module's exports, not the client instance, which is why send() is unresolved.
To fix this, modify ready.js so that it also exposes the client on its exports. For example, add the following line at the end of the execute block:
module.exports.client = client;
Then, in your other file, you can get the client object by requiring ready.js and reading the client property of the exported module. For example:
const ready = require("../events/ready");
const client = ready.client;
// Now you can use client.channels.cache.get('xxxxxx').send('Hello');
Keep in mind that module.exports.client is only assigned once the ready event has fired, so read ready.client at that point (for example inside pricealarm itself) rather than relying on it being set when the module is first required.
With these modifications, you should be able to properly use the send() method from the client object in both files.
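A minimal sketch of pricealarm.js under that approach (the channel id 'x' stays the placeholder from the question):
const ready = require("../events/ready");

const pricealarm = async function () {
  // read the client lazily, after the ready event has assigned module.exports.client
  const client = ready.client;
  if (!client) return; // bot is not ready yet
  const channel = client.channels.cache.get('x');
  if (channel) await channel.send('test');
};

module.exports = pricealarm;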

Importing a specific function present in a file that contains a connection to the mongoose db crashes when running tests

I have a Node.js app using Express and MongoDB that I want to write unit tests for using Jest.
Here's the problem: I'm importing a specific function from the file reports.js. The function itself doesn't use the mongoose db, but require executes the entire file before returning the exported objects. So when I run my test file, the "testing" part works fine, but I get the reference error below, because the import (and the database connection it triggers) is still in progress when my tests have already completed.
ReferenceError: You are trying to import a file after the Jest environment has been torn down. From tests/reportsLib.test.js.
I've done some research, and a lot of it suggests calling jest.useFakeTimers() before each test, but the same error occurs. However, when I call jest.useFakeTimers() right after importing mongoose in the payment.js file (which is not ideal, since I would like to keep everything test-related in the test files), that error is fixed but another one occurs on the following line: campaignstatsSchema.plugin(mongoose_delete, {deletedAt: true, overrideMethods: true})
TypeError: Invalid schema configuration: -ClockDate- is not a valid type at path -deletedAt-. See mongoose-schematypes for a list of valid schema types.
Another way to fix the 1st error is to import the function in a beforeAll() Jest hook (sketched below), but the 2nd error still occurs.
Is there some sort of refactoring needed for the way I'm connecting to my DB and creating my schemas/models? What's the best way to solve this issue? Thanks in advance!
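For reference, a rough sketch of that beforeAll() workaround (deferring the require so that reports.js, and the db connection it pulls in, is not loaded while the test file itself is being evaluated):
let functionToTest;

describe('report.js testing', () => {
  beforeAll(() => {
    // requiring here delays loading reports.js and its services/db import
    ({ functionToTest } = require('./../lib/reports'));
  });

  it('should return a value of 0', () => {
    expect(functionToTest()).toBe(0);
  });
});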
reports.tests.js file:
const { functionToTest } = require('./../lib/reports');
describe('report.js testing', () => {
  beforeEach(() => {
    jest.useFakeTimers();
  });
  it('should return a value of 0', () => {
    expect(functionToTest()).toBe(0);
  });
});
reports.js file:
const db = require('./../services/db');
// ...other imports
const functionToTest = function() {
  return 0;
};
const otherFunction = async function(report_id, report_data) {
  // some code
  await db.report.findOneAndUpdate({_id: report_id}, {data: report_data});
};
module.exports = {
  functionToTest,
  // other functions
};
db.js file:
const mongoose = require('mongoose');
const payment = require('../models/payment');
const report = require('../models/report');
mongoose.connection.setMaxListeners(0);
mongoose.connect(process.env.MONGODB_URI, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
  useFindAndModify: false,
  useCreateIndex: true
});
const generateObjectId = function() {
  return mongoose.Types.ObjectId();
};
const getTimestampFromId = function(id) {
  return mongoose.Types.ObjectId(id).getTimestamp();
};
module.exports = {
  report,
  payment,
  // other models
};
payment.js file:
const mongoose = require('mongoose');
const mongoose_delete = require('mongoose-delete');
const mongoose_history = require('mongoose-history');
const paymentProfileSchema = mongoose.Schema({
  // fields here
});
paymentProfileSchema.plugin(mongoose_history);
paymentProfileSchema.plugin(mongoose_delete);
const paymentProfile = mongoose.model('payment-profiles', paymentProfileSchema);
paymentProfile.syncIndexes();
module.exports = paymentProfile;

Writing unit tests for services in feathers without using a database

I would like to write some unit tests for Feathers services.
I want these tests to run completely independently, which means I do not want to use the database.
This is an example snippet of my service, which uses Sequelize:
src/services/messages/messages.service.js
// Initializes the `messages` service on path `/messages`
const createService = require('feathers-sequelize');
const createModel = require('../../models/messages.model');
const hooks = require('./messages.hooks');
const filters = require('./messages.filter');
module.exports = function (app) {
  const Model = createModel(app);
  const paginate = app.get('paginate');
  const options = {
    name: 'messages',
    Model,
    paginate
  };
  // Initialize our service with any options it requires
  app.use('/messages', createService(options));
  // Get our initialized service so that we can register hooks
  const service = app.service('messages');
  service.hooks(hooks);
  if (service.filter) {
    service.filter(filters);
  }
};
I would maybe try to mock the database with the library sequelize-test-helpers, but I am not sure how that would work in combination with Feathers.
This is what my current TypeScript test for this service looks like:
src/test/services/messages.test.ts
import assert from 'assert';
import { should } from 'chai';
import { app } from '../../src/app';

describe('\'messages\' service', () => {
  before(() => {
    // maybe add an entry to the mocked database
  });
  after(() => {
    // maybe delete that entry
  });
  it('registered the service', () => {
    const service = app.service('messages');
    assert.ok(service, 'Registered the service');
  });
  it('returns a single record', async () => {
    const service = app.service('messages');
    // get result with id 1 (maybe added item in before-hook)
    const res = await service.get(1);
    should().exist(res);
    res.should.be.a('object');
    // more checks...
  });
});
The first 'it(...)' was generated by feathers itself and the second 'it(...)' shows the functionality I want the test to have.
But the problem is that I am not sure how to write this test so that the service will not use the original database.
Does anybody have an idea how I could write a test for a Feathers service without using the actual database?
Thanks in advance!
Set the environment to test and, in the config, point the database at a test instance via config/test.json, as described here: https://docs.feathersjs.com/guides/basics/testing.html#test-database-setup
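A rough sketch of that setup; the "mysql" key, connection string, and test command are assumptions based on the default Feathers generator layout, so adjust them to whatever your sequelize.js actually reads.
config/test.json (overrides the connection from config/default.json with a dedicated test database):
{
  "mysql": "mysql://user:password@localhost:3306/myapp_test"
}
package.json (run tests with NODE_ENV=test so @feathersjs/configuration loads config/test.json on top of default.json):
"scripts": {
  "test": "NODE_ENV=test mocha test/ --recursive --exit"
}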

Loopback get model from datasource with discoverSchemas

I've been testing LoopBack for a couple of hours now, and everything works fine when I create models manually and modify the generated model.json to match my Oracle DB column names.
But I'm getting stuck when I want to generate a model from my Oracle DB, to avoid writing the 50 columns by hand.
I made a test with a table called "atest" that contains the columns "name" and "id".
It creates atest.json and adds this to model-config.json:
"Atest": {
"dataSource": "oracledb",
"public": true
}
But in my atest.json there is just "undefined".
My discover-models.js file :
'use strict';
const loopback = require('loopback');
const promisify = require('util').promisify;
const fs = require('fs');
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdirp = promisify(require('mkdirp'));
const DATASOURCE_NAME = 'oracledb';
const dataSourceConfig = require('./server/datasources.json');
const db = new loopback.DataSource(dataSourceConfig[DATASOURCE_NAME]);
discover().then(
  success => process.exit(),
  error => { console.error('UNHANDLED ERROR:\n', error); process.exit(1); },
);

async function discover() {
  // It's important to pass the same "options" object to all calls
  // of dataSource.discoverSchemas(); it allows the method to cache
  // discovered related models
  const options = { relations: false };
  // Discover models and relations
  const atestSchemas = await db.discoverSchemas('ATEST', options);
  // Create model definition files
  await mkdirp('common/models');
  var response = await writeFile(
    'common/models/atest.json',
    JSON.stringify(atestSchemas['ATEST'], null, 2)
  );
  console.log(response);
  // Expose models via REST API
  const configJson = await readFile('server/model-config.json', 'utf-8');
  console.log('MODEL CONFIG', configJson);
  const config = JSON.parse(configJson);
  config.Atest = { dataSource: DATASOURCE_NAME, public: true };
  await writeFile(
    'server/model-config.json',
    JSON.stringify(config, null, 2)
  );
}
My Oracle connection is working fine, so I don't get it. Any idea?
Add a console.log after you invoke discoverSchemas:
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(atestSchemas);
You should see that the key is not just 'ATEST', as referenced later with atestSchemas['ATEST']. The key is 'SCHEMA_NAME.ATEST' (SCHEMA_NAME will vary as per your environment).
If you target the appropriate key, you should get what you're looking for.
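A small sketch of that adjustment inside discover(), using whatever key discoverSchemas actually returned instead of hard-coding 'ATEST':
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(Object.keys(atestSchemas)); // e.g. [ 'MYSCHEMA.ATEST' ]

// pick the discovered key rather than assuming it is 'ATEST'
const key = Object.keys(atestSchemas)[0];
await writeFile(
  'common/models/atest.json',
  JSON.stringify(atestSchemas[key], null, 2)
);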

Loopback console script does not exit

I'm writing a Loopback script that is supposed to be called by cron.
In order to obtain the app object, I do
var app = require('./server/server');
// Script logic
console.log('done');
However, the script does not exit once it finishes execution. How do I make it exit?
Reference: http://docs.strongloop.com/display/public/LB/Working+with+LoopBack+objects
Finally found the cause of this issue.
The problem is that the database connection (in my case, MongoDB via loopback-connector-mongodb) is still open.
To disconnect the database connection, and thereby let the console script exit:
var app = require('./server/server');
app.dataSources.DATASOURCENAME.disconnect();
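A minimal sketch of a cron script built around that call (DATASOURCENAME stays the placeholder from above, and doWork() stands in for the actual script logic):
var app = require('./server/server');

// placeholder for the actual script logic
async function doWork() {
  // ...
}

doWork()
  .then(() => console.log('done'))
  .catch(err => console.error(err))
  .finally(() => app.dataSources.DATASOURCENAME.disconnect());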
In some places I've read that the issue is the HTTP server preventing the script from shutting down.
I ended up with a module that does not even start an HTTP server. I named it loopback-init.js, and I usually import it from migrations and scripts (the important part is the custom callback passed to boot()):
'use strict';
const Promise = require('bluebird');
const loopback = require('loopback');
const boot = require('loopback-boot');
const logger = require('logger');
const app = loopback();
boot(app, __dirname + '/../server', err => {
  if (err) throw err;
  logger.debug('Loopback initialized.');
  app.start = function() {
    app.close = function(cb) {
      app.removeAllListeners('started');
      app.removeAllListeners('loaded');
      if (cb) cb();
    };
  };
});
const autoMigrate = Promise.promisify(
  app.dataSources.db.automigrate,
  {context: app.dataSources.db}
);
app.autoMigrate = autoMigrate;
module.exports = app;
and my db-migrate scripts look like this:
'use strict';
var dbm;
var type;
var seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function(options, seedLink) {
  dbm = options.dbmigrate;
  type = dbm.dataType;
  seed = seedLink;
};
exports.up = function(db) {
  const lb = require('loopback-init');
  return lb.autoMigrate('Item')
    .then(lb.close, lb.close);
};
exports.down = function(db) {
  return db.dropTable('item');
};
exports._meta = {
  "version": 1
};
