How to set another database for tests in the knexfile? - node.js

I have 2 databases: the first for development and production, the second for tests.
knexfile.js:
const config = require('config');
const knexConfig = config.get('knexConfig');

module.exports = {
  development: {
    client: knexConfig.client,
    connection: {
      host: knexConfig.host,
      database: knexConfig.database,
      user: knexConfig.user,
      password: knexConfig.password,
    },
    pool: {
      min: knexConfig.min,
      max: knexConfig.max,
    },
  },
  test: {
    client: 'sqlite3',
    connection: {
      filename: './file.db',
    },
  },
};
route.test.js:
const Model = require('objection').Model;
const provider = require('../../../server/models/provider');
const Knex = require('knex');
const knexConfig = require('../../../knexfile');
const knex = Knex(knexConfig.test);
Model.knex(knex);

describe('Should test provider Model', () => {
  test('should return provider', () => {
    provider
      .query()
      .then((providers) => {
        expect(providers).toBe('array');
      });
  });
});
I got this error:
Test suite failed to run
ProcessTerminatedError: cancel after 2 retries!
at Farm.<anonymous> (node_modules/worker-farm/lib/farm.js:87:25)
at Array.forEach (<anonymous>)
at Farm.<anonymous> (node_modules/worker-farm/lib/farm.js:81:36)
at ontimeout (timers.js:469:11)
at tryOnTimeout (timers.js:304:5)
at Timer.listOnTimeout (timers.js:264:5)
A worker process has quit unexpectedly! Most likely this is an initialization error.
I want to connect to the sqlite database for the tests, but I can't get that working so the tests pass. How can I fix that?

Your config seems to be fine, since the following test works.
https://runkit.com/embed/3w0umojslatc
require('sqlite3');

var knex = require("knex")({
  client: 'sqlite3',
  connection: {
    filename: './db.db'
  }
});

const { Model } = require('objection');
Model.knex(knex);

await Model.query().select('1');
It looks like you are mixing BDD and TDD syntax, and the test returns before the async query result is awaited.
Try this first:
const Model = require('objection').Model;
const provider = require('../../../server/models/provider');
const Knex = require('knex');
const knexConfig = require('../../../knexfile');

const knex = Knex(knexConfig.test);
Model.knex(knex);

describe('Should test provider Model', () => {
  it('should return provider', () => {
    // return the promise so the test runner waits for the query to finish
    return Model.query().from('providers')
      .then((providers) => {
        expect(Array.isArray(providers)).toBe(true);
      });
  });
});
If that works, then try the query with your own provider model, which you didn't include in the question.
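For completeness, the environment selection can be wired so that tests pick the sqlite config automatically. A minimal sketch, assuming jest's default of NODE_ENV=test and the knexfile shown in the question; useNullAsDefault is a knex option that sqlite3 generally wants:

require('sqlite3'); // if the driver is missing, the jest worker can die with exactly this kind of ProcessTerminatedError

const Knex = require('knex');
const knexConfig = require('../../../knexfile');

// pick the knexfile entry matching the current environment
const environment = process.env.NODE_ENV || 'development';
const knex = Knex({
  ...knexConfig[environment],
  useNullAsDefault: true, // recommended by knex for sqlite3
});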

Related

How to override the url for RTK Query

I'm writing Pact integration tests, which require performing actual calls to a specific mock server while the tests run.
I cannot find a way to change the RTK Query baseUrl after the api has been initialised.
it('works with rtk', async () => {
  // ... setup pact expectations
  const reducer = {
    [rtkApi.reducerPath]: rtkApi.reducer,
  };
  // proxy call to configureStore()
  const { store } = setupStoreAndPersistor({
    enableLog: true,
    rootReducer: reducer,
    isProduction: false,
  });
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const dispatch = store.dispatch as any;
  dispatch(rtkApi.endpoints.GetModules.initiate());
  // sleep for 1 second
  await new Promise((resolve) => setTimeout(resolve, 1000));
  const data = store.getState().api;
  expect(data.queries['GetModules(undefined)']).toEqual({modules: []});
});
Base api
import { createApi } from '@reduxjs/toolkit/query/react';
import { graphqlRequestBaseQuery } from '@rtk-query/graphql-request-base-query';
import { GraphQLClient } from 'graphql-request';

export const client = new GraphQLClient('http://localhost:12355/graphql');

export const api = createApi({
  baseQuery: graphqlRequestBaseQuery({ client }),
  endpoints: () => ({}),
});
The query is very basic:
query GetModules {
  modules {
    name
  }
}
I tried digging into customizing baseQuery but was not able to get it working.
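One avenue worth sketching, since the base api module exports the GraphQLClient instance: graphql-request clients can be re-pointed at runtime with setEndpoint, so a test can redirect requests to the Pact mock server without rebuilding the api. A minimal sketch; the import path and mock-server URL are assumptions:

// in the test file
import { client } from './api';

beforeAll(() => {
  // every request made through graphqlRequestBaseQuery({ client }) now hits the mock server
  client.setEndpoint('http://localhost:8989/graphql');
});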

Cannot run unit test for firestore database, getting - TypeError: (0 , _rulesUnitTesting).apps is not a function

I am following a tutorial on unit testing, and the same code is not working for me.
I have set up my dependencies:
npm install -g firebase-tools
npm install @firebase/rules-unit-testing
I ran my emulator.
This is the code:
/**
 * @jest-environment node
 */
import {
  apps,
  initializeTestApp,
  initializeAdminApp,
  assertFails,
  assertSucceeds,
  clearFirestoreData,
} from "@firebase/rules-unit-testing";

const PROJECT_ID = "mydailyprep-ce845";

const getFirestore = () =>
  initializeTestApp({ projectId: PROJECT_ID }).firestore();

const getAdminFirestore = () =>
  initializeAdminApp({ projectId: PROJECT_ID }).firestore();

describe("Firestore security rules", () => {
  beforeEach(async () => {
    await clearFirestoreData({ projectId: PROJECT_ID });
  });

  it("can not read from the messages collection", async () => {
    const db = getFirestore();
    const testDoc = db.collection("messages").doc("testDoc");
    await assertFails(testDoc.get());
  });

  afterAll(async () => {
    const cleanUpApps = apps().map((app) => app.delete());
    await Promise.all(cleanUpApps);
  });
});
But for some reason I am getting the error. I don't get how the same code can work on one machine but not for me.
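A likely cause, though the question doesn't state the package version: v2 of @firebase/rules-unit-testing removed apps(), initializeTestApp(), initializeAdminApp() and clearFirestoreData() in favour of a single test-environment object, so code written against a v1 tutorial fails with exactly this kind of "is not a function" error. A sketch of the v2 equivalents; the rules file path is an assumption:

import fs from "fs";
import {
  initializeTestEnvironment,
  assertFails,
} from "@firebase/rules-unit-testing";

let testEnv;

beforeAll(async () => {
  testEnv = await initializeTestEnvironment({
    projectId: "mydailyprep-ce845",
    firestore: { rules: fs.readFileSync("firestore.rules", "utf8") },
  });
});

beforeEach(async () => {
  await testEnv.clearFirestore(); // replaces clearFirestoreData()
});

it("can not read from the messages collection", async () => {
  // unauthenticatedContext() replaces initializeTestApp() without auth options
  const db = testEnv.unauthenticatedContext().firestore();
  await assertFails(db.collection("messages").doc("testDoc").get());
});

afterAll(async () => {
  await testEnv.cleanup(); // replaces deleting apps() one by one
});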

Jest - mocking and testing pino multi streams based on log levels

I am struggling to find the correct way of mocking and testing pino in a logging service.
Here is my implementation of the pino logger; it writes to different file streams based on log level:
getChildLoggerService(fileNameString): pino.Logger {
  const streams: Streams = [
    { level: 'fatal', stream: fs.createWriteStream(path.join(process.cwd(), './logs/database-connect-fatal.log')) },
    { level: 'error', stream: fs.createWriteStream(path.join(process.cwd(), './logs/database-connect-error.log')) },
    { level: 'debug', stream: fs.createWriteStream(path.join(process.cwd(), './logs/database-connect-debug.log')) },
    { level: 'info', stream: fs.createWriteStream(path.join(process.cwd(), './logs/database-connect-info.log')) },
  ];
  return pino({
    useLevelLabels: true,
    base: {
      hostName: os.hostname(),
      platform: os.platform(),
      processId: process.pid,
      timestamp: this.appUtilService.getCurrentLocaleTimeZone(),
      // tslint:disable-next-line: object-literal-sort-keys
      fileName: this.appUtilService.getFileName(fileNameString),
    },
    level: this.appUtilService.getLogLevel(),
    messageKey: LOGGER_MSG_KEY,
    prettyPrint: this.appUtilService.checkForDevEnv(process.env.NODE_ENV),
    timestamp: () => {
      return this.appUtilService.getCurrentLocaleTimeZone();
    },
  }, multistream(streams)).child({
    connectorReqId: (process.env.REQ_APP_NAME === null ? 'local' : process.env.REQ_APP_NAME)
      + uuid.v4().toString()
  });
}
The most important part I want to test is the multistream setup, where different log files are written based on log level. So far I couldn't figure out a way to do that:
import pino, { DestinationStream } from 'pino';
const sinon = require('sinon');
import pinoms from 'pino-multi-stream';
const fs = require('fs');
const path = require('path');
const stream = require('stream');
const { PassThrough } = require('stream');

class EchoStream extends stream.Writable {
  _write(chunk, enc, next) {
    console.log('ssdsdsd', chunk.toString());
    next();
  }
}

import * as _ from 'lodash';
import { Writable } from 'stream';
import { mocked } from 'ts-jest/utils';
import { LogServiceInstance } from './log.service';

// jest.mock('pino', () => jest.fn().mockImplementation(() => { ====> Tried this inline mock, doesn't work
//   return {
//     child: jest.fn().mockReturnValue(jest.requireActual('pino').Logger)
//   }
// }));

// jest.mock('pino', () => {
//   return jest.fn().mockImplementation(() => {
//     return {
//       child: jest.fn().mockReturnValue(jest.requireActual('pino').Logger),
//       stream: jest.fn().mockImplementation(() => {
//         return [
//           {
//             level: 'info',
//             stream: fs.createWriteStream(
//               path.resolve(process.cwd(), '/test/database-connector-logs/info.log')
//             ),
//           },
//           {
//             level: 'warn',
//             stream: fs.createWriteStream(
//               path.resolve(process.cwd(), '/test/database-connector-logs/warn.log')
//             ),
//           },
//         ];
//       }),
//     };
//   });
// });

describe('Test suite for Log service', () => {
  // const mockedPino = mocked(pino, true);
  test('Test case for getLoggerInstance', () => {
    const mockedPinoMsStream = []; // (contents truncated in the original post)
    const mockedPinoStream = (pino.prototype.stream = jest.fn(() => mockedPinoMsStream));
    console.dir(pino);
    const prop = Reflect.ownKeys(pino).find((s) => {
      return s === 'symbols';
    });
    // Tried this but it did not work, as the actual files are written with the values
    pino[prop]['streamSym'] = jest.fn().mockImplementation(() => {
      return fs.createWriteStream(path.resolve(process.cwd(), './test/database-connector-logs/info.log'));
    });
    console.dir(pino);
    const log = LogServiceInstance.getChildLoggerService(__filename);
    console.dir(Object.getPrototypeOf(log));
    log.info('test logging');
    expect(2).toEqual(2);
  });
});
Could someone let me know where the mocking is wrong and how to mock it properly?
UPDATE:
I came to understand that mocking pino-multi-stream might do the trick, so I tried it this way. This was added at the very top, and all other mocks were removed (even inside the test suite):
const mockedPinoMultiStream = {
  stream: jest.fn().mockImplementation(() => {
    return { write: jest.fn().mockReturnValue(new PassThrough()) };
  })
};

jest.mock('pino-multi-stream', () => {
  return {
    multistream: jest.fn().mockReturnValue(mockedPinoMultiStream)
  };
});
I wanted the mock to let me test whether the correctly named file is used for each level, but this also results in an exception:
TypeError: stream.write is not a function
at Pino.write (/XXX/node_modules/pino/lib/proto.js:161:15)
at Pino.LOG (/XXXX/node_modules/pino/lib/tools.js:39:26)
LATEST UPDATE:
So I resolved the exception by modifying the way pino-multi-stream is mocked:
const { PassThrough } = require('stream');
...
...
const mockedPinoMultiStream = {
  write: jest.fn().mockImplementation((data) => {
    return new PassThrough();
  })
};
Now there is no more exception, and the write method is properly mocked when I print "pino". But I do not understand how to test that different files are used for different log levels. Could someone let me know how that is to be done?
Note: I tried setting a return value of fs.createWriteStream instead of a PassThrough, but that didn't work.
At last, I found the answer to making use of pino streams based on different log levels.
I went ahead and created a test directory to house the test log files. In reality, we do not want pino adulterating the actual log files, so I decided to mock the pino streams at the start of the jest run. This setup file gets executed before any test suite is triggered. I modified the jest configuration in package.json like this:
"setupFiles": [
"<rootDir>/jest-setup/stream.logger.js"
],
In the stream.logger.js file, I added:
const pinoms = require('pino-multi-stream');
const fs = require('fs');
const path = require('path');
const stream = require('stream');
const Writable = require('stream').Writable;
const { PassThrough } = require('stream');
const pino = require('pino');

class MyWritable extends Writable {
  constructor(options) {
    super(options);
  }
  _write(chunk, encoding, callback) {
    const writeStream = fs.createWriteStream(path.resolve(process.cwd(), './test/logs/info.log'));
    writeStream.write(chunk, 'utf-8');
    writeStream.emit('close');
    writeStream.end();
  }
}

const mockedPinoMultiStream = {
  write: jest.fn().mockImplementation((data) => {
    const writeStream = new MyWritable();
    return writeStream._write(data);
  })
};

jest.mock('pino-multi-stream', () => {
  return {
    multistream: jest.fn().mockReturnValue(mockedPinoMultiStream)
  };
});
Then I created the test file, log.service.spec.ts:
import * as pino from 'pino';
const sinon = require('sinon');
import pinoms from 'pino-multi-stream';
const fs = require('fs');
const path = require('path');
const stream = require('stream');
import * as _ from 'lodash';
import { Writable } from 'stream';
import { mocked } from 'ts-jest/utils';
import { LogServiceInstance } from './log.service';

describe('Test suite for Log service', () => {
  // const mockedPino = mocked(pino, true);
  afterEach(() => {
    // delete the contents of the log files after each test suite
    fs.truncate(path.resolve(process.cwd(), './test/logs/info.log'), 0, () => {
      console.dir('Info log file deleted');
    });
    fs.truncate(path.resolve(process.cwd(), './test/logs/warn.log'), 0, () => {
      console.dir('Warn log file deleted');
    });
    fs.truncate(path.resolve(process.cwd(), './test/logs/debug.log'), 0, () => {
      console.dir('Debug log file deleted');
    });
  });

  test('Test case for getLoggerInstance', () => {
    const pinoLoggerInstance = LogServiceInstance.getChildLoggerService(__filename);
    pinoLoggerInstance.info('test logging');
    _.map(Object.getOwnPropertySymbols(pinoLoggerInstance), (mapItems: any) => {
      if (mapItems.toString().includes('Symbol')) {
        if (mapItems.toString().includes('pino.level')) {
          expect(pinoLoggerInstance[mapItems]).toEqual(20);
        }
      }
      if (mapItems.toString().includes('pino.chindings')) {
        const childInstance = pinoLoggerInstance[mapItems].toString().substr(1);
        const jsonString = '{' + childInstance + '}';
        const expectedObj = Object.create(JSON.parse(jsonString));
        expect(expectedObj.fileName).toEqual('log.service.spec');
        expect(expectedObj.appName).toEqual('AppJestTesting');
        expect(expectedObj.connectorReqId).toEqual(expect.objectContaining(new String('AppJestTesting')));
      }
    });
    // make sure the info.log file is written in this case
    const infoBuffRead = fs.createReadStream(path.resolve(process.cwd(), './test/logs/info.log')).read(1024);
    expect(infoBuffRead).toBeDefined();
    // now write a warn log
    pinoLoggerInstance.warn('test warning log');
    const warnBuffRead = fs.createReadStream(path.resolve(process.cwd(), './test/logs/warn.log')).read(1024);
    expect(warnBuffRead).toBeDefined();
    // now write a debug log
    pinoLoggerInstance.debug('test debug log');
    const debugBuffRead = fs.createReadStream(path.resolve(process.cwd(), './test/logs/debug.log')).read(1024);
    expect(debugBuffRead).toBeDefined();
  });
});
I also made sure that the test log files do not get overwhelmed with data over time by deleting their contents after each execution.
Hope this helps people trying to test pino multi-stream.
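For reference, the level routing itself can also be verified without touching the filesystem at all, by handing multistream in-memory writables and asserting on what each one received. A minimal sketch, independent of the service above; note that in multistream a stream receives its own level and everything more severe:

const pino = require('pino');
const { multistream } = require('pino-multi-stream');
const { Writable } = require('stream');

// collects everything written to it into the given array of strings
const memoryStream = (lines) =>
  new Writable({
    write(chunk, _enc, cb) {
      lines.push(chunk.toString());
      cb();
    },
  });

test('records are routed to the streams matching their level', () => {
  const infoLines = [];
  const errorLines = [];
  const log = pino({ level: 'info' }, multistream([
    { level: 'info', stream: memoryStream(infoLines) },
    { level: 'error', stream: memoryStream(errorLines) },
  ]));

  log.info('hello');
  log.error('boom');

  // the info stream sees both records, the error stream only the error
  expect(infoLines.some((l) => l.includes('hello'))).toBe(true);
  expect(errorLines.some((l) => l.includes('hello'))).toBe(false);
  expect(errorLines.some((l) => l.includes('boom'))).toBe(true);
});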

How to mock typeorm connection

In integration tests I am using the following snippet to create a connection:
import { Connection, createConnection } from 'typeorm';
// @ts-ignore
import options from './../../../ormconfig.js';

export function connectDb() {
  let con: Connection;
  beforeAll(async () => {
    con = await createConnection(options);
  });
  afterAll(async () => {
    await con.close();
  });
}
I am trying to unit test a class which calls a TypeORM repository in one of its methods. Without calling the helper function connectDb() above, I get the following error, which is expected of course:
ConnectionNotFoundError: Connection "default" was not found.
My question is: how can I mock the connection? I have tried the following without any success:
import typeorm, { createConnection } from 'typeorm';
// @ts-ignore
import options from './../../../ormconfig.js';

const mockedTypeorm = typeorm as jest.Mocked<typeof typeorm>;
jest.mock('typeorm');

beforeEach(() => {
  // mockedTypeorm.createConnection.mockImplementation(() => createConnection(options)); // failed
  mockedTypeorm.createConnection = jest.fn().mockImplementation(() => typeorm.Connection);
  MethodRepository.prototype.changeMethod = jest.fn().mockImplementation(() => {
    return true;
  });
});
Running the tests with that kind of mocking gives this error:
TypeError: decorator is not a function
Note: if I call connectDb() in the tests, everything works fine, but I don't want to do that since it takes too much time: some data is inserted into the db before any test runs.
Some code has been omitted for simplicity. Any help will be appreciated.
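One note before the solution below, on the decorator error specifically: a bare jest.mock('typeorm') auto-mocks every export, including the @Entity/@Column decorators the entity files use, which is a plausible source of "decorator is not a function". A sketch of a partial mock that keeps the real decorators and replaces only the connection functions; the mocked return values are illustrative only:

jest.mock('typeorm', () => {
  const actual = jest.requireActual('typeorm');
  return {
    ...actual, // keep @Entity, @Column and the other decorators intact
    createConnection: jest.fn().mockResolvedValue({ close: jest.fn() }),
    getConnection: jest.fn(),
  };
});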
After a bunch of research and experimenting I've ended up with this solution. I hope it works for someone else who experienced the same issue...
- it does not need any DB connection
- it tests the service-layer content, not the DB layer itself
- the tests can cover all the cases I need without hassle; I just have to provide the right output to the related typeorm methods
This is the method I want to test:
@Injectable()
export class TemplatesService {
  constructor(private readonly templatesRepository: TemplatesRepository) {}

  async list(filter: ListTemplatesReqDTO) {
    const qb = this.templatesRepository.createQueryBuilder("tl");
    const { searchQuery, ...otherFilters } = filter;
    if (filter.languages) {
      qb.where("tl.language IN (:...languages)");
    }
    if (filter.templateTypes) {
      qb.where("tl.templateType IN (:...templateTypes)");
    }
    if (searchQuery) {
      qb.where("tl.name LIKE :searchQuery", { searchQuery: `%${searchQuery}%` });
    }
    if (filter.skip) {
      qb.skip(filter.skip);
    }
    if (filter.take) {
      qb.take(filter.take);
    }
    if (filter.sort) {
      qb.orderBy(filter.sort, filter.order === "ASC" ? "ASC" : "DESC");
    }
    return qb.setParameters(otherFilters).getManyAndCount();
  }
  ...
}
This is the test:
import { SinonStub, createSandbox, restore, stub } from "sinon";
import * as typeorm from "typeorm";

describe("TemplatesService", () => {
  let service: TemplatesService;
  let repo: TemplatesRepository;
  const sandbox = createSandbox();
  const connectionStub = sandbox.createStubInstance(typeorm.Connection);
  const templatesRepoStub = sandbox.createStubInstance(TemplatesRepository);
  const queryBuilderStub = sandbox.createStubInstance(typeorm.SelectQueryBuilder);

  stub(typeorm, "createConnection").resolves((connectionStub as unknown) as typeorm.Connection);
  connectionStub.getCustomRepository
    .withArgs(TemplatesRepository)
    .returns((templatesRepoStub as unknown) as TemplatesRepository);

  beforeAll(async () => {
    const builder: TestingModuleBuilder = Test.createTestingModule({
      imports: [
        TypeOrmModule.forRoot({
          type: "postgres",
          database: "test",
          entities: [Template],
          synchronize: true,
          dropSchema: true
        })
      ],
      providers: [ApiGuard, TemplatesService, TemplatesRepository],
      controllers: []
    });
    const module = await builder.compile();
    service = module.get<TemplatesService>(TemplatesService);
    repo = module.get<TemplatesRepository>(TemplatesRepository);
  });

  beforeEach(async () => {
    // do something
  });

  afterEach(() => {
    sandbox.restore();
    restore();
  });

  it("Service should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("list", () => {
    let fakeCreateQueryBuilder;
    it("should return records", async () => {
      stub(queryBuilderStub, "skip" as any).returnsThis();
      stub(queryBuilderStub, "take" as any).returnsThis();
      stub(queryBuilderStub, "sort" as any).returnsThis();
      stub(queryBuilderStub, "setParameters" as any).returnsThis();
      stub(queryBuilderStub, "getManyAndCount" as any).resolves([
        templatesRepoMocksListSuccess,
        templatesRepoMocksListSuccess.length
      ]);
      fakeCreateQueryBuilder = stub(repo, "createQueryBuilder" as any).returns(queryBuilderStub);

      const [items, totalCount] = await service.list({});

      expect(fakeCreateQueryBuilder.calledOnce).toBe(true);
      expect(items.length).toBeGreaterThan(0);
      expect(totalCount).toBeGreaterThan(0);
    });
  });
});
cheers!
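If sinon isn't already in the toolbox, the same shape can be mocked with jest alone; the essential trick is that every builder method returns this so the chain keeps working. A sketch under the same assumptions as the test above (the fixture values are made up):

const qb: any = {
  where: jest.fn().mockReturnThis(),
  skip: jest.fn().mockReturnThis(),
  take: jest.fn().mockReturnThis(),
  orderBy: jest.fn().mockReturnThis(),
  setParameters: jest.fn().mockReturnThis(),
  getManyAndCount: jest.fn().mockResolvedValue([[{ id: 1, name: "tpl" }], 1]),
};
jest.spyOn(repo, "createQueryBuilder").mockReturnValue(qb);

const [items, totalCount] = await service.list({});
expect(items).toHaveLength(1);
expect(totalCount).toBe(1);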

Knex pool full on migration

I'm trying to get started with knex.js and I can't get migrations to work. Knex works fine for my API calls. Here's my setup:
knexfile.js
const env = process.env;

module.exports = {
  client: 'mysql',
  connection: {
    host: env.DB_HOST,
    database: env.DB_NAME,
    user: env.DB_USER,
    password: env.DB_PASSWORD,
    port: env.PORT
  },
  pool: {
    min: 0,
    max: 50
  },
  migrations: {
    directory: './db/migrations',
    tableName: 'knex_migrations'
  },
  seeds: {
    directory: './db/seeds'
  }
};
knex.js
const config = require('../knexfile.js');
module.exports = require('knex')(config);
events.js
const express = require('express');
const router = express.Router();
const knex = require('../../db/knex.js');

// GET api/events
router.get('/', (req, res) => {
  knex('events')
    .then(events => { res.send(events) })
    .catch(err => { console.log(err); })
});

module.exports = router;
and then I have a file in the migrations folder with:
exports.up = function(knex) {
  return knex.schema.createTable('users', function (t) {
    t.increments('id').primary()
    t.string('username').notNullable()
    t.string('password').notNullable()
    t.timestamps(false, true)
  }).then(() => { console.log('created users table') })
    .catch((err) => { throw err })
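    // caution: destroying the pool inside a migration is risky; the migration
    // CLI still needs the connection afterwards to record the run in the
    // knex_migrations table, and that can itself surface as a pool timeout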
    .finally(() => { knex.destroy() })
};

exports.down = function(knex) {
  return knex.schema.dropTableIfExists('users')
};
When I run knex migrate:latest I get TimeoutError: Knex: Timeout acquiring a connection. The pool is probably full. Are you missing a .transacting(trx) call?
I know similar questions have been asked before, but I can't seem to find any that shed light on my particular situation. I've tried adding a knex.destroy() to the end of my GET request but that doesn't seem to help (it just makes the connection unusable if I add other request handlers below).
I did try checking the knex.client.pool in a finally clause at the end of the GET request. numUsed was 0, numFree was 1, numPendingAcquires and numPendingCreates were both 0. I do find it odd that numFree was only 1 given that my knexfile specifies max 50. Any advice greatly appreciated.
Following @technogeek1995's comment, the answer turned out to be adding require('dotenv').config({path: '../.env'}); to knexfile.js (in retrospect, this part seems obvious), and running the CLI from the same directory. Hope this helps someone else.
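Concretely, the top of the knexfile ends up looking like this; a sketch, with the .env path depending on where the CLI is run from:

// knexfile.js
require('dotenv').config({ path: '../.env' }); // load DB_HOST etc. before they are read below
const env = process.env;

module.exports = {
  client: 'mysql',
  connection: {
    host: env.DB_HOST,
    database: env.DB_NAME,
    user: env.DB_USER,
    password: env.DB_PASSWORD,
    port: env.PORT
  },
  // ...pool, migrations and seeds exactly as in the question
};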
