Mocha testing PostgreSQL with Knex is giving me a MigrationLocked error - node.js

I have got a local development environment working for Node/PostgreSQL/Knex in the sense that I can post to a development database on my machine using an API. I am now trying to create tests for this functionality but am getting an error.
Here's my config:
//knexfile.js
module.exports = {
  development: {
    client: 'pg',
    connection: {
      host: '127.0.0.1',
      user: 'dbUser',
      password: 'dbpword',
      port: 5432,
      database: 'example-name'
    },
    migrations: {
      directory: __dirname + '/db/migrations'
    },
    seeds: {
      directory: __dirname + '/db/seeds/development'
    }
  },
}
//db.js
const config = require('../knexfile.js');
const env = process.env.NODE_ENV || 'development';
const knex = require("knex")(config[env]);
module.exports = knex;
knex.migrate.latest([config]);
And then my tests:
import chai from 'chai';
import { expect } from 'chai';
import chaiHttp from 'chai-http';
import knex from '../../db/db';
import app from '../../server';

chai.use(chaiHttp);

describe('Tokens API', () => {
  beforeEach((done) => {
    knex.migrate.rollback()
      .then(() => {
        knex.migrate.latest()
          .then(() => {
            return knex.seed.run()
              .then(() => {
                done();
              });
          });
      });
  });

  afterEach((done) => {
    knex.migrate.rollback()
      .then(() => {
        done();
      });
  });

  describe('POST /users', () => {
    it('posts a list of users to the database with all mandatory fields', (done) => {
      chai.request(app)
        .post('/users')
        .send({
          "users": [
            "steve",
            "whoever",
            "matt",
            "another"]})
        .end((err, res) => {
          expect(err).to.be.null;
          expect(res).to.have.status(200);
          expect(res).to.be.json;
          done();
        });
    });
  });
});
When I run this I get the following error twice - I think for the knex calls in the beforeEach block:
Knex:warning - Can't take lock to run migrations: Migration table is already locked
Knex:warning - If you are sure migrations are not running you can release the lock manually by deleting all the rows from migrations lock table: knex_migrations_lock
Unhandled rejection MigrationLocked: Migration table is already locked
I have tried numerous things, including clearing out the knex_migrations_lock table. The only support I can find online is this thread, which suggests clearing out the lock table using DELETE FROM Migrations_lock WHERE id <> 0; however, my lock table only has an is_locked column with zero values.
Any idea what's going on?
EDIT: I've just realised that if you comment out all the knex calls, the test actually passes. Could this be because I am effectively calling knex twice, once from db.js and once indirectly through server.js? If that's the case, how do I avoid doing this, since surely I need to call the knex setup for Node to run it?

For anyone stumbling across this, the problem was actually coming from db.js, specifically the last line:
const config = require('../knexfile.js');
const env = process.env.NODE_ENV || 'development';
const knex = require("knex")(config[env]);
module.exports = knex;
knex.migrate.latest([config]);
Of course this is asynchronous, and the tests were importing this file and then running their own knex functions before the migration had finished, causing the lock. I got around this by adding a guard to stop it from running while testing:
if (process.env.NODE_ENV != 'test') {
  knex.migrate.latest([config])
}
You can then create a test environment by adding process.env.NODE_ENV='test' to each spec file, or by installing the npm env test module.
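For completeness, a rough sketch of what that could look like; the test database name and the test seed directory below are assumptions, not part of the original setup:

//knexfile.js - an additional 'test' environment alongside 'development'
test: {
  client: 'pg',
  connection: {
    host: '127.0.0.1',
    user: 'dbUser',
    password: 'dbpword',
    port: 5432,
    database: 'example-name-test'   // a separate database used only by the tests (assumed name)
  },
  migrations: { directory: __dirname + '/db/migrations' },
  seeds: { directory: __dirname + '/db/seeds/test' }
},

// at the very top of each spec file, before db.js is imported
process.env.NODE_ENV = 'test';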

Had the exact same issue; it ended up being due to my API calling the database when it was initialized by the supertest library.
For example, my test file:
var db = require('../db');
var api = require('../api');
var supertest = require('supertest')(api);
describe('Session routes', () => {
  beforeEach((done) => {
    db.migrate.rollback()
      .then(() => {
        db.migrate.latest()
          .then(() => {
            return db.seed.run()
              .then(() => {
                done();
              });
          });
      });
  });

  afterEach((done) => {
    db.migrate.rollback()
      .then(() => {
        done();
      });
  });

  it('GET /session should error with no token', (done) => {
    supertest
      .get('/session')
      .set('Accept', 'application/json')
      .expect('Content-Type', /json/)
      .expect(401, {
        error: 'Unauthorized'
      }, done);
  });
});
On line 2, it requires my api; when my api is required, the following code runs straight away to initialize my api's external services API:
var db = require('./other-postgres-library');
var servicesApi = require('./services/api')(db);
This would connect to a bunch of external services and write the results to the database.
So while the tests were running, my app was throwing errors because it was trying to write to a database that was being rolled back/migrated/seeded, etc.
I changed my inner services API to initialize lazily and all my problems have disappeared.
In your case, I would hazard a guess that when your tests run the line import app from '../../server';, your app/server code is trying to run some queries against the database.
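As a rough illustration of the lazy initialization mentioned above (the module layout and function names here are made up for the example, not taken from the original code):

// services/api.js - build the services API on first use instead of at require time
let servicesApi = null;

function buildServicesApi(db) {
  // ...connect to external services and write the results to the database here...
  return { db };
}

module.exports = function getServicesApi(db) {
  if (!servicesApi) {
    servicesApi = buildServicesApi(db);   // only touches the database on the first call
  }
  return servicesApi;
};

Requiring the module then no longer performs any database work, so the test file can run its rollback/migrate/seed cycle before the first real request comes in.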

Related

Mocha/Supertest is not exiting on completion of tests

I am using the mocha testing framework, and after running the following test it does not exit.
I have tried Promises and async await with no luck. --exit at the end of the mocha command works, but I want to find the source of the issue.
I am wondering if it is the knex database connection used in the beforeEach and afterEach functions. However, I do not know how to disconnect the db connection other than with destroy(), and if that is used the following tests do not run.
Can anybody see anything within the code that could be causing this issue? Or recommend another way to remedy this?
const app = require('../../app');
const request = require('supertest');

describe('Route: /' + route, () => {
  let token = '';
  let route = 'user';

  before(function (done) {
    const user = { email: 'admin@email.com', password: 'password' };
    request(app)
      .post('/login')
      .send(user)
      .end((err, res) => {
        token = res.body.token;
        done();
      });
  });

  beforeEach(async () => {
    await knex.migrate.rollback();
    await knex.migrate.latest();
    await knex.seed.run();
  });

  afterEach(() => knex.migrate.rollback());

  it(`should not be able to consume /${route} since no token was sent`, (done) => {
    request(app)
      .get(`/${route}`)
      .expect(401, done)
  });

  it(`should be able to consume /${route} since a valid token was sent`, (done) => {
    request(app)
      .get(`/${route}`)
      .set('Authorization', 'Bearer ' + token)
      .expect(200, done);
  });
});
For anyone who comes across this and has a similar problem.
Using the following links:
- GitHub mocha debug example
- Mocha docs --exit
- wtfnode
I was able to debug the problem.
wtfnode, used within my test, showed me that my database was still connected. Placing the following within the test's describe body:
const wtf = require('wtfnode');
after(() => wtf.dump());
returned:
- Sockets:
  - 127.0.0.1:58898 -> 127.0.0.1:5432
    - Listeners:
      - connect: Connection.connect @ <user_path>/node_modules/pg/lib/connection.js:59
I am using knex to connect to the database, so I've added the code below to the file helper.js in my test directory.
/test/helper.js
const knex = require('../database/db');

before(function () {
  if (!knex.client.pool) return knex.initialize();
});

beforeEach(async function () {
  await knex.migrate.rollback();
  await knex.migrate.latest();
  await knex.seed.run();
});

afterEach(function () {
  return knex.migrate.rollback();
});

after(function () {
  return knex.destroy();
});
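If the helper file is not picked up automatically by your test glob, one option (assuming Mocha 6 or newer, and whatever spec glob you actually use) is to load it explicitly before the spec files:

mocha --file test/helper.js 'test/**/*.spec.js'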

reset a database before each test

I'm using Node and supertest for a simple app, with SQLite3 as the local test database. I wrote a simple test to get a user inserted into the database. I want to reset the database each time a test is run. I'm looking through the docs right now and can't seem to locate how to do it. I figured I would ask here because it seems someone would most likely know.
const request = require('supertest');
const server = require('../server');

describe('Authentication', function() {
  //database reset here
  it('should create a new user /users/registration', function(done) {
    request(server)
      .post('/users/register')
      .send({
        username: 'user-name',
        email: 'luser-name@gmail.com',
        password: '12345'
      })
      .set('Accept', 'application/json')
      .expect(201, done);
  });
});
If you want to run any piece of code before each test, you can use the beforeEach function in Jest:
describe('my test', () => {
  beforeEach(() => {
    // code to run before each test
  });

  test('test 1', () => {
    // code
  });

  test('test 2', () => {
    // code
  });
});
The best way to do this is to have some logic in the routing functions of your API:
- Receive an API request
- Check if the 'X-MOCK-HEADER' header exists
- If it does, route to the mock version of the endpoint
So your mock for create user would always return 201 OK; your mock endpoint would do something like this:
const routes = {
  CREATE_USER_OK: () => { return { /* ... */ } },          // make sure these return proper HTTP responses
  CREATE_USER_BAD_REQUEST: () => { return { /* ... */ } }
}

return routes[HEADER_VALUE]()
The reason is that you're testing the route, not the database class, in this instance, so you just want to return static data. If you want to test something else, just change the X-MOCK-HEADER value to whatever you want and add a mock route that returns the right HTTP response/code; I'd need to know what the API code looked like to help you with the backend implementation.
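As a rough sketch of how that header check might be wired into an Express route (the header name comes from the steps above; the handlers, status codes, and response bodies are illustrative assumptions):

const express = require('express');
const app = express();

// hypothetical canned responses for POST /users
const mockRoutes = {
  CREATE_USER_OK: () => ({ status: 201, body: { id: 1, username: 'mock-user' } }),
  CREATE_USER_BAD_REQUEST: () => ({ status: 400, body: { error: 'Bad Request' } })
};

app.post('/users', (req, res, next) => {
  const mockKey = req.get('X-MOCK-HEADER');          // e.g. 'CREATE_USER_OK'
  if (mockKey && mockRoutes[mockKey]) {
    const { status, body } = mockRoutes[mockKey]();  // static data, no database involved
    return res.status(status).json(body);
  }
  return next();                                     // fall through to the real handler
});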
If possible, stay away from messing with staging databases for testing, because down the road you will suffer a lot of pain as they gradually fill with garbage.
Also, if you're working with a front-end app, you can quickly prototype with static data; this is especially useful if you've got a front-end team waiting for an API endpoint to, say, create a login screen.
There's no defined way to reset an SQLite db; just delete the database file and recreate it.
Sqlite: How do I reset all database tables?
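A small sketch of that delete-and-recreate approach, assuming Mocha-style hooks and that the 'test' entry in knexfile.js is an sqlite3 config with a connection.filename (both assumptions):

const fs = require('fs');
const knex = require('knex');
const config = require('../knexfile.js')['test'];

before(async () => {
  // delete the old database file, if any, before anything connects to it
  if (fs.existsSync(config.connection.filename)) {
    fs.unlinkSync(config.connection.filename);
  }
  // recreate the schema from the migrations with a short-lived knex instance
  const db = knex(config);
  await db.migrate.latest();
  await db.destroy();
});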
I did this in the test file and it works fine:
const request = require('supertest');
const server = require('../server');
const knex = require('knex');
const dbConfig = require('../knexfile.js')['test'];
const db = knex(dbConfig);

describe('Authentication', () => {
  beforeEach(async () => {
    await db('users').truncate();
  });

  it('should create a new user /users/registration', function(done) {
    request(server)
      .post('/users/register')
      .send({
        username: 'user-name',
        email: 'luser-name@gmail.com',
        password: '12345'
      })
      .set('Accept', 'application/json')
      .expect(201, done);
  });
});

"It" statement keep using regular database instead of the test database

I am doing some testing and I just want to test my endpoint: what goes in and what comes out, that's it.
I have a test database that I want to use when the tests run.
In beforeAll I connect to the test database, and in my beforeEach I make a POST of a user.
It works: the user is inserted into the test database.
The problem is that as soon as I try to make a request in an it statement, the database used is the app's one, not the test database :/
beforeAll(async () => {
  await mongoose.connect(testDatabase);
});

afterAll(async function () {
  await mongoose.disconnect()
});

describe('/user', () => {
  let app;

  beforeEach(async () => {
    const moduleFixture: TestingModule = await Test.createTestingModule({
      imports: [AppModule, UserModule],
    }).compile();
    app = moduleFixture.createNestApplication();
    await app.init();

    // this sends data correctly to the TEST database
    return request(app.getHttpServer())
      .post('/user').send(TEST_USER)
      .set('Accept', 'application/json')
      .expect(201)
      .then(r => console.log("Result of post", r.body))
  });

  it('GET', () => {
    // my problem here: this retrieves the regular database content (set up in the main file), NOT the TEST database set up in beforeAll
    return request(app)
      .get('/user')
      .expect(200)
      .then(r => console.log("Result of get", r.body))
  });
});
Am I doing it wrong?
Thank you guys!
My bet is that your app is not using the connection you establish there; your app, during the request, most likely creates its own connection.
Could you show a minimum reproduction repository with an MVP implementation of your route under test, including the module and dependencies?
In the end, my mistake was in this line:
imports: [AppModule, UserModule],
AppModule overrode my test's mongoose.connect with another connection established inside it.
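One way around that, as a rough sketch (assuming @nestjs/mongoose and a TEST_DATABASE_URI environment variable, neither of which appears in the original code), is to import only the module under test plus a Mongoose connection pointed at the test database, instead of the whole AppModule:

// const { MongooseModule } = require('@nestjs/mongoose');
const moduleFixture = await Test.createTestingModule({
  imports: [
    MongooseModule.forRoot(process.env.TEST_DATABASE_URI), // the test connection, not the app's own
    UserModule,                                            // only the module under test
  ],
}).compile();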

where to destroy knex connection

I'm using knex with pg.
I have a project similar to the following.
dbClient.js
const dbClient = require('knex')({
  client: 'pg',
  connection: {
    host: '127.0.0.1',
    user: 'user',
    password: 'password',
    database: 'staging',
    port: '5431'
  }
})

module.exports = dbClient
libs.js
const knex = require('./dbClient.js')

async function doThis(email) {
  const last = await knex('users').where({email}).first('last_name').then(res => res.last_name)
  // knex.destroy()
  return last
}

async function doThat(email) {
  const first = await knex('users').where({email}).first('first_name').then(res => res.first_name)
  // knex.destroy()
  return first
}

module.exports = {
  doThat,
  doThis
}
test01.js
const {doThis, doThat} = require('./libs.js');

(async () => {
  try {
    const res1 = await doThis('user53@gmail.com')
    console.log(res1)
    const res2 = await doThat('user53@gmail.com')
    console.log(res2)
  } catch (err) {
    console.log(err)
  }
})()
With knex.destroy() commented out in libs.js, as shown above, node test01 outputs res1 and res2, but the connection hangs indefinitely and the command never returns.
If I uncomment knex.destroy() in libs.js, doThis will execute, and then the command hangs at doThat, as the connection was already closed in doThis.
My question is:
What is the best location for knex.destroy()? Or is there another way to do it?
Thanks for your time!
knex.destroy() is a one-time operation: after destroying a connection pool, you need a brand new pool for the next operation.
The db client module you export is cached in Node's module cache, so a new connection pool is not created every time you require it.
This is the intended usage: the pool is supposed to be destroyed when the app exits or when all the tests are done. If you have reasons to create/destroy connections for every operation (like in a serverless environment), you should not reuse the destroyed client; rather, create a new instance every time.
Otherwise, it defeats the purpose of connection pooling.
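A minimal sketch of that create-per-operation pattern, e.g. for a serverless handler (the handler shape, config path, and query below are assumptions, not part of the question):

const knexFactory = require('knex');
const config = require('./knexfile').production;

exports.handler = async (event) => {
  const knex = knexFactory(config);   // a fresh pool for this invocation
  try {
    return await knex('users').where({ id: event.userId }).first();
  } finally {
    await knex.destroy();             // release the pool before the function finishes
  }
};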
Update about lambda/server-less environments:
Technically, a function and its resources are to be released after the lambda function has run; this includes any connections it might have opened. This is necessary for truly stateless functions, so it is advisable to close the connection when the function is done. However, a lot of functions opening and closing a lot of connections may eventually make the DB server run out of connections (see this discussion for example). One solution might be to use an intermediate pooler like PgBouncer or PgPool that negotiates connections between the DB server and the Lambda functions.
The other way is for the platform provider (AWS) to add special pooling capabilities to the lambda environment and let functions share long-lived resources.
Destroying the connection after every query is like packing your guitar up every time you play a note. Just pull it out at the beginning of the performance, play all the songs and put it away at the end.
Likewise, destroy the connection when you're done with it for the rest of the application, not after each query like this. In a web server, this is probably never since you're going to kill it with a signal at some indeterminate point and an active connection is likely a necessity for the app until then.
For tests, you'll probably want to make use of the destroy function to avoid hanging. Similarly, in a (contrived?) application like you've shown, if you are experiencing a hang and the app gets stuck, destroy the connection one time when you're done with it.
Here's an illustrative example for Mocha, which was mentioned in a comment and which it seems reasonable to assume (or something similar) is being used by folks who wind up in this thread. The pattern of setting up before all tests, tearing down after all tests, and doing per-test-case setup and teardown is generic.
Relevant to your question, after(() => knex.destroy()); is the teardown call at the end of all tests. Without this, Mocha hangs. Note that we also shut down the http server per test so there are multiple candidates for hanging the test suite to look out for.
server.js:
const express = require("express");
const createServer = (knex, port=3000) => {
const app = express();
app.get("/users/:username", (request, response) => {
knex
.where("username", request.params.username)
.select()
.first()
.table("users")
.then(user => user ? response.json({data: user})
: response.sendStatus(404))
.catch(err => response.sendStatus(500))
});
const server = app.listen(port, () =>
console.log(`[server] listening on port ${port}`)
);
return {
app,
close: cb => server.close(() => {
console.log("[server] closed");
cb && cb();
})
};
};
module.exports = {createServer};
server.test.js:
const chai = require("chai");
const chaiHttp = require("chai-http");
const {createServer} = require("./server");
const {expect} = chai;
chai.use(chaiHttp);
chai.config.truncateThreshold = 0;
describe("server", function () {
this.timeout(3000);
let knex;
let server;
let app;
before(() => {
knex = require("knex")({
client: "pg",
connection: "postgresql://postgres#localhost",
});
});
beforeEach(done => {
server = createServer(knex);
app = server.app;
knex
.schema
.dropTableIfExists("users")
.then(() =>
knex.schema.createTable("users", table => {
table.increments();
table.string("username");
})
)
.then(() => knex("users").insert({
username: "foo"
}))
.then(() => done())
.catch(err => done(err));
});
afterEach(done => server.close(done));
after(() => knex.destroy());
it("should get user 'foo'", done => {
chai
.request(app)
.get("/users/foo")
.then(response => {
expect(response.status).to.equal(200);
expect(response).to.be.json;
expect(response.body).to.be.instanceOf(Object);
expect(response.body.data).to.be.instanceOf(Object);
expect(response.body.data.username).to.eq("foo");
done();
})
.catch(err => done(err));
});
});
Packages:
"knex": "0.21.6",
"express": "4.17.1",
"mocha": "8.0.1",
"pg": "8.3.0",
"node": "12.19.0"
You probably don't usually need to explicitly call knex.destroy() – this is implied by the documentation itself saying (emphasis mine):
If you ever need to explicitly teardown the connection pool, you may use knex.destroy([callback]).

How should I start my nodejs application automatically for tests

I have a Node.js RESTful-style service which has no front end; it just accepts data and then does something with it.
I have unit tested most of the method-level stuff I want to; however, now I want to do some automated tests to prove it all works together. When I am using ASP.NET MVC and IIS it's easy, as the server is always on, so I just set up the scenario (insert dummy guff into the DB), then make an HTTP request, send it to the server, and assert that I get back what I expect.
However, there are a few challenges in Node.js, as applications need to be run via the command line or some other mechanism. Given that I have an app.js which will start listening, is there some way for me to start it automatically before I run my tests and then close it once my tests are finished?
I am currently using Yadda with Mocha for my testing so I can keep it written in a BDD style; however, I am hoping the starting of the web app is agnostic of the frameworks I am using.
Just expose some methods to start and stop your webserver. Your app.js file could be something like this:
var express = require('express')
var http = require('http')

var app = express()
var server = null
var port = 3000

// configure your app here...

exports.start = function(cb) {
  server = http.createServer(app).listen(port, function () {
    console.log('Express server listening on port ' + port)
    cb && cb()
  })
}

exports.close = function(cb) {
  if (server) server.close(cb)
}

// when app.js is launched directly
if (module.id === require.main.id) {
  exports.start()
}
And then in your tests you can do something like this (mocha based example):
var app = require('../app')

before(function(done) {
  app.start(done)
})

after(function(done) {
  app.close(done)
})
Have a look at supertest: https://github.com/visionmedia/supertest
You can write tests like:
describe('GET /users', function(){
  it('respond with json', function(done){
    request(app)
      .get('/user')
      .set('Accept', 'application/json')
      .expect('Content-Type', /json/)
      .expect(200, done);
  })
})
Using gimenete's answer, here's an example of a service (server) with async await and express:
service.js:
const app = require('express')()
const config = require('./config')

let runningService

async function start() {
  return new Promise((resolve, reject) => {
    runningService = app.listen(config.get('port'), config.get('hostname'), () => {
      console.log(`API Gateway service running at http://${config.get('hostname')}:${config.get('port')}/`)
      resolve()
    })
  })
}

async function close() {
  return new Promise((resolve, reject) => {
    if (runningService) {
      // resolve only once the server has actually stopped listening
      runningService.close(() => {
        resolve()
      })
    } else {
      reject()
    }
  })
}

module.exports = {
  start,
  close
}
service.spec.js:
const service = require('../service')

beforeEach(async () => {
  await service.start()
})

afterEach(async () => {
  await service.close()
})
