I'm trying to add unit testing to a brand new Strapi application. The official documentation is still in progress, so until it is ready, is there a way to add Jest unit testing to a Strapi application? I followed the approach in the v3 documentation with no luck.
There are quite a few changes from Strapi v3 to Strapi v4 when it comes to initializing a Strapi application's APIs. The most important changes are how Strapi populates the Koa routes and how to make requests to the HTTP server.
To populate the Koa routes, use
await instance.server.mount();
instead of
await instance.app
.use(instance.router.routes()) // populate KOA routes
.use(instance.router.allowedMethods()); // populate KOA methods
instance.server = http.createServer(instance.app.callback());
To call the HTTP server, use
strapi.server.httpServer
instead of
strapi.server
You also need to use the new database configuration schema when defining your test database. You can use the following as an initial setup for your tests.
The following is an updated (and work-in-progress) guide based on the Strapi v3 unit testing guide.
First run
yarn add --dev jest supertest sqlite3
or
npm install --save-dev jest supertest sqlite3
Then add the following to your ./package.json scripts:
"scripts": {
// ...strapi scripts
"test": "jest --forceExit --detectOpenHandles", //add
"watch": "yarn test --watch", // optional
}
Then add the following files:
./jest.config.js
module.exports = {
  verbose: true,
  testPathIgnorePatterns: [
    "/node_modules/",
    ".tmp",
    ".cache"
  ],
  modulePaths: [
    "/node_modules/"
  ],
  testEnvironment: "node",
};
./config/env/test/database.json
{
  "connection": {
    "client": "sqlite",
    "connection": {
      "filename": ".tmp/test.db"
    },
    "useNullAsDefault": true,
    "pool": {
      "min": 0,
      "max": 1
    }
  }
}
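Jest sets NODE_ENV to "test" by default, so Strapi should pick up this ./config/env/test/database.json override automatically when the test suite runs.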
./tests/helpers/strapi.js
const Strapi = require("@strapi/strapi");
const fs = require("fs");

let instance;

async function setupStrapi() {
  if (!instance) {
    await Strapi().load();
    instance = strapi;
    await instance.server.mount();
  }
  return instance;
}
async function cleanupStrapi() {
  const dbSettings = strapi.config.get("database.connection");
  const tmpDbFile = dbSettings.connection.filename;

  // close the server to release the db file
  await strapi.server.httpServer.close();

  // delete the test database after all tests
  if (dbSettings && tmpDbFile) {
    if (fs.existsSync(tmpDbFile)) {
      fs.unlinkSync(tmpDbFile);
    }
  }

  // close the connection to the database
  await strapi.db.connection.destroy();
}
module.exports = { setupStrapi, cleanupStrapi };
Note that you need to have the /hello endpoint in your project, as specified in the Strapi docs, for the next tests to pass.
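If you don't have that endpoint yet, here is a minimal sketch of what it could look like in a Strapi v4 project. The controller name, the public auth: false config, and the response text are assumptions for illustration, not taken from the official docs.
./src/api/hello/routes/hello.js
module.exports = {
  routes: [
    {
      method: "GET",
      path: "/hello",
      handler: "hello.index",
      config: { auth: false }, // assumption: endpoint is public so the test needs no token
    },
  ],
};
./src/api/hello/controllers/hello.js
module.exports = {
  async index(ctx) {
    ctx.body = "Hello World!"; // assumption: any 200 response satisfies the test below
  },
};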
./tests/app.test.js
const { setupStrapi, cleanupStrapi } = require("./helpers/strapi");
jest.setTimeout(15000);
beforeAll(async () => {
await setupStrapi();
});
afterAll(async () => {
await cleanupStrapi();
});
it("strapi is defined", () => {
expect(strapi).toBeDefined();
});
require('./hello')
./tests/hello/index.js
const request = require('supertest');

it('should return hello world', async () => {
  await request(strapi.server.httpServer)
    .get('/api/hello')
    .expect(200); // expect response HTTP code 200
});
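If you also want to assert on the response body, a small additional test along the same lines could look like this; the expected text simply matches the sketched controller above and is an assumption:
it('should return the greeting body', async () => {
  const res = await request(strapi.server.httpServer).get('/api/hello');
  expect(res.status).toBe(200);
  expect(res.text).toBe('Hello World!'); // assumption: matches the sketched controller
});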
I hope this helps anyone struggling with the same issues. I will update the answer as I progress.
I have a project set up and running with Webpack 5.28.0 and webpack-dev-server 4.11.1.
It's all working nicely, but I would like the dev server to write some files back to my project root. These are debug/log files that I'd like to save as JSON.
I'd also like this to be automatic; I don't want to have to click anything or trigger the action manually.
So the ideal flow would be that I run npm start, my build kicks off in a browser, the page generates a load of log data, and this is then written back to my project root, either using some browser function or by calling back to a Node script in my build.
Is this possible with dev-server?
You could set up the dev-server middleware to add an API endpoint that accepts data and writes it to your filesystem:
// webpack.config.js
const { writeFile } = require("node:fs/promises");
const bodyParser = require("body-parser");

module.exports = {
  // ...
  devServer: {
    setupMiddlewares: (middlewares, devServer) => {
      devServer.app?.post(
        "/__log",
        bodyParser.json(),
        async (req, res, next) => {
          try {
            await writeFile(
              "debug-log.json",
              JSON.stringify(req.body, null, 2)
            );
            res.sendStatus(202);
          } catch (err) {
            next(err);
          }
        }
      );
      return middlewares;
    },
  },
};
Then your front-end app only needs to construct the payload and POST it to the dev server:
const debugData = { /* ... */ };

fetch("/__log", {
  method: "POST",
  body: JSON.stringify(debugData),
  headers: { "content-type": "application/json" },
});
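One caveat worth noting: body-parser's JSON middleware limits payloads to roughly 100kb by default, so if the debug data can grow larger you may want to raise the limit, e.g. bodyParser.json({ limit: "5mb" }).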
Using NextJS, I have a Db.js file:
import { create } from 'ipfs'
let dbs = null
export async function createDbs() {
  if (!dbs) {
    console.log('starting up IPFS node and databases...')
    const node = await create({
      preload: { enabled: false },
      repo: './ipfs',
      EXPERIMENTAL: { pubsub: true },
      config: {
        Bootstrap: [],
        Addresses: { Swarm: [] }
      }
    })
    dbs = { node }
    console.log('Startup successful.')
  }
  return Promise.resolve(dbs)
}
The files in my api folder use it in their handlers. Example:
import { createDbs } from '../../lib/Db.js'
export default async function handler(req, res) {
  return new Promise(async resolve => {
    try {
      const dbs = await createDbs()
      ...
    } catch (e) {
      res.status(500).json({ error: 'Unexpected error : ' + e })
    } finally {
      resolve()
    }
  })
}
This works fine until I change a source file in the api folder (which happens quite a lot during development) and Next.js auto-recompiles the API server.
Then the reference to the IPFS node is GC'ed, and the node initialization code in Db.js runs again when the API handler is called.
Problem: when the node was first created, it locked its repo files using the standard IPFS mechanism, and since the node was not gracefully stopped before Next.js recompiled the API, I get an exception when trying to create the IPFS node again: {"code":"ERR_LOCK_EXISTS","level":"error","name":"LockExistsError"}.
I can't find a place where to stop the node gracefully.
For example:
// shutdown calls 'node.stop()'
process.on('exit', () => shutdown(node, 'exit'))
does work when I exit the Next.js dev process, but not when it recompiles.
Where could I call node.stop() so that the lock is released when Next.js recompiles the API?
I have set up an Express API with a couple of tests on Windows. The first test, in wrongo/test/app_test.js, ran successfully:
const assert = require('assert');
const request = require('supertest');
const app = require('../app');
describe('the express app', () => {
  it('handles a GET request to /api', done => {
    request(app).get('/api').end((err, response) => {
      assert(response.body.how === 'dee');
      done();
    });
  });
});
I later developed another test at wrongo/test/controllers/users_controller_test.js:
const assert = require('assert');
const request = require('supertest');
const app = require('../../app');
describe('Users controller', () => {
  it('Post to /api/users creates a new user', done => {
    request(app).post('/api/users').send({ email: 'test@test.com' }).end(() => {
      done();
    });
  });
});
It does not run this second test at all, and I don't get my console log from the users_controller.js file:
module.exports = {
  greeting(req, res) {
    res.send({ how: 'dee' });
  },
  create(req, res) {
    console.log(req.body);
    res.send({ how: 'dee' });
  }
};
I don't believe I have any syntax errors; I'm not sure why Mocha will not run this second test.
It seems Mocha cannot find a test if it is nested in a folder inside the test/ folder, but when I moved the test file to the main test/ folder, Mocha was able to run it. The reason is that, if you look at the "scripts" object inside package.json, --recursive is missing from:
"test": "nodemon --exec mocha -R min"
But I had to write the test script that way because Windows 10 kept giving me an error when I had --recursive.
mocha.js: 3.4.2
chai.js: 4.1.0
node.js: 6.10.3
npm: 5.3.0
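For reference, a hedged sketch of the script with the Mocha command quoted, which has worked for some Windows setups (not verified against the exact versions listed above):
"scripts": {
  "test": "nodemon --exec \"mocha --recursive -R min\""
}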
I've set up a simple test harness for a RESTful service deployed under Node.js. The test harness relies on mocha.js / chai.js. Each test, which calls an endpoint over HTTPS, works, and the before() and after() functions are called. However, it appears impossible to invoke any of the RESTful endpoints within the after() function. Why?
Since after() comes at the end of the tests, I thought the asynchronous invocation of the RESTful endpoint simply had no time to return before the test process exited, so I added a busy-wait loop to give the response some time. No go: the endpoint invocation simply does not work in after().
Here is my package.json:
{
  "name": "so-mcv-example",
  "version": "1.0.0",
  "description": "Minimum, Complete, and Verifiable Example",
  "main": "index.js",
  "dependencies": {
    "restify": "^4.3.0"
  },
  "engines": {
    "node": "6.10.3",
    "npm": "5.3.0"
  },
  "devDependencies": {
    "chai": "^4.1.0",
    "chai-http": "^3.0.0",
    "mocha": "^3.4.2"
  },
  "scripts": {
    "test": "mocha"
  }
}
My index.js looks like this:
const restify = require('restify'),
  fs = require('fs');

const server = restify.createServer({
  certificate: fs.readFileSync('security/server.crt'),
  key: fs.readFileSync('security/server.key'),
  name: 'so-mcv-example'
});

server.listen(443, function() {
  console.log('%s listening at %s', server.name, server.url);
});

server.pre(restify.pre.userAgentConnection());

server.get('/status', function(req, res, next) {
  res.send({ status: 'so-mcv-example is running ok.' });
  return next();
});
Under test directory, I have mocha.opts:
--require ./test/test.bootstrap
and test.bootstrap.js:
process.env.NODE_ENV = 'test';
// CAUTION: never do this in production! For testing purposes, we ignore
// faults in the standard SSL handshaking.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
The latter setting lets the tests accept the self-signed SSL certificate used in index.js. In test/status-test.js, I have:
const chai = require('chai');
const expect = chai.expect;
const should = chai.should();
const server = 'https://localhost';
chai.use(require('chai-http'));
before(function() {
  if (process.env.NODE_ENV != "test") {
    this.skip();
  }
  // XXX Wipe the database.
  console.log("Wipe the database BEFORE tests.");
});

describe('GET /status', function() {
  it('passes, as expected', function(done) {
    chai.request(server)
      .get('/status')
      .end(function(err, res) {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        done();
      });
  });
});

after(function() {
  if (process.env.NODE_ENV != "test") {
    this.skip();
  }
  chai.request(server)
    .get('/status')
    .end(function(err, res) {
      expect(err).to.be.null;
      expect(res).to.have.status(200);
      console.log("2nd call to /status returned.");
      done();
    });
});
Admittedly, using chai to invoke an endpoint in the after() function is contrived. That's not the point. The point is that the console.log() directive simply is never executed. That is, when I run npm test, the result is:
> so-mcv-example@1.0.0 test /Users/charlie/workspace/so-mcv-mocha
> mocha
Wipe the database BEFORE tests.
GET /status
✓ passes, as expected (44ms)
1 passing (56ms)
See my answer below for an explanation about why console.log() is not executed.
To resolve the issue, I added the option --no-exit to mocha.opts:
--require ./test/test.bootstrap
--no-exit
Then when I run npm test, the desired result is output:
> so-mcv-example@1.0.0 test /Users/charlie/workspace/so-mcv-mocha
> mocha
Wipe the database BEFORE tests.
GET /status
✓ passes, as expected (43ms)
1 passing (55ms)
2nd call to /status returned.
I believe I have found a solution. In after(), if you asynchronously invoke some endpoint on a service external to your testing environment, that asynchronous call is queued rather than executed immediately; this is the nature of calling asynchronous functions in Node.js. If there is no logic after the after() hook, Mocha immediately exits by invoking process.exit(), so the queued asynchronous call to the external endpoint is never actually made.
Resolution: add the --no-exit option to mocha.opts under your test directory.
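A related approach, offered here only as a hedged sketch rather than as part of the original answer: declaring the done parameter in the after() hook makes Mocha wait for the callback before exiting, which can avoid the need for --no-exit (the original snippet calls done() without declaring it):
after(function(done) {
  if (process.env.NODE_ENV != "test") {
    this.skip();
  }
  chai.request(server)
    .get('/status')
    .end(function(err, res) {
      expect(err).to.be.null;
      expect(res).to.have.status(200);
      console.log("2nd call to /status returned.");
      done(); // Mocha now waits for this before exiting
    });
});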
I have got a local development environment working for Node/PostgreSQL/Knex in the sense that I can post to a development database on my machine using an API. I am now trying to create tests for this functionality but am getting an error.
Here's my config:
//knexfile.js
module.exports = {
  development: {
    client: 'pg',
    connection: {
      host: '127.0.0.1',
      user: 'dbUser',
      password: 'dbpword',
      port: 5432,
      database: 'example-name'
    },
    migrations: {
      directory: __dirname + '/db/migrations'
    },
    seeds: {
      directory: __dirname + '/db/seeds/development'
    }
  },
}
//db.js
const config = require('../knexfile.js');
const env = process.env.NODE_ENV || 'development';
const knex = require("knex")(config[env]);
module.exports = knex;
knex.migrate.latest([config]);
And then my tests:
import chai from 'chai';
import { expect } from 'chai';
import chaiHttp from 'chai-http';
import knex from '../../db/db';
import app from '../../server';
chai.use(chaiHttp);
describe('Tokens API', () => {
  beforeEach((done) => {
    knex.migrate.rollback()
      .then(() => {
        knex.migrate.latest()
          .then(() => {
            return knex.seed.run()
              .then(() => {
                done();
              });
          });
      });
  });

  afterEach((done) => {
    knex.migrate.rollback()
      .then(() => {
        done();
      });
  });

  describe('POST /users', () => {
    it('posts a list of users to the database with all mandatory fields', (done) => {
      chai.request(app)
        .post('/users')
        .send({
          "users": [
            "steve",
            "whoever",
            "matt",
            "another"]})
        .end((err, res) => {
          expect(err).to.be.null;
          expect(res).to.have.status(200);
          expect(res).to.be.json;
          done();
        });
    });
  });
});
When I run this I get the following error twice - I think for the knex calls in the beforeEach block:
Knex:warning - Can't take lock to run migrations: Migration table is already locked
Knex:warning - If you are sure migrations are not running you can release the lock manually by deleting all the rows from migrations lock table: knex_migrations_lock
Unhandled rejection MigrationLocked: Migration table is already locked
I have tried numerous things, including clearing out the knex_migrations_lock table. The only support I can find online is this thread, which suggests clearing out the lock table using DELETE FROM Migrations_lock where id <> 0;, but my lock table only has an is_locked column with zero values.
Any idea what's going on?
EDIT: I've just realised if you edit out all the knex calls, the test actually passes. Could this be because I am effectively calling knex twice - once from db.js and once indirectly through server.js? If that's the case, how do I avoid doing this - because surely I need to call the knex setup for Node to run it?
For anyone stumbling across this, the problem was actually coming from db.js, specifically the last line:
const config = require('../knexfile.js');
const env = process.env.NODE_ENV || 'development';
const knex = require("knex")(config[env]);
module.exports = knex;
knex.migrate.latest([config]);
Of course this is asynchronous, and the tests were importing this file before trying to run their own knex functions, causing the lock. I got around this by adding a guard so the migration does not run while testing:
if (process.env.NODE_ENV != 'test') {
  knex.migrate.latest([config])
}
You can then create a test environment by adding process.env.NODE_ENV = 'test' to each spec file, or by installing the npm env-test module.
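For completeness, a hedged sketch of what a matching test entry in knexfile.js could look like; the database name and seed directory are assumptions, not part of the original answer:
// knexfile.js - added alongside the existing development entry
module.exports = {
  development: { /* ...as above... */ },
  test: {
    client: 'pg',
    connection: {
      host: '127.0.0.1',
      user: 'dbUser',
      password: 'dbpword',
      port: 5432,
      database: 'example-name-test' // assumption: a separate database used only by tests
    },
    migrations: {
      directory: __dirname + '/db/migrations'
    },
    seeds: {
      directory: __dirname + '/db/seeds/test' // assumption
    }
  }
};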
I had the exact same issue; it ended up being due to my API calling the database when being initialized by the supertest library.
For example, my test file:
var db = require('../db');
var api = require('../api');
var supertest = require('supertest')(api);
describe('Session routes', () => {
  beforeEach((done) => {
    db.migrate.rollback()
      .then(() => {
        db.migrate.latest()
          .then(() => {
            return db.seed.run()
              .then(() => {
                done();
              });
          });
      });
  });

  afterEach((done) => {
    db.migrate.rollback()
      .then(() => {
        done();
      });
  });

  it('GET /session should error with no token', (done) => {
    supertest
      .get('/session')
      .set('Accept', 'application/json')
      .expect('Content-Type', /json/)
      .expect(401, {
        error: 'Unauthorized'
      }, done);
  });
});
On line 2 it requires my api; when the api is required, the following code runs straight away to initialize my API's external services:
var db = require('./other-postgres-library');
var servicesApi = require('./services/api')(db);
This would connect to a bunch of external services and write the results to the database.
So while the tests were running, my app was throwing errors because it was trying to write to a database that was being rolled back/migrated/seeded, etc.
I changed my inner services API to initialize lazily, and all my problems disappeared.
In your case, I would hazard a guess that when your tests run the line import app from '../../server';, your app/server code is trying to run some queries against the database.
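As a hedged sketch of the lazy-initialization idea described above (the file, function, and method names here are hypothetical, not taken from the question):
// services/api.js - construct the services client on first use instead of at require time
var servicesApi = null;

module.exports = function getServicesApi(db) {
  if (!servicesApi) {
    servicesApi = {
      // hypothetical example method; the real initialization work (DB writes,
      // external service calls) happens here on first use, not when the module is required
      listSessions: function () {
        return db.select().from('sessions');
      }
    };
  }
  return servicesApi;
};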