I have a Node application with a file called server.js, which I start by running
node server.js env=feature port=8081
I installed Jest and Supertest. After writing an async GET call in a sample.test.js file, I run npm test, but the result is not as expected, because starting server.js requires the two parameters env and port shown above. My question is: how do I pass them when running under Jest? My test code is below:
const supertest = require('supertest')
const app = require('../server.js')
const request = supertest(app)

describe('Sample Test', () => {
  it('responds with 401 when unauthenticated', async () => {
    const response = await request.get("/rest/api/v1/mytools/getAvailableTools?provisioning=all");
    console.log('James bob ' + JSON.stringify(response))
    expect(response.statusCode).toEqual(401);
  })
})
There are a couple of ways to approach this.
First, it's common practice to pass such values via environment variables. You would have to update the code that processes those values, but then you can do the following:
ENV=feature PORT=8081 node server.js
ENV=feature PORT=8081 npm test
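Inside server.js the values would then come from process.env; a minimal sketch:
// server.js (sketch: reading the startup values from environment variables)
const express = require('express');
const app = express();

const env = process.env.ENV || 'development';
const port = Number(process.env.PORT) || 8081;

app.listen(port, () => console.log(`Server (${env}) listening on ${port}`));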
The second option would be to decouple the command-line parameters from the actual server and test them separately, e.g.:
function createServer(env, port) {
  // Server code
}
exports.createServer = createServer

// Command line processing code
createServer(cliEnv, cliPort)
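The command-line processing for the env=feature port=8081 style of arguments could look like this (a sketch; the helper names are illustrative):
// index.js (sketch): parse `key=value` arguments like `env=feature port=8081`
const { createServer } = require('./server.js');

const args = Object.fromEntries(
  process.argv.slice(2).map((arg) => arg.split('='))
);

createServer(args.env || 'development', Number(args.port) || 8081);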
So in tests you can write:
const supertest = require('supertest')
const { createServer } = require('../server.js')
const request = supertest(createServer('feature', 8081))

describe('Sample Test', () => {
  it('responds with 401 when unauthenticated', async () => {
    const response = await request.get("/rest/api/v1/mytools/getAvailableTools?provisioning=all");
    console.log('James bob ' + JSON.stringify(response))
    expect(response.statusCode).toEqual(401);
  })
})
Note that Supertest binds the app to an ephemeral port itself, so the port argument only matters for your own configuration logic.
Hello stackoverflow community,
The task
I'm running Jasmine tests programmatically using jasmine.execute(). My task is to run an onPrepare function where I do some setup work, like setting up reporters, and I need that function to finish before the specs run.
Approach #1
The first approach I tried was to just declare an async onPrepare function, which also includes the code for specifying reporters, and then do
await onPrepare();
await jasmine.execute();
Problem
As a result I get jasmine.getEnv() is not a function. I assume getEnv() only becomes available once .execute() has run, so I understand this won't work.
Approach #2
The next thing I tried was to create a helper file with my sync code, specify it in the config, and run jasmine.execute().
So, simplified, I have:
// conf.js
(async () => {
let Jasmine = require('jasmine');
let jasmine = new Jasmine();
let variables = require("./variables.json");
let {spawn} = require("child_process");
let childProcess = spawn(variables.webdriver);
console.log(`Webdriver started, pid: ${childProcess.pid}`);
jasmine.exitOnCompletion = false;
jasmine.loadConfig({
'spec_files': ['specs/*.spec.js'],
'helpers': ['on-jasmine-prepare.js'],
'stopSpecOnExpectationFailure': false,
'random': false,
})
const result = await jasmine.execute();
console.log('Test status:', result.overallStatus);
console.log('Closing library and webdriver process');
await library.close(); // `library` here is the global set in on-jasmine-prepare.js
await childProcess.kill();
console.log('webdriver killed:', childProcess.killed);
})()
// on-jasmine-prepare.js
(async () => {
const {SpecReporter} = require("jasmine-spec-reporter");
const library = require("./library/library");
const variables = require("./variables.json");
const errorHandler = require("./modules/on-error-handler");
jasmine.getEnv().clearReporters();
jasmine.DEFAULT_TIMEOUT_INTERVAL = 3 * 60 * 1000;
jasmine.getEnv().addReporter(new SpecReporter({}))
global.library = new library.Library(variables.IP);
console.log('library instantiated')
await library.deleteSessions();
console.log('sessions deleted')
await library.launch(library.home);
console.log('home page launched')
jasmine.getEnv().addReporter(
errorHandler(library)
)
console.log('debugger reporter added' )
})();
Problem
The problem I noticed is that the helper file executes asynchronously alongside the specs, and I get a spec error before the helper function finishes (basically a race condition). Below is example output, where you can see that some console.log calls from onPrepare ran after the spec had started:
Webdriver started, pid: 40745
library instantiated
Jasmine started
sessions deleted
Example
✗ App is loaded (7 secs)
- Error: Session already exist
home page launched
debugger reporter added
The question
How do I run the onPrepare function synchronously, before the specs? Preferably natively (using only Jasmine capabilities), otherwise maybe with third-party packages. I know it's possible because Protractor had this feature, but I couldn't reverse-engineer it.
MacOS
node v16.13.2
jasmine v4.1.0
Thank you
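One native direction, sketched below under the assumption that the setup only needs to run once before the first spec: Jasmine waits for an async top-level beforeAll registered in a helper, so the synchronous reporter wiring can stay at load time while the async work moves into the hook.
// on-jasmine-prepare.js (a sketch along those lines)
const {SpecReporter} = require("jasmine-spec-reporter");
const library = require("./library/library");
const variables = require("./variables.json");

// synchronous wiring can run at load time
jasmine.getEnv().clearReporters();
jasmine.getEnv().addReporter(new SpecReporter({}));

// Jasmine awaits this hook before the first spec runs (timeout as last argument)
beforeAll(async () => {
  global.library = new library.Library(variables.IP);
  await global.library.deleteSessions();
  await global.library.launch(global.library.home);
}, 3 * 60 * 1000);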
I'm using Node with TypeScript on my back end, with Jest and Supertest as my test framework.
When I run the test it passes, but I get an error at the end. Here's the result:
PASS test/controllers/user.controller.test.ts
Get all users
✓ should return status code 200 (25ms)
console.log node_modules/@overnightjs/logger/lib/Logger.js:173
[2019-12-05T04:54:26.811Z]: Setting up database ...
Test Suites: 1 passed, 1 total
Tests: 1 passed, 1 total
Snapshots: 0 total
Time: 3.284s
Ran all test suites.
server/test/controllers/user.controller.test.ts:32
throw err;
^
Error: connect ECONNREFUSED 127.0.0.1:80
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1104:14)
npm ERR! Test failed. See above for more details.
Here's my test code:
import request from "supertest";
import { AppServer } from '../../config/server';
const server = new AppServer();
describe('Get all users', () => {
it('should return status code 200', async () => {
server.startDB();
const appInstance = server.appInstance;
const req = request(appInstance);
req.get('api/v1/users/')
.expect(200)
.end((err, res) => {
if (err) throw err;
})
})
})
Here's my server setup. I'm using OvernightJS on my back end.
I created a getter to expose the Express instance, which comes from OvernightJS:
// this should be the very top, should be called before the controllers
require('dotenv').config();
import 'reflect-metadata';
import { Server } from '@overnightjs/core';
import { Logger } from '@overnightjs/logger';
import { createConnection } from 'typeorm';
import helmet from 'helmet';
import * as bodyParser from 'body-parser';
import * as controllers from '../src/controllers/controller_imports';
export class AppServer extends Server {
constructor() {
super(process.env.NODE_ENV === 'development');
this.app.use(helmet());
this.app.use(bodyParser.json());
this.app.use(bodyParser.urlencoded({ extended: true }));
this.setupControllers();
}
get appInstance(): any {
return this.app;
}
private setupControllers(): void {
const controllerInstances = [];
// eslint-disable-next-line
for (const name of Object.keys(controllers)) {
const Controller = (controllers as any)[name];
if (typeof Controller === 'function') {
controllerInstances.push(new Controller());
}
}
/* You can add option router as second argument */
super.addControllers(controllerInstances);
}
private startServer(portNum?: number): void {
const port = portNum || 8000;
this.app.listen(port, () => {
Logger.Info(`Server Running on port: ${port}`);
});
}
/**
* start Database first then the server
*/
public async startDB(): Promise<any> {
Logger.Info('Setting up database ...');
try {
await createConnection();
this.startServer();
Logger.Info('Database connected');
} catch (error) {
Logger.Warn(error);
return Promise.reject('Server Failed, Restart again...');
}
}
}
I read this question - that's why I called the method startDB.
So I figured it out, and the solution is quite simple, though I can't explain why.
This req.get('api/v1/users/') should be /api/v1/users - you need a leading /.
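A sketch of the corrected test body (also awaiting the request rather than mixing .end() with async):
it('should return status code 200', async () => {
  await server.startDB();
  await request(server.appInstance)
    .get('/api/v1/users')  // leading slash
    .expect(200);
});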
For Frontend...
If you are using axios and come across this error, go to your testSetup.js file and add this line:
axios.defaults.baseURL = "https://yourbaseurl.com/"
This worked for me. So, typically, this is a baseURL issue.
I had this error in my React frontend app tests.
I was using React testing library's findBy* function in my assert:
expect(await screen.findByText('first')).toBeInTheDocument();
expect(await screen.findByText('second')).toBeInTheDocument();
expect(await screen.findByText('third')).toBeInTheDocument();
After I changed it to:
await waitFor(async () => {
expect(await screen.findByText('first')).toBeInTheDocument();
expect(await screen.findByText('second')).toBeInTheDocument();
expect(await screen.findByText('third')).toBeInTheDocument();
});
the error was gone.
I don't know exactly why, but maybe it will help someone.
UPDATE: I was mocking fetch incorrectly, so my test called real API and caused that error
I put this line in my setupTests file:
global.fetch = jest.fn()
It mocks fetch for all tests globally. Then, you can mock specific responses right in your tests:
jest.mocked(global.fetch).mockResolvedValue(...)
// OR
jest.spyOn(global, 'fetch').mockResolvedValue(...)
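For example, a minimal sketch (the response shape only needs whatever your code actually reads from it):
jest.mocked(global.fetch).mockResolvedValue({
  ok: true,
  status: 200,
  json: async () => ({ items: [] }),
});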
Slightly different issue, but same error message...
I was getting this error with node-fetch when trying to connect to my own localhost (http://localhost:4000/graphql), and after trying what felt like everything under the sun, my most reliable solution was:
using this script in package.json: "test": "NODE_ENV=test jest --watch"
If the terminal shows a connection error, I go to the terminal where Jest is watching and press a to rerun all tests, and they pass without any issue.
¯\_(ツ)_/¯
Success rate continued to improve by renaming the testing folder to __tests__ and moving my index.js to src/index.js.
Very strange, but I am too exhausted to look at the Jest internals to figure out why.
The rules for supertest are the same as the rules for express. OvernightJS does not require any leading or ending "/" though.
For anyone landing on this, but not having issues with trailing slashes:
Jest can also return an ECONNREFUSED when your express app takes some time (even just a second) to restart/init. If you are using nodemon like me, you can disable restarts for test files with --ignore '*.test.ts'.
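For example (a sketch; adjust the glob and entry point to your project layout):
// package.json
"scripts": {
  "dev": "nodemon --ignore '*.test.ts' src/index.ts"
}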
This error also occurs if you have not set up a server to catch the request at all (depending on your implementation code and your test, the test may still pass).
I didn't get to the bottom of this error - it wasn't related to the (accepted) leading-slash answer.
However, my "fix" was to move the mocks up into the suite definition: into beforeAll, with afterAll for cleanup between tests.
Before, I was mocking global.fetch in each test, and it was the last test in the suite to use the mock that would cause the error.
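A sketch of that shape, assuming a global fetch mock:
describe('suite using fetch', () => {
  let originalFetch;

  beforeAll(() => {
    originalFetch = global.fetch;
    global.fetch = jest.fn().mockResolvedValue({ ok: true, json: async () => ({}) });
  });

  afterAll(() => {
    global.fetch = originalFetch; // cleanup so later suites see the real fetch
  });

  // ...tests that rely on the mocked fetch...
});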
In my case, the issue was related to the react-inlinesvg package. The package makes a fetch request to load the SVG file, and since no server is running, the request goes to the default 127.0.0.1:80.
I mocked react-inlinesvg globally to output its props, including the SVG filename, so I could assert on them in tests:
jest.mock('react-inlinesvg', () => (props) => (
<svg data-testid="mocked-svg">{JSON.stringify(props)}</svg>
));
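A test can then assert against the mocked output, along these lines (MyIcon is a hypothetical component that renders react-inlinesvg):
import { render, screen } from '@testing-library/react';
import MyIcon from './MyIcon'; // hypothetical component wrapping react-inlinesvg

test('renders the mocked svg with its props', () => {
  render(<MyIcon src="logo.svg" />);
  // the mock stringifies props, so the filename appears in the text content
  expect(screen.getByTestId('mocked-svg')).toHaveTextContent('logo.svg');
});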
I wanted to run some Spectron e2e tests I wrote some weeks ago, but to my surprise, suddenly they all stopped working for one and the same reason.
According to the error message, I'm dealing with rejected Promises, but I can't figure out where the problem is coming from. Calling done at the end of my testcase raises the exact same error.
I'm running the following command to launch my test suite: mocha test/e2e
Mocha then executes this index.js before running my tests in order to support ES6+ features:
'use strict'
//index.js
// Set BABEL_ENV to use proper env config
process.env.BABEL_ENV = 'test'
// Enable use of ES6+ on required files
require('babel-register')({
ignore: /node_modules/
})
// Attach Chai APIs to global scope
const { expect, should, assert } = require('chai')
global.expect = expect
global.should = should
global.assert = assert
// Require all JS files in `./specs` for Mocha to consume
require('require-dir')('./specs')
After that it runs this small Login.spec.js, which produces the error mentioned above:
import utils from '../utils'
import {Application} from "spectron";
import electron from "electron";
describe('🔑 Login', function () {
this.timeout(11000);
it('login form exists', async function (done) {
this.app = new Application({
path: electron,
env: {"SPECTRON_RUNNING":true},
args: ['dist/electron/main.js'],
startTimeout: 10000,
waitTimeout: 10000
})
await this.app.start()
await this.app.client.windowByIndex(1);
done();
})
})
I created a simple web app using express.js and want to test it with jasmine-node. It works fine so far, but my problem is that I have to start the server manually every time before I can run my tests.
Could you help me write a spec-helper that runs the server (on another port than my development one) just for the tests and then kills it afterwards?
This is what I do:
I have a server.js file inside the root of my node project that sets up the node application server (with express) and exports 2 methods:
exports.start = function( config, readyCallback ) {
if(!this.server) {
this.server = app.listen( config.port, function() {
console.log('Server running on port %d in %s mode', config.port, app.settings.env);
// callback to call when the server is ready
if(readyCallback) {
readyCallback();
}
});
}
};
exports.close = function() {
this.server.close();
};
The app.js file will be simple at this point:
var server = require('./server');
server.start( { port: 8000 } );
So the files/folder basic structure would be the following:
src
app.js
server.js
Having this separation will allow you to run the server normally:
node src/app.js
...and/or require it from a custom script (or a jake/grunt/whatever task) that executes your tests like this:
/** my-test-task.js */
// util that spawns a child process
var spawn = require('child_process').spawn;
// reference to our node application server
var server = require('./path/to/server.js');
// starts the server
server.start( { port: 8000 }, function() {
// on server ready launch the jasmine-node process with your test file
var jasmineNode = spawn('jasmine-node', [ './path/to/test/file.js' ]);
// logs process stdout/stderr to the console
function logToConsole(data) {
console.log(String(data));
}
jasmineNode.stdout.on('data', logToConsole);
jasmineNode.stderr.on('data', logToConsole);
jasmineNode.on('exit', function(exitCode) {
// when jasmine-node is done, shut down the application server
server.close();
});
});
I use Mocha - which is damn similar - but the same principle should apply: you could try requiring your app.js file in a 'beforeEach' hook inside the main describe. That should fire it up for you.
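A sketch of that idea (note that require() is cached, so the server actually boots only once; a before hook makes that explicit):
describe('my app', function () {
  before(function () {
    require('../src/app.js'); // boots the express server (cached, so it runs once)
  });

  // ...specs that hit the now-running server...
});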
Assuming you have some code that invokes app.listen() in server.js, don't require the file on each run but only once, and then have two functions like:
let server;
const startServer = () => { server = app.listen(3000); };
const stopServer = () => { server.close(); };
Then you can use these in beforeEach and afterEach
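For example:
describe('my app', function () {
  beforeEach(function () {
    startServer();
  });

  afterEach(function () {
    stopServer();
  });

  it('responds on port 3000', function () {
    // ...hit http://localhost:3000 with your HTTP client of choice...
  });
});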
If you want then to go one step further in automating your testing while you develop, you can go to your terminal line and execute
jasmine-node . --autotest
Jasmine will then watch every file in your project, and whenever you change one it will tell you if that piece of code breaks any of your tests ;)
I'm starting a nodejs project and would like to do BDD with Mocha and Zombiejs. Unfortunately I'm new to just about every buzzword in that sentence. I can get Mocha and Zombiejs running tests fine, but I can't seem to integrate the two - is it possible to use Mocha to run Zombiejs tests, and if so, how would that look?
Just looking for "hello world" to get me started, but a tutorial/example would be even better.
Thanks!
Assuming you already have installed mocha, zombie and expect.js according to instructions, this should work for you:
// Put below in a file in your *test* folder, ie: test/sampletest.js:
var expect = require('expect.js'),
Browser = require('zombie'),
browser = new Browser();
describe('Loads pages', function(){
it('Google.com', function(done){
browser.visit("http://www.google.com", function () {
expect(browser.text("title")).to.equal('Google');
done();
});
});
});
Then you should be able to run the mocha command from your root application folder:
# mocha -R spec
Loads pages
✓ Google.com (873ms)
✔ 1 tests complete (876ms)
Note: If your tests keep failing due to timeouts, it helps to increase mocha's timeout setting a bit by using the -t argument. Check out mocha's documentation for complete details.
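For example, to allow 30 seconds per test:
mocha -R spec -t 30000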
I wrote a lengthy reply to this question explaining important gotchas about asynchronous tests, good practices ('before()', 'after()', TDD, ...), and illustrated by a real world example.
http://redotheweb.com/2013/01/15/functional-testing-for-nodejs-using-mocha-and-zombie-js.html
If you want to use cucumber-js for your acceptance tests and Mocha for your "unit" tests for a page, you can use cuked-zombie (sorry for the advertising).
Install it as described in the README on GitHub, but place your world config in a file called world-config.js:
/* globals __dirname */
var os = require('os');
var path = require('path');

module.exports = {
  cli: null,
  // choose the testing domain based on the machine the tests run on
  domain: os.hostname() === 'addorange-macbook' ? 'my-testing-domain.com' : 'localhost',
  debug: false
};
Then use mocha with zombie in your unit tests like this:
var chai = require('chai'), expect = chai.expect;
var _ = require('lodash');
var cukedZombie = require('cuked-zombie');

describe('Appointments', function() {
describe('ArrangeFormModel', function() {
before(function(done) { // execute once
var that = this;
cukedZombie.infectWorld(this, require('../world-config'));
this.world = new this.World(done);
// this inherits the whole world api to your test
_.merge(this, this.world);
});
describe("display", function() {
before(function(done) { // executed once, before all tests in the "display" describe block run
var test = this;
this.browser.authenticate().basic('maxmustermann', 'Ux394Ki');
this.visitPage('/someurl', function() {
test.helper = function() {
};
done();
});
});
it("something on the /someurl page is returned", function() {
expect(this.browser.html()).not.to.be.empty;
});