Is it possible to collect coverage on code loaded with vm.runInContext with Jest? - jestjs

I'm working on a legacy JS project that doesn't use any require/import. When deploying, the files are just concatenated and the result is sent to a server.
In order to write tests with Jest, I created a custom environment that loads all the JS files into the global context so that I can call the functions in the test file.
For example:
src/index.js
function sum(x, y) {
return x + y;
}
src/index.spec.js
it('should sum two numbers', () => {
expect(sum(1, 2)).toBe(3);
});
jest.config.js
module.exports = {
clearMocks: true,
collectCoverage: true,
collectCoverageFrom: [
"src/**/*.js",
],
coverageDirectory: "coverage",
coverageProvider: "v8",
testEnvironment: "./jest.env.js",
};
jest.env.js
const NodeEnvironment = require('jest-environment-node').TestEnvironment;
const fs = require('fs');
const vm = require("vm");
const path = require("path");
class CustomEnv extends NodeEnvironment {
constructor(config, context) {
super(config, context);
this.loadContext();
}
loadContext() {
const js = fs.readFileSync('./src/index.js', 'utf8');
const context = vm.createContext(this.global);
vm.runInContext(js, context, {
filename: path.resolve('./src/index.js'),
displayErrors: true,
});
Object.assign(this.global, context);
}
}
module.exports = CustomEnv;
When I run npx jest, the test is executed but the coverage is empty...
Any idea on how to fix the coverage?
I've created a minimal reproducible repo here: https://github.com/GP4cK/jest-coverage-run-in-context/tree/main. You can just clone it, run npm i and npm t.
Note: I'm happy to change v8 to babel or load the context differently if it makes it easier.
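For reference, since the environment is meant to load all the JS files, loadContext could also scan src/ instead of hard-coding index.js. A minimal sketch (the directory scan, the spec-file filter, and the load order are assumptions on my part, not something the repo confirms):
loadContext() {
const context = vm.createContext(this.global);
const dir = path.resolve('./src');
// load every top-level source file in src/ (skipping the spec files) into the shared context
fs.readdirSync(dir)
.filter((file) => file.endsWith('.js') && !file.endsWith('.spec.js'))
.forEach((file) => {
const filename = path.join(dir, file);
vm.runInContext(fs.readFileSync(filename, 'utf8'), context, {
filename,
displayErrors: true,
});
});
Object.assign(this.global, context);
}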

Related

How to deep mock Prisma Client in JavaScript (not TypeScript)?

I want to unit test a Next.js API route with Prisma in JavaScript. Unfortunately, Prisma's unit testing guide is written for TypeScript.
I have a jest.config.js which sets up the mock in jest.setup.js:
const nextJest = require("next/jest");
const createJestConfig = nextJest({
dir: "./",
});
const config = {
setupFilesAfterEnv: ["<rootDir>/jest.setup.js"],
moduleDirectories: ["node_modules", "<rootDir>/"],
moduleNameMapper: {
"#/(.*)$": "<rootDir>/$1",
},
};
module.exports = createJestConfig(config);
and this is how I configured the mock in jest.setup.js
import prisma from "#/utils/client";
jest.mock("#/utils/client", () => ({
__esModule: true,
default: {
user: {
findMany: jest.fn(),
},
// ... and each and everyone of the entities
},
}));
export const prismaMock = prisma;
and the following test case passed
describe("Calculator", () => {
it("renders a calculator", async () => {
prismaMock.user.findMany.mockResolvedValue(["abc"]);
const { req, res } = createMocks();
await handler(req, res);
expect(res._getStatusCode()).toBe(200);
expect(res._getData()).toBe('["abc"]');
});
});
With this approach, I have to mock each and every one of the models and functions in jest.setup.js. Is there a way to mock all the models and functions automatically? Is there a similar JavaScript library that provides mockDeep from jest-mock-extended?
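For what it's worth, jest-mock-extended is plain JavaScript at runtime (its TypeScript types are optional), so mockDeep can be used from a JS setup file as well. A rough sketch, keeping the "#/utils/client" path from the question; the require is placed inside the factory so the hoisted jest.mock call doesn't reference out-of-scope variables:
import prisma from "#/utils/client";
jest.mock("#/utils/client", () => {
// require here because jest.mock factories are hoisted above imports
const { mockDeep } = require("jest-mock-extended");
// every model (user, post, ...) and every method becomes a jest.fn() automatically
return { __esModule: true, default: mockDeep() };
});
export const prismaMock = prisma;
With that, prismaMock.user.findMany.mockResolvedValue([...]) should work without listing each entity by hand.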

How to set env.development and env.production in Preact app

In a React app, you can create .env.production and .env.development and enter different keys and values like this:
REACT_APP_API_URL="xyz"
The environment variables are picked up automatically based on the command used (npm start or npm run build).
What is the equivalent process in Preact?
Here is my solution.
Create env.js in the root of the project:
import fs from 'fs';
import dotenv from 'dotenv';
function getAppEnvironment() {
const prefix = "PREACT";
return Object.keys(process.env)
.filter((key) => new RegExp(`^${prefix}_`, 'i').test(key))
.reduce((env, key) => {
env[key] = process.env[key];
return env;
}, {});
}
function resolveFile(file) {
const path = fs.realpathSync(process.cwd());
return `${path}/${file}`;
}
function getEnvFiles(production) {
const key = production ? 'production' : 'development';
return [
resolveFile(".env"),
resolveFile(".env.local"),
resolveFile(`.env.${key}`),
resolveFile(`.env.${key}.local`),
].filter(Boolean);
}
export function getEnvironment(production) {
const dotenvFiles = getEnvFiles(production);
dotenvFiles.forEach((dotenvFile) => {
if (fs.existsSync(dotenvFile)) {
dotenv.config({
path: dotenvFile,
override: true
})
}
});
return getAppEnvironment();
}
export default getEnvironment;
Then create or modify your preact.config.js:
import getEnvironment from './env';
export default {
plugins: [],
webpack(config, env, helpers) {
config.plugins.push(
new helpers.webpack.DefinePlugin({
'process.env': JSON.stringify(getEnvironment(env.production))
}),
);
},
};
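With this in place, any variable prefixed with PREACT_ in the matching .env file is injected at build time. A small example, assuming a hypothetical PREACT_APP_API_URL variable:
.env.development:
PREACT_APP_API_URL="https://dev.example.com"
and anywhere in the app code:
const apiUrl = process.env.PREACT_APP_API_URL;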

RequireJS + Mocha + JSDom + Node -> Shim config not supported in Node

I'm trying to set up a Mocha testing framework using JSDom with RequireJS. Because I'm running the tests on Node instead of in a browser (since I'm using JSDom), none of the non-AMD modules seem to be imported, and they throw Shim config not supported in Node. Does anyone know how I can export those modules to AMD, or what the right approach is? (a.k.a. what I'm doing wrong)
Example of my set-up
Component.js
define(["jquery", "non_AMD_Module", ... ], function($, NonAMDModule, ...) {
let component = {
...
foo: () => {
NonAMDModule.bar();
},
};
return component;
});
Component.test.js
const requirejs = require('requirejs');
const { JSDOM } = require('jsdom');
requirejs.config({
baseUrl: "dist/app",
paths: {
jquery: "lib/jquery",
component: "path_to_component",
non_AMD_Module: "path_to_module"
},
shim: {
non_AMD_Module: { exports: "non_AMD_Module" } // This doesn't work
}
});
const { window } = new JSDOM("<html></html>");
global.window = window;
global.document = window.document;
global.$ = requirejs('jquery');
const Component = requirejs('component');
describe('test', () => {
it('is a simple test', () => {
const testComponent = new Component();
testComponent.foo();
});
});
When I run the test suite, I get:
Mocha Exploded!
TypeError: Cannot read property 'bar' of undefined
Running r.js -convert "path_to_module" did not work for this module.
Looking at the source code for jQuery, I found that there's boilerplate code that exports it to AMD.
This can be added at the bottom of the non-AMD module in order to expose it as an AMD module accessible to RequireJS:
if ( typeof define === "function" && define.amd ) {
define([], function () {
return non_AMD_Module;
});
}
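For completeness, the fuller pattern (a generic UMD sketch, not code taken from the module in question) also covers the CommonJS case, which is what the first link below discusses:
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD: register as an anonymous module
define([], factory);
} else if (typeof module === 'object' && module.exports) {
// CommonJS / Node
module.exports = factory();
} else {
// browser global
root.non_AMD_Module = factory();
}
}(typeof self !== 'undefined' ? self : this, function () {
return non_AMD_Module;
}));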
Other Resources:
Shim a module in Require.js that uses module.exports possible?
https://github.com/requirejs/requirejs/wiki/Updating-existing-libraries#anon

Different builds based on targeting client vs server code

I currently have 2 separate webpack builds for server-rendered vs client-rendered code. Is there an easy way to change the build output based on the server/client build?
For example something like this:
// Have some code like this
if(is_client){
console.log('x.y.z')
} else {
server.log('x.y.z')
}
// Webpack outputs:
// replaced code in client.js
console.log('x.y.z')
// replaced code in server.js
server.log('x.y.z')
Have you tried anything like this?
// webpack.config.js
const path = require('path');
const webpack = require('webpack');
module.exports = () => ['web', 'node'].map(target => {
const config = {
target,
context: path.resolve(__dirname, 'src'),
entry: {
[target]: ['./application.js'],
},
output: {
path: path.resolve(__dirname, 'dist', target),
filename: '[name].js'
},
module: { rules: ... },
plugins: [
new webpack.DefinePlugin({
IS_NODE: JSON.stringify(target === 'node'),
IS_WEB: JSON.stringify(target === 'web'),
}),
],
};
return config;
});
// later in your code
import logger from 'logger';
if (IS_NODE) {
logger.log('this is node js');
}
if (IS_WEB) {
console.log('this is web');
}
How does the compilation work?
// web.bundle.js (the client build)
import logger from 'logger';
// DefinePlugin creates a constant expression which causes the code below to be unreachable
if (false) {
logger.log('this is node js');
}
if (true) {
console.log('this is web');
}
Finally, you will produce your build in production mode, so webpack will include a plugin called UglifyJS, which performs dead code removal (a.k.a. tree shaking), so it will delete any unused/unreachable code.
and the final result will look like:
// node.bundle.js
import logger from 'logger';
logger.log('this is node js');
// web.bundle.js
console.log('this is web');
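As a usage note: because the config exports an array of two configurations, a single run such as npx webpack --mode production (assuming webpack-cli is installed) builds both targets and should emit dist/web/web.js and dist/node/node.js, one bundle per target.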

Jest Testing with require modules: ejs-loader

I am playing with the idea of loading large static HTML bundles into a React component instead of typing them all out in JSX. I am currently just experimenting with ejs-loader and html-react-parser to evaluate the feasibility of this. Everything actually renders fine, but I cannot get any tests to work with Jest for this.
I receive Cannot find module 'ejs-loader!./AboutPage.view.ejs' from 'AboutPage.js' errors and am unsure of what to do.
I am currently just working off of react-slingshot as my base for experimenting with this.
The repo for the project is here
The component itself is simple:
import React from 'react';
import Parser from 'html-react-parser';
import '../styles/about-page.css';
const view = require('ejs-loader!./AboutPage.view.ejs')();
// Since this component is simple and static, there's no parent container for it.
const AboutPage = () => {
return (
<div>
{Parser(view)}
</div>
);
};
export default AboutPage;
And the test is:
import React from 'react';
import {shallow} from 'enzyme';
import AboutPage from './AboutPage';
describe('<AboutPage />', () => {
it('should have a header called \'About\'', () => {
const wrapper = shallow(<AboutPage />);
const actual = wrapper.find('h2').text();
const expected = 'About';
expect(actual).toEqual(expected);
});
});
I have read through the docs and similar questions like this. I attempted to use a custom transformer, but I may be misunderstanding something, as it doesn't appear to even be called.
Package.json
"jest": {
"moduleNameMapper": {
"\\.(css|scss)$": "identity-obj-proxy",
"^.+\\.(gif|ttf|eot|svg|woff|woff2|ico)$": "<rootDir>/tools/fileMock.js"
},
"transform": {
"^.+\\.js$": "babel-jest",
"\\.(ejs|ejx)$": "<rootDir>/tools/ejx-loader/jest.transformer.js"
}
},
and the transformer itself:
module.exports = {
process(src, filename, config, options){
console.log('????');
return 'module.exports = ' + require(`ejs-loader!./${filename}`);
//return require(`ejs-loader!./${filename}`);
}
};
Can you try changing the moduleNameMapper to:
{
"\\.(css|scss)$": "identity-obj-proxy",
"^.+\\.(gif|ttf|eot|svg|woff|woff2|ico)$": "<rootDir>/tools/fileMock.js",
"ejs-loader!(.*)": "$1"
}
This should at least invoke your custom transformer.
Also, the custom transformer should be:
const _ = require('lodash');
module.exports = {
process(src, filename, config, options){
console.log('????');
return 'module.exports = ' + _.template(src);
}
};
It doesn't look like you've specified .ejs as a moduleFileExtension.
"jest": {
...
"moduleFileExtensions": ["js", "jsx", "ejs", "ejx"],
...
}
Also, ejs-loader will export the function using cjs syntax for you, so you can do the following in your transformer:
const loader = require('ejs-loader');
module.exports = {process: loader};
This works for me:
"jest": {
"transform": {
"\\.(ejs|ejx)$": "<rootDir>/jest-ejs.transformer.js"
},
"moduleFileExtensions": ["js", "json", "jsx", "ejs"]
},
In jest-ejs.transformer.js
const loader = require('ejs-loader');
module.exports = {process: loader};
