RequireJS + Mocha + JSDom + Node -> Shim config not supported in Node

I'm trying to set up a Mocha testing framework using JSDom with RequireJS. Because I'm running the tests on Node instead of in a browser (since I'm using JSDom), none of the non-AMD modules seem to be imported, and RequireJS throws "Shim config not supported in Node". Does anyone know how I can export those modules to AMD, or what the right approach is? (a.k.a. what am I doing wrong?)
Example of my set-up
Component.js
define(["jquery", "non_AMD_Module", ... ], function($, NonAMDModule, ...) {
let component = {
...
foo = () => {
NonAMDModule.bar();
};
};
return component;
});
Component.test.js
const requirejs = require('requirejs');
const { JSDOM } = require('jsdom');

requirejs.config({
    baseUrl: "dist/app",
    paths: {
        jquery: "lib/jquery",
        component: "path_to_component",
        non_AMD_Module: "path_to_module"
    },
    shim: {
        non_AMD_Module: { exports: "non_AMD_Module" } // This doesn't work
    }
});

const { window } = new JSDOM("<html></html>");
global.window = window;
global.document = window.document;
global.$ = requirejs('jquery');

const Component = requirejs('component');

describe('test', () => {
    it('is a simple test', () => {
        const testComponent = new Component();
        testComponent.foo();
    });
});
When I run the test suite, I get:
Mocha Exploded!
TypeError: Cannot read property 'bar' of undefined
Note: running r.js -convert "path_to_module" on this module did not work either.

Looking at the source code for jQuery, I found that it contains this boilerplate code that registers it as an AMD module.
The same snippet can be added at the bottom of the non-AMD module to expose it as an AMD module that RequireJS can load:
if ( typeof define === "function" && define.amd ) {
    define([], function () {
        return non_AMD_Module;
    });
}
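Once the module registers itself with define like this, the shim entry for it should no longer be needed. A minimal sketch of the trimmed test config (same hypothetical paths as above):
requirejs.config({
    baseUrl: "dist/app",
    paths: {
        jquery: "lib/jquery",
        component: "path_to_component",
        non_AMD_Module: "path_to_module" // now calls define() itself, so no shim entry is required
    }
});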
Other Resources:
Shim a module in Require.js that uses module.exports possible?
https://github.com/requirejs/requirejs/wiki/Updating-existing-libraries#anon

Related

Is it possible to collect coverage on code loaded with vm.runInContext with Jest?

I'm working on a legacy JS project which is not using any require/import. When deploying, the files are just concatenated and the result is sent to a server.
In order to write tests with jest, I created a custom environment to load all the JS files in the global context so that I can call the functions in the test file.
For example:
src/index.js
function sum(x, y) {
    return x + y;
}
src/index.spec.js
it('should sum two numbers', () => {
    expect(sum(1, 2)).toBe(3);
});
jest.config.js
module.exports = {
    clearMocks: true,
    collectCoverage: true,
    collectCoverageFrom: [
        "src/**/*.js",
    ],
    coverageDirectory: "coverage",
    coverageProvider: "v8",
    testEnvironment: "./jest.env.js",
};
jest.env.js
const NodeEnvironment = require('jest-environment-node').TestEnvironment;
const fs = require('fs');
const vm = require("vm");
const path = require("path");

class CustomEnv extends NodeEnvironment {
    constructor(config, context) {
        super(config, context);
        this.loadContext();
    }

    loadContext() {
        // Read the legacy source and evaluate it in the test's global context,
        // so its functions become available as globals inside the spec files.
        const js = fs.readFileSync('./src/index.js', 'utf8');
        const context = vm.createContext(this.global);
        vm.runInContext(js, context, {
            filename: path.resolve('./src/index.js'),
            displayErrors: true,
        });
        Object.assign(this.global, context);
    }
}

module.exports = CustomEnv;
When I run npx jest, the test is executed but the coverage is empty...
Any idea on how to fix the coverage?
I've created a minimal reproducible repo here: https://github.com/GP4cK/jest-coverage-run-in-context/tree/main. You can just clone it, run npm i and npm t.
Note: I'm happy to change v8 to babel or load the context differently if it makes it easier.

NUXT: Module not found: Error: Can't resolve 'fs'

I'm starting out with Vue and Nuxt. I have a project using Vuetify, and I'm trying to modify the carousel component to dynamically load images from the static folder. So far I've come up with:
<template>
  <v-carousel>
    <v-carousel-item v-for="(item,i) in items" :key="i" :src="item.src"></v-carousel-item>
  </v-carousel>
</template>
<script>
function getImagePaths() {
  var glob = require("glob");
  var options = {
    cwd: "./static"
  };
  var fileNames = glob.sync("*", options);
  var items = [];
  fileNames.forEach(fileName =>
    items.push({
      'src': '/' + fileName
    })
  );
  return items;
}

export default {
  data() {
    return { items: getImagePaths() };
  }
};
</script>
When I test this I see:
ERROR in ./node_modules/fs.realpath/index.js
Module not found: Error: Can't resolve 'fs' in '....\node_modules\fs.realpath'
ERROR in ./node_modules/fs.realpath/old.js
Module not found: Error: Can't resolve 'fs' in '....\node_modules\fs.realpath'
ERROR in ./node_modules/glob/glob.js
Module not found: Error: Can't resolve 'fs' in '....\node_modules\glob'
ERROR in ./node_modules/glob/sync.js
Module not found: Error: Can't resolve 'fs' in '.....\node_modules\glob'
Googling this, I see a bunch of references like https://github.com/webpack-contrib/css-loader/issues/447.
These suggest that you have to modify the webpack config file with something like:
node: {
  fs: 'empty'
}
I know very little about webpack. I found https://nuxtjs.org/faq/extend-webpack/ , but am not sure how to modify the webpack config file in this case.
How do I do this?
You can't use Node.js-specific modules in the browser.
To solve your issue, you can create an API using Nuxt server middleware. The code below is inspired by https://github.com/nuxt-community/express-template.
Create a file at api/index.js. Then fill it with:
const express = require('express')
// Create express instance
const app = express()
// Require API routes
const carousel = require('./routes/carousel')
// Import API Routes
app.use(carousel)
// Export the server middleware
module.exports = {
  path: '/api',
  handler: app
}
Create carousel.js in api/routes/carousel.js. Then fill it with:
const { Router } = require('express')
const glob = require('glob')

const router = Router()

router.get('/carousel/images', async function (req, res) {
  const options = {
    cwd: './static'
  }
  const filenames = glob.sync('*', options)
  let items = [];
  filenames.forEach(filename =>
    items.push({
      'src': '/' + filename
    })
  );
  return res.json({ data: items })
})

module.exports = router
Register your server middleware in nuxt.config.js
module.exports = {
  build: {
    ...
  },
  serverMiddleware: [
    '~/api/index.js'
  ]
}
Call the API in your page/component. I assume you're using Axios here (axios-module).
<script>
export default {
  async asyncData ({ $axios }) {
    const images = (await $axios.$get('/api/carousel/images')).data
    return { images }
  }
}
</script>
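Note that asyncData here returns images, while the original template iterates items, so the carousel would need to bind to the returned name (or rename it). A minimal sketch of the adjusted template:
<template>
  <v-carousel>
    <v-carousel-item v-for="(item,i) in images" :key="i" :src="item.src"></v-carousel-item>
  </v-carousel>
</template>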
I know this is an old question, but it may be helpful for someone to know how to disable fs in the browser build.
Like this:
nuxt.config.js
build: {
  extend (config, { isDev, isClient }) {
    config.node = {
      fs: 'empty'
    }
    // ....
  }
},
Add this in your nuxt.config.js:
build: {
  extend (config, { isDev, isClient }) {
    config.node = {
      fs: 'empty'
    }
    // ....
  }
},

Loading CreateJS with RequireJS “shim” doesn't work

I have made RequireJS work for my own modules (with define), but I'm not able to use shim for CreateJS. I've gone through countless examples and ended up using this: https://github.com/CreateJS/EaselJS/wiki/Using-easeljs-and-tweenjs-with-requirejs, but I'm getting a net::ERR_ABORTED error.
My module:
define(function (require) {
    var createjs = require('createjs');
    var start = function () {
        var canvas = document.getElementById('canvas');
        var stage = new createjs.Stage(canvas);
    };
    return {
        start: start
    };
});
My configuration:
require.config({
    shim: {
        easel: {
            exports: 'createjs'
        }
    },
    paths: {
        easel: 'libs/easeljs.min'
    }
});
I finally found the answer somewhere else; it should be:
require('libs/createjs');
not:
var createjs = require('libs/createjs');
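For context, a minimal sketch of the module written that way, assuming the build at libs/createjs attaches a global createjs object (which is what the shim's exports: 'createjs' setting relies on):
define(function (require) {
    require('libs/createjs'); // loaded only for its side effect of attaching the global `createjs`
    var start = function () {
        var canvas = document.getElementById('canvas');
        var stage = new createjs.Stage(canvas); // use the global rather than a returned module value
    };
    return { start: start };
});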

Jest Testing with require modules: ejs-loader

I am playing with the idea of having large static HTML bundles just loaded into a React component instead of typing them all out in JSX. I am currently just experimenting with ejs-loader and html-react-parser to evaluate the feasibility of this. Everything actually renders fine, but I cannot get any tests to work with Jest for this.
I receive Cannot find module 'ejs-loader!./AboutPage.view.ejs' from 'AboutPage.js' errors and I am unsure of what to do.
I am currently just working off of react-slingshot as my base for experimenting with this.
The repo for the project is here
The component itself is simple:
import React from 'react';
import Parser from 'html-react-parser';
import '../styles/about-page.css';

const view = require('ejs-loader!./AboutPage.view.ejs')();

// Since this component is simple and static, there's no parent container for it.
const AboutPage = () => {
  return (
    <div>
      {Parser(view)}
    </div>
  );
};

export default AboutPage;
And the test is:
import React from 'react';
import {shallow} from 'enzyme';
import AboutPage from './AboutPage';

describe('<AboutPage />', () => {
  it('should have a header called \'About\'', () => {
    const wrapper = shallow(<AboutPage />);
    const actual = wrapper.find('h2').text();
    const expected = 'About';
    expect(actual).toEqual(expected);
  });
});
I have read through the docs and similar questions like this one. I attempted to use a custom transformer, but I may be misunderstanding something, as it doesn't appear to even be called.
Package.json
"jest": {
"moduleNameMapper": {
"\\.(css|scss)$": "identity-obj-proxy",
"^.+\\.(gif|ttf|eot|svg|woff|woff2|ico)$": "<rootDir>/tools/fileMock.js"
},
"transform": {
"^.+\\.js$": "babel-jest",
"\\.(ejs|ejx)$": "<rootDir>/tools/ejx-loader/jest.transformer.js"
}
},
and the transformer itself:
module.exports = {
  process(src, filename, config, options) {
    console.log('????');
    return 'module.exports = ' + require(`ejs-loader!./${filename}`);
    //return require(`ejs-loader!./${filename}`);
  }
};
Can you try changing your moduleNameMapper to:
{
  "\\.(css|scss)$": "identity-obj-proxy",
  "^.+\\.(gif|ttf|eot|svg|woff|woff2|ico)$": "<rootDir>/tools/fileMock.js",
  "ejs-loader!(.*)": "$1"
}
This should at least invoke your custom transformer.
Also, the custom transformer should be:
const _ = require('lodash');

module.exports = {
  process(src, filename, config, options) {
    console.log('????');
    return 'module.exports = ' + _.template(src);
  }
};
It doesn't look like you've specified .ejs as a moduleFileExtension.
"jest": {
...
"moduleFileExtensions": ["js", "jsx", "ejs", "ejx"],
...
}
Also, ejs-loader will export the function using cjs syntax for you, so you can do the following in your transformer:
const loader = require('ejs-loader');
module.exports = {process: loader};
This works for me:
"jest": {
"moduleNameMapper": {
'\\.(ejs|ejx)$': '<rootDir>/jest-ejs.transformer.js'
},
moduleFileExtensions: ['js', 'json', 'jsx', 'ejs']
},
In jest-ejs.transformer.js
const loader = require('ejs-loader');
module.exports = {process: loader};

require.js listener or callback

I am loading a 3rd-party script that simply creates an overlay on whatever site it has been loaded onto. It works fine, but sites using require.js seem to have intermittent issues, which I'm assuming are caused by asynchronously loading some JS files. Is there any type of callback, or a way to create a module in the DOM as a sort of listener, to tell when require.js is done loading?
I tried this, but it's not even close:
define(function() {
    alert('test');
    return {};
});
and
define('myModule',
    function () {
        var myModule = {
            doStuff: function () {
                console.log('Yay! Stuff');
            }
        };
        return myModule;
    });
console.log(myModule);
I ended up just creating a secondary require.config and loading the module with require if require is detected; this seems to work fine. (The context option creates a separate RequireJS context, which should keep this configuration from clashing with the host site's own setup.)
if (typeof require === 'function') {
    var base = 'http://' + someDomainVar;

    function getJSTreeURL() {
        var url = base + '/js/libs/jstree.min';
        return url;
    }

    function getModuleURL() {
        var url = base + '/module';
        return url;
    }

    var reqTwo = require.config({
        context: "instance2",
        baseUrl: "instance2",
        paths: {
            'jq': 'http://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min',
            'jqTree': getJSTreeURL(),
            'module': getModuleURL()
        },
        shim: {
            'jq': {
                exports: 'jq'
            },
            'jqTree': {
                deps: ['jq'],
                exports: 'jqTree'
            },
            'module': {
                deps: ['jq', 'jqTree'],
                exports: 'module'
            }
        }
    });

    reqTwo(['require', 'jq', 'jqTree'],
        function (require, jq, jqTree) {
            setTimeout(function () {
                require(['module'],
                    function (module) {
                        console.log('loaded');
                    }
                );
            }, 0);
        });
}
