jest.spyOn on a global function is ignored when the spy is called from another function

I'm working on a legacy JS web project that doesn't use import/require etc. So if I want to write tests, I need to somehow load all the code before the test is executed (I'm using a custom testEnvironment for that).
I've created a sample repo here.
Here are the main files:
// ./src/index.js
function spyOnMe() {
return "Hello World!";
}
function main() {
const text = spyOnMe();
return text;
}
// ./src/index.spec.js
it('should spyOn spyOnMe', () => {
const mockedValue = 'Thanks for helping!';
jest.spyOn(window, 'spyOnMe').mockReturnValue(mockedValue);
expect(spyOnMe()).toBe(mockedValue); // OK
const result = main();
expect(result).toBe(mockedValue); // KO
});
// ./jest.config.js
module.exports = {
clearMocks: true,
coverageProvider: "v8",
testEnvironment: "./jest.env.js",
};
// ./jest.env.js
const JSDOMEnvironment = require("jest-environment-jsdom");
const vm = require("vm");
const fs = require("fs");
class MyEnv extends JSDOMEnvironment.default {
constructor(config, context) {
super(config, context);
this.loadContext();
}
loadContext() {
const js = fs.readFileSync("./src/index.js", "utf8");
const context = vm.createContext();
context.document = this.global.document;
context.window = this.global.window;
vm.runInContext(js, context, {
filename: "./src/index.js",
displayErrors: true,
});
Object.assign(this.global, context);
}
}
module.exports = MyEnv;
The issue is in the index.spec.js:
The first expect passes: spyOnMe() returns 'Thanks for helping!'.
The second one fails: main() still returns "Hello World!".
Why is that?

I found a fix but I don't really understand why it works:
In jest.env.js, I should replace this line:
- const context = vm.createContext();
+ const context = vm.createContext(this.global);
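Why this works (a minimal sketch in plain Node, outside Jest, with hypothetical names): function declarations run in a vm context become globals of that context. With an empty context, Object.assign only copies the function references onto this.global afterwards, so main() keeps resolving spyOnMe against the separate context's global, and replacing window.spyOnMe never reaches it. Passing this.global makes the loaded code and the tests share one global object:
// sketch.js - plain Node illustration, not the actual Jest environment
const vm = require("vm");

const code = `
function spyOnMe() { return "original"; }
function main() { return spyOnMe(); }
`;

// Case 1: separate context, copy the values out afterwards (like the original jest.env.js)
const host = {};                        // stands in for this.global
const separate = vm.createContext({});
vm.runInContext(code, separate);
Object.assign(host, separate);
host.spyOnMe = () => "mocked";          // like jest.spyOn(window, 'spyOnMe')
console.log(host.main());               // "original": main still resolves spyOnMe inside `separate`

// Case 2: run the script directly against the host global (the fix)
const shared = vm.createContext(host);
vm.runInContext(code, shared);
host.spyOnMe = () => "mocked";
console.log(host.main());               // "mocked": main and the spy now share the same global object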

Related

Unable to stub an exported function with Sinon

I need to test the following createFacebookAdVideoFromUrl(), which consumes a retryAsyncCall that I'd like to stub with Sinon:
async function createFacebookAdVideoFromUrl(accountId, videoUrl, title, facebookToken = FACEBOOK_TOKEN, options = null, businessId = null) {
const method = 'POST';
const url = `${FACEBOOK_URL}${adsSdk.FacebookAdsApi.VERSION}/${accountId}/advideos`;
const formData = {
access_token: businessId ? getFacebookConfig(businessId).token : facebookToken,
title,
name: title,
file_url: videoUrl,
};
const callback = () => requestPromise({ method, url, formData });
const name = 'createFacebookAdVideoFromUrl';
const retryCallParameters = buildRetryCallParameters(name, options);
const adVideo = await retryAsyncCall(callback, retryCallParameters);
logger.info('ADVIDEO', adVideo);
return { id: JSON.parse(adVideo).id, title };
}
This retryAsyncCall function is exported as such:
module.exports.retryAsyncCall = async (callback, retryCallParameters, noRetryFor = [], customRetryCondition = null) => {
// Implementation details ...
}
Here is how I wrote my test so far:
it.only("should create the video calling business's Facebook ids", async () => {
const payload = createPayloadDataBuilder({
businessId: faker.internet.url(),
});
const retryAsyncCallStub = sinon.stub(retryAsyncCallModule, 'retryAsyncCall').resolves('random');
const createdFacebookAd = await FacebookGateway.createFacebookAdVideoFromUrl(
payload.accountId,
payload.videoUrl,
payload.title,
payload.facebookToken,
payload.options,
payload.businessId,
);
assert.strictEqual(retryAsyncCallStub.calledOnce, true);
assert.strictEqual(createdFacebookAd, { id: 'asdf', title: 'asdf' });
});
I don't expect it to work straight away as I am working in a TDD fashion, but I do expect the retryAsyncCall to be stubbed out. Yet I still get this TypeError: Cannot read property 'inc' of undefined error from mocha, which refers to an inner function of retryAsyncCall.
How can I make sinon stubbing work?
I fixed it by changing the way the import is done in my SUT:
// from
const { retryAsyncCall } = require('../../../helpers/retry-async');
// to
const retry = require('../../../helpers/retry-async');
and in my test file:
// from
import * as retryAsyncCallModule from '../../../src/common/helpers/retry-async';
// to
import retryAsyncCallModule from '../../../src/common/helpers/retry-async';
Destructuring copies the function reference at require time instead of looking it up through the module object, so the stub was not applied to the reference my SUT was actually calling.
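A generic sketch of that mechanism (hypothetical file names, not the actual project): sinon.stub(module, 'fn') replaces the property on the module's exports object, so only code that looks the function up through that object at call time sees the stub:
// helper.js
module.exports.fn = () => 'real';

// sut-destructured.js - grabs a copy of the reference at require time
const { fn } = require('./helper');
module.exports = () => fn();

// sut-namespaced.js - looks fn up on the exports object at call time
const helper = require('./helper');
module.exports = () => helper.fn();

// test.js
const sinon = require('sinon');
const helper = require('./helper');
const sutDestructured = require('./sut-destructured'); // already captured the real fn
const sutNamespaced = require('./sut-namespaced');

sinon.stub(helper, 'fn').returns('stubbed');
console.log(sutDestructured()); // 'real'    - the stub is never seen
console.log(sutNamespaced());   // 'stubbed' - the property lookup goes through the stub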

Playwright JS: How to take screenshot for failed test cases in jest test

There is a way to take screenshots whenever test cases fail in Playwright Test Runner using a config file. I am using Jest as a test runner for Playwright JS tests - is there a way to achieve the same thing using Jest?
You can add something like this to your setup file:
global.test = async (name, func) => {
return await it(name, async () => {
try {
await func();
} catch (e) {
const date = new Date();
const year = date.getFullYear();
const month = date.getUTCMonth() + 1;
const dateOfMonth = date.getUTCDate();
const hour = date.getUTCHours();
const minute = date.getUTCMinutes();
const sec = date.getUTCSeconds();
const dateString = `${year}-${month}-${dateOfMonth}-${hour}-${minute}-${sec}`;
const errorScreenshotPath = `screenshots/${browserName}-${dateString}-${name.replace(
/ /g,
"_"
)}.png`;
await page.screenshot({
path: errorScreenshotPath,
});
throw e;
}
});
};
It will not support other test globals, but it's a good starting point.
I've found a solution that works for me here:
https://github.com/playwright-community/jest-playwright#using-with-different-jest-environments
Just override the default PlaywrightEnvironment with a custom CustomEnvironment.js:
const PlaywrightEnvironment = require('jest-playwright-preset/lib/PlaywrightEnvironment')
.default
class CustomEnvironment extends PlaywrightEnvironment {
async setup() {
await super.setup()
// Your setup
}
async teardown() {
// Your teardown
await super.teardown()
}
async handleTestEvent(event) {
await super.handleTestEvent(event);
if (event.name === 'test_done' && event.test.errors.length > 0) {
const parentName = event.test.parent.name.replace(/\W/g, '-')
const specName = event.test.name.replace(/\W/g, '-')
await this.global.page.screenshot({
path: `screenshots/${parentName}_${specName}.png`,
})
}
}
}
module.exports = CustomEnvironment
Update jest.config.json:
testEnvironment: "./CustomEnvironment.js"

sinon stub fails for promise functions if not exported within class

I'm trying to get sinon.stub to work for an async function. I have created promiseFunction.js:
let functionToBeStubbed = async function() {
return ("Text to be replaced by stub.");
};
let promiseFunction = async function() {
return(await functionToBeStubbed());
};
module.exports = {
promiseFunction: promiseFunction,
functionToBeStubbed: functionToBeStubbed
};
and test promiseFunction.spec.js:
let functionstobestested = require('./promiseFunction.js');
describe('Sinon Stub Test', function () {
var sandbox;
it('should return --Text to be replaced by stub.--', async function () {
let responsevalue = "The replaced text.";
sandbox = sinon.sandbox.create();
sandbox.stub(functionstobestested, 'functionToBeStubbed').resolves(responsevalue);
//sandbox.stub(functionstobestested, 'functionToBeStubbed').returns(responsevalue);
let result = "Empty";
console.log(`BEFORE: originaldata = ${result}, value = ${responsevalue}`);
result = await functionstobestested.promiseFunction();
console.log(`AFTER: originaldata = ${result}, value = ${responsevalue}`);
expect(result).to.equal(responsevalue);
sandbox.restore();
console.log("AFTER2: Return value after restoring stub: " + await functionstobestested.promiseFunction());
});
});
When running the test, it fails: the assertion receives the original text instead of the stubbed value.
If I modify the export slightly, it still fails:
var functionsForTesting = {
promiseFunction: promiseFunction,
functionToBeStubbed: functionToBeStubbed
};
module.exports = functionsForTesting;
I do not understand why this test fails, as it should pass. If I change the way I export the functions from the promiseFunction.js module, the test passes. Revised promiseFunction.js:
const functionsForTesting = {
functionToBeStubbed: async function() {
return ("Text to be replaced by stub.");
},
promiseFunction: async function() {
return (await functionsForTesting.functionToBeStubbed());
},
};
module.exports = functionsForTesting;
The test passes.
What's wrong with my original and modified ways of exporting the functions?
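For what it's worth, the likely explanation is the same reference issue as in the Sinon question above: sinon.stub(obj, 'functionToBeStubbed') only replaces the property on the exported object, while the original promiseFunction calls the local variable functionToBeStubbed, which still points at the real function. The revised version calls functionsForTesting.functionToBeStubbed(), so the (stubbed) property is looked up at call time. A standalone sketch of the difference, with hypothetical names:
// sketch.js - runnable with plain Node, hypothetical names
const obj = {
  target: async () => "original",
};

// like the original module: the call captures the local binding
const viaLocalBinding = (() => {
  const target = obj.target;
  return async () => target();
})();

// like the revised module: the call looks the property up on the object
const viaProperty = async () => obj.target();

// roughly what sinon.stub(obj, 'target').resolves(...) does to the object
obj.target = async () => "stubbed";

viaLocalBinding().then(console.log); // "original" - the replacement is not seen
viaProperty().then(console.log);     // "stubbed"  - the replacement is picked up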

Using Babel to Get ApolloClient to ES5 CommonJS Module Format for Node Environment

I'm trying to use Babel to get the apollo-client module to work as ES5 in a non-browser, Node environment. I've gone through the steps below, which always give me the same result, and I'm trying to figure out whether that result is the right one for a Node environment. When I import the Babel-processed files into my project and call a method that should be exported, I get "cannot find module". For context, the project is a fusetools.com demo. Fusetools does not support ES2015 Promises, so the idea is that with the Babel es2015 preset it should work. I'm mostly chasing this down to learn something, but it would be great if I could get it to work. Any comments on an easier way to do this, now that I understand it better, would be greatly appreciated. The project where I Babeled the code can be found here. The fusetools project where I used the transformed code is here.
The error I get is:
LOG: Error: JavaScript error in MainView.ux line 9: Name: Fuse.Scripting.Error
Error message: require(): module not found: js/apollo-client/ApolloClient.js
File name: MainView.ux
Line number: 9
Source line: var ApolloClient = require('js/apollo-client/ApolloClient.js');
This is the code I'm trying to reach:
```
"use strict";
var networkInterface_1 = require('./transport/networkInterface');
var isUndefined = require('lodash.isundefined');
var assign = require('lodash.assign');
var isString = require('lodash.isstring');
var store_1 = require('./store');
var QueryManager_1 = require('./core/QueryManager');
var storeUtils_1 = require('./data/storeUtils');
var fragments_1 = require('./fragments');
var getFromAST_1 = require('./queries/getFromAST');
var DEFAULT_REDUX_ROOT_KEY = 'apollo';
function defaultReduxRootSelector(state) {
return state[DEFAULT_REDUX_ROOT_KEY];
}
var ApolloClient = function () {
function ApolloClient(_a) {
var _this = this;
var _b = _a === void 0 ? {} : _a,
networkInterface = _b.networkInterface,
reduxRootKey = _b.reduxRootKey,
reduxRootSelector = _b.reduxRootSelector,
initialState = _b.initialState,
dataIdFromObject = _b.dataIdFromObject,
resultTransformer = _b.resultTransformer,
resultComparator = _b.resultComparator,
_c = _b.ssrMode,
ssrMode = _c === void 0 ? false : _c,
_d = _b.ssrForceFetchDelay,
ssrForceFetchDelay = _d === void 0 ? 0 : _d,
_e = _b.mutationBehaviorReducers,
mutationBehaviorReducers = _e === void 0 ? {} : _e,
_f = _b.addTypename,
addTypename = _f === void 0 ? true : _f,
queryTransformer = _b.queryTransformer;
this.middleware = function () {
return function (store) {
_this.setStore(store);
return function (next) {
return function (action) {
var returnValue = next(action);
_this.queryManager.broadcastNewStore(store.getState());
return returnValue;
};
};
};
};
if (reduxRootKey && reduxRootSelector) {
throw new Error('Both "reduxRootKey" and "reduxRootSelector" are configured, but only one of two is allowed.');
}
if (reduxRootKey) {
console.warn('"reduxRootKey" option is deprecated and might be removed in the upcoming versions, ' + 'please use the "reduxRootSelector" instead.');
this.reduxRootKey = reduxRootKey;
}
if (queryTransformer) {
throw new Error('queryTransformer option no longer supported in Apollo Client 0.5. ' + 'Instead, there is a new "addTypename" option, which is on by default.');
}
if (!reduxRootSelector && reduxRootKey) {
this.reduxRootSelector = function (state) {
return state[reduxRootKey];
};
} else if (isString(reduxRootSelector)) {
this.reduxRootKey = reduxRootSelector;
this.reduxRootSelector = function (state) {
return state[reduxRootSelector];
};
} else if (typeof reduxRootSelector === 'function') {
this.reduxRootSelector = reduxRootSelector;
} else {
this.reduxRootSelector = null;
}
this.initialState = initialState ? initialState : {};
this.networkInterface = networkInterface ? networkInterface : networkInterface_1.createNetworkInterface({ uri: '/graphql' });
this.addTypename = addTypename;
this.resultTransformer = resultTransformer;
this.resultComparator = resultComparator;
this.shouldForceFetch = !(ssrMode || ssrForceFetchDelay > 0);
this.dataId = dataIdFromObject;
this.fieldWithArgs = storeUtils_1.storeKeyNameFromFieldNameAndArgs;
if (ssrForceFetchDelay) {
setTimeout(function () {
return _this.shouldForceFetch = true;
}, ssrForceFetchDelay);
}
this.reducerConfig = {
dataIdFromObject: dataIdFromObject,
mutationBehaviorReducers: mutationBehaviorReducers
};
this.watchQuery = this.watchQuery.bind(this);
this.query = this.query.bind(this);
this.mutate = this.mutate.bind(this);
this.setStore = this.setStore.bind(this);
this.resetStore = this.resetStore.bind(this);
}
ApolloClient.prototype.watchQuery = function (options) {
this.initStore();
if (!this.shouldForceFetch && options.forceFetch) {
options = assign({}, options, {
forceFetch: false
});
}
fragments_1.createFragment(options.query);
var fullDocument = getFromAST_1.addFragmentsToDocument(options.query, options.fragments);
var realOptions = Object.assign({}, options, {
query: fullDocument
});
delete realOptions.fragments;
return this.queryManager.watchQuery(realOptions);
};
;
ApolloClient.prototype.query = function (options) {
this.initStore();
if (!this.shouldForceFetch && options.forceFetch) {
options = assign({}, options, {
forceFetch: false
});
}
fragments_1.createFragment(options.query);
var fullDocument = getFromAST_1.addFragmentsToDocument(options.query, options.fragments);
var realOptions = Object.assign({}, options, {
query: fullDocument
});
delete realOptions.fragments;
return this.queryManager.query(realOptions);
};
;
ApolloClient.prototype.mutate = function (options) {
this.initStore();
var fullDocument = getFromAST_1.addFragmentsToDocument(options.mutation, options.fragments);
var realOptions = Object.assign({}, options, {
mutation: fullDocument
});
delete realOptions.fragments;
return this.queryManager.mutate(realOptions);
};
;
ApolloClient.prototype.subscribe = function (options) {
this.initStore();
var fullDocument = getFromAST_1.addFragmentsToDocument(options.query, options.fragments);
var realOptions = Object.assign({}, options, {
document: fullDocument
});
delete realOptions.fragments;
delete realOptions.query;
return this.queryManager.startGraphQLSubscription(realOptions);
};
ApolloClient.prototype.reducer = function () {
return store_1.createApolloReducer(this.reducerConfig);
};
ApolloClient.prototype.initStore = function () {
if (this.store) {
return;
}
if (this.reduxRootSelector) {
throw new Error('Cannot initialize the store because "reduxRootSelector" or "reduxRootKey" is provided. ' + 'They should only be used when the store is created outside of the client. ' + 'This may lead to unexpected results when querying the store internally. ' + "Please remove that option from ApolloClient constructor.");
}
this.setStore(store_1.createApolloStore({
reduxRootKey: DEFAULT_REDUX_ROOT_KEY,
initialState: this.initialState,
config: this.reducerConfig
}));
this.reduxRootKey = DEFAULT_REDUX_ROOT_KEY;
};
;
ApolloClient.prototype.resetStore = function () {
this.queryManager.resetStore();
};
;
ApolloClient.prototype.setStore = function (store) {
var reduxRootSelector;
if (this.reduxRootSelector) {
reduxRootSelector = this.reduxRootSelector;
} else {
reduxRootSelector = defaultReduxRootSelector;
this.reduxRootKey = DEFAULT_REDUX_ROOT_KEY;
}
if (isUndefined(reduxRootSelector(store.getState()))) {
throw new Error('Existing store does not use apolloReducer. Please make sure the store ' + 'is properly configured and "reduxRootSelector" is correctly specified.');
}
this.store = store;
this.queryManager = new QueryManager_1.QueryManager({
networkInterface: this.networkInterface,
reduxRootSelector: reduxRootSelector,
store: store,
addTypename: this.addTypename,
resultTransformer: this.resultTransformer,
resultComparator: this.resultComparator,
reducerConfig: this.reducerConfig
});
};
;
return ApolloClient;
}();
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = ApolloClient;
//# sourceMappingURL=ApolloClient.js.map
```
Any and all comments I might learn from are appreciated. Thank you.
One way to do this would be to use webpack like this:
const webpack = require('webpack');
const path = require('path');
module.exports = {
// watch: true,
entry: {
ApolloClient: './config/ApolloClient.js',
createNetworkInterface: './config/createNetworkInterface.js',
Redux: './config/Redux.js',
},
output: {
path: path.join(__dirname, 'build/Libs'),
filename: '[name].js',
library: '[name]',
libraryTarget: 'commonjs',
},
module: {
rules: [
{
use: 'babel-loader',
test: /\.js$/,
exclude: /node_modules/,
},
],
},
plugins: [
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
}),
],
};
Then in config directory you could have:
/* ApolloClient.js */
import { ApolloClient } from 'apollo-client';
export default ApolloClient;
and
/* createNetworkInterface.js */
import { createNetworkInterface } from 'apollo-client/transport/networkInterface';
export default createNetworkInterface;
plus if you want to have Redux as well:
/* Redux.js */
import * as Redux from 'redux';
export default Redux;
However I was not able to get gql done this way and had to use bolav's fusepm.
You would use it exactly as bolav has mentioned; first install it globally:
npm install -g fusepm and then fusepm npm graphql-tag
Once you have all of these in place you can require them as follows:
var Redux = require('build/Libs/Redux');
var ApolloClient = require('build/Libs/ApolloClient');
var createNetworkInterface = require('build/Libs/createNetworkInterface');
var gql = require('fusejs_lib/graphql-tag_graphql-tag.umd');
This approach could still use some TLC, but for now it works and gets the job done:
var networkInterface = createNetworkInterface.createNetworkInterface({
uri: 'http://localhost:8000/graphql',
});
var client = new ApolloClient.ApolloClient({
networkInterface,
});
client.query({
query: gql`
query {
allPosts {
edges {
node {
id
headline
summary(length: 80)
body
createdAt
updatedAt
personByAuthorId {
firstName
lastName
}
}
}
}
}
`,
})
.then(data => data.data.allPosts.edges.forEach(node => pages.add(createPage(node))))
.catch(error => console.log(error));
Also, if you like, I've set up a whole project along with a server that might be of interest to you: fuseR
I made fusepm, which has a mode to convert npm modules so they run under FuseTools. It still has a lot of bugs, but at least I managed to get further than you did:
fuse create app apolloc
cd apolloc
npm install apollo-client
fusepm npm apollo-client
And then in your javascript:
<JavaScript>
var ApolloClient = require('fusejs_lib/apollo-client.js');
</JavaScript>
fusepm uses Babel, with some custom plugins.

Declare multiple module.exports in Node.js

What I'm trying to achieve is to create one module that contains multiple functions in it.
module.js:
module.exports = function(firstParam) { console.log("You did it"); },
module.exports = function(secondParam) { console.log("Yes you did it"); },
// This may contain more functions
main.js:
var foo = require('module.js')(firstParam);
var bar = require('module.js')(secondParam);
The problem I have is that firstParam is an object and secondParam is a URL string, but with this setup it always complains that the type is wrong.
How can I declare multiple module.exports in this case?
You can do something like:
module.exports = {
method: function() {},
otherMethod: function() {},
};
Or just:
exports.method = function() {};
exports.otherMethod = function() {};
Then in the calling script:
const myModule = require('./myModule.js');
const method = myModule.method;
const otherMethod = myModule.otherMethod;
// OR:
const {method, otherMethod} = require('./myModule.js');
To export multiple functions you can just list them like this:
module.exports = {
function1,
function2,
function3
}
And then to access them in another file:
var myFunctions = require("./lib/file.js")
And then you can call each function by calling:
myFunctions.function1
myFunctions.function2
myFunctions.function3
In addition to @mash's answer, I recommend always doing the following:
const method = () => {
// your method logic
}
const otherMethod = () => {
// your method logic
}
module.exports = {
method,
otherMethod,
// anotherMethod
};
Note here:
You can call method from otherMethod, and you will need this a lot (see the sketch below)
You can quickly hide a method as private when you need to
This is easier for most IDEs to understand and autocomplete your code ;)
You can also use the same technique for import:
const {otherMethod} = require('./myModule.js');
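For instance (a hypothetical sketch), otherMethod can reuse method directly because both are plain consts in module scope, and a helper stays private simply by being left out of the exported object:
// myModule.js
const privateHelper = () => 'helper'; // not exported: effectively private

const method = () => `method using ${privateHelper()}`;

// otherMethod can call method directly
const otherMethod = () => `${method()} and more`;

module.exports = {
  method,
  otherMethod,
};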
module.js:
const foo = function(<params>) { ... }
const bar = function(<params>) { ... }
//export modules
module.exports = {
foo,
bar
}
main.js:
// import modules
var { foo, bar } = require('module');
// pass your parameters
var f1 = foo(<params>);
var f2 = bar(<params>);
This is just for my reference, as it accomplishes what I was trying to achieve.
In module.js we can do something like this:
module.exports = function ( firstArg, secondArg ) {
function firstFunction ( ) { ... }
function secondFunction ( ) { ... }
function thirdFunction ( ) { ... }
return { firstFunction: firstFunction, secondFunction: secondFunction,
thirdFunction: thirdFunction };
}
In the main.js
var name = require('module')(firstArg, secondArg);
If the files are written using ES6 export, you can write:
module.exports = {
...require('./foo'),
...require('./bar'),
};
One way you can do it is to add properties to the module's exports object instead of replacing it.
For example:
var testOne = function () {
console.log('test one');
};
var testTwo = function () {
console.log('test two');
};
module.exports.testOne = testOne;
module.exports.testTwo = testTwo;
and to call:
var testOne = require('path_to_file').testOne;
testOne();
You can write a function that manually delegates between the other functions:
module.exports = function(arg) {
if (typeof arg === 'string') {
return doStringThing.apply(this, arguments);
} else {
return doObjectThing.apply(this, arguments);
}
};
There are multiple ways to do this; one of them is shown below.
Assume you have a .js file like this:
let add = function (a, b) {
console.log(a + b);
};
let sub = function (a, b) {
console.log(a - b);
};
You can export these functions using the following code snippet,
module.exports.add = add;
module.exports.sub = sub;
And you can use the exported functions using this code snippet,
var add = require('./counter').add;
var sub = require('./counter').sub;
add(1,2);
sub(1,2);
I know this is a late reply, but hope this helps!
Use this:
(function()
{
var exports = module.exports = {};
exports.yourMethod = function (success)
{
}
exports.yourMethod2 = function (success)
{
}
})();
You can also export them like this:
const func1 = function () { /* some code here */ };
const func2 = function () { /* some code here */ };
exports.func1 = func1;
exports.func2 = func2;
or with arrow functions like this:
const func1 = () => { /* some code here */ };
const func2 = () => { /* some code here */ };
exports.func1 = func1;
exports.func2 = func2;
You can do it like I did below, for both regular functions and arrow functions:
greet.js :
function greetFromGreet() {
console.log("hello from greet module...");
}
const greetVar = () => {
console.log("greet var as a arrow fn/...");
};
module.exports = { greetVar, greetFromGreet }; // ---- multiple module export...
// -----------------------------------------------
app.js :
const greetFromGreets = require("./greet");
greetFromGreets.greetFromGreet();
greetFromGreets.greetVar();
// -----------------------------------------------
Inside your node module you can export various functions such as:
module.exports.eat = eat;
function eat() {
// ...
return "something";
};
module.exports.sleep = sleep;
function sleep() {
// ...
return "something";
};
Note that you are not calling the functions while exporting them.
Then, when requiring the module, you can use them as:
const task = require(__dirname + "/task.js");
//task is the name of the file
let eat = task.eat();
let sleep = task.sleep();
Two styles of module export and import.
type 1 (module.js):
// module like a webpack config
const development = {
// ...
};
const production = {
// ...
};
// export multiple values as one object (so they can be destructured by name on import)
module.exports = { development, production };
// export single
// module.exports = development;
type 1 (main.js):
// import module like a webpack config
const { development, production } = require("./path/to/module");
type 2 (module.js):
// module function no param
const module1 = () => {
// ...
};
// module function with param
const module2 = (param1, param2) => {
// ...
};
// export module
module.exports = {
module1,
module2
}
type 2 (main.js):
// import module function
const { module1, module2 } = require("./path/to/module");
How to use the imported modules:
const importModule = {
...development,
// ...production,
// ...module1,
...module2("param1", "param2"),
};
module1.js:
var myFunctions = {
myfunc1:function(){
},
myfunc2:function(){
},
myfunc3:function(){
},
}
module.exports=myFunctions;
main.js
var myModule = require('./module1');
myModule.myfunc1(); //calling myfunc1 from module
myModule.myfunc2(); //calling myfunc2 from module
myModule.myfunc3(); //calling myfunc3 from module
Use the export keyword
module.js
export {method1, method2}
And import them in main.js
import {method1, method2} from "./module"
If you declare classes in the module file instead of a simple object:
File: UserModule.js
//User Module
class User {
constructor(){
//enter code here
}
create(params){
//enter code here
}
}
class UserInfo {
constructor(){
//enter code here
}
getUser(userId){
//enter code here
return user;
}
}
// export multiple classes as one object (so they can be destructured by name on import)
module.exports = { User, UserInfo };
Main File: index.js
// import module like
const { User, UserInfo } = require("./path/to/UserModule");
new User().create(params);
new UserInfo().getUser(userId);
You can use this approach too
module.exports.func1 = ...
module.exports.func2 = ...
or
exports.func1 = ...
exports.func2 = ...
Adding this here in case it helps someone: this code block shows how to combine multiple plugins in the Cypress plugins index.js.
Plugins: cypress-ntlm-auth and Cypress env-based config file selection
const ntlmAuth = require('cypress-ntlm-auth/dist/plugin');
const fs = require('fs-extra');
const path = require('path');
const getConfigurationByFile = async (config) => {
const file = config.env.configFile || 'dev';
const pathToConfigFile = path.resolve(
'../Cypress/cypress/',
'config',
`${file}.json`
);
console.log('pathToConfigFile' + pathToConfigFile);
return fs.readJson(pathToConfigFile);
};
module.exports = async (on, config) => {
config = await getConfigurationByFile(config);
await ntlmAuth.initNtlmAuth(config);
return config;
};
module.exports = (function () {
'use strict';
var foo = function () {
return {
public_method: function () {}
};
};
var bar = function () {
return {
public_method: function () {}
};
};
return {
module_a: foo,
module_b: bar
};
}());
