How to mock event and event handler for node.js module? - jestjs

Here is my function
async function remoteCSVToJSON<T>(url: string, options: csv.ConverterOptions) {
const defaultOptions = {
noheader: false,
delimiter: ',',
workerNum: os.cpus().length,
};
const finalOptions = defaultsDeep(options, defaultOptions);
const datas: T[] = [];
return new Promise<T[]>((resolve, reject) => {
request.get(url).then(res => {
csv(finalOptions)
.fromString(res)
.on('json', (jsonObj: T) => datas.push(jsonObj))
.on('error', err => reject(err))
.on('end', () => {
logger.info('convert csv to json done');
resolve(datas);
});
});
});
}
I can mock fromString for the csvtojson module like this:
jest.mock('csvtojson', () => {
return {
__esModule: true,
fromString: jest.fn(),
};
});
But how do I mock the .on(event) method and event handlers?
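One way (shown in full in the solution below) is to give the mocked .on an implementation that records each handler by event name and returns this, so the test can trigger the events itself; a minimal sketch:
// build a chainable mock whose .on records each handler by event name
const events: Record<string, (...args: any[]) => void> = {};
const mCsv = {
  fromString: jest.fn().mockReturnThis(),
  on: jest.fn(function (this: any, event: string, handler: (...args: any[]) => void) {
    events[event] = handler;
    return this;
  }),
};
// the test can then fire the events itself, e.g. events['json'](fakeRow); events['end']();
// (fakeRow is illustrative; the full wiring with jest.mock('csvtojson', ...) follows below)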

Here is the unit test solution:
index.ts:
import request from 'request-promise';
import csv from 'csvtojson';
import os from 'os';
import { defaultsDeep } from 'lodash';
export async function remoteCSVToJSON<T>(url: string, options: any) {
const defaultOptions = {
noheader: false,
delimiter: ',',
workerNum: os.cpus().length
};
const finalOptions = defaultsDeep(options, defaultOptions);
const datas: T[] = [];
return new Promise<T[]>((resolve, reject) => {
request.get(url).then(res => {
csv(finalOptions)
.fromString(res)
.on('json', (jsonObj: T) => datas.push(jsonObj))
.on('error', err => reject(err))
.on('end', () => {
console.info('convert csv to json done');
resolve(datas);
});
});
});
}
index.spec.ts:
import request from 'request-promise';
import csv from 'csvtojson';
import os from 'os';
import { remoteCSVToJSON } from './';
jest.mock('csvtojson', () => {
const mCsv = {
on: jest.fn(),
fromString: jest.fn().mockReturnThis()
};
return jest.fn(() => mCsv);
});
jest.mock('request-promise', () => {
return {
get: jest.fn()
};
});
describe('remoteCSVToJSON', () => {
afterEach(() => {
jest.restoreAllMocks();
});
test('should request and convert csv to json', async () => {
const events: any = {};
csv()
.fromString()
.on.mockImplementation(function(this: any, ...args: any[]) {
const [event, handler] = args;
events[event] = handler;
return this;
});
const mGetResponse = { data: 'fake data' };
(request.get as jest.MockedFunction<typeof request.get>).mockResolvedValueOnce(mGetResponse);
const infoSpy = jest.spyOn(console, 'info');
const actualValue = remoteCSVToJSON('url', {});
const mJsonObj = { id: 1, name: 'mrdulin' };
expect(jest.isMockFunction(csv)).toBeTruthy();
await new Promise(resolve => setTimeout(resolve, 0));
expect(csv).toBeCalledWith({
noheader: false,
delimiter: ',',
workerNum: os.cpus().length
});
expect(csv().fromString).toBeCalledWith(mGetResponse);
events['json'](mJsonObj);
events['end']();
await expect(actualValue).resolves.toEqual([mJsonObj]);
expect(request.get).toBeCalledWith('url');
expect(infoSpy).toBeCalledWith('convert csv to json done');
});
test('should throw error when convert error', async () => {
const events: any = {};
csv()
.fromString()
.on.mockImplementation(function(this: any, ...args: any[]) {
const [event, handler] = args;
events[event] = handler;
return this;
});
const mGetResponse = { data: 'fake data' };
(request.get as jest.MockedFunction<typeof request.get>).mockResolvedValueOnce(mGetResponse);
const actualValue = remoteCSVToJSON('url', {});
const mJsonObj = { id: 1, name: 'mrdulin' };
expect(jest.isMockFunction(csv)).toBeTruthy();
await new Promise(resolve => setTimeout(resolve, 0));
expect(csv).toBeCalledWith({
noheader: false,
delimiter: ',',
workerNum: os.cpus().length
});
expect(csv().fromString).toBeCalledWith(mGetResponse);
events['json'](mJsonObj);
const mError = new Error('mock error');
events['error'](mError);
await expect(actualValue).rejects.toThrowError(mError);
expect(request.get).toBeCalledWith('url');
});
});
Unit test result with 100% coverage:
PASS src/stackoverflow/57423762/index.spec.ts (6.995s)
remoteCSVToJSON
✓ should request and convert csv to json (16ms)
✓ should throw error when convert error (22ms)
console.info node_modules/jest-mock/build/index.js:860
convert csv to json done
----------|----------|----------|----------|----------|-------------------|
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s |
----------|----------|----------|----------|----------|-------------------|
All files | 100 | 100 | 100 | 100 | |
index.ts | 100 | 100 | 100 | 100 | |
----------|----------|----------|----------|----------|-------------------|
Test Suites: 1 passed, 1 total
Tests: 2 passed, 2 total
Snapshots: 0 total
Time: 8.114s
Source code: https://github.com/mrdulin/jest-codelab/tree/master/src/stackoverflow/57423762

Related

TypeError: AWS.SecretsManager is not a constructor in unit testing with proxyquire

I have written test code for a function that gets credentials from AWS Secrets Manager. I used proxyquire and sinon for stubbing and am getting the error below.
Function I want to test
exports.getCredsFromAWSSecretsManager = (keyName) => {
const SM = new AWS.SecretsManager({
apiVersion: process.env.AWS_SM_API_VERSION,
region: process.env.AWS_SM_REGION
});
return SM.getSecretValue(params).promise().then((data) => {
logger.info(logMsgs.awsHlpr_smGetSecretValueSuccess(JSON.stringify(data)));
return JSON.parse(data.SecretString);
}).catch((err) => {
logger.error(logMsgs.awsHlpr_smGetSecretValueErr(JSON.stringify(err)));
throw err;
});
};
Test case that I have written
const sinon = require("sinon");
const proxyquire = require("proxyquire").noCallThru().noPreserveCache();
const { mockLogger } = require("../../mockdata/mockLogger");
let awsHelper;
let secretsManagerStub;
describe.only("AWS Helper ", () => {
// function1
describe("AWS Helper: getCredsFromAWSSecretsManagera method", () => {
before((done) => {
const data = {
SecretString: JSON.stringify({ publicKey: 'secretUsername', privateKey: 'secretPassword' }),
};
secretsManagerStub = {
getSecretValue: sinon.stub().callsFake((params, callback) => {
callback(null, data);
}),
};
const awsStub = {
SecretsManager: sinon.stub().returns(secretsManagerStub)
}
awsHelper = proxyquire('../../../utils/aws_helper.js', {
'aws-sdk':{
AWS:awsStub
} ,
"../../utils/logger": mockLogger,
});
done();
});
afterEach(() => {
sinon.restore();
});
it('should write random data!', async () => {
const expectedData = "abcdef";
secretsManagerStub.getSecretValue.yields(null, expectedData);
const data = await awsHelper.getCredsFromAWSSecretsManager();
sinon.assert.callCount(secretsManagerStub.getSecretValue, 1);
assert.strictEqual(data, expectedData);
});
});
});
This code gives me the following error:
TypeError: AWS.SecretsManager is not a constructor
Any help would be greatly appreciated.
AWS is a namespace; it contains all AWS service classes, such as SecretsManager. You should provide awsStub directly as the stub for aws-sdk; there is no need to wrap it inside another object.
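Concretely, the fix is to hand awsStub straight to the 'aws-sdk' key of the stub map rather than nesting it (a minimal sketch; the paths follow the question):
// before: the stub is wrapped one level too deep
// awsHelper = proxyquire('../../../utils/aws_helper.js', { 'aws-sdk': { AWS: awsStub } });

// after: the stub itself plays the role of the aws-sdk namespace object
awsHelper = proxyquire('../../../utils/aws_helper.js', {
  'aws-sdk': awsStub,
  '../../utils/logger': mockLogger,
});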
aws_helper.js:
const AWS = require('aws-sdk');
exports.getCredsFromAWSSecretsManager = () => {
const SM = new AWS.SecretsManager({
apiVersion: process.env.AWS_SM_API_VERSION,
region: process.env.AWS_SM_REGION,
});
const params = {
SecretId: '1',
};
return SM.getSecretValue(params)
.promise()
.then((data) => {
console.info(data);
return JSON.parse(data.SecretString);
})
.catch((err) => {
console.error(err);
throw err;
});
};
aws_helper.test.js:
const sinon = require('sinon');
const proxyquire = require('proxyquire').noCallThru().noPreserveCache();
let awsHelper;
let secretsManagerStub;
describe('AWS Helper: getCredsFromAWSSecretsManagera method', () => {
before(() => {
const data = {
SecretString: JSON.stringify({ publicKey: 'secretUsername', privateKey: 'secretPassword' }),
};
secretsManagerStub = {
getSecretValue: sinon.stub().returnsThis(),
promise: sinon.stub().resolves(data),
};
const awsStub = {
SecretsManager: sinon.stub().returns(secretsManagerStub),
};
awsHelper = proxyquire('./aws_helper.js', {
'aws-sdk': awsStub,
});
});
afterEach(() => {
sinon.restore();
});
it('should write random data!', async () => {
const data = await awsHelper.getCredsFromAWSSecretsManager();
sinon.assert.callCount(secretsManagerStub.getSecretValue, 1);
sinon.assert.match(data, { publicKey: 'secretUsername', privateKey: 'secretPassword' });
});
});
test result:
AWS Helper: getCredsFromAWSSecretsManagera method
{
SecretString: '{"publicKey":"secretUsername","privateKey":"secretPassword"}'
}
✓ should write random data!
1 passing (2s)
---------------|---------|----------|---------|---------|-------------------
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
---------------|---------|----------|---------|---------|-------------------
All files | 77.78 | 100 | 66.67 | 77.78 |
aws_helper.js | 77.78 | 100 | 66.67 | 77.78 | 19-20
---------------|---------|----------|---------|---------|-------------------

Jest Mock FS File Stream

I am very new to Jest and mocking. I have an fs read stream with two events, data and end, and I am trying to mock the code below.
ReadFile.js
const csv = require('csv-parser');
let storeData=[];
csvFileReader() {
fs.createReadStream(path.resolve(__dirname, "./abc.csv"))
.pipe(csv())
.on('data', async (row) => {
storeData[0] = row.postcode;
})
.on('end', () => {
console.log('Done')
});
}
ReadFileTest.js
import ReadFile from './readFile.js';
const fs = require('fs');
jest.mock('fs');
describe('Load File', () => {
const readFile= new ReadFile();
test('Test data handler', async () => {
const mockPipeOn = { on: jest.fn().mockImplementation(function(this, event, handler) {
if (event === 'data') {
jest.fn.mockReturnValueOnce("Reading Data")
}
if (event === 'end') {
jest.fn.mockReturnValueOnce("Completed Reading Data")
}
return this;
}), };
const mockReadStream = { pipe: jest.fn().mockReturnValueOnce(mockPipeOn) };
const createReadStream = jest.fn().mockReturnValueOnce(mockReadStream);
await readFile.csvFileReader();
});
});
I am getting an error on the this keyword; execution is not reaching the 'data' and 'end' handlers.
You should use mockFn.mockReturnThis() to return the context.
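In other words, for a chainable mock the following two definitions behave the same (a minimal sketch):
const on = jest.fn().mockReturnThis();
// is equivalent to returning the context explicitly:
const onExplicit = jest.fn().mockImplementation(function (this: any) {
  return this;
});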
E.g.
ReadFile.js:
import fs from 'fs';
import path from 'path';
import csv from 'csv-parser';
class ReadFile {
csvFileReader() {
fs.createReadStream(path.resolve(__dirname, './abc.csv'))
.pipe(csv())
.on('data', async (row) => {
console.log('Storing Data');
})
.on('end', () => {
console.log('Done');
});
}
}
export default ReadFile;
ReadFile.test.js:
import ReadFile from './ReadFile';
import fs from 'fs';
jest.mock('fs');
describe('67216891', () => {
const readFile = new ReadFile();
it('should store', () => {
const mReadStream = {
pipe: jest.fn().mockReturnThis(),
on: jest.fn().mockImplementation(function (event, handler) {
handler();
return this;
}),
};
fs.createReadStream.mockReturnValueOnce(mReadStream);
readFile.csvFileReader();
expect(fs.createReadStream).toBeCalledTimes(1);
expect(mReadStream.pipe).toBeCalledTimes(1);
expect(mReadStream.on).toBeCalledWith('data', expect.any(Function));
expect(mReadStream.on).toBeCalledWith('end', expect.any(Function));
});
});
test result:
PASS examples/67216891/ReadFile.test.js (8.891 s)
67216891
✓ should store (20 ms)
console.log
Storing Data
at ReadFile.<anonymous> (examples/67216891/ReadFile.js:10:17)
console.log
Done
at examples/67216891/ReadFile.js:13:17
-------------|---------|----------|---------|---------|-------------------
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
-------------|---------|----------|---------|---------|-------------------
All files | 100 | 100 | 100 | 100 |
ReadFile.js | 100 | 100 | 100 | 100 |
-------------|---------|----------|---------|---------|-------------------
Test Suites: 1 passed, 1 total
Tests: 1 passed, 1 total
Snapshots: 0 total
Time: 10.159 s
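If you also want to assert what the 'data' handler stored (the original csvFileReader keeps row.postcode), the mocked on can hand the handler a fake row; a sketch, where the postcode value is illustrative:
const mReadStream = {
  pipe: jest.fn().mockReturnThis(),
  on: jest.fn().mockImplementation(function (this: any, event, handler) {
    if (event === 'data') handler({ postcode: 'AB1 2CD' }); // fake CSV row
    if (event === 'end') handler();
    return this;
  }),
};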

How to mock AWS DynamoDB in Jest for Serverless Nodejs Lambda?

I wrote a lambda as follows.
handler.js
const aws = require('aws-sdk');
const dynamoDb = new aws.DynamoDB.DocumentClient();
const testHandler = async event => {
// some code
// ...
const user = await getUser(userId)
// ...
// some code
}
const promisify = foo => new Promise((resolve, reject) => {
foo((error, result) => {
if (error) {
reject(error)
} else {
resolve(result)
}
})
})
const getUser = (userId) => promisify(callback =>
dynamoDb.get({
TableName: 'test-table',
Key: {
"PK": `${userId}`,
"SK": `${userId}`
}
}, callback))
.then((user) => {
console.log(`Retrieved user: ${userId}`)
return user
})
module.exports = {
testHandler: testHandler,
getUser: getUser
}
I want to write a unit test for testing the getUser function so I tried the following.
handler.test.js
const handler = require('../handler');
const AWS = require('aws-sdk')
const dynamoDbGetParameterPromise = jest.fn().mockReturnValue({
promise: jest.fn().mockResolvedValue({
PK: 'userId-123', SK: 'userId-123'
})
})
AWS.DynamoDB.DocumentClient = jest.fn().mockImplementation(() => ({
get: dynamoDbGetParameterPromise
}))
describe('test getUser', () => {
beforeEach(() => {
jest.resetModules()
});
test('get user success', async () => {
const user = { PK: 'userId-123', SK: 'userId-123' };
const result = await handler.getUser(userId);
expect(result).toEqual(user);
});
});
The error is as follows.
ConfigError: Missing region in config
105 |
106 | const getUser = (userId) => promisify(callback =>
> 107 | dynamoDb.get({
| ^
108 | TableName: 'test-table',
109 | Key: {
110 | "PK": 'userId-123',
It seems the test still uses the dynamoDb instance in handler.js rather than the mocked one in the test.
Any ideas on how to wire up the mock correctly to test the function?
Thanks in advance!
You can use Jest's auto-mock by adding
jest.mock("aws-sdk");
and then AWS.DynamoDB.DocumentClient will be a mocked class, so you'll be able to mock its implementation. Since we want its get method to be a function that accepts anything as the first argument (we won't do anything with it within the mock implementation) and a callback that we expect to have been called with null and user, we can mock it like this:
AWS.DynamoDB.DocumentClient.prototype.get.mockImplementation((_, cb) => {
cb(null, user);
});
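For completeness, a minimal sketch of how that auto-mock variant could be wired up in a test file (assuming handler.js sits next to the test, as in the answer below):
jest.mock('aws-sdk'); // auto-mock: class methods become jest.fn()s on the prototype
const AWS = require('aws-sdk');
const { getUser } = require('./handler');

test('getUser resolves the mocked user', async () => {
  const user = { PK: 'userId-123', SK: 'userId-123' };
  AWS.DynamoDB.DocumentClient.prototype.get.mockImplementation((_, cb) => cb(null, user));
  await expect(getUser('userId-123')).resolves.toEqual(user);
});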
You could use jest.mock(moduleName, factory, options) to mock the aws-sdk module manually.
E.g.
handler.js:
const aws = require('aws-sdk');
const dynamoDb = new aws.DynamoDB.DocumentClient();
const promisify = (foo) =>
new Promise((resolve, reject) => {
foo((error, result) => {
if (error) {
reject(error);
} else {
resolve(result);
}
});
});
const getUser = (userId) =>
promisify((callback) =>
dynamoDb.get(
{
TableName: 'test-table',
Key: {
PK: `${userId}`,
SK: `${userId}`,
},
},
callback,
),
).then((user) => {
console.log(`Retrieved user: ${userId}`);
return user;
});
module.exports = { getUser };
handler.test.js:
const aws = require('aws-sdk');
const { getUser } = require('./handler');
jest.mock('aws-sdk', () => {
const mDocumentClient = { get: jest.fn() };
const mDynamoDB = { DocumentClient: jest.fn(() => mDocumentClient) };
return { DynamoDB: mDynamoDB };
});
const mDynamoDb = new aws.DynamoDB.DocumentClient();
describe('64564233', () => {
afterAll(() => {
jest.resetAllMocks();
});
it('should get user', async () => {
const mResult = { name: 'teresa teng' };
mDynamoDb.get.mockImplementationOnce((_, callback) => callback(null, mResult));
const actual = await getUser(1);
expect(actual).toEqual({ name: 'teresa teng' });
expect(mDynamoDb.get).toBeCalledWith(
{
TableName: 'test-table',
Key: {
PK: '1',
SK: '1',
},
},
expect.any(Function),
);
});
it('should handler error', async () => {
const mError = new Error('network');
mDynamoDb.get.mockImplementationOnce((_, callback) => callback(mError));
await expect(getUser(1)).rejects.toThrowError('network');
expect(mDynamoDb.get).toBeCalledWith(
{
TableName: 'test-table',
Key: {
PK: '1',
SK: '1',
},
},
expect.any(Function),
);
});
});
unit test result:
PASS src/stackoverflow/64564233/handler.test.js (14.929s)
64564233
✓ should get user (23ms)
✓ should handler error (3ms)
console.log src/stackoverflow/64564233/handler.js:433
Retrieved user: 1
------------|----------|----------|----------|----------|-------------------|
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s |
------------|----------|----------|----------|----------|-------------------|
All files | 100 | 100 | 100 | 100 | |
handler.js | 100 | 100 | 100 | 100 | |
------------|----------|----------|----------|----------|-------------------|
Test Suites: 1 passed, 1 total
Tests: 2 passed, 2 total
Snapshots: 0 total
Time: 17.435s
source code: https://github.com/mrdulin/jest-codelab/tree/master/src/stackoverflow/64564233

Jest mocking google-cloud/storage typescript

I have been trying to mock @google-cloud/storage for my implementation so that I can test it without having to hit cloud storage in GCP, and so far it has all been in vain.
I have tried to mock the node_modules scope folder using the Jest docs and that didn't work out.
Hence I tried the approach below.
This is my implementation class
import { GcloudAuthenticationInstance } from '../common/services/gcloud.authentication';
import * as fs from 'fs';
import pump from 'pump';
import pino from 'pino';
import * as _ from 'lodash';
import {
ENV_NAME_DEV,
GCLOUD_DATABASE_BUCKET_DEV,
GCLOUD_DATABASE_BUCKET_PROD,
GCLOUD_ENV_STR_BUCKET_NAME,
GCLOUD_STORED_FILE_NAME_DEV,
GCLOUD_STORED_FILE_NAME_PROD,
GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH,
GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH,
} from '../common/util/app.constants';
import { PinoLoggerServiceInstance } from '../common/services/pino.logger.service';
import { AppUtilServiceInstance } from '../common/services/app.util.service';
export const uploadEnvFiles = async (env_name: string) => {
const LOGGER: pino.Logger = PinoLoggerServiceInstance.getLogger(__filename);
return new Promise(async (res, rej) => {
// This just returns the Storage() instance with keyFileName and projectID
// of the Google Cloud console being set, so authentication takes place
const str = GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
const bucketToUpload = GCLOUD_ENV_STR_BUCKET_NAME;
let uploadLocalFilePath;
let destinationBucketPath;
if (!AppUtilServiceInstance.isNullOrUndefined(env_name)) {
uploadLocalFilePath = ENV_NAME_DEV === env_name ? GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH : GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH;
destinationBucketPath = ENV_NAME_DEV === env_name ? GCLOUD_DATABASE_BUCKET_DEV : GCLOUD_DATABASE_BUCKET_PROD;
}
LOGGER.info('after authentication');
pump(
fs.createReadStream(uploadLocalFilePath),
str
.bucket(bucketToUpload)
.file(destinationBucketPath)
.createWriteStream({
gzip: true,
public: true,
resumable: true,
})
)
.on('error', (err) => {
LOGGER.error('Error occured in uploading:', err);
rej({ status: 'Error', error: err, code: 500 });
})
.on('finish', () => {
LOGGER.info('Successfully uploaded the file');
res({ status: 'Success', code: 201, error: null });
});
});
};
export const downloadEnvFiles = async (env_name): Promise<any> => {
const LOGGER: pino.Logger = PinoLoggerServiceInstance.getLogger(__filename);
return new Promise(async (res, rej) => {
const str = GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
try {
const [files] = await str.bucket(GCLOUD_ENV_STR_BUCKET_NAME).getFiles();
const filteredFile =
ENV_NAME_DEV === env_name
? _.find(files, (file) => {
return file.name.includes(GCLOUD_STORED_FILE_NAME_DEV);
})
: _.find(files, (file) => {
return file.name.includes(GCLOUD_STORED_FILE_NAME_PROD);
});
res({
status: 'Success',
code: 200,
error: null,
stream: str
.bucket(GCLOUD_ENV_STR_BUCKET_NAME)
.file(filteredFile.name)
.createReadStream()
});
} catch (err) {
LOGGER.error('Error in retrieving files from gcloud:'+err);
rej({ status: 'Error', error: err, code: 500 });
}
});
};
This is my Jest spec file:
bucket.operations.int.spec.ts
I've tried to include the mock inline
import { GcloudAuthenticationInstance } from '../common/services/gcloud.authentication';
const { Storage } = require('@google-cloud/storage');
const { Bucket } = require('@google-cloud/storage');
import { File } from '@google-cloud/storage';
import { mocked } from 'ts-jest/utils'
const fs = require('fs');
import * as path from 'path';
import pump from 'pump';
import * as BucketOperations from './bucket.operations';
import { GCLOUD_ENV_STR_BUCKET_NAME } from '../common/util/app.constants';
const { PassThrough } = require('stream');
const fsMock = jest.mock('fs');
// Here we are trying to mock pump with a returned function,
// since pump() is the actual function; we are mocking it to return a value
// which is just the "on" event listener, so we indicate that this will be substituted
// with another mocked function
jest.genMockFromModule('@google-cloud/storage');
jest.mock('@google-cloud/storage', () => {
const mockedFile = jest.fn().mockImplementation(() => {
return {
File: jest.fn().mockImplementation(() => {
return {
name: 'dev.txt',
createReadStream: jest
.fn()
.mockReturnValue(
fs.createReadStream(
path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt')
)
),
createWriteStream: jest
.fn()
.mockReturnValue(
fs.createWriteStream(
path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt')
)
)
};
})
};
});
const mockedBUcket = jest.fn().mockImplementation(() => {
return {
Bucket: jest.fn().mockImplementation(() => {
return {
constructor: jest.fn().mockReturnValue('test-bucket'),
getFiles: jest.fn().mockReturnValue([mockedFile])
}
})
}
});
return {
Storage: jest.fn().mockImplementation(() => {
return {
constructor: jest.fn().mockReturnValue('test-storage'),
bucket: mockedBUcket,
file: mockedFile,
createWriteStream: jest.fn().mockImplementation(() =>
fs.createWriteStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt')))
};
})
};
});
jest.mock('pump', () => {
const mPump = { on: jest.fn() };
return jest.fn(() => mPump);
});
describe('Test suite for testing bucket operations', () => {
const mockedStorage = mocked(Storage, true);
const mockeddFile = mocked(File, true);
const mockeddBucket = mocked(Bucket, true);
function cancelCloudStorageMock() {
//mockCloudStorage.unmock('@google-cloud/storage');
mockedStorage.mockClear();
mockeddBucket.mockClear();
mockeddFile.mockClear();
jest.unmock('@google-cloud/storage');
jest.requireActual('@google-cloud/storage');
}
function cancelFsMock() {
jest.unmock('fs');
jest.requireActual('fs');
}
afterEach(() => {
jest.clearAllMocks();
//jest.restoreAllMocks();
});
test('test for uploadfiles - success', async (done) => {
cancelFsMock();
pump().on = jest.fn(function(this: any, event, callback) {
if (event === 'finish') {
callback();
}
return this;
});
const actual = await BucketOperations.uploadEnvFiles('dev');
expect(actual).toEqual(
expect.objectContaining({
status: 'Success',
code: 201,
})
);
done();
});
test('test downloadEnvFiles - success', async (done) => {
jest.unmock('fs');
const fMock = (File.prototype.constructor = jest.fn().mockImplementation(() => {
return {
storage: new Storage(),
bucket: 'testBucket',
acl: 'test-acl',
name: 'dev.txt',
parent: 'parent bucket',
};
}));
const bucketGetFilMock = (Bucket.prototype.getFiles = jest.fn().mockImplementation(() => {
return [fMock];
}));
// Get files should be an array of File from google-cloud-storage
//Bucket.prototype.getFiles = jest.fn().mockReturnValue([mockedFsConstructor]);
//Storage.prototype.bucket = jest.fn().mockReturnValue(new Storage());
const mockReadable = new PassThrough();
const mockWritable = new PassThrough();
jest.spyOn(fs, 'createReadStream').mockReturnValue(
fs.createWriteStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt'))
);
await BucketOperations.downloadEnvFiles('dev');
done();
});
});
This is the exception I end up with. Upon debugging I see that the mocked instances are trying to execute, but the file method on the Storage mock is never executed. This method is not available in @google-cloud/storage, but I did try to mock it. Is there a way to mock just the usage of @google-cloud/storage using Jest?
EDIT:
Here is the exception:
TypeError: str.bucket(...).file is not a function
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:37:6
at Generator.next (<anonymous>)
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:8:71
at new Promise (<anonymous>)
at Object.<anonymous>.__awaiter (/home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:4:12)
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:22:40
at new Promise (<anonymous>)
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:22:9
Thanks to @ralemos, I was able to find the answer on how to mock this.
Here is the complete implementation.
I've added a few more test cases as well.
So jest.mock(), especially for the @google-cloud/storage module, needs to be done in a different way. The Bucket of the Storage has all the details of the files in GCP storage, so that needs to be mocked first; I also mocked the File (this is of type @google-cloud/storage). Then I added the mockedFile to the mockedBucket and from there to the mockedStorage. I've also added all the methods and properties and implemented a mock for each of them.
There is a lodash node_module usage in my test file, so I mocked that implementation as well. Now everything works fine.
import { GcloudAuthenticationInstance } from '../common/services/gcloud.authentication';
const { Storage } = require('@google-cloud/storage');
const fs = require('fs');
import * as path from 'path';
import pump from 'pump';
import * as BucketOperations from './bucket.operations';
const { PassThrough } = require('stream');
const fsMock = jest.mock('fs');
const mockedFile = {
name: 'dev.txt',
createWriteStream: jest.fn().mockImplementation(() => {
return fs.createWriteStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt'));
}),
createReadStream: jest.fn().mockImplementation(() => {
return fs.createReadStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt'));
}),
};
jest.mock('lodash', () => {
return {
find: jest.fn().mockImplementation(() => {
return mockedFile;
})
};
});
const mockedBucket = {
file: jest.fn(() => mockedFile),
getFiles: jest.fn().mockImplementation(() => {
const fileArray = new Array();
fileArray.push(mockedFile);
return fileArray;
})
};
const mockedStorage = {
bucket: jest.fn(() => mockedBucket)
};
jest.mock('@google-cloud/storage', () => {
return {
Storage: jest.fn(() => mockedStorage)
};
});
jest.mock('pump', () => {
const mPump = { on: jest.fn() };
return jest.fn(() => mPump);
});
describe('Test suite for testing bucket operations', () => {
function cancelCloudStorageMock() {
jest.unmock('@google-cloud/storage');
jest.requireActual('@google-cloud/storage');
}
function cancelFsMock() {
jest.unmock('fs');
jest.requireActual('fs');
}
afterEach(() => {
jest.clearAllMocks();
//jest.restoreAllMocks();
});
test('test for uploadfiles - success', async (done) => {
pump().on = jest.fn(function(this: any, event, callback) {
if (event === 'finish') {
callback();
}
return this;
});
const actual = await BucketOperations.uploadEnvFiles('dev');
expect(actual).toEqual(
expect.objectContaining({
status: 'Success',
code: 201,
})
);
done();
});
test('test downloadEnvFiles - success', async (done) => {
jest.unmock('fs');
const downloadRes = await BucketOperations.downloadEnvFiles('dev');
expect(downloadRes).toBeDefined();
expect(downloadRes).toEqual(expect.objectContaining({code:200, status: 'Success'}));
done();
});
test('test for uploadfiles- failure', async (done) => {
cancelCloudStorageMock();
const bucketStorageSpy = jest
.spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
.mockImplementation(() => {
return new Storage({
projectId: 'testId',
keyFilename: path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt'),
scopes: ['testScope'],
autoRetry: false,
});
});
const mockReadable = new PassThrough();
const mockWritable = new PassThrough();
fs.createWriteStream = jest.fn().mockReturnValue(mockWritable);
fs.createReadStream = jest.fn().mockReturnValue(mockReadable);
pump().on = jest.fn(function(this: any, event, callback) {
if (event === 'error') {
callback();
}
return this;
});
const actual = BucketOperations.uploadEnvFiles('prod');
expect(actual).rejects.toEqual(
expect.objectContaining({
status: 'Error',
code: 500,
})
);
expect(bucketStorageSpy).toHaveBeenCalledTimes(1);
done();
});
test('test download - make the actual call - rej with auth error', async (done) => {
cancelCloudStorageMock();
console.dir(Storage);
const mockReadable = new PassThrough();
const mockWritable = new PassThrough();
fs.createWriteStream = jest.fn().mockReturnValue(mockWritable);
fs.createReadStream = jest.fn().mockReturnValue(mockReadable);
const createGcloudAuthenticationBucketSpy = jest
.spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
.mockImplementation(() => {
return new Storage();
});
try {
await BucketOperations.downloadEnvFiles('dev');
} catch (err) {
expect(err.code).toBe(500);
expect(err.status).toBe('Error');
}
expect(createGcloudAuthenticationBucketSpy).toHaveBeenCalledTimes(1);
createGcloudAuthenticationBucketSpy.mockReset();
done();
});
});

Mocking pump node_module with different implementations using jest - Typescript

I am trying to implement Google Cloud Storage with Node.js and test it using TypeScript.
This is my actual class
Please do not consider the logging implementation for now.
The storage is authenticated by an external service call -
const str =
GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
and the file that I want to store in GCloud is handled with streams, using the pump module.
export const uploadEnvFiles = async (env_name: string) => {
const LOGGER: pino.Logger = PinoLoggerServiceInstance.getLogger(__filename);
return new Promise(async (res, rej) => {
const str = GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
const bucketToUpload = GCLOUD_ENV_STR_BUCKET_NAME;
let uploadLocalFilePath;
let destinationBucketPath;
if (!AppUtilServiceInstance.isNullOrUndefined(env_name)) {
uploadLocalFilePath = ENV_NAME_DEV === env_name ? GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH : GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH;
destinationBucketPath = ENV_NAME_DEV === env_name ? GCLOUD_DATABASE_BUCKET_DEV : GCLOUD_DATABASE_BUCKET_PROD;
}
LOGGER.info('after authentication');
pump(
fs.createReadStream(uploadLocalFilePath),
str
.bucket(bucketToUpload)
.file(destinationBucketPath)
.createWriteStream({
gzip: true,
public: true,
resumable: true,
})
)
.on('error', (err) => {
LOGGER.error('Error occured in uploading:', err);
rej({ status: 'Error', error: err, code: 500 });
})
.on('finish', () => {
LOGGER.info('Successfully uploaded the file');
res({ status: 'Success', code: 201, error: null });
});
});
};
Now the stream can either finish or error out, and I want to test both cases.
I am able to mock the pump npm module as a whole with jest.mock, hoisted at the top before any test suite declarations, like this:
jest.mock('pump', () =>
jest.fn().mockImplementation(() => {
const readStream = fs.createReadStream(
path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt')
);
const writeStream = fs.createWriteStream(
path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt')
);
return readStream.pipe(writeStream);
})
);
So the above is an implementation for the working scenario, where I pipe an existing file to an output stream and return the stream, making the mock of pump work. Here is my test spec file:
const globalAny: any = global;
describe('Test suite for bucket functionality', () => {
beforeEach(() => {
jest.restoreAllMocks();
});
afterAll(() => {
jest.clearAllMocks();
jest.restoreAllMocks();
jest.resetAllMocks();
});
test('test upload - make the actual call', async (done) => {
// to make sure that mock fs doesnt affect the gcloud authentication, this is a MUST
const createGcloudAuthenticationBucketSpy = jest
.spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
.mockImplementation(() => {
return new Storage();
});
const res = BucketOperations.uploadEnvFiles(globalAny.ENV_JEST);
await expect(res).resolves.toBeDefined();
expect(createGcloudAuthenticationBucketSpy).toHaveBeenCalledTimes(1);
done();
});
});
Now this works with the mocked pump call. But I also want to test the scenario where the stream emits an error, in the same spec. Is there a way to override the mockImplementation in another test case? Since this is an npm module, I have written the jest.mock() at the top, which serves as the mock for the entire test suite, but I am unsure how to override it. I've been trying for the past 3 days and couldn't figure it out. How can this be achieved?
Here is the unit test solution using jest.mock(moduleName, factory, options) and jest.spyOn(object, methodName).
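The key point for overriding the behaviour per test is that the jest.mock factory below returns one shared object, so calling pump() inside a test hands back the very same mocked on that the module under test chains on; each test can then install its own implementation, for example for the error path (a sketch; the error message is illustrative):
pump().on.mockImplementation(function (this: any, event, callback) {
  if (event === 'error') {
    callback(new Error('upload failed'));
  }
  return this;
});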
bucketOperations.ts:
import fs from 'fs';
import pump from 'pump';
import { GcloudAuthenticationInstance } from './gcloudAuthenticationInstance';
import { AppUtilServiceInstance } from './appUtilServiceInstance';
const {
GCLOUD_ENV_STR_BUCKET_NAME,
GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH,
GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH,
GCLOUD_DATABASE_BUCKET_DEV,
GCLOUD_DATABASE_BUCKET_PROD,
ENV_NAME_DEV,
} = process.env;
export const uploadEnvFiles = async (env_name: string) => {
return new Promise(async (res, rej) => {
const str = GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
const bucketToUpload = GCLOUD_ENV_STR_BUCKET_NAME;
let uploadLocalFilePath;
let destinationBucketPath;
if (!AppUtilServiceInstance.isNullOrUndefined(env_name)) {
uploadLocalFilePath =
ENV_NAME_DEV === env_name ? GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH : GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH;
destinationBucketPath = ENV_NAME_DEV === env_name ? GCLOUD_DATABASE_BUCKET_DEV : GCLOUD_DATABASE_BUCKET_PROD;
}
console.info('after authentication');
pump(
fs.createReadStream(uploadLocalFilePath),
str
.bucket(bucketToUpload)
.file(destinationBucketPath)
.createWriteStream({
gzip: true,
public: true,
resumable: true,
}),
)
.on('error', (err) => {
console.error('Error occured in uploading:', err);
rej({ status: 'Error', error: err, code: 500 });
})
.on('finish', () => {
console.info('Successfully uploaded the file');
res({ status: 'Success', code: 201, error: null });
});
});
};
appUtilServiceInstance.ts:
const AppUtilServiceInstance = {
isNullOrUndefined: (env_name) => typeof env_name === 'undefined',
};
export { AppUtilServiceInstance };
gcloudAuthenticationInstance.ts:
const GcloudAuthenticationInstance = {
createGcloudAuthenticationBucket: () => {
const storage = {
bucket(name) {
return this;
},
file(filename) {
return this;
},
createWriteStream(options) {
return 'write stream';
},
};
return storage;
},
};
export { GcloudAuthenticationInstance };
bucketOperations.test.ts:
import pump from 'pump';
import fs from 'fs';
import { GcloudAuthenticationInstance } from './gcloudAuthenticationInstance';
jest.mock('pump', () => {
const mPump = { on: jest.fn() };
return jest.fn(() => mPump);
});
describe('61031410', () => {
let originalEnv;
beforeEach(() => {
originalEnv = process.env;
});
afterEach(() => {
process.env = originalEnv;
jest.restoreAllMocks();
});
it('should upload file correctly', async () => {
process.env.ENV_NAME_DEV = 'dev';
process.env.GCLOUD_ENV_STR_BUCKET_NAME = 'bucket-dev';
process.env.GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH = 'dev';
process.env.GCLOUD_DATABASE_BUCKET_DEV = 'bucket-dev-db';
const BucketOperations = require('./bucketOperations');
const createReadStreamSpy = jest.spyOn(fs, 'createReadStream').mockReturnValueOnce('rs' as any);
const mStorage: any = {
bucket: jest.fn().mockReturnThis(),
file: jest.fn().mockReturnThis(),
createWriteStream: jest.fn().mockReturnValueOnce('ws'),
};
const infoSpy = jest.spyOn(console, 'info');
const createGcloudAuthenticationBucketSpy = jest
.spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
.mockReturnValueOnce(mStorage);
pump().on.mockImplementation(function(this: any, event, callback) {
if (event === 'finish') {
callback();
}
return this;
});
const actual = await BucketOperations.uploadEnvFiles('dev');
expect(actual).toEqual({ status: 'Success', code: 201, error: null });
expect(createGcloudAuthenticationBucketSpy).toBeCalledTimes(1);
expect(pump).toBeCalledWith('rs', 'ws');
expect(createReadStreamSpy).toBeCalledWith('dev');
expect(mStorage.bucket).toBeCalledWith('bucket-dev');
expect(mStorage.file).toBeCalledWith('bucket-dev-db');
expect(mStorage.createWriteStream).toBeCalledWith({ gzip: true, public: true, resumable: true });
expect(infoSpy.mock.calls[0]).toEqual(['after authentication']);
expect(infoSpy.mock.calls[1]).toEqual(['Successfully uploaded the file']);
});
it('should handle the error if upload file failure', () => {
// TODO: you can do this like above
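// a sketch filling in the TODO: drive the mocked pump's 'error' handler
// and assert the rejection (mError and the stream placeholders are illustrative)
const mStorage: any = {
  bucket: jest.fn().mockReturnThis(),
  file: jest.fn().mockReturnThis(),
  createWriteStream: jest.fn().mockReturnValueOnce('ws'),
};
jest.spyOn(fs, 'createReadStream').mockReturnValueOnce('rs' as any);
jest
  .spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
  .mockReturnValueOnce(mStorage);
const mError = new Error('upload failed');
pump().on.mockImplementation(function (this: any, event, callback) {
  if (event === 'error') {
    callback(mError);
  }
  return this;
});
const BucketOperations = require('./bucketOperations');
// returning the expect lets Jest wait for the rejected promise
return expect(BucketOperations.uploadEnvFiles('dev')).rejects.toEqual({
  status: 'Error',
  error: mError,
  code: 500,
});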
});
});
unit test results with coverage report:
PASS stackoverflow/61031410/bucketOperations.test.ts (7.94s)
61031410
✓ should upload file correctly (69ms)
✓ should handle the error if upload file failure
console.info node_modules/jest-environment-enzyme/node_modules/jest-mock/build/index.js:866
after authentication
console.info node_modules/jest-environment-enzyme/node_modules/jest-mock/build/index.js:866
Successfully uploaded the file
---------------------------------|---------|----------|---------|---------|-------------------
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
---------------------------------|---------|----------|---------|---------|-------------------
All files | 80.56 | 50 | 54.55 | 79.41 |
appUtilServiceInstance.ts | 100 | 100 | 100 | 100 |
bucketOperations.ts | 92.31 | 50 | 83.33 | 91.67 | 40,41
gcloudAuthenticationInstance.ts | 28.57 | 100 | 0 | 28.57 | 3,5,8,11,14
---------------------------------|---------|----------|---------|---------|-------------------
Test Suites: 1 passed, 1 total
Tests: 2 passed, 2 total
Snapshots: 0 total
Time: 9.247s
source code: https://github.com/mrdulin/react-apollo-graphql-starter-kit/tree/master/stackoverflow/61031410
