How to mock 'readline.createInterface' in jest tests - node.js

I need to test 'readline.createInterface'.
Below is the code that I need to test:
private createReadStreamSafe(filePath: string): Promise<fs.ReadStream> {
  return new Promise((resolve, reject) => {
    const fileStream = fs.createReadStream(filePath)
    console.log('file Stream')
    fileStream
      .on('error', () => {
        reject('create read stream error')
      })
      .on('open', () => {
        resolve(fileStream)
      })
  })
}

async start() {
  const fileStream = await this.createReadStreamSafe(this.filePath)
  const rl = readline.createInterface({
    input: fileStream,
    output: process.stdout,
    terminal: false
  })
  for await (const line of rl) {
    ...
  }
}
I tried following code:
it('should work', async () => {
  const mockedReadStream = new Readable()
  jest.spyOn(fs, 'createReadStream').mockReturnValue(mockedReadStream as any)
  jest.spyOn(readline, 'createInterface').mockImplementation(() => {
    const lines = ['text', 'text2', 'text3']
    return {
      [Symbol.asyncIterator]() {
        return {
          i: 0,
          next: () => {
            if (this.i < 3) {
              return Promise.resolve({ value: lines[this.i++], done: false })
            }
            return Promise.resolve({ done: true })
          }
        }
      }
    } as any
  })
  const app = new App('myFile.txt')
  let promise = app.start()
  mockedReadStream.emit('open')
  await expect(promise).resolves.toBe(undefined)
})
But the following code is never reached:
for await (const line of rl) {
  ...
}
Is there a way to mock readline.createInterface so that it works with for await (const line of rl)?

The issue is that the mocked async iterable object is never actually consumed during the test.
The solution is to simply return an array from the mock, like this:
jest.spyOn(readline, 'createInterface').mockImplementationOnce(() => {
  return ['text1', 'text2'] as any
})
This works because for await (const item of iterable) accepts sync iterables as well as async iterables; an array is a sync iterable, so the loop consumes it automatically.
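For reference, here is a minimal sketch of the complete test with the array mock in place; the App import path and the 'open' event emission are assumptions carried over from the question's own code.

import * as fs from 'fs'
import * as readline from 'readline'
import { Readable } from 'stream'
import { App } from './app' // assumed module path

it('iterates over the mocked lines', async () => {
  const mockedReadStream = new Readable({ read() {} })
  jest.spyOn(fs, 'createReadStream').mockReturnValue(mockedReadStream as any)
  // A plain array is enough: for await accepts sync iterables too.
  jest.spyOn(readline, 'createInterface').mockImplementationOnce(() => {
    return ['text1', 'text2'] as any
  })

  const app = new App('myFile.txt')
  const promise = app.start()
  // Resolves createReadStreamSafe, which waits for the stream's 'open' event.
  mockedReadStream.emit('open')
  await expect(promise).resolves.toBeUndefined()
})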

Related

Jest coverage is not working properly on docker

I am using Jest for testing a package I am creating, and I require 100% coverage in all aspects.
On Windows everything works fine and coverage is 100% as expected, but when I run it in Docker, even though the Node version is the same (14.18.1), Jest fails to detect 2 lines of code that I am sure were reached. Here is the code:
import callIf from 'src/tools/call-if';
import zlib from 'zlib';

export default class CompressorEngine {
  static preparaData<T>(data: T): Buffer {
    const isNullOrUndefined = [null, undefined].includes(data as any);
    if (isNullOrUndefined) {
      return Buffer.from('null', 'utf-8');
    }
    return Buffer.from(JSON.stringify(data), 'utf-8');
  }

  static compress(uncompressedData: Buffer, compressionLevel: number = 9) {
    return new Promise<Buffer>((resolve, reject) => {
      zlib.deflate(uncompressedData, { level: compressionLevel }, (error, result) => {
        callIf(error, reject, error);
        callIf(!error, resolve, result);
      });
    });
  }

  static decompress(compressedData: Buffer, compressionLevel: number = 9) {
    return new Promise<Buffer>((resolve, reject) => {
      zlib.inflate(compressedData, { level: compressionLevel }, (error, result) => {
        callIf(error, reject, error);
        callIf(!error, resolve, result);
      });
    });
  }
}
the src/tools/call-if code:
const callIf = (expression: any, callback: Function, ...args: any[]) => {
  if (expression) return callback(...args);
};
export default callIf;
and here are my tests for this class:
import CompressorEngine from 'src/core/compressor';

describe('CompressorEngine', () => {
  it('should compress and decompress data', async () => {
    const input = 'test';
    const compressed = await CompressorEngine.compress(CompressorEngine.preparaData(input));
    const uncompressed = await CompressorEngine.decompress(compressed);
    expect(uncompressed.toString('utf-8')).toEqual(`"${input}"`);
  });

  it('should compress and decompress null as "null"', async () => {
    const input = null;
    const compressed = await CompressorEngine.compress(CompressorEngine.preparaData(input));
    const uncompressed = await CompressorEngine.decompress(compressed);
    expect(JSON.parse(uncompressed.toString('utf-8'))).toEqual(null);
  });

  describe('prepareData', () => {
    it('should return a buffer for null value as "null"', () => {
      Array.prototype.includes = jest.fn(Array.prototype.includes);
      const buffer = CompressorEngine.preparaData(null);
      expect(buffer.toString('utf-8')).toBe('null');
      expect(Array.prototype.includes).toHaveBeenCalled();
      expect(Array.prototype.includes).toHaveReturnedWith(true);
    });

    it('should return a buffer for undefined value as "null"', () => {
      const buffer = CompressorEngine.preparaData(undefined);
      expect(buffer.toString('utf-8')).toBe('null');
    });
  });
});
The problem is with prepareData: the report claims the if branch is never reached, but it is, as the first prepareData-specific test shows with expect(Array.prototype.includes).toHaveReturnedWith(true). Yet the generated coverage says it wasn't covered. How come?
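One variable worth ruling out (an assumption, not a confirmed cause of the Docker difference): the test replaces Array.prototype.includes by direct assignment, so the built-in is never restored and every later test runs against the jest.fn wrapper. Using a restorable spy keeps the environment identical on both platforms; a minimal sketch:

describe('prepareData', () => {
  // Spy on the built-in instead of overwriting it, and restore it afterwards.
  let includesSpy: jest.SpyInstance;

  beforeEach(() => {
    includesSpy = jest.spyOn(Array.prototype, 'includes');
  });

  afterEach(() => {
    includesSpy.mockRestore(); // put the native includes back
  });

  it('should return a buffer for null value as "null"', () => {
    const buffer = CompressorEngine.preparaData(null);
    expect(buffer.toString('utf-8')).toBe('null');
    expect(includesSpy).toHaveReturnedWith(true);
  });
});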

How to write a Jest test that asserts toHaveBeenCalled with async/await?

Trying to run a test for the following code using Jest. I mocked the function and checked if it runs with the desired intent. I think the error is in the promise, but I can't find a solution. Can you help me?
Why is redis.set not called?
I get this error:
expect(jest.fn()).toHaveBeenCalled()
Expected number of calls: >= 1
Received number of calls: 0
mailparser.js
The mail parser will be called with mgs('body', (stream) => { parser })
const parser = async (stream) => {
  try {
    const mail = await simpleParser(stream)
    const isAvailable = await redis.set(
      [`mail_message_id_${mail.messageId}`],
      [`${mail.messageId}`],
      {
        flag: 'NX',
        expiry: 60 * 30,
      }
    )
    if (!isAvailable) {
      return console.log(`${mail.messageId} is already processed.`)
    }
  } catch (err) {
    console.error(err, 'failed to parser mail')
  }
}
Here is my unit test:
const stream = 'stream'
const mockParser = {
  simpleParser: jest.fn(),
}
const mockRedis = {
  set: jest.fn(),
}

beforeAll(() => {
  jest.mock('./redis', () => {
    return {
      set: mockRedis.set,
    }
  })
  jest.mock('mailparser', () => {
    return {
      simpleParser: mockParser.simpleParser,
    }
  })
  jest.fn().mockImplementation(() => mockMgs)
})

beforeEach(() => {
  mockMgs.on.mockImplementationOnce((event, fn) => {
    fn(stream)
  })
})

afterEach(async () => {
  await jest.clearAllMocks()
})

describe('mailparser', () => {
  test('should call redis.set with params correctly', async () => {
    const mailparser = require('./mailparser')
    await mockParser.simpleParser.mockImplementation(() =>
      Promise.resolve(mail) // mail is object data
    )
    mockRedis.set.mockImplementationOnce(() => Promise.resolve('OK'))
    mailparser(mockMgs, 1)
    expect(mockRedis.set).toHaveBeenCalled()
  })
})
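Two things usually bite here: jest.mock factories are normally placed at the top level of the test file so Jest can hoist them above the imports, and parser is async, so redis.set only fires after the awaited simpleParser resolves; the synchronous expect therefore runs too early. A hedged sketch of how the test could look, with the module paths ('./mailparser', './redis') and the shape of mockMgs noted as assumptions since their definitions are not shown in the question:

// Sketch only: paths and the mockMgs shape are assumptions based on the question.
jest.mock('mailparser', () => ({ simpleParser: jest.fn() }))
jest.mock('./redis', () => ({ set: jest.fn() }))

const { simpleParser } = require('mailparser')
const redis = require('./redis')
const mailparser = require('./mailparser') // module under test

test('should call redis.set with params correctly', async () => {
  const mail = { messageId: 'abc-123' } // hypothetical parsed mail
  simpleParser.mockResolvedValue(mail)
  redis.set.mockResolvedValue('OK')

  // Minimal stand-in for the message object: on('body', fn) invokes fn immediately.
  const mockMgs = { on: jest.fn((event, fn) => fn('stream')) }
  mailparser(mockMgs, 1)

  // parser() awaits simpleParser before it ever reaches redis.set,
  // so flush the pending microtasks before asserting.
  await new Promise(process.nextTick)

  expect(redis.set).toHaveBeenCalledWith(
    ['mail_message_id_abc-123'],
    ['abc-123'],
    expect.objectContaining({ flag: 'NX' })
  )
})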

One of my friends is trying to automate a process in which a bot posts Instagram stories as videos from a specific folder.

Below is the working code, which can post images, but is there any way to also share videos as an Instagram story?
The error I get when I try to post a video instead of an image is:
PS D:\Softwares\programming\Insta Bot\story> node index.js
18:45:11 - info: Dry Run Activated
18:45:11 - info: Post() called! ======================
18:45:11 - debug: 1 files found in ./images/
18:45:11 - warn: Record file not found, saying yes to D:\Softwares\programming\Insta Bot\story\images\meme.mp4
18:45:11 - debug: Read File Success
18:45:11 - error: undefined
(MAIN CODE)
index.js
const logger = require("./logger.js")
const { random, sleep } = require('./utils')
require('dotenv').config();
const { IgApiClient, IgLoginTwoFactorRequiredError } = require("instagram-private-api");
const ig = new IgApiClient();
const Bluebird = require('bluebird');
const inquirer = require('inquirer');
const { CronJob } = require('cron');
const path = require("path");
const fs = require("fs");
const fsp = fs.promises;
const sharp = require("sharp");
//==================================================================================
const statePath = "./etc/state.conf";
const recordPath = "./etc/usedfiles.jsonl";
const imgFolderPath = "./images/";
const dryrun = true;
const runOnStart = true;
//==================================================================================
(async () => { // FOR AWAIT
  // LOGIN TO INSTAGRAM
  if (!dryrun) {
    await login();
    logger.info("Log In Successful");
  } else {
    logger.info("Dry Run Activated");
  }

  // SCHEDULER
  // logger.silly("I'm a schedule, and I'm running!! :)");
  const job = new CronJob('38 43 * * * *', post, null, true); // https://crontab.guru/
  if (!runOnStart) logger.info(`Next few posts scheduled for: \n${job.nextDates(3).join("\n")}\n`);
  else post();

  // MAIN POST COMMAND
  async function post() {
    logger.info("Post() called! ======================");
    let postPromise = fsp.readdir(imgFolderPath)
      .then(filenames => {
        if (filenames.length < 1) throw new Error(`Folder ${imgFolderPath} is empty...`)
        logger.debug(`${filenames.length} files found in ${imgFolderPath}`);
        return filenames;
      })
      .then(filenames => filenames.map(file => path.resolve(imgFolderPath + file)))
      .then(filenames => pickUnusedFileFrom(filenames, filenames.length))
      .then(filename => {
        if (!dryrun) registerFileUsed(filename)
        return filename
      })
      .then(fsp.readFile)
      .then(async buffer => {
        logger.debug("Read File Success"); // TODO move this to previous then?
        return sharp(buffer).jpeg().toBuffer()
          .then(file => {
            logger.debug("Sharp JPEG Success");
            return file
          })
      })
      .then(async file => {
        if (!dryrun) {
          // await sleep(random(1000, 60000)) // TODO is this necessary?
          return ig.publish.story({ file })
            .then(fb => logger.info("Posting successful!?"))
        }
        else return logger.info("Data not sent, dryrun = true")
      })
      .then(() => logger.info(`Next post scheduled for ${job.nextDates()}\n`))
      .catch(logger.error)
  }
})();
//=================================================================================
async function login() {
  ig.state.generateDevice(process.env.IG_USERNAME);
  // ig.state.proxyUrl = process.env.IG_PROXY;

  // register callback?
  ig.request.end$.subscribe(async () => {
    const serialized = await ig.state.serialize();
    delete serialized.constants; // this deletes the version info, so you'll always use the version provided by the library
    await stateSave(serialized);
  });

  if (await stateExists()) {
    // import state accepts both a string as well as an object
    // the string should be a JSON object
    const stateObj = await stateLoad();
    await ig.state.deserialize(stateObj)
      .catch(err => logger.debug("deserialize: " + err));
  } else {
    let standardLogin = async function() {
      // login like normal
      await ig.simulate.preLoginFlow();
      logger.debug("preLoginFlow finished");
      await ig.account.login(process.env.IG_USERNAME, process.env.IG_PASSWORD);
      logger.info("Logged in as " + process.env.IG_USERNAME);
      process.nextTick(async () => await ig.simulate.postLoginFlow());
      logger.debug("postLoginFlow finished");
    }

    // Perform usual login
    // If 2FA is enabled, IgLoginTwoFactorRequiredError will be thrown
    return Bluebird.try(standardLogin)
      .catch(
        IgLoginTwoFactorRequiredError,
        async err => {
          logger.info("Two Factor Auth Required");
          const { username, totp_two_factor_on, two_factor_identifier } = err.response.body.two_factor_info;
          // decide which method to use
          const verificationMethod = totp_two_factor_on ? '0' : '1'; // default to 1 for SMS
          // At this point a code should have been sent
          // Get the code
          const { code } = await inquirer.prompt([
            {
              type: 'input',
              name: 'code',
              message: `Enter code received via ${verificationMethod === '1' ? 'SMS' : 'TOTP'}`,
            },
          ]);
          // Use the code to finish the login process
          return ig.account.twoFactorLogin({
            username,
            verificationCode: code,
            twoFactorIdentifier: two_factor_identifier,
            verificationMethod, // '1' = SMS (default), '0' = TOTP (google auth for example)
            trustThisDevice: '1', // Can be omitted as '1' is used by default
          });
        },
      )
      .catch(e => logger.error('An error occurred while processing two factor auth', e, e.stack));
  }
  return

  //================================================================================
  async function stateSave(data) {
    // here you would save it to a file/database etc.
    await fsp.mkdir(path.dirname(statePath), { recursive: true }).catch(logger.error);
    return fsp.writeFile(statePath, JSON.stringify(data))
      // .then(() => logger.info('state saved, daddy-o'))
      .catch(err => logger.error("Write error" + err));
  }

  async function stateExists() {
    return fsp.access(statePath, fs.constants.F_OK)
      .then(() => {
        logger.debug('Can access state info')
        return true
      })
      .catch(() => {
        logger.warn('Cannot access state info')
        return false
      });
  }

  async function stateLoad() {
    // here you would load the data
    return fsp.readFile(statePath, 'utf-8')
      .then(data => JSON.parse(data))
      .then(data => {
        logger.info("State load successful");
        return data
      })
      .catch(logger.error)
  }
}
async function registerFileUsed(filepath) {
  let data = JSON.stringify({
    path: filepath,
    time: new Date().toISOString()
  }) + '\n';
  return fsp.appendFile(recordPath, data, { encoding: 'utf8', flag: 'a+' })
    .then(() => {
      logger.debug("Writing filename to record file");
      return filepath
    })
}

function pickUnusedFileFrom(filenames, iMax = 1000) {
  return new Promise((resolve, reject) => {
    let checkFileUsed = async function(filepath) {
      return fsp.readFile(recordPath, 'utf8')
        .then(data => data.split('\n'))
        .then(arr => arr.filter(Boolean))
        .then(arr => arr.map(JSON.parse))
        .then(arr => arr.some(entry => entry.path === filepath))
    }

    let trythis = function(iMax, i = 1) {
      let file = random(filenames);
      checkFileUsed(file)
        .then(async used => {
          if (!used) {
            logger.info(`Unused file found! ${file}`);
            resolve(file);
          } else if (i < iMax) {
            logger.debug(`Try #${i}: File ${file} used already`);
            await sleep(50);
            trythis(iMax, ++i)
          } else {
            reject(`I tried ${iMax} times and all the files I tried were previously used`)
          }
        })
        .catch(err => {
          logger.warn("Record file not found, saying yes to " + file);
          resolve(file);
        })
    }(iMax);
  })
}
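The log shows "Read File Success" but never "Sharp JPEG Success", which points at the sharp(buffer).jpeg() step: sharp only handles images, so an .mp4 buffer fails there. Below is a rough sketch of how the readFile/sharp/publish part of the chain could branch on the file type. Note that the { video, coverImage } option shape for ig.publish.story() is an assumption based on instagram-private-api's story examples, so verify it against the library before relying on it; the cover image path is hypothetical.

// Sketch only: replaces the .then(fsp.readFile) -> sharp -> publish steps of the chain.
async function publishStoryFromFile(filename: string) {
  const buffer = await fsp.readFile(filename);
  if (path.extname(filename).toLowerCase() === '.mp4') {
    // sharp cannot convert video, so skip the JPEG conversion for .mp4 files.
    const coverImage = await fsp.readFile('./images/cover.jpg'); // hypothetical cover frame
    return ig.publish.story({ video: buffer, coverImage }); // assumed option shape
  }
  const file = await sharp(buffer).jpeg().toBuffer();
  return ig.publish.story({ file });
}

In post(), the chain would then call publishStoryFromFile(filename) (still guarded by the dryrun flag) instead of piping every file through sharp.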

Strange Promise.all behaviour

In one file called SearchBuilder.js I have this function (written this way as a pure test):
self.validateInputs = async params => {
  const validateDates = async () => {
    const pick_up_date = moment(`${params.pick_up_date} ${params.pick_up_time}`),
      drop_off_date = moment(`${params.drop_off_date} ${params.drop_off_date}`);
    return true;
  };
  const validatePickUpId = async () => {
    return true;
  };
  const validateDropOffId = async () => {
    throw { 'code': 'NOT_FOUND' };
  };
  return Promise.all([
    validateDates,
    validatePickUpId,
    validateDropOffId,
  ]);
};
In another file called searchEngine.js I have this:
self.run = async (type, search_inputs, account) => {
  const searchBuilder = self.getSearchBuilder(type);
  try {
    const isValid = await searchBuilder.validateInputs(search_inputs);
    console.log(isValid);
  } catch (e) {
    console.log('error input validation');
  }
  return search;
};
I erroneously thought that if any of the promises passed to Promise.all failed, my code would enter the catch block. Instead, even though one of the promises fails, the code inside the catch is never executed, and in isValid I get this:
[ [AsyncFunction: validateDates],
[AsyncFunction: validatePickUpId],
[AsyncFunction: validateDropOffId] ]
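The logged output gives the diagnosis away: the array passed to Promise.all contains the async function references themselves, not the promises they produce. Promise.all passes non-promise values through untouched, so it resolves with the three functions and nothing ever rejects. Invoking each validator fixes it; a minimal sketch:

// Call the validators so Promise.all receives actual promises; the rejection
// thrown by validateDropOffId then propagates to the caller's catch block.
return Promise.all([
  validateDates(),
  validatePickUpId(),
  validateDropOffId(),
]);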

Jest mocking google-cloud/storage typescript

I have been trying to mock @google-cloud/storage for my implementation so that I can test it without having to hit cloud storage in GCP, and so far it has all been in vain.
I tried to mock the node_modules scoped folder as described in the Jest docs, and that didn't work out.
Hence I tried the approach below.
This is my implementation class
import { GcloudAuthenticationInstance } from '../common/services/gcloud.authentication';
import * as fs from 'fs';
import pump from 'pump';
import pino from 'pino';
import * as _ from 'lodash';
import {
  ENV_NAME_DEV,
  GCLOUD_DATABASE_BUCKET_DEV,
  GCLOUD_DATABASE_BUCKET_PROD,
  GCLOUD_ENV_STR_BUCKET_NAME,
  GCLOUD_STORED_FILE_NAME_DEV,
  GCLOUD_STORED_FILE_NAME_PROD,
  GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH,
  GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH,
} from '../common/util/app.constants';
import { PinoLoggerServiceInstance } from '../common/services/pino.logger.service';
import { AppUtilServiceInstance } from '../common/services/app.util.service';

export const uploadEnvFiles = async (env_name: string) => {
  const LOGGER: pino.Logger = PinoLoggerServiceInstance.getLogger(__filename);
  return new Promise(async (res, rej) => {
    // This just returns the Storage() instance with keyFileName and projectID
    // of google cloud console being set so authentication takes place
    const str = GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
    const bucketToUpload = GCLOUD_ENV_STR_BUCKET_NAME;
    let uploadLocalFilePath;
    let destinationBucketPath;
    if (!AppUtilServiceInstance.isNullOrUndefined(env_name)) {
      uploadLocalFilePath = ENV_NAME_DEV === env_name ? GCLOUD_UPLOAD_FILE_DEV_LOCAL_PATH : GCLOUD_UPLOAD_FILE_PROD_LOCAL_PATH;
      destinationBucketPath = ENV_NAME_DEV === env_name ? GCLOUD_DATABASE_BUCKET_DEV : GCLOUD_DATABASE_BUCKET_PROD;
    }
    LOGGER.info('after authentication');
    pump(
      fs.createReadStream(uploadLocalFilePath),
      str
        .bucket(bucketToUpload)
        .file(destinationBucketPath)
        .createWriteStream({
          gzip: true,
          public: true,
          resumable: true,
        })
    )
      .on('error', (err) => {
        LOGGER.error('Error occured in uploading:', err);
        rej({ status: 'Error', error: err, code: 500 });
      })
      .on('finish', () => {
        LOGGER.info('Successfully uploaded the file');
        res({ status: 'Success', code: 201, error: null });
      });
  });
};

export const downloadEnvFiles = async (env_name): Promise<any> => {
  const LOGGER: pino.Logger = PinoLoggerServiceInstance.getLogger(__filename);
  return new Promise(async (res, rej) => {
    const str = GcloudAuthenticationInstance.createGcloudAuthenticationBucket();
    try {
      const [files] = await str.bucket(GCLOUD_ENV_STR_BUCKET_NAME).getFiles();
      const filteredFile =
        ENV_NAME_DEV === env_name
          ? _.find(files, (file) => {
              return file.name.includes(GCLOUD_STORED_FILE_NAME_DEV);
            })
          : _.find(files, (file) => {
              return file.name.includes(GCLOUD_STORED_FILE_NAME_PROD);
            });
      res({
        status: 'Success',
        code: 200,
        error: null,
        stream: str
          .bucket(GCLOUD_ENV_STR_BUCKET_NAME)
          .file(filteredFile.name)
          .createReadStream()
      });
    } catch (err) {
      LOGGER.error('Error in retrieving files from gcloud:' + err);
      rej({ status: 'Error', error: err, code: 500 });
    }
  });
};
This is my Jest spec, bucket.operations.int.spec.ts.
I've tried to include the mock inline:
import { GcloudAuthenticationInstance } from '../common/services/gcloud.authentication';
const { Storage } = require('@google-cloud/storage');
const { Bucket } = require('@google-cloud/storage');
import { File } from '@google-cloud/storage';
import { mocked } from 'ts-jest/utils'
const fs = require('fs');
import * as path from 'path';
import pump from 'pump';
import * as BucketOperations from './bucket.operations';
import { GCLOUD_ENV_STR_BUCKET_NAME } from '../common/util/app.constants';
const { PassThrough } = require('stream');

const fsMock = jest.mock('fs');

// Here we are trying to mock pump with a function returned
// since pump() is the actual function, we are mocking the function to return a value
// which is just a value of "on" eventlistener.. so we indicate that this will be substituted
// with another mocked function
jest.genMockFromModule('@google-cloud/storage');
jest.mock('@google-cloud/storage', () => {
  const mockedFile = jest.fn().mockImplementation(() => {
    return {
      File: jest.fn().mockImplementation(() => {
        return {
          name: 'dev.txt',
          createReadStream: jest
            .fn()
            .mockReturnValue(
              fs.createReadStream(
                path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt')
              )
            ),
          createWriteStream: jest
            .fn()
            .mockReturnValue(
              fs.createWriteStream(
                path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt')
              )
            )
        };
      })
    };
  });
  const mockedBUcket = jest.fn().mockImplementation(() => {
    return {
      Bucket: jest.fn().mockImplementation(() => {
        return {
          constructor: jest.fn().mockReturnValue('test-bucket'),
          getFiles: jest.fn().mockReturnValue([mockedFile])
        }
      })
    }
  });
  return {
    Storage: jest.fn().mockImplementation(() => {
      return {
        constructor: jest.fn().mockReturnValue('test-storage'),
        bucket: mockedBUcket,
        file: mockedFile,
        createWriteStream: jest.fn().mockImplementation(() =>
          fs.createWriteStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt')))
      };
    })
  };
});

jest.mock('pump', () => {
  const mPump = { on: jest.fn() };
  return jest.fn(() => mPump);
});
describe('Test suite for testing bucket operations', () => {
  const mockedStorage = mocked(Storage, true);
  const mockeddFile = mocked(File, true);
  const mockeddBucket = mocked(Bucket, true);

  function cancelCloudStorageMock() {
    // mockCloudStorage.unmock('@google-cloud/storage');
    mockedStorage.mockClear();
    mockeddBucket.mockClear();
    mockeddFile.mockClear();
    jest.unmock('@google-cloud/storage');
    jest.requireActual('@google-cloud/storage');
  }

  function cancelFsMock() {
    jest.unmock('fs');
    jest.requireActual('fs');
  }

  afterEach(() => {
    jest.clearAllMocks();
    // jest.restoreAllMocks();
  });

  test('test for uploadfiles - success', async (done) => {
    cancelFsMock();
    pump().on = jest.fn(function(this: any, event, callback) {
      if (event === 'finish') {
        callback();
      }
      return this;
    });
    const actual = await BucketOperations.uploadEnvFiles('dev');
    expect(actual).toEqual(
      expect.objectContaining({
        status: 'Success',
        code: 201,
      })
    );
    done();
  });

  test('test downloadEnvFiles - success', async (done) => {
    jest.unmock('fs');
    const fMock = (File.prototype.constructor = jest.fn().mockImplementation(() => {
      return {
        storage: new Storage(),
        bucket: 'testBucket',
        acl: 'test-acl',
        name: 'dev.txt',
        parent: 'parent bucket',
      };
    }));
    const bucketGetFilMock = (Bucket.prototype.getFiles = jest.fn().mockImplementation(() => {
      return [fMock];
    }));
    // Get files should be an array of File from google-cloud-storage
    // Bucket.prototype.getFiles = jest.fn().mockReturnValue([mockedFsConstructor]);
    // Storage.prototype.bucket = jest.fn().mockReturnValue(new Storage());
    const mockReadable = new PassThrough();
    const mockWritable = new PassThrough();
    jest.spyOn(fs, 'createReadStream').mockReturnValue(
      fs.createWriteStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt'))
    );
    await BucketOperations.downloadEnvFiles('dev');
    done();
  });
});
This is the exception I end up with. Upon debugging, I see that the mocked instances are used, but the file method on the Storage mock is never executed. file is not available on Storage in @google-cloud/storage, but I did try to mock it. Is there a way to mock just the usage of @google-cloud/storage using Jest?
EDIT:
Here is the exception:
TypeError: str.bucket(...).file is not a function
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:37:6
at Generator.next (<anonymous>)
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:8:71
at new Promise (<anonymous>)
at Object.<anonymous>.__awaiter (/home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:4:12)
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:22:40
at new Promise (<anonymous>)
at /home/vijaykumar/Documents/Code/Nestjs/cloud-storage-app/src/gcloud/bucket.operations.ts:22:9
Thanks to @ralemos, I was able to find the answer for how to mock this.
Here is the complete implementation; I've added a few more test cases as well.
jest.mock(), especially for the @google-cloud/storage module, needs to be set up differently. The Bucket of the Storage holds all the details of the files in GCP storage, so that needs to be mocked first. I also mocked the File (this is of the @google-cloud/storage type). Then I added the mockedFile to the mockedBucket, and that in turn to the mockedStorage, implementing mocks for all the methods and properties that are used.
There is also a lodash usage (the _.find call), so I mocked that module as well. Now everything works fine.
import { GcloudAuthenticationInstance } from '../common/services/gcloud.authentication';
const { Storage } = require('@google-cloud/storage');
const fs = require('fs');
import * as path from 'path';
import pump from 'pump';
import * as BucketOperations from './bucket.operations';
const { PassThrough } = require('stream');

const fsMock = jest.mock('fs');

const mockedFile = {
  name: 'dev.txt',
  createWriteStream: jest.fn().mockImplementation(() => {
    return fs.createWriteStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-write.txt'));
  }),
  createReadStream: jest.fn().mockImplementation(() => {
    return fs.createReadStream(path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt'));
  }),
};

jest.mock('lodash', () => {
  return {
    find: jest.fn().mockImplementation(() => {
      return mockedFile;
    })
  };
});

const mockedBucket = {
  file: jest.fn(() => mockedFile),
  getFiles: jest.fn().mockImplementation(() => {
    const fileArray = new Array();
    fileArray.push(mockedFile);
    return fileArray;
  })
};

const mockedStorage = {
  bucket: jest.fn(() => mockedBucket)
};

jest.mock('@google-cloud/storage', () => {
  return {
    Storage: jest.fn(() => mockedStorage)
  };
});

jest.mock('pump', () => {
  const mPump = { on: jest.fn() };
  return jest.fn(() => mPump);
});

describe('Test suite for testing bucket operations', () => {
  function cancelCloudStorageMock() {
    jest.unmock('@google-cloud/storage');
    jest.requireActual('@google-cloud/storage');
  }

  function cancelFsMock() {
    jest.unmock('fs');
    jest.requireActual('fs');
  }

  afterEach(() => {
    jest.clearAllMocks();
    // jest.restoreAllMocks();
  });

  test('test for uploadfiles - success', async (done) => {
    pump().on = jest.fn(function(this: any, event, callback) {
      if (event === 'finish') {
        callback();
      }
      return this;
    });
    const actual = await BucketOperations.uploadEnvFiles('dev');
    expect(actual).toEqual(
      expect.objectContaining({
        status: 'Success',
        code: 201,
      })
    );
    done();
  });

  test('test downloadEnvFiles - success', async (done) => {
    jest.unmock('fs');
    const downloadRes = await BucketOperations.downloadEnvFiles('dev');
    expect(downloadRes).toBeDefined();
    expect(downloadRes).toEqual(expect.objectContaining({ code: 200, status: 'Success' }));
    done();
  });

  test('test for uploadfiles - failure', async (done) => {
    cancelCloudStorageMock();
    const bucketStorageSpy = jest
      .spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
      .mockImplementation(() => {
        return new Storage({
          projectId: 'testId',
          keyFilename: path.resolve(process.cwd(), './tests/cloud-storage/sample-read.txt'),
          scopes: ['testScope'],
          autoRetry: false,
        });
      });
    const mockReadable = new PassThrough();
    const mockWritable = new PassThrough();
    fs.createWriteStream = jest.fn().mockReturnValue(mockWritable);
    fs.createReadStream = jest.fn().mockReturnValue(mockReadable);
    pump().on = jest.fn(function(this: any, event, callback) {
      if (event === 'error') {
        callback();
      }
      return this;
    });
    const actual = BucketOperations.uploadEnvFiles('prod');
    expect(actual).rejects.toEqual(
      expect.objectContaining({
        status: 'Error',
        code: 500,
      })
    );
    expect(bucketStorageSpy).toHaveBeenCalledTimes(1);
    done();
  });

  test('test download - make the actual call - rej with auth error', async (done) => {
    cancelCloudStorageMock();
    console.dir(Storage);
    const mockReadable = new PassThrough();
    const mockWritable = new PassThrough();
    fs.createWriteStream = jest.fn().mockReturnValue(mockWritable);
    fs.createReadStream = jest.fn().mockReturnValue(mockReadable);
    const createGcloudAuthenticationBucketSpy = jest
      .spyOn(GcloudAuthenticationInstance, 'createGcloudAuthenticationBucket')
      .mockImplementation(() => {
        return new Storage();
      });
    try {
      await BucketOperations.downloadEnvFiles('dev');
    } catch (err) {
      expect(err.code).toBe(500);
      expect(err.status).toBe('Error');
    }
    expect(createGcloudAuthenticationBucketSpy).toHaveBeenCalledTimes(1);
    createGcloudAuthenticationBucketSpy.mockReset();
    done();
  });
});
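Since mockedStorage and mockedBucket are plain objects holding jest.fn()s at the top level of the spec, you can also assert which bucket and file the implementation touched. A small hedged addition that would sit inside the same describe block, assuming GCLOUD_ENV_STR_BUCKET_NAME is imported from '../common/util/app.constants' as in the earlier version of the spec:

test('uploads to the configured bucket', async () => {
  pump().on = jest.fn(function(this: any, event, callback) {
    if (event === 'finish') callback();
    return this;
  });
  await BucketOperations.uploadEnvFiles('dev');
  // The mocked Storage returns mockedBucket, so the calls made inside
  // uploadEnvFiles are recorded on these jest.fn()s.
  expect(mockedStorage.bucket).toHaveBeenCalledWith(GCLOUD_ENV_STR_BUCKET_NAME);
  expect(mockedBucket.file).toHaveBeenCalled();
});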
