node-rdkafka producer not connecting/producing in docker alpine but running perfectly locally - node.js

I have the following setup
Dockerfile
FROM node:alpine
WORKDIR /usr/src/app
# node-rdkafka compiles a native addon against librdkafka at `npm install` time,
# so the development headers (librdkafka-dev) are required in addition to the
# runtime library. Without them the addon build falls back/fails during install,
# which matches the reported build errors and the stuck producer behavior.
RUN apk --no-cache --update add python3 py3-pip alpine-sdk bash librdkafka librdkafka-dev openssl-dev
COPY package*.json ./
RUN npm install
COPY . .
EXPOSE 5004
CMD ["node", "app.js"]
A bills.js file
import fs from "fs";
import readline from "readline";
/**
 * Read a text file and resolve with an array of its lines.
 * Rejects fast when the file is not readable, and forwards any stream error
 * instead of letting it escape as an uncaught exception.
 */
function readAllLines(path) {
    return new Promise((resolve, reject) => {
        // Test file access directly, so that we can fail fast.
        // Otherwise, an ENOENT is thrown in the global scope by the readline internals.
        try {
            fs.accessSync(path, fs.constants.R_OK);
        } catch (err) {
            // Bug fix: must return here — otherwise execution falls through and
            // createReadStream is still opened on the unreadable path.
            return reject(err);
        }
        let lines = [];
        const stream = fs.createReadStream(path);
        // Bug fix: surface stream errors (e.g. the file vanishing between the
        // access check and the read) instead of crashing the process.
        stream.on('error', reject);
        const reader = readline.createInterface({
            input: stream,
            crlfDelay: Infinity // treat \r\n as a single line break
        });
        reader
            .on('line', (line) => lines.push(line))
            .on('close', () => resolve(lines));
    });
}
/**
 * Parse a librdkafka-style properties file into a plain config object.
 * Lines starting with '#' and blank lines are ignored.
 * Bug fix: split only on the FIRST '=' — values such as SASL passwords and
 * secrets frequently contain '=' and were previously truncated.
 */
const configFromPath = async function configFromPath(path) {
    const lines = await readAllLines(path);
    return lines
        .filter((line) => line.trim() !== '' && !/^\s*?#/.test(line))
        .reduce((config, line) => {
            const sep = line.indexOf('=');
            if (sep === -1) {
                // Robustness: skip malformed lines instead of creating
                // `undefined`-valued entries.
                return config;
            }
            const key = line.slice(0, sep).trim();
            config[key] = line.slice(sep + 1).trim();
            return config;
        }, {});
};
// Copy
// Next, we are going to create the producer application by pasting the following code into a file named producer.js:
import Kafka from "node-rdkafka";
/**
 * Build the librdkafka configuration object for the producer.
 * When 'security.protocol' is present, the SASL credentials are copied along
 * with it and acks are requested; otherwise only the broker list is used.
 * Delivery reports are always enabled via dr_msg_cb.
 */
function createConfigMap(config) {
    const base = {
        'bootstrap.servers': config['bootstrap.servers'],
        'dr_msg_cb': true
    };
    if (!config.hasOwnProperty('security.protocol')) {
        return base;
    }
    return {
        ...base,
        'sasl.username': config['sasl.username'],
        'sasl.password': config['sasl.password'],
        'security.protocol': config['security.protocol'],
        'sasl.mechanisms': config['sasl.mechanisms'],
        'request.required.acks': 1
    };
}
/**
 * Create and connect a node-rdkafka Producer.
 * Resolves with the producer once it emits 'ready'; rejects on 'event.error'.
 * Delivery reports are forwarded to onDeliveryReport.
 */
function createProducer(config, onDeliveryReport) {
    const producer = new Kafka.Producer(createConfigMap(config));
    return new Promise((resolve, reject) => {
        const failWith = (err) => {
            console.warn('event.error', err);
            reject(err);
        };
        producer.on('delivery-report', onDeliveryReport);
        producer.on('event.error', failWith);
        producer.on('ready', () => resolve(producer));
        producer.connect();
    });
}
// Read the client config from kafka.properties, then produce one
// JSON-serialized record to the "bill" topic and flush before disconnecting.
async function produceExample(value,key) {
let configPath ="kafka.properties";
const config = await configFromPath(configPath);
// node-rdkafka expects a Buffer (or string) payload.
value = Buffer.from(JSON.stringify(value));
let topic = "bill";
const producer = await createProducer(config, (err, report) => {
if (err) {
console.warn('Error producing', err)
} else {
// Delivery report: log the destination topic plus key/value, key padded
// to a fixed width for aligned output.
const {topic, key, value} = report;
let k = key.toString().padEnd(10, ' ');
console.log(`Produced event to topic ${topic}: key = ${k} value = ${value}`);
}
});
// -1 is the "unassigned partition" sentinel: librdkafka picks the partition.
producer.produce(topic, -1, value, key);
// NOTE(review): delivery-report callbacks only fire while the client is
// polled — presumably flush() drives the poll loop here; confirm, or call
// producer.setPollInterval() after connecting.
producer.flush(10000, () => {
producer.disconnect();
});
}
Here's my config file
bootstrap.servers=xxxx.gcp.confluent.cloud:9092
security.protocol=SASL_SSL
sasl.mechanisms=PLAIN
sasl.username=xxxxxx
sasl.password=xxxxxx
# Best practice for higher availability in librdkafka clients prior to 1.7
session.timeout.ms=45000
If I debug this code and write a console.log just below producer.connect(), it's working, but execution is not coming down to
producer.produce(topic, -1, value, key);
I have tested this code without dockerizing and it's working fine.
I also see some errors while building node-rdkafka when Dockerfile run npm install
I think node-rdkafka is not building properly and that's causing the stuck behavior.
Here are some images of the build error I suspect
Many of these types of errors
Edit:
I'm now getting this error even after building librdkafka from source. Error

Related

How to write the unit test for 'fs unlink' using vitest for the follow function?

deleteDuplicatedImage.ts
import { unlink, PathLike } from "fs";
import { logger } from "../libraries";
/**
 * Delete an image file that already exists in the db.
 * Rethrows any unlink error from the callback; logs on success.
 */
export const deleteDuplicatedImage = (imagePath: PathLike) => {
  unlink(imagePath, (error) => {
    if (error) {
      throw error;
    }
    // if no error is thrown, file has been deleted successfully
    logger.info("File was deleted as it already exists in the db!");
  });
};
This is the function for which I'm writing test case using vitest framework.
Though, I tried to write the test for it in the following way
deleteDuplicatedImage.spec.ts
require("dotenv").config();
import { nanoid } from "nanoid";
import { afterEach, describe, expect, it, vi } from "vitest";
import * as deleteDuplicatedImage from "../../src/lib/utilities/deleteDuplicatedImage";
const testImagePath: string = `${nanoid()}-testImagePath`;
// NOTE(review): this suite spies on deleteDuplicatedImage itself and swaps in a
// no-op implementation, so the real function body never executes — that is why
// the coverage report shows the function as uncovered. To gain coverage, mock
// fs.unlink instead (e.g. with vi.mock("fs")) and invoke the real
// deleteDuplicatedImage, asserting on the unlink spy and on logger.info.
describe("utilities -> deleteDuplicatedImage", () => {
afterEach(() => {
vi.restoreAllMocks();
});
it("it should throw an error", async () => {
const mockedDeleteDuplicatedImage = vi
.spyOn(deleteDuplicatedImage, "deleteDuplicatedImage")
.mockImplementation((_imagePath: any) => {});
// Calls the mock, not the real implementation.
deleteDuplicatedImage.deleteDuplicatedImage(testImagePath);
expect(mockedDeleteDuplicatedImage).toBeCalledWith(testImagePath);
// Trivially true: the no-op mock always returns undefined.
expect(
deleteDuplicatedImage.deleteDuplicatedImage(testImagePath)
).toBeUndefined();
});
});
The test also passes, but it does not actually cover the code!
It should have 100% test coverage

Cypress is returning an empty array when trying to log sheetnames of an excel file

I am currently trying to get the sheetnames of an excel file but Cypress is returning an empty array. Is there something I missed? I'll be using it to verify data on later steps.
I'm using Cypress 9.6.0 with Cucumber. Below are my scripts and screenshots:
index.js for task
module.exports = (on, config) => {
on('file:preprocessor', cucumber());
on('task', {
checkExcelSheetContents(args){
if (fs.existsSync(args.filePath)) {
const workbook = xlsx.readFile(args.filePath);
return xlsx.utils.sheet_to_json(workbook.SheetNames)
} else {
throw new Error ("File not found")
}
}
})
return Object.assign({}, config, {
fixturesFolder: 'cypress/fixtures',
integrationFolder: 'cypress/integration',
screenshotsFolder: 'cypress/screenshots',
videosFolder: 'cypress/videos',
supportFile: 'cypress/support/index.js'
});
}
.js file
And ('try', () => {
    var excelFilePath = "../CreateAutomatedTests/cypress/downloads/courses20220714_09_51_27.xlsx"
    cy.wrap(excelFilePath).as('filePath')
    // Bug fix: an alias created with .as('filePath') is read back with the
    // '@' alias syntax — cy.get('#filePath') looks for a DOM element with
    // id="filePath" and never yields the path.
    cy.get('@filePath').then((filePath) => {
        cy.task('checkExcelSheetContents', { filePath }).then((contents) => {
            cy.log(contents)
        })
    })
})
Please see these screenshots as well
I've always used the buffer version of xlsx.read().
From xlsx package
For Node ESM, the readFile helper is not enabled. Instead, fs.readFileSync should be used to read the file data as a Buffer for use with XLSX.read:
// Quoted from the xlsx docs: in Node ESM the readFile helper is disabled, so
// read the file into a Buffer and hand it to XLSX.read instead.
import { readFileSync } from "fs";
import { read } from "xlsx/xlsx.mjs";
const buf = readFileSync("test.xlsx");
/* buf is a Buffer */
const workbook = read(buf);
Your task:
on('task', {
    // Read the workbook as a Buffer (works in Node ESM) and return its sheet names.
    checkExcelSheetContents(args){
        if (fs.existsSync(args.filePath)) {
            // Bug fix: 'file' was undefined here — the path arrives as args.filePath.
            const buf = fs.readFileSync(args.filePath);
            const workbook = xlsx.read(buf, { type: 'buffer' });
            return workbook.SheetNames
        } else {
            throw new Error ("File not found")
        }
    }
})

Update an imported module in Typescript

I'm sorry, but I'm kinda new in this language.
I was creating a custom discord bot these days and I got stuck on this problem...
I gave this bot the possibility to load the commands dynamically from a folder with one module for each command, but now I was trying to make a command to reload them all, but each time after the commands are reloaded the output is always the same.
Here is the code:
// NOTE(review): this is the version that never picks up edited command files.
// When TypeScript compiles dynamic import() down to CommonJS require(), each
// module stays in require.cache, so re-importing returns the previously cached
// code. The cache entries must be evicted first (as done in the later version
// in this file).
refreshCommands = () => {
this.commands = {};
console.log("Refreshing commands");
Promise.all(fs.readdirSync("./dist/commands").map(file => {
// anti-pattern: async promise executor — a rejection inside it is swallowed
return new Promise(async resolve => {
const tmp = (await import(`./commands/${file}`)).default;
this.commands[tmp.name] = tmp;
resolve(tmp);
});
})).then(() => {
console.log("Listing commands: ");
console.log(Object.keys(this.commands));
});
}
Of course I update the commands from the js file, and not from the ts 'cause I would have to compile it again.
I tried to make a simple "ping! Pong!" like command, and then to edit it to "ping! ping!" on runtime before using the //reload command, but it keeps writing "ping! Pong!"
Edit 1:
The modules I have to import are made like this one:
import command from "../utils/command";
import { Guild, GuildEmoji, GuildEmojiManager, Message, MessageEmbed, Role } from "discord.js";
import { games } from "../utils/games";
import app from "../app";
import ReactionListener from "../utils/reactionListener";
// Command module: posts a "React to set your ROLE!" embed and registers a
// ReactionListener that grants/revokes the game role mapped to each emoji.
const roleMessage: command = {
name: "rolesMessage",
description: "",
execute: async (message, bot) => {
// Delete the invoking message, then ensure every game role exists.
message.delete();
createRoles(message.guild as Guild);
const embed = new MessageEmbed()
.setColor('#F00')
.setTitle("React to set your ROLE!");
// One embed field per game, rendered with the guild's custom emoji.
games.forEach(game => {
let emoji = message.guild?.emojis.cache.find(emoji => emoji.name === game.emoji);
console.log(emoji);
embed.fields.push({
name: game.name,
value: (emoji as GuildEmoji).toString(),
inline: false
});
});
const msg = await message.channel.send(embed);
// First callback: reaction added -> add the mapped role to the member.
// Second callback: reaction removed -> remove the mapped role.
app.reactionListeners.push(new ReactionListener(msg,
(reaction, user) => {
let tmp = games.find(game=> reaction.emoji.name === game.emoji);
if(tmp){
//msg.channel.send(tmp);
const role = (message.guild as Guild).roles.cache.find(role => role.name === tmp?.roleName) as Role;
message.guild?.members.cache.find(member => member.id === user.id)?.roles.add(role);
}else{
// Emoji not mapped to a game: undo the reaction.
reaction.remove();
}
}, (reaction, user)=>{
let tmp = games.find(game=> reaction.emoji.name === game.emoji);
if(tmp){
//msg.channel.send(tmp);
const role = (message.guild as Guild).roles.cache.find(role => role.name === tmp?.roleName) as Role;
message.guild?.members.cache.find(member => member.id === user.id)?.roles.remove(role);
}
})
);
// Seed the posted message with one reaction per game for users to click.
games.forEach(game => {
msg.react((message.guild?.emojis.cache.find(emoji => emoji.name === game.emoji) as GuildEmoji));
});
}
}
// Create any game roles that do not exist yet in the guild.
const createRoles = (guild: Guild) => {
games.forEach(game => {
if(!guild.roles.cache.find(role => role.name === game.roleName)){
guild.roles.create({
data: {
name: game.roleName,
color: "#9B59B6",
},
reason: 'we needed a role for Super Cool People',
})
.then(console.log)
.catch(console.error);
}
});
}
export default roleMessage;
This is a different one from the one I was talking about earlier, but the problem is the same... Once I update and reload it (from the compiled js version), the old version keeps being run.
I managed to find a solution to the problem.
As Node.js caches every module once it is imported, I deleted it from the cache like this
// Reload every command module from disk, bypassing the CommonJS module cache.
refreshCommands = () => {
    const files = fs.readdirSync("./dist/commands");
    // Phase 1: evict every command from require.cache FIRST, so commands that
    // require each other cannot re-cache a stale sibling during re-import.
    // (Dynamic import() is compiled to require() for CommonJS output, which is
    // why clearing require.cache is effective here.)
    for (const file of files) {
        delete require.cache[require.resolve('./commands/' + file)];
    }
    this.commands = {};
    console.log("Refreshing commands");
    // Phase 2: re-import everything and rebuild the command table. This also
    // fixes the original's duplicated directory scan and its async-promise-
    // executor wrappers around synchronous cache deletion.
    Promise.all(files.map(async (file) => {
        const command = (await import(`./commands/${file}`)).default;
        this.commands[command.name] = command;
        return command;
    })).then(() => {
        console.log("Listing commands: ");
        console.log(Object.keys(this.commands));
    });
}
The code might look like garbage, but it actually works... I'm on my way to make it better, but meanwhile I can rely on it.
Any suggestion is well accepted

Maintaining native module dependencies for node.js and electron at the same time

I am trying to build an Electron app that requires nodegit, which is a native module. As far as I know, a native module's native library must targets the same NODE_MODULE_VERSION as the run-time engine (I mean Node.js or Electron) does.
For example, if my Electron runs with NODE_MODULE_VERSION 64, then my nodegit should be installed with a native library that targets NODE_MODULE_VERSION 64.
Currently I have some tests in my project, and I would like to run them on both Electron and Node.js, because (1) Electron is closer to the environment of the final product and (2) Node.js is much easier to debug.
To achieve this goal, the native module must be compatible with both Electron and Node.js at the same time. However, this is nearly impossible.
The funny thing is that, from the charts that list the NODE_MODULE_VERSION of Electron versions (it is called Chrome version in this chart) and Node.js versions, their NODE_MODULE_VERSION rarely match. It is hard to find a Electron version that uses a Node.js which also use the same NODE_MODULE_VERSION. As a consequence, I have to settle down with Electron and Node.js using different NODE_MODULE_VERSION. In other words, the native module can only be compatible with either Electron or Node.js, not both of them.
I am curious about if it is possible to separate the native module used by Node.js and Electron without rebuilding the module or is there a version switching functionality to let me quickly switch the version of the native module?
Or it would be even better if anyone can share a way to make Electron and Node.js use the same NODE_MODULE_VERSION.
Don't know if there is a better solution, I came up with an extremely simple script that copy and paste module files with environment selection (attached below).
Would still greatly appreciate any good idea about how to solve this problem.
'use strict';
// Environment switcher for native modules: caches per-environment builds of a
// module under ../.module_cache and swaps them into node_modules on demand.
const fs = require('fs-extra');
const path = require('path');
let args = process.argv.slice(2); // CLI arguments after "node <script>"
let cachePath = path.resolve(__dirname, '../.module_cache');
let configPath = path.join(cachePath, '.config'); // JSON map: moduleName -> active env
let modulePath = path.resolve(__dirname, '../node_modules');
// Entry point: dispatch on the subcommand and report any failure to stderr.
wrapper(args)
.catch(err => {
console.error(err);
})
/**
 * Dispatch the CLI subcommand. args[0] is the command name; the remaining
 * entries are its parameters. Always returns a promise so the caller can
 * surface errors uniformly.
 */
function wrapper(args) {
    switch (args[0]) {
        case 'save':
            return saveModule(args[1], args[2]);
        case 'load':
            return loadModule(args[1], args[2]);
        case 'drop':
            // args = ['drop', <module>, <environment>?]
            if (args.length === 3) {
                // Bug fix: the environment name (args[2]) was not forwarded,
                // so dropModuleEnvironment always received undefined.
                return dropModuleEnvironment(args[1], args[2]);
            }
            else {
                return dropModule(args[1]);
            }
        case 'ls':
            return listModules();
        case 'which':
            return printWhichModule(args[1]);
        case 'versions':
            return listModuleVersions(args[1]);
        case 'help':
            printUsage();
            return Promise.resolve();
        default:
            printUsage();
            // Typo fix in the error message: "Unexcepted" -> "Unexpected".
            return Promise.reject(new Error("Unexpected arguments: " + args.join(', ')));
    }
}
// Print the CLI usage help. The template literal below is emitted verbatim,
// so its line layout is part of the output — do not reformat it.
function printUsage() {
console.log(`
Usage:
save <module> <environment>: cache a module environment for later use
load <module> <environment>: load a previously saved module environment, and set it to be active
drop <module> [environment]: remove all cached module environments,
or when [environment] is provided, remove the specified environment
ls: list all cached modules and their current environment
which <module>: show the active environment for the module
versions <module>: list all available environments for the module. Active environment is marked by "*"
help: show this help info`);
}
// Cache the module's current node_modules copy under the given environment name.
async function saveModule(moduleName, envName) {
    const storePath = path.join(cachePath, moduleName, envName);
    const sourcePath = path.join(modulePath, moduleName);
    await fs.emptyDir(storePath);
    await fs.copy(sourcePath, storePath);
    // NOTE(review): the active environment is recorded as '.system.' here —
    // presumably meaning "the copy the system installed". Confirm this is
    // intended rather than recording envName as active.
    return updateConfig(moduleName, ".system.");
}
// Activate a previously cached environment by copying it into node_modules.
// No-op (with a message) when that environment is already active.
async function loadModule(moduleName, envName) {
    const storePath = path.join(cachePath, moduleName, envName);
    const targetPath = path.join(modulePath, moduleName);
    const currentVersion = await whichModuleVersion(moduleName);
    if (currentVersion === envName) {
        console.log(`Not loading ${envName} for ${moduleName} because it is current version`);
        return;
    }
    await fs.emptyDir(targetPath);
    await fs.copy(storePath, targetPath);
    return updateConfig(moduleName, envName);
}
/**
 * Remove one cached environment for a module. If it was the active one,
 * reset the module's active environment to '.system.' in the config file.
 */
function dropModuleEnvironment(moduleName, envName) {
    let storePath = path.join(cachePath, moduleName, envName);
    return fs.remove(storePath)
        .then(() => {
            return fs.readFile(configPath)
                .then(configRaw => {
                    let config = JSON.parse(configRaw);
                    let currentEnv = config[moduleName];
                    if (currentEnv && currentEnv === envName) {
                        // Bug fix: reset the entry keyed by the MODULE name.
                        // The old code wrote config[currentEnv], which created
                        // a bogus key and left the module's entry stale.
                        config[moduleName] = '.system.';
                    }
                    return JSON.stringify(config);
                })
                .then(configRaw => {
                    return fs.writeFile(configPath, configRaw);
                });
        });
}
// Delete every cached environment for the module and drop its config entry.
async function dropModule(moduleName) {
    await fs.remove(path.join(cachePath, moduleName));
    const configRaw = await fs.readFile(configPath);
    const config = JSON.parse(configRaw);
    if (config[moduleName]) {
        delete config[moduleName];
    }
    return fs.writeFile(configPath, JSON.stringify(config));
}
// Print every known module together with its active environment.
async function listModules() {
    const configRaw = await fs.readFile(configPath);
    const config = JSON.parse(configRaw);
    for (const moduleName of Object.keys(config)) {
        printModuleVersion(moduleName, config[moduleName]);
    }
}
// Show the active environment for a single module.
async function printWhichModule(moduleName) {
    const version = await whichModuleVersion(moduleName);
    printModuleVersion(moduleName, version);
}
// List every cached environment for the module, marking the active one with '*'.
async function listModuleVersions(moduleName) {
    // Renamed local (was 'modulePath') to stop shadowing the top-level
    // node_modules path.
    const moduleCachePath = path.join(cachePath, moduleName);
    const exists = await fs.exists(moduleCachePath);
    if (!exists) {
        console.log('not installed');
        return;
    }
    const currentVersion = await whichModuleVersion(moduleName);
    const envNames = await fs.readdir(moduleCachePath);
    for (const envName of envNames) {
        if (currentVersion === envName) {
            console.log('* ' + envName);
        }
        else {
            console.log(' ' + envName);
        }
    }
}
// Look up the module's active environment name from the config file.
async function whichModuleVersion(moduleName) {
    const configRaw = await fs.readFile(configPath);
    return JSON.parse(configRaw)[moduleName];
}
// Print "<module>: <version>"; falls back to 'not installed' when the
// version is missing/falsy.
function printModuleVersion(moduleName, moduleVersion) {
    const label = moduleVersion ? moduleVersion : 'not installed';
    console.log(moduleName + ': ' + label);
}
/**
 * Record envName as the active environment for moduleName in the config file.
 * Resolves only after the file has actually been written.
 */
function updateConfig(moduleName, envName) {
    return fs.readFile(configPath)
        .then(configRaw => {
            let config = JSON.parse(configRaw);
            config[moduleName] = envName;
            return JSON.stringify(config);
        })
        .then(configRaw => {
            // Bug fix: return the write promise. The old code dropped it, so
            // callers could resolve before the config file was persisted.
            return fs.writeFile(configPath, configRaw);
        })
}

Dynamically mock dependencies with Jest

I have a method that logs a message via one function in a node environment and via a different function in a browser environment. To check whether I am in a node or browser environment I use the libraries detect-node and is-browser like so:
const isNode = require('detect-node');   // true when running under Node.js
const isBrowser = require('is-browser'); // true when running in a browser
// Route a log record to the transport matching the current environment.
// NOTE(review): shown as a bare method body — it belongs to the Logger class
// and reads this.nodeTransport / this.browserTransport set up elsewhere.
log(level, message, data) {
if (isNode) {
this.nodeTransport.log(level, this.name, message, data);
}
if (isBrowser) {
this.browserTransport.log(level, this.name, message, data);
}
}
The variables isNode and isBrowser are set to true and false (automatically via the package) depending on, well, if I'm in a browser or in a node env.
Now I want to test this behavior using jest so I need to mock these npm packages. This is what I tried:
// Build a fresh Logger whose transports are all replaced with jest mocks.
function setup() {
    const loggerName = 'Test Logger';
    const logger = new Logger(loggerName);
    for (const transport of ['nodeTransport', 'browserTransport', 'splunkTransport']) {
        logger[transport] = { log: jest.fn() };
    }
    return { logger, loggerName };
}
// NOTE(review): jest.mock() calls are hoisted to the top of the module by
// babel-jest; calling them inside a test body — after the module under test has
// already required 'detect-node'/'is-browser' — has no effect. That is why the
// first test passes (jest itself runs in node, so isNode is genuinely true) and
// the second still sees the node transport being called. To vary the
// environment per test, use jest.resetModules() + jest.doMock() and re-require
// the logger module inside each test.
test('it should call the the appropriate transports in a node environment', () => {
const { logger } = setup();
const message = 'message';
jest.mock('detect-node', () => true); // no-op at this point (already loaded)
jest.mock('is-browser', () => false); // no-op at this point (already loaded)
logger.log('error', message, []);
expect(logger.nodeTransport.log).toHaveBeenCalled();
expect(logger.browserTransport.log).not.toHaveBeenCalled();
});
test('it should call the the appropriate transports in a browser environment', () => {
const { logger } = setup();
const message = 'message';
jest.mock('detect-node', () => false); // no-op at this point (already loaded)
jest.mock('is-browser', () => true);  // no-op at this point (already loaded)
logger.log('error', message, []);
expect(logger.nodeTransport.log).not.toHaveBeenCalled();
expect(logger.browserTransport.log).toHaveBeenCalled();
});
You see, I am using jest.mock to mock detect-node and is-browser and give it different return values. However, this just does not work. The first test is green because (I assume) Jest runs in node, but the second test fails saying
Expected mock function not to be called but it was called with:
["error", "Test Logger", "message", []]
Use .mockClear() to reset the mock calls between tests.
// Suggested remedy from the failure message: clear recorded calls between tests.
// NOTE(review): as written above, setup() builds a fresh logger (and fresh
// mocks) per test, so this only helps if the logger instance is shared across
// tests — and it does not fix the jest.mock hoisting issue described above.
afterEach(() => {
logger.nodeTransport.log.mockClear();
logger.browserTransport.log.mockClear();
});

Resources