I am using the node-rdkafka library to implement a Node.js Kafka consumer with the pause and resume methods to handle back-pressure. I have created a small demo where I can pause and resume the consumer, but the problem is that after resuming, the consumer stops consuming messages.
Here is my code.
const Kafka = require('node-rdkafka');
const topic = 'create_user_channel';
const log_divider = '-----------------------------------';

const consumer = new Kafka.KafkaConsumer({
    'group.id': 'gsuite_consumer',
    'metadata.broker.list': '*******',
    'sasl.mechanisms': 'PLAIN',
    'sasl.username': '********',
    'sasl.password': '********',
    'security.protocol': 'SASL_SSL',
    'enable.auto.commit': false
}, {});

// Connect the consumer.
consumer.connect({ timeout: "1000ms" }, (err) => {
    if (err) {
        console.log(`Error connecting to Kafka broker: ${err}`);
        process.exit(-1);
    }
    console.log("Connected to Kafka broker");
});

consumer.on('disconnected', (args) => {
    console.error(`Consumer got disconnected: ${JSON.stringify(args)}`);
});

let max_queue_size = 3;
let current_queue = [];
let is_pause = false;

// Register ready handler.
consumer.on('ready', (arg) => {
    console.log('consumer ready.' + JSON.stringify(arg));
    console.log('Consumer is ready');
    consumer.subscribe([topic]);
    setInterval(function() {
        console.log('consumer has consume on :' + timeMs());
        consumer.consume();
    }, 1000);
});

consumer.on('data', async (data) => {
    console.log('************consumer is consuming data***********:' + timeMs());
    if (!is_pause) {
        is_pause = true;
        if (data && typeof data !== 'undefined') {
            try {
                console.log('consumer has received the data:' + timeMs());
                consumer.pause([topic]);
                console.log('consumer has pause the consuming:' + timeMs());
                await processMessage(data);
                console.log('consumer is resumed:' + timeMs());
                consumer.resume([topic]);
                console.log(log_divider);
                is_pause = false;
            } catch (error) {
                console.log('data consuming error');
                console.log(error);
            }
        } else {
            is_pause = false;
        }
    }
});

async function processMessage(data) {
    // await print_bulk(data);
    await processData(0, data);
}

async function print_bulk(data) {
    for (var i = 0; i < data.length; i++) {
        await processData(i, data[i]);
    }
}

/**
 * Wait specified number of milliseconds.
 * @param ms
 */
async function wait(ms) {
    console.log('wait for the 3 sec');
    return new Promise((resolve) => setTimeout(resolve, ms));
}

var timeMs = () => {
    var d = new Date();
    var h = addZero(d.getHours(), 2);
    var m = addZero(d.getMinutes(), 2);
    var s = addZero(d.getSeconds(), 2);
    var ms = addZero(d.getMilliseconds(), 3);
    return h + ":" + m + ":" + s + ":" + ms;
};

var addZero = (x, n) => {
    while (x.toString().length < n) {
        x = "0" + x;
    }
    return x;
};

async function processData(i, m) {
    if (m) {
        console.log('processing a data start:' + timeMs());
        console.log('Received a message:');
        console.log('  message: ' + m.value.toString());
        console.log('  key: ' + m.key);
        console.log('  size: ' + m.size);
        console.log('  topic: ' + m.topic);
        console.log('  offset: ' + m.offset);
        console.log('  partition: ' + m.partition);
        consumer.commitMessage(m);
    }
    await wait(3000);
    console.log('process a data completed:' + timeMs());
    // delete current_queue[i];
    // console.log('after deleting, length of current queue:' + current_queue.length);
    // console.log(log_divider);
    return true;
}
Can anybody help me with what I am doing wrong while resuming the consumer? When I start the consumer it receives only one message, and after resuming it still does not consume any further messages.
I have figured out the issue: along with the consumer.pause() and consumer.resume() methods, I need to use the consumer.assignments() method as well.
So it will be like this:
consumer.pause(consumer.assignments());
consumer.resume(consumer.assignments());
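For anyone hitting the same thing: node-rdkafka's pause() and resume() expect an array of topic-partition objects, which is exactly what consumer.assignments() returns, rather than topic-name strings. A minimal sketch of the corrected data handler, reusing the variables from the demo above:

consumer.on('data', async (data) => {
    if (!is_pause) {
        is_pause = true;
        try {
            // Pass the consumer's current topic-partition assignments,
            // not the topic name, to pause()/resume().
            consumer.pause(consumer.assignments());
            await processMessage(data);
            consumer.resume(consumer.assignments());
        } catch (error) {
            console.log('data consuming error', error);
        }
        is_pause = false;
    }
});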
The problem is: in the controller, the response is sent before the condition inside the setInterval function produces a result.
Steps:
1. The controller runs the function that starts setInterval.
2. It then sends the response to the frontend with res.status(...).json(...).
3. The setInterval callback runs.
4. The condition inside setInterval does its work and finishes.
5. setInterval doesn't return the result to the controller, which has already sent a response.
import checkHealth from "./checkHealth.js";

async function checkHealthInterval(labelData) {
    console.log("labelData==>", labelData);
    let resultInterval = { health: "", message: "" };
    let i = 1;
    let interval;
    interval = await setInterval(async () => {
        let healthCheckers = await checkHealth();
        let healthCheckerStatus;
        if (labelData == "example1") {
            healthCheckerStatus = healthCheckers.statusInstanceGlobal;
            console.log("healthCheckerStatus value from tenant1 : ", healthCheckerStatus);
        } else {
            healthCheckerStatus = healthCheckers.statusWam1Wam2;
            console.log("healthCheckerStatus value from tenant2 : ", healthCheckerStatus);
        }
        i++;
        if (healthCheckerStatus !== true && i > 36) {
            clearInterval(interval);
            console.log("VMs are UNHEALTHY in the end of interval or before the end");
            this.isProcessing = false;
            throw new ApiError(500, "VMs are UNHEALTHY in the end of interval or before the end");
        } else if (healthCheckerStatus == true) {
            clearInterval(interval);
            console.log(`VMs ${labelData} are HEALTHY and successfully started!`);
            resultInterval.health = "healthy";
            resultInterval.message = "VMs are successfully started!";
            console.log("resultInterval into interval===>", resultInterval);
            return resultInterval;
        }
    }, 10000);
    console.log("Script interval is running to check health");
    return interval;
}

export { checkHealthInterval };
getStartVms = async (req, res, next) => {
    console.log("You asked to start VMs: starting VMs");
    try {
        this.isProcessing = true;
        const responseStart = await this.instanceService.startInstances();
        // Use an interval to check health of VMs with the imported function checkHealth().
        const resultInterval = await checkHealthInterval(responseStart?.labelData);
        res.status(200).json({
            health: "healthy",
            message: "VMs are successfully started!",
        });
        this.isProcessing = false;
        io.emit("processChanged", this.isProcessing);
    } catch (err) {
        this.isProcessing = false;
        io.emit("processChanged", this.isProcessing);
        next(err);
    }
};
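setInterval returns a timer ID immediately; awaiting it does not wait for the callback, and a return inside the callback goes nowhere. One way to make the controller actually wait is to wrap the polling in a Promise and resolve or reject from inside the callback. A sketch under that assumption, reusing checkHealth, ApiError, and the 36-attempt / 10-second limits from the code above:

import checkHealth from "./checkHealth.js";

// Wrap the polling in a Promise so the controller can await the outcome.
function checkHealthInterval(labelData) {
    return new Promise((resolve, reject) => {
        let attempts = 1;
        const interval = setInterval(async () => {
            const healthCheckers = await checkHealth();
            const healthCheckerStatus =
                labelData == "example1"
                    ? healthCheckers.statusInstanceGlobal
                    : healthCheckers.statusWam1Wam2;
            attempts++;
            if (healthCheckerStatus === true) {
                clearInterval(interval);
                resolve({ health: "healthy", message: "VMs are successfully started!" });
            } else if (attempts > 36) {
                clearInterval(interval);
                reject(new ApiError(500, "VMs are UNHEALTHY in the end of interval or before the end"));
            }
        }, 10000);
    });
}

export { checkHealthInterval };

With this shape, await checkHealthInterval(...) in getStartVms resolves with the result or throws into the catch block, so the response is only sent after the health check finishes.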
I have a setInterval function that's called in another function, and I need to stop it when the process is done. I tried to assign this setInterval call to a variable and call clearInterval, but the interval keeps running.
const createInterval = (visibilityTimeout, startDateTime, message) => {
    setInterval(() => {
        const currentDateTime = moment().valueOf();
        const timeDifference = (visibilityTimeout * 1000) - (currentDateTime - startDateTime);
        if (timeDifference >= 600000) {
            return;
        }
        if (timeDifference < 494983) {
            const params = {
                QueueUrl: 'http://localhost:4566/000000000000/test-queue2',
                ReceiptHandle: message.ReceiptHandle,
                VisibilityTimeout: visibilityTimeout,
            };
            sqs.changeMessageVisibility(params, (err, data) => {
                if (err) logger.error(err, err.stack);
                else logger.info(data);
            });
            // eslint-disable-next-line no-param-reassign
            visibilityTimeout += 300;
        }
    }, 5000);
};

module.exports = async (message) => {
    const startDateTime = moment().valueOf();
    const { noteId } = JSON.parse(message.Body);
    logger.info(`Processing message [noteId=${noteId}]`);
    try {
        const note = await TestSessionNote.findById(noteId);
        const testSession = await TestSession.findById(note.test_session_id);
        logger.info(`Downloading video [key=${testSession.video_key}]`);
        const isProcessing = true;
        const interval = createInterval(500, startDateTime, message, isProcessing);
        await sleep(20000);
        clearInterval(interval);
        logger.info(`Finished processing message [noteId=${noteId}]`);
    } catch (ex) {
        await TestSessionNote.update(noteId, { status: 'transcribe_error' });
        logger.error(`Error processing message [noteId=${noteId}]`, ex);
    }
};
I know that if I create var test = setInterval(() => { console.log('blabla') }, 500) and call clearInterval(test) it works, but I don't know how to do this when the interval is created inside a function.
I think you have to return the interval ID from the createInterval function; after that it should work.
Can you check what value your intervalId has right now, with your current implementation?
https://developer.mozilla.org/en-US/docs/Web/API/setInterval
"The returned intervalID is a numeric, non-zero value which identifies the timer created by the call to setInterval(); this value can be passed to clearInterval() to cancel the interval."
I want to insert around 60 million records into MongoDB using a Node.js script, so I have created a connection and reuse it like this:
connection.js
const MongoClient = require("mongodb").MongoClient,
    { mongourl, dbName } = require('../../env');

let db;
let mongoobject;

const option = {
    useUnifiedTopology: true,
    useNewUrlParser: true,
    socketTimeoutMS: 300000,
    poolSize: 1000,
    keepAlive: 300000,
    connectTimeoutMS: 300000,
};

const connectDb = (callback) => {
    if (db) return callback();
    MongoClient.connect(mongourl, option, (err, database) => {
        if (err) return console.log(err);
        db = database.db(dbName);
        mongoobject = database;
        callback();
    });
};

const getDb = () => {
    return db;
};

const connectclient = () => {
    return mongoobject;
};

module.exports = {
    connectDb,
    getDb,
    connectclient
};
And my insertion function is:
function saveData() {
    return new Promise(function (resolve, reject) {
        try {
            fs.access(dirPath, fs.F_OK, (err) => {
                if (err) {
                    console.error(err);
                    return;
                }
                const startTime = new Date();
                let numlines = 0;
                const fileReference = {};
                fs.readdir(dirPath, function (err, filenames) {
                    if (err) {
                        console.error("Directory Not Found");
                        return;
                    }
                    filenames.forEach(function (filename) {
                        const readInterface = new readline(dirPath + filename);
                        let promises = [];
                        fileReference[filename] = 0;
                        readInterface.on('line', function (line) {
                            fileReference[filename]++;
                            let countcol = line.split('\t').length,
                                productname = line.split("\t"),
                                productsku = line.split("\t"),
                                productprice = line.split("\t");
                            let product_sku, product_name, product_price;
                            if (countcol == 3) {
                                product_sku = productname.splice(0, 2).join("-").toLowerCase();
                                product_name = productsku.splice(0, 2).join(" ");
                                product_price = productprice.splice(-1, 1);
                            } else if (countcol == 4) {
                                let product_sku_ini = productsku.splice(0, 2).join("").toLowerCase(),
                                    product_sku_end = productsku.splice(0, 1).join(" ").toLowerCase(),
                                    product_name_ini = productname.splice(0, 2).join(""),
                                    product_name_end = productname.splice(0, 1).join(" ");
                                product_price = productprice.splice(-1, 1);
                                product_sku = product_sku_ini + "-" + product_sku_end;
                                product_name = product_name_ini + " " + product_name_end;
                                delete product_sku_ini, product_sku_end, product_name_ini, product_name_end, product_sku, product_name, product_price;
                            }
                            console.info('row start processing ==>>', filename, product_sku, line);
                            delete countcol, productname, productsku, productprice;
                            if (numlines >= 80000) {
                                readInterface.pause();
                                // console.log('promises:', promises)
                                Promise.all(promises)
                                    .then(response => {
                                        numlines = 0;
                                        promises = [];
                                        localStorage.setItem(filename, fileReference[filename]);
                                        console.info(`filename Batch Resolved 1 ========>> ${filename}`, localStorage.getItem(filename));
                                        console.log("====================================================== END 1============================================");
                                        readInterface.resume();
                                        // showHeapUses()
                                        // setTimeout(() => process.exit(), 500)
                                        // console.log('resume 1 time:', (new Date().getTime()) - startTime.getTime())
                                    })
                                    .catch(error => {
                                        console.info(`Error in executing`, error);
                                        numlines = 0;
                                        readInterface.resume();
                                        // console.log('resume 2 time:', (new Date()) - startTime)
                                    });
                            }
                            console.log("num line", numlines);
                            numlines++;
                            if (product_sku && product_name && product_price) {
                                const memoryUsedMb = process.memoryUsage().heapUsed / 1024 / 1024;
                                console.info('the program used', memoryUsedMb, 'MB');
                                async.waterfall([
                                    function (callback) {
                                        const checkskuexists = async () => {
                                            let checksamepro = { sku: product_sku };
                                            let check_doc_exist = db.collection(collectionName).findOne(checksamepro);
                                            return check_doc_exist;
                                        };
                                        checkskuexists().then(function (result) {
                                            if (result === null) {
                                                callback(true, 'PRODUCT_NOT_FOUND');
                                            } else {
                                                callback(null, result.sku);
                                            }
                                        });
                                    },
                                    function (sku, callback) {
                                        db.collection(collectionName).updateOne({ sku: sku }, { $set: { price: product_price } });
                                        resolve();
                                    },
                                ], function (err, result) {
                                    if (err) {
                                        if (err && result == 'PRODUCT_NOT_FOUND') {
                                            prodetails = { name: product_name, sku: product_sku, price: product_price, status: 'active' };
                                            db.collection(collectionName).insertOne(prodetails, function (err, res) {
                                                if (err) throw err;
                                                client.close();
                                            });
                                        }
                                        resolve();
                                    }
                                });
                                delete product_sku, product_name, product_price;
                            } else {
                                console.log('product is undefined -- so skiped', line);
                                delete product_sku, product_name, product_price;
                            }
                        });
                        readInterface.on('error', function (error) {
                            delete readInterface, fileReference, promises;
                            console.error("Error in reading file: ", error);
                        });
                        readInterface.on('end', function () {
                            // printPerformance(startTime);
                            localStorage.removeItem(filename);
                            Promise.all(promises)
                                .then(response => {
                                    console.info(`filename Batch Resolved 2 ========>> ${filename} -- Completed`);
                                    console.log("====================================================== END 2============================================");
                                })
                                .catch(error => {
                                    console.info(`Error in executing`, error);
                                });
                            delete readInterface, fileReference, promises;
                        });
                    });
                });
            });
        } catch (error) {
            reject("ERROR GOES HERE ", error);
        }
    });
}
The error I am getting is:
MongoNetworkError: connection 812 to 127.0.0.1:27017 closed
at /var/www/html/reg-dealers-mongodb-script/node_modules/mongodb/lib/cmap/connection.js:68:15
at Map.forEach (<anonymous>)
at Socket.<anonymous> (/var/www/html/reg-dealers-mongodb-script/node_modules/mongodb/lib/cmap/connection.js:67:20)
at Socket.emit (events.js:314:20)
at Socket.EventEmitter.emit (domain.js:483:12)
at TCP.<anonymous> (net.js:675:12)
This comes after some insertions, around 10k or 20k and sometimes around 100k. Only the connection number (812 here) differs; the rest of the error is always the same. Any idea why this is happening and how to solve it?
Your insertion function is too big to follow, but from the error it is clear that it keeps pulling new mongo connections from the pool.
Generally, when one connection is tied up in a blocking operation, other available connections from the pool are used to handle the incoming requests that need the db. You defined a pool size of 1000, which is why you see connection number 812 being closed.
It is not a wise idea to insert 60 million records at once. Instead, divide the work into smaller parts, organize your DB architecture, and follow the recommended practices for saving them (collection max size, read/write ops, indexing, etc.). When you need to save multiple documents, you should use the mongo function below:
db.collection.insertMany(
    [ <document 1>, <document 2>, ... ],
    {
        writeConcern: <document>,
        ordered: <boolean>
    }
)
For more details check this.
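As an illustration of the batching idea (a sketch, not the original script; BATCH_SIZE and the helper names are made up for the example), lines could be accumulated and flushed with insertMany instead of issuing a findOne/updateOne/insertOne round trip per row:

const BATCH_SIZE = 1000; // illustrative; tune to your document size

let batch = [];

async function queueDocument(db, collectionName, doc) {
    batch.push(doc);
    if (batch.length >= BATCH_SIZE) {
        await flushBatch(db, collectionName);
    }
}

async function flushBatch(db, collectionName) {
    if (batch.length === 0) return;
    // ordered: false lets the server continue past individual failures
    await db.collection(collectionName).insertMany(batch, { ordered: false });
    batch = [];
}

This keeps the number of in-flight operations bounded, so a single pooled connection can comfortably handle the load.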
I have a scenario where, on a given topic, I need to consume messages one by one, do some async task, and then consume the next one. I am using RabbitMQ and amqp.node.
I was able to achieve this with a prefetch of 1, which of course is not an actual solution, since it would lock the whole channel and the channel has multiple topics.
So far this is my producer:
const getChannel = require("./getChannel");

async function run() {
    const exchangeName = "taskPOC";
    const url = "amqp://queue";
    const channel = await getChannel({ url, exchangeName });
    const topic = "task.init";
    let { queue } = await channel.assertQueue(topic, {
        durable: true
    });
    const max = 10;
    let current = 0;
    const intervalId = setInterval(() => {
        current++;
        if (current === max) {
            clearInterval(intervalId);
            return;
        }
        const payload = JSON.stringify({
            foo: "bar",
            current
        });
        channel.sendToQueue(queue, Buffer.from(payload), { persistent: true });
    }, 3000);
}

run()
    .then(() => {
        console.log("Running");
    })
    .catch(err => {
        console.log("error ", err);
    });
And this is my consumer
const getChannel = require("./getChannel");

async function run() {
    const exchangeName = "taskPOC";
    const url = "amqp://queue";
    const channel = await getChannel({ url, exchangeName });
    channel.prefetch(1);
    const topic = "task.init";
    const { queue } = await channel.assertQueue(topic, {
        durable: true
    });
    channel.bindQueue(queue, exchangeName, topic);
    let last = new Date().getTime();
    channel.consume(
        queue,
        msg => {
            const now = new Date().getTime();
            console.log(
                " [x] %s %s:'%s' ",
                msg.fields.routingKey,
                Math.floor((now - last) / 1000),
                msg.content.toString()
            );
            last = now;
            setTimeout(function () {
                channel.ack(msg);
            }, 10000);
        },
        { exclusive: true, noAck: false }
    );
}

run()
    .then(() => {
        console.log("Running");
    })
    .catch(err => {
        console.log("error ", err);
    });
Is there any way to do that in RabbitMQ, or would I need to handle it in my app?
Thanks.
You can use the consumer prefetch setting (see https://www.rabbitmq.com/consumer-prefetch.html). In amqp.node, you set this option using the prefetch function:
channel.prefetch(1, false); // global=false
In this case, each consumer on the channel will have a prefetch of 1. If you want to have different configurations for each consumer, you should create more channels.
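A sketch of what that looks like with amqp.node's promise API, inside an async function (conn is assumed to be an established connection, and doAsyncTask is a placeholder for your handler):

// Each channel carries its own prefetch, so the slow one-by-one
// consumer does not throttle consumers on other channels.
const serialChannel = await conn.createChannel();
serialChannel.prefetch(1); // at most one unacknowledged message at a time

const bulkChannel = await conn.createChannel();
bulkChannel.prefetch(50); // other topics keep a larger window

serialChannel.consume("task.init", async (msg) => {
    await doAsyncTask(msg); // placeholder for your async work
    serialChannel.ack(msg);
}, { noAck: false });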
Hope this helps.
In some demos, they close the connection in a timer function, like the following:
var amqp = require('amqplib/callback_api');

amqp.connect('amqp://localhost', function (err, conn) {
    conn.createChannel(function (err, ch) {
        var q = 'hello';
        var msg = 'Hello World!';
        ch.assertQueue(q, { durable: false });
        ch.sendToQueue(q, Buffer.from(msg));
        console.log(" [x] Sent %s", msg);
    });
    setTimeout(function () { conn.close(); process.exit(0); }, 500);
});
Is there some method to close the connection once the message has been sent successfully?
I coded it something like below:
var q = 'TEST';
var open = require('amqplib').connect('amqp://user:password@localhost:5672');
var sleep = require('sleep');

// Publisher
function publish() {
    let ch;
    var connection;
    let publisher = open.then(function (conn) {
        connection = conn;
        return conn.createChannel();
    }).then(function (chann) {
        ch = chann;
        ch.purgeQueue(q);
        return ch.assertQueue(q, { noCreate: true });
    }).then(function (ok) {
        return ch.sendToQueue(q, Buffer.from('HELLO WORLD'), { noAck: true });
    }).then(function (ok) {
        console.log('MESSAGE_SENT', ok);
        return ok; // resolve('MESSAGE_SENT')
    }).then((ok) => {
        console.log('Assert queue response:', ok);
        setTimeout(function () { connection.close(); }, 500);
    }).catch(function (err) {
        console.log(err);
        console.error('Unable to connect PUBLISHER');
        process.exit(1);
        // reject('MESSAGE_SENT')
    });
    return publisher;
}

publish();

module.exports = {
    publish
};
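For reference, a variant using amqplib's confirm channel closes the connection only after the broker confirms the publish, rather than after a fixed timeout. A sketch (the queue name and message are illustrative):

const amqp = require('amqplib');

async function publishAndClose() {
    const conn = await amqp.connect('amqp://localhost');
    // A confirm channel lets us wait for broker acknowledgements.
    const ch = await conn.createConfirmChannel();
    await ch.assertQueue('TEST', { durable: false });
    ch.sendToQueue('TEST', Buffer.from('HELLO WORLD'));
    await ch.waitForConfirms(); // resolves once the broker has acked the publish
    await conn.close();         // safe to close now: the send has completed
}

publishAndClose().catch(console.error);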