I will explain what I want to achieve and then what I have done so far (without results).
I have two Node services connected to each other through RabbitMQ, using a topic exchange:
What I want is to shut down C1 while messages are still being sent to something.orange.something, then restart C1 and receive all the messages I missed.
What happens now is that each time I restart, my consumer creates a new queue and a new binding in my exchange with the same routing key, so I end up with two queues receiving the same information.
If I configure my queue with the param {exclusive: true}, I solve part of the problem: I no longer have queues without receivers, but I still have the main issue; all messages sent while there is no active receiver are lost.
Is this possible?
Here is my code:
sender:
'use strict';

const amqp = require('amqplib/callback_api');
const logatim = require('logatim');

logatim.setLevel('info');

amqp.connect('amqp://localhost', (err, conn) => {
  conn.createChannel((err, ch) => {
    let ex = 'direct_colors';
    let args = process.argv.slice(2);
    let colors = ['colors.en.green', 'colors.en.yellow', 'colors.es.red'];

    ch.assertExchange(ex, 'topic', {durable: true});
    setInterval(() => {
      let color = colors[Math.floor(Math.random() * 3)];
      let msg = `This is a ${color} message`;
      // Buffer.from replaces the deprecated new Buffer constructor
      ch.publish(ex, color, Buffer.from(msg));
      logatim[color.split('.').pop()].info(msg);
    }, 1000);
  });
});
receiver:
'use strict';

const amqp = require('amqplib/callback_api');
const logatim = require('logatim');

logatim.setLevel('info');

const args = process.argv.slice(2);

amqp.connect('amqp://localhost', (err, conn) => {
  conn.createChannel((err, ch) => {
    var ex = 'direct_colors';

    ch.assertExchange(ex, 'topic', {durable: true});
    ch.assertQueue('', {exclusive: true, durable: true}, (err, q) => {
      logatim.green.info(' [*] Waiting for logs. To exit press CTRL+C');
      args.forEach((arg) => {
        ch.bindQueue(q.queue, ex, arg);
      });
      ch.consume(q.queue, (msg) => {
        logatim[msg.fields.routingKey.split('.').pop()].info(` [x] ${msg.content.toString()}`);
      });
    });
  });
});
You need named queues. When you declare the queue in your receiver, give it a well-known name (a constant), something like
ch.assertQueue('my_service_1_queue', {durable: true}, ...
The basic examples of this are in the RabbitMQ tutorials.
When your consumer goes down and restarts, it will consume from the same named queue.
NOTE: you don't need the exclusive queue there, as it will be deleted when your consumer goes down.
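For illustration, here is a minimal sketch of the receiver with a named durable queue; the queue name, the 'colors.#' binding key, and the explicit acks are assumptions for this sketch, not part of the original code:

'use strict';

const amqp = require('amqplib/callback_api');

amqp.connect('amqp://localhost', (err, conn) => {
  conn.createChannel((err, ch) => {
    const ex = 'direct_colors';

    ch.assertExchange(ex, 'topic', {durable: true});
    // a named, durable queue survives consumer restarts and keeps
    // buffering matching messages while no consumer is connected
    ch.assertQueue('my_service_1_queue', {durable: true}, (err, q) => {
      ch.bindQueue(q.queue, ex, 'colors.#'); // '#' matches any routing key under colors
      ch.consume(q.queue, (msg) => {
        console.log(` [x] ${msg.content.toString()}`);
        ch.ack(msg);
      }, {noAck: false});
    });
  });
});

For the buffered messages to also survive a broker restart, the sender would additionally publish with {persistent: true}, e.g. ch.publish(ex, color, Buffer.from(msg), {persistent: true}).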
I have created a NodeJS application to insert data into a MongoDB collection. The database insertion is done through Kafka; kafka-node is the plugin I have used to call Kafka.
I can create the topic and send a message from the producer to the consumer. The message and topic are taken from the POST request.
This is how I call Kafka. The parameters are topic and message.
Every time I call this API, the producer creates a new message and sends it to the consumer, but on each call all the previous messages are also returned to the consumer.
I have used the fromOffset: 'earliest' and fromOffset: 'latest' options to exclude the previous messages, but it is not working.
Can anyone give me a suggestion?
Version of Kafka-node
"kafka-node": "^5.0.0",
Code I have used
var kafka = require('kafka-node');
const {MongoClient} = require('mongodb');

var url = 'mongodb://127.0.0.1:27017/';
const mongoClient = new MongoClient(url);

var Producer = kafka.Producer,
    client = new kafka.KafkaClient(),
    offset = new kafka.Offset(client),
    Consumer = kafka.Consumer,
    producer = new Producer(client);

producer.on('ready', function () {
  console.log('Producer is ready');
});

producer.on('error', function (err) {
  console.log('Producer is in error state');
  console.log(err);
});

const createProducer = async (req, res, next) => {
  var topic = req.body.topic;
  var sentMessage = JSON.stringify(req.body.messages);
  // payloads was missing from the original snippet; its shape follows
  // the producer payload shown in the fix further below
  var payloads = [
    {topic: topic, messages: sentMessage, partition: 0}
  ];

  producer.send(payloads, async function (err, data) {
  });

  client = new kafka.KafkaClient();
  var consumer = new Consumer(
    client,
    [
      {topic: topic, partition: 0}
    ],
    {
      autoCommit: false,
      fromOffset: 'earliest'
    }
  );

  consumer.on('message', async function (message) {
    console.log("Message : " + JSON.stringify(message));
    try {
      var currentdate = new Date();
      var datetime = "Last Sync: " + currentdate.getDate() + "/"
        + (currentdate.getMonth() + 1) + "/"
        + currentdate.getFullYear() + " # "
        + currentdate.getHours() + ":"
        + currentdate.getMinutes() + ":"
        + currentdate.getSeconds();
      var abb = await createListing(mongoClient, {
        topic: topic,
        message: sentMessage,
        time: datetime
      });
    } catch (e) {
      console.error(":" + e);
    }
  });

  await mongoClient.close();

  res.send({
    message: 'Successfully sent data from producer',
    payloads: payloads
  });

  async function createListing(client, newListing) {
    await mongoClient.connect();
    const result = await client.db("sample_airbnb")
      .collection("listingsAndReviews")
      .insertOne(newListing);
    console.log(`New listing created with the following id: ${result.insertedId}`);
    return result.insertedId;
  }
};
The messages consumed at the consumer are shown below (output omitted); each call returns all the previous messages again.
Thanks,
Your consumer will always consume all offsets that have not been marked as consumed by its consumer group before.
This means that after consuming a given message (or a batch of messages), you need to commit the highest consumed offset to your Kafka cluster to effectively mark those messages as consumed. Only then will your consumer group not re-consume those messages on startup.
To commit your offsets, you can either use kafka.js's autoCommit feature (which you explicitly disabled in your implementation), or manually commit your offsets using the API provided by kafka.js.
You can find the documentation to both here: https://kafka.js.org/docs/consuming#a-name-auto-commit-a-autocommit
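Since the code in the question actually uses kafka-node rather than kafka.js, here is a minimal sketch of a manual commit with kafka-node's Consumer; the topic and group names are placeholders:

var kafka = require('kafka-node');

var client = new kafka.KafkaClient();
var consumer = new kafka.Consumer(
  client,
  [{topic: 'my-topic', partition: 0}], // placeholder topic
  {autoCommit: false, groupId: 'my-group'} // placeholder group
);

consumer.on('message', function (message) {
  // ... process the message (e.g. insert into MongoDB) ...
  // mark everything consumed so far as done for this consumer group;
  // on restart the group resumes after the committed offset
  consumer.commit(function (err, data) {
    if (err) console.error(err);
  });
});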
I made some changes in the code, and now I can retrieve the latest message from my topic.
I created the consumer inside offset.fetchLatestOffsets([topics], cb) and made some changes in the consumer options.
var payloads = [
  {topic: topicName, messages: messageTotopic, partition: 0}
];

producer.send(payloads, async function (err, data) {
});

var client = new kafka.KafkaClient();

offset.fetchLatestOffsets([topic], async function (error, offsets) {
  if (error)
    console.log(error);

  var offsetA = JSON.stringify(offsets[topic][0]);
  console.log('offset Value:: ' + offsetA);

  var consumer = new Consumer(
    client,
    [
      {
        topic: topic,
        partition: 0,
        offset: offsetA - 1 // Offset value starts from 0
      }
    ],
    {
      autoCommit: false,
      fromOffset: true
    }
  );

  consumer.on('message', async function (message) {
    console.log("Message from last offset:: " + JSON.stringify(message)); // will return the latest message
    consumer.close();
  });
});
This way I was also able to overcome the memory-leak issue related to the event emitters in the KafkaClient.
I'm currently having problems figuring out how to get my MQTT message event back into my REST API response body, which is written in NodeJS. My current setup is App -> NodeJS REST API -> MQTT broker inside an RPi 3.
This is my MQTTHandler.js class, where I have put all my reusable MQTT functions:
const mqtt = require('mqtt')

class MQTTHandler {
  constructor (host) {
    this.client = null
    this.host = host
  }

  connect () {
    this.client = mqtt.connect(this.host)

    // arrow functions keep `this` bound to the handler instance;
    // with a plain function, this.client would not resolve here
    this.client.on('error', (err) => {
      console.log(err)
      this.client.end()
    })

    this.client.on('connect', () => {
      console.log('MQTT client connected...')
    })

    // I need this to send message back to app.js
    this.client.on('message', (topic, message) => {
      if (!message.toString()) message = 'null'
      console.log(JSON.parse(message.toString()))
    })

    this.client.on('close', () => {
      console.log('MQTT client disconnected...')
    })
  }

  subscribeTopic (topic) {
    this.client.subscribe(topic)
  }

  unsubscribeTopic (topic) {
    this.client.unsubscribe(topic)
  }

  sendMessage (topic, message) {
    this.client.publish(topic, message)
  }
}

module.exports = MQTTHandler
And below is a short snippet of my app.js
const MQTTHandler = require('./mqtt.handler')
...

var mqttClient = new MQTTHandler('mqtt://127.0.0.1')
mqttClient.connect()

app.get('/hello', function (req, res) {
  mqttClient.subscribeTopic('topic')
  mqttClient.sendMessage('topic', 'hello world')

  // I need to return the MQTT message event here
  // res.json(<mqtt message here>)
  res.end()
})
I have already tried using NodeJS' event emitter but it doesn't seem to work. Any help or suggestions would be much appreciated, thank you!
You are trying to mix a synchronous protocol (HTTP) with an asynchronous protocol (MQTT). These two paradigms don't mix easily.
When you publish an MQTT message you have no idea how many clients may be subscribed to that topic; it could be zero, it could be many. There is also no guarantee that any of them will send a reply, so you need to include a timeout. (You also need to include a request id in the payload so you can match a response to its request, since you can't say anything about the order responses may come in.)
Your example code is only using one topic, which is very bad as you will end up needing to filter request messages out from response messages. Better to use two different topics (MQTT v5 even has a message header to specify the topic the response should be sent on).
Having said all that, it is possible to build something that will work (I will use request and reply topics).
var inflightRequests = {};

// interval to clear out requests waiting for a response
// after 3 seconds
var timer = setInterval(function () {
  var now = Date.now();
  var keys = Object.keys(inflightRequests);
  for (var i = 0; i < keys.length; i++) {
    var waiting = inflightRequests[keys[i]];
    var diff = now - waiting.timestamp;
    // 3 second timeout
    if (diff > 3000) {
      waiting.res.status(408).send({});
      delete inflightRequests[keys[i]];
    }
  }
}, 500);

// on message handler to reply to the HTTP request
client.on('message', function (topic, msg) {
  if (topic === 'reply') {
    var payload = JSON.parse(msg);
    var waiting = inflightRequests[payload.requestId];
    if (waiting) {
      waiting.res.send(payload.body);
      delete inflightRequests[payload.requestId];
    } else {
      // response arrived too late
    }
  }
});

// HTTP route handler.
app.get('/hello', function (req, res) {
  // using the timestamp as request id, as we don't have anything better in this example
  var reqId = Date.now();
  var waiting = {
    timestamp: reqId,
    res: res
  };
  inflightRequests[reqId] = waiting;
  var message = {
    requestId: reqId,
    payload: 'hello world'
  };
  client.publish('request', JSON.stringify(message));
});
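For completeness, a hypothetical responder (not part of the original answer) that listens on the request topic and echoes a reply, so the flow above can be exercised end to end:

const mqtt = require('mqtt');

// hypothetical responder client, e.g. running on the RPi
const responder = mqtt.connect('mqtt://127.0.0.1');

responder.on('connect', function () {
  responder.subscribe('request');
});

responder.on('message', function (topic, msg) {
  if (topic === 'request') {
    var req = JSON.parse(msg);
    // echo the requestId back so the HTTP side can match the response
    responder.publish('reply', JSON.stringify({
      requestId: req.requestId,
      body: {reply: 'hello back'}
    }));
  }
});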
I have written a node module to connect to Kafka.
kafka-connect.js
var kafka = require('kafka-node');

var Producer = kafka.Producer,
    client = new kafka.Client(),
    producer = new Producer(client);

module.exports = {
  producer
};
KafkaService.js
const {producer} = require('./kafka-connect');

producer.on('error', function (err) {
  console.log('Producer is in error state');
  console.log(err);
});

producer.on('ready', function () {
  console.log('Producer is ready');
});

const KafkaService = {
  sendRecord: (kafkaTopic, data, callback = (err, data) => console.log(err)) => {
    var sendingData = {};
    sendingData.event_data = JSON.stringify(data);
    sendingData.event_type = 6;

    const record = [
      {
        topic: kafkaTopic,
        messages: sendingData,
        partition: 0
      }
    ];

    producer.send(record, callback);
  }
};

module.exports = {
  KafkaService
};
Now I am using these two to publish data to Kafka. Following is the code to do so:
const {KafkaService} = require('../kafka/KafkaService');
const {newOrder} = require('../objs/newOrderEvent');
KafkaService.sendRecord("incentive_order_data", newOrder);
But running this file gives an error:
{ BrokerNotAvailableError: Broker not available
at new BrokerNotAvailableError (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/errors/BrokerNotAvailableError.js:11:9)
at Client.loadMetadataForTopics (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:389:15)
at Client.send (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:562:10)
at /Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:241:10
at /Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:473:16
at iteratorCallback (/Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:1064:13)
at /Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:969:16
at buildRequest (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:257:24)
at /Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:3110:16
at eachOfArrayLike (/Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:1069:9) message: 'Broker not available' }
Producer is ready
Apparently, the publish method is being called before the producer is ready. I am not able to come up with a solution to this. One way might be to bring Promises into the picture, but that is just my hypothesis; the exact method might be different.
You're not waiting for the producer to be ready.
You'll need to do this
producer.on('ready', function () {
  console.log('Producer is ready');
  // send data here
});
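Building on the Promise idea from the question, here is a minimal sketch (an assumption, not part of the original answer) that wraps the ready event in a Promise so callers can await it before sending:

const {producer} = require('./kafka-connect');

// resolves once the underlying producer emits 'ready'
const producerReady = new Promise((resolve, reject) => {
  producer.on('ready', () => resolve(producer));
  producer.on('error', reject);
});

async function sendRecord(kafkaTopic, data) {
  await producerReady; // later calls resolve immediately
  return new Promise((resolve, reject) => {
    const record = [
      {topic: kafkaTopic, messages: JSON.stringify(data), partition: 0}
    ];
    producer.send(record, (err, result) => err ? reject(err) : resolve(result));
  });
}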
I am trying my hand at node.js/typescript for the first time and having a bit of trouble making a consumer for a rabbit queue.
Code:
let amqp = require('amqp');

let connection = amqp.createConnection({url: "amqp://" + RABBITMQ_USER + ":" + RABBITMQ_PASSWORD + "@" + RABBITMQ_HOST + ":" + RABBITMQ_PORT + RABBITMQ_VHOST});

connection.on('ready', function () {
  connection.exchange(RABBITMQ_WORKER_EXCHANGE, function (exchange) {
    connection.queue(RABBITMQ_QUEUE, function (queue) {
      queue.bind(exchange, function () {
        queue.publish(function (message) {
          console.log('subscribed to queue');
          let encoded_payload = unescape(message.data);
          let payload = JSON.parse(encoded_payload);
          console.log('Received a message:');
          console.log(payload);
        })
      })
    })
  })
})
It seems to connect to the AMQP server and throws no errors, but it just sits there and doesn't consume anything. Is there a step I am missing?
Any help would be greatly appreciated.
Thank you.
Here is my working solution, based on the RabbitMQ JavaScript tutorial:
https://www.rabbitmq.com/tutorials/tutorial-three-javascript.html
It's probably not up to TypeScript standards, so feel free to correct me if there's a better way.
#!/usr/bin/env node
require('dotenv').config();

import amqp = require('amqplib/callback_api');
import db = require('./database');

amqp.connect({
  protocol: process.env.RABBITMQ_PROTOCOL,
  hostname: process.env.RABBITMQ_HOST,
  port: process.env.RABBITMQ_PORT,
  username: process.env.RABBITMQ_USER,
  password: process.env.RABBITMQ_PASSWORD,
  vhost: process.env.RABBITMQ_VHOST
}, function (err, conn) {
  conn.createChannel(function (err, ch) {
    // set exchange that is being used
    ch.assertExchange(process.env.RABBITMQ_WORKER_EXCHANGE, 'direct', {durable: true});
    // set queue that is being used
    ch.assertQueue(process.env.RABBITMQ_QUEUE, {durable: true}, function (err, q) {
      console.log(" [*] Waiting for messages in %s. To exit press CTRL+C", q.queue);
      // bind the queue to the exchange
      ch.bindQueue(q.queue, process.env.RABBITMQ_WORKER_EXCHANGE, '');
      // consume from the queue, one message at a time.
      ch.consume(q.queue, function (msg) {
        console.log("Message received: %s", msg.content.toString());
        // save message to db
        db.store(msg.content.toString()).then(function () {
          // acknowledge receipt of message to amqp
          console.log("Acknowledging message");
          ch.ack(msg, true);
        });
      }, {noAck: false});
    });
  });
});
import * as Amqp from "amqp-ts";

var connection = new Amqp.Connection("amqp://localhost");
var exchange = connection.declareExchange("ExchangeName");
var queue = connection.declareQueue("QueueName");
queue.bind(exchange);
queue.activateConsumer((message) => {
  console.log("Message received: " + message.getContent());
});

// it is possible that the following message is not received because
// it can be sent before the queue, binding or consumer exist
var msg = new Amqp.Message("Test");
exchange.send(msg);

connection.completeConfiguration().then(() => {
  // the following message will be received because
  // everything you defined earlier for this connection now exists
  var msg2 = new Amqp.Message("Test2");
  exchange.send(msg2);
});
I am using Google Pub/Sub in NodeJS.
I have a service which publishes messages, and a worker which listens to the messages and runs some processing on them.
Now I want my service to pull five messages at a time and pull another one whenever it finishes processing.
How do I implement this?
Here is my starting code; how do I continue?
var pubsubClient = require('google-cloud').pubsub({
  projectId: 'my-project'
});

var topic = pubsubClient.topic('my-first-pub-sub');

var options = {
  reuseExisting: true,
  ackDeadlineSeconds: 90,
  autoAck: true,
  interval: 5000
};

topic.subscribe('MySub', options, function (err, subscription, apiResponse) {
  // Register listeners to start pulling for messages.
  function onError(err) {
    console.log("err", err);
  }
  function onMessage(message) {
    console.log("subscription", message);
  }
  // subscription.on('error', onError);
  // subscription.on('message', onMessage);

  var opts = {
    maxResults: 1
  };

  subscription.pull(opts, function (err, message) {
    console.log("subscription", err);
    console.log("subscription", message);
  });
});
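A minimal sketch of one possible continuation, reusing only the subscription.pull call and maxResults option already shown above; the batch size of five and the recursive re-pull are assumptions, not a verified google-cloud API pattern:

// pull a batch of up to 5 messages, process it, then pull again
function pullBatch(subscription) {
  subscription.pull({maxResults: 5}, function (err, messages) {
    if (err) {
      console.log("err", err);
      return;
    }
    // with autoAck: true the pulled messages are already acknowledged,
    // so they only need to be processed before asking for more
    (messages || []).forEach(function (message) {
      console.log("processing", message);
    });
    // pull the next batch once this one is done
    pullBatch(subscription);
  });
}

topic.subscribe('MySub', options, function (err, subscription) {
  if (err) return console.log("err", err);
  pullBatch(subscription);
});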