Node.js / TypeScript - AMQP Consumer

I am trying my hand at Node.js/TypeScript for the first time and having a bit of trouble making a consumer for a RabbitMQ queue.
Code:
let amqp = require('amqp');
let connection = amqp.createConnection({url: "amqp://" + RABBITMQ_USER + ":" + RABBITMQ_PASSWORD + "@" + RABBITMQ_HOST + ":" + RABBITMQ_PORT + RABBITMQ_VHOST});
connection.on('ready', function() {
    connection.exchange(RABBITMQ_WORKER_EXCHANGE, function (exchange) {
        connection.queue(RABBITMQ_QUEUE, function (queue) {
            queue.bind(exchange, function() {
                queue.publish(function (message) {
                    console.log('subscribed to queue');
                    let encoded_payload = unescape(message.data);
                    let payload = JSON.parse(encoded_payload);
                    console.log('Received a message:');
                    console.log(payload);
                })
            })
        })
    })
})
It seems to connect to the amqp server and throws no errors but it just sits there and doesn't consume anything. Is there a step I am missing?
Any help would be greatly appreciated,
Thank you.
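For reference: with the node-amqp client used above, a consumer is registered with queue.subscribe rather than queue.publish (publish is for sending messages). A minimal sketch of that one change, keeping the rest of the setup as-is:
queue.bind(exchange, function () {
    // subscribe registers a consumer; the callback fires once per delivery
    queue.subscribe(function (message) {
        let encoded_payload = unescape(message.data);
        let payload = JSON.parse(encoded_payload);
        console.log('Received a message:');
        console.log(payload);
    });
});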

Here is my solution, which is working, based on RabbitMQ's JavaScript tutorial:
https://www.rabbitmq.com/tutorials/tutorial-three-javascript.html
Probably not up to TypeScript standards; feel free to correct me if there's a better way.
#!/usr/bin/env node
require('dotenv').config();
import amqp = require('amqplib/callback_api');
import db = require('./database');

amqp.connect({
    protocol: process.env.RABBITMQ_PROTOCOL,
    hostname: process.env.RABBITMQ_HOST,
    port: process.env.RABBITMQ_PORT,
    username: process.env.RABBITMQ_USER,
    password: process.env.RABBITMQ_PASSWORD,
    vhost: process.env.RABBITMQ_VHOST
}, function (err, conn) {
    conn.createChannel(function (err, ch) {
        // set exchange that is being used
        ch.assertExchange(process.env.RABBITMQ_WORKER_EXCHANGE, 'direct', {durable: true});
        // set queue that is being used
        ch.assertQueue(process.env.RABBITMQ_QUEUE, {durable: true}, function (err, q) {
            console.log(" [*] Waiting for messages in %s. To exit press CTRL+C", q.queue);
            // bind the queue to the exchange
            ch.bindQueue(q.queue, process.env.RABBITMQ_WORKER_EXCHANGE, '');
            // consume from the queue, one message at a time
            ch.consume(q.queue, function (msg) {
                console.log("Message received: %s", msg.content.toString());
                // save message to db
                db.store(msg.content.toString()).then(function () {
                    // acknowledge receipt of message to amqp
                    console.log("Acknowledging message");
                    ch.ack(msg, true);
                });
            }, {noAck: false});
        });
    });
});
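To feed the consumer above, a test publisher along these lines should work (a sketch, assuming the same .env variables; the empty-string routing key must match the one used in bindQueue):
const amqp = require('amqplib/callback_api');
require('dotenv').config();

amqp.connect({
    protocol: process.env.RABBITMQ_PROTOCOL,
    hostname: process.env.RABBITMQ_HOST,
    port: process.env.RABBITMQ_PORT,
    username: process.env.RABBITMQ_USER,
    password: process.env.RABBITMQ_PASSWORD,
    vhost: process.env.RABBITMQ_VHOST
}, function (err, conn) {
    if (err) throw err;
    conn.createChannel(function (err, ch) {
        if (err) throw err;
        ch.assertExchange(process.env.RABBITMQ_WORKER_EXCHANGE, 'direct', {durable: true});
        // routing key '' matches the consumer's bindQueue call above
        ch.publish(process.env.RABBITMQ_WORKER_EXCHANGE, '', Buffer.from(JSON.stringify({hello: 'world'})));
    });
});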

Another answer, using the amqp-ts library:
import * as Amqp from "amqp-ts";

var connection = new Amqp.Connection("amqp://localhost");
var exchange = connection.declareExchange("ExchangeName");
var queue = connection.declareQueue("QueueName");
queue.bind(exchange);
queue.activateConsumer((message) => {
    console.log("Message received: " + message.getContent());
});

// it is possible that the following message is not received because
// it can be sent before the queue, binding or consumer exist
var msg = new Amqp.Message("Test");
exchange.send(msg);

connection.completeConfiguration().then(() => {
    // the following message will be received because
    // everything you defined earlier for this connection now exists
    var msg2 = new Amqp.Message("Test2");
    exchange.send(msg2);
});

Related

Close consumer connection to AMQP library in Nodejs

I use AMQP in my application. I want to close the consumer's AMQP connection after all the queued messages have been received, but I don't know how to handle it. I would be very grateful if someone could help me. Thank you.
var amqp = require('amqplib');

amqp.connect('amqp://localhost').then(function (conn) {
    process.once('SIGINT', function () { conn.close(); });
    return conn.createChannel().then(function (ch) {
        var ok = ch.assertQueue('hello', {durable: false});
        ok = ok.then(function (_qok) {
            return ch.consume('hello', function (msg) {
                console.log(" [x] Received '%s'", msg.content.toString());
            }, {noAck: false});
        });
        return ok.then(function (_consumeOk) {
            console.log(' [*] Waiting for messages. To exit press CTRL+C');
        });
    });
}).catch(console.warn);
conn.close() closes the connection.
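If the goal is specifically to close once all queued messages have been received, one option (a sketch, not from the original answer) is to ask the broker how many messages remain after each delivery and close when the count reaches zero; this assumes the publisher has already stopped sending to the 'hello' queue:
var amqp = require('amqplib');

amqp.connect('amqp://localhost').then(function (conn) {
    return conn.createChannel().then(function (ch) {
        return ch.assertQueue('hello', {durable: false}).then(function () {
            return ch.consume('hello', function (msg) {
                console.log(" [x] Received '%s'", msg.content.toString());
                ch.ack(msg);
                // checkQueue reports the number of ready messages left in the queue
                ch.checkQueue('hello').then(function (ok) {
                    if (ok.messageCount === 0) {
                        conn.close();
                    }
                });
            }, {noAck: false});
        });
    });
}).catch(console.warn);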

How to consume the latest message from Kafka-consumer using NodeJs?

I have created a NodeJS application to insert data into a MongoDB collection. The insertion is triggered through Kafka; kafka-node is the plugin I have used to call Kafka.
I can create the topic and send a message to the consumer from the producer. The message and topic are taken from the POST request.
This is how I call Kafka. The parameters are the topic and the message.
Every time I call this API, the producer creates a new message and sends it to the consumer. On each call, all previous messages are also returned to the consumer.
I have used the fromOffset: 'earliest' and fromOffset: 'latest' options to restrict the previous messages, but neither works.
Can anyone give me a suggestion?
Version of Kafka-node
"kafka-node": "^5.0.0",
Code I have used
var kafka = require('kafka-node');
const {MongoClient} = require('mongodb');

var url = 'mongodb://127.0.0.1:27017/';
const mongoClient = new MongoClient(url);

var Producer = kafka.Producer,
    client = new kafka.KafkaClient(),
    offset = new kafka.Offset(client),
    Consumer = kafka.Consumer,
    producer = new Producer(client);

producer.on('ready', function () {
    console.log('Producer is ready');
});

producer.on('error', function (err) {
    console.log('Producer is in error state');
    console.log(err);
});

const createProducer = async (req, res, next) => {
    var topic = req.body.topic;
    var sentMessage = JSON.stringify(req.body.messages);
    // payloads for producer.send, built from the request topic and message
    var payloads = [
        { topic: topic, messages: sentMessage, partition: 0 }
    ];
    producer.send(payloads, async function (err, data) {
    });

    client = new kafka.KafkaClient();
    consumer = new Consumer(client,
        [
            { topic: topic, partition: 0 }
        ],
        {
            autoCommit: false,
            fromOffset: 'earliest'
        }
    );

    consumer.on('message', async function (message) {
        console.log("Message : " + JSON.stringify(message));
        try {
            var currentdate = new Date();
            var datetime = "Last Sync: " + currentdate.getDate() + "/"
                + (currentdate.getMonth() + 1) + "/"
                + currentdate.getFullYear() + " @ "
                + currentdate.getHours() + ":"
                + currentdate.getMinutes() + ":"
                + currentdate.getSeconds();
            var abb = await createListing(mongoClient,
                {
                    topic: topic,
                    message: sentMessage,
                    time: datetime
                }
            );
        } catch (e) {
            console.error(":" + e);
        } finally {
        }
    });

    await mongoClient.close();
    res.send({
        message: 'Successfully send data from producer',
        payloads: payloads
    });

    async function createListing(client, newListing) {
        await mongoClient.connect();
        const result = await client.db("sample_airbnb").collection("listingsAndReviews").insertOne(newListing);
        console.log(`New listing created with the following id: ${result.insertedId}`);
        return result.insertedId;
    }
}
Thanks,
Your consumer will always consume all offsets that have not been marked as consumed by its consumer group before.
This means that after consuming a given message (or a batch of messages), you need to commit the highest consumed offset to your Kafka cluster to effectively mark those messages as consumed. Only then will your consumer group not re-consume those messages on startup.
To commit your offsets, you can either use kafka.js's autoCommit feature (which you explicitly disabled in your implementation), or manually commit your offsets using the API provided by kafka.js.
You can find the documentation to both here: https://kafka.js.org/docs/consuming#a-name-auto-commit-a-autocommit
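Since the question actually uses kafka-node rather than kafka.js, the equivalent there (a sketch, assuming the consumer created above with autoCommit: false) is to call consumer.commit after processing:
consumer.on('message', async function (message) {
    // ... process the message (e.g. the MongoDB insert above) ...
    // then mark everything consumed so far as processed for this consumer group
    consumer.commit(function (err, data) {
        if (err) console.log(err);
    });
});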
I made some changes in the code, and now I can retrieve the latest message from my topic.
I created the consumer inside offset.fetchLatestOffsets([topics], cb) and made some changes to the consumer options.
var payloads = [
    { topic: topicName, messages: messageTotopic, partition: 0 }
];
producer.send(payloads, async function (err, data) {
});

var client = new kafka.KafkaClient();
offset.fetchLatestOffsets([topic], async function (error, offsets) {
    if (error)
        console.log(error);
    offsetA = JSON.stringify(offsets[topic][0]);
    console.log('offset Value:: ' + offsetA);
    var consumer = new Consumer(
        client,
        [
            {
                topic: topic,
                partition: 0,
                offset: offsetA - 1, // offset values start from 0
            }
        ], {
            autoCommit: false,
            fromOffset: true,
        }
    );
    consumer.on('message', async function (message) {
        console.log("Message from last offset:: " + JSON.stringify(message)); // will return the latest message
        consumer.close();
    });
});
This way I am also able to overcome the memory-leak issue related to the event emitters in the KafkaClient.

var mqtt = require('mqtt'); shows a syntax error

I am following this tutorial about saving data to a database using MQTT (sensor to MySQL via MQTT). I am at the app_mqtt.js part, where I try to follow the exact same steps to create the JS file, but when I try to run it via Python, it shows this error:
File "app_mqtt.js", line 1
var mqtt = require('mqtt');
^
SyntaxError: invalid syntax
I have already installed mqtt via npm i mqtt and npm install mqtt --save, yet it still gives this error. I really need help with this. Thank you.
var mqtt = require('mqtt');

var Topic = '#'; // subscribe to all topics
var Broker_URL = 'mqtt://192.168.1.123';
var options = {
    clientId: 'MyMQTT',
    port: 1883,
    keepalive: 60
};

var client = mqtt.connect(Broker_URL, options);
client.on('connect', mqtt_connect);
client.on('reconnect', mqtt_reconnect);
client.on('error', mqtt_error);
client.on('message', mqtt_messageReceived);
client.on('close', mqtt_close);

function mqtt_connect() {
    console.log("Connecting MQTT");
    client.subscribe(Topic, mqtt_subscribe);
}

function mqtt_subscribe(err, granted) {
    console.log("Subscribed to " + Topic);
    if (err) { console.log(err); }
}

function mqtt_reconnect(err) {
    console.log("Reconnect MQTT");
    if (err) { console.log(err); }
    client = mqtt.connect(Broker_URL, options);
}

function mqtt_error(err) {
    console.log("Error!");
    if (err) { console.log(err); }
}

function after_publish() {
    // do nothing
}

function mqtt_messageReceived(topic, message, packet) {
    console.log('Topic=' + topic + ' Message=' + message);
}

function mqtt_close() {
    console.log("Close MQTT");
}
As per your code, it looks fine. Note that the SyntaxError in the question comes from running the file with the Python interpreter; a JavaScript file has to be run with Node (node app_mqtt.js), not Python.
1) Try to restart the server.
2) Compare your code with this code (after npm i mqtt --save):
var mqtt = require('mqtt')
var client = mqtt.connect('mqtt://{Your connection string}')

client.on('connect', function () {
    client.subscribe('presence', function (err) {
        if (!err) {
            client.publish('presence', 'Hello mqtt')
        }
    })
})

client.on('message', function (topic, message) {
    // message is Buffer
    console.log(message.toString())
    client.end()
})
3) Lastly, re-install the package.
Note: if the issue persists, please share your code so I can debug it.

Redis subscribe timeout

How can I set a timer for a Redis subscription, so that when a client connects and no message is received from the publisher within 200 ms, the socket emits an event?
This is what I have now:
io.on('connection', function (socket) {
    const sub = redis.createClient();
    sub.subscribe("data");
    sub.on("message", function (channel, message) {
        io.emit("data", JSON.parse(message));
    });
    sub.on("error", function (err) {
        console.log("Error" + err);
    });
    socket.on("disconnect", function () {
        sub.removeListener('message', function () {
            io.emit("disconnected");
        });
    });
});
I know it's a late question, but hopefully this answer can help someone else, or you if you still haven't found one. You can use a timeout function that calls a handler in case the message isn't received (I'm guessing that's what you're asking).
I'm using Redis as well, and I make a call to other databases to check if they have the user with the requested id. To make sure the user doesn't stay waiting, I added a timeout function set to 500 ms. So it's a race, and whichever comes first wins; I unsubscribe the Redis client in both paths to make sure it's a one-off as well.
return User.mongoose.findAsync({ _id: params.id })
    .then(results => {
        if (sails.IS.empty(results)) {
            const request_id = Date.now() + '#' + sails.HOST_NAME + '/user/' + params.id
            sails.REDIS_SUB.subscribe(request_id)
            sails.REDIS_SUB.on('message', (channel, message) => {
                if (channel == request_id) {
                    const data = JSON.parse(message)
                    sails.dlogwarn(params.id + '\t<=\t(' + data.responder + ')')
                    sails.REDIS_SUB.unsubscribe(request_id)
                    return data.params.results
                }
            })
            sails.REDIS_PUB.publish('/user/find', JSON.stringify({
                request_id: request_id,
                params: {
                    _id: params.id
                }
            }))
            setTimeout(() => {
                sails.REDIS_SUB.unsubscribe(request_id)
                return results
            }, Number(process.env.REDIS_MAX_QUERY_TIMEOUT))
        } else return results
    })
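Adapted to the question's socket.io setup, a minimal sketch of the same idea (assuming node_redis, and a hypothetical 'data-timeout' event name) would be:
io.on('connection', function (socket) {
    const sub = redis.createClient();
    // if the publisher stays silent for 200 ms after the client connects,
    // tell the client; the timer is cancelled by the first message
    const timer = setTimeout(function () {
        socket.emit('data-timeout'); // hypothetical event name
    }, 200);
    sub.subscribe('data');
    sub.on('message', function (channel, message) {
        clearTimeout(timer);
        io.emit('data', JSON.parse(message));
    });
    socket.on('disconnect', function () {
        sub.unsubscribe();
        sub.quit();
    });
});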

Simple publishing of many messages to Rabbitmq

I want to send the same message many times in a row, but I need to use a loop. When I use a loop, though, no messages are sent. I am using amqp in Node.js.
Here is the working code for sending a single message. What should I do to send many? I have already tried wrapping a while loop around the connection.publish part, and nothing was sent.
var amqp = require('amqp');
var connection = amqp.createConnection({url: "amqp://tester:tstpsswrd@10.4.52.115:5672"});

connection.on('ready', function () {
    connection.queue('my-queue', function (q) {
        connection.publish('my-queue', 'hi');
    });
});
I'm positive that I am doing something stupid here, or maybe missing something. First time with RabbitMQ.
Update: loop example
var amqp = require('amqp');
var connection = amqp.createConnection({url: "amqp://tester:tstpsswrd@10.4.52.115:5672"});

connection.on('ready', function () {
    connection.queue('my-queue', function (q) {
        while (true) {
            connection.publish('my-queue', 'hi');
        }
    });
});
In a practical scenario you cannot, and should not, have an infinite loop writing to a message broker. Node.js is single-threaded, and a while (true) loop never yields back to the event loop, so the client never gets a chance to flush the buffered publishes onto the socket; that is why nothing was sent. The trigger has to be event-based, or a properly defined number of iterations.
Try this code; you can adjust the for loop according to your requirement:
var amqp = require('amqp');
var connection = amqp.createConnection({ host: 'localhost', port: 5672 });

connection.on('ready', function () {
    for (var i = 0; i < 1000; i++) {
        var status = writeOnQueue("testing the queue" + i);
    }
});

function writeOnQueue(xml) {
    var msg = xml;
    console.log(msg);
    try {
        connection.exchange('test-exchange', {confirm: true}, function (exchange) {
            var publish = exchange.publish('my-queue', msg, { mandatory: false });
            console.log('sent the message success test-exchange');
            return true;
        });
    } catch (e) {
        console.log('Some error occurred. ' + e);
    }
}
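A variant that declares the exchange once on 'ready' and publishes inside the loop avoids re-declaring the exchange on every call (a sketch using the same node-amqp API; it still assumes my-queue is bound to test-exchange):
var amqp = require('amqp');
var connection = amqp.createConnection({ host: 'localhost', port: 5672 });

connection.on('ready', function () {
    // declare the exchange once, then publish all messages through it
    connection.exchange('test-exchange', {confirm: true}, function (exchange) {
        for (var i = 0; i < 1000; i++) {
            exchange.publish('my-queue', 'testing the queue ' + i, { mandatory: false });
        }
    });
});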
