RabbitMQ HeartBeat Timeout issue - node.js

I'm currently using RabbitMQ as a message broker. Recently I've been seeing many HeartBeat Timeout errors in my error log.
The RabbitMQ log also shows a large number of connections coming from a wide range of ports. I don't know why there are so many connections; I use the default setup without any further configuration.
Here is my code used to publish and consume:
import { connect } from 'amqplib/callback_api';
import handler from '../calculator/middleware';
import { logger } from '../config/logger';

function consumeRabbitMQServer(serverURL, exchange, queue) {
  connect(serverURL, (error0, connection) => {
    if (error0) throw error0;
    // With the callback API, the channel is handed to the callback.
    connection.createChannel((error1, channel) => {
      if (error1) throw error1;
      channel.assertExchange(exchange, 'direct', { durable: true });
      channel.assertQueue(queue, { durable: true }, (error2) => {
        if (error2) throw error2;
        logger.info(`Connect to ${serverURL} using queue ${queue}`);
      });
      channel.prefetch(1);
      channel.bindQueue(queue, exchange, 'info');
      channel.consume(
        queue,
        (msg) => {
          handler(JSON.parse(msg.content.toString()))
            .then(() => channel.ack(msg))
            .catch(() => channel.reject(msg));
        },
        { noAck: false } // messages are acked/rejected manually above
      );
    });
  });
}

export default consumeRabbitMQServer;
Code used to publish a message:
import createConnection from './connection';
import { logger } from '../config/logger';

async function publishToRabbitMQServer(serverURL, exchange, queue) {
  const connection = await createConnection(serverURL);
  const channel = await connection.createChannel();
  await channel.assertExchange(exchange, 'direct', { durable: true });
  await channel.assertQueue(queue, { durable: true });
  await channel.bindQueue(queue, exchange, 'info');
  logger.info(`Publish to ${serverURL} using queue ${queue}`);
  return channel;
}

export default publishToRabbitMQServer;
Whenever I start my server, I run this piece of code to create a client that consumes from RabbitMQ:
const { RABBITMQ_SERVER } = process.env;
consumeRabbitMQServer(RABBITMQ_SERVER, 'abc', 'abc');
And this piece of code is used whenever a message needs to be published to RabbitMQ:
const payloads = call.request.payloads;
const { RABBITMQ_SERVER } = process.env;
const channel = await publishToRabbitMQServer(RABBITMQ_SERVER, 'abc', 'abc');
for (let i = 0; i < payloads.length; i++) {
channel.publish('abc', 'info', Buffer.from(JSON.stringify(payloads[i])));
}
I'm reusing code from the RabbitMQ documentation, and it seems this problem happens whenever many users publish messages at once. Thanks for helping.
Update: I think the root cause is that I create a new connection every time I need to publish a message. I'm working on improving it; any help is appreciated. Many thanks.
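One way to act on that update is to create the connection and channel once and reuse them for every publish. The sketch below assumes the amqplib promise API and a module-level cache; getPublishChannel is a hypothetical helper name, not part of the original code.
import amqp from 'amqplib';

let channelPromise = null;

// Lazily create one connection + channel and cache the promise so that
// concurrent publishers share it instead of opening new connections.
function getPublishChannel(serverURL, exchange) {
  if (!channelPromise) {
    channelPromise = amqp
      .connect(serverURL)
      .then(async (connection) => {
        const channel = await connection.createChannel();
        await channel.assertExchange(exchange, 'direct', { durable: true });
        return channel;
      })
      .catch((err) => {
        channelPromise = null; // allow the next call to retry
        throw err;
      });
  }
  return channelPromise;
}

export default getPublishChannel;
The request handler would then await the cached channel instead of opening a new connection per request:
const channel = await getPublishChannel(RABBITMQ_SERVER, 'abc');
for (const payload of payloads) {
  channel.publish('abc', 'info', Buffer.from(JSON.stringify(payload)));
}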

Related

What could be the reasons for not receiving messages in RabbitMQ?

This is my first time testing RabbitMQ with node.js, and I used amqplib.
First, I run
node ./messages/consumer.js
and the output is as follows:
Connected to RabbitMQ
Channel created
Waiting for messages...
Second, I run
node ./messages/producer.js
and the output is as follows:
Connected to RabbitMQ
Channel created
Message sent: Hello, world!
Connection to RabbitMQ closed
From the RabbitMQ management console, I can see that test_exchange, test_queue, and test_key exist, but there is no information about any messages. The consumer terminal did not log any indication of receiving a message either; it still displays "Waiting for messages...". Could you point out what I have overlooked?
//config.js
module.exports = {
rabbitmq: {
host: 'localhost',
port: 5672,
username: 'guest',
password: 'guest',
vhost: '/',
exchange: 'test_exchange',
queue: 'test_queue',
routingKey: 'test_key'
}
}
//rabbitmq.js
const amqp = require("amqplib");
const config = require("../config/config");
class RabbitMQ {
constructor() {
this.connection = null;
this.channel = null;
}
async connect() {
try {
const { host, port, username, password, vhost } = config.rabbitmq;
this.connection = await amqp.connect(
`amqp://${username}:${password}@${host}:${port}/${vhost}`
);
console.log("Connected to RabbitMQ");
return this.connection;
} catch (error) {
console.error("Error connecting to RabbitMQ", error);
}
}
async createChannel() {
try {
if (!this.connection) {
await this.connect();
}
this.channel = await this.connection.createChannel();
console.log("Channel created");
return this.channel;
} catch (error) {
console.error("Error creating channel", error);
}
}
async close() {
try {
await this.connection.close();
console.log("Connection to RabbitMQ closed");
} catch (error) {
console.error("Error closing connection to RabbitMQ", error);
}
}
}
module.exports = new RabbitMQ();
//producer.js
const rabbitmq = require('../lib/rabbitmq');
const config = require('../config/config');
async function produceMessage(message) {
try {
const channel = await rabbitmq.createChannel();
const exchange = config.rabbitmq.exchange;
const queue = config.rabbitmq.queue;
const key = config.rabbitmq.routingKey;
await channel.assertExchange(exchange, 'direct', { durable: true });
await channel.assertQueue(queue, { durable: true });
await channel.bindQueue(queue, exchange, key);
const messageBuffer = Buffer.from(message);
await channel.publish(exchange, key, messageBuffer);
console.log(`Message sent: ${message}`);
await rabbitmq.close();
} catch (error) {
console.error('Error producing message', error);
}
}
produceMessage('Hello, world!');
//consumer.js
const rabbitmq = require('../lib/rabbitmq');
const config = require('../config/config');
async function consumeMessage() {
try {
const channel = await rabbitmq.createChannel();
const exchange = config.rabbitmq.exchange;
const queue = config.rabbitmq.queue;
const key = config.rabbitmq.routingKey;
await channel.assertExchange(exchange, 'direct', { durable: true });
await channel.assertQueue(queue, { durable: true });
await channel.bindQueue(queue, exchange, key);
channel.consume(queue, (msg) => {
console.log(`Message received: ${msg.content.toString()}`);
channel.ack(msg);
}, { noAck: false });
console.log('Waiting for messages...');
} catch (error) {
console.error('Error consuming message', error);
}
}
consumeMessage();
Problem:
Your message is failing to be delivered because you are closing the connection immediately after executing the publish command. You can verify this by commenting out the line await rabbitmq.close(); in producer.js.
Solution:
If you want to close the connection right after sending the message, you can create a confirm channel instead of a normal channel, which lets you wait for the broker's publish acknowledgements before closing.
1. Channel Creation
Change the channel creation line in rabbitmq.js file:
this.channel = await this.connection.createConfirmChannel();
2. Producer:
In producer.js, call the waitForConfirms function before closing the connection:
await channel.waitForConfirms();
await rabbitmq.close();
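Put together, the relevant parts of the two files might look like the sketch below (based on the code above; with a confirm channel, waitForConfirms() resolves once the broker has acknowledged the published messages):
// rabbitmq.js -- create a confirm channel instead of a plain channel
async createChannel() {
  try {
    if (!this.connection) {
      await this.connect();
    }
    this.channel = await this.connection.createConfirmChannel();
    console.log("Channel created");
    return this.channel;
  } catch (error) {
    console.error("Error creating channel", error);
  }
}

// producer.js -- wait for the broker's confirms before closing
channel.publish(exchange, key, messageBuffer);
console.log(`Message sent: ${message}`);
await channel.waitForConfirms();
await rabbitmq.close();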

AMQP + NodeJS wait for channel

I have a service in FeathersJS that initiates a connection to RabbitMQ; the issue is how to wait for the channel to be ready before the service starts receiving requests:
class Service {
constructor({ amqpConnection, queueName }) {
this.amqpConnection = amqpConnection;
this.queueName = queueName;
this.replyQueueName = queueName + "Reply"
}
async create(data, params) {
if (!this.channel) await this.createChannel();
const correlationId = asyncLocalStorage.getStore();
return new Promise((resolve) => {
this.channel.responseEmitter.once(correlationId, resolve);
this.channel.sendToQueue(this.queueName, Buffer.from(data), {
correlationId,
replyTo: this.replyQueueName,
});
});
}
async createChannel() {
let connection = this.amqpConnection();
let channel = await connection.createChannel();
await channel.assertQueue(this.queueName, {
durable: false,
});
this.channel = channel;
channel.responseEmitter = new EventEmitter();
channel.responseEmitter.setMaxListeners(0);
channel.consume(
this.replyQueueName,
(msg) => {
channel.responseEmitter.emit(
msg.properties.correlationId,
msg.content.toString("utf8")
);
},
{ noAck: true }
);
}
....
}
Waiting for the channel to be created during a request seems like a waste. How should this be done "correctly"?
Feathers services can implement a setup method which will be called when the server is started (or you call app.setup() yourself):
class Service {
async setup () {
await this.createChannel();
}
}
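For completeness, a sketch of how setup() and create() might fit together, reusing the constructor, createChannel(), and asyncLocalStorage from the question; setup() opens the channel once at startup, so create() no longer needs the lazy check:
class Service {
  constructor({ amqpConnection, queueName }) {
    this.amqpConnection = amqpConnection;
    this.queueName = queueName;
    this.replyQueueName = queueName + "Reply";
  }

  // Called once by Feathers when the server starts (or via app.setup()).
  async setup() {
    await this.createChannel();
  }

  async create(data, params) {
    // The channel already exists here, so the request just uses it.
    const correlationId = asyncLocalStorage.getStore();
    return new Promise((resolve) => {
      this.channel.responseEmitter.once(correlationId, resolve);
      this.channel.sendToQueue(this.queueName, Buffer.from(data), {
        correlationId,
        replyTo: this.replyQueueName,
      });
    });
  }

  // createChannel() as shown in the question
}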

Unable to properly establish a connection between React Native client and Node.js server with redux-saga and socket.io

Quick context: I'm trying to build a react native prototype of a comment page where users can receive live updates (comments, users entering the comment screen, users leaving, etc.). To do this, I am using react-redux, redux-saga, socket.io, and node.js (server). I'm new to redux-saga so I might be missing something obvious here, so hang on, please... The culprit definitely lies in the watchCommentActions function/saga...
The problem: As soon as it is done mounting, the comment screen dispatches the following action { type: comment.room.join, value }, which is then correctly acknowledged by rootSaga, however, when trying to connect to the socket using a promise-resolve structure via const socket = yield call(connect); the promise never resolves, which blocks the generator (it does not proceed to the next yield). What's weird is that on the other side the server does log the connection to the socket, so the connection client --> server appears to be ok. Also, by hot reloading the app I can manage to resolve the promise (it's like the generator needs to run twice to resolve the socket connection), but then the socket.emit("join-room") never reaches the server and the generator gets stuck again.
Similarly, when I try to fire the write generator by posting a comment and thus dispatching { type: comment.post.start, value }, the socket.emit("comment", { text: value.text }) does not reach the server either.
To sum it up briefly nothing's really working and no error is getting thrown... GREAT.
Last words: Before moving my socket logic to saga the socket connection was working seamlessly. I've also tried to reuse the documentation's implementation with channels by using the same connect function instead of createWebSocketConection (https://redux-saga.js.org/docs/advanced/Channels.html) but the promise-resolve-socket situation still occurs. Also, I've noticed similar questions derived from the same git repo I've studied to understand the sagas logic (https://github.com/kuy/redux-saga-chat-example/blob/master/src/client/sagas.js), however, none of them allowed me to understand what's wrong with my implementation. Finally, if there is a better way to implement this logic with redux-saga, I am interested, all I want is a robust, centralized, and reusable implementation.
Sagas/index.js
import { all, takeEvery, takeLatest } from "redux-saga/effects";
import { comment } from "../Reducers/commentCacheReducer";
import { like } from "../Reducers/postsCacheReducer";
import { posts } from "../Reducers/postsReducer";
import flow from "./commentSagas";
import { likePost, unlikePost } from "./likeSagas";
import { fetchPosts } from "./postsSagas";
function* watchLikeActions() {
yield takeLatest(like.add.start, likePost);
yield takeLatest(like.remove.start, unlikePost);
}
function* watchFetchActions() {
yield takeEvery(posts.fetch.start, fetchPosts);
}
function* watchCommentsActions() {
yield takeEvery(comment.room.join, flow);
}
export default function* rootSaga() {
yield all([watchLikeActions(), watchFetchActions(), watchCommentsActions()]);
}
Sagas/commentSaga.js
import { eventChannel } from "redux-saga";
import { call, cancel, fork, put, take } from "redux-saga/effects";
import io from "socket.io-client";
import { endpoint } from "../../API/ServerAPI";
import { addUser, fetchComment, leaveRoom, removeUser } from "../Actions/commentActions";
import { comment } from "../Reducers/commentCacheReducer";
function connect() {
const socket = io(endpoint);
return new Promise((resolve) => {
socket.on("connection", () => {
resolve(socket);
});
});
}
function subscribe(socket) {
return new eventChannel((emit) => {
socket.on("users.join-room", ({ userId }) => {
emit(addUser({ userId }));
});
socket.on("users.leave-room", ({ userId }) => {
emit(removeUser({ userId }));
});
socket.on("comments.new", ({ comments }) => {
emit(fetchComment({ comments }));
});
socket.on("users.join-room", ({ userId }) => {
emit(addUser({ userId }));
});
return () => {};
});
}
function* read(socket) {
const channel = yield call(subscribe, socket);
while (true) {
let action = yield take(channel);
yield put(action);
}
}
function* write(socket) {
while (true) {
const { value } = yield take(comment.post.start);
socket.emit("comment", { text: value.text });
}
}
function* handleIO(socket) {
yield fork(read, socket);
yield fork(write, socket);
}
export default function* flow() {
const socket = yield call(connect);
socket.emit("join-room", (res) => {
console.log(JSON.stringify(res));
});
const task = yield fork(handleIO, socket);
let action = yield take(leaveRoom);
yield cancel(task);
yield put(action);
socket.emit("leave-room");
}
server.js
const http = require("http");
const app = require("./app");
const socketIo = require("socket.io");
const mongoose = require("mongoose");
const normalizePort = (val) => {
const port = parseInt(val, 10);
if (isNaN(port)) {
return val;
}
if (port >= 0) {
return port;
}
return false;
};
const port = normalizePort(process.env.PORT || "3000");
app.set("port", port);
const errorHandler = (error) => {
if (error.syscall !== "listen") {
throw error;
}
const address = server.address();
const bind = typeof address === "string" ? "pipe " + address : "port: " + port;
switch (error.code) {
case "EACCES":
console.error(bind + " requires elevated privileges.");
process.exit(1);
break;
case "EADDRINUSE":
console.error(bind + " is already in use.");
process.exit(1);
break;
default:
throw error;
}
};
const server = http.createServer(app);
const io = socketIo(server);
server.on("error", errorHandler);
server.on("listening", () => {
const address = server.address();
const bind = typeof address === "string" ? "pipe " + address : "port " + port;
console.log("Listening on " + bind);
});
// comments room
// Storing in variable just for testing purposes, will
// connect to MongoDB once the socket problem gets solved.
let userIds = [];
io.on("connection", (socket) => {
console.log("[server] connect");
});
io.on("join-room", (socket, {userId}) => {
console.log(`[server] join-room: ${userId}`);
userIds.push(userId);
socket.socket.username = userId;
socket.broadcast.emit("users.join-room", { userId });
});
io.on("leave-room", (socket) => {
const { userId } = socket.socket;
if (userId) {
console.log(`[server] leaving-room: ${userId}`);
userIds = userIds.filter((u) => u !== userId);
delete socket.socket["userId"];
socket.broadcast("users.leave-room", { userId });
}
});
// Storing in variable just for testing purposes, will
// connect to MongoDB once the socket problem gets solved.
let messages = [];
io.on("comment", (socket, { text }) => {
console.log(`[server] message: ${text}`);
const message = {
id: messages.length,
text,
userId: socket.socket.userId
};
messages.push(message);
socket.broadcast("comments.new", { message });
});
EDIT 1
After quickly going through the socket.io documentation I realised that my quick server implementation was faulty: I simply forgot to register the event handlers inside the connection handler... However, the generator still needs to be triggered twice for the socket connection to start, which lets the promise resolve and the user join the socket room.
io.on("connect", (socket) => {
console.log("[server] connect");
socket.on("join-room", ({ userId }) => {
console.log(`[server] join-room: ${userId}`);
userIds.push(userId);
socket.username = userId;
socket.broadcast.emit("users.join-room", { userId });
});
socket.on("leave-room", ({ userId }) => {
if (userId) {
console.log(`[server] leaving-room: ${userId}`);
userIds = userIds.filter((u) => u !== userId);
delete socket["userId"];
socket.broadcast.emit("users.leave-room", { userId });
}
});
socket.on("comment", ({ text }) => {
console.log(`[server] message: ${text}`);
const message = {
id: messages.length,
text,
userId: socket.userId
};
messages.push(message);
socket.broadcast.emit("comments.new", { message });
});
});
It’s connect, not connection: the client-side socket fires "connect", while "connection" is a server-side event (see connect() in commentSagas.js).
https://github.com/socketio/socket.io-client
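With that fix, the connect() helper in commentSagas.js resolves as soon as the client connects; a minimal sketch:
function connect() {
  const socket = io(endpoint);
  return new Promise((resolve) => {
    // socket.io-client emits "connect" on the client side;
    // "connection" is the server-side event.
    socket.on("connect", () => {
      resolve(socket);
    });
  });
}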

ReactNative MQTT Listener

I have the following code excerpt:
client.on('message', function (msg) {
// ....
});
Can someone tell me how to get the value of msg from this callback and access it from outside?
I tried this, but it doesn't work:
this.setState({ msg: msg });
You can make an MQTT manager and use it.
Example
module.exports = { // cached singleton instance
QOS: 1, // Only 0 and 1 supported by Rabbit
props: null,
create(userID, connectionProps = {}) {
if (userID && connectionProps) {
// http://www.hivemq.com/demos/websocket-client/
this.onConnectionOpened = this.onConnectionOpened.bind(this);
this.onConnectionClosed = this.onConnectionClosed.bind(this);
this.onError = this.onError.bind(this);
this.onMessageArrived = this.onMessageArrived.bind(this);
this.disconnect = this.disconnect.bind(this);
const deviceId = this.randIdCreator()
.replace(/[^a-zA-Z0-9]+/g, '');
this.conProps = _.extend({
clientId: `realtime.${userID}.${deviceId}`,
channelToUse: `mqtt-subscription-realtime.${userID}`,
auth: false,
clean: true, // clean session YES deletes the queue when all clients disconnect
}, connectionProps);
/* create mqtt client */
MQTT.createClient(this.conProps)
.then((client) => {
this.client = client;
client.on('closed', this.onConnectionClosed);
client.on('error', this.onError);
client.on('message', this.onMessageArrived);
client.on('connect', this.onConnectionOpened);
client.connect();
}).catch((err) => {
console.error(`MQTT.createClient error: ${err}`);
});
}
},
...
onMessageArrived(message) {
if (message) {
console.log(`MQTT New message: ${JSON.stringify(message)}`)
}
}
...
Usage
import MqttNotificationsManager from './realtimeManager';
// init realtime
MqttNotificationsManager.create(
'bob',
{
uri: 'mqtt://test.mosquitto.org:1883',
},
);
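To get msg out of the callback and into a component, one option (a sketch, not part of the original manager; the onMessage option name is an assumption) is to accept a callback in the connection props and forward incoming messages to it:
// In the manager: forward messages to a caller-supplied callback.
onMessageArrived(message) {
  if (message) {
    console.log(`MQTT New message: ${JSON.stringify(message)}`);
    if (this.conProps && typeof this.conProps.onMessage === 'function') {
      this.conProps.onMessage(message);
    }
  }
},

// In a React Native component: store the payload in state.
MqttNotificationsManager.create('bob', {
  uri: 'mqtt://test.mosquitto.org:1883',
  onMessage: (message) => {
    this.setState({ msg: message });
  },
});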

RabbitMQ have an exclusive consumer consume message serially

I have a scenario where, on a given topic, I need to consume messages one by one, do some async task, and then consume the next one. I am using RabbitMQ and amqp.node.
I was able to achieve this with a prefetch of 1, which of course is not an actual solution, since it would lock the whole channel and the channel has multiple topics.
So far this is my producer:
const getChannel = require("./getChannel");
async function run() {
const exchangeName = "taskPOC";
const url = "amqp://queue";
const channel = await getChannel({ url, exchangeName });
const topic = "task.init";
let { queue } = await channel.assertQueue(topic, {
durable: true
});
const max = 10;
let current = 0;
const intervalId = setInterval(() => {
current++;
if (current === max) {
clearInterval(intervalId);
return;
}
const payload = JSON.stringify({
foo: "bar",
current
});
channel.sendToQueue(queue, Buffer.from(payload), { persistent: true });
}, 3000);
}
run()
.then(() => {
console.log("Running");
})
.catch(err => {
console.log("error ", err);
});
And this is my consumer
const getChannel = require("./getChannel");
async function run() {
const exchangeName = "taskPOC";
const url = "amqp://queue";
const channel = await getChannel({ url, exchangeName });
channel.prefetch(1);
const topic = "task.init";
const { queue } = await channel.assertQueue(topic, {
durable: true
});
channel.bindQueue(queue, exchangeName, topic);
let last = new Date().getTime();
channel.consume(
queue,
msg => {
const now = new Date().getTime();
console.log(
" [x] %s %s:'%s' ",
msg.fields.routingKey,
Math.floor((now - last) / 1000),
msg.content.toString()
);
last = now;
setTimeout(function() {
channel.ack(msg);
}, 10000);
},
{ exclusive: true, noAck: false }
);
}
run()
.then(() => {
console.log("Running");
})
.catch(err => {
console.log("error ", err);
});
Is there any way to do this in RabbitMQ, or do I need to handle it in my app?
Thanks.
You can use the consumer prefetch setting (see https://www.rabbitmq.com/consumer-prefetch.html). In amqp.node you set it with the prefetch function:
channel.prefetch(1, false); // global=false
In this case each consumer on the channel has its own prefetch of 1, so a slow consumer on one topic does not block consumers on other topics sharing the channel. If you want a different configuration for each consumer, you should create more channels.
Hope this helps.
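As a sketch of how that fits the consumer above (same getChannel helper from the question; doAsyncTask is a placeholder for the async work), the only change is the explicit global = false and acking once the work finishes:
channel.prefetch(1, false); // count = 1, global = false: the limit applies per consumer

channel.consume(
  queue,
  (msg) => {
    doAsyncTask(msg) // placeholder: your async processing
      .then(() => channel.ack(msg))
      .catch(() => channel.nack(msg));
  },
  { noAck: false }
);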
