How to consume just one message from RabbitMQ on Node.js

I'm using the amqp.node library to integrate RabbitMQ into my system.
But in the consumer I want to process just one message at a time: acknowledge the message, then consume the next message from the queue.
The current code is:
// Consumer
open.then(function(conn) {
  var ok = conn.createChannel();
  ok = ok.then(function(ch) {
    ch.assertQueue(q);
    ch.consume(q, function(msg) {
      if (msg !== null) {
        othermodule.processMessage(msg, function(error, response) {
          console.log(msg.content.toString());
          ch.ack(msg);
        });
      }
    });
  });
  return ok;
}).then(null, console.warn);
ch.consume delivers every message in the queue at once, so the othermodule callbacks do not run one after another.
I want to wait for the othermodule function to finish before consuming the next message in the queue.

As of now (2018), the RabbitMQ team documents an option for this:
https://www.rabbitmq.com/tutorials/tutorial-two-javascript.html
ch.prefetch(1);
In order to defeat that we can use the prefetch method with the value
of 1. This tells RabbitMQ not to give more than one message to a
worker at a time. Or, in other words, don't dispatch a new message to
a worker until it has processed and acknowledged the previous one.
Instead, it will dispatch it to the next worker that is not still
busy.

Following the example here:
https://www.npmjs.com/package/amqplib
// Consumer
function consumer(conn) {
  var ok = conn.createChannel(on_open);
  function on_open(err, ch) {
    if (err != null) bail(err);
    ch.assertQueue(q);
    // IMPORTANT
    ch.prefetch(1);
    ch.consume(q, function(msg) {
      if (msg !== null) {
        console.log(msg.content.toString());
        ch.ack(msg);
      }
    });
  }
}
Refs: http://www.squaremobius.net/amqp.node/channel_api.html#channel_prefetch
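For reference, the same prefetch(1) idea can be applied to the promise-style code from the question. This is only a sketch under the question's assumptions (open is the promise returned by amqp.connect, q is the queue name, and othermodule.processMessage calls its callback when it is finished):
// Consumer: prefetch(1) ensures the next message is only delivered after the ack
open.then(function(conn) {
  return conn.createChannel();
}).then(function(ch) {
  return ch.assertQueue(q).then(function() {
    ch.prefetch(1); // RabbitMQ will not dispatch a new message until the previous one is acked
    return ch.consume(q, function(msg) {
      if (msg !== null) {
        othermodule.processMessage(msg, function(error, response) {
          console.log(msg.content.toString());
          ch.ack(msg); // only now will the broker deliver the next message
        });
      }
    });
  });
}).then(null, console.warn);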

You need to set a prefetch value as shown in this example:
https://github.com/squaremo/amqp.node/blob/master/examples/tutorials/rpc_server.js#L22

When you create the model you need to set the QOS on it. Here is how we would do it in C#:
var _model = rabbitConnection.CreateModel();
// Configure the quality of service for the model. Below is what each setting means.
// BasicQos(0 = "Don't send me a new message until I've finished", _fetchSize = "Send me N messages at a time", false = "Apply to this model only")
_model.BasicQos(0, _fetchSize, false);
var consumerTag = _model.BasicConsume(rabbitQueue.QueueName, false, _consumerName, queueingConsumer);

You have to set QoS = 1; with amqp.node this is done through the prefetch value:
ch = ...
ch.prefetch(1);
ch.consume(q, msg => { ... });
(javascript)

Related

Websocket - Waiting for an HTTP request callback to execute before the next Pusher event

So I'm working with websockets to process data from a website's API. For every new event I also send some HTTP requests back to the website in order to obtain more data. Up until now everything has worked fine, but now that I started using async requests to speed things up a bit, things got a bit different. My code used to process one event and then move on to the next one (these events come in extremely quickly, around 10 per second), but now it seems to ignore the async (non-blocking) part, move on to the next event, and thereby skip over half of the code. Note that the code works fine outside the Pusher. I'm using the 'pusher-client' module. My code looks like this:
var request = require("request");
var requestSync = require('sync-request');
var Pusher = require('pusher-client');
var events_channel1 = pusher.subscribe('inventory_changes');
events_channel1.bind('listed', function(data)
{
    var var2;
    //Async request (to speed up the code)
    function myFunction(callback){
        request("url", function(error, response, body) {
            if (!error && response.statusCode == 200)
            {
                var result = JSON.stringify(JSON.parse(body));
                return callback(null, result);
            }
            else
            {
                return callback(error, null);
            }
        });
    }
    myFunction(function(err, data){
        if(!err)
        {
            var2 = data;
            return(data);
        }
        else
        {
            return(err);
        }
    });
    //The part of the code below waits for the callback and then executes some code
    var var1 = var2;
    check();
    function check()
    {
        if(var2 === var1)
        {
            setTimeout(check, 10);
            return;
        }
        var1 = var2;
        //A CHUNK OF CODE EXECUTES HERE (connected to the data from the callback)
    }
});
In conclusion, the code works, but not inside the Pusher handler, because it moves on without waiting for the asynchronous request. How would I make the handler wait for my async request to finish before processing the next event (I have no idea)? If you happen to know, please let me know :)
You need to implement a queue to handle events one after another. I'm curious how it worked before; even without Pusher you'd have to implement some queue mechanism for it.
const eventsQueue = []

events_channel1.bind('listed', function(data) {
  eventsQueue.push(data)
  handleNewEvent()
})

let processingEvent = false

function handleNewEvent() {
  if (processingEvent) return // do nothing if already processing an event
  const eventData = eventsQueue.shift() // pick the first element from the array
  if (!eventData) return // all events are handled at the moment
  processingEvent = true // set the flag only once we actually have an event to process
  ... // handle event data here
  processingEvent = false
  handleNewEvent() // handle the next event
}
Also, you should call clearTimeout to clear your timeout when you don't need it anymore.
And it's better to use promises or async/await instead of callbacks; your code will be much easier to read and maintain.
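For illustration, a hedged sketch of the same queue idea written with async/await; processEvent is a hypothetical async function standing in for the HTTP request and the follow-up work:
// Sketch: events are buffered and handled strictly one at a time.
const eventsQueue = [];
let processing = false;

events_channel1.bind('listed', function(data) {
  eventsQueue.push(data);
  drainQueue();
});

async function drainQueue() {
  if (processing) return;            // another call is already draining the queue
  processing = true;
  while (eventsQueue.length > 0) {
    const eventData = eventsQueue.shift();
    try {
      await processEvent(eventData); // hypothetical async handler; resolves when done
    } catch (err) {
      console.error(err);
    }
  }
  processing = false;
}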

How to run asynchronous tasks synchronously?

I'm developing an app with the following Node.js stack: Express/Socket.IO + React. In React I have DataTables in which you can search, and with every keystroke the data gets dynamically updated! :)
I use Socket.IO for data fetching, so on every keystroke the client socket emits some parameters and the server then calls the callback to return data. This works like a charm, but it is not guaranteed that the returned data comes back in the same order the client sent the requests.
To simulate: when I type 'a', the server responds with that same 'a', and so on for every character.
I found the async module for Node.js and tried to use its queue to return tasks in the same order it received them. For simplicity I delayed the second incoming task with setTimeout to simulate a slow database query:
Declaration:
const async = require('async');

var queue = async.queue(function(task, callback) {
  if (task.count == 1) {
    setTimeout(function() {
      callback();
    }, 3000);
  } else {
    callback();
  }
}, 10);
Usage:
socket.on('result', function(data, fn) {
  var filter = data.filter;
  if (filter.length === 1) { // TEST SYNCHRONOUSLY
    queue.push({name: filter, count: 1}, function(err) {
      fn(filter);
      // console.log('finished processing slow');
    });
  } else {
    // add some items to the queue
    queue.push({name: filter, count: filter.length}, function(err) {
      fn(data.filter);
      // console.log('finished processing fast');
    });
  }
});
But the way I receive it in the client console when I search for abc is as follows:
ab -> abc -> a (after 3 sec)
I want it to be returned like this: a (after 3 sec) -> ab -> abc
My thought is that the queue starts the setTimeout, moves on, and the setTimeout eventually fires later on the event loop. This results in the later search filters being returned earlier than the slow one.
How can I solve this problem?
First a few comments, which might help clear up your understanding of async calls:
Using "timeout" to try and align async calls is a bad idea, that is not the idea about async calls. You will never know how long an async call will take, so you can never set the appropriate timeout.
I believe you are misunderstanding the usage of queue from async library you described. The documentation for the queue can be found here.
Copy pasting the documentation in here, in-case things are changed or down:
Creates a queue object with the specified concurrency. Tasks added to the queue are processed in parallel (up to the concurrency limit). If all workers are in progress, the task is queued until one becomes available. Once a worker completes a task, that task's callback is called.
The above means that the queue can simply be used to prioritize the async tasks a given worker performs. The different async tasks can still finish at different times.
Potential solutions
There are a few solutions to your problem, depending on your requirements.
1. You only send one async call at a time and wait for it to finish before sending the next one.
2. You store the results and only display them to the user when all calls have finished.
3. You disregard all calls except for the latest async call.
In your case I would pick solution 3, as you are searching for something. Why would you care about the results for "a" if the user is already searching for "abc" before the response for "a" arrives?
This can be done by giving each request a timestamp (or counter), sorting on it, and taking only the latest.
SOLUTION:
Server:
exports = module.exports = function(io){
  io.sockets.on('connection', function (socket) {
    socket.on('result', function(data, fn) {
      var filter = data.filter;
      var counter = data.counter;
      if (filter.length === 1 || filter.length === 5) { // TEST SYNCHRONOUSLY
        setTimeout(function() {
          fn({ filter: filter, counter: counter }); // return to client
        }, 3000);
      } else {
        fn({ filter: filter, counter: counter }); // return to client
      }
    });
  });
}
Client:
export class FilterableDataTable extends Component {
  constructor(props) {
    super(props);
    this.state = {
      endpoint: "http://localhost:3001",
      filters: {},
      counter: 0
    };
    this.onLazyLoad = this.onLazyLoad.bind(this);
  }

  onLazyLoad(event) {
    var offset = event.first;
    if (offset === null) {
      offset = 0;
    }
    var filter = ''; // filter is the search character
    if (event.filters.result2 != undefined) {
      filter = event.filters.result2.value;
    }
    var returnedData = null;
    this.state.counter++;
    this.socket.emit('result', {
      offset: offset,
      limit: 20,
      filter: filter,
      counter: this.state.counter
    }, (data) => { // arrow function so `this` still points at the component
      returnedData = data;
      console.log(returnedData);
      if (returnedData.counter === this.state.counter) {
        console.log('DATA: ' + JSON.stringify(returnedData));
      }
    });
  }
}
This does, however, send unneeded data to the client, which in turn ignores it. Does anybody have ideas for further optimizing this kind of communication? For example, a method to keep old data on the server and only send the latest?
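One possible refinement, sketched here rather than taken from the answer above: remember the most recent counter per socket on the server and only reply to a request if it is still the latest when its result is ready. runQuery is a hypothetical async database call.
// Sketch: the server drops stale results instead of sending them to the client.
io.sockets.on('connection', function (socket) {
  var latestCounter = 0;
  socket.on('result', function (data, fn) {
    latestCounter = data.counter;
    runQuery(data.filter, function (rows) {        // hypothetical async DB query
      if (data.counter === latestCounter) {        // no newer request has arrived meanwhile
        fn({ filter: data.filter, counter: data.counter, rows: rows });
      }
      // otherwise the result is stale and simply not sent
    });
  });
});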

Node kafka consumer to process messages in sequence

I have a kafka topic that I want to consume with a node app. The node app must process the messages from the topic in sequence, one by one, not many at the same time.
I tried this kind of code, but it is not doing what I want. When there are messages waiting in the topic and this code is started, the 'message' event gets triggered immediately for all of the messages. The first message gets the mutex lock first, but the rest of the messages are processed in random order.
var mutex = require('node-mutex')();
var crypto = require('crypto');
var mutexToken = crypto.randomBytes(64).toString('hex');

var kafka = require('kafka-node');
var Consumer = kafka.Consumer;
var client = new kafka.Client('localhost:2181');
var consumer = new Consumer(
  client,
  [
    { topic: 'my_topic' }
  ]
);

consumer.on('message', function(message) {
  console.log("new message");
  mutex
    .lock(mutexToken)
    .then(function(unlock) {
      console.log(message);
      unlock();
    });
});
Is it possible to consume the messages one by one, synchronously? Maybe with some other library?
I believe you can control the message offset directly by explicitly disabling the autoCommit feature.
Here is the link to the consumer documentation:
https://www.npmjs.com/package/kafka-node#highlevelconsumer
Here is a link to an example with autoCommit set off:
https://github.com/SOHU-Co/kafka-node/blob/master/example/consumer.js
https://github.com/SOHU-Co/kafka-node/blob/master/example/offset.js
I have never needed to disable the autoCommit feature, so I can't speak to the implementation.
From the test code it looks like it should be something along these lines:
var Offset = kafka.Offset;
var offset = new Offset(client);
var topics = [ { topic: EXISTS_TOPIC_2 } ];
var options = { autoCommit: false, groupId: '_groupId_1_test' };
var consumer = new Consumer(client, topics, options);
var count = 0;
consumer.on('error', noop);
consumer.on('offsetOutOfRange', function (topic) {
  offsetOutOfRange(topic, this);
});
consumer.on('message', function (message) {
  message.topic.should.equal(EXISTS_TOPIC_2);
  message.value.should.equal('hello kafka');
  message.partition.should.equal(0);
  offset.commit('_groupId_1_test', [message], function (err) {
    if (count++ === 0) done(err);
  });
});
OK, I looked at the API a little more and thought this might be an angle worth investigating for you:
Consumer.prototype.pauseTopics = function (topics) {
  if (!this.pausedPayloads) this.pausedPayloads = [];
  pauseOrResume(this.payloads, this.pausedPayloads, topics);
};

Consumer.prototype.resumeTopics = function (topics) {
  if (!this.pausedPayloads) this.pausedPayloads = [];
  var reFetch = !this.payloads.length;
  pauseOrResume(this.pausedPayloads, this.payloads, topics);
  reFetch = reFetch && this.payloads.length;
  if (reFetch) this.fetch();
};
from the documentation:
pause()
Pause the consumer. Calling pause does not automatically stop messages
from being emitted. This is because pause just stops the kafka
consumer fetch loop. Each iteration of the fetch loop can obtain a
batch of messages (limited by fetchMaxBytes).
So if you only fetch one message (perhaps your bytes are small enough that a max fetch is only 1), then pause will stop the next fetch from happening. But if you fetched multiple messages, pause will not prevent more than one message being emitted.
I think to be 100% certain you would need to write the logic to handle the messages synchronously. Perhaps emit messages into a queue, and process off of the queue?
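A hedged sketch of that idea with kafka-node: buffer incoming messages, pause the consumer while one is being handled, and resume afterwards. handleMessage is a hypothetical handler that calls its callback when it has finished processing.
// Sketch: process buffered messages strictly one at a time.
var pending = [];
var busy = false;

consumer.on('message', function (message) {
  pending.push(message);
  processNext();
});

function processNext() {
  if (busy || pending.length === 0) return;
  busy = true;
  consumer.pause();                     // stop the fetch loop while we work
  var message = pending.shift();
  handleMessage(message, function () {  // hypothetical async handler
    busy = false;
    consumer.resume();                  // allow the next fetch
    processNext();                      // drain anything already buffered
  });
}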

How to work around amqplib's Channel#consume odd signature?

I am writing a worker that uses amqplib's Channel#consume method. I want this worker to wait for jobs and process them as soon as they appear in the queue.
I wrote my own module to abstract away amqplib; here are the relevant functions for getting a connection, setting up the queue, and consuming a message:
const getConnection = function(host) {
  return amqp.connect(host);
};

const createChannel = function(conn) {
  connection = conn;
  return conn.createConfirmChannel();
};

const assertQueue = function(channel, queue) {
  return channel.assertQueue(queue);
};

const consume = Promise.method(function(channel, queue, processor) {
  processor = processor || function(msg) { if (msg) Promise.resolve(msg); };
  return channel.consume(queue, processor);
});

const setupQueue = Promise.method(function setupQueue(queue) {
  const amqp_host = 'amqp://' + ((host || process.env.AMQP_HOST) || 'localhost');
  return getConnection(amqp_host)
    .then(conn => createChannel(conn)) // -> returns a `Channel` object
    .tap(channel => assertQueue(channel, queue));
});

consumeJob: Promise.method(function consumeJob(queue) {
  return setupQueue(queue)
    .then(channel => consume(channel, queue));
})
My problem is with Channel#consume's odd signature. From http://www.squaremobius.net/amqp.node/channel_api.html#channel_consume:
#consume(queue, function(msg) {...}, [options, [function(err, ok) {...}]])
The callback is not where the magic happens; the message's processing should actually go in the second argument, and that breaks the flow of promises.
This is how I planned on using it:
return queueManager.consumeJob(queue)
.then(msg => {
// do some processing
});
But it doesn't work. If there are no messages in the queue, the promise is rejected and then if a message is dropped in the queue nothing happens. If there is a message, only one message is processed and then the worker stalls because it exited the "processor" function from the Channel#consume call.
How should I go about it? I want to keep the queueManager abstraction so my code is easier to reason about but I don't know how to do it... Any pointers?
As #idbehold said, Promises can only be resolved once. If you want to process messages as they come in, there is no other way than to use this function. Channel#get will only check the queue once and then return; it wouldn't work for a scenario where you need a worker.
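One way to keep the queueManager abstraction while working with that signature, sketched under the question's own names rather than amqplib's API itself: let consumeJob accept a handler that may return a promise, and do the ack/nack inside the consume callback.
// Sketch: consume() takes a promise-returning handler instead of resolving a single promise.
const consume = function(channel, queue, handler) {
  return channel.consume(queue, function(msg) {
    if (msg === null) return;             // consumer was cancelled by the broker
    Promise.resolve()
      .then(() => handler(msg))           // handler may return a promise
      .then(() => channel.ack(msg))
      .catch((err) => {
        console.error(err);
        channel.nack(msg, false, false);  // reject without requeueing
      });
  });
};

// usage: the per-message processing moves into the handler
queueManager.consumeJob(queue, (msg) => {
  // do some processing, return a promise if it is asynchronous
});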
Just as an option, you can present your application as a stream of messages (or events). There is a library for this: http://highlandjs.org/#examples
Your code could look like this (it isn't a finished sample, but I hope it illustrates the idea):
const _ = require('highland'); // highland is conventionally imported as _

let messageStream = _((push, next) => {
  consume(queue, (msg) => {
    push(null, msg);
  });
});

// now you can operate with your stream in functional style
messageStream.map((msg) => msg + 'some value').each((msg) => { /* do something with msg */ });
This approach provides you with a lot of primitives for synchronization and transformation: http://highlandjs.org/#examples

MQTT - Use QoS 1 when …

I'm following the very good tutorial
www.hivemq.com/mqtt-essentials-part-6-mqtt-quality-of-service-levels/
but I can't figure out how to implement
Of course your application must be tolerating duplicates and process
them accordingly.
Can you give me a simple example, please?
For instance, right now I have something like this
(I set up my deployment following https://medium.com/#lelylan/how-to-build-an-high-availability-mqtt-cluster-for-the-internet-of-things-8011a06bd000):
module.exports.authorizePublish = function(client, topic, payload, callback) {
  var chunks = topic.split('/');
  if (chunks.length === 4) {
    Debug('AUTHORIZING SUBSCRIBE', client.device_id == chunks[1]);
    Debug('NICKNAME', chunks[1]);
    Debug('CHANNEL', chunks[3]);
    Debug('TOPIC', chunks[2]);
    Debug('PAYLOAD', payload.toString('utf8'));
    var data = {
      deviceNickname: chunks[1],
      channel: chunks[3],
      topic: chunks[2],
      payload: payload.toString('utf8')
    };
    Message.insert(data, function (err, message) {
      if (err) {
        Debug(err);
        return;
      }
      Debug(message);
    });
  }
  callback(null, client.device_id === chunks[1]);
}
I'm still more a learner of MQTT than an expert, but my understanding of how duplicate messages are handled with QoS 1 is the following:
Suppose you have an application that, for whatever reason, needs to count the messages received from a broker. However, you don't want duplicate messages (sent when your client didn't ACK the message in time) to be taken into account.
I use the Java Paho client, so the code for it would be:
int counter = 0;

public void messageArrived(String topic, MqttMessage message) throws MqttException {
  if (message.isDuplicate() == false) {
    counter++;
  }
}
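For a Node.js client, a comparable check can be sketched with MQTT.js, which passes the incoming packet (including its DUP flag) as the third argument of the message handler. This is only an assumption-laden sketch: the DUP flag marks retransmissions of the same packet, so robust end-to-end deduplication usually also relies on an application-level message id.
// Sketch with MQTT.js: ignore messages whose DUP flag is set.
const mqtt = require('mqtt');
const client = mqtt.connect('mqtt://localhost');

let counter = 0;

client.on('connect', () => {
  client.subscribe('my/topic', { qos: 1 });
});

client.on('message', (topic, payload, packet) => {
  if (!packet.dup) {
    counter++; // count only first deliveries, mirroring the Java example above
  }
});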
