How do I get the number of messages currently enqueued?
My code is basically the following:
function readQueue() {
    var open = require('amqplib').connect(config.rabbitServer);
    open.then(function (conn) {
        var ok = conn.createChannel();
        ok = ok.then(function (ch) {
            ch.prefetch(config.bulkSize);
            setInterval(function () {
                handleMessages();
            }, config.bulkInterval);
            ch.assertQueue(config.inputQueue);
            ch.consume(config.inputQueue, function (msg) {
                if (msg !== null) {
                    pendingMessages.push(msg);
                }
            });
        });
        return ok;
    }).then(null, console.warn);
}
I found nothing in the documentation or while debugging, and I did see a different library that allows this, so I'm wondering whether amqplib supports this as well.
You can get the queue length with amqplib. In my case the queue was declared with 'durable: true', so you have to pass that as an option when asserting it:
var amqp = require('amqplib/callback_api');

amqp.connect(amqp_url, function(err, conn) {
    conn.createChannel(function(err, ch) {
        var q = 'task2_queue';
        ch.assertQueue(q, {durable: true}, function(err, ok) {
            console.log(ok);
        });
    });
});
It will return an object like this:
{ queue: 'task2_queue', messageCount: 34, consumerCount: 2 }
For more information: https://www.squaremobius.net/amqp.node/channel_api.html#channel_assertQueue
The assertQueue method call returns an object that contains the current message count (the messageCount property shown above).
The catch, though, is that this number is a point-in-time snapshot; it is never updated after assertQueue returns. The only way to get a fresh count is to call assertQueue again, which can have performance implications if you check it too frequently.
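For illustration, here is a minimal sketch using the promise API (the connection URL and queue name are placeholders); each call returns a fresh snapshot:

var amqp = require('amqplib');

amqp.connect('amqp://localhost').then(function (conn) {
    return conn.createChannel();
}).then(function (ch) {
    // Each assertQueue call returns a fresh snapshot of the counts;
    // the returned object is never updated afterwards.
    return ch.assertQueue('task2_queue', { durable: true });
}).then(function (ok) {
    console.log(ok.messageCount, ok.consumerCount);
});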
You should call channel.checkQueue(queueName); the callback receives an object like { queue: 'queueName', messageCount: 1, consumerCount: 0 }, where messageCount is exactly the current number of messages in the queue. Unlike assertQueue, checkQueue does not create the queue; it errors if the queue does not exist.
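A minimal sketch with the callback API (the URL and queue name are placeholders):

var amqp = require('amqplib/callback_api');

amqp.connect('amqp://localhost', function (err, conn) {
    if (err) throw err;
    conn.createChannel(function (err, ch) {
        if (err) throw err;
        ch.checkQueue('my_queue', function (err, ok) {
            if (err) throw err; // errors if the queue does not exist
            console.log(ok.messageCount); // current number of messages
        });
    });
});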
I couldn't find a direct solution using Node, but by using the RabbitMQ management API I was able to get the message count.
After enabling the RabbitMQ management plugin, the API can be accessed at http://127.0.0.1:15672/api/queues/vhost/name, logging in as user guest with password guest.
var request = require('request');

// q (defined elsewhere) holds the queue name; %2f is the URL-encoded default vhost "/"
var count_url = "http://guest:guest@127.0.0.1:15672/api/queues/%2f/" + q;
var mincount = 0;
..........
..........
request({
    url: count_url
}, function(error, response, body) {
    console.log("Called RabbitMQ API");
    if (error) {
        console.error("Unable to fetch Queued Msgs Count" + error);
        return;
    } else {
        var message = JSON.parse(body);
        if (message.hasOwnProperty("messages_ready")) {
            // messages_ready does NOT count unacked messages
            var msg_ready = JSON.stringify(message.messages_ready);
            console.log("message.messages_ready=" + msg_ready);
            if (msg_ready == mincount) {
                console.log("mincount Reached ..Requesting Producer");
                // Code to produce msgs ..
            }
        }
        if (message.hasOwnProperty("messages")) {
            // messages is the total, i.e. including unacked
            var msg = JSON.stringify(message.messages);
            console.log("message.messages=" + msg);
        }
    }
});
I am quite new to Node.js and RabbitMQ. I wanted to create 100k queues, but I am getting the following error after creating around 6000 queues.
error: "Cannot read property 'createChannel' of undefined"
I want to call conn.createConnection() method again if I have an error. How to do this?
Here's the code
var amqp = require("amqplib/callback_api");
var time = 0;
var limit = 100000
var timer = setInterval(() => {
time+=1;
if(time>=limit){
clearInterval(timer);
}
var now = Date.now()
//RabbitMQ
amqp.connect("amqp://localhost",function(err,conn){
conn.createChannel(function(err,ch){
if(err){
//try to do conn.createChannel again
}
var q = "queue_name"+time.toString();
// console.log(q);
var msg = "this is the message string!!!";
ch.assertQueue(q,{durable: false});
ch.sendToQueue(q,new Buffer(msg),{persistent: false});
// console.log("time = "+time);
});
});
//RabbitMQ`
},10);
What you are doing is not only creating 100k queues, but also 100k connections and channels. You are likely running out of file descriptors; you could raise the ulimit to allow more, but you probably don't want to create all those connections and channels in the first place.
Try using one connection with one publisher channel with something along these lines:
var amqp = require("amqplib/callback_api");
var time = 0;
var limit = 100000
amqp.connect("amqp://localhost", function(err, conn) {
if (err) {
console.log('Somethings wrong with connection:', err);
return;
}
conn.createChannel(function(err, ch){
if (err) {
console.log('Somethings wrong with channel:', err);
return;
}
var timer = setInterval(() => {
time+=1;
if(time>=limit) {
clearInterval(timer);
}
var now = Date.now()
var q = "queue_name"+time.toString();
var msg = "this is the message string!!!";
ch.assertQueue(q, {durable: false} );
ch.sendToQueue(q, new Buffer(msg),{persistent: false});
console.log('sent', time);
},10);
});
});
I am working on a node app that essentially is a simple AWS SQS poller that should sit and listen to new items in different queues.
Here is my module.export:
module.exports = {
    readMessage: function(qParams, qType, tableName) {
        logger.debug(qType);
        SQS.receiveMessage(qParams, handleSqsResponse);

        function handleSqsResponse(err, data) {
            if (err) logger.error("handleSqsResponse error:" + err);
            if (data && data.Messages) {
                data.Messages.forEach(processMessage);
                readMessage(); // continue reading until draining the queue (or UPTIME reached)
            } else {
                logger.debug("no data in sqs.");
                // process.exit();
            }
        }

        // 'processing' is mainly writing to logs using winston. Could add here any
        // transformations and transmission to remote systems.
        function processMessage(sqsMessage) {
            // Parse SQS message
            var msgObj = JSON.parse(sqsMessage.Body);
            // Process
            logger.info(msgObj.Message);
            _.extend(qParams, { "ReceiptHandle": sqsMessage.ReceiptHandle });
            dbMap[qType](msgObj, qParams, tableName);
        }
    }
};
The issue I am running into is when I attempt to call readMessage() again: I get ReferenceError: readMessage is not defined.
module.exports is a plain object exposed to other modules, and readMessage is a method on that object, not a standalone function in scope. The recursive call readMessage() should be module.exports.readMessage().
Also, I would suggest creating a variable and then exporting that:
var obj = {
    readMessage: function(qParams, qType, tableName) {
        logger.debug(qType);
        SQS.receiveMessage(qParams, handleSqsResponse);

        function handleSqsResponse(err, data) {
            if (err) logger.error("handleSqsResponse error:" + err);
            if (data && data.Messages) {
                data.Messages.forEach(processMessage);
                // Reference the method through the object, and pass the original
                // arguments through so the next poll has them.
                obj.readMessage(qParams, qType, tableName); // continue reading until draining the queue (or UPTIME reached)
            } else {
                logger.debug("no data in sqs.");
                // process.exit();
            }
        }

        // 'processing' is mainly writing to logs using winston. Could add here any
        // transformations and transmission to remote systems.
        function processMessage(sqsMessage) {
            // Parse SQS message
            var msgObj = JSON.parse(sqsMessage.Body);
            // Process
            logger.info(msgObj.Message);
            _.extend(qParams, { "ReceiptHandle": sqsMessage.ReceiptHandle });
            dbMap[qType](msgObj, qParams, tableName);
        }
    }
};

module.exports = obj;
Please note that I only responded to the question you specifically asked. I didn't take into account any architectural issues associated with the code.
// Declaring a named function (instead of an anonymous method on an object
// literal) keeps the name in scope, so the recursive call just works.
function functionName(has = false) {
    if (has) {
        // ... do one unit of work here, then recurse while work remains ...
        functionName(false);
    } else {
        // Todo
    }
}

module.exports.functionName = functionName;
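For completeness, a caller would then use it like this (the file name is illustrative):

var poller = require('./poller');
poller.functionName(true);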
I am trying to keep a session open with the Bloomberg Public API, relaying calls from my own service's API to it to fetch data. I am running the Node.js / Express server locally right now. I have an API route that works fine the first time: I send the GET and quickly get the response back. If I then send another GET to the same route, I can see the data that the Bloomberg API returns in my server console, but the server seems to get stuck at the res.send(...) and I have no idea why. I've tried numerous things, like moving code blocks around and forcefully destroying variables, but to no avail. Do you see anything obvious that would/might work?
'use strict';

var _ = require('lodash');
var util = require('util'); // needed for util.inspect below
var Blpapi = require('./blpapi.model');
var count = 0;
var blpapi = require('blpapi');

// Add 'authenticationOptions' key to session options if necessary.
var session = new blpapi.Session({ serverHost: '10.8.8.1', serverPort: 8194 });
var service_refdata = 1; // Unique identifier for refdata service

session.start();

session.on('SessionStarted', function(m) {
    console.log(m);
    session.openService('//blp/refdata', service_refdata);
});
session.on('ServiceOpened', function(m) {
    console.log(m);
});
session.on('SessionStartupFailure', function(m) {
    console.log('SessionStartupFailure', util.inspect(m));
    session.stop();
    session.destroy();
});
session.on('SessionTerminated', function(m) {
    console.log('Session Terminated');
    session.stop();
    session.destroy();
});

exports.getStock = function (req, res) {
    var stock = req.url.substring(8, req.url.length);
    stock = stock.replace(/_/g, ' ');
    session.on('HistoricalDataResponse', function(m) {
        console.log(m);
        if (m.eventType === 'RESPONSE' && m.correlations[0].value === 101) {
            console.log('send');
            res.send(m.data.securityData);
        } else {
            res.send(500);
        }
    });
    newRequest(stock);
};

function newRequest(sec) {
    if (typeof sec !== 'string') return;
    session.request('//blp/refdata', 'HistoricalDataRequest',
        { securities: [sec],
          fields: ['PX_LAST', 'OPEN'],
          startDate: "20140101",
          endDate: "20140301",
          periodicitySelection: "DAILY" }, 101);
}

function handleError(res, err) {
    return res.send(500, err);
}
Edit1: If I change the res.send(m.data.securityData); to res.send(201);, the requests come back fine, so I'm figuring it has to do with that object.
I figured it out. It's because I was registering the session.on('HistoricalDataResponse', ...) listener inside my route controller, so a new listener accumulated on every request. Moving it out and adding a bit of logic around it solved the problem.
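For anyone hitting the same issue, here is a minimal sketch of what "moving it out" might look like; the pending map and the correlation-ID counter are illustrative bookkeeping, not part of the blpapi API:

// Register the listener once at module load, and route each response
// back to the request that asked for it via its correlation ID.
var pending = {};            // correlation ID -> Express res object
var nextCorrelationId = 100; // illustrative counter

session.on('HistoricalDataResponse', function (m) {
    var res = pending[m.correlations[0].value];
    if (!res) return;
    // (PARTIAL_RESPONSE events, if any, are ignored in this sketch)
    if (m.eventType === 'RESPONSE') {
        res.send(m.data.securityData);
        delete pending[m.correlations[0].value]; // done with this request
    }
});

exports.getStock = function (req, res) {
    var stock = req.url.substring(8).replace(/_/g, ' ');
    var id = ++nextCorrelationId;
    pending[id] = res; // remember who asked
    session.request('//blp/refdata', 'HistoricalDataRequest',
        { securities: [stock],
          fields: ['PX_LAST', 'OPEN'],
          startDate: "20140101",
          endDate: "20140301",
          periodicitySelection: "DAILY" }, id);
};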
I'm trying to convert an existing API to work with RxJS... fairly new to Node, and very new to RxJS, so please bear with me.
I have an existing API (getNextMessage), that either blocks (asynchronously), or returns a new item or error via a node-style (err, val) callback, when the something becomes available.
so it looks something like:
getNextMessage(nodeStyleCompletionCallback);
You could think of getNextMessage like an http request, that completes in the future, when the server responds, but you do need to call getNextMessage again, once a message is received, to keep getting new items from the server.
So, in order to make it into an observable collection, I have to get RxJS to keep calling my getNextMessage function until the subscriber is disposed.
Basically, I'm trying to create my own RxJs observable collection.
The problems are:
I don't know how to make subscriber.dispose() kill the async.forever
I probably shouldn't be using async.forever in the first place
I'm not sure I should even be getting 'completed' for each message; shouldn't that come only at the end of a sequence?
I'd like to eventually remove the need for using fromNodeCallback, to have a first class RxJS observable
Clearly I'm a little confused.
Would love a bit of help, thanks!
Here is my existing code:
var Rx = require('rx');
var port = require('../lib/port');
var async = require('async');

function observableReceive(portName) {
    var observerCallback;
    var listenPort = new port(portName);
    var disposed = false;

    var asyncReceive = function(asyncCallback) {
        listenPort.getNextMessage(function(error, json) {
            observerCallback(error, json);
            if (!disposed)
                setImmediate(asyncCallback);
        });
    };

    return function(outerCallback) {
        observerCallback = outerCallback;
        async.forever(asyncReceive);
    };
}

var receive = Rx.Observable.fromNodeCallback(observableReceive('rxtest'));
var source = receive();

var subscription = source.forEach(
    function (json) {
        console.log('receive completed: ' + JSON.stringify(json));
    },
    function (error) {
        console.log("receive failed: " + error.toString());
    },
    function () {
        console.log('Completed');
        subscription.dispose();
    }
);
So here's probably what I would do.
var Rx = require('rx'); // npm package name is lowercase

// This is just for kicks. You have your own getNextMessage to use. ;)
var getNextMessage = (function () {
    var i = 1;
    return function (callback) {
        setTimeout(function () {
            if (i > 10) {
                callback("lawdy lawd it's ova' ten, ya'll.");
            } else {
                callback(undefined, i++);
            }
        }, 5);
    };
}());

// This just makes an observable version of getNextMessage.
var nextMessageAsObservable = Rx.Observable.create(function (o) {
    getNextMessage(function (err, val) {
        if (err) {
            o.onError(err);
        } else {
            o.onNext(val);
            o.onCompleted();
        }
    });
});

// This repeats the call to getNextMessage as many times (11) as you want.
// "take" will cancel the subscription after receiving 11 items.
nextMessageAsObservable
    .repeat()
    .take(11)
    .subscribe(
        function (x) { console.log('next', x); },
        function (err) { console.log('error', err); },
        function () { console.log('done'); }
    );
I realize this is over a year old, but I think a better solution would be to make use of recursive scheduling instead:
Rx.Observable.forever = function (next, scheduler) {
    scheduler = scheduler || Rx.Scheduler.default;
    // Internally wrap the callback into an observable
    next = Rx.Observable.fromNodeCallback(next);

    return Rx.Observable.create(function (observer) {
        var disposable = new Rx.SingleAssignmentDisposable(),
            hasState = false;

        disposable.setDisposable(scheduler.scheduleRecursiveWithState(null,
            function (state, self) {
                hasState && observer.onNext(state);
                hasState = false;
                next().subscribe(function (x) {
                    hasState = true;
                    self(x);
                }, observer.onError.bind(observer));
            }));

        return disposable;
    });
};
The idea here is that you can schedule a new item once the previous one has completed. You call next(), which invokes the passed-in method, and when it produces a value you schedule the next invocation.
You can then use it like so:
Rx.Observable.forever(getNextMessage)
    .take(11)
    .subscribe(function (message) {
        console.log(message);
    });
I made a simple notification application that subscribes to a Redis channel; when a new message arrives, the application sends it to the user.
client.on("auth", function(sessId, userId){
console.log('AUTH: userId ' + userId)
var userIdFromRedis;
redis1.get('PHPREDIS_SESSION:' + sessId, function (err , reply) {
if (reply){
reply = reply.toString();
var result = reply.match(/s:2:\"id\";i:(\d+);/);
if (result) {
userIdFromRedis = result[1]
console.log('AUTH: userIdFromRedis ' + userIdFromRedis)
} else {
result = reply.match(/s:7:\"guestId\";i:(\d+);/);
if (result) {
var guestIdFromRedis = result[1]
console.log('AUTH: guestIdFromRedis ' + guestIdFromRedis)
}
}
if (userIdFromRedis == userId) {
client.userId = userId;
subscribe.subscribe(channelPrefix + userId);
clients[userId] = client;
client.emit("auth", {"success":true});
console.log('AUTH: result - ok')
} else if (guestIdFromRedis) {
client.guestId = guestIdFromRedis;
subscribe.subscribe(channelPrefix + guestIdFromRedis);
clients[guestIdFromRedis] = client;
client.emit("auth", {"success":true});
console.log('AUTH: result - ok')
} else {
client.disconnect();
console.log('AUTH: result - fail')
}
} else {
client.disconnect();
}
});
})
subscribe.on("message", function(channel, message) {
var userId = Math.round(channel.substr(channelPrefix.length));
if (client.userId == userId || client.guestId == userId) {
console.log('Subscriber: ' + message)
client.send(message);
}
});
client.on("message", function(text){
client.send(text);
})
And in the log file, at peak hours, I sometimes find this warning:
(node) warning: possible EventEmitter memory leak detected. 11
listeners added. Use emitter.setMaxListeners() to increase limit.
And the Node.js process uses 90% of the CPU.
Please advise how I can resolve this problem.
Whenever a client connects to your server, you register another listener on the emitter (a fresh subscribe.on("message", ...) handler per connection). Unfortunately you never remove them, so you exceed the default limit of 10 listeners, which triggers the warning at the 11th. You should track down the piece of code that adds the listeners and make sure each handler is registered only once, not once per connected client.
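A minimal sketch of that idea, assuming socket.io and node_redis as in the question (io is the socket.io server from the surrounding setup, and subscribe, clients, and channelPrefix come from the original code):

// Register the Redis subscriber handler ONCE at startup, not per client.
subscribe.on("message", function (channel, message) {
    var userId = channel.substr(channelPrefix.length);
    var client = clients[userId];
    if (client) {
        client.send(message);
    }
});

io.sockets.on("connection", function (client) {
    // ... existing auth logic that fills clients[userId] = client ...
    client.on("disconnect", function () {
        // Clean up so subscriptions and the clients map don't grow without bound.
        var id = client.userId || client.guestId;
        if (id) {
            subscribe.unsubscribe(channelPrefix + id);
            delete clients[id];
        }
    });
});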