What is the reason a Node.js application uses 90% of the CPU? - node.js

I made a simple notification application that subscribes to a Redis channel; when a new message arrives, the application sends it to the user.
client.on("auth", function(sessId, userId){
console.log('AUTH: userId ' + userId)
var userIdFromRedis;
redis1.get('PHPREDIS_SESSION:' + sessId, function (err , reply) {
if (reply){
reply = reply.toString();
var result = reply.match(/s:2:\"id\";i:(\d+);/);
if (result) {
userIdFromRedis = result[1]
console.log('AUTH: userIdFromRedis ' + userIdFromRedis)
} else {
result = reply.match(/s:7:\"guestId\";i:(\d+);/);
if (result) {
var guestIdFromRedis = result[1]
console.log('AUTH: guestIdFromRedis ' + guestIdFromRedis)
}
}
if (userIdFromRedis == userId) {
client.userId = userId;
subscribe.subscribe(channelPrefix + userId);
clients[userId] = client;
client.emit("auth", {"success":true});
console.log('AUTH: result - ok')
} else if (guestIdFromRedis) {
client.guestId = guestIdFromRedis;
subscribe.subscribe(channelPrefix + guestIdFromRedis);
clients[guestIdFromRedis] = client;
client.emit("auth", {"success":true});
console.log('AUTH: result - ok')
} else {
client.disconnect();
console.log('AUTH: result - fail')
}
} else {
client.disconnect();
}
});
})
subscribe.on("message", function(channel, message) {
var userId = Math.round(channel.substr(channelPrefix.length));
if (client.userId == userId || client.guestId == userId) {
console.log('Subscriber: ' + message)
client.send(message);
}
});
client.on("message", function(text){
client.send(text);
})
And in the log file, sometimes at peak hours, I can find this error message:
(node) warning: possible EventEmitter memory leak detected. 11
listeners added. Use emitter.setMaxListeners() to increase limit.
And the Node.js application's process uses 90% of the CPU.
Please advise how I can resolve this problem.

Whenever a client connects to your server, you add another listener to the shared emitter. Unfortunately you never remove them, so you hit the default limit of 11 listeners and the warning is logged. You should track down the piece of code that adds the listeners and guard it so that an already-connected client does not cause additional listeners to be registered, and so that listeners are cleaned up when the client goes away.
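For reference, a minimal sketch of that idea, assuming the snippets in the question all run inside a socket.io connection handler (the io.sockets.on("connection", ...) wrapper is an assumption; subscribe, channelPrefix and clients are the names used in the question): register the shared subscriber's handler per connection, and remove it again on disconnect so listeners do not pile up.

io.sockets.on("connection", function (client) {
    // Per-connection handler on the shared Redis subscriber.
    function onMessage(channel, message) {
        var userId = Math.round(channel.substr(channelPrefix.length));
        if (client.userId == userId || client.guestId == userId) {
            client.send(message);
        }
    }
    subscribe.on("message", onMessage);

    client.on("disconnect", function () {
        // Without this, every reconnect leaves a dead listener behind,
        // which is what triggers the EventEmitter warning.
        subscribe.removeListener("message", onMessage);
        var id = client.userId || client.guestId;
        if (id) {
            subscribe.unsubscribe(channelPrefix + id);
            delete clients[id];
        }
    });
});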

Related

Multiple stripe charges being created from a NodeJS function with a loop

I have a NodeJS API running which uses Node Schedule to call a function every month.
This function gets a list of clients from my MySQL DB.
The clients are looped through, and each is charged for the SMS messages they have sent this month.
My issue is that although it runs, most clients were charged between 2 and 5 times (none only once).
This leads me to think I have an issue with my Stripe call, or perhaps with the NodeJS loop or the async/await handling.
App.js:
schedule.scheduleJob("0 0 1 * *", async () => {
    console.log("SMS Charging Schedule Running");
    await stripePayments.chargeCustomers();
})
stripePayments.js:
module.exports = {
    chargeCustomers: async function chargeCustomers() {
        var stripeCusID;
        var success = false;
        try {
            // I get the list of clients here. I have checked this query and it is 100% correct, with only one record per client.
            var sqlString = "CALL chemPayments();";
            var countCharged = 0;
            authConnection.query(sqlString, async (err, rows, fields) => {
                let chemRows = rows[0];
                if (!err) {
                    success = true;
                    for (let i = 0; i < chemRows.length; i++) {
                        stripeCusID = chemRows[i].stripeCusID;
                        chargeValue = chemRows[i].chargeValue;
                        chemistName = chemRows[i].chemistName;
                        chemistID = chemRows[i].chemistID;
                        countSMS = chemRows[i].countSMS;
                        //console.log(stripeCusID);
                        try {
                            await chargeSMS(stripeCusID, chargeValue, chemistName, chemistID, countSMS);
                            countCharged++;
                        } catch (e) {
                            // this catches any exception in this scope or await rejection
                            console.log(e);
                        }
                    }
                    if (countCharged == 0) {
                        console.log("No SMS Charges to perform - " + Date(Date.now()).toString());
                    }
                } else {
                    console.log("Error calling DB");
                }
            })
        } catch (e) {
            // this catches any exception in this scope or await rejection
            console.log(e);
            //return res.status(500).json({ Result: e });
        }
    }
}
I then charge each client.
async function chargeSMS(stripeCusID, chargeValue, chemistName, chemistID, countSMS) {
    let customerObject;
    let payMethodID;
    let secondaryPayMethodID;
    let payMethodToUse;
    customerObject = await stripe.customers.retrieve(stripeCusID);
    payMethodID = customerObject.invoice_settings.default_payment_method;
    secondaryPayMethodID = customerObject.default_source;
    if (payMethodID != null) {
        payMethodToUse = payMethodID;
    } else {
        payMethodToUse = secondaryPayMethodID;
    }
    await stripe.paymentIntents.create({
        amount: chargeValue,
        currency: 'aud',
        customer: stripeCusID,
        description: `SMS Charges: ${countSMS} sent - ${chemistName}`,
        payment_method: payMethodToUse,
        confirm: true
    },
    function (err, response) {
        if (err) {
            console.log(chemistName + ": " + err.message + " " + stripeCusID + " " + chargeValue);
            return;
        } else {
            console.log(`Successful SMS Charge: ${response.id} ${chemistName}`);
            chemCharged(chemistID);
        }
    })
}
This final step then updates the database and tags each SMS as "charged" = true, so they no longer appear in the initial select query.
async function chemCharged(chemistID) {
    try {
        var sqlString = "SET @chemistID = ?;CALL chemCharged(@chemistID);";
        authConnection.query(sqlString, chemistID, async (err, rows, fields) => {
            //console.log(rows);
            if (err) {
                console.error("Error marking chemist as charged");
            } else {
                console.log(chemistID + " updated on SMS DB");
            }
        })
    } catch (e) {
        // this catches any exception in this scope or await rejection
        console.log(e);
        //return res.status(500).json({ Result: e });
    }
}
My largest issue is that when I copy this code and run it with the Stripe TEST key, I can't replicate the problem. The code runs fine and each client is charged only once, but when I leave the code for the cron to run at the start of each month I get multiple charges per client: sometimes 1 or 2, but up to 5 copies of the same charge go through!
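One guard worth mentioning here (a sketch, not a confirmed diagnosis of the bug): the Stripe Node library accepts an idempotency key as a per-request option, so even if the scheduled job fires more than once for the same billing period, Stripe will not create a second charge for the same key. The key format below is purely illustrative.

// Sketch: derive a key from the chemist and the billing month so a repeated
// run of the job cannot create a duplicate charge for the same period.
const idempotencyKey = `sms-charge-${chemistID}-${new Date().toISOString().slice(0, 7)}`;

await stripe.paymentIntents.create({
    amount: chargeValue,
    currency: 'aud',
    customer: stripeCusID,
    description: `SMS Charges: ${countSMS} sent - ${chemistName}`,
    payment_method: payMethodToUse,
    confirm: true
}, {
    idempotencyKey: idempotencyKey // per-request options object supported by the stripe library
});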

Keeping track of SocketID in NodeJs and Express

After playing around a bit I found out the hard way that socket.io assigns a new socket ID on reconnection. That said, I am looking for a way for my Node.js server to keep track of changes to the socket ID. Socket.io has a built-in store which keeps track of socket IDs, remote addresses, etc. How reliable is that store, and can I quickly look up a socket ID by a user ID that I stored in that DB?
I also found some docs saying that one can set one's own socket ID, which has to be unique, but does this allow me to avoid being issued a new socket ID on refresh, or is that still not possible?
Instead of a socket ID, is there a way to send a message to a client based on a user ID instead?
If neither of the above works, I guess I will be back to the drawing board and will store session info in a DB table and update the data there.
Many questions, but hopefully someone can share some insight with me.
Here is the Socket.IO helper class I use:
const io = require('socket.io');

class ioWrapper {
    constructor(app, { middlewares, updateSession, clearSession } = {}) {
        this.io = io(app);
        this.connected = {};
        if (middlewares && middlewares.length) {
            for (const middleware of middlewares) {
                this.io.use(middleware);
            }
        }
        this.io.on('connect', client => {
            // put callback to constructor to update your session
            if (updateSession) {
                updateSession(client.id);
                console.log('Updated: ' + client.id)
            }
            client.jwt = client.id
            this.connected[client.id] = client;
            console.log('Connected: ' + client.id + ' ') // + this.io.socket.request.connection.remoteAddress)
            client.on('disconnect', () => {
                if (clearSession) {
                    clearSession(client.id);
                }
                if (this.connected[client.id]) {
                    delete this.connected[client.id];
                }
                console.log('Disconnect: ' + client.id)
            });
            client.on('error', e => console.log(e));
            client.on('connect_error', e => console.log('connect_error', e));
        });
        this.ioEmit = this.ioEmit.bind(this);
        this.ioInject = this.ioInject.bind(this);
    }

    ioEmit(socketId, event) {
        try {
            if (this.connected[socketId]) {
                this.connected[socketId].emit(...event);
            }
        } catch (e) {
            console.log(e);
        }
    }

    ioInject(socketId, handler) {
        try {
            if (this.connected[socketId]) {
                this.connected[socketId].on(...handler);
            }
        } catch (e) {
            console.log(e);
        }
    }
}

module.exports = (app, middleware, updateSession) => new ioWrapper(app, middleware, updateSession);
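On the "send a message based on a user ID" part of the question, a minimal sketch of the usual room-based approach, assuming the client can pass its user ID in the handshake (the query field name below is hypothetical): each socket joins a room named after the user, so the socket ID never needs to be tracked at all. Inside the existing 'connect' handler this could look like:

this.io.on('connect', client => {
    // ...existing setup above...
    const userId = client.handshake.query.userId; // hypothetical handshake field; a verified token would work the same way
    if (userId) {
        client.join('user:' + userId); // the room survives socket ID changes across reconnects
    }
});

// Later, emit to whichever sockets are currently connected for that user:
// this.io.to('user:' + someUserId).emit('notification', payload);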

socket hangup / ETIMEDOUT exceptions with nodeJS

I realize a great deal has been said on this topic, but I believe I am closing my connections, and would appreciate a new set of eyes to tell me if I am missing something.
An object containing the request information is created and passed to throttle.
throttle puts it on a stack and starts an interval timer which calls process_queue every 1200ms; get_info is passed as the request callback.
At some point during execution (it varies), socket hang up and connect ETIMEDOUT exceptions are thrown, at which point I re-queue the pending requests, wait a short interval, and start process_queue again.
This works fine early on, but the occurrence of exceptions seems to accelerate over time, and really they shouldn't be happening at all if I am closing the requests correctly.
Any feedback is greatly appreciated.
var timer = null;
var takeabreather = null;
var stack = [];
var pendingqueue = [];

function throttle(item) {
    stack.push(item);
    pendingqueue.push(item);
    if (timer === null) {
        timer = setInterval(process_queue, 1200);
    }
}

function process_queue() {
    var item = stack.shift();
    var req = http.request(item.opts, get_info);
    req.on('error', function (e) {
        logger.error('--- PROCESS_QUEUE: ERROR: ' + e);
        req.end();
    });
    req.end(function () {
        logger.debug('PROCESS_QUEUE: ENDING...');
    })
    // clear the timer if there is no work left to do...
    if (stack.length === 0) {
        clearInterval(timer);
        timer = null;
        logger.info('PROCESS_QUEUE: queue is empty');
    }
}

function get_info(response) {
    var body = '';
    response.on('data', function (d) {
        body += d;
    });
    response.on('end', function () {
        var parsed = JSON.parse(body);
        var doc = {};
        parsed.forEach(function (item) {
            try {
                doc.name = item.name;
            } catch (err) {
                logger.error('--- GET_INFO ERROR: ', response.req.path, err);
            }
        });
        // code to remove item from pending queue redacted //
        logger.debug('--- GET_INFO END: ', response.req.path);
    });
}

process.on('uncaughtException', function (e) {
    logger.error('--- UNCAUGHT_EXCEPTION: ' + e);
    clearInterval(timer);
    timer = null;
    if (takeabreather === null) {
        logger.warn('--- REQUEUING...');
        stack = pendingqueue;
        logger.warn('--- TAKING A BREATHER...');
        takeabreather = setTimeout(process_queue, 10000);
    }
});
As it turns out, I had a nested http.request in get_info that was not being closed via .end()
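For anyone hitting the same symptom, the shape of that fix looks roughly like this (a sketch; the nested request is not shown in the question, so the options and handler names here are placeholders):

function get_info(response) {
    var body = '';
    response.on('data', function (d) { body += d; });
    response.on('end', function () {
        // ... existing parsing code ...
        var nested = http.request(detailOpts, function (res2) { /* consume res2 */ }); // placeholder nested call
        nested.on('error', function (e) { logger.error('--- NESTED ERROR: ' + e); });
        nested.end(); // the missing call: without it the request is never flushed and the socket hangs
    });
}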

RabbitMQ with NodeJS - using amqplib to get message count

How do I get the number of messages currently en-queued?
My code is basically the following:
function readQueue() {
    var open = require('amqplib').connect(config.rabbitServer);
    open.then(function (conn) {
        var ok = conn.createChannel();
        ok = ok.then(function (ch) {
            ch.prefetch(config.bulkSize);
            setInterval(function () {
                handleMessages();
            }, config.bulkInterval);
            ch.assertQueue(config.inputQueue);
            ch.consume(config.inputQueue, function (msg) {
                if (msg !== null) {
                    pendingMessages.push(msg);
                }
            });
        });
        return ok;
    }).then(null, console.warn);
}
I found nothing in the documentation or while debugging, but I did see a different library that allows this, so I am wondering whether amqplib supports it as well.
You can get the queue length with amqplib.
In my case the queue was created with 'durable: true', so you have to pass that as an option to assertQueue as well.
var amqp = require('amqplib/callback_api');

amqp.connect(amqp_url, function (err, conn) {
    conn.createChannel(function (err, ch) {
        var q = 'task2_queue';
        ch.assertQueue(q, {durable: true}, function (err, ok) {
            console.log(ok);
        });
    });
});
It will return an object like this:
{ queue: 'task2_queue', messageCount: 34, consumerCount: 2 }
For more information: https://www.squaremobius.net/amqp.node/channel_api.html#channel_assertQueue
I think the assertQueue method call will return an object that contains the current message count. I don't remember the exact property name off-hand, but it should be in there.
The real trick, though, is that this number will never be updated once you call assertQueue. The only way to get an updated message count is to call assertQueue again. This can have some performance implications if you're checking it too frequently.
You should call channel.checkQueue(queueName); it resolves with an object like { queue: 'queueName', messageCount: 1, consumerCount: 0 }, where the messageCount property is the exact current number of messages in the queue.
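A minimal sketch of that call with amqplib's promise API (config.rabbitServer comes from the question; everything else is illustrative):

async function getMessageCount(queueName) {
    const conn = await require('amqplib').connect(config.rabbitServer);
    const ch = await conn.createChannel();
    // checkQueue does not create or modify the queue; it only returns its current stats.
    const ok = await ch.checkQueue(queueName);
    console.log(ok); // e.g. { queue: 'task2_queue', messageCount: 34, consumerCount: 2 }
    await conn.close();
    return ok.messageCount;
}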
I couldn't find a direct solution using Node, but by using the RabbitMQ management API I was able to get the message count.
After enabling the RabbitMQ management plugin, the API can be accessed at http://127.0.0.1:15672/api/queues/vhost/name, logging in as user guest with password guest.
var request = require('request');
var count_url = "http://guest:guest@127.0.0.1:15672/api/queues/%2f/" + q;
var mincount = 0;
..........
..........
request({
    url: count_url
}, function (error, response, body) {
    console.log("Called RabbitMQ API");
    if (error) {
        console.error("Unable to fetch Queued Msgs Count" + error);
        return;
    } else {
        var message = JSON.parse(body);
        if (message.hasOwnProperty("messages_ready")) {
            // this DOES NOT COUNT unAck msgs
            var msg_ready = JSON.stringify(message.messages_ready);
            console.log("message.messages_ready=" + msg_ready);
            if (msg_ready == mincount) {
                console.log("mincount Reached ..Requesting Producer");
                // Code to produce msgs ..
            }
        }
        if (message.hasOwnProperty("messages")) {
            // _messages_ is the total, i.e. including unAck
            var msg = JSON.stringify(message.messages);
            console.log("message.messages=" + msg);
        }
    }
});

Redis subscribe timeout

How can I set a timer for the Redis subscription, so that when a client connects and no message is received from the publisher within 200ms, the socket emits an event?
This is what I have now:
io.on('connection', function (socket) {
    const sub = redis.createClient();
    sub.subscribe("data");
    sub.on("message", function (channel, message) {
        io.emit("data", JSON.parse(message));
    });
    sub.on("error", function (err) {
        console.log("Error" + err);
    });
    socket.on("disconnect", function () {
        sub.removeListener('message', function () {
            io.emit("disconnected");
        })
    });
});
I know it's a late question, but hopefully this answer can help someone else, or you if you still haven't found one. You can use a timeout that calls a handler function in case the message isn't received (I'm guessing that's what you're asking).
I'm using Redis as well, and I make a call to other databases to check whether they have the user with the requested ID. To make sure the user doesn't wait forever, I added a timeout set to 500ms. So it's a race, and whichever finishes first wins; I also unsubscribe the Redis client to make sure it's a one-off.
return User.mongoose.findAsync({ _id: params.id })
    .then(results => {
        if (sails.IS.empty(results)) {
            const request_id = Date.now() + '#' + sails.HOST_NAME + '/user/' + params.id
            sails.REDIS_SUB.subscribe(request_id)
            sails.REDIS_SUB.on('message', (channel, message) => {
                if (channel == request_id) {
                    const data = JSON.parse(message)
                    sails.dlogwarn(params.id + '\t<=\t(' + data.responder + ')')
                    sails.REDIS_SUB.unsubscribe(request_id)
                    return data.params.results
                }
            })
            sails.REDIS_PUB.publish('/user/find', JSON.stringify({
                request_id: request_id,
                params: {
                    _id: params.id
                }
            }))
            setTimeout(() => {
                sails.REDIS_SUB.unsubscribe(request_id)
                return results
            }, Number(process.env.REDIS_MAX_QUERY_TIMEOUT))
        } else return results
    })
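Applied to the snippet in the question, the same race-the-timeout idea looks roughly like this (a sketch; the 200ms value comes from the question, and the fallback event name is illustrative):

io.on('connection', function (socket) {
    const sub = redis.createClient();
    let gotMessage = false;

    function onMessage(channel, message) {
        gotMessage = true;
        clearTimeout(timer);
        io.emit('data', JSON.parse(message));
    }

    // If the publisher stays quiet for 200ms, tell this client.
    const timer = setTimeout(function () {
        if (!gotMessage) {
            socket.emit('data-timeout');
        }
    }, 200);

    sub.subscribe('data');
    sub.on('message', onMessage);

    socket.on('disconnect', function () {
        clearTimeout(timer);
        sub.removeListener('message', onMessage); // pass the original handler, otherwise nothing is removed
        sub.quit(); // close this connection's dedicated subscriber
    });
});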
