Node Process hangs when saving many pubnub instances in memory - node.js

While load testing a program that uses PubNub for an integration, I sent around 2000 requests. On each request a PubNub instance was created with different pub/sub keys, subscribed to a channel, and had listeners added. After some time, when there is a network issue, PubNub throws a socket hang up error, memory starts spiking, and eventually the process is killed, even though I am destroying the PubNub object when a subscription fails.
class PubnubClient {
  private config;
  private pubnub;

  constructor(options) {
    this.config = options;
  }

  register(callback) {
    let timetoken = null;
    this.pubnub = new Pubnub({
      publish_key: this.config.publish_key, // was `options.publish_key`, but `options` is not in scope here
      subscribe_key: this.config.subscribe_key,
      ssl: true,
      keepAlive: true
    });
    this.pubnub.addListener({
      message: (m) => {
        // console.log('----------------- ', m);
        if (timetoken !== m.timetoken) {
          timetoken = m.timetoken;
        }
      },
      // Arrow function so `this` refers to the class instance, not the listener object
      status: (m) => {
        console.log(m);
        if (m && m.error === true) {
          this.pubnub.destroy(true);
          return callback(m.errorData);
        }
        callback(null, true);
      }
    });
    this.pubnub.subscribe({
      channels: this.config.channels // was `option.channels`
    });
  }
}
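For context, the instances are created per request along these lines; a simplified sketch (the clients map and registerClient helper are hypothetical illustrations, not the real code):

// Hypothetical registry so a failed instance is both destroyed AND dereferenced
const clients = new Map();

function registerClient(id, options) {
  const client = new PubnubClient(options);
  clients.set(id, client);
  client.register((err) => {
    if (err) {
      // destroy(true) already ran inside register(); dropping our reference
      // is what actually lets the instance and its listeners be garbage collected
      clients.delete(id);
    }
  });
}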

Related

Unable to use ActiveMQ priority messages using STOMP protocol in nodejs

I have an application which sends messages to a queue, and another application which subscribes to the queue and processes them. I want OTP messages to be given higher priority than other messages, so I am trying to use ActiveMQ message priority to achieve this.
This is the code for the ActiveMQ connection using the STOMP protocol in Node.js, via the stompit library:
const serverPrimary = {
  host: keys.activeMQ.host,
  port: keys.activeMQ.port,
  ssl: ssl,
  connectHeaders: {
    host: '/',
    login: keys.activeMQ.username,
    passcode: keys.activeMQ.password,
    'heart-beat': '5000,5000',
  },
}
connManager = new stompit.ConnectFailover(
  [serverPrimary, serverFailover],
  reconnectOptions,
)
connManager.on('error', function (e) {
  const connectArgs = e.connectArgs
  const address = connectArgs.host + ':' + connectArgs.port
  logger.error({ error: e, customMessage: address })
})
channelPool = new stompit.ChannelPool(connManager)
Code for sending a message:
const pushMessageToAMQ = (queue, message) => {
  const queues = Object.values(activeMQ.queues)
  if (!queues.includes(queue)) {
    _mqLog(mqLogMessages.unknownQueue + queue)
    return
  }
  // Priority header is set
  const header = {
    destination: queue,
    priority: 7
  }
  // If message is not a string
  if (typeof message !== 'string') message = JSON.stringify(message)
  // Logging message before sending
  _mqLog(
    mqLogMessages.sending,
    { service: services.amq },
    { header: header, message: message },
  )
  // Sending message to AMQ
  _sendMessageToAMQ(header, message, error => {
    if (error) {
      _mqError(error, mqLogMessages.sendingError, { service: services.amq })
    }
  })
}
const _sendMessageToAMQ = (headers, body, callback) => {
  channelPool.channel((error, channel) => {
    if (error) {
      callback(error)
      return
    }
    channel.send(headers, body, callback)
  })
}
Here's the code for subscribing to the queue in the second application:
const amqSubscribe = (queue, callback, ack = 'client-individual') => {
  log({ customMessage: 'Subscribing to ' + queue })
  const queues = Object.values(activeMQ.queues)
  if (!queues.includes(queue)) {
    return
  }
  channelPool.channel((error, channel) => {
    let header = {
      destination: queue,
      ack: ack,
      'activemq.prefetchSize': 1,
    }
    // Check for error
    if (error) {
      _mqError(error, mqLogMessages.baseError, header)
    } else {
      channel.subscribe(
        header,
        _synchronisedHandler((error, message, next) => {
          // Check for error
          if (error) {
            _mqError(error, mqLogMessages.subscriptionError, header)
            next()
          } else {
            // Read message
            message.readString('utf-8', function (error, body) {
              if (error) {
                _mqError(error, mqLogMessages.readError, header)
                next()
              } else {
                // Message read successfully, call the callback
                callback(body, () => {
                  // Acknowledgment callback
                  channel.ack(message)
                  next()
                })
              }
            })
          }
        }),
      )
    }
  })
}
Activemq.xml
<policyEntries>
<policyEntry queue=">" prioritizedMessages="true" useCache="false" expireMessagesPeriod="0" queuePrefetch="1" />
.......
I tried pushing messages with different priorities and turned on the second application (i.e. the one which subscribes to the messages) after all the messages were pushed to the queue. However, the messages were consumed in the same order they were sent; the priority didn't change anything. Is there something that I am missing?
Do I have to add something on the consumer end for it to work?
Support for priority is disabled by default in ActiveMQ "Classic" (used by Amazon MQ). As the documentation states:
...support [for message priority] is disabled by default so it needs to be enabled using per destination policies through xml configuration...
You need to set prioritizedMessages="true" in the policyEntry for your queue, e.g.:
<destinationPolicy>
  <policyMap>
    <policyEntries>
      <policyEntry queue=">" prioritizedMessages="true"/>
      ...
To be clear, this is configured on the broker (i.e. not the client) in activemq.xml, and it applies to every kind of client.
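With that policy in place, one way to sanity-check it is to publish a few messages with different priority headers before starting the consumer; higher-priority messages should then be delivered first. A minimal sketch reusing the question's stompit channelPool (the queue name and message bodies are made up for illustration):

// Hypothetical smoke test: enqueue low priority first, high priority second;
// with prioritizedMessages="true" the consumer should receive 'high' first
const testQueue = '/queue/priority.test';
channelPool.channel((error, channel) => {
  if (error) return console.error(error);
  channel.send({ destination: testQueue, priority: 1 }, 'low', () => {
    channel.send({ destination: testQueue, priority: 9 }, 'high', (err) => {
      if (err) console.error(err);
    });
  });
});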

Node.js + Vue App + Websockets, updating list of connected elements

I'm trying to build this app based on Node + Socket.IO + Vue.js.
On the back end, I set up the server connected to Socket.IO, along with the different emitters:
Class Server
const express = require("express");
const cors = require("cors");
const corsObject = {
  origin: ["http://localhost:8100"],
  methods: ["GET", "POST", "DELETE", "PUT", "PATCH"],
  allowedHeaders: ["Content-Type", "Authorization", "token-response"],
  credentials: true,
};
const { conectToMongo } = require("../database/database");
const {
  socketController,
} = require("../sockets-controllers/socket-controller");

class Server {
  constructor() {
    this.allowedOrigins = "http://localhost:8100";
    this.app = express();
    this.port = process.env.PORT;
    this.connectingdatabase();
    this.server = require("http").createServer(this.app);
    this.io = require("socket.io")(this.server, {
      cors: corsObject,
    });
    this.connectionSocketClient();
    this.middlewares();
    this.routes();
  }
  middlewares() {
    this.app.use(express.json()); // read and parse the body as JSON
    this.app.use(express.static("public")); // front-end static directory
    this.app.use(cors({ credentials: true, origin: "http://localhost:8100" })); // CORS
    this.app.get("/", (request, response, next) => {
      response.status(200).json({
        ok: true,
        message: "request correct",
      });
    });
  }
  routes() {
    this.app.use("/user", require("../routes/user-routes"));
    this.app.use("/auth", require("../routes/user-login"));
    // this.app.use(this.usuariosPath, require('../routes/user-routes'))
  }
  async connectingdatabase() {
    await conectToMongo();
  }
  connectionSocketClient() {
    this.io.on("connection", (socket) => socketController(socket, this.io));
  }
  portListener() {
    this.server.listen(this.port, () => {
      console.log(`server running on port : ${this.port}`);
    });
  }
}
module.exports = Server; // Export the class for use in the other modules
To initialize the controllers, I first define a class that the controllers can use more easily by importing its instance:
class Message {
  constructor(userId, userMessage, message) {
    this.userId = userId;
    this.userMessage = userMessage;
    this.message = message;
  }
} // Message class and its constructor

class ChatMessage {
  constructor() {
    this.messages = [];
    this.usersOnConnection = {};
  }
  // The constructor holds two pieces of state: first, an array of messages,
  // and second, an object that accumulates the connected users

  get usersConnected() {
    return Object.values(this.usersOnConnection);
  }
  // This getter converts the object of connected users into an array

  addUserToChat(user) {
    this.usersOnConnection[user.id] = user;
  }
  // This method adds a user (the new consumer) to the
  // usersOnConnection object (HERE problem too)

  disconnectUserFromChat(id) {
    delete this.usersOnConnection[id];
  }
  // Removes a user by id; this method is called from the
  // socket controller's disconnect handler below
}
module.exports = { Message, ChatMessage };
Then the socket controller in charge of the logic looks like this:
const { Socket } = require("socket.io");
const { ChatMessage, Message } = require("../models/chat-model"); // imported classes

const socketController = async (socket = new Socket(), io) => {
  const user = await jwtValidatorRenew(
    socket.handshake.headers["token-response"]
  );
  if (!user || user == undefined) {
    console.log("user disconnected");
    return socket.disconnect();
  }
  // If for some reason the user resolved from the token is null
  // or similar, the controller ends here by disconnecting the socket.
  // Otherwise:
  const chatMessage = new ChatMessage(); // creating a new instance of ChatMessage (constructors are synchronous, so no await)
  chatMessage.addUserToChat(user); // adding the user to the chat (HERE)
  // Any time the browser is reloaded or a user starts a session, this user
  // is added via the addUserToChat method of the class instance
  io.emit("active-users", chatMessage.usersConnected); // sending connected users
  // Once the user is added, I emit through io the state of all connected
  // consumers on the "active-users" event, using the usersConnected getter
  // of the instance
  socket.on("disconnect", () => {
    console.log("Client disconnected", socket.id); // disconnection
    chatMessage.disconnectUserFromChat(user.id);
    // disconnecting the user
    io.emit("active-users", chatMessage.usersConnected);
  });
  // On disconnection, the chatMessage instance removes the user by its id.
  // Then, through io, a new state is emitted to all consumers on the
  // "active-users" event using the usersConnected getter of the
  // chatMessage instance (but it only ever contains one user)
};
module.exports = { socketController };
Then, on my front end, after setting up the Socket.IO client, I put this in my Vue state management (Vuex) for the socket connection; this action is dispatched whenever I need it.
VUEX ACTIONS
conectingSocket() {
  const socket = io("localhost:3006", {
    extraHeaders: {
      "token-response": localStorage.getItem("token"),
    },
  });
  socket.on("connect", () => {
    console.log("socket online");
  });
  // handler for connection
  socket.on("disconnect", () => {
    console.log("socket offline");
  });
  // handler for disconnection
  socket.on("active-users", (usersPayload) => {
    console.log(usersPayload);
  });
  // Here I log the connected users whenever the back end emits on this event.
  // But it always shows only the user of the browser that I reload,
  // never updating with the other ones
},
Then, any time I initialize my app, this front-end method is triggered to retrieve the connected users, so I simply call the method in my Vue mounted lifecycle hook:
export default {
  name: "AllUsersComponent",
  components: { IonCard, IonContent, IonItem, IonInput, IonButton, IonLabel },
  // components: { IonLabel, IonInput, IonItem },
  data() {
    return {
      allUsersFinal: [],
      message: "",
      state: false,
      socket: io(process.env.VUE_APP_BACK_URL),
      // client socket and its connection
    };
  },
  methods: {
    ...mapActions(["getAllUsers", "validateToken", "conectingSocket"]),
    socketConection() {
      this.$store.dispatch("conectingSocket");
    },
    // ...some methods
  },
  computed: {
    ...mapGetters(["getterGetAllUsers"]),
    // ...some computed methods
  },
  mounted() {
    this.socketConection();
  },
  created() {
    // ...some methods
  },
};
But it keeps showing me only the user of the browser that I reload. To test this, I open the app as two users from different browsers (Firefox and Chrome).
Any help on this would be amazing!

NodeJS streams not awaiting async

I have run into an issue when testing Node.js streams. I can't seem to get my project to wait for the output from the Duplex and Transform streams after running stream.pipeline, even though it returns a promise. Perhaps I'm missing something, but I believe the script should wait for the function to return before continuing. The most important part of the project I'm trying to get working is:
// Message system is a duplex (read/write) stream
export class MessageSystem extends Duplex {
  constructor() {
    super({highWaterMark: 100, readableObjectMode: true, writableObjectMode: true});
  }
  public _read(size: number): void {
    var chunk = this.read();
    console.log(`Received ${chunk}`);
    this.push(chunk);
  }
  public _write(chunk: Message, encoding: string,
      callback: (error?: Error | null | undefined, chunk?: Message) => any): void {
    if (chunk.data === null) {
      callback(new Error("Message.Data is null"));
    } else {
      callback();
    }
  }
}

export class SystemStream extends Transform {
  public type: MessageType = MessageType.Global;
  public data: Array<Message> = new Array<Message>();
  constructor() {
    super({highWaterMark: 100, readableObjectMode: true, writableObjectMode: true});
  }
  public _transform(chunk: Message, encoding: string,
      callback: TransformCallback): void {
    if (chunk.single && (chunk.type === this.type || chunk.type === MessageType.Global)) {
      console.log(`Adding ${chunk}`);
      this.data.push(chunk);
      chunk = new Message(chunk.data, MessageType.Removed, true);
      callback(undefined, chunk); // TODO: Is this correct?
    } else if (chunk.type === this.type || chunk.type === MessageType.Global) { // Ours and global
      this.data.push(chunk);
      callback(undefined, chunk);
    } else { // Not ours
      callback(undefined, chunk);
    }
  }
}

export class EngineStream extends SystemStream {
  public type: MessageType = MessageType.Engine;
}

export class IOStream extends SystemStream {
  public type: MessageType = MessageType.IO;
}

let ms = new MessageSystem();
let es = new EngineStream();
let io = new IOStream();
let pipeline = promisify(Stream.pipeline);

async function start() {
  console.log("Running Message System");
  console.log("Writing new messages");
  ms.write(new Message("Hello"));
  ms.write(new Message("world!"));
  ms.write(new Message("Engine data", MessageType.Engine));
  ms.write(new Message("IO data", MessageType.IO));
  ms.write(new Message("Order matters in the pipe, even if Global", MessageType.Global, true));
  ms.end(new Message("Final message in the stream"));
  console.log("Piping data");
  await pipeline(
    ms,
    es,
    io
  );
}

Promise.all([start()]).then(() => {
  console.log(`Engine Messages to parse: ${es.data.toString()}`);
  console.log(`IO Messages to parse: ${io.data.toString()}`);
});
Output should look something like:
Running message system
Writing new messages
Hello
world!
Engine Data
IO Data
Order Matters in the pipe, even if Global
Engine messages to parse: Engine Data
IO messages to parse: IO Data
Any help would be greatly appreciated. Thanks!
Note: I posted this with my other account, and not this one that is my actual account. Apologies for the duplicate.
Edit: I initially had the repo private, but have made it public to help clarify the answer. More usage can be found on the feature/inital_system branch. It can be run with npm start when checked out.
Edit: I've put my custom streams here for verbosity. I think I'm on a better track than before, but now I'm getting a "null" object received down the pipeline.
As the documentation states, stream.pipeline is callback-based and doesn't return a promise.
It has a custom promisified version that can be accessed with util.promisify:
const pipeline = util.promisify(stream.pipeline);
...
await pipeline(...);
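As an aside, newer Node releases (v15+) also ship a built-in promise-based pipeline in the stream/promises module, which makes the promisify step unnecessary:

// Built-in promise API (Node 15+); no util.promisify needed
const { pipeline } = require('stream/promises');

async function run() {
  await pipeline(ms, es, io); // resolves when all streams finish, rejects on the first error
}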
After a couple of days of work, I've found my answer. The issue was my implementation of the Duplex stream. I have since changed the MessageSystem to a Transform stream, which is easier to manage and work with.
Here is the product:
export class MessageSystem extends Transform {
  constructor() {
    super({highWaterMark: 100, readableObjectMode: true, writableObjectMode: true});
  }
  public _transform(chunk: Message, encoding: string,
      callback: TransformCallback): void {
    try {
      let output: string = chunk.toString();
      callback(undefined, output);
    } catch (err) {
      callback(err);
    }
  }
}
Thank you to @estus for the quick reply and check. Again, I found my answer in the API docs all along!
An archived copy of my findings can be found in this repository.
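For completeness, a minimal usage sketch (assuming the Message class and the streams from the question) showing the promisified pipeline being awaited before the collected data is read:

const Stream = require("stream");
const { promisify } = require("util");

const pipelineAsync = promisify(Stream.pipeline);

async function start() {
  const ms = new MessageSystem();
  const es = new EngineStream();
  const io = new IOStream();
  ms.write(new Message("Hello"));
  ms.end(new Message("world!"));
  await pipelineAsync(ms, es, io); // only resolves once all three streams have finished
  console.log(`Engine messages to parse: ${es.data.toString()}`);
}

start().catch(console.error);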

NodeJS VM2 proper way to access console when set to 'redirect'

I'm using the VM2 package to run user code. I'm trying to intercept console output and have set the NodeVM object's console property to 'redirect':
// Create a new sandbox VM for this request
const vm = new NodeVM({
  console: 'redirect',
  timeout: 30000,
  sandbox: { request, state, response },
  require: {
    external: true
  }
});
According to the documentation, that redirects console output to 'events'. I'm new to Node.js; how do I hook into those events to capture the console.log messages executed inside the sandbox?
After digging through the source code, I found this file where the event emit occurs:
sandbox.js
if (vm.options.console === 'inherit') {
  global.console = Contextify.readonly(host.console);
} else if (vm.options.console === 'redirect') {
  global.console = {
    log(...args) {
      vm.emit('console.log', ...Decontextify.arguments(args));
      return null;
    },
    info(...args) {
      vm.emit('console.info', ...Decontextify.arguments(args));
      return null;
    },
    warn(...args) {
      vm.emit('console.warn', ...Decontextify.arguments(args));
      return null;
    },
    error(...args) {
      vm.emit('console.error', ...Decontextify.arguments(args));
      return null;
    },
    dir(...args) {
      vm.emit('console.dir', ...Decontextify.arguments(args));
      return null;
    },
    time: () => {},
    timeEnd: () => {},
    trace(...args) {
      vm.emit('console.trace', ...Decontextify.arguments(args));
      return null;
    }
  };
}
All you need to do to listen to these events is to bind an event listener on the vm you've created:
// Create a new sandbox VM for this request
const vm = new NodeVM({
  console: 'redirect',
  require: {
    external: ['request']
  }
});

vm.on('console.log', (data) => {
  console.log(`VM stdout: ${data}`);
});
Likewise, you can bind to console.log, console.info, console.warn, console.error, console.dir, and console.trace. Hopefully this will save someone else some time.
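Since the event names mirror the console methods, you can also bind them all in a loop instead of one by one; a small sketch using the same vm instance as above:

// Forward every redirected console event from the sandbox to the host console
['log', 'info', 'warn', 'error', 'dir', 'trace'].forEach((level) => {
  vm.on(`console.${level}`, (...args) => {
    console[level]('VM:', ...args);
  });
});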

Replicate EasyNetQ Request/Response with amqplib in nodeJS

I'm replicating EasyNetQ functionality in Node.js (so that a Node app can communicate over RabbitMQ with an EasyNetQ-enabled .NET app). I've replicated EasyNetQ's Publish/Subscribe and EasyNetQ's Send/Receive, but I'm having some difficulty with EasyNetQ's Request/Response.
Here is my current Node code:
var rqrxID = uuid.v4(); // a GUID
var responseQueue = 'easynetq.response.' + rqrxID;
Q(Play.AMQ.ConfirmChannel.assertQueue(responseQueue, { durable: false, exclusive: true, autoDelete: true }))
  .then((okQueueReply) =>
    Play.AMQ.ConfirmChannel.consume(responseQueue, (msg) => {
      // do something here...
      Play.AMQ.ConfirmChannel.ack(msg);
    })
  )
  .then((okSubscribeReply) => {
    Q(Play.AMQ.ConfirmChannel.assertExchange('easy_net_q_rpc', 'direct', { durable: true, autoDelete: false }))
      .then((okExchangeReply) =>
        Play.AMQ.ConfirmChannel.publish(
          global.AppConfig.amq.rpc.exchange,
          dto.AsyncProcessorCommand.Type,
          Play.ToBuffer(command),
          { type: command.GetType() },
          (err, ok): void => {
            if (err !== null) {
              console.warn('Message nacked!');
              responseDeferred.reject(err);
            }
          }
        )
      )
  })
  .catch((failReason) => {
    console.error(util.format('Error creating response queue: %s', failReason));
    return null;
  });
Note that the publish works and is received by the .NET code. That code then sends a response, and the issue is that the response isn't received. Here's the .NET code:
Bus.Respond<AsyncProcessorCommand, AsyncProcessorCommandResponse>(
    request =>
    {
        Console.WriteLine("Got request: '{0}'", request);
        return new AsyncProcessorCommandResponse()
        {
            ID = Guid.NewGuid(),
            ResponseType = "ENQResp"
        };
    });
I'm sure I'm missing something, but not sure what. Who can help?
UPDATE
I have solved at least part of this. Taking the value of responseQueue and setting it in the publish options as "replyTo" hooks the response up. Now I just have to figure out how to either not create a new queue each time OR make the response queue go away...
UPDATE FINAL
So, using the channel setup I had and saving the consumerTag (actually, specifying it) allowed me to cancel the consumer, and the queue auto-deleted.
Taking my comments from above to answer this.
There are two pieces to this. First, from the code above, create your response queue so that it auto-deletes (when the consumer count drops to 0):
channel.assertQueue(responseQueue, { durable: false, exclusive: true, autoDelete: true }))
Then create/publish to the queue the "server" is listening on, making sure to set "replyTo" to the response queue you just created (the type piece is another bit of ENQ-needed code):
{ type: command.GetType(), replyTo: responseQueue }
So an entire (currently messy as it's "play" code) method for executing this pattern looks like:
private static Request(command: dto.AsyncProcessorCommand): Q.Promise<dto.interfaces.IAsyncProcessorCommandResponse> {
  var responseDeferred = Q.defer<dto.interfaces.IAsyncProcessorCommandResponse>();
  var consumerTag = uuid.v4();
  var rqrxID = uuid.v4();
  var responseQueue = 'easynetq.response.' + rqrxID;
  var handleResponse = (msg: any): void => {
    var respType = null;
    switch (command.Action) {
      default:
        respType = 'testResp';
    }
    // just sending *something* back; this should come from 'msg'
    responseDeferred.resolve(new dto.AsyncProcessorCommandResponse(respType, { xxx: 'yyy', abc: '123' }));
  }
  Q(Play.AMQ.ConfirmChannel.assertQueue(responseQueue, { durable: false, exclusive: true, autoDelete: true }))
    .then((okQueueReply) =>
      Play.AMQ.ConfirmChannel.consume(
        responseQueue,
        (msg) => {
          handleResponse(msg);
          Play.AMQ.ConfirmChannel.ack(msg);
          Play.AMQ.ConfirmChannel.cancel(consumerTag);
        },
        { consumerTag: consumerTag }
      )
    )
    .then((okSubscribeReply) => {
      Q(Play.AMQ.ConfirmChannel.assertExchange('easy_net_q_rpc', 'direct', { durable: true, autoDelete: false }))
        .then((okExchangeReply) =>
          Play.AMQ.ConfirmChannel.publish(
            'easy_net_q_rpc',
            dto.AsyncProcessorCommand.Type,
            Play.ToBuffer(command),
            { type: command.GetType(), replyTo: responseQueue },
            (err, ok): void => {
              if (err !== null) {
                console.warn('Message nacked!');
                responseDeferred.reject(err);
              }
            }
          )
        )
    })
    .catch((failReason) => {
      console.error(util.format('Error creating response queue: %s', failReason));
      return null;
    });
  return responseDeferred.promise;
}
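Regarding the earlier question of not creating a new queue per request: RabbitMQ also provides the built-in pseudo-queue amq.rabbitmq.reply-to for exactly this pattern. A sketch with plain amqplib (channel and handleResponse stand in for the objects above; whether the EasyNetQ side accepts this replyTo address is an assumption to verify):

// Consume once, with noAck (required for direct reply-to), before publishing
channel.consume('amq.rabbitmq.reply-to', (msg) => {
  // match responses to requests via msg.properties.correlationId
  handleResponse(msg);
}, { noAck: true });

// Publish requests pointing replies at the pseudo-queue; no queue is declared
channel.publish('easy_net_q_rpc', dto.AsyncProcessorCommand.Type, Play.ToBuffer(command), {
  replyTo: 'amq.rabbitmq.reply-to',
  correlationId: uuid.v4()
});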
