How to add logs to newrelic in nodejs

I have a Kafka consumer for a topic and I am able to read data from it, but I want to add New Relic instrumentation for this consumer.
How can I manually send events to New Relic?
I have a newrelic.js file and have configured it, but I have read that for Kafka I need to send the events manually.
Sample Code Snippet
const newrelic = require('newrelic');
const config = require('./config');
const topicConfig = require('./topicConfig');
const scheduler = require("../scheduler");
const Kafka = require("node-rdkafka");

const defaultConfig = { ...config };
const topicConf = { ...topicConfig };

try {
  var topic = new Kafka.KafkaConsumer(defaultConfig, topicConf, {});
  topic.connect();
  logger.info("topic object", topic);

  topic
    .on("ready", function () {
      topic.subscribe([config.topicConf]);
      topic.consume();
    })
    .on("data", function (data) {
      scheduler.handleMessage(data.value);
    })
    .on("error", function (err) {
      logger.error("Error in topic consumer ", err);
    });
} catch (error) {
  logger.error("Exception while connecting to kafka", error);
}
How can I add manual events to New Relic, and which API calls should I use? The methods below were mentioned in the docs, but I am confused about how to integrate them and where to add them inside this try/catch block.
Example methods as in the docs:
newrelic.setTransactionName(name)
newrelic.setControllerName(name, [action])
Any help appreciated.

You have to define a custom event, something like this, and pass it to the addPageAction custom event:
window.addEventListener('DOMContentLoaded', (event) => {
  document.getElementById('btnFndCntnt').addEventListener('click', function (e) {
    newrelic.addPageAction('Find');
  });
});
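Note that addPageAction belongs to the Browser agent (it lives on window.newrelic), so it only works in the browser. For a server-side Kafka consumer like the one in the question, a rough sketch using the Node agent API could look like the snippet below; the transaction and event names are illustrative choices, not anything New Relic prescribes.
// Sketch only: assumes the agent is already configured via newrelic.js.
// 'KafkaMessageReceived' and the transaction name are arbitrary labels.
const newrelic = require('newrelic');

topic.on("data", function (data) {
  newrelic.startBackgroundTransaction('kafka/consume/' + config.topicConf, function () {
    const transaction = newrelic.getTransaction();
    try {
      // custom event, queryable later with NRQL
      newrelic.recordCustomEvent('KafkaMessageReceived', {
        topic: config.topicConf,
        sizeBytes: data.size
      });
      scheduler.handleMessage(data.value);
    } finally {
      transaction.end();
    }
  });
});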

Related

How to listen to socketIO private message in React client?

I have a SocketIO instance in an Express app that listens to React client requests. A user can send private messages to a specific person. The server receives the private message and should dispatch it back to both sender and recipient using the io.to(socketId).emit(content) method.
How do I listen to this event in React and update the message array? In order to ease the process, I have created a connectedUsers object, whose keys are MongoDB's user._id and whose values are the unique socketID generated by socketIO. This way, I can easily address messages to specific people in the client. Once sent, the messages are stored in a MongoDB database.
Here is the back-end. The point of interest is io.on("privateMessage")
const connectedUsers = {};

const socketManager = (io) => {
  io.on("identifyUser", (user) => {
    if (!([user.id] in connectedUsers)) {
      connectedUsers[user.id] = io.id;
    }
  });

  io.on("privateMessage", (data) => {
    io.to(connectedUsers[data.recipientId]).emit(data.message);
    io.to(connectedUsers[data.senderId]).emit(data.message);
  });

  io.on("disconnect", () => console.log("user disconnected!"));
};
Here is the listening function in React. Everything works but the "privateMessage" part.
async function getUser(socketId) {
  try {
    const res = await ax.get(`${serverUrl}/login`);
    const socket = io(serverUrl);
    socketId.current = socket;
    socket.on("connect", () => {
      socket.emit("identifyUser", { id: res.data._id });
      socket.on("privateMessage", (data) =>
        console.log("private message received!", data)
      );
    });
  } catch (err) {
    throw new Error(err);
  }
}
Thanks for your help!
I think you need to move the socket.on("privateMessage") handler outside of the socket.on("connect") callback.
React needs to register all of its event listeners up front.
The backend must be responsible for the authorization.
On the client there is a connection event, not connect.
The subscription to the privateMessage event should be outside the connection callback.
This code should work, hope it helps (a matching server-side sketch follows the snippet below):
import io from 'socket.io-client'

async function getUser(socketId) {
  try {
    const res = await ax.get(`${serverUrl}/login`);
    const socket = io(serverUrl);
    socketId.current = socket;
    socket.on("connection", () => {
      socket.emit("identifyUser", { id: res.data._id });
    });
    socket.on("privateMessage", (data) =>
      console.log("private message received!", data)
    );
  } catch (err) {
    throw new Error(err);
  }
}
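For completeness, here is a minimal sketch of what the server side could look like so that identifyUser and privateMessage are handled per connected socket; the event names mirror the question, and authorization checks are left out.
// Sketch only: register handlers on each socket inside the connection callback.
const connectedUsers = {};

const socketManager = (io) => {
  io.on("connection", (socket) => {
    socket.on("identifyUser", (user) => {
      connectedUsers[user.id] = socket.id; // map MongoDB _id -> socket id
    });

    socket.on("privateMessage", (data) => {
      // emit a named event so the client listener above can pick it up
      io.to(connectedUsers[data.recipientId]).emit("privateMessage", data.message);
      io.to(connectedUsers[data.senderId]).emit("privateMessage", data.message);
    });

    socket.on("disconnect", () => console.log("user disconnected!"));
  });
};

module.exports = socketManager;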

How to send MQTT "message" event back to REST body?

I'm currently having problems figuring out how to get my MQTT message event back into my REST API response, which is written in NodeJS. My current setup is App -> NodeJS REST API -> MQTT broker inside RPi 3.
This is my MQTTHandler.js class where I have put all my reusable MQTT functions
const mqtt = require('mqtt')

class MQTTHandler {
  constructor (host) {
    this.client = null
    this.host = host
  }

  connect () {
    this.client = mqtt.connect(this.host)

    this.client.on('error', function (err) {
      console.log(err)
      this.client.end()
    })

    this.client.on('connect', function () {
      console.log('MQTT client connected...')
    })

    // I need this to send message back to app.js
    this.client.on('message', function (topic, message) {
      if (!message.toString()) message = 'null'
      console.log(JSON.parse(message.toString()))
    })

    this.client.on('close', function () {
      console.log('MQTT client disconnected...')
    })
  }

  subscribeTopic (topic) {
    this.client.subscribe(topic)
  }

  unsubscribeTopic (topic) {
    this.client.unsubscribe(topic)
  }

  sendMessage (topic, message) {
    this.client.publish(topic, message)
  }
}

module.exports = MQTTHandler
And below is a short snippet of my app.js
const MQTTHandler = require('./mqtt.handler')
...

var mqttClient = new MQTTHandler('mqtt://127.0.0.1')
mqttClient.connect()

app.get('/hello', function (req, res) {
  mqttClient.subscribeTopic('topic')
  mqttClient.sendMessage('topic', 'hello world')

  // I need to return the MQTT message event here
  // res.json(<mqtt message here>)
  res.end()
})
I have already tried using NodeJS' event emitter but it doesn't seem to work. Any help or suggestions would be much appreciated, thank you!
You are trying to mix a synchronous protocol (HTTP) with an asynchronous protocol (MQTT). These two paradigms don't mix easily.
When you publish an MQTT message you have no idea how many clients may be subscribed to that topic, it could be zero, it could be many. There is also no guarantee that any of them will send a reply so you need to include a timeout. (You also need to include a request id in the payload so you can coordinate any response with the request as you can't say anything about what order responses may come in.)
Your example code uses only one topic; this is very bad because you will end up needing to filter request messages out from response messages. It is better to use two different topics (MQTT v5 even has a message header to specify the topic the response should be sent on).
Having said all that, it is possible to build something that will work (I will use request and reply topics).
var inflightRequests = {};

// Interval to clear out requests still waiting for a response after 3 seconds.
var timer = setInterval(function() {
  var now = Date.now();
  var keys = Object.keys(inflightRequests);
  for (var key of keys) {
    var waiting = inflightRequests[key];
    var diff = now - waiting.timestamp;
    // 3 second timeout
    if (diff > 3000) {
      waiting.res.status(408).send({});
      delete inflightRequests[key];
    }
  }
}, 500);

// 'message' handler replies to the matching HTTP request.
// (Assumes client.subscribe('reply') has been called elsewhere.)
client.on('message', function(topic, msg) {
  if (topic === 'reply') {
    var payload = JSON.parse(msg);
    var waiting = inflightRequests[payload.requestId];
    if (waiting) {
      waiting.res.send(payload.body);
      delete inflightRequests[payload.requestId];
    } else {
      // response arrived too late
    }
  }
});

// HTTP route handler.
app.get('/hello', function(req, res) {
  // Using the timestamp as request id, as we don't have anything better in this example.
  var reqId = Date.now();
  var waiting = {
    timestamp: reqId,
    res: res
  };
  inflightRequests[reqId] = waiting;

  var message = {
    requestId: reqId,
    payload: 'hello world'
  };
  client.publish('request', JSON.stringify(message));
});
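The snippet above assumes the MQTT client has already connected and subscribed to the reply topic; a minimal sketch of that setup (broker URL taken from the question) would be:
const mqtt = require('mqtt');
const client = mqtt.connect('mqtt://127.0.0.1');

client.on('connect', function () {
  // without this subscription the 'message' handler above never fires
  client.subscribe('reply');
});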

How to manage Postgres connection in concurrent AWS lambda function?

Does anybody have experience building concurrent AWS Lambda functions with Postgres?
I have to build a Lambda cron that will ingest thousands of invoices into a Postgres database, and I have to call the ingestion Lambda function concurrently for each invoice. The problem is that, because it is concurrent, each instance of the ingestion function creates its own connection to the database. That means that if I have 1000 invoices to ingest, each invoice invokes a Lambda function, which creates 1000 database connections. This exhausts the maximum number of connections Postgres can handle, and some instances of the invoked Lambda function return an error saying that there are no more connections available.
Any tips you can give how to handle this problem?
Here are some snippets of my code:
ingestInvoiceList.js
var AWS = require('aws-sdk');
var sftp = require('ssh2-sftp-client');

var lambda = new AWS.Lambda();

exports.handler = async (event) => {
  ...
  let folder_contents;
  try {
    // fetch list of Zip format invoices
    folder_contents = await sftp.list(client_folder);
  } catch (err) {
    console.log(`[${client}]: ${err.toString()}`);
    throw new Error(`[${client}]: ${err.toString()}`);
  }

  let invoiceCount = 0;
  let funcName = 'ingestInvoice';

  for (let item of folder_contents) {
    if (item.type === '-') {
      let payload = JSON.stringify({
        invoice: item.name
      });
      let params = {
        FunctionName: funcName,
        Payload: payload,
        InvocationType: 'Event'
      };

      // invoke ingestInvoice concurrently
      let result = await new Promise((resolve) => {
        lambda.invoke(params, (err, data) => {
          if (err) resolve(err);
          else resolve(data);
        });
      });

      console.log('result: ', result);
      invoiceCount++;
    }
  }
  ...
}
ingestInvoice.js
var AWS = require('aws-sdk');
var sftp = require('ssh2-sftp-client');
var fs = require('fs');
var JSZip = require('jszip');
var DBClient = require('./db.js');

var lambda = new AWS.Lambda();

exports.handler = async (event) => {
  ...
  let invoice = event.invoice;
  let client = 'client name';

  let db = new DBClient();

  try {
    console.log(`[${client}]: Extracting documents from ${invoice}`);

    try {
      // get zip file from sftp server
      await sftp.fastGet(invoice, '/tmp/tmp.zip', {});
    } catch (err) {
      throw err;
    }

    let zip;
    try {
      // extract the zip file...
      zip = await new Promise((resolve, reject) => {
        fs.readFile("/tmp/tmp.zip", async function (err, data) {
          if (err) return reject(err);

          let unzippedData;
          try {
            unzippedData = await JSZip.loadAsync(data);
          } catch (err) {
            return reject(err);
          }

          return resolve(unzippedData);
        });
      });
    } catch (err) {
      throw err;
    }

    let unibillRegEx = /unibill.+\.txt/g;
    let files = [];

    zip.forEach(async (path, entry) => {
      if (unibillRegEx.exec(entry.name)) {
        files['unibillObj'] = entry;
      } else {
        files['pdfObj'] = entry;
      }
    });

    // await db.getClient().connect();
    await db.setSchema(client);
    console.log('Schema has been set.');

    let unibillStr = await files.unibillObj.async('string');
    console.log('ingesting ', files.unibillObj.name);

    // Do ingestion queries here...
    ...

    await uploadInvoiceDocsToS3(client, files);
  } catch (err) {
    console.error(err.stack);
    throw err;
  } finally {
    try {
      // console.log('Disconnecting from database...');
      // await db.endClient();
      console.log('Disconnecting from SFTP...');
      await sftp.end();
    } catch (err) {
      console.log('ERROR: ' + err.toString());
      throw err;
    }
  }
  ...
}
db.js
var { Pool } = require('pg');

module.exports = class DBClient {
  constructor() {
    this.pool = new Pool();
  }

  async setSchema(schema) {
    await this.execQuery(`SET search_path TO ${schema}`);
  }

  async execQuery(sql) {
    return await this.pool.query(sql);
  }
}
Any answer would be appreciated, thank you!
I see two ways to handle this. Ultimately it depends on how fast you want to process this data.
Change the concurrency setting for your Lambda to a "Reserved Concurrency". This will allow you to limit the number of concurrent Lambdas running (see this link for more details).
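If you prefer to set this from code rather than the console, a hedged sketch using the AWS SDK might look like the following; the function name and limit are just examples.
// Sketch only: caps 'ingestInvoice' at 10 concurrent executions.
var AWS = require('aws-sdk');
var lambdaApi = new AWS.Lambda();

lambdaApi.putFunctionConcurrency({
  FunctionName: 'ingestInvoice',
  ReservedConcurrentExecutions: 10
}).promise().then(() => console.log('Reserved concurrency set.'));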
Change your code to queue the work in an SQS queue. From there you would create another Lambda, triggered by the queue, to process it as needed (a sketch of the enqueue side follows below). That Lambda could decide how much to pull off the queue at a time, and it too would likely need its concurrency limited. But you could tune it to, for example, run for the maximum of 15 minutes, which may be enough to empty the queue without killing the DB. Or, if you had, say, a max concurrency of 100, you would process quickly without killing the DB.
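A minimal sketch of what the enqueue side of option 2 might look like in place of the direct lambda.invoke call; the queue URL environment variable is an assumption, not something from the question.
// Hypothetical sketch: push each invoice onto SQS instead of invoking ingestInvoice directly.
var AWS = require('aws-sdk');
var sqs = new AWS.SQS();

async function enqueueInvoice(invoiceName) {
  await sqs.sendMessage({
    QueueUrl: process.env.INVOICE_QUEUE_URL, // assumed env var pointing at the queue
    MessageBody: JSON.stringify({ invoice: invoiceName })
  }).promise();
}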
First, you have to initialize your connection outside the handler, so that each time your warm Lambda is executed it won't open a new one:
const db = new DBClient();

exports.handler = async (event) => {
  ...
  await db.query(...)
  ...
}
If it is node-pg, there is a package that keeps track of all the idle connections, kills them if necessary, and retries in case of an error such as "sorry, too many clients already":
https://github.com/MatteoGioioso/serverless-pg
Any other custom implemented retry mechanism with backoff will work as well.
There is also one for MySQL: https://github.com/jeremydaly/serverless-mysql
These days a good solution to consider for this problem, on AWS, is RDS Proxy, which acts as a transparent proxy between your lambda(s) and database:
Amazon RDS Proxy allows applications to pool and share connections established with the database, improving database efficiency, application scalability, and security.
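With RDS Proxy the application code barely changes; a rough sketch (endpoint and credentials via environment variables are assumptions) is simply to point the pg Pool at the proxy endpoint.
// Sketch only: the proxy multiplexes many Lambda containers onto a small number
// of real Postgres connections, so keep each container's own pool small.
const { Pool } = require('pg');

const pool = new Pool({
  host: process.env.RDS_PROXY_ENDPOINT, // assumed, e.g. my-proxy.proxy-xxxx.us-east-1.rds.amazonaws.com
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  max: 5
});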

Publish event is getting triggered before producer is ready

I have written a node module to connect to Kafka.
kafka-connect.js
var kafka = require('kafka-node');

var Producer = kafka.Producer,
  client = new kafka.Client(),
  producer = new Producer(client);

module.exports = {
  producer
};
KafkaService.js
const { producer } = require('./kafka-connect');

producer.on('error', function (err) {
  console.log('Producer is in error state');
  console.log(err);
});

producer.on('ready', function () {
  console.log('Producer is ready');
});

const KafkaService = {
  sendRecord: (kafkaTopic, data, callback = (err, data) => console.log(err)) => {
    var sendingData = {};
    sendingData.event_data = JSON.stringify(data);
    sendingData.event_type = 6;

    const record = [
      {
        topic: kafkaTopic,
        messages: sendingData,
        partition: 0
      }
    ];

    producer.send(record, callback);
  }
};

module.exports = {
  KafkaService
};
Now I am using these two to publish data to Kafka. Following is the code to do so:
const {KafkaService} = require('../kafka/KafkaService');
const {newOrder} = require('../objs/newOrderEvent');
KafkaService.sendRecord("incentive_order_data", newOrder);
But running this file gives an error:
{ BrokerNotAvailableError: Broker not available
at new BrokerNotAvailableError (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/errors/BrokerNotAvailableError.js:11:9)
at Client.loadMetadataForTopics (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:389:15)
at Client.send (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:562:10)
at /Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:241:10
at /Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:473:16
at iteratorCallback (/Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:1064:13)
at /Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:969:16
at buildRequest (/Users/rajat.mishra/self/nodekafka/node_modules/kafka-node/lib/client.js:257:24)
at /Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:3110:16
at eachOfArrayLike (/Users/rajat.mishra/self/nodekafka/node_modules/async/dist/async.js:1069:9) message: 'Broker not available' }
Producer is ready
Apparently, the publish method is getting called before the producer is ready. I am not able to come up with a solution to this. One way is to bring Promises into the picture, but that is just my hypothesis; the exact method might be different.
You're not waiting for the producer to be ready.
You'll need to do this
producer.on('ready', function () {
  console.log('Producer is ready');
  // send data here
});
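Along the lines of the Promise idea mentioned in the question, one rough sketch is to wrap the ready event once and await it before every send; the sendRecord function below is illustrative, not part of kafka-node.
const { producer } = require('./kafka-connect');

// Resolves once the producer is ready, rejects on a startup error.
const producerReady = new Promise((resolve, reject) => {
  producer.on('ready', resolve);
  producer.on('error', reject);
});

async function sendRecord(kafkaTopic, data) {
  await producerReady; // wait for broker metadata before sending
  const record = [{ topic: kafkaTopic, messages: JSON.stringify(data), partition: 0 }];
  return new Promise((resolve, reject) => {
    producer.send(record, (err, result) => (err ? reject(err) : resolve(result)));
  });
}

module.exports = { sendRecord };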

How to 'pipe' oracle-db data from 'on data' event

I've been using node-oracledb for a few months and I've managed to achieve what I have needed to so far.
I'm currently working on a search app that could potentially return about 2m rows of data from a single call. To ensure I don't get a disconnect between the browser and the server, I thought I would try queryStream so that there is a constant flow of data back to the client.
I implemented the queryStream example as-is, and this worked fine for a few hundred thousand rows. However, when the number of returned rows is greater than one million, Node runs out of memory. By logging and watching both client and server log events, I can see that the client is way behind the server in terms of rows sent and received. So it looks like Node is falling over because it's buffering so much data.
It's worth noting that at this point, my selectstream implementation is within a req/res function called via Express.
To return the data, I do something like....
stream.on('data', function (data) {
  rowcount++;
  let obj = new myObjectConstructor(data);
  res.write(JSON.stringify(obj.getJson()));
});
I've been reading about how streams and pipe can help with flow, so what I'd like is to be able to pipe the results from the query to a) help with flow and b) pipe the results to other functions before sending them back to the client.
E.g.
function getData(req, res) {
  var stream = myQueryStream(connection, query);

  stream
    .pipe(toSomeOtherFunction)
    .pipe(yetAnotherFunction)
    .pipe(res);
}
I've spent a few hours trying to find a solution or example that allows me to pipe results, but I'm stuck and need some help.
Apologies if I'm missing something obvious, but I'm still getting to grips with Node and especially streams.
Thanks in advance.
There's a bit of an impedance mismatch here. The queryStream API emits rows of JavaScript objects, but what you want to stream to the client is a JSON array. You basically have to add an open bracket to the beginning, a comma after each row, and a close bracket to the end.
I'll show you how to do this in a controller that uses the driver directly as you have done, instead of using separate database modules as I advocate in this series.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');

    // Write a comma before every row except the first so the output is valid JSON.
    let firstRow = true;

    stream.on('data', (row) => {
      if (!firstRow) {
        res.write(',');
      }
      firstRow = false;
      res.write(JSON.stringify(row));
    });

    stream.on('end', () => {
      res.end(']');
    });

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
Once you get the concepts, you can simplify things a bit with a reusable Transform class which allows you to use pipe in the controller logic:
const oracledb = require('oracledb');
const { Transform } = require('stream');

class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});
    this.push('[');
  }

  _transform (row, encoding, callback) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }

    this._prevRow = row;

    callback(null);
  }

  _flush (done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }

    this.push(']');

    delete this._prevRow;

    done();
  }
}

async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});

    stream.pipe(toJSONArray).pipe(res);

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
Rather than writing your own logic to create a JSON stream, you can use JSONStream to convert an object stream to (stringified) JSON before piping it to its destination (res, process.stdout, etc.). This saves the need to muck around with .on('data', ...) events.
In the example below, I've used pipeline from Node's stream module rather than the .pipe method: the effect is similar (with better error handling, I think). To get objects from oracledb.queryStream, you can specify the option {outFormat: oracledb.OUT_FORMAT_OBJECT} (docs). Then you can make arbitrary modifications to the stream of objects produced. This can be done using a transform stream, made perhaps using through2-map, or, if you need to drop or split rows, through2. Below the stream is sent to process.stdout after being stringified as JSON, but you could equally send it to express's res.
require('dotenv').config() // config from .env file
const JSONStream = require('JSONStream')
const oracledb = require('oracledb')
const { pipeline } = require('stream')
const map = require('through2-map') // see https://www.npmjs.com/package/through2-map

oracledb.getConnection({
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  connectString: process.env.CONNECT_STRING
}).then(connection => {
  pipeline(
    connection.queryStream(`
      select dual.*,'test' as col1 from dual
      union select dual.*, :someboundvalue as col1 from dual
      `
      ,{"someboundvalue":"test5"} // binds
      ,{
        prefetchRows: 150, // for tuning
        fetchArraySize: 150, // for tuning
        outFormat: oracledb.OUT_FORMAT_OBJECT
      }
    )
    ,map.obj((row,index) => {
      row.arbitraryModification = index
      return row
    })
    ,JSONStream.stringify() // false gives ndjson
    ,process.stdout // or send to express's res
    ,(err) => { if(err) console.error(err) }
  )
})
// [
// {"DUMMY":"X","COL1":"test","arbitraryModification":0}
// ,
// {"DUMMY":"X","COL1":"test5","arbitraryModification":1}
// ]
