Google Pub/Sub pull method restarts Express server every 1 min - node.js

I am using Pub/Sub to pull messages when someone buys a subscription from Google Play.
const { PubSub } = require('@google-cloud/pubsub');
const grpc = require('grpc');

// Instantiates a client
const pubSubClient = new PubSub({ grpc });

const pubsub = () => {
  const projectId = process.env.GOOGLE_PUB_SUB_PROJECT_ID; // Your Google Cloud Platform project ID
  const subscriptionName = process.env.GOOGLE_PUB_SUB_SUBSCRIBER_NAME; // Name of our subscription
  const timeout = 60;
  const maxInProgress = 10;
  const subscriberOptions = {
    flowControl: {
      maxMessages: maxInProgress,
    },
  };

  // Get our created subscription
  const subscriptionPub = pubSubClient.subscription(subscriptionName, subscriberOptions);
  console.log(`subscription ${subscriptionPub.name} found.`);

  let messageCount = 0;

  // Create an event handler to handle messages
  const messageHandler = message => {
    console.log(`Received message: ${message.id}`);
    console.log(`Data: ${message.data}`);
    console.log(`Attributes: ${JSON.stringify(message.attributes)}`);
    // todo: you can update your backend here using the purchase token
    messageCount += 1;
    // "Ack" (acknowledge receipt of) the message
    message.ack();
  };

  // Create an event handler to handle errors
  const errorHandler = function (error) {
    console.log(`GOOGLE PUB SUB ERROR: ${error}`);
    throw error;
  };

  // Listen for new messages/errors until timeout is hit
  subscriptionPub.on('message', messageHandler);
  subscriptionPub.on('error', errorHandler);

  setTimeout(() => {
    subscriptionPub.removeListener('message', messageHandler);
    subscriptionPub.removeListener('error', errorHandler);
    console.log(`${messageCount} message(s) received.`);
  }, timeout * 1000);
};

module.exports = pubsub;
The above file is called from main.js, and every minute I receive the log subscription ${subscriptionPub.name} found.
Also, I have commented out the setTimeout code for now, but I want to understand why removeListener is important enough to remove the listener every minute.
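For context: the timeout/removeListener pattern comes from Google's quickstart sample, which is written as a short-lived script. It listens for 60 seconds, detaches the handlers so the process can exit cleanly, and logs the count; it is not something a server needs to repeat every minute. In a long-running Express server you would normally drop the setTimeout and keep the listeners attached for the life of the process. A minimal sketch of that variant, reusing the client and handlers from above:

const { PubSub } = require('@google-cloud/pubsub');

const pubSubClient = new PubSub();

const pubsub = () => {
  const subscriptionName = process.env.GOOGLE_PUB_SUB_SUBSCRIBER_NAME;
  const subscription = pubSubClient.subscription(subscriptionName, {
    flowControl: { maxMessages: 10 },
  });

  subscription.on('message', message => {
    console.log(`Received message: ${message.id}`);
    // todo: update your backend here using the purchase token
    message.ack();
  });

  subscription.on('error', error => {
    // Log instead of throwing: an uncaught throw in this handler can bring
    // the whole server down, which would explain repeated restarts
    console.error(`GOOGLE PUB SUB ERROR: ${error}`);
  });

  // No setTimeout/removeListener: the subscription keeps streaming messages
  // until the process exits or subscription.close() is called.
};

module.exports = pubsub;

Note the error handler in particular: the original code throws inside it, and an uncaught exception there will crash (and, under a process manager, restart) the server whenever the stream errors.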

Related

How to listen to an Azure storage queue with Node.js?

An Azure storage queue does not push messages to us automatically when something is added to the queue; we have to write a custom listener in order to fetch messages from the queue.
import {
  QueueServiceClient,
} from "@azure/storage-queue";

// Create a QueueServiceClient object
const queueServiceClient = QueueServiceClient.fromConnectionString("AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;DefaultEndpointsProtocol=http;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;QueueEndpoint=http://127.0.0.1:10001/devstoreaccount1;TableEndpoint=http://127.0.0.1:10002/devstoreaccount1;");

// Create a new queue
const queueName = "test-local-queue";
const queueClient = queueServiceClient.getQueueClient(queueName);
queueClient.create().then(() => {
  console.log(`Queue "${queueName}" was created successfully`);
});

// Add a message to the queue
const message = "Hello, world!";
queueClient.sendMessage(message).then(() => {
  console.log(`Message "${message}" was added to the queue`);
});

// Set the number of messages to retrieve (up to 32)
const maxMessages = 10;
// Set the visibility timeout (in seconds) for the messages
const visibilityTimeout = 60;

// Receive messages from the queue (note the option is named numberOfMessages)
queueClient
  .receiveMessages({ numberOfMessages: maxMessages, visibilityTimeout })
  .then(response => {
    const messages = response.receivedMessageItems;
    console.log(`Received ${messages.length} messages from the queue`);
    // Process the messages
    messages.forEach(message => {
      console.log(message);
      // Do something with the message...
    });
  })
  .catch(error => {
    console.error(`Failed to receive messages from the queue: ${error}`);
  });
Does Azure Service Bus solve the above-stated problem?
Well, instead of the receiveMessages function, try peekMessages.
By default it peeks at a single message, but you can configure it as you like. Here I have configured it to peek at 3 messages:
const peekedMessages = await queueClient.peekMessages({ numberOfMessages: 3 });
The peekedMessages const contains an array called peekedMessageItems, which holds the peeked messages.
Here I have just iterated over that array and read the messages via the messageText property.
Complete Code:
const { QueueClient, QueueServiceClient } = require("@azure/storage-queue");

const queueServiceClient = QueueServiceClient.fromConnectionString(conn);
const queueClient = queueServiceClient.getQueueClient(qname);

const peekedMessages = await queueClient.peekMessages({ numberOfMessages: 3 });
for (let i = 0; i < peekedMessages.peekedMessageItems.length; i++) {
  // Display the peeked message
  console.log("Peeked message: ", peekedMessages.peekedMessageItems[i].messageText);
}
Also, as the name suggests, this peeks at messages; it does not dequeue them.
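On the original "custom listener" point: since storage queues are pull-based, the usual pattern is a small polling loop that receives messages and deletes each one after processing. A rough sketch, assuming the same queueClient as above (handleMessage is a hypothetical placeholder for your own processing logic):

const POLL_INTERVAL_MS = 5000; // assumption: poll every 5 seconds

async function pollQueue(queueClient) {
  while (true) {
    const response = await queueClient.receiveMessages({
      numberOfMessages: 10,
      visibilityTimeout: 60,
    });
    for (const msg of response.receivedMessageItems) {
      console.log("Processing message:", msg.messageText);
      // await handleMessage(msg.messageText); // your processing logic
      // Delete the message so it is not redelivered after the visibility
      // timeout expires
      await queueClient.deleteMessage(msg.messageId, msg.popReceipt);
    }
    // Pause between polls to avoid hammering the queue
    await new Promise(resolve => setTimeout(resolve, POLL_INTERVAL_MS));
  }
}

Unlike peekMessages, this loop does dequeue: receiveMessages hides the messages for the visibility timeout, and deleteMessage removes them for good.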

GCP-provided code snippets to both subscribe and publish MQTT in the same app don't work

In my Node.js app, I can successfully publish telemetry/state topics or subscribe to config/command topics, but I can't do both at once.
Both Node.js code snippets that appear below are from
https://cloud.google.com/iot/docs/how-tos/mqtt-bridge
The subscribe code is as follows -
// const deviceId = `myDevice`;
// const registryId = `myRegistry`;
// const region = `us-central1`;
// const algorithm = `RS256`;
// const privateKeyFile = `./rsa_private.pem`;
// const serverCertFile = `./roots.pem`;
// const mqttBridgeHostname = `mqtt.googleapis.com`;
// const mqttBridgePort = 8883;
// const messageType = `events`;
// const numMessages = 5;

// The mqttClientId is a unique string that identifies this device. For Google
// Cloud IoT Core, it must be in the format below.
const mqttClientId = `projects/${projectId}/locations/${region}/registries/${registryId}/devices/${deviceId}`;

// With Google Cloud IoT Core, the username field is ignored, however it must be
// non-empty. The password field is used to transmit a JWT to authorize the
// device. The "mqtts" protocol causes the library to connect using SSL, which
// is required for Cloud IoT Core.
const connectionArgs = {
  host: mqttBridgeHostname,
  port: mqttBridgePort,
  clientId: mqttClientId,
  username: 'unused',
  password: createJwt(projectId, privateKeyFile, algorithm),
  protocol: 'mqtts',
  secureProtocol: 'TLSv1_2_method',
  ca: [readFileSync(serverCertFile)],
};

// Create a client, and connect to the Google MQTT bridge.
const iatTime = parseInt(Date.now() / 1000);
const client = mqtt.connect(connectionArgs);

// Subscribe to the /devices/{device-id}/config topic to receive config updates.
// Config updates are recommended to use QoS 1 (at least once delivery)
client.subscribe(`/devices/${deviceId}/config`, {qos: 1});

// Subscribe to the /devices/{device-id}/commands/# topic to receive all
// commands or to the /devices/{device-id}/commands/<subfolder> to just receive
// messages published to a specific commands folder; we recommend you use
// QoS 0 (at most once delivery)
client.subscribe(`/devices/${deviceId}/commands/#`, {qos: 0});

// The MQTT topic that this device will publish data to. The MQTT topic name is
// required to be in the format below. The topic name must end in 'state' to
// publish state and 'events' to publish telemetry. Note that this is not the
// same as the device registry's Cloud Pub/Sub topic.
const mqttTopic = `/devices/${deviceId}/${messageType}`;

client.on('connect', success => {
  console.log('connect');
  if (!success) {
    console.log('Client not connected...');
  } else if (!publishChainInProgress) {
    publishAsync(mqttTopic, client, iatTime, 1, numMessages, connectionArgs);
  }
});

client.on('close', () => {
  console.log('close');
  shouldBackoff = true;
});

client.on('error', err => {
  console.log('error', err);
});

client.on('message', (topic, message) => {
  let messageStr = 'Message received: ';
  if (topic === `/devices/${deviceId}/config`) {
    messageStr = 'Config message received: ';
  } else if (topic.startsWith(`/devices/${deviceId}/commands`)) {
    messageStr = 'Command message received: ';
  }
  messageStr += Buffer.from(message, 'base64').toString('ascii');
  console.log(messageStr);
});

client.on('packetsend', () => {
  // Note: logging packet send is very verbose
});

// Once all of the messages have been published, the connection to Google Cloud
// IoT will be closed and the process will exit. See the publishAsync method.
and the publish code is -
const publishAsync = (
  mqttTopic,
  client,
  iatTime,
  messagesSent,
  numMessages,
  connectionArgs
) => {
  // If we have published enough messages or backed off too many times, stop.
  if (messagesSent > numMessages || backoffTime >= MAXIMUM_BACKOFF_TIME) {
    if (backoffTime >= MAXIMUM_BACKOFF_TIME) {
      console.log('Backoff time is too high. Giving up.');
    }
    console.log('Closing connection to MQTT. Goodbye!');
    client.end();
    publishChainInProgress = false;
    return;
  }

  // Publish and schedule the next publish.
  publishChainInProgress = true;
  let publishDelayMs = 0;
  if (shouldBackoff) {
    publishDelayMs = 1000 * (backoffTime + Math.random());
    backoffTime *= 2;
    console.log(`Backing off for ${publishDelayMs}ms before publishing.`);
  }

  setTimeout(() => {
    const payload = `${argv.registryId}/${argv.deviceId}-payload-${messagesSent}`;

    // Publish "payload" to the MQTT topic. qos=1 means at least once delivery.
    // Cloud IoT Core also supports qos=0 for at most once delivery.
    console.log('Publishing message:', payload);
    client.publish(mqttTopic, payload, {qos: 1}, err => {
      if (!err) {
        shouldBackoff = false;
        backoffTime = MINIMUM_BACKOFF_TIME;
      }
    });

    const schedulePublishDelayMs = argv.messageType === 'events' ? 1000 : 2000;
    setTimeout(() => {
      const secsFromIssue = parseInt(Date.now() / 1000) - iatTime;
      if (secsFromIssue > argv.tokenExpMins * 60) {
        iatTime = parseInt(Date.now() / 1000);
        console.log(`\tRefreshing token after ${secsFromIssue} seconds.`);
        client.end();
        connectionArgs.password = createJwt(
          argv.projectId,
          argv.privateKeyFile,
          argv.algorithm
        );
        connectionArgs.protocolId = 'MQTT';
        connectionArgs.protocolVersion = 4;
        connectionArgs.clean = true;
        client = mqtt.connect(connectionArgs);

        client.on('connect', success => {
          console.log('connect');
          if (!success) {
            console.log('Client not connected...');
          } else if (!publishChainInProgress) {
            publishAsync(
              mqttTopic,
              client,
              iatTime,
              messagesSent,
              numMessages,
              connectionArgs
            );
          }
        });

        client.on('close', () => {
          console.log('close');
          shouldBackoff = true;
        });

        client.on('error', err => {
          console.log('error', err);
        });

        client.on('message', (topic, message) => {
          console.log(
            'message received: ',
            Buffer.from(message, 'base64').toString('ascii')
          );
        });

        client.on('packetsend', () => {
          // Note: logging packet send is very verbose
        });
      }

      publishAsync(
        mqttTopic,
        client,
        iatTime,
        messagesSent + 1,
        numMessages,
        connectionArgs
      );
    }, schedulePublishDelayMs);
  }, publishDelayMs);
};
I am wondering if anyone has gotten their Node.js app to both successfully publish and subscribe with Google Cloud. If so, what might I be missing?
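One detail from the snippets themselves: the closing comment in the subscribe code says the connection is closed and the process exits once all messages are published, and publishAsync indeed calls client.end() when messagesSent exceeds numMessages. So as written, the sample stops receiving config/command messages the moment the publish chain finishes, which can look like "publish or subscribe, but not both". A minimal sketch of the early-exit branch with the shutdown removed, assuming you want the device to keep listening after publishing (the JWT still expires after tokenExpMins, so the token-refresh logic above is still needed for a long-lived connection):

// Replaces the early-return branch at the top of publishAsync
if (messagesSent > numMessages || backoffTime >= MAXIMUM_BACKOFF_TIME) {
  if (backoffTime >= MAXIMUM_BACKOFF_TIME) {
    console.log('Backoff time is too high. Giving up.');
  }
  console.log('Done publishing; keeping the connection open to listen.');
  // client.end(); // removed so 'message' events keep arriving
  publishChainInProgress = false;
  return;
}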

Why can my Azure Web PubSub subscriber not receive from my publisher

I am trying to use groups within Azure Web PubSub, but it appears that either my publisher and subscriber are not joining the same group, or my serverless functions are not handling the broadcast after the message gets published. The service works if I publish without groups, but once I add groups I can see messages hitting the Live Trace tool on Azure, yet nothing is sent out afterwards. I suspect I am missing something in my Azure functions, but I am not sure what that would be.
Publisher code:
const hub = "simplechat";
let service = new WebPubSubServiceClient("Endpoint=endpointURL", hub);
// by default it uses `application/json`, specify contentType as `text/plain` if you want plain-text
const group = service.group("myGroup");
group.sendToAll('Hello World', { contentType: "text/plain" });
Subscriber code:
const WebSocket = require('ws');
const { WebPubSubServiceClient } = require('@azure/web-pubsub');
var printer = require("printer/lib");
var util = require('util');

async function main() {
  const hub = "simplechat";
  let service = new WebPubSubServiceClient("Endpoint=endpointURL", hub);
  const group = service.group("myGroup");
  let token = await service.getClientAccessToken();
  let ws = new WebSocket(token.url, 'json.webpubsub.azure.v1');
  ws.on('open', () => console.log('connected'));
  ws.on('message', data => {
    console.log('Message received: %s', data);
  });
}
main();
I think you missed the part of joining your subscriber to the group.
The simplest way is to give the connection a user name and call addUser to add the connection to the group once it is connected:
async function main() {
  const hub = "simplechat";
  let service = new WebPubSubServiceClient("Endpoint=endpointURL", hub);
  const group = service.group("myGroup");
  let token = await service.getClientAccessToken({ userId: "user1" });
  // With this approach, the WebSocket does not actually need the
  // 'json.webpubsub.azure.v1' subprotocol; a plain WebSocket connection also works
  let ws = new WebSocket(token.url, 'json.webpubsub.azure.v1');
  ws.on('open', () => {
    console.log('connected');
    group.addUser("user1");
  });
  ws.on('message', data => {
    console.log('Message received: %s', data);
  });
}
Or you can wait until the Connected response is received, take the connectionId of the connection, and call addConnection to add the subscriber to the group.
Another way, since you are already using the json.webpubsub.azure.v1 subprotocol, is for your subscriber to send a joinGroup request:
async function main() {
  const hub = "simplechat";
  let service = new WebPubSubServiceClient("Endpoint=endpointURL", hub);
  // Make sure you set the joinLeaveGroup role for the group
  let token = await service.getClientAccessToken({
    roles: ['webpubsub.joinLeaveGroup.myGroup']
  });
  let ws = new WebSocket(token.url, 'json.webpubsub.azure.v1');
  let ackId = 0;
  ws.on('open', () => {
    console.log('connected');
    ws.send(JSON.stringify({
      type: 'joinGroup',
      group: 'myGroup',
      ackId: ++ackId,
    }));
  });
  ws.on('message', data => {
    console.log('Message received: %s', data);
  });
}
When your subscriber receives the AckMessage for this joinGroup action, your subscriber successfully joins the group.
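For reference, a small sketch of telling that ack apart from ordinary group traffic in the subscriber's message handler; the type, ackId, success, and group fields are part of the json.webpubsub.azure.v1 protocol, and ackId === 1 assumes the first joinGroup request sent above:

ws.on('message', data => {
  const msg = JSON.parse(data);
  if (msg.type === 'ack' && msg.ackId === 1) {
    // Response to the joinGroup request sent on 'open'
    console.log(msg.success ? 'Joined myGroup' : 'joinGroup failed', msg.error || '');
  } else if (msg.type === 'message' && msg.group === 'myGroup') {
    // A message broadcast to the group by the publisher
    console.log('Group message received:', msg.data);
  }
});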

RabbitMQ and Node.js: delete queue after message has been received

I'm new to RabbitMQ and trying to figure out how to delete a queue after a message has been received. Any help appreciated. Here is the consumer script:
const amqp = require("amqplib");

let result = connect();

async function connect() {
  try {
    const amqpServer = "amqp://localhost";
    const connection = await amqp.connect(amqpServer);
    const channel = await connection.createChannel();
    await channel.assertQueue("jobs");
    channel.consume("jobs", message => {
      const input = JSON.parse(message.content.toString());
      console.log(`Received job with input ${input}`);
    });
    console.log("Waiting for messages...");
  } catch (ex) {
    console.error(ex);
  }
}
According to the assertQueue docs, you can pass an autoDelete option when creating the queue, which will clean it up once the number of consumers drops to 0.
const amqp = require("amqplib");

let result = connect();

async function connect() {
  try {
    const amqpServer = "amqp://localhost";
    const connection = await amqp.connect(amqpServer);
    const channel = await connection.createChannel();
    await channel.assertQueue("jobs", { autoDelete: true });
    channel.consume("jobs", message => {
      const input = JSON.parse(message.content.toString());
      console.log(`Received job with input ${input}`);
    });
    console.log("Waiting for messages...");
  } catch (ex) {
    console.error(ex);
  }
}
You could then call cancel on the channel to stop consuming messages. Note that cancel takes the consumerTag returned by consume, not the queue name:
const { consumerTag } = await channel.consume("jobs", message => { /* ... */ });
await channel.cancel(consumerTag);
Lastly, you could forcefully delete the queue with deleteQueue, although this might have some strange side effects if you do it inside the consume callback.
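Putting those pieces together, a minimal sketch of the original goal (tear the queue down once a message has been handled), assuming the same local broker and jobs queue as above:

const amqp = require("amqplib");

async function consumeOnce() {
  const connection = await amqp.connect("amqp://localhost");
  const channel = await connection.createChannel();
  await channel.assertQueue("jobs");
  const { consumerTag } = await channel.consume("jobs", async (message) => {
    if (!message) return; // null signals the consumer was cancelled
    const input = JSON.parse(message.content.toString());
    console.log(`Received job with input ${input}`);
    channel.ack(message);               // acknowledge before tearing down
    await channel.cancel(consumerTag);  // stop consuming further messages
    await channel.deleteQueue("jobs");  // remove the queue itself
    await connection.close();
  });
}

consumeOnce().catch(console.error);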

GCP Pub/Sub batch publishing triggering 3 to 4x more messages than the actual number of messages

I am trying to publish messages via the Google Pub/Sub batch publishing feature. The batch publishing code looks like this:
const gRPC = require("grpc");
const { PubSub } = require("#google-cloud/pubsub");
const createPublishEventsInBatch = (topic) => {
const pubSub = new PubSub({ gRPC });
const batchPublisher = pubSub.topic(topic, {
batching: {
maxMessages: 100,
maxMilliseconds: 1000,
},
});
return async (logTrace, eventData) => {
console.log("Publishing batch events for", eventData);
try {
await batchPublisher.publish(Buffer.from(JSON.stringify(eventData)));
} catch (err) {
console.error("Error in publishing", err);
}
};
};
And this batch publisher is called from a service like this:
const publishEventsInBatch1 = publishEventFactory.createPublishEventsInBatch(
  "topicName1"
);
const publishEventsInBatch2 = publishEventFactory.createPublishEventsInBatch(
  "topicName2"
);

events.forEach((event) => {
  publishEventsInBatch1(logTrace, event);
  publishEventsInBatch2(logTrace, event);
});
I am using a push subscription to receive the messages, with the following settings:
Acknowledgement deadline: 600 seconds
Retry policy: Retry immediately
The issue I am facing: if the total number of events/messages is 250k, the push subscription should receive at most 250k messages per topic. But in my case I am getting 3-4M records on the subscription, and the count varies from run to run.
My Fastify and Pub/Sub versions are:
fastify: 3.10.1
@google-cloud/pubsub: 2.12.0
Here is the subscription code:
fastify.post("/subscription", async (req, reply) => {
const message = req.body.message;
let event;
let data;
let entityType;
try {
let payload = Buffer.from(message.data, "base64").toString();
event = JSON.parse(payload);
data = event.data;
entityType = event.entityType;
if (entityType === "EVENT") {
if (event.version === "1.0") {
console.log("Processing subscription");
await processMessage(fastify, data);
} else {
console.error("Unknown version of stock event, being ignored");
}
} else {
console.error("Ignore event");
}
reply.code(200).send();
} catch (err) {
if (err.status === 409) {
console.error("Ignoring stock update due to 409: Conflict");
reply.code(200).send();
} else {
console.error("Error while processing event from subscription");
reply.code(500).send();
}
}
});
Can anyone point out where I am going wrong? It's a simple Fastify application. Am I making a mistake in the code or in the configuration?
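One thing that stands out in the publishing service, separate from the push-subscription settings: the forEach fires the async publish functions without awaiting them, so the caller never sees publish errors or completion. A sketch of the same loop with the promises awaited, assuming the same factory-created functions as above:

// Await all publishes so failures surface here instead of being lost
await Promise.all(
  events.flatMap((event) => [
    publishEventsInBatch1(logTrace, event),
    publishEventsInBatch2(logTrace, event),
  ])
);

On the duplication itself: Pub/Sub delivery is at-least-once, and a push subscription redelivers any message whose HTTP response is not a success within the acknowledgement deadline, so slow or non-2xx responses from the /subscription endpoint combined with "Retry immediately" will multiply deliveries; the handler generally has to be idempotent.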
