DNS Round Robin failover doesn't work with mqtt.js - node.js

I have a NodeJS Application that uses mqtt.js to connect to an emqx cluster.
The MQTT cluster contains of 2 nodes which I try to provide failover for using DNS Round Robin. So I have 1 A-Record (let's say mqtt.example.com) which points to 2 IPs (IP1 and IP2). When both nodes are online also my NodeJS application connects fine and subscribes to the selected topics.
Now on the MQTT nodes I can see what node the application is connected to. When I now stop the node that the application is connected to I expect that it would (sooner or later) fail over to the second active node.
I tested also with Loraserver (Which connected to MQTT) as well as the Node Red implementation of MQTT and both immediately connect to the active node when I stop the node they're connected to.
However my NodeJS application with mqtt.js keeps trying to connect to the node which I just stopped and doesn't try to connect to the active one.
Scenario explanation:
I have 2 Active nodes, IP1 and IP2
I connect Loraserver, Node Red and NodeJS to mqtt.example.com
All 3 applications connect to IP1
I stop IP1 node by shutting down the emqx process
Loraserver and Node Red will immediately connect to IP2 automatically
NodeJS with mqtt.js however keeps showing me error message
Error: connect ECONNREFUSED
with IP1 and doesn't fail over to IP2 (Kept it running for about 20 minutes and nothing happened. DNS lease time is set to 5 minutes if that's of any relavance)
How can I achiever failover using DNS Round Robing for an application using mqtt.js?
Thanks for any help
EDIT: As requested, added the testing code:
const mqtt = require('mqtt');
const tls = require('tls');
const MQTTTOPIC = 'test/upload';
const BROKER_URL = 'tls://mqtt.example.com';
const BROKER_PORT = '8883';
const MQTTUSER = 'username';
const MQTTPASS = 'password';
var mqttoptions = {
clientId: MQTTUSER,
port: BROKER_PORT,
keepalive: 60,
username: MQTTUSER,
password: MQTTPASS
};
var client = mqtt.connect(BROKER_URL, mqttoptions);
client.on('connect', mqtt_connect);
client.on('reconnect', mqtt_reconnect);
client.on('error', mqtt_error);
client.on('message', mqtt_messsageReceived);
client.on('close', mqtt_close);
function mqtt_connect() {
console.log("Connecting MQTT");
client.subscribe(MQTTTOPIC, mqtt_subscribe);
}
function mqtt_subscribe(err, granted) {
console.log("Subscribed to " + MQTTTOPIC);
if (err) { console.error(err); }
}
function mqtt_reconnect(err) {
console.log("Reconnect MQTT");
if (err) { console.error(err); }
client = mqtt.connect(BROKER_URL, mqttoptions);
}
function mqtt_error(err) {
console.error("MQTT Error!");
if (err) { console.error(err); }
}
function after_publish() {
//do nothing
}
function mqtt_close() {
console.warn("Close MQTT");
}
function mqtt_messsageReceived(topic, message, packet) {
console.log("Message: " + message + " --- Received on Topic " + topic);
}
EDIT 2:
In case it matters, I'm running the code with pm2
EDIT 3:
Plus the complete log output:
17|LOCALTE | Connecting MQTT
17|LOCALTE | Subscribed to test/upload
17|LOCALTE | Close MQTT
17|LOCALTE | Reconnect MQTT
17|LOCALTE | Error: connect ECONNREFUSED IP1:8883
17|LOCALTE | at Object.exports._errnoException (util.js:1034:11)
17|LOCALTE | at exports._exceptionWithHostPort (util.js:1057:20)
17|LOCALTE | at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1096:14)
17|LOCALTE | MQTT Error!
17|LOCALTE | { Error: connect ECONNREFUSED IP1:8883
17|LOCALTE | at Object.exports._errnoException (util.js:1034:11)
17|LOCALTE | at exports._exceptionWithHostPort (util.js:1057:20)
17|LOCALTE | at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1096:14)
17|LOCALTE | code: 'ECONNREFUSED',
17|LOCALTE | errno: 'ECONNREFUSED',
17|LOCALTE | syscall: 'connect',
17|LOCALTE | address: 'IP1',
17|LOCALTE | port: 8883 }
17|LOCALTE | Close MQTT
17|LOCALTE | Reconnect MQTT
[...]

Firstly you should not be calling connect() in the on.('reconnect') callback. The library will handle all of this for you.
...
function mqtt_reconnect(err) {
console.log("Reconnect MQTT");
if (err) { console.error(err); }
}
...
If it still doesn't work once you've removed this, as a work around you can list a group of servers that the library will round robin on it's own.
var mqttoptions = {
clientId: MQTTUSER,
port: BROKER_PORT,
keepalive: 60,
username: MQTTUSER,
password: MQTTPASS
servers:[
{
protocol: 'mqtts',
host: 'ip-address-1',
port: 8883
},
{
protocol: 'mqtts',
host: 'ip-address-2',
port: 8883
}
]
}

Related

How to Use Node SMPP to Connect to InetLab SMPP Server

I am trying to build an SMS an SMS Client using NodeJS which should connect to ``Inetlab ```` SMPP Server to send Short messages. I downloaded Inetlab SMPP client and Server and run them both. When I try to connect and send an SMS from the client via port 7777 (Not that it matters), The connection is bound successfully and the message is sent across.
My problem is when I try connect to the same local SMPP server via a client that I have bult with NodeJS using the node-smpp library, the connection fails even though I am using the same localhost and port 7777.
Below is my connection code:
module.exports = class{
constructor(){
this.session = null
this.smppConfig = {
url:"smpp://localhost:7777",
auto_enquire_link_period:10000,
debug:true
}
}
StartSmppSession= ()=>{
return new Promise( async(resolve, reject)=>{
try{
console.log(this.smppConfig)
this.session = smpp.connect(this.smppConfig,()=>{
this.session.bind_transceiver({
system_id:process.env.SMPP_SYSTEM_ID,
password:process.env.SMPP_PASSWORD
},(pdu)=>{
if(pdu.command_status === 0){
resolve({message:"Connection bound successfully!"})
}else{
reject({message:"Failed to bind!",pdu})
}
})
})
}catch(err){
//reject(err.message)
}
})
}
sendSMS = (payload)=>{
return new Promise(async (resolve, reject)=>{
try{
//payload = {destination_addr:"phone_number", "short_message":"The message here"}
this.session.submit_sm(payload, async (pdu)=>{
pdu.command_status === 0 ? resolve("Message successfully sent!") : reject("Failed to send SMS!")
})
}catch(err){
reject(err.message)
}
})
}
}
When I invoke the StartSmppSession() function in my controller, I get the following error log:
2022-12-06T09:30:40.973Z - cli - 315644 - socket.error - connect ECONNREFUSED ::1:7777 - {"errno":-4078,"code":"ECONNREFUSED","syscall":"connect","address":"::1","port":7777}
node:events:491
throw er; // Unhandled 'error' event
^
Error: connect ECONNREFUSED ::1:7777
at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1284:16)
Emitted 'error' event on Session instance at:
at Socket.<anonymous> (C:\Users\c.mwale\Desktop\NRB API Test\Notifications Module\SMSGateway\src\SMS_Outgoing\node_modules\smpp\lib\smpp.js:119:8)
at Socket.emit (node:events:513:28)
at emitErrorNT (node:internal/streams/destroy:151:8)
at emitErrorCloseNT (node:internal/streams/destroy:116:3)
at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
errno: -4078,
code: 'ECONNREFUSED',
syscall: 'connect',
address: '::1',
port: 7777
}
I understand that there is a SMPP library for .NET documented on the inetlab website, but I am of the view this the intent of using a C# example was not to dictate the framework nor language of implementation.

Connect to a remote server mongoDB in Docker via ssh in nodeJS using tunnel-ssh, AuthenticationFailed

I am trying to connect to a remote server MongoDB in Docker through ssh in Nodejs as below :
sshConfig = {
username: 'username',
password: 'password',
host: 'host',
port: 22,
dstHost: '172.17.0.3',
dstPort: 27017,
localPort: 5000
};
const uri = 'mongodb://admin:password#localhost:27017/admin';
tunnel(sshConfig, async error => {
if (error) {
throw new Error(`SSH connection error: ${error}`);
}
const client = new MongoClient(uri);
async function run() {
try {
// Connect the client to the server
await client.connect();
// Establish and verify connection
await client.db('admin').command({ ping: 1 });
console.log('Connected successfully to server');
} finally {
// Ensures that the client will close when you finish/error
await client.close();
}
}
await run().catch(console.dir);
});
But I am getting error as below :
MongoServerError: Authentication failed.
at MessageStream.messageHandler (/node_modules/mongodb/src/cmap/connection.ts:740:20)
at MessageStream.emit (node:events:390:28)
at MessageStream.emit (node:domain:475:12)
at processIncomingData (/node_modules/mongodb/src/cmap/message_stream.ts:167:12)
at MessageStream._write (/node_modules/mongodb/src/cmap/message_stream.ts:64:5)
at writeOrBuffer (node:internal/streams/writable:389:12)
at _write (node:internal/streams/writable:330:10)
at MessageStream.Writable.write (node:internal/streams/writable:334:10)
at Socket.ondata (node:internal/streams/readable:754:22)
at Socket.emit (node:events:390:28) {
ok: 0,
code: 18,
codeName: 'AuthenticationFailed'
},
and I open http://localhost:5000/ by browser, it shows that:
It looks like you are trying to access MongoDB over HTTP on the native driver port.
I can connect the database via:
Use MongoDB compass to connect the database via ssh
Use mongo 'mongodb://admin:password#remote-host:27017/admin' in local machine terminal
Use MongoClient(mongodb://admin:password#remote-host:27017/admin) in Nodejs without ssh-tunnel
Use mongo 'mongodb://admin:password#localhost:27017/admin' in both remote host and remote host docker contaniner
I am sure the password is correct.

could connect to kafka instance in node?

I have two computer running in the local network, one computer is installed with kafka instance on 192.168.1.3:9092. In another computer a short test program is running a kafka client to connect kafka instance and subscribe a topic.
my kafka-node is latest version, v4.1.3
const kafka = require('kafka-node');
const bp = require('body-parser');
//const config = require('./config');
try {
const Consumer = kafka.HighLevelConsumer;
const client = new kafka.KafkaClient("kafkaHost: '192.168.1.3:9092'");
let consumer = new kafka.Consumer(
client,
[{ topic: "dbserver1", partition: 0 }],
{
autoCommit: true,
fetchMaxWaitMs: 1000,
fetchMaxBytes: 1024 * 1024,
encoding: 'utf8',
fromOffset: false
}
);
consumer.on('message', async function(message) {
console.log('here');
console.log(
'kafka-> ',
message.value
);
})
consumer.on('error', function(err) {
console.log('error', err);
});
}
catch(e) {
console.log(e);
}
The code is shown above. However the code is always telling me
{ Error: connect ECONNREFUSED 127.0.0.1:9092
at Object._errnoException (util.js:992:11)
at _exceptionWithHostPort (util.js:1014:20)
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1186:14)
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 9092 }
why it is showing 127.0.0.1 not 192.168.1.3?
Based on the comments, you're running the Debezium Kafka docker container... From the Debezium Docker tutorial...
If we wanted to connect to Kafka from outside of a Docker container, then we’d want Kafka to advertise its address via the Docker host, which we could do by adding -e ADVERTISED_HOST_NAME= followed by the IP address or resolvable hostname of the Docker host, which on Linux or Docker on Mac this is the IP address of the host computer (not localhost).
Sounds like your node code is not running in a container, or at least not on the same machine / Docker network
So you'll have to add -e ADVERTISED_HOST_NAME=192.168.1.3 to your docker run command

How to make a SSH to postgres server with help of ssh2 library and query using pg libary?

I am trying to make a connection to Postgres remote host using the SSH tunnel from node application(library:ssh2) and query the data(lib:pg).
Before I used mysql2(node library) make a tunnel using ssh2(node library) and connected to Mysql remote host as well as able to query the data. But I want to do the same in Postgres remote host I am not able to connect to it.
pg Client obj config does not support the stream from ssh2 forwardOut..!
Or, Is there any library available to make this happen instead of that ssh2, pg
PS: In mysql2 when I try to make a connection with a mysql config with stream: stream key value.
var Clientssh = require('ssh2').Client;
var conn = new Clientssh();
conn.on('ready', function() {
console.log('Client :: ready');
conn.forwardOut(
'127.0.0.1',
5434,
'localhost',
5432,
function(err, stream) {
if (err) throw err;
let conf = {
host: 'localhost',
port: 5434,
user: 'test',
database: 'test',
password: 'test'
}
let remoteConnection = new Client(conf);
remoteConnection.connect(function(err) {
if (err) {
console.log(err);
console.log("Unable to connect to postgre");
res.send(err);
} else {
remoteConnection.query('SELECT * FROM test', function(err, testResult) {
remoteConnection.end();
if (err) {
console.log("Unable to fetch data");
res.send(err);
} else {
console.log("Succcess");
res.send(testResult);
}
});
}
});
});
}).connect({
host: 'hostaddress',
port: 'hostport',
username: 'hostusername',
privateKey: require('fs').readFileSync('path/for/key')
});
It shows the Connection terminated unexpectedly from this line
Client :: ready
Error: Connection terminated unexpectedly
at Connection.con.once (/node-postgres/node_modules/pg/lib/client.js:235:9)
at Object.onceWrapper (events.js:286:20)
at Connection.emit (events.js:198:13)
at Channel.<anonymous> (/node-postgres/node_modules/pg/lib/connection.js:131:10)
at Channel.emit (events.js:203:15)
at endReadableNT (_stream_readable.js:1129:12)
at process._tickCallback (internal/process/next_tick.js:63:19)
Unable to connect to postgre

Port 9200 Refused Connection?

I'm trying to implement an elasticsearch client in NodeJS on a Cloud9 workspace and I'm just trying to get it running. My app runs on port 5678 and my MongoDB runs on 27017. I have tried searching for other answers, but I haven't really found anything particularly useful. This is the error message that I receive when trying to connect to localhost:9200.
Elasticsearch ERROR: 2015-06-26T04:24:19Z
Error: Request error, retrying -- connect ECONNREFUSED
at Log.error (/home/ubuntu/workspace/node_modules/elasticsearch/src/lib/log.js:213:60)
at checkRespForFailure (/home/ubuntu/workspace/node_modules/elasticsearch/src/lib/transport.js:192:18)
at HttpConnector.<anonymous> (/home/ubuntu/workspace/node_modules/elasticsearch/src/lib/connectors/http.js:153:7)
at ClientRequest.wrapper (/home/ubuntu/workspace/node_modules/elasticsearch/node_modules/lodash/index.js:3128:19)
at ClientRequest.emit (events.js:95:17)
at Socket.socketErrorListener (http.js:1552:9)
at Socket.emit (events.js:95:17)
at net.js:441:14
at process._tickCallback (node.js:442:13)
Elasticsearch TRACE: 2015-06-26T04:24:19Z
-> HEAD http://localhost:9200/
<- 0
Elasticsearch WARNING: 2015-06-26T04:24:19Z
Unable to revive connection: http://localhost:9200/
Elasticsearch WARNING: 2015-06-26T04:24:19Z
No living connections
Trace: elasticsearch cluster is down!
at Server (/home/ubuntu/workspace/app.js:32:13)
at respond (/home/ubuntu/workspace/node_modules/elasticsearch/src/lib/transport.js:251:9)
at sendReqWithConnection (/home/ubuntu/workspace/node_modules/elasticsearch/src/lib/transport.js:171:7)
at next (/home/ubuntu/workspace/node_modules/elasticsearch/src/lib/connection_pool.js:213:7)
at process._tickCallback (node.js:442:13)
.
My code for the elastic search client is very simple
var client = new elasticsearch.Client({
hosts: 'localhost:9200',
log: 'trace'
});
client.ping({
// ping usually has a 3000ms timeout
requestTimeout: 30000,
// undocumented params are appended to the query string
hello: "elasticsearch!"
}, function (error) {
if (error) {
console.trace('elasticsearch cluster is down!');
} else {
console.log('All is well');
}
});
If I try to connect to localhost:5678, I don't get an error refused, but the elastic cluster is still down? Any suggestions would be helpful, thanks :)
//Client.js
const es = require('elasticsearch');
const esClient = new es.Client({
host: {
protocol: 'http',
host: 'localhost',
port: 9200
},
log: 'trace'
});
module.exports = esClient;
//ping.js
const esClient = require('./client');
esClient.ping({
// ping usually has a 3000ms timeout
requestTimeout: 3000
}, function (error) {
if (error) {
console.trace('elasticsearch cluster is down!');
} else {
console.log('All is well');
}
});
Hey to anyone who wants to know what I did. I basically just downloaded elasticsearch onto cloud9 and ran it. Then I pinged the port accordingly and it worked. Looks like a noobie mistake on my part :P
In my case, stock elasticsearch with everything in default and using the JS client results in this error.
(short term) fix: http.cors.enabled and https.cors.allow-origin settings

Resources