How to Use Node SMPP to Connect to InetLab SMPP Server - node.js

I am trying to build an SMS client using NodeJS which should connect to the `Inetlab` SMPP server to send short messages. I downloaded the Inetlab SMPP client and server and ran them both. When I try to connect and send an SMS from the client via port 7777 (not that it matters), the connection is bound successfully and the message is sent across.
My problem is that when I try to connect to the same local SMPP server via a client that I have built with NodeJS using the node-smpp library, the connection fails even though I am using the same localhost and port 7777.
Below is my connection code:
module.exports = class{
constructor(){
this.session = null
this.smppConfig = {
url:"smpp://localhost:7777",
auto_enquire_link_period:10000,
debug:true
}
}
StartSmppSession= ()=>{
return new Promise( async(resolve, reject)=>{
try{
console.log(this.smppConfig)
this.session = smpp.connect(this.smppConfig,()=>{
this.session.bind_transceiver({
system_id:process.env.SMPP_SYSTEM_ID,
password:process.env.SMPP_PASSWORD
},(pdu)=>{
if(pdu.command_status === 0){
resolve({message:"Connection bound successfully!"})
}else{
reject({message:"Failed to bind!",pdu})
}
})
})
}catch(err){
//reject(err.message)
}
})
}
sendSMS = (payload)=>{
return new Promise(async (resolve, reject)=>{
try{
//payload = {destination_addr:"phone_number", "short_message":"The message here"}
this.session.submit_sm(payload, async (pdu)=>{
pdu.command_status === 0 ? resolve("Message successfully sent!") : reject("Failed to send SMS!")
})
}catch(err){
reject(err.message)
}
})
}
}
When I invoke the StartSmppSession() function in my controller, I get the following error log:
2022-12-06T09:30:40.973Z - cli - 315644 - socket.error - connect ECONNREFUSED ::1:7777 - {"errno":-4078,"code":"ECONNREFUSED","syscall":"connect","address":"::1","port":7777}
node:events:491
throw er; // Unhandled 'error' event
^
Error: connect ECONNREFUSED ::1:7777
at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1284:16)
Emitted 'error' event on Session instance at:
at Socket.<anonymous> (C:\Users\c.mwale\Desktop\NRB API Test\Notifications Module\SMSGateway\src\SMS_Outgoing\node_modules\smpp\lib\smpp.js:119:8)
at Socket.emit (node:events:513:28)
at emitErrorNT (node:internal/streams/destroy:151:8)
at emitErrorCloseNT (node:internal/streams/destroy:116:3)
at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
errno: -4078,
code: 'ECONNREFUSED',
syscall: 'connect',
address: '::1',
port: 7777
}
I understand that there is an SMPP library for .NET documented on the Inetlab website, but I am of the view that the C# example was meant to illustrate usage, not to dictate the framework or language of implementation.

Related

How can I make my KafkaJS consumer wait indefinitely for a broker to come online?

I have a KafkaJS consumer configured as follows:
// Create the kafka client
// Create the kafka client
const kafka = new Kafka({
  clientId,
  brokers,
});

// Create the consumer.
// BUG FIX: the original mixed the local bindings (`kafka`, `consumer`) with
// `this.kafka` / `this.consumer` — at top level `this` does not carry them,
// so those references were undefined. Use the local constants consistently.
const consumer = kafka.consumer({
  groupId,
  heartbeatInterval: 3000, // ms between heartbeats sent to the group coordinator
  sessionTimeout: 30000,   // ms without a heartbeat before the broker evicts the consumer
});

// Connect the consumer, then start the message loop.
consumer
  .connect()
  .then(async () => {
    await consumer
      .run({
        eachMessage: async ({ topic, partition, message }) => {
          // NOTE(review): subscriptionRegistrations must be in scope here —
          // presumably a map of topic name -> handler; verify against the
          // enclosing module/class this snippet was extracted from.
          subscriptionRegistrations[topic](topic, partition, message);
        },
      })
      .catch((err) => {
        console.log('Error running consumer!', err);
      });
  })
  .catch((err) => {
    console.log('Error connecting consumer!', err);
  });
Currently, when starting the application I get several connection errors like these:
{"level":"ERROR","timestamp":"2023-01-27T23:29:58.214Z","logger":"kafkajs","message":"[BrokerPool] Failed to connect to seed broker, trying another broker from the list: Connection error: connect ECONNREFUSED 127.0.0.1:9092","retryCount":0,"retryTime":246}
{"level":"ERROR","timestamp":"2023-01-27T23:29:58.463Z","logger":"kafkajs","message":"[Connection] Connection error: connect ECONNREFUSED 127.0.0.1:9092","broker":"localhost:9092","clientId":"CLIENT_ID_TEST","stack":"Error: connect ECONNREFUSED 127.0.0.1:9092\n at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1157:16)"}
The above errors are expected as I am not running a broker. Then followed by:
KafkaJSNonRetriableError
Caused by: KafkaJSConnectionError: Connection error: connect ECONNREFUSED 127.0.0.1:9092
at Socket.onError (/path/to/project/node_modules/kafkajs/src/network/connection.js:210:23)
at Socket.emit (node:events:526:28)
at emitErrorNT (node:internal/streams/destroy:157:8)
at emitErrorCloseNT (node:internal/streams/destroy:122:3)
at processTicksAndRejections (node:internal/process/task_queues:83:21)
[ERROR] 23:29:59 KafkaJSNumberOfRetriesExceeded: Connection error: connect ECONNREFUSED 127.0.0.1:9092
At which point the application hangs. It doesn't crash. I also get the error I logged in the catch block of consumer.connect():
Error connecting consumer KafkaJSNonRetriableError
Caused by: KafkaJSConnectionError: Connection error: connect ECONNREFUSED 127.0.0.1:9092
at Socket.onError (/path/to/project/node_modules/kafkajs/src/network/connection.js:210:23)
... 3 lines matching cause stack trace ...
at processTicksAndRejections (node:internal/process/task_queues:83:21) {
name: 'KafkaJSNumberOfRetriesExceeded',
retriable: false,
helpUrl: undefined,
retryCount: 5,
retryTime: 3636,
[cause]: KafkaJSConnectionError: Connection error: connect ECONNREFUSED 127.0.0.1:9092
at Socket.onError (/path/to/project/node_modules/kafkajs/src/network/connection.js:210:23)
at Socket.emit (node:events:526:28)
at emitErrorNT (node:internal/streams/destroy:157:8)
at emitErrorCloseNT (node:internal/streams/destroy:122:3)
at processTicksAndRejections (node:internal/process/task_queues:83:21) {
retriable: true,
helpUrl: undefined,
broker: 'localhost:9092',
code: 'ECONNREFUSED',
[cause]: undefined
}
}
I would like my application to be able to start up before the Kafka broker it is configured to connect to. In this scenario, KafkaJS would continue to retry the connection indefinitely until a broker is available. Ideally this would also work as a reconnection strategy: if the broker goes down, KafkaJS would continue to try to reconnect indefinitely until the broker comes back online. From what I have read in the docs, this is how it is supposed to behave, but it's not doing that for me. Perhaps I have set up my client incorrectly. Thank you for the input!
I'd recommend using AdminClient instead to "ping" the cluster, first.
But, you can retry the connection after catching the KafkaJSNumberOfRetriesExceeded error.
// Obtain the consumer from your configured Kafka client (placeholder).
const consumer = ...
// Retry the initial connection forever: kafkajs gives up after its own
// internal retry budget with KafkaJSNumberOfRetriesExceeded, so we loop
// until connect() finally resolves (i.e. a broker came online).
while (true) {
try {
await consumer.connect();
// Connected — leave the retry loop.
break;
} catch (err) {
console.log('Error connecting consumer!', err);
// kafkajs wraps retry exhaustion in a non-retriable error; only this
// specific case should trigger another connection attempt.
if (err instanceof KafkaJSNonRetriableError && err.name === 'KafkaJSNumberOfRetriesExceeded') {
console.log('retrying connection...');
continue;
}
// Anything else is unexpected — do not spin on it.
console.error('unknown error ' + err);
break;
}
}
// Start consuming only after the connection succeeded (placeholder).
await consumer.run() ...
Ideally this would also work as a reconnection strategy if the broker goes down KafkaJS would continue to try to reconnect indefinitely until the broker come back online
Or... You run more than one broker, and therefore have a highly available Kafka cluster that clients can connect to.
If you ran your code in Docker/Kubernetes with a restart policy, you could just let the process crash, and a new container would restart on its own. You could implement the same with supervisord.

fetch graphql data from nodejs to nextjs (SSR)

I have nextjs Project, where I use SSR for fetch data from nodejs(graphql) but it works only localhost(dev/production), When I upload the project on vercel I get "500: Internal Server Error", I will share my code and required details below:
I use "graphql-request" on Nextjs
import { gql, request } from "graphql-request";

/**
 * Server-side data fetch for this page.
 *
 * BUG FIX: the endpoint was hard-coded to "http://localhost:4000/", which
 * only exists in local development — on Vercel the serverless function has
 * no GraphQL server on localhost, so the request fails with ECONNREFUSED
 * and the page returns a 500. Read the publicly reachable endpoint from an
 * environment variable, falling back to localhost for local dev.
 */
export async function getServerSideProps(context) {
  const endPoint = process.env.GRAPHQL_ENDPOINT ?? "http://localhost:4000/";
  const query = gql`
    query {
      exactlyUser(id: "63c3cf0aea56b467893c92d3") {
        username
      }
    }
  `;
  const data = await request(endPoint, query);
  return {
    props: {
      data,
    },
  };
}
I also give you vercel Error:
[HEAD] /_next/data/U_aH1k5dl2EQA1IzXATL6/index.json
2023-01-17T16:52:11.277Z ddd8e632-1ffa-4772-94c0-aa01e2348bb7 ERROR FetchError: request to http://localhost:4000/ failed, reason: connect ECONNREFUSED 127.0.0.1:4000
at ClientRequest.<anonymous> (/var/task/node_modules/node-fetch/lib/index.js:1491:11)
at ClientRequest.emit (node:events:513:28)
at Socket.socketErrorListener (node:_http_client:494:9)
at Socket.emit (node:events:513:28)
at emitErrorNT (node:internal/streams/destroy:151:8)
at emitErrorCloseNT (node:internal/streams/destroy:116:3)
at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
type: 'system',
errno: 'ECONNREFUSED',
code: 'ECONNREFUSED',
page: '/'
}
RequestId: ddd8e632-1ffa-4772-94c0-aa01e2348bb7 Error: Runtime exited with error: exit status 1
Runtime.ExitError
Seems like when you deploy the Next.js app to Vercel, the API is not accessible. This is the cause of the "500: Internal Server Error" you're seeing.
Try to change the API endpoint URL to the publicly accessible URL of your GraphQL API, instead of using "http://localhost:4000/". This should allow the Next.js app to fetch data from the API when it's running on Vercel.
Avoid hardcoding the API endpoint.

Error: read EINVAL when connecting to MongoDB with mongoose and SSL

When I try to connect to my MongoDB that requires SSL, my NodeJs app crashes on the following method:
// Connect with mongoose, pinning the CA bundle used to verify the server's
// TLS certificate. NOTE(review): tlsCAFile is read from disk when the
// connection is opened — on restrictive shared hosting an unreadable file
// can surface as a low-level socket error (read EINVAL) rather than a clear
// "file not found"; confirm the path and permissions on the host.
conn = await mongoose.connect(process.env.DB_HOST, {
tlsCAFile: __dirname + '/ca-certificate.crt',
useNewUrlParser: true,
useUnifiedTopology: true
})
and I get the following error in stderr.log:
events.js:377
throw er; // Unhandled 'error' event
^
Error: read EINVAL
at Pipe.onStreamRead (internal/stream_base_commons.js:209:20)
Emitted 'error' event on Socket instance at:
at emitErrorNT (internal/streams/destroy.js:106:8)
at emitErrorCloseNT (internal/streams/destroy.js:74:3)
at processTicksAndRejections (internal/process/task_queues.js:82:21) {
errno: -22,
code: 'EINVAL',
syscall: 'read'
}
The interesting thing is that this works just fine on my local Windows machine, but crashes when deployed to A2Hosting shared hosting.
Also I am able to connect successfully (even on A2hosting) when connecting without mongoose like so:
// Plain MongoDB driver connection (no mongoose). This succeeds on the same
// host, which suggests the failure above is specific to the mongoose
// TLS-option path rather than to network reachability.
const client = new MongoClient(uri);
try {
await client.connect();
const db = client.db('egomenu');
console.log('connected successfully');
} finally {
// Always release the client, even if connect() threw.
await client.close();
}
I am using mongoose: ^6.3.1 and node: 14.20.1 on A2hosting.
I believe that the error is generated when trying to read the .crt file during connection; however, I cannot figure out what is causing it.
Any help would be greatly appreciated :)

Connect to a remote server mongoDB in Docker via ssh in nodeJS using tunnel-ssh, AuthenticationFailed

I am trying to connect to a remote server MongoDB in Docker through ssh in Nodejs as below :
sshConfig = {
  username: 'username',
  password: 'password',
  host: 'host',           // SSH jump host
  port: 22,
  dstHost: '172.17.0.3',  // MongoDB container IP on the remote Docker network
  dstPort: 27017,         // mongod port inside the container
  localPort: 5000         // local end of the tunnel
};

// BUG FIX: the client must connect through the tunnel's local end
// (localhost:5000), not localhost:27017 — the original URI bypassed the
// tunnel and authenticated against the wrong server. Credentials are
// separated from the host with '@' (a '#' would be parsed as a fragment).
const uri = 'mongodb://admin:password@localhost:5000/admin';

tunnel(sshConfig, async (error) => {
  if (error) {
    throw new Error(`SSH connection error: ${error}`);
  }
  const client = new MongoClient(uri);

  // Connect, verify the link with a ping, and always close the client.
  async function run() {
    try {
      // Connect the client to the server
      await client.connect();
      // Establish and verify connection
      await client.db('admin').command({ ping: 1 });
      console.log('Connected successfully to server');
    } finally {
      // Ensures that the client will close when you finish/error
      await client.close();
    }
  }
  await run().catch(console.dir);
});
But I am getting error as below :
MongoServerError: Authentication failed.
at MessageStream.messageHandler (/node_modules/mongodb/src/cmap/connection.ts:740:20)
at MessageStream.emit (node:events:390:28)
at MessageStream.emit (node:domain:475:12)
at processIncomingData (/node_modules/mongodb/src/cmap/message_stream.ts:167:12)
at MessageStream._write (/node_modules/mongodb/src/cmap/message_stream.ts:64:5)
at writeOrBuffer (node:internal/streams/writable:389:12)
at _write (node:internal/streams/writable:330:10)
at MessageStream.Writable.write (node:internal/streams/writable:334:10)
at Socket.ondata (node:internal/streams/readable:754:22)
at Socket.emit (node:events:390:28) {
ok: 0,
code: 18,
codeName: 'AuthenticationFailed'
},
and I open http://localhost:5000/ by browser, it shows that:
It looks like you are trying to access MongoDB over HTTP on the native driver port.
I can connect the database via:
Use MongoDB compass to connect the database via ssh
Use mongo 'mongodb://admin:password#remote-host:27017/admin' in local machine terminal
Use MongoClient(mongodb://admin:password#remote-host:27017/admin) in Nodejs without ssh-tunnel
Use mongo 'mongodb://admin:password#localhost:27017/admin' in both remote host and remote host docker contaniner
I am sure the password is correct.

MongoDB Atlas - How to fix 'TransientTransactionError' on shared web-hosting

TransientTransactionError when attempting to connect from shared web-host to Mongo Atlas
I actually had this working at one point but I had to wipe and restart my project from scratch on my web host. Now seemingly out of nowhere I keep getting the above-mentioned error when trying to connect to Atlas. I have already tried whitelisting IP addresses including both my server IP as well as 0.0.0.0/0 (the 'allow all' wildcard) in Atlas.
I have also tried using the 'full driver' snippet instead of mongoose but still receive the same error. Lastly, I want to point out that the exact same code works just fine on my local machine.
Here's my function:
const mongoose = require("mongoose");
const config = require("./config");
module.exports = cb => {
if (mongoose.connection.readyState < 1) {
mongoose.connect(config.db).then(
() => {
console.log("Database linking successful!");
return cb ? cb : null;
},
err => {
console.log("Failed to connect to database.", err);
}
);
}
};
Here's what the error message looks like:
{ MongoNetworkError: failed to connect to server [<mongodb-shard-goes-here>.mongodb.net:27017] on first connect [MongoNetworkError: connect ECONNREFUSED 54.145.177.180:27017]
at Pool.<anonymous> (/home/titanesp/public_html/server/node_modules/mongodb-core/lib/topologies/server.js:431:11)
at Pool.emit (events.js:182:13)
at connect (/home/titanesp/public_html/server/node_modules/mongodb-core/lib/connection/pool.js:557:14)
at makeConnection (/home/titanesp/public_html/server/node_modules/mongodb-core/lib/connection/connect.js:39:11)
at callback (/home/titanesp/public_html/server/node_modules/mongodb-core/lib/connection/connect.js:261:5)
at TLSSocket.err (/home/titanesp/public_html/server/node_modules/mongodb-core/lib/connection/connect.js:286:7)
at Object.onceWrapper (events.js:273:13)
at TLSSocket.emit (events.js:182:13)
at emitErrorNT (internal/streams/destroy.js:82:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:50:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
name: 'MongoNetworkError',
errorLabels: [ 'TransientTransactionError' ],
[Symbol(mongoErrorContextSymbol)]: {} }

Resources