Error: read ECONNRESET while connecting to RabbitMQ with NodeJS - node.js

I've encountered the following error message while connecting to our external RabbitMQ from NodeJS:
Error: read ECONNRESET
at TCP.onStreamRead (internal/stream_base_commons.js:205:27) {
errno: 'ECONNRESET',
code: 'ECONNRESET',
syscall: 'read'
}
and my NodeJS code is as follows:
const amqp = require('amqplib/callback_api');

const amqp_url = "amqp://un:pw@sb-mq.com:9901/my-vhost";
amqp.connect(amqp_url, function (error0, connection) {
    if (error0) {
        throw error0;
    }
    connection.createChannel(function (error1, channel) {
        if (error1) {
            throw error1;
        }
        var queue = 'hello';
        var msg = 'Hello World!';
        channel.assertQueue(queue, {
            durable: false
        });
        channel.sendToQueue(queue, Buffer.from(msg));
        console.log(" [x] Sent %s", msg);
    });
    setTimeout(function () {
        connection.close();
        process.exit(0);
    }, 500);
});
But the thing is, when I set up RabbitMQ locally with the same configuration but using the default port (like amqp://un:pw@localhost:5672/my-vhost), it worked perfectly. Please let me know how to troubleshoot this, thanks.

"ECONNRESET" means the other side of the TCP conversation abruptly closed its end of the connection.
See How do I debug error ECONNRESET in Node.js?
As for RabbitMQ, check whether it is actually listening on that port; just run:
telnet sb-mq.com 9901
from your client machine and check the firewall configuration.
You may have another service running on 9901.
ECONNRESET is a network problem; RabbitMQ itself can run on non-default ports without any issue.
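If telnet isn't available on the client machine, the same reachability check can be done from Node itself (a minimal sketch, using the host and port from the question):

const net = require('net');

// Quick TCP reachability check, roughly equivalent to the telnet test above.
const socket = net.connect({ host: 'sb-mq.com', port: 9901 }, () => {
    console.log('TCP connection established - the port is reachable');
    socket.end();
});

socket.on('error', (err) => {
    // ECONNREFUSED or ETIMEDOUT here points at a firewall or wrong port,
    // while a reset right after connecting points at the service itself.
    console.error('TCP check failed:', err.code);
});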

I found that the issue was resolved when I used amqps instead of amqp.
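For reference, the only change was the URI scheme; a minimal sketch, assuming the broker exposes a TLS (AMQPS) listener on that same host and port:

const amqp = require('amqplib/callback_api');

// Same connection as in the question, but over TLS.
const amqp_url = 'amqps://un:pw@sb-mq.com:9901/my-vhost';

amqp.connect(amqp_url, function (error0, connection) {
    if (error0) throw error0;
    console.log('Connected over TLS');
    connection.close();
});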

Related

Error: read EINVAL when connecting to MongoDB with mongoose and SSL

When I try to connect to my MongoDB that requires SSL, my NodeJs app crashes on the following method:
conn = await mongoose.connect(process.env.DB_HOST, {
    tlsCAFile: __dirname + '/ca-certificate.crt',
    useNewUrlParser: true,
    useUnifiedTopology: true
})
and I get the following error in stderr.log:
events.js:377
throw er; // Unhandled 'error' event
^
Error: read EINVAL
at Pipe.onStreamRead (internal/stream_base_commons.js:209:20)
Emitted 'error' event on Socket instance at:
at emitErrorNT (internal/streams/destroy.js:106:8)
at emitErrorCloseNT (internal/streams/destroy.js:74:3)
at processTicksAndRejections (internal/process/task_queues.js:82:21) {
errno: -22,
code: 'EINVAL',
syscall: 'read'
}
The interesting thing is that this works just fine on my local Windows machine, but crashes when deployed to A2Hosting shared hosting.
Also I am able to connect successfully (even on A2hosting) when connecting without mongoose like so:
const { MongoClient } = require('mongodb');

const client = new MongoClient(uri);
try {
    await client.connect();
    const db = client.db('egomenu');
    console.log('connected successfully');
} finally {
    await client.close();
}
I am using mongoose: ^6.3.1 and node: 14.20.1 on A2hosting.
I believe the error is generated when trying to read the .crt file during connection; however, I cannot figure out what is causing it.
Any help would be greatly appreciated :)
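One way to test that hypothesis (a hedged diagnostic sketch, not from the original post; the path is the same one passed to tlsCAFile) is to confirm the process can actually read the certificate file before handing it to mongoose:

const fs = require('fs');
const path = require('path');

// Hypothetical pre-flight check: verify the CA file exists and is readable
// by the Node process before mongoose/the MongoDB driver tries to open it.
const caPath = path.join(__dirname, 'ca-certificate.crt');

try {
    const pem = fs.readFileSync(caPath, 'utf8');
    console.log('CA file readable, length:', pem.length);
} catch (err) {
    console.error('Cannot read CA file:', err.code, caPath);
}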

syscall: 'write', code: 'EPROTO', errno: -4046, or AxiosError: read ECONNRESET

I am trying to test out using proxies for the first time in my coding career and have been trying for a while now. However, I keep getting this weird error that doesn't make any sense to me. The proxy I use here is taken from http://free-proxy.cz/en/proxylist/country/US/https/date/all. I have no problem paying for some proxy servers; I just want to make sure it works before paying for anything.
However I keep getting the error AxiosError: read ECONNRESET OR AxiosError: Client network socket disconnected before secure TLS connection was established OR write EPROTO E0DA0000:error:0A00010B:SSL routines:ssl3_get_record:wrong version number:c:\ws\deps\openssl\openssl\ssl\record\ssl3_record.c:355:
I am running the code on localhost, so my connection to the proxy server is not HTTPS.
Here is the code I am trying to execute. I have imported
var HttpsProxyAgent = require('https-proxy-agent');
But this clearly is not the problem.
router.get("/userInvTest", async (req, res) => {
try {
const axiosDefaultConfig = {
proxy: false,
httpsAgent: new HttpsProxyAgent('https://205.134.235.132:3129'),
};
const axiosUse = require('axios').create(axiosDefaultConfig);
const steamInventory = await axiosUse.get("https://steamcommunity.com/inventory/76561198027016127/730/2?l=english&count=5000");
console.log(steamInventory.data)
res.status(200).send(steamInventory.data)
} catch (err) {
console.log(err)
}
})
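One detail worth noting (an observation and an assumption, not part of the original post): the post says the connection to the proxy is not HTTPS, yet the agent is built with an https:// proxy URL, and the "wrong version number" error is what OpenSSL reports when TLS is attempted against a plain-TCP port. A minimal sketch of the plain-HTTP variant, keeping the same illustrative proxy address and the same https-proxy-agent import style as above:

// Hedged variant: talk to the proxy itself over plain HTTP; the target URL
// stays HTTPS and is tunnelled through the proxy with a CONNECT request.
var HttpsProxyAgent = require('https-proxy-agent');
const axiosUse = require('axios').create({
    proxy: false,
    httpsAgent: new HttpsProxyAgent('http://205.134.235.132:3129'),
});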

Socket closed abruptly during opening handshake while trying to connect to RabbitMq

I have a simple NodeJs application that should connect to RabbitMq.
The code:
const amqp = require('amqplib/callback_api');
const amqpUri = "amqp://user:password#localhost:5672"
if (amqpUri == null)
throw Error('Missing AMQP_URI environment variable.');
amqp.connect(amqpUri, function(error0, connection) {
    if (error0)
        throw error0;
    connection.createChannel(function(error1, channel) {
        if (error1) {
            throw error1;
        }
        const exchangeName = 'product.event';
        const queueName1 = 'create';
        const routingKey1 = 'create';
        const queueName2 = 'delete';
        const routingKey2 = 'delete';
        channel.assertExchange(exchangeName, 'topic', {
            durable: false,
        });
        // create
        channel.assertQueue(queueName1, {
            durable: false,
        });
        channel.bindQueue(queueName1, exchangeName, routingKey1);
        channel.consume(queueName1, (msg) => consumeCreated(channel, msg));
        // delete
        channel.assertQueue(queueName2, {
            durable: false,
        });
        channel.bindQueue(queueName2, exchangeName, routingKey2);
        channel.consume(queueName2, (msg) => consumeDeleted(channel, msg));
    });
});
Then run a RabbitMq image with:
docker run -d --hostname my-rabbit --name some-rabbit -p 5672:15672 -e RABBITMQ_DEFAULT_USER=user -e RABBITMQ_DEFAULT_PASS=password rabbitmq:3-management
I can access the rabbitmq and connect with the credentials user/password at http://localhost:5672.
For some reason, I have the error:
/home/hamuto/CLO902-Group35/indexer/app.js:12
throw error0;
^
Error: Socket closed abruptly during opening handshake
at Socket.endWhileOpening (/home/hamuto/CLO902-Group35/indexer/node_modules/amqplib/lib/connection.js:260:17)
at Socket.emit (events.js:326:22)
at endReadableNT (_stream_readable.js:1241:12)
at processTicksAndRejections (internal/process/task_queues.js:84:21)
I see this is not likely the case, as you're using the correct port; however, I was getting the exact same error messages when I tried using the HTTP port instead of the TCP port.
Again, I know this isn't your issue, but I guess it could help you pinpoint potential fail points.
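Building on that hint (an assumption, not part of the original answer): the docker run command above maps host port 5672 to the container's 15672, which is RabbitMQ's management HTTP port, so an AMQP client connecting to localhost:5672 ends up talking to an HTTP listener. A sketch of a mapping that exposes the actual AMQP (TCP) port as well:

docker run -d --hostname my-rabbit --name some-rabbit \
  -p 5672:5672 -p 15672:15672 \
  -e RABBITMQ_DEFAULT_USER=user -e RABBITMQ_DEFAULT_PASS=password \
  rabbitmq:3-management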

Node.js Error: connect ECONNREFUSED

I am new to Node.js and am unable to resolve this error:
Error: connect ECONNREFUSED
at errnoException (net.js:901:11)
at Object.afterConnect [as oncomplete] (net.js:892)
The code I was trying out follows:
var async = require('async'),
    request = require('request');

function done(err, results) {
    if (err) {
        throw err;
    }
    console.log('Done ! results: %j', results);
}

var collection = [1, 2, 3, 4];

function iterator(value, callback) {
    request.post({
        url: 'http://localhost:8080',
        body: JSON.stringify(value)
    }, function (err, res, body) {
        if (err) {
            callback(err, body && JSON.parse(body));
        }
    });
}

async.map(collection, iterator, done);
ECONNREFUSED – connection refused by the server.
A blocked port can be the root cause of this issue: check whether your connection is being blocked, or whether the default port has been changed. Identify which app/service you are connecting to and whether its port is blocked or has been changed.
In your case, check whether an application is actually listening on port 8080.
But this most likely occurs with FileZilla.
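To make that concrete (a minimal sketch, not from the original answer): the request.post in the question targets http://localhost:8080, so something has to be listening there; a throwaway local server is enough to turn ECONNREFUSED into a successful round trip:

// Hypothetical test server so that http://localhost:8080 has a listener.
var http = require('http');

http.createServer(function (req, res) {
    var chunks = [];
    req.on('data', function (c) { chunks.push(c); });
    req.on('end', function () {
        // Echo the posted body back to the client.
        res.end(Buffer.concat(chunks).toString());
    });
}).listen(8080, function () {
    console.log('Test server listening on port 8080');
});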

How to deal with 'read ETIMEDOUT' in Node.js?

I have a pub/sub model using Node.js to transmit data from one client to another. The server also records everything it receives and sends it to new clients.
However, some data gets corrupted during transfer, and I get errors like:
Error with socket!
{ [Error: write EPIPE] code: 'EPIPE', errno: 'EPIPE', syscall: 'write' }
Error with socket!
{ [Error: read ETIMEDOUT] code: 'ETIMEDOUT', errno: 'ETIMEDOUT', syscall: 'read' }
I don't know how to properly handle these errors. It looks like the client is down.
Since the server only acts as a proxy, it doesn't really know what the data means. I have no idea how to validate every data packet before running into these errors.
Here is my code:
// server is an object inheriting from net.Server
server.on('listening', function() {
    var port = server.address().port;
}).on('connection', function(cli) {
    cli.socketBuf = new Buffers();
    cli.commandStarted = false;
    cli.dataSize = 0;
    cli.setKeepAlive(true, 10*1000);
    cli.setNoDelay(true);
    cli.on('connect', function() {
        server.clients.push(cli);
    }).on('close', function() {
        var index = server.clients.indexOf(cli);
        server.clients.splice(index, 1);
    }).on('data', function (buf) {
        server.emit('data', cli, buf);
        if (op.autoBroadcast) {
            _.each(server.clients, function(c) {
                if (c != cli) c.write(buf);
            });
        }
    }).on('error', function(err) {
        console.log('Error with socket!');
        console.log(err);
    });
}).on('error', function(err) {
    console.log('Error with server!');
    console.log(err);
});
// ...
// room.dataSocket is an instance of the server above
room.dataSocket.on('data', function(cli, d) {
    // bf is a buffered file
    bf.append(d);
    room.dataFileSize += d.length;
}).on('connection', function(con) {
    bf.readAll(function(da) {
        con.write(da);
    });
});
If you get an EPIPE or indeed any error when writing, the peer has closed or the connection has been dropped. So you must close the connection at that point.
If you get a read timeout the inference is that either you have set an unrealistically short timeout or else the peer has failed to deliver in time: in the second case once again you should assume the connection is down, and close it.
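Applied to the server code above, that would mean dropping the client socket inside its 'error' handler instead of only logging (a hedged sketch reusing the cli and server.clients names from the question):

cli.on('error', function(err) {
    console.log('Error with socket!');
    console.log(err);
    // On EPIPE, ETIMEDOUT or any other read/write error, treat the
    // connection as dead: destroy the socket and forget the client so the
    // broadcast loop stops writing to it.
    cli.destroy();
    var index = server.clients.indexOf(cli);
    if (index !== -1) server.clients.splice(index, 1);
});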
