I am creating a new Node.js service that receives requests from the frontend, places each request on RabbitMQ, waits for the response from the microservice that handles that request, and then sends the response it gets back from RabbitMQ to the requesting frontend.
my question is when i create the connection to rabbitmq, something like
// Connect to RabbitMQ with a 60s heartbeat; on any failure, schedule a
// retry via `start` (assumed to be the function wrapping this call —
// TODO confirm it re-invokes amqp.connect).
amqp.connect(process.env.CLOUDAMQP_URL + "?heartbeat=60",
function(err, conn) {
if (err) {
// Initial connect failed — log and retry in 1s.
console.error("[AMQP]", err.message);
return setTimeout(start, 1000);
}
// Runtime connection errors: "Connection closing" is expected during a
// deliberate shutdown, so only unexpected errors are logged here.
conn.on("error", function(err) {
if (err.message !== "Connection closing") {
console.error("[AMQP] conn error", err.message);
}
});
// Any close (error-triggered or otherwise) schedules a reconnect.
conn.on("close", function() {
console.error("[AMQP] reconnecting");
return setTimeout(start, 1000);
});
console.log("[AMQP] connected");
// Stash the connection in a module-level variable so other code can
// share it, then notify listeners that the connection is ready.
amqpConn = conn;
whenConnected(); });
Is it best to create this connection in app.js (the top level) and pass the connection down to the external modules for each request, or should I create a new connection, submit, and wait for each request?
thanks for any suggestions
The RabbitMQ team monitors this mailing list and only sometimes answers questions on StackOverflow.
Yes, you should share this connection. Opening a new connection for each request will waste resources and time.
Related
I have a back-end server (Nodejs + Express) and front-end clients (reactjs).
I would like to implement sockets to replace some APIs.
I'm using socket.io for the back-end and socket.io-client for the front.
My question is when clients establish a connection to the server via sockets, do they each run in their own context?
// Require a valid JWT within 15s of connecting; only sockets that pass
// authorization reach the "authenticated" handler below.
// NOTE(review): this snippet is truncated — the closing `});` for the
// "authenticated" arrow function never appears in the post.
io.on(
"connection",
socketioJwt.authorize({
secret: process.env.secretKey,
timeout: 15000 // 15 seconds to send the authentication message
})
).on("authenticated", socket => {
console.log(`Socket ${socket.id} connected.`);
// Each connected socket registers its own handler; handlers for
// different sockets run independently per event.
socket.on("myEvent", async request => {
try {
// update resource in database
// Broadcast to every client except the sender.
socket.broadcast.emit("updated", {
// data
});
} catch (e) {
// log error in database
// Errors are broadcast to ALL clients, including the sender.
io.emit("onError", {
// e.message
});
}
});
Given the above code - if two clients were to emit an event to myEvent , wouldn't the latter overwrite the former's data in the database?
I have this database connection. Inside the function where the comment is located, there is a data update cycle for rest api. The data is updated, but when the data in the Oracle database is updated, the connection may fail and after that all subsequent updated data will get undefined. How can you properly connect to the database so that there are no failures?
// Open a one-off connection and hand it (or the connect error) to
// connExecute. NOTE(review): an ad-hoc connection per update cycle is
// fragile — a connection pool (oracledb.createPool) replaces broken
// connections transparently and is the recommended approach.
oracledb.getConnection(
{
user: db.user,
password: db.password,
connectString: db.connectString
},
connExecute
);
// Runs a test query on the connection handed over by oracledb.getConnection
// and releases the connection on every path (query error or success).
// err        - connect error from oracledb.getConnection, if any
// connection - an open oracledb connection (only valid when err is falsy)
function connExecute(err, connection) {
  if (err) {
    console.error(err.message);
    return;
  }
  // Fix: declare `sql` locally — the original assigned an undeclared
  // identifier, creating an implicit global (and a ReferenceError in
  // strict mode / ES modules).
  const sql = `SELECT * FROM db.test`;
  connection.execute(sql, {}, { outFormat: oracledb.OBJECT },
    function (err, db) {
      if (err) {
        console.error(err.message);
        connRelease(connection);
        return;
      }
      // data update loop
      connRelease(connection);
    });
}
// Closes the given database connection. A close error is logged but
// deliberately not rethrown — release is best-effort.
function connRelease(connection) {
  const onClosed = (err) => {
    if (err) {
      console.error(err.message);
    }
  };
  connection.close(onClosed);
}
You should be using a connection pool. Connection pools have built-in logic to detect connections with issues and create new connections transparently. See this series on creating a REST API for more details: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Keep in mind that issues can still happen, so you have to handle errors as needed for your application.
Usually you add a listener on the connection object and, on disconnection or failure, create the connection again. With minor changes you can adopt this approach and use listeners to check whether the connection is available, and reconnect if it is not. There are several reasons a connection can close, so it is better to handle exceptions, check whether you are still connected, and reconnect in case of error.
Or you can try this NPM this will do reconnection for you
https://www.npmjs.com/package/oracledb-autoreconnect
Ping me if you need clarification.
// Connection settings — the '----' values are redacted placeholders and
// must be filled in. NOTE(review): as written, `port: ----` is not valid
// JavaScript; the port needs a real number.
var dbConfig = {
host: '----',
user: '----',
password: '----',
database: '----',
port: ----
};
// Module-level handle, reassigned on every (re)connect below.
var connection;
// Recreate the connection whenever it is lost — the old one cannot be
// reused. NOTE(review): `<obj>` is a placeholder for the actual driver
// object (e.g. the mysql module) and must be replaced.
function handleDisconnect() {
connection = <obj>.getConnection(dbConfig);
connection.connect( function onConnect(err) {
// The server is either down or restarting (takes a while sometimes).
if (err) {
console.log('error when connecting to db:', err);
// Delay before reconnecting to avoid a hot loop and let the node
// process serve other asynchronous requests in the meantime.
// If you're also serving http, display a 503 error.
setTimeout(handleDisconnect, 10000);
}
});
// Connection errors after connect: a lost connection (server restart or
// idle timeout — the wait_timeout server variable configures this) is
// recoverable; anything else is fatal.
connection.on('error', function onError(err) {
console.log('db error', err);
if (err.code == 'PROTOCOL_CONNECTION_LOST') {
handleDisconnect();
} else {
throw err;
}
});
}
handleDisconnect();
I am trying to implement a reconnect mechanism when the connection fails to the rabbitmq queue server.This code is only for consuming messages, Below is my code ( the channel Init function takes care of initialising the consumer and binding to the queue ).
connect() {
let conn = amqp.connect(queueConfig.QUEUE_SERVER_URL + "?heartbeat=60");
return conn;
}
createConnection(){
console.log("Trying to connect amqp");
let self = this;
self.connection = this.connect()
.then(function(connection){
console.log("[AMQP] connected");
connection.on("error",function(err){
if (err.message !== "Connection closing") {
console.error("[AMQP] conn error", err.message);
}
});
connection.on("close", function() {
console.error("[AMQP] reconnecting");
return setTimeout(createConnection, 1000);
});
return connection.createConfirmChannel();
})
.then(self.channelInit);
}
On connection failure I am successfully getting the prompt "[AMQP] reconnecting", but after that queue is not getting reconnected, no other prompts are coming in console log.
Please help.
You have a typo in your method. You need use something like setTimeout(createConnection, 1000); instead of your setTimeout(createConnection(), 1000);
I am trying to get socket.io working from the MeteorJS server. I am using the package from https://github.com/joncursi/socket-io-client, but I am unable to get it to work from the server and I can't figure out why.
I call the "connectToServer" method from the client; this should contact the remote server and initialise the connection. When the code executes, I can see the messages being logged, but nothing inside the "socket.on('connect', ..." handler. This suggests it isn't making the connection. This is backed up by the fact that when I run code from the client side (using the socket.io browser JavaScript code), I am able to connect without issues.
I have code on the server that runs whenever there is an attempt to connect to the socket.io endpoint. This is logged with the client-side code, but not with the server-side code.
can anyone see what i may be doing wrong here?
// Server-side method invoked from the client; kicks off the socket.io
// connection attempt below.
Meteor.methods({
'connectToServer': function() {
socketioController();
}
});
// Connects to the remote socket.io server and wires up event handlers.
// Handlers are wrapped in Meteor.bindEnvironment so Meteor.call works
// from within async socket callbacks.
function socketioController(){
  var username = 'asd';
  // NOTE(review): password is declared but never used — presumably meant
  // for an authentication emit after connect; confirm and wire it up.
  var password = 'asd';
  // Fix: use the variable instead of duplicating its value in the log
  // literal (output is unchanged: "trying to login with asd").
  console.log(`trying to login with ${username}`);
  // NOTE(review): https://localhost:3001/ — if the server uses a
  // self-signed certificate, a Node-side client will fail the TLS
  // handshake silently (browsers that already trust the cert succeed),
  // which matches the "connect never fires on the server" symptom.
  // TODO confirm the certificate / try rejectUnauthorized or plain http.
  var socket = io('https://localhost:3001/');
  console.log("socket variable set");
  socket.on('connect', Meteor.bindEnvironment(function() {
    console.log('Connected to the websocket!');
    //Meteor.call('methodName1');
    // on data event
    socket.on('data-event', Meteor.bindEnvironment(function(data) {
      console.log(data);
      Meteor.call('methodName2');
    }, function(e) {
      throw e;
    }));
    // on disconnect
    socket.on('disconnect', Meteor.bindEnvironment(function() {
      console.log('Disconnected from the websocket!');
      Meteor.call('methodName3');
    }, function(e) {
      throw e;
    }));
  }, function(e) {
    throw e;
  }));
}
I'm writing an app which catches tweets with a specific hashtag using the Twitter Streaming API. Each tweet caught must be inserted into my db and I need to perform another query to my db.
This is a not trending hashtag so let's assume there's around 400 tweets caught per hour. I currently open a new connection to my db each time I catch a tweet, process my queries then close the connection.
// For every caught tweet: connect, run the insert query, run the count
// query, notify clients, then close the connection.
// NOTE(review): as pasted this snippet is unbalanced (one extra closing
// brace) and `<MyQuery>`/`<MyParameters>` are placeholders, so it is not
// runnable verbatim. The EPIPE crash on the SECOND tweet is consistent
// with reusing the same `client` after client.end() closed its socket —
// a pg.Pool (or a fresh Client per tweet, as the author concluded)
// avoids this.
t.on('tweet', function(tweet) {
client.connect(function(error) {
if (error) {
console.error('Error while connecting to PostgreSQL DB: ', error);
}
else {
client.query('<MyQuery>',
<MyParameters>,
function (err, result) {
if (err) {
return console.error('error running query', err);
}
else {
client.query('<MyQuery>', function (err, result) {
// end() is called before the result is inspected, so the
// connection closes regardless of the count query outcome.
client.end();
if (err) {
return console.error('error running count query', err);
}
else {
io.emit('infos', {infos: {}});
}
return console.log('Tweet added');
}
});
}
});
}
});
});
I post a tweet after running the server, the process is ok, but when posting another one something like 2 minutes later, I get the following output:
Tweet added
events.js:85
throw er; // Unhandled 'error' event
^
Error: write EPIPE
at exports._errnoException (util.js:746:11)
at WriteWrap.afterWrite (net.js:775:14)
Can you tell me what's the best way to have my app always listening to a new tweet and insert it into the DB? This way by creating a new connection each time (but I don't understand why it's crashing) or by opening a single connection at launch and maintain it opened during several weeks (I guess it's not the good option)?
I finally ended up by instantiating a whole new client each time rather than calling connect() and end() methods.
t.on('tweet', function(tweet) {
client = new pg.Client(conString);