amqp.connect is not able to maintain connection alive forever - node.js

My code is as shown below :
rabbitmq.js
const connectRabbitMq = () => {
    amqp.connect(process.env.CLOUDAMQP_MQTT_URL, function (err, conn) {
        if (err) {
            console.error(err);
            console.log('[AMQP] reconnecting in 1s');
            setTimeout(connectRabbitMq, 1000);
            return;
        }
        conn.createChannel((err, ch) => {
            if (!err) {
                console.log('Channel created');
                channel = ch;
                connection = conn;
            }
        });
        conn.on("error", function (err) {
            if (err.message !== "Connection closing") {
                console.error("[AMQP] conn error", err.message);
            }
        });
        conn.on("close", function () {
            console.error("[AMQP] reconnecting");
            connectRabbitMq();
        });
    });
};
const sendMessage = () => {
    let data = {
        user_id: 1,
        test_id: 2
    };
    if (channel) {
        channel.sendToQueue(q, Buffer.from(JSON.stringify(data)), {
            persistent: true
        });
    }
    else {
        connectRabbitMq(() => {
            channel.sendToQueue(q, Buffer.from(JSON.stringify(data)), {
                persistent: true
            });
        });
    }
};
const receiveMessage = () => {
    if (channel) {
        channel.consume(q, function (msg) {
            // ch.ack(msg);
            console.log(" [x] Received %s", msg.content.toString());
        });
    }
    else {
        connectRabbitMq(() => {
            channel.consume(q, function (msg) {
                // ch.ack(msg);
                console.log(" [x] Received %s", msg.content.toString());
            });
        });
    }
};
scheduler.js
let cron = require('node-cron');
const callMethodForeverRabbitMq = () => {
    cron.schedule('*/1 * * * * *', function () {
        rabbitMqClientPipeline.receiveMessage();
    });
};
app.js
rabbitmq.sendMessage();
What happens here is that the code is not able to keep the connection alive forever. Is there any way I can keep it alive forever?

I am not sure whether you are using the Promise API or the callback API.
With the Promise API you can do it like this:
const amqp = require('amqplib');
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
const connectRabbitMq = () => amqp.connect('amqp://127.0.0.1:5672')
    .then((conn) => {
        conn.on('error', function (err) {
            if (err.message !== 'Connection closing') {
                console.error('[AMQP] conn error', err.message);
            }
        });
        conn.on('close', function () {
            console.error('[AMQP] reconnecting');
            connectRabbitMq();
        });
        //connection = conn;
        return conn.createChannel();
    })
    .then((ch) => {
        console.log('Channel created');
        //channel = ch;
    })
    .catch((error) => {
        console.error(error);
        console.log('[AMQP] reconnecting in 1s');
        return delay(1000).then(() => connectRabbitMq());
    });
connectRabbitMq();
With the callback API, like this:
const amqp = require('amqplib/callback_api');
const connectRabbitMq = () => {
    amqp.connect('amqp://127.0.0.1:5672', function (err, conn) {
        if (err) {
            console.error(err);
            console.log('[AMQP] reconnecting in 1s');
            setTimeout(connectRabbitMq, 1000);
            return;
        }
        conn.createChannel((err, ch) => {
            if (!err) {
                console.log('Channel created');
                //channel = ch;
                //connection = conn;
            }
        });
        conn.on("error", function (err) {
            if (err.message !== "Connection closing") {
                console.error("[AMQP] conn error", err.message);
            }
        });
        conn.on("close", function () {
            console.error("[AMQP] reconnecting");
            connectRabbitMq();
        });
    });
};
connectRabbitMq();
UPDATE: new code with request buffering
const buffer = [];
let connection = null;
let channel = null;
const connectRabbitMq = () => {
    amqp.connect('amqp://127.0.0.1:5672', function (err, conn) {
        if (err) {
            console.error(err);
            console.log('[AMQP] reconnecting in 1s');
            setTimeout(connectRabbitMq, 1000);
            return;
        }
        conn.createChannel((err, ch) => {
            if (!err) {
                console.log('Channel created');
                channel = ch;
                connection = conn;
                while (buffer.length > 0) {
                    const request = buffer.pop();
                    request();
                }
            }
        });
        conn.once("error", function (err) {
            channel = null;
            connection = null;
            if (err.message !== "Connection closing") {
                console.error("[AMQP] conn error", err.message);
            }
        });
        conn.once("close", function () {
            channel = null;
            connection = null;
            console.error("[AMQP] reconnecting");
            connectRabbitMq();
        });
    });
};
const sendMessage = () => {
    let data = {
        user_id: 1,
        test_id: 2
    };
    if (channel) {
        channel.sendToQueue(q, Buffer.from(JSON.stringify(data)), {
            persistent: true
        });
    }
    else {
        buffer.push(() => {
            channel.sendToQueue(q, Buffer.from(JSON.stringify(data)), {
                persistent: true
            });
        });
    }
};
const receiveMessage = () => {
    if (channel) {
        channel.consume(q, function (msg) {
            // ch.ack(msg);
            console.log(" [x] Received %s", msg.content.toString());
        });
    }
    else {
        buffer.push(() => {
            channel.consume(q, function (msg) {
                // ch.ack(msg);
                console.log(" [x] Received %s", msg.content.toString());
            });
        });
    }
};
There are edge cases where this code won't work - for example, it won't re-establish channel.consume after a reconnect unless receiveMessage is called again explicitly. But overall this hopefully gives you an idea of how to implement proper recovery; one way to handle consumer re-registration is sketched below.
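This is a minimal sketch assuming the buffered-reconnect code above; the consumers array and registerConsumer helper are hypothetical additions, not amqplib APIs:
const consumers = [];
const registerConsumer = (queue, handler) => {
    // remember the consumer so it can survive reconnects
    consumers.push({ queue, handler });
    if (channel) {
        channel.consume(queue, handler);
    }
};
// inside the createChannel callback, right after `channel = ch;`:
// consumers.forEach(({ queue, handler }) => ch.consume(queue, handler));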


Function that returns a Promise

I have a task with a promise and I don't understand how to do it. Please help.
1. Imports function "func1" from file "script1.js":
const func1 = a => {
    switch (a) {
        case 'a':
            return new Promise((resolve) => {
                setTimeout(() => {
                    resolve('result1');
                }, 100);
            });
        case 'b':
            return new Promise((resolve) => {
                setTimeout(() => {
                    resolve('result2');
                }, 100);
            });
        default:
            return new Promise((resolve) => {
                setTimeout(() => {
                    resolve('result3');
                }, 100);
            });
    }
};
module.exports = func1;
2. Reads a string from "input.txt" (the file contains the single character a);
3. Calls "func1" with an argument equal to that string;
4. Waits until the received Promise has state "fulfilled" and then outputs the result to the file "output.txt".
This is how I try to solve it, but nothing works:
const fs = require('fs')
const func1 = require("./script1 (1)")
fs.readFile('./input.txt', 'utf8', (err, data) => {
    if (err) {
        console.error(err)
        return
    }
    console.log(data)
    async function one(data) {
        try {
            const result = await Promise(func1);
            console.log(result);
        } catch (err) {
            console.log(err)
        }
    }
    fs.writeFile("output.txt", one().toString(), function (err) {
        if (err) {
            return console.error(err);
        }
    })
})
The result must be "result1".
To consume a Promise you can use async/await with try/catch blocks. Once you have the asynchronous result value, you can call fs.writeFile(). Try this:
func1.js:
const func1 = a => {
    switch (a) {
        case 'a': return new Promise((resolve) => {
            setTimeout(() => { resolve('result1'); }, 100);
        });
        case 'b': return new Promise((resolve) => {
            setTimeout(() => { resolve('result2'); }, 100);
        });
        default: return new Promise((resolve) => {
            setTimeout(() => { resolve('result3'); }, 100);
        });
    }
};
module.exports = func1;
index.js:
const fs = require('fs');
const func1 = require("./func1.js");
fs.readFile('./input.txt', 'utf8', async (err, data) => {
    if (err) {
        console.error(err);
        return;
    }
    //console.log(data)
    try {
        const result = await func1(data);
        console.log(result);
        fs.writeFile("output.txt", result, function (err) {
            if (err) {
                return console.error(err);
            }
        });
    } catch (err) {
        console.log(err)
    }
});
The async/await way is to change const result = await Promise(func1); to const result = await func1(data);. You can also use .then, but note that .then returns a Promise, so the resolved value has to be used inside the callback, as in the sketch below.
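A minimal sketch of the .then form (data here is the string read from input.txt, as above):
func1(data).then((result) => {
    console.log(result); // 'result1' when data is 'a'
});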
And a better func1 would be:
const func1 = a => {
    return new Promise((resolve) => {
        setTimeout(() => {
            switch (a) {
                // handle cases and call resolve(...)
            }
        }, 100);
    });
};
module.exports = func1;
const func1 = a => {
    return new Promise((resolve) => {
        switch (a) {
            case 'a':
                setTimeout(() => {
                    resolve('result1');
                }, 100);
                break;
            case 'b':
                setTimeout(() => {
                    resolve('result2');
                }, 100);
                break;
            default:
                setTimeout(() => {
                    resolve('result3');
                }, 100);
        }
    });
};
module.exports = func1;
const fs = require('fs')
const func1 = require("./script1 (1)")
fs.readFile("./input.txt", "utf8", (err, data) => {
    if (err) {
        console.error(err);
        return;
    }
    console.log(data);
    func1(data)
        .then((result) => {
            // write the resolved value, not the pending Promise
            fs.writeFile("output.txt", result, function (err) {
                if (err) {
                    return console.error(err);
                }
            });
        })
        .catch((err) => {
            console.log(err);
        });
});
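Alternatively, a minimal sketch using the promise-based fs API that ships with Node 10+ (the require path './script1' is an assumption here):
const fs = require('fs').promises;
const func1 = require('./script1');
(async () => {
    const data = await fs.readFile('./input.txt', 'utf8');
    const result = await func1(data.trim()); // trim a trailing newline, if any
    await fs.writeFile('output.txt', result);
    console.log(result); // 'result1' when input.txt contains 'a'
})().catch(console.error);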

NodeJS generic-pool how to set request timeout?

I'm new to using generic-pool. I see that there is a maxWaitingClients setting, but it doesn't specify how long a request can stay in the queue before it times out. Is there any way for me to specify such a timeout setting?
EDIT: Added my code for how I'm currently using generic-pool:
function createPool() {
    const factory = {
        create: function () {
            return new Promise((resolve, reject) => {
                const socket = new net.Socket();
                socket.connect({
                    host: sdkAddress,
                    port: sdkPort,
                });
                socket.setKeepAlive(true);
                socket.on('connect', () => {
                    resolve(socket);
                });
                socket.on('error', error => {
                    reject(error);
                });
                socket.on('close', hadError => {
                    console.log(`socket closed: ${hadError}`);
                });
            });
        },
        destroy: function (socket) {
            return new Promise((resolve) => {
                socket.destroy();
                resolve();
            });
        },
        validate: function (socket) {
            return new Promise((resolve) => {
                if (socket.destroyed || !socket.readable || !socket.writable) {
                    return resolve(false);
                } else {
                    return resolve(true);
                }
            });
        }
    };
    return genericPool.createPool(factory, {
        max: poolMax,
        min: poolMin,
        maxWaitingClients: poolQueue,
        testOnBorrow: true
    });
}
const pool = createPool();
async function processPendingBlocks(ProcessingMap, channelid, configPath) {
    setTimeout(async () => {
        let nextBlockNumber = fs.readFileSync(configPath, "utf8");
        let processBlock;
        do {
            processBlock = ProcessingMap.get(channelid, nextBlockNumber);
            if (processBlock == undefined) {
                break;
            }
            try {
                const sock = await pool.acquire();
                await blockProcessing.processBlockEvent(channelid, processBlock, sock, configPath, folderLog);
                await pool.release(sock);
            } catch (error) {
                console.error(`Failed to process block: ${error}`);
            }
            ProcessingMap.remove(channelid, nextBlockNumber);
            fs.writeFileSync(configPath, String(parseInt(nextBlockNumber, 10) + 1));
            nextBlockNumber = fs.readFileSync(configPath, "utf8");
        } while (true);
        processPendingBlocks(ProcessingMap, channelid, configPath);
    }, blockProcessInterval);
}
Since you are using pool.acquire(), you would use the option acquireTimeoutMillis to set a timeout for how long .acquire() will wait for an available resource from the pool before timing out.
You would presumably add that option here:
return genericPool.createPool(factory, {
    max: poolMax,
    min: poolMin,
    maxWaitingClients: poolQueue,
    testOnBorrow: true,
    acquireTimeoutMillis: 3000 // max time (ms) to wait for an available resource
});
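With acquireTimeoutMillis set, pool.acquire() rejects once the timeout elapses, so the acquiring code should handle that rejection. A minimal sketch against the loop above (whether to retry or skip the block is a policy decision left open here):
try {
    const sock = await pool.acquire();
    try {
        await blockProcessing.processBlockEvent(channelid, processBlock, sock, configPath, folderLog);
    } finally {
        await pool.release(sock); // release even if processing throws
    }
} catch (error) {
    // acquire() rejected, most likely because the timeout elapsed
    console.error(`Could not acquire a socket: ${error}`);
}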

Nodejs Multiple pools created on refresh

I have several DBs for which I am using connection pools in node.js. Every time I refresh the page, I think the pools are created again: I refreshed the page 3 times and the promises resolved 3 times. I have removed several databases just to make it a little easier to read here.
And if I un-comment the connection close line, my app crashes. I can't seem to figure out why.
const config = require("../config/config");
const oracledb = require("oracledb");
var crm1connPromise = new Promise((resolve, reject) => {
oracledb.createPool({
user: config.crm1.user,
password: config.crm1.password,
connectString: config.crm1.connectString,
poolAlias: config.crm1.poolAlias,
poolMin: 0,
poolMax: 10,
poolTimeout: 300
}, (error, pool) => {
if (error) {
reject(err);
}
resolve("CRM1 Promise resolved")
});
});
var query2connPromise = new Promise((resolve, reject) => {
oracledb.createPool({
user: config.query2.user,
password: config.query2.password,
connectString: config.query2.connectString,
poolAlias: config.query2.poolAlias,
poolMin: 0,
poolMax: 10,
poolTimeout: 300
}, (error, pool) => {
if (error) {
reject(err);
}
resolve("QUERY2 Promise resolved --------")
});
});
var promiseArray = [crm1connPromise, crm2connPromise, crm3connPromise, crm4connPromise, csfp1connPromise, csfp2connPromise, csfp3connPromise, csfp4connPromise, cact1connPromise, cact2connPromise, cact3connPromise, cact4connPromise, cospconnPromise, cchnconnPromise, bbaseconnPromise, bcdrconnPromise, vcdbconnPromise, crptconnPromise, query2connPromise];
function getDBConnection (dbname) {
return new Promise((resolve, reject) => {
try {
Promise.all(promiseArray).then((message) => {
console.log(message);
const pool = oracledb.getPool(dbname);
pool.getConnection( (err, connection) => {
if (err) {
reject(err);
console.log(err);
}
resolve(connection);
});
});
} catch (error) {
reject(error);
}
});
}
module.exports.query = function(dbname, sql, bind = []){
return new Promise ((resolve,reject) =>{
var conn
try {
getDBConnection(dbname).then((connection) =>{
connection.execute(sql,bind,(err,result)=>{
if (err){
reject(err);
}
resolve(result);
})
//connection.close(0);
})
} catch (error) {
reject(error);
}
})
}
You can use a Singleton.
Please google 'Singleton pattern' for examples.
Like this:
dataBaseManager.js:
'use strict'
var Singleton = (function () {
    var instance;
    function createInstance() {
        var object = new dataBaseManager();
        return object;
    }
    return {
        getInstance: function () {
            if (!instance) {
                instance = createInstance();
            }
            return instance;
        }
    };
})();
function dataBaseManager() {
    this.connected = false;
    this.client = null;
    this.dataBase = null;
    //public methods
    this.connect = function () {
        try {
            your_database.connect({}, (err, client) => {
                if (err) {
                    this.connected = false;
                    this.client = null;
                    this.dataBase = null;
                    return;
                }
                this.connected = true;
                this.client = client;
                this.dataBase = client.db();
            });
        } catch (error) {
        }
    };
    this.disconnect = function () {
        try {
            if (this.client) {
                this.client.close();
                this.connected = false;
                this.client = null;
                this.dataBase = null;
            }
        } catch (error) {
        }
    };
}
module.exports = Singleton;
repository.js:
const dataBaseManager = require("./dataBaseManager").getInstance();
your_get_sample_data_from_data_base_func = function (callback) {
    dataBaseManager.dataBase
        .find({})
        .toArray(function (err, result) {
            if (err) {
                return callback(err, null);
            }
            callback(null, result);
        });
};
index.js:
const dataBaseManager = require("./dataBaseManager").getInstance();
function connect() {
    dataBaseManager.connect();
}
function disconnect() {
    dataBaseManager.disconnect();
}
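Note that Node's module cache already gives a require()'d module singleton-like behavior: the module body runs only once per process, no matter how many files require it. So creating the pools in a module that is required everywhere, rather than per request, avoids the repeated creation. A minimal sketch, with the file name pools.js and the single-pool config as assumptions:
// pools.js - evaluated once per process, however often it is required
const config = require("../config/config");
const oracledb = require("oracledb");
// createPool() returns a Promise when no callback is passed
const poolsReady = oracledb.createPool({
    user: config.crm1.user,
    password: config.crm1.password,
    connectString: config.crm1.connectString,
    poolAlias: config.crm1.poolAlias
});
module.exports = poolsReady;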
Look at the node-oracledb example webappawait.js which starts the pool outside the web listener code path.
async function init() {
    try {
        await oracledb.createPool({
            user: dbConfig.user,
            password: dbConfig.password,
            connectString: dbConfig.connectString
        });
        const server = http.createServer();
        server.on('error', (err) => {
            console.log('HTTP server problem: ' + err);
        });
        server.on('request', (request, response) => {
            handleRequest(request, response);
        });
        await server.listen(httpPort);
        console.log("Server is running at http://localhost:" + httpPort);
    } catch (err) {
        console.error("init() error: " + err.message);
    }
}
async function handleRequest(request, response) {
    . . .
}
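For a clean shutdown you would also close the pool once, mirroring the one-time startup. A minimal sketch (the 10-second drain time is an assumption):
async function closePoolAndExit() {
    try {
        // give in-flight connections up to 10 seconds before forcing close
        await oracledb.getPool().close(10);
        process.exit(0);
    } catch (err) {
        console.error(err.message);
        process.exit(1);
    }
}
process.once('SIGTERM', closePoolAndExit);
process.once('SIGINT', closePoolAndExit);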

Streaming data from oracle db to browser with node.js

I am learning node.js and databases. I am trying to stream heavy data, about 7,700,000 rows and 96 columns, from Oracle to the client. Later I use that data for a virtual table. But on the client only one row is showing, and then the node command prompt displays the error "Cannot set headers after they are sent to the client". How do I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();
app.use(cors());
oracledb.outFormat = oracledb.ARRAY;
oracledb.getConnection({
    user: 'user',
    password: 'password',
    connectString: 'some string'
},
    (err, connection) => {
        if (err) {
            console.error(err.message);
            return;
        }
        var rowsProcessed = 0;
        var startTime = Date.now();
        var dataSize = 0;
        var stream = connection.queryStream(
            'SELECT * FROM table'
        );
        // stream.on('data', function (data) {
        //     rowsProcessed++;
        //     // console.log(JSON.stringify(data));
        //     // console.log(data);
        //     dataSize = dataSize + data.length;
        //     // oracleData.push(data);
        //     // console.log("pushing");
        //     // console.log(oracleData);
        //     // app.get('/data', (req, res) => {
        //     //     res.send(data);
        //     // })
        //     // console.log(data);
        // });
        app.get('/data', (req, res) => {
            stream.on('data', (data) => {
                rowsProcessed++;
                dataSize = dataSize + data.length;
                res.send(JSON.stringify(data));
            });
        });
        stream.on('end', function () {
            var t = ((Date.now() - startTime) / 1000);
            console.log('queryStream(): rows: ' + rowsProcessed +
                ', seconds: ' + t);
            // console.log(dataSize + ' bytes');
            connection.close(
                function (err) {
                    if (err) {
                        console.error(err.message);
                    } else {
                        console.log("connection closed");
                    }
                }
            );
        });
    }
);
app.listen(5000, () => {
    console.log('Listening at 5000');
});
I tried the above approach, but it is failing. How can I achieve the output?
The browser freezes if I output the entire data at once, which is why I am trying to use streaming; and the node command prompt displays "out of memory" if I load the entire data at once.
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important: you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');
async function get(req, res, next) {
    try {
        const conn = await oracledb.getConnection();
        const stream = await conn.queryStream('select * from employees', [], { outFormat: oracledb.OBJECT });
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.write('[');
        stream.on('data', (row) => {
            res.write(JSON.stringify(row));
            res.write(',');
        });
        stream.on('end', () => {
            res.end(']');
        });
        stream.on('close', async () => {
            try {
                await conn.close();
            } catch (err) {
                console.log(err);
            }
        });
        stream.on('error', async (err) => {
            next(err);
            try {
                await conn.close();
            } catch (err) {
                console.log(err);
            }
        });
    } catch (err) {
        next(err);
    }
}
module.exports.get = get;
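For context, a handler like this is meant to be mounted on a route; a minimal sketch, assuming an Express app, a pool created at startup, and a hypothetical employees.js module exporting the get above:
const express = require('express');
const employees = require('./employees'); // module name assumed
const app = express();
app.get('/employees', employees.get);
app.listen(3000, () => console.log('Listening at 3000'));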
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');
class ToJSONArray extends Transform {
    constructor() {
        super({ objectMode: true });
        this.push('[');
    }
    _transform(row, encoding, callback) {
        if (this._prevRow) {
            this.push(JSON.stringify(this._prevRow));
            this.push(',');
        }
        this._prevRow = row;
        callback(null);
    }
    _flush(done) {
        if (this._prevRow) {
            this.push(JSON.stringify(this._prevRow));
        }
        this.push(']');
        delete this._prevRow;
        done();
    }
}
async function get(req, res, next) {
    try {
        const toJSONArray = new ToJSONArray();
        const conn = await oracledb.getConnection();
        const stream = await conn.queryStream('select * from employees', [], { outFormat: oracledb.OBJECT });
        res.writeHead(200, { 'Content-Type': 'application/json' });
        stream.pipe(toJSONArray).pipe(res);
        stream.on('close', async () => {
            try {
                await conn.close();
            } catch (err) {
                console.log(err);
            }
        });
        stream.on('error', async (err) => {
            next(err);
            try {
                await conn.close();
            } catch (err) {
                console.log(err);
            }
        });
    } catch (err) {
        next(err);
    }
}
module.exports.get = get;

Node.js tcp connection

How to send "End" message after resolving promise? Sometimes I can send 2 "end" messages out of 4, sometimes 3. Files from FTP are being downloaded and it's ok. The only thing that doesn't work is sending "end" message after downloading a file. Do you have any idea why this code doesn't work properly?
This code was updated:
const ftp = require("jsftp");
const fs = require("fs");
const net = require("net");
const mkdirp = require("mkdirp");
class ftpCredentials {
constructor(host) {
this.user = "xxx";
this.pass = "xxx";
this.host = host;
}
}
const downloadFromFTP = (credentials, file) => {
const client = new ftpCredentials(credentials.host);
const ftpClient = new ftp(client);
return new Promise((res, rej) => {
let buf = null;
ftpClient.get(file, (err, stream) => {
if (!err && typeof stream !== "undefined") {
// Events
stream.on("data", (data) => {
if (buf === null) buf = new Buffer(data);
else buf = Buffer.concat([buf, data]);
});
stream.on("close", (err) => {
if (err) rej("FILE_ERROR");
const actualPath = `${credentials.path}/${file}`;
fs.writeFile(actualPath, buf, "binary", (err) => {
if (err) rej(err);
ftpClient.raw("quit", (err, data) => {
if (err) rej(err)
res(file);
});
});
});
// Resume stream
stream.resume();
} else {
rej("STREAM_ERROR");
}
});
})
}
const handleSavingFile = (credentials, filesOnFTP) => {
mkdirp(credentials.path, () => {
fs.readdir(credentials.path, (err, fileNames) => {
if (err) return err;
const needToConnectToFTP = filesOnFTP.filter(name => fileNames.indexOf(name) !== -1).length === 0;
const socketForEndMsg = net.createConnection(18005, credentials.host, () => {
Promise.all(filesOnFTP.map((file) => {
return new Promise((resolve, reject) => {
// The problem is here:
const socketWrite = socketForEndMsg.write(`End|ftp://${credentials.host}/${file}`, "UTF16LE");
resolve(socketWrite);
// Events
socketForEndMsg.on("error", () => {
console.log("Problem with sending End message!");
reject();
});
});
})).then(() => {
socketForEndMsg.end();
}).catch((err) => {
console.log(err);
});
});
})
})
}
const getScheme = (credentials) => {
const socketForData = net.createConnection(18005, credentials.host, () => socketForData.write("Scheme", "UTF16LE"));
// Events
socketForData.on("close", () => console.log("TCP Connection closed"));
socketForData.on("error", err => console.log(err));
socketForData.on("data", (data) => {
socketForData.end();
const toUTF16Format = Buffer.from(data).toString("UTF16LE");
const arrayFromTCPMessage = toUTF16Format.split(/\||;/);
const filteredImages = arrayFromTCPMessage.filter(item => item.startsWith("scheme"))
const isOK = arrayFromTCPMessage[0] === "OK";
if (isOK) {
handleSavingFile(credentials, filteredImages);
}
})
}
module.exports = getScheme;
Error message: Error: This socket is closed
at Socket._writeGeneric (net.js:722:18)
at Socket._write (net.js:782:8)
at doWrite (_stream_writable.js:407:12)
at writeOrBuffer (_stream_writable.js:393:5)
at Socket.Writable.write (_stream_writable.js:290:11)
at Promise (xxx\getScheme.js:56:29)
at new Promise (<anonymous>)
at Promise.all.filesOnFTP.map (xxx\getScheme.js:54:18)
at Array.map (<anonymous>)
at Socket.net.createConnection (xxx\getScheme.js:52:32)
I see that you want to listen to the error event, and that is what made you use a Promise to catch the error. But the placement of the error event handler registration is wrong: it is inside the .map call, so the error handler gets registered once per entry in filesOnFTP.
I've moved that error handler out, and I use the writable flag to check whether the socket is still writable before writing to it. I have also added a few more event handlers that will give you more information about the socket status (for debugging; you can remove them later).
const handleSavingFile = (credentials, filesOnFTP) => {
    mkdirp(credentials.path, () => {
        fs.readdir(credentials.path, (err, fileNames) => {
            if (err) return err;
            const needToConnectToFTP = filesOnFTP.filter(name => fileNames.indexOf(name) !== -1).length === 0;
            const socketForEndMsg = net.createConnection(18005, credentials.host, () => {
                for (let file of filesOnFTP) {
                    // Before writing to the socket, check that it is still writable!
                    if (socketForEndMsg.writable) {
                        socketForEndMsg.write(`End|ftp://${credentials.host}/${file}`, "UTF16LE");
                    }
                    else {
                        console.log('Socket is not writable! Maybe closed already?');
                    }
                }
            });
            // This is the correct place for the error handler!
            socketForEndMsg.on("error", (error) => {
                console.log("Problem with sending End message!", error);
            });
            socketForEndMsg.on("close", () => {
                console.log("Socket is fully closed!");
            });
            socketForEndMsg.on("end", () => {
                console.log("The other end of the socket has sent a FIN packet!");
            });
        });
    });
};
Let me know if this works!
Thanks!
You can try waiting for the connect event on socketForEndMsg and then start sending your data:
const handleSavingFile = (credentials, filesOnFTP) => {
    mkdirp(credentials.path, () => {
        fs.readdir(credentials.path, (err, fileNames) => {
            if (err) return err;
            const needToConnectToFTP = filesOnFTP.filter(name => fileNames.indexOf(name) !== -1).length === 0;
            const socketForEndMsg = net.createConnection(18005, credentials.host);
            socketForEndMsg.on('connect', () => {
                Promise.all(filesOnFTP.map((file) => {
                    return new Promise((resolve, reject) => {
                        const socketWrite = socketForEndMsg.write(`End|ftp://${credentials.host}/${file}`, "UTF16LE");
                        resolve(socketWrite);
                        // Events
                        socketForEndMsg.on("error", () => {
                            console.log("Problem with sending End message!");
                            reject();
                        });
                    });
                })).then(() => {
                    socketForEndMsg.end();
                }).catch((err) => {
                    console.log(err);
                });
            });
        });
    });
};
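Combining both answers, a minimal sketch: wait for connect, register the error handler once, and use the callback form of socket.write() (which fires once the data is flushed, with an error argument on failure) so the socket is only ended after the last message has gone out. The message format is taken from the question; the sendEndMessages helper itself is an assumption:
const sendEndMessages = (socketForEndMsg, credentials, filesOnFTP) => {
    // register once, not once per file
    socketForEndMsg.on("error", (error) => {
        console.log("Problem with sending End message!", error);
    });
    const writes = filesOnFTP.map((file) => new Promise((resolve, reject) => {
        socketForEndMsg.write(`End|ftp://${credentials.host}/${file}`, "UTF16LE",
            (err) => err ? reject(err) : resolve());
    }));
    Promise.all(writes)
        .then(() => socketForEndMsg.end()) // close only after every write has flushed
        .catch((err) => console.log(err));
};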
