Managing requests from Front-End with node-oracledb in backend - node.js

I am writing a new method that connects to the database, runs SQL queries, and closes the connection every time a request is made from the front end. A connection to the database is established and torn down again and again for every query I want to run. Does this impact performance? Is this the right way, or is there a better way to do this?
app.post('/register', (req, res) => {
  registerDb(req.body).then(resp => {
    res.json({ "message": resp });
  })
  .catch(err => {
    console.log(err);
  });
});

app.post('/signin', (req, res) => {
  checkAuth(req.body).then(response => {
    res.send(response);
  })
  .catch(err => {
    console.log(err);
  });
});

app.listen(4000);
async function registerDb(data) {
  let conn;
  try {
    conn = await oracledb.getConnection(config);
    let result = await conn.execute(
      `INSERT INTO "User" (name, email, id, password, age)
       VALUES (:name, :email, :id, :password, :age)`,
      data,
      { autoCommit: true }
    );
    console.log("Rows inserted: " + result.rowsAffected); // 1
    console.log("ROWID of new row: " + result.lastRowid);
    return result.rowsAffected;
  } catch (err) {
    console.log('Ouch!', err);
    return err.message;
  } finally {
    if (conn) { // conn assignment worked, need to close
      await conn.close();
    }
  }
}
async function checkAuth(data) {
  let conn;
  try {
    conn = await oracledb.getConnection(config);
    let result = await conn.execute(
      `SELECT name
       FROM "User"
       WHERE email = :email AND password = :password`,
      {
        email: { val: data.email },
        password: { val: data.password }
      }
    );
    return result.rows;
  } catch (err) {
    console.log('Ouch!', err);
    return err.message;
  } finally {
    if (conn) { // conn assignment worked, need to close
      await conn.close();
    }
  }
}

Opening and closing connections to the DB impacts performance. A process needs to be started on the DB host, and memory has to be allocated and initialized; the reverse happens at connection close. And because each connection is brand new, it cannot reuse any cached data for statement execution.
From the node-oracledb pooling documentation:
When applications use a lot of connections for short periods, Oracle recommends using a connection pool for efficiency.
Review that manual, and look at examples like webapp.js.
Make sure you increase UV_THREADPOOL_SIZE for apps that keep multiple connections open.
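For reference, here is a minimal sketch of the pooled approach, assuming the same config object and "User" table from the question; the pool sizes are illustrative:
const oracledb = require('oracledb');

// Create the pool once at startup, not per request.
async function init() {
  await oracledb.createPool({
    ...config,        // user, password, connectString from the question's config
    poolMin: 1,
    poolMax: 4,
    poolIncrement: 1
  });
  app.listen(4000);
}

async function registerDb(data) {
  let conn;
  try {
    // Borrows a connection from the default pool instead of opening a new one.
    conn = await oracledb.getConnection();
    const result = await conn.execute(
      `INSERT INTO "User" (name, email, id, password, age)
       VALUES (:name, :email, :id, :password, :age)`,
      data,
      { autoCommit: true }
    );
    return result.rowsAffected;
  } finally {
    if (conn) {
      await conn.close(); // releases the connection back to the pool
    }
  }
}

init();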

Related

close pool in mssql nodejs

I use the mssql package in Node.js to connect to SQL Server. I use the code below to get cities, for example, and I have a question: is this the correct way to close the connection after the query has finished?
I ask to be sure. Thanks.
const getCities = async (req, resp) => {
  try {
    const { proId } = req.params;
    await sql.connect(sqlConfig).then(async (pool) => {
      return await pool
        .request()
        .input("proId", sql.BigInt, proId)
        .execute("getCities")
        .then((result) => {
          resp.status(200).send(result.recordsets[0]);
        })
        .catch((err) => {
          resp.status(500).send({ msg: serverError, err: err });
        })
        .finally(() => {
          pool.close();
        });
    });
  } catch (err) {
    return resp.status(500).send({ msg: serverError, err: err });
  }
};
According to this answer (link), we should not close the pool after every request; keep one pool open for the lifetime of the app, as sketched below.
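A minimal sketch of what that looks like with the mssql package, keeping a single pool promise at module scope (the sqlConfig and serverError names are carried over from the question):
const sql = require('mssql');

// Create the pool once and share the promise across all handlers.
const poolPromise = new sql.ConnectionPool(sqlConfig).connect();

const getCities = async (req, resp) => {
  try {
    const { proId } = req.params;
    const pool = await poolPromise; // reuse the shared pool
    const result = await pool
      .request()
      .input('proId', sql.BigInt, proId)
      .execute('getCities');
    resp.status(200).send(result.recordsets[0]);
    // no pool.close() here: the pool stays open for the next request
  } catch (err) {
    resp.status(500).send({ msg: serverError, err: err });
  }
};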

Nodejs - Wrong usage of promises

I have the following async method:
alreadyLoaded: async function (id) {
  const pool = await poolPromise;
  return pool.request()
    .input('idParameter', id)
    .query('SELECT count(*) AS value FROM dbo.partidos WHERE id=@idParameter')
    .then(result => {
      console.log(result.recordset[0].value)
      result.recordset[0].value > 0
    }).catch(function(err) {
      console.log(err.message);
    });
},
Invoked in another one:
processMatches: function (payload) {
  payload.matches.forEach(p => {
    if (partidosRepository.alreadyLoaded(p.id))
    {
      console.log("El partido ya fue cargado.");
      return;
    }
    // ...
  });
}
The alreadyLoaded method checks whether a record has already been inserted into the database, and it's invoked inside another method for validation. The problem here is that the processMatches method keeps processing records before alreadyLoaded finishes with the current one.
I'm managing promises incorrectly here. Can anyone help me sort this out?
Here's the database connection:
const poolPromise = new sql.ConnectionPool(config)
  .connect()
  .then(pool => {
    console.log('Connected to localhost ' + config.database + ' database');
    return pool;
  })
  .catch(err => console.log('Database connection failed. Error: ', err));

module.exports = {
  sql, poolPromise
};
Why don't you build your if statement like this:
if (await partidosRepository.alreadyLoaded(p.id))
Also keep in mind that you are not returning a boolean value here:
}).catch(function(err) {
  console.log(err.message);
});
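Note too that await inside a forEach callback won't pause the loop: forEach ignores the promises its callback returns. A sketch of both fixes together, assuming the same partidosRepository and poolPromise from the question, using a for...of loop so each match is checked in turn:
// alreadyLoaded must actually return the boolean:
alreadyLoaded: async function (id) {
  const pool = await poolPromise;
  const result = await pool.request()
    .input('idParameter', id)
    .query('SELECT count(*) AS value FROM dbo.partidos WHERE id=@idParameter');
  return result.recordset[0].value > 0;
},

// forEach fires all callbacks without waiting, so use for...of
// to process the matches one at a time:
processMatches: async function (payload) {
  for (const p of payload.matches) {
    if (await partidosRepository.alreadyLoaded(p.id)) {
      console.log('El partido ya fue cargado.');
      continue;
    }
    // ...load the match here...
  }
}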

What is the difference between pool.query and client.query with the Node pg library?

I am trying to understand pool.query and client.query and the differences between the two and when to use which.
I have an express application that runs the following code at a certain end-point.
// I am just using params for a quick setup
router.get("/:username/:password", (req, res) => {
  const { username, password } = req.params;
  // crypt & gen_salt functions from pgcrypto
  client // *** here ***
    .query(
      `SELECT username, email, password FROM users
       WHERE users.username = $1
       AND users.password = crypt($2, password)
      `, [username, password]
    )
    .then(user => {
      console.log("Users ==>", user.rows);
      const userCount = user.rows.length;
      if (userCount < 1) {
        res.status(204).json({ userFound: "No User Found" });
      } else {
        res.status(200).json(user.rows[0]);
      }
    })
    .catch(err => console.log(err));
});
// index.js (starting point of application)
require("dotenv").config({ debug: true });
const app = require("./middleware");

// Port Listener
app.listen(process.env.PORT, () =>
  console.log(
    "Server running on PORT <======= " + process.env.PORT + " =======>"
  )
);
I hit the API endpoint and both ways return the exact same result. I have read that pooling is better than instantiating a new client instance, but I'm honestly not sure whether that is true, since Node should keep the same client instance because of the way it's being exported; this is beyond my current knowledge. The readings I found on this topic were sparse, so if anyone has articles they would recommend, I'd be happy to check them out as well.
I believe both run the same SQL. The difference is that pool.query is a convenience method: it checks a client out of the pool, runs the query on it, and releases the client back to the pool when done.
So pool.query can be used to run a one-off query directly, rather than acquiring a client and then running the query with that client yourself.
Acquiring Client from Pool
const { Pool } = require('pg')
const pool = new Pool()

pool.connect((err, client, release) => {
  if (err) {
    return console.error('Error acquiring client', err.stack)
  }
  // Same client.query as when connecting to the DB with a single client.
  client.query('SELECT NOW()', (err, result) => {
    release()
    if (err) {
      return console.error('Error executing query', err.stack)
    }
    console.log(result.rows)
  })
})
Same as above except without pool
const { Client } = require('pg')
const client = new Client()

client.connect()
client.query('SELECT NOW()', (err, res) => {
  if (err) throw err
  console.log(res)
  client.end()
})
Directly calling query on pool
const { Pool } = require('pg')
const pool = new Pool()

// Direct query without acquiring a client object.
pool.query('SELECT $1::text as name', ['brianc'], (err, result) => {
  if (err) {
    return console.error('Error executing query', err.stack)
  }
  console.log(result.rows[0].name) // brianc
})
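One practical difference worth noting: consecutive pool.query calls may run on different connections, so a multi-statement transaction needs a client checked out of the pool and held for the duration. A sketch under that assumption, using a hypothetical accounts table:
const { Pool } = require('pg')
const pool = new Pool()

// A transaction must run on one client: pool.query could send each
// statement over a different connection.
async function transferFunds(fromId, toId, amount) {
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    await client.query(
      'UPDATE accounts SET balance = balance - $1 WHERE id = $2',
      [amount, fromId]
    )
    await client.query(
      'UPDATE accounts SET balance = balance + $1 WHERE id = $2',
      [amount, toId]
    )
    await client.query('COMMIT')
  } catch (err) {
    await client.query('ROLLBACK')
    throw err
  } finally {
    client.release() // return the client to the pool
  }
}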

Route returning empty array even though inserting into db seems to be working

I'm learning how to use Sqlite3 with Node, and I'm running into a strange issue. In componentWillMount() on my react front end's main App.js, I make an axios request to the route /all so I can populate a contact list.
What's weird is that when I hit my other route, /add, with a different axios request to add a contact, it reaches my then(), like so:
axios
  .post('/add', contactData)
  .then(res =>
    console.log(`Contact ${contactData.name} added successfully`)
  )
  .catch(err => console.log('Error encountered: ', err));
With a slight delay too, because I setState before making my axios request, which makes me think the contact is added to the contacts table.
But when I access localhost:5000/all directly, I receive an empty array [] as the response. I'm not sure what's going on.
Here's my server.js
const express = require('express');
const sqlite3 = require('sqlite3');
const path = require('path');
const cors = require('cors');

const dbName = 'my.db';
const tableName = 'Contacts';
const dbPath = path.resolve(__dirname, dbName);

const app = express();
const port = process.env.PORT || 5000;

app.use(cors());
app.listen(port, () => console.log(`Server running on port ${port}`));

app.get('/all', (req, res) => {
  let db = new sqlite3.Database(dbPath);
  let sql = `SELECT number FROM ${tableName}`;
  db.run(
    `CREATE TABLE IF NOT EXISTS ${tableName}(name text, number text, address text)`
  );
  db.all(sql, [], (err, rows) => {
    if (err) {
      return res.status(500).json(err);
    } else {
      return res.json(rows);
    }
  });
});

app.post('/add', (req, res) => {
  let db = new sqlite3.Database(dbPath);
  db.run(
    `INSERT INTO ${tableName}(name, number, address) VALUES(${req.name},${req.number},${req.address})`,
    [],
    err => {
      if (err) return res.status(500).json(err);
    }
  );
  return res.json({ msg: 'success' });
});
Edit:
I should note that when I navigate to /all I get this,
and when I try to post to /add, I get the error
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
Nowhere am I sending multiple responses, though.
I would not initialize your db and create your table every time you hit /all.
Try this:
// get this out of the `/all` route, no need to initialize the db object over and over again
let db = new sqlite3.Database(dbPath);

// also leave this outside the `/all` route:
// no need to create the table over and over again.
db.run(
  `CREATE TABLE IF NOT EXISTS ${tableName}(name text, number text, address text)`
);

app.get('/all', (req, res) => {
  let sql = `SELECT number FROM ${tableName}`;
  // according to the sqlite3 api, the second parameter is optional, so just leave it out:
  db.all(sql, (err, rows) => {
    if (err) return res.status(500).json(err); // if you use return, you don't need 'else' because the code will never reach it.
    res.json(rows);
  });
});
Your /add route also looks a bit off. Try this:
app.post('/add', (req, res) => {
  // let db = new sqlite3.Database(dbPath); // remove this as you already defined it at the beginning of your code.
  db.run(
    `INSERT INTO ${tableName}(name, number, address) VALUES(${req.name},${req.number},${req.address})`,
    err => {
      if (err) return res.status(500).json(err);
      // send the success response in the callback (after the query has run!);
      // otherwise, on an error, express.js tries to send an error message AND
      // a success message, giving you "Can't set headers after they are sent".
      res.json({ msg: 'success' });
    }
  );
});
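Both versions still interpolate req.name directly into the SQL string. With Express, the posted fields live on req.body (once the express.json() middleware is installed), and sqlite3 supports ? placeholders, so a safer sketch of the route might look like this (field names assumed from the question):
app.use(express.json()); // needed so req.body is populated

app.post('/add', (req, res) => {
  const { name, number, address } = req.body; // fields are on req.body, not req
  db.run(
    `INSERT INTO ${tableName}(name, number, address) VALUES (?, ?, ?)`,
    [name, number, address], // placeholders avoid quoting bugs and SQL injection
    err => {
      if (err) return res.status(500).json(err);
      res.json({ msg: 'success' });
    }
  );
});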
You can also fix this issue using async/await in Node.js.
JavaScript is asynchronous in nature, and so is Node. Asynchronous programming is a design pattern that ensures non-blocking code execution: code that doesn't depend on a result isn't held up waiting for it, which improves system efficiency and throughput. But in some cases, we need to wait for the response.
(Note that the plain sqlite3 API is callback-based, so the awaits below only take effect if you wrap the calls in promises first; see the sketch after this answer.)
app.get('/all', async (req, res) => {
  let db = new sqlite3.Database(dbPath);
  let sql = `SELECT number FROM ${tableName}`;
  await db.run(
    `CREATE TABLE IF NOT EXISTS ${tableName}(name text, number text, address text)`
  );
  await db.all(sql, [], (err, rows) => {
    if (err) {
      return res.status(500).json(err);
    } else {
      return res.json(rows);
    }
  });
});

app.post('/add', async (req, res) => {
  let db = new sqlite3.Database(dbPath);
  await db.run(
    `INSERT INTO ${tableName}(name, number, address) VALUES(${req.name},${req.number},${req.address})`,
    [],
    err => {
      if (err) return res.status(500).json(err);
    }
  );
  return res.json({ msg: 'success' });
});
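Since the plain sqlite3 driver is callback-based, those awaits have no effect by themselves. A minimal sketch of making them meaningful with util.promisify, assuming the same dbPath and tableName from the question:
const util = require('util');
const sqlite3 = require('sqlite3');

const db = new sqlite3.Database(dbPath);
// Wrap the callback API so that await actually waits.
const dbAll = util.promisify(db.all.bind(db));

app.get('/all', async (req, res) => {
  try {
    const rows = await dbAll(`SELECT number FROM ${tableName}`);
    res.json(rows);
  } catch (err) {
    res.status(500).json(err);
  }
});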

Mongo makes new connection on each page refresh

When I load my page I want some products to be shown, so I make a GET request that retrieves them from the database. However, when I refresh the page I notice that the old connection remains open. How do I make sure the old connections get closed?
Here's my code:
const MongoClient = require('mongodb').MongoClient;

const connection = (closure) => {
  return MongoClient.connect(config.connectionString, (err, client) => {
    if (err) {
      return winston.log('error', now() + err);
    }
    closure(client);
  });
};
...
router.get('/products', (req, res) => {
  connection((client) => {
    client.db('dbname').collection('collectionname')
      .find({})
      .toArray()
      .then((products) => {
        response.data = products;
        response.message = "Products retrieved successfully!";
        res.json(response);
      })
      .catch((err) => {
        winston.log('error', now() + err);
        sendError(err, res);
      });
  });
});
Well, each time your /products route is called, you do create a new MongoClient instance. To limit the number of connections to your database, you may either connect once and save your MongoClient instance:
let client = undefined;

const connection = (closure) => {
  // Return the client if any...
  if (client) return closure(client);
  return MongoClient.connect(config.connectionString, (err, c) => {
    if (err) {
      return winston.log('error', now() + err);
    }
    // Save the client.
    client = c;
    closure(client);
  });
};
...or simply close the MongoClient connection you instantiated once you're done with it:
router.get('/products', (req, res) => {
  connection((client) => {
    client.db('dbname').collection('collectionname')
      .find({})
      .toArray()
      .then((products) => {
        response.data = products;
        response.message = "Products retrieved successfully!";
        // Close the MongoClient...
        client.close();
        res.json(response);
      })
      .catch((err) => {
        winston.log('error', now() + err);
        sendError(err, res);
        // Close the MongoClient...
        client.close();
      });
  });
});
I would advise you to go with the first solution: the MongoClient maintains a connection pool, so having multiple clients has no advantage. In addition, it lets you check whether the DB is remotely available before executing anything else (just connect to the DB in your app's init(), save the client instance, and you're done).
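A minimal sketch of that first approach with async/await, assuming the same config, winston, now(), and sendError helpers from the question (the port number is illustrative):
const { MongoClient } = require('mongodb');

let client;

// Connect once at startup; fail fast if the DB is unreachable.
async function init() {
  client = await MongoClient.connect(config.connectionString);
  app.listen(3000);
}

router.get('/products', (req, res) => {
  client.db('dbname').collection('collectionname')
    .find({})
    .toArray()
    .then((products) => {
      res.json({ data: products, message: 'Products retrieved successfully!' });
    })
    .catch((err) => {
      winston.log('error', now() + err);
      sendError(err, res);
    });
});

init().catch(err => winston.log('error', now() + err));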
