Multi-database private REST API in Node.js

I'm looking to set up a REST API for a number of my apps to consume and publish data to/from MongoDB Atlas. I have a number of organisations as customers, and a number of different contracts (to build information models) with each customer, each of which must be kept in a separate database (for various reasons I shan't go into here). It's basically an information model management system (let's call it IMMS) where every model has the same schema.
In Atlas I'm modelling this as:
Organisations - one to represent the system (IMMS).
Projects - one per Customer Organisation.
Clusters - generally just one per project, but could be more in some exceptions.
Databases - one per contract (model).
Collections - 5 per db, common across the whole IMMS.
The client app which my customers use allows them to select a file to open, representing a specific database, which contains the (private) connection details for connecting to the relevant cluster & db. The app will bake the relevant cluster & db values into the API requests when it posts or gets data (post & get are all we use, there's no updating or deleting required).
I intend to host this API on a publicly accessible endpoint, but have it set up to respond only to my authorised user(s) using my client app(s).
My initial stab at prototyping such a service on Node can be seen below (note: I'm a total node newbie):
//app.js
const Express = require("express");
const BodyParser = require("body-parser");
const Mongo = require("mongodb");

const port = process.env.PORT || 5000;
const URL_TEMPLATE = "mongodb+srv://writer:writer@<db-cluster>.mongodb.net/test?retryWrites=true&w=majority";

var app = Express();
app.use(BodyParser.json());
app.use(BodyParser.urlencoded({ extended: true }));

app.post("/:cluster/:db/party", (request, response) => {
    var client = Mongo.MongoClient;
    client.connect(URL_TEMPLATE.replace("<db-cluster>", request.params.cluster), { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
        if (error) { throw error; }
        client.db(request.params.db).collection("parties").insertOne(request.body, (error, result) => {
            if (error) { return response.status(500).send(error); }
            response.send(result.result);
        });
    });
});

app.get("/:cluster/:db/party", (request, response) => {
    var client = Mongo.MongoClient;
    client.connect(URL_TEMPLATE.replace("<db-cluster>", request.params.cluster), { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
        if (error) { throw error; }
        client.db(request.params.db).collection("parties").find({}).toArray((error, result) => {
            if (error) { return response.status(500).send(error); }
            response.send(result);
        });
    });
});

app.get("/:cluster/:db/party/:id", (request, response) => {
    var client = Mongo.MongoClient;
    client.connect(URL_TEMPLATE.replace("<db-cluster>", request.params.cluster), { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
        if (error) { throw error; }
        client.db(request.params.db).collection("parties").findOne({ "_id": new Mongo.ObjectId(request.params.id) }, (error, result) => {
            if (error) { return response.status(500).send(error); }
            response.send(result);
        });
    });
});

app.listen(port, () => {
    console.log(`Server running at http://localhost:${port}`);
});
I have a few concerns which I'd be grateful for advice / feedback on:
Most examples I've seen open a single mongo connection in the app.listen function & appear to hold it open indefinitely. My approach needs to switch between clusters & databases on each request (and indeed does, above). Is this a good or bad idea, from a performance & reliability perspective?
I've put the :cluster & :db identifiers in each API route, but it will be common to all requests. Should this be baked into the header instead, and/or is there a neater way of doing this?
With the code above, I can see I'm getting quite a lot of repeated code on each route. Any ideas for quick / easy refactorings I could do to incorporate a function along the lines of GetCollection(request)?
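Something roughly like this is what I have in mind (just a sketch, not working code; the helper and its cache are hypothetical):
// Hypothetical shared helper: connects (or reuses a cached client) and
// hands back the "parties" collection for the requested cluster & db.
const clients = {};

async function getCollection(request) {
    const cluster = request.params.cluster;
    if (!clients[cluster]) {
        clients[cluster] = await Mongo.MongoClient.connect(
            URL_TEMPLATE.replace("<db-cluster>", cluster),
            { useNewUrlParser: true, useUnifiedTopology: true });
    }
    return clients[cluster].db(request.params.db).collection("parties");
}

// A route would then shrink to something like:
app.get("/:cluster/:db/party", async (request, response) => {
    try {
        const parties = await getCollection(request);
        response.send(await parties.find({}).toArray());
    } catch (error) {
        response.status(500).send(error);
    }
});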
I'm toying with having my GET routes allow a Mongo query object to be passed (instead of the plain old :id selector route), which would permit a lot of flexibility to my end-client app(s). Again, does that sound like a good or bad idea? I see MongoDB's Query Anywhere does something similar and handles user roles/permissions (though my apps deal with user permissions etc., so I'm not sure I'd benefit from Stitch and Query Anywhere).
Any help / advice on any of the above would be much appreciated. Thanks. :)

I've now come to a solution on this after investigating / researching / prototyping further myself, so am publishing it here for any others with similar queries...
Mongoose's createConnection() method was what I was after. I ended up using the default connection mongoose.connect() for my main system database (managing users, organisations, invoices etc.), and then created a database service class to provide connections to multiple other databases on a request-by-request basis. Something along the lines of the following:
"use strict";
const mongoose = require('mongoose');
const options = { useNewUrlParser: true, useUnifiedTopology: true, useCreateIndex: true, bufferCommands: false };
module.exports = async function ( connectString ) {
let conn;
let models;
try
{
if( !conn )
{
conn = await mongoose.createConnection( connectString, options );
conn.model( 'Current', require('./model') );
conn.model( 'History', require('./history') );
console.log( 'Connected: ' + connectString );
}
if( !models )
{
models = {
Current: conn.model( 'Current' ),
History: conn.model( 'History' )
};
console.log( 'Models compiled: Current, History' );
}
return models;
}
catch ( err )
{
throw err;
}
};
That would allow the server API to manage the model connections, however I'm considering delegating the model db connections to the front-end clients, allowing them to connect directly by connection string.
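For reference, a route can then pull its models per request. A minimal sketch (the route path, service path and connection-string lookup are placeholders):
const express = require('express');
const getModels = require('./database-service'); // the module above; path is illustrative

const router = express.Router();

router.get('/:db/current', async (request, response) => {
    try {
        // lookupConnectString stands in for however the connection string
        // for the requested database is resolved (e.g. from the system db).
        const { Current } = await getModels(lookupConnectString(request.params.db));
        response.json(await Current.find({}));
    } catch (err) {
        response.status(500).send(err.message);
    }
});

module.exports = router;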
Anyhow, I hope the above is useful.

Related

Should I open/close database connection after every query/insert?

Hey everyone, I've been developing a simple app over the last few days using Node.js, and I created this function to return a client instance from MongoDB:
const mongodb = require("mongodb");
const { db } = require("../config/env");

const conection = async () => {
    try {
        const client = await mongodb.MongoClient.connect(db.uri, {
            useNewUrlParser: true,
            useUnifiedTopology: true,
        });
        return client;
    } catch (error) {
        throw error;
    }
};

module.exports = conection;
and I made this simple function to access the data layer and return the matching records:
const index = async ({ limit = 10, offset = 0, filter = {} }) => {
    const client = await conection();
    if (filter._id) {
        filter._id = mongodb.ObjectID(filter._id);
    }
    try {
        const collection = client.db("api").collection("user");
        const data = await collection
            .find({ ...filter })
            .skip(offset)
            .limit(limit)
            .toArray();
        return data;
    } catch (error) {
        throw new Error(error);
    } finally {
        await client.close();
    }
};
I would like to know if I really need to open the connection and close it with each query, or whether I should keep the connection open.
NOTE: in this case I am using a simple (free) Atlas cluster, but I would also like to know whether I should do this when working with SQL databases like Postgres.
Don't close your connection unless you are exiting your app, and then make sure you do. Ensure that you are using the same connection when you come back to do more I/O. Database connections are resource-intensive and should be established as few times as possible and shared as much as possible. You can also get middleware problems with connections like ODBC if you don't pick up existing connections and get weird errors like connection pools running out. Get to know your connector and how to use it most effectively, it will be a rewarding activity :-)
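For example, a minimal sketch of that shared-client approach (the module layout mirrors the code above; names are illustrative), where the client is created once and reused by every query:
const mongodb = require("mongodb");
const { db } = require("../config/env");

let clientPromise; // created once, shared by every caller

const getClient = () => {
    if (!clientPromise) {
        clientPromise = mongodb.MongoClient.connect(db.uri, {
            useNewUrlParser: true,
            useUnifiedTopology: true,
        });
    }
    return clientPromise;
};

module.exports = getClient;

// Usage: const client = await getClient(); ...and don't call client.close()
// until the app itself is shutting down.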
You can use the mongoose module for managing MongoDB.
Installation
npm install mongoose
Usage
mongoose.connect('mongodb://localhost/test', { useNewUrlParser: true });
I am sure that Mongoose can help you solve your issues.
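For instance, a small sketch (the schema here is just an example) of defining a model and querying it; Mongoose keeps a connection pool behind that single connect() call, so nothing is opened or closed per query:
const mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/test', { useNewUrlParser: true });

// Example model; adapt the schema to your own collection.
const User = mongoose.model('User', new mongoose.Schema({ name: String }));

async function listUsers() {
    return User.find({}).limit(10); // runs on the shared connection pool
}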
It's good practice to do so, so that after every operation (insert, etc.) you close the connection, and before every operation you reopen it.

Node.js mongoose unable to 'save()' a document on MongoDB database

My problem is quite peculiar, as I believe I have done everything "by the book".
I'm able to successfully connect to a MongoDB cluster I've created through MongoDB Atlas. When I make a 'POST' request to save a choice that was picked from an array of choices, I successfully create a document through the Model specified below. I then try to save that document to MongoDB by calling the 'save()' method, but it hangs and nothing comes out of it (even if I use a 'catch' to see if any errors occurred).
I'm completely lost as to what is wrong, and what I need to do to solve this. I was hoping you could give me some pointers.
MongoDB connection, schema, and model:
const mongoose = require('mongoose');

const URL = process.env.MONGODB_URL;

mongoose.connect(URL, { useNewUrlParser: true, useUnifiedTopology: true })
    .then(() => {
        console.log('Successfully connected to our MongoDB database.');
    }).catch((error) => {
        console.log('Could not connect to our MongoDB database.', error.message);
    });

const choicesMadeSchema = new mongoose.Schema({
    allChoices: Array,
    pickedChoice: String
});

const ChoiceMade = mongoose.model('ChoiceMade', choicesMadeSchema);

module.exports = ChoiceMade; // Exports our 'ChoiceMade' constructor, to be used by other modules.
index.js:
/* 1 - Setting things up */
require('dotenv').config();
const express = require('express');
const server = express();
const PORT = process.env.PORT;
const parserOfRequestBody = require('body-parser');
server.use(parserOfRequestBody.json());

/* 2 - Retrieving all the data we need from our 'MongoDB' database */
// Imports the 'mongoose' library, which will allow us to easily interact with our 'MongoDB' database.
const mongoose = require('mongoose');
// Imports our 'ChoiceMade' constructor.
const ChoiceMade = require('./database/database.js');
// Will hold the five latest choices that have been made (and thus saved on our 'MongoDB' database).
let fiveLatestChoicesMade;
// Retrieves the five latest choices that have been made (and thus saved on our 'MongoDB' database).
ChoiceMade.find({}).then((allChoicesEverMade) => {
    const allChoicesEverMadeArray = allChoicesEverMade.map((choiceMade) => {
        return choiceMade.toJSON();
    });
    fiveLatestChoicesMade = allChoicesEverMadeArray.slice(allChoicesEverMadeArray.length - 5).reverse();
    console.log("These are the five latest choices that have been made:", fiveLatestChoicesMade);
    mongoose.connection.close();
});

/* 3 - How the server should handle requests */
// 'GET' (i.e., 'retrieve') requests
server.get('/allChoicesMade', (request, response) => {
    console.log("This is the data that will be sent as a response to the 'GET' request:", fiveLatestChoicesMade);
    response.json(fiveLatestChoicesMade);
});
// 'POST' (i.e., 'send') requests
server.post('/allChoicesMade', (request, response) => {
    const newChoiceMadeData = request.body;
    if (Object.keys(newChoiceMadeData).length === 0) {
        return response.status(400).json({ error: "No data was provided." });
    }
    const newChoiceMade = new ChoiceMade({
        allChoices: newChoiceMadeData.allChoices,
        pickedChoice: newChoiceMadeData.pickedChoice
    });
    console.log("This is the new 'choice made' entry that we are going to save on our 'MongoDB' database:", newChoiceMade); // All good until here
    newChoiceMade.save().then((savedChoiceMade) => {
        console.log('The choice that was made has been saved!');
        response.json(savedChoiceMade);
        mongoose.connection.close();
    }).catch((error) => {
        console.log('An error occurred:', error);
    });
});

/* 4 - Telling the server to 'listen' for requests */
server.listen(PORT, () => {
    console.log("Our 'Express' server is running, and listening for requests made to port '" + PORT + "'.");
});
SOLUTION TO THE PROBLEM
In my code's section 2, I was mistakenly closing the connection upon retrieving all the data I need to make my app work. I was doing this (...)
// Retrieves the five latest choices that have been made (and thus saved on our 'MongoDB' database).
ChoiceMade.find({}).then((allChoicesEverMade) => {
    const allChoicesEverMadeArray = allChoicesEverMade.map((choiceMade) => {
        return choiceMade.toJSON();
    });
    fiveLatestChoicesMade = allChoicesEverMadeArray.slice(allChoicesEverMadeArray.length - 5).reverse();
    console.log("These are the five latest choices that have been made:", fiveLatestChoicesMade);
    mongoose.connection.close(); // This should not be here!!!
});
(...) when I should be doing
// Retrieves the five latest choices that have been made (and thus saved on our 'MongoDB' database).
ChoiceMade.find({}).then((allChoicesEverMade) => {
    const allChoicesEverMadeArray = allChoicesEverMade.map((choiceMade) => {
        return choiceMade.toJSON();
    });
    fiveLatestChoicesMade = allChoicesEverMadeArray.slice(allChoicesEverMadeArray.length - 5).reverse();
    console.log("These are the five latest choices that have been made:", fiveLatestChoicesMade);
    // Now that I don't have mongoose.connection.close(), everything's OK!
});
Basically, and in my particular case, I was closing my connection to the MongoDB database after retrieving data from it, and then trying to add a new record to it when I didn't have a connection to it anymore.

many queries postgres (node), no parallel queries?

I am running a Node server with the node-postgres (pg) package.
I wrote a program which sends n queries (for instance 20,000) at once to my Postgres database.
When I do this with several clients who each want to run 20,000 queries at once, there is no parallelism. That means the requests of the second client are queued until the first client has finished all of its queries.
Is this normal behaviour for Postgres? If so, how can I prevent one user from grabbing all the resources (while the others have to wait) if there is no parallelism?
This is my code:
const express = require('express');
const app = express();
const { Pool } = require("pg");
const pool = new Pool();

function benchmark() {
    pool.connect((err, client, done) => {
        if (err) throw err;
        client.query("SELECT * from member where m_id = $1", [1], (err, res) => {
            done();
            if (err) {
                console.log(err.stack);
            } else {
                console.log(res.rows[0]);
            }
        });
    });
}

app.get('/', function (req, res) {
    for (let i = 0; i < 20000; i++) {
        benchmark();
    }
});
First you need to create a connection pool, here's an example with node's pg in a separate module (node-pg-sql.js) for convenience:
node-pg-sql.js:
const { Pool } = require('pg');
const pool = new Pool(fileNameConfigPGSQL); // pool config object, defined elsewhere

module.exports = {
    query: (text, params, callback) => {
        const start = Date.now()
        return pool.query(text, params, (err, res) => {
            const duration = Date.now() - start
            // console.log('executed query', { text, duration, rows: res.rowCount })
            callback(err, res)
        })
    },
    getClient: (callback) => {
        pool.connect((err, client, done) => {
            const query = client.query.bind(client)
            // monkey patch: remember the last query run on this client
            client.query = (...args) => {
                client.lastQuery = args
                return query(...args)
            }
            // timeout of 5 seconds
            const timeout = setTimeout(() => {
                // console.error('A client has been checked out for more than 5 seconds!')
                // console.error(`The last executed query on this client was: ${client.lastQuery}`)
            }, 5000)
            const release = (err) => {
                // 'done' method - returns the client to the pool
                done(err)
                // clear the timeout
                clearTimeout(timeout)
                // restore the query method from before the monkey patch
                client.query = query
            }
            callback(err, client, release)
        })
    }
}
In your postgresql.conf (on Linux normally under /var/lib/pgsql/data/postgresql.conf) set max_connections to the desired value:
max_connections = 300
Keep in mind:
Each PostgreSQL connection consumes RAM for managing the connection or the client using it. The more connections you have, the more RAM you will be using that could instead be used to run the database.
While increasing max_connections, you need to increase shared_buffers and kernel.shmmax as well in order for the client-connection increase to be effective.
Whenever you want to run a query from one of your routes/endpoints, just require the separate client-pool file, like:
const db = require('../../../node-pg-sql');

module.exports = (router) => {
    router.get('/someRoute', (req, res) => {
        console.log(`*****************************************`);
        console.log(`Testing pg..`);
        let sqlSelect = `SELECT EXISTS (
            SELECT 1
            FROM pg_tables
            WHERE schemaname = 'someschema'
        )`;
        db.query(sqlSelect, [], (errSelect, responseSelect) => {
            if (errSelect) {
                /* INFO: Error while querying table */
                console.log(`*****************************************`);
                console.log(`ERROR WHILE CHECKING CONNECTION: ${errSelect}`);
            }
            else {
                // INFO: No error from database
                console.log(`*****************************************`);
                console.log(`CONNECTION TO PGSQL WAS SUCCESSFUL..`);
                res.json({ success: true, message: responseSelect, data: responseSelect.rows[0].exists });
            }
        })
    });
}
EDIT:
"there is no parallelity.."
Node is asynchronous, you can either work with promises or spawn more clients/pools and tune your max-connections (as explained in my answer, but keep performance of your host-machine in mind), but with multiple clients running around 20.000 queries, they won't resolve with a result instantly or parallel. What is the exact goal you try to achieve?
"Is this a normal behavior for postgres?"
This is due to node's event-loop as well as due to certain performance-limitation of the host-machine running the Postgres.
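To illustrate the promise-based route (a rough sketch; the query, table and batch size are placeholders), queries can be issued through the pool with async/await and awaited as a group, so one client's batch does not have to finish before another's starts:
const { Pool } = require('pg');
const pool = new Pool({ max: 20 }); // pool size is a tuning knob; adjust to your host

async function runBatch(batchSize) {
    // Issue a batch of queries concurrently and wait for all of them.
    const tasks = [];
    for (let i = 0; i < batchSize; i++) {
        tasks.push(pool.query('SELECT * FROM member WHERE m_id = $1', [1]));
    }
    const results = await Promise.all(tasks);
    return results.map((res) => res.rows[0]);
}

// e.g. in a route:
// app.get('/', async (req, res) => res.json(await runBatch(1000)));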

Restful Api express postgres database

I'm developing a RESTful API with Node and Express; my database is PostgreSQL, and I need to use the Postgres package pg-promise.
I know that I need to connect my app to the database in the app.js file, but my question is: how should I use this connection in my endpoints?
I have routes and I am using controllers.
For example
app.js
// in this file, supposedly I have to do the connection
const db = pgp('postgres://john:pass123@localhost:5432/products');
app.use('/products', productsRoute);
products.js (route)
router.get('/', ProductsController.get_all_products);
products.js (controller)
exports.get_all_products = (req, res, next) => {
    // Here I want to use the database connection to do the query to find all
    // products in the database
}
How do I get access to the connection to do something like
db.any('SELECT * FROM products WHERE active = $1', [true])
    .then(function (data) {
        // success;
    })
    .catch(function (error) {
        // error;
    });
From the controller.
Update
OK, I'm now using node-postgres (pg). I saw it is better; thanks for the advice, people.
I want to create the db instance once and call it anywhere, specifically in the controllers.
Could I use app.locals to save my client, then connect, do a query and close it, and do this anywhere?
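Something like this is what I have in mind (just a rough sketch; names and the connection string are illustrative):
// app.js
const { Pool } = require('pg');
const pool = new Pool({ connectionString: process.env.DATABASE_URL }); // placeholder
app.locals.db = pool; // stored once, reachable from any controller

// products.js (controller)
exports.get_all_products = async (req, res, next) => {
    try {
        const { rows } = await req.app.locals.db.query('SELECT * FROM products WHERE active = $1', [true]);
        res.json(rows);
    } catch (err) {
        next(err);
    }
};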
I haven't used pg-promise.
If it helps, you can use PostgreSQL client for Node.js. You can also use async/await with it.
Instead of a router, you can use Express middleware straightaway as follows.
//app.js:
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
const port = 1234
const db = require('./dbconnector')
// ...omitted for brevity
// 'db' is exported from a file such as
// dbconnector.js.
app.get('/products', db.getProducts)

//In dbconnector.js:
const Pool = require('pg').Pool
const pool = new Pool({
    user: 'postgres',
    host: 'localhost',
    database: 'mydb',
    password: 'mypwd',
    port: 5432,
})

const getProducts = (request, response) => {
    pool.query('SELECT * FROM products ORDER BY id ASC', (error, results) => {
        if (error) {
            throw error
        }
        response.status(200).json(results.rows)
    })
}

// ...omitted for brevity
module.exports = {
    getProducts
}
For modular design, please use a separate file (not app.js/index.js/server.js) for db connections as best practice and require that in your main app.js.
Here is help on the pg module.
Here's an example of how to use it:
// mydb.js
const pgp = require('pg-promise')();
const db = pgp(process.env.DATABASE_URL); // connection string/config: adapt to your own setup

async function someDbQuery() {
    let result;
    try {
        result = await db.any('SELECT * FROM products WHERE active = $1', [true])
    } catch (e) {
        throw e
    }
    return result;
}

module.exports = { someDbQuery };

// in your controller after importing
const { someDbQuery } = require('./mydb.js')

exports.get_all_products = async (req, res, next) => {
    // Here I want to use the database connection to do the query to find all
    // products in the database
    try {
        const result = await someDbQuery();
        // use result here
    } catch (e) {
        // handle error
        console.error(e)
    }
}
Side note:
From the pg-promise docs:
Built on top of node-postgres
node-postgres now supports promises too.
You do not need to do anything; pg-promise manages connections automatically. A connection will be allocated for the query and released right after. See the examples.
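For instance, a minimal sketch (connection string and table are placeholders) of a controller that uses pg-promise directly, with no manual connection handling:
const pgp = require('pg-promise')();
const db = pgp('postgres://john:pass123@localhost:5432/products'); // placeholder connection string

exports.get_all_products = async (req, res, next) => {
    try {
        // pg-promise acquires a connection for this query and releases it right after.
        const products = await db.any('SELECT * FROM products WHERE active = $1', [true]);
        res.json(products);
    } catch (err) {
        next(err);
    }
};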

One app with nodejs, different datababase per client

I want to create an application with nodejs in which different companies/clients connect but use different databases.
For example:
Application nodejs running on localhost: 3001
Mongo server running at localhost: 27017
A client (CLIENT1) accesses the nodejs application and modifies data
in its database -> localhost:27017/client1
Another client (CLIENT2) does the same and accesses the application
nodejs but modifies its data in localhost:27017/client2
And so on for every customer who signs up for the application.
--------EDIT----------
I've been testing things to get what I wanted, and I think I've come up with a possible solution. The idea is to create a connection for each database access and disconnect once that access is finished. I don't know if it is a good solution, but I think it may be worth sharing:
index.js
var express = require('express');
var app = express();
var repository = require('./demoqueryrepository')

app.get('/createdb', function (req, res) {
    // TODO: with JWT, decode the client id and pass it as a param to the repository
    repository.crearDemo(req.query.id, function (err, resp) {
        if (err) console.log(err)
        else res.send("resp");
    })
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});
demomodel.js
var mongo = require('mongoose');
var Schema = mongo.Schema;

module.exports = mongo.model('demodto', new Schema({
    Name: { type: String },
    Code: { type: Number },
}));
demoqueryrepository.js
var _demo = require('./demoquerydto');
var mongo = require('mongoose')
var mongoconnect = require('./mongoconnect')

module.exports = {
    crearDemo: function (idclient, callback) {
        let newdemo = new _demo({
            Name: " Demo " + idclient,
            Code: idclient
        })
        mongoconnect.connect(idclient);
        newdemo.save(function (error) {
            if (error) callback(error, null);
            else {
                callback(null, "success");
                mongo.disconnect();
            }
        })
    }
}
mongoconnect.js
var mongo = require('mongoose')

module.exports = {
    connect: function (idclient) {
        mongo.connect('mongodb://localhost:27017/' + idclient, { useMongoClient: true }, function (err, res) {
            if (err) console.log(err);
            else console.log("Connected to db")
        });
    }
}
When I launch requests such as:
localhost:3000/createdb?id=12
localhost:3000/createdb?id=13
localhost:3000/createdb?id=14
the databases are created on the database server with those IDs.
What you are trying to do is build a multi-tenant Node.js application.
The approach you are taking has a few disadvantages:
There is one common database holding the user-to-database mapping (to tell you which db to connect to), plus one database per client. This means you have n+1 connections.
Your application will not scale well: you will either always over-provision or under-provision your databases, or, worse, have to deploy changes for every new client you onboard.
Have you considered having just one database, since the schema is the same? The common fear of one client being able to access another client's data can be taken care of by applying a default per-client scope to every query (see the sketch below).
I had the same issue and wrote a blog post about it.
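As a rough illustration of the single-database idea (collection and field names here are hypothetical), every document carries a clientId and every query is forced through a per-client scope:
const mongoose = require('mongoose');

const demoSchema = new mongoose.Schema({
    clientId: { type: String, required: true, index: true }, // tenant key
    Name: String,
    Code: Number
});

const Demo = mongoose.model('Demo', demoSchema);

// All reads/writes go through helpers that apply the tenant scope by default.
function findDemosForClient(clientId, filter = {}) {
    return Demo.find({ ...filter, clientId }); // clientId always overrides the caller's filter
}

function createDemoForClient(clientId, data) {
    return Demo.create({ ...data, clientId });
}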
