I want to create an application with nodejs in which different companies/clients connect but use different databases.
For example:
Application nodejs running on localhost: 3001
Mongo server running at localhost: 27017
A client (CLIENT1) accesses the nodejs application and modifies data
in its database -> localhost:27017/client1
Another client (CLIENT2) does the same and accesses the application
nodejs but modifies its data in localhost:27017/client2
And so on for every customer who signs up for the application.
--------EDIT----------
I've been testing things to get what I wanted, and I think I've come up with a possible solution. The solution would be to create a connection for each database access, and disconnect when that access has finished. I do not know if it is a good solution, but I think it may be worthwhile:
index.js
// index.js — minimal Express app: one route that creates/uses a
// per-client (tenant) database via the repository layer.
var express = require('express');
var app = express();
var repository = require('./demoqueryrepository')

app.get('/createdb', function (req, res) {
    // TODO: decode the JWT to get the client id and pass it to the repository
    repository.crearDemo(req.query.id, function (err, resp) {
        if (err) {
            console.log(err)
            // BUG FIX: the original sent nothing on error, so the HTTP
            // request hung until timeout. Always answer the client.
            res.status(500).send("error");
        } else {
            // BUG FIX: was res.send("resp") — sent the literal string
            // "resp" instead of the repository's result.
            res.send(resp);
        }
    })
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});
demomodel.js
// demomodel.js — mongoose model for the demo document.
const mongoose = require('mongoose');

// Each demo document carries a display name and a numeric code.
const demoSchema = new mongoose.Schema({
    Name: { type: String },
    Code: { type: Number },
});

module.exports = mongoose.model('demodto', demoSchema);
demoqueryrepository.js
var _demo = require('./demoquerydto');
var mongo = require('mongoose')
var mongoconnect = require('./mongoconnect')
module.exports = {
crearDemo: function (idclient, callback) {
let newdemo = new _demo({
Name: " Demo " + idclient,
Code: idclient
})
mongoconnect.connect(idclient);
newdemo.save(function (error) {
if (error) callback(error, null);
else {
callback(null, "success");
mongo.disconnect();
}
})
}
}
mongoconnect.js
var mongo = require('mongoose')
module.exports = {
connect: function (idclient) {
mongo.connect('mongodb://localhost:27017/' + idclient, { useMongoClient: true }, function (err, res) {
if (err) console.log(err);
else console.log("Connected to db")
});
}
}
when i launch requests:
localhost:3000/createdb?id=12
localhost:3000/createdb?id=13
localhost:3000/createdb?id=14
On the database server the databases are created with those id's
What you are trying to do is make a multi-tenant nodejs application.
The approach you are taking has few disadvantages:
There is one common database for user-ids which will tell which db to connect to, and then one database per client. This means you have n+1 connections.
Your application will not scale as either you will always over provision/ under provision your databases or worse deploy changes for every new client on-boarding.
Have you considered having just one database as the schema is the same? The common fears of one client having to access data can be taken care of if you put default scope of search per client.
I had the same issue and wrote a blog post about it.
Related
Is it possible to adapt a json from a live api according to the changes in the database?
server.js
// server.js — watch the "posts" collection and react to live changes.
const connection = mongoose.connection;

connection.once("open", () => {
    // Live stream of change events on the posts collection.
    const postsChangeStream = connection.collection("posts").watch();

    postsChangeStream.on("change", (change) => {
        //console.log('changes right now ->',change);
        // Dispatch on the kind of write that happened in the database.
        switch (change.operationType) {
            case "insert":
                //Create posts -> operationType function
                break;
            case "update":
                //Update posts -> operationType function
                break;
            case "delete":
                //Update posts -> operationType function
                break;
        }
    });
});
Using the MongoDB documentation, I found a method by which I can detect changes in the DB live when a post / patch / delete occurs.
controller/postController.js
//Create a new post - tahe all value and add into DB
exports.createPost = catchAsync(async(req,res)=>{
const create = await Post.create(req.body);
res.status(201).json({
status:"success",
data:create
});
});
//Get Information from DB
exports.getAllPosts = catchAsync(async(req,res,next)=>{
const getAll = await Post.find()
res.status(200).json({
status:"success",
data:{
post:getAll
}
});
});
Is there a possibility to use the socket in this situation to make the application live.
That is, at the moment the mobile application and the website to see the newly added content must refresh.
you want to configure the server first
// Attach socket.io to the existing HTTP server instance.
// BUG FIX: the trailing "-- server : ..." was not a valid JS comment
// (syntax error); use // instead.
io = socket(server); // server: express or any other
io.on("connection", function (socket) {
    // A client connected; register per-socket event handlers here.
    //console.log("Made socket connection");
});
so you can connect the socket from your client app using a unique event name
this.socket = io.connect(YOUR_URL);
this.socket.on(HERE_YOUR_EVENT_NAME, (data: any) => {
-- your get the data here
});
when ever you want to send the data to client app emit the data using event name in server side using below code
io.emit(event_name, data);
I'm looking to set up a REST API for a number of my apps to consume & publish data to/from Mongodb Atlas. I have a number of organisations as customers, and have a number of different contracts (to build information models) with each customer, each of which must be kept in separate databases (for various reasons I shalln't go into here). Its basically an information model management system (lets call it IMMS) where every model has the same schema.
In Atlas I'm modelling this as:
Organisations - one to represent the system (IMMS).
Projects - one per Customer Organisation.
Clusters - generally just one per project, but could be more in some exceptions.
Databases - one per contract (model).
Collections - 5 per db, common across the whole IMMS.
The client app which my customers use allows them to select a file to open, representing a specific database, which contains the (private) connection details for connecting to the relevant cluster & db. The app will bake the relevant cluster & db values into the API requests when it posts or gets data (post & get are all we use, there's no updating or deleting required).
I intend to host this API on a publicly accessible API, but have it set up to only respond to my authorised user(s) using my client app(s).
My initial stab at prototyping such a service on Node can be seen below (note: I'm a total node newbie):
//app.js — multi-tenant REST API: cluster & db are chosen per request
// from the route parameters.
const Express = require("express");
const BodyParser = require("body-parser");
const Mongo = require("mongodb");

const port = process.env.PORT || 5000;
// NOTE: the credential separator must be '@' (it was mangled to '#').
const URL_TEMPLATE = "mongodb+srv://writer:writer@<db-cluster>.mongodb.net/test?retryWrites=true&w=majority";

var app = Express();
app.use(BodyParser.json());
app.use(BodyParser.urlencoded({ extended: true }));

// Shared helper: connect to the cluster/db named in the route, hand the
// "parties" collection to `work`, then always close the client. The
// original duplicated this in every route, never closed the client
// (connection leak), and threw from an async callback (process crash).
function withParties(request, response, work) {
    const url = URL_TEMPLATE.replace("<db-cluster>", request.params.cluster);
    Mongo.MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
        if (error) { return response.status(500).send(error); }
        const parties = client.db(request.params.db).collection("parties");
        work(parties, (err, result) => {
            client.close(); // BUG FIX: clients were never closed.
            if (err) { return response.status(500).send(err); }
            response.send(result);
        });
    });
}

// Create one party document in the tenant database.
app.post("/:cluster/:db/party", (request, response) => {
    withParties(request, response, (parties, done) => {
        parties.insertOne(request.body, (error, result) => {
            done(error, error ? null : result.result);
        });
    });
});

// List all party documents in the tenant database.
app.get("/:cluster/:db/party", (request, response) => {
    withParties(request, response, (parties, done) => {
        parties.find({}).toArray(done);
    });
});

// Fetch a single party document by its ObjectId.
app.get("/:cluster/:db/party/:id", (request, response) => {
    withParties(request, response, (parties, done) => {
        parties.findOne({ "_id": new Mongo.ObjectId(request.params.id) }, done);
    });
});

app.listen(port, () => {
    console.log(`Server running at http://localhost:${port}`);
});
I have a few concerns which I'd be grateful for advice / feedback on:
Most examples I've seen open a single mongo connection in the app.listen function & appear to hold it open indefinitely. My approach needs to switch between clusters & databases on each request (and indeed does, above). Is this a good or bad idea, from a performance & reliability perspective?
I've put the :cluster & :db identifiers in each API route, but it will be common to all requests. Should this be baked into the header instead, and/or is there a neater way of doing this?
With the code above, I can see I'm getting quite a lot of repeated code on each route. Any ideas for quick / easy refactorings I could do to incorporate a function along the lines of GetCollection(request)?
I'm toying with having my GET routes allow a mongo query object to be passed (instead of the plain old :id selector route), which would permit a lot of flexibility to my end-client app(s). Again, does that sound like a good / bad idea? I see mongo query anywhere does something similar & handles user roles/permissions (though my apps deal with user permissions etc so I'm not sure I'd benefit from stitch & query anywhere)?
Any help / advice on any of the above would be much appreciated. Thanks. :)
I've now come to a solution on this after investigating / researching / prototyping further myself, so am publishing it here for any others with similar queries...
Mongoose's createConnection() method was what I was after. I ended up using the default connection mongoose.connect() for my main system database (managing users, organisations, invoices etc), and then created a database service class to provide connections to multiple other databases on a request by request basis. Something along the lines of the following:
"use strict";
const mongoose = require('mongoose');
const options = { useNewUrlParser: true, useUnifiedTopology: true, useCreateIndex: true, bufferCommands: false };
module.exports = async function ( connectString ) {
let conn;
let models;
try
{
if( !conn )
{
conn = await mongoose.createConnection( connectString, options );
conn.model( 'Current', require('./model') );
conn.model( 'History', require('./history') );
console.log( 'Connected: ' + connectString );
}
if( !models )
{
models = {
Current: conn.model( 'Current' ),
History: conn.model( 'History' )
};
console.log( 'Models compiled: Current, History' );
}
return models;
}
catch ( err )
{
throw err;
}
};
That would allow the server API to manage the model connections, however I'm considering delegating the model db connections to the front-end clients, allowing them to connect directly by connection string.
Anyhow, I hope the above is useful.
I'm developing a RESTful API with Node and Express; my database is PostgreSQL, and I need to use the postgres package pg-promise.
I know that I need to connect my app with the database in the app.js file, but my question is, How I should use this connection in my endpoints.
I have routes and I am using controllers.
For example
app.js
// In this file (app.js), supposedly, the database connection is created.
// NOTE(review): the '#' before localhost looks like a mangled '@'
// (user:password@host) — verify against the real connection string.
const db = pgp('postgres://john:pass123#localhost:5432/products');
// Mount the products router under /products.
app.use('/products', productsRoute);
products.js (route)
router.get('/', ProductsController.get_all_products);
products.js (controller)
// Controller stub: should query the database for all products and respond.
exports.get_all_products = (req, res, next ) => {
// Here I want to use the database connection to run the query that finds
// all products in the database.
}
How do I get access to the connection to do something like
// Example pg-promise query: `any` resolves with all matching rows (0..n).
db.any('SELECT * FROM products WHERE active = $1', [true])
.then(function(data) {
// success — `data` is the array of rows;
})
.catch(function(error) {
// error — query or connection failure;
});
From the controller.
Update
OK, I'm now using node-postgres (pg). I saw it is better — thanks for the advice, people.
I want to create one time de db instance, and call it anywhere, in specific in the controllers
Could I use app.local to save my client?, connect, do a query and then close it. Do this anywhere
I haven't used pg-promise.
If it helps, you can use PostgreSQL client for Node.js. You can also use async/await with it.
Instead of a router, you can use Express middle-ware straightaway as follows.
//app.js:
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
const port = 1234
const db = require('./dbconnector')
// ...omitted for brevity
// 'db' is exported from a separate file such as dbconnector.js so the
// pool is created once and shared by all routes.
app.get('/products', db.getProducts)
//In dbconnector.js:
const Pool = require('pg').Pool
const pool = new Pool({
user: 'postgres',
host: 'localhost',
database: 'mydb',
password: 'mypwd',
port: 5432,
})
const getProducts = (request, response) => {
pool.query('SELECT * FROM products ORDER BY id
ASC', (error, results) => {
if (error) {
throw error
}
response.status(200).json(results.rows)
})
}
// ...omitted for brevity
module.exports = {
getProducts
}
For modular design, please use a separate file (not app.js/index.js/server.js) for db connections as best practice and require that in your main app.js.
Here is help on pg module.
Here's an example how to use it:
// mydb.js
// Fetch all active products; resolves with the row set.
async function someDbQuery() {
  let result;
  try {
    // BUG FIX: the original forgot `await`, so `result` was a pending
    // Promise and the try/catch could never observe a query failure.
    result = await db.any('SELECT * FROM products WHERE active = $1', [true])
  } catch (e) {
    throw e
  }
  return result;
}

// BUG FIX: the consumer destructures `const { someDbQuery } = require(...)`,
// so export an object — `module.exports = someDbQuery` would make the
// destructured name undefined.
module.exports = { someDbQuery };
// in your controller after importing
const { someDbQuery } = require('./mydb.js')
exports.get_all_products = async (req, res, next ) => {
// Here i want to use de database connection to do the query to find all
//products in the database
try {
const result = await someDbQuery();
// use result here
} catch (e) {
// handle error
console.error(e)
}
}
Side note:
From the docs pg-promise
Built on top of node-postgres
node-postgres now supports promise too.
You do not need to do anything, pg-promise manages connections automatically. It will be allocated for the query and released right after. See examples.
I have a node.js application that uses a mongodb database that I've created. Within it, I have a simple collection named comments with the contents { "author": "me", "comment": "this is a comment" } when I call db.comments.find({}).
However, when I attempt to access this collection for display within a jade view I have, it times out after an incrediable amount of time. Console.log for the error object shows it's either a MongoError or connection was destroyed by application. The question I have is why this is happening? I have no errant while loops and connection parameteres seem to check out. Here's what I have to connect with, stored in app.js
var app = express();
// Legacy mongodb driver style: build a Server + Db pair by hand.
var mongodb = require('mongodb'),
serverdb = new mongodb.Server('127.0.0.1', 27017, {}),
db = new mongodb.Db('acl', serverdb, {safe:true});
// NOTE(review): db.open() is never called here — with this driver the Db
// must be opened before queries; presumably related to the timeouts.
app.use(function(req,res,next){
// Expose the shared db handle to every route via the request object.
req.db = db;
next();
});
and the code I have in the middleware file, stored as a js file in /routes
var express = require('express');
var router = express.Router();
/* GET home page. */
router.get('/', function(req, res) {
var db = req.db;
var collection = db.collection('comments');
collection.find().toArray(function(err, docs) {
console.log("Printing docs from Array");
if (err) {
console.log(err);
} else {
console.log(docs);
}
});
// NOTE(review): this close() runs before the async toArray() callback
// above fires, killing the in-flight query — and no response is ever
// rendered or sent, so the request times out.
db.close();
});
module.exports = router;
Like #legalize said, its best to get a mongo connection pool going instead of opening and closing the connection on every request. Perhaps something like this SO answer
As far as why you are getting errors, its probably because your db.close() needs to be in the collection.find().toArray() callback because otherwise it'll start closing the connection before the query even happens.
Lastly, you need to render the template somewhere so the response gets sent back to the client.
Putting it all together, you probably want something like this:
// Fixed route: close the connection only after the query completes, and
// always send *some* response so the request never hangs.
router.get('/', function(req, res) {
  var db = req.db;
  var collection = db.collection('comments');
  collection.find().toArray(function(err, docs) {
    console.log("Printing docs from Array");
    db.close(); // safe here: the query has already completed
    if (err) {
      console.log(err);
      // BUG FIX: previously nothing was sent on error (request hung).
      res.status(500).send('database error');
    } else {
      console.log(docs);
      res.render( 'yourJadeTemplate', { docs : docs } );
    }
  });
});
(but you really don't want to be closing the connection for every request, especially because you aren't opening it for every request)
Oddly enough replacing this code
// Original approach: construct Server and Db by hand (never opened).
var mongodb = require('mongodb'),
serverdb = new mongodb.Server('127.0.0.1', 27017, {}),
db = new mongodb.Db('acl', serverdb, {safe:true});
with this
var db = require("mongojs").connect("localhost:27017/acl", ["comments"]);
made all the difference. No more timeouts. A bit of tweaking to get it to return data.
I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough using only one MongoDB connection for all requests? Are there any performance issues? If not, can I setup a global connection to use in the whole application?
If not, is it good if I open a new connection when request arrives, and close it when handled the request? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You do MongoClient.connect once when your app boots up and reuse
the db object. It's not a singleton connection pool; each .connect
creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';

const app = express();

app.use('/api/users', usersRestApi);

app.get('/', (req, res) => {
  res.send('Hello World');
});

// Create a MongoDB connection pool and start the application
// after the database connection is ready
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
  if (err) {
    logger.warn(`Failed to connect to the database. ${err.stack}`);
    // BUG FIX: the app previously started anyway with app.locals.db
    // undefined; refuse to boot without a database.
    process.exit(1);
  }
  app.locals.db = db; // shared connection pool for all routes
  app.listen(config.port, () => {
    logger.info(`Node.js app is listening at http://localhost:${config.port}`);
  });
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';

const router = new Router();

// GET /:id — look up one user and return a trimmed projection of it.
router.get('/:id', async (req, res, next) => {
  try {
    const { db } = req.app.locals;
    const userId = new ObjectID(req.params.id);
    const projection = {
      email: 1,
      firstName: 1,
      lastName: 1
    };
    const user = await db.collection('user').findOne({ _id: userId }, projection);
    if (!user) {
      res.sendStatus(404);
      return;
    }
    user.id = req.params.id;
    res.send(user);
  } catch (err) {
    next(err);
  }
});

export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
// Self-contained MongoDB connection-pool manager: call initPool() once at
// startup, then getInstance(cb) anywhere to receive the shared db handle.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"]

var option = {
    db:{
        numberOfRetries : 5
    },
    server: {
        auto_reconnect: true,
        poolSize : 40,
        socketOptions: {
            connectTimeoutMS: 500
        }
    },
    replSet: {},
    mongos: {}
};

function MongoPool(){}

var p_db;          // the shared Db instance once connected
var pending = [];  // callbacks waiting while the first connect is in flight
var connecting = false;

// Open the pool; `cb` (optional) receives the db when ready.
function initPool(cb){
    if (cb && typeof(cb) === 'function')
        pending.push(cb);
    // BUG FIX: several early getInstance() calls used to each open a new
    // pool; only the first call actually connects now, the rest queue.
    if (connecting)
        return MongoPool;
    connecting = true;
    MongoClient.connect(url, option, function(err, db) {
        if (err) throw err;
        p_db = db;
        pending.forEach(function (waiting) { waiting(p_db); });
        pending = [];
    });
    return MongoPool;
}
MongoPool.initPool = initPool;

// Hand the shared db to `cb`, connecting first if necessary.
function getInstance(cb){
    if(!p_db){
        initPool(cb)
    }
    else{
        if(cb && typeof(cb) === 'function')
            cb(p_db);
    }
}
MongoPool.getInstance = getInstance;

module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
// Any other module: borrow the shared connection.
var MongoPool = require("mongo-pool");
MongoPool.getInstance(function (db){
// Query your MongoDB database with the pooled `db` handle.
});
This is based on MongoDB documentation. Take a look at it.
Manage mongo connection pools in a single self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you're not forced to mix your database connection up in your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript I would consider it highly dangerous to mix in anything to an object constructed by library code.) So with that you only need to consider a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can firstly connect to the database
// Runs in boot.js or whatever file your application starts with.
const db = require('./myAwesomeDbModule');
db.connect()
.then(() => console.log('database connected'))
.then(() => bootMyApplication())
.catch((e) => {
console.error(e);
// Always hard-exit on a database connection error — the app is useless
// without its database.
process.exit(1);
});
When in flight your app can simply call get() when it needs a DB connection.
const db = require('./myAwesomeDbModule');
// get() returns the already-open connection (throws if connect() hasn't run).
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module in the same way as the following not only will you have a way to ensure that your application will not boot unless you have a database connection you also have a global way of accessing your database connection pool that will error if you have not got a connection.
// myAwesomeDbModule.js
let connection = null;

// Open the connection. Idempotent: repeat calls resolve with the cached
// connection instead of opening extra, orphaned ones (original bug).
module.exports.connect = () => new Promise((resolve, reject) => {
    if (connection) {
        resolve(connection);
        return;
    }
    MongoClient.connect(url, option, function(err, db) {
        if (err) { reject(err); return; };
        // BUG FIX: assign before resolving so get() is guaranteed to work
        // inside any .then() chained on this promise (original resolved
        // first and assigned afterwards).
        connection = db;
        resolve(db);
    });
});

// Synchronous accessor for the shared connection.
module.exports.get = () => {
    if(!connection) {
        throw new Error('Call connect first!');
    }
    return connection;
}
If you have Express.js, you can use express-mongo-db for caching and sharing the MongoDB connection between requests without a pool (since the accepted answer says it is the right way to share the connection).
If not - you can look at its source code and use it in another framework.
You should create a connection as service then reuse it when need.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";

const dbService = {
  db: undefined,

  // Open the shared connection; node-style `callback(err)`.
  connect: callback => {
    MongoClient.connect(database.uri, function(err, data) {
      if (err) {
        // BUG FIX: the original called MongoClient.close() (not a static
        // method — a crash) and then fell through, invoking the callback a
        // *second* time with null. Report the error and stop.
        callback(err);
        return;
      }
      dbService.db = data;
      console.log("Connected to database");
      callback(null);
    });
  }
};

export default dbService;
my App.js sample
// App Start — boot the HTTP server only after the db connection is ready.
dbService.connect(err => {
if (err) {
console.log("Error: ", err);
// Without a database the API cannot serve anything; exit hard.
process.exit(1);
}
server.listen(config.port, () => {
console.log(`Api runnning at ${config.port}`);
});
});
and use it wherever you want with
import dbService from "db.service.js"
const db = dbService.db
I have been using generic-pool with redis connections in my app - I highly recommend it. Its generic and I definitely know it works with mysql so I don't think you'll have any problems with it and mongo
https://github.com/coopernurse/node-pool
I have implemented below code in my project to implement connection pooling in my code so it will create a minimum connection in my project and reuse available connection
/* Mongo.js*/
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');
var connection=[];
// Create the database connection
establishConnection = function(callback){
MongoClient.connect(url, { poolSize: 10 },function(err, db) {
assert.equal(null, err);
connection = db
if(typeof callback === 'function' && callback())
callback(connection)
}
)
}
function getconnection(){
return connection
}
module.exports = {
establishConnection:establishConnection,
getconnection:getconnection
}
/* app.js */
// Establish the one connection that all other routes will use.
var db = require('./routes/mongo')
db.establishConnection();
// You can also pass a callback, e.g. to create a collection at startup:
/*
db.establishConnection(function(conn){
    conn.createCollection("collectionName", function(err, res) {
        if (err) throw err;
        console.log("Collection created!");
    });
});
*/
// any other route.js
var db = require('./mongo')
// Reuse the shared connection established in app.js.
router.get('/', function(req, res, next) {
var connection = db.getconnection()
res.send("Hello");
});
If using express there is another more straightforward method, which is to utilise Express's built in feature to share data between routes and modules within your app. There is an object called app.locals. We can attach properties to it and access it from inside our routes. To use it, instantiate your mongo connection in your app.js file.
var app = express();

// Connect once at startup and stash the collection on app.locals so every
// route can reach it via req.app.locals.
MongoClient.connect('mongodb://localhost:27017/')
    .then(client => {
        const db = client.db('your-db');
        const collection = db.collection('your-collection');
        app.locals.collection = collection;
    })
    // BUG FIX: the promise was floating — a failed connect became an
    // unhandled rejection. Log and exit instead.
    .catch(err => {
        console.error(err);
        process.exit(1);
    });

// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share around the modules of you app can now be accessed within your routes with req.app.locals as below without the need for creating and requiring additional modules.
// Route: read the shared collection off app.locals and return all docs.
app.get('/', (req, res) => {
    const collection = req.app.locals.collection;
    collection.find({}).toArray()
        .then(response => res.status(200).json(response))
        // BUG FIX: errors were only logged, leaving the request hanging
        // until timeout; answer with a 500 as well.
        .catch(error => {
            console.error(error);
            res.status(500).json({ error: 'database error' });
        });
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require creation of any additional modules.
Best approach to implement connection pooling is you should create one global array variable which hold db name with connection object returned by MongoClient and then reuse that connection whenever you need to contact Database.
In your Server.js define global.dbconnections = []; (note: "var global.dbconnections" would be invalid syntax)
Create a Service naming connectionService.js. It will have 2 methods getConnection and createConnection.
So when user will call getConnection(), it will find detail in global connection variable and return connection details if already exists else it will call createConnection() and return connection Details.
Call this service with <db_name> and it will return the connection object if one already exists; otherwise it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
// connectionService.js — caches one connection per database name in
// global.dbconnections and hands back the existing one on repeat calls.
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');

var service = {};
service.getConnection = getConnection ;
module.exports = service;

// Resolve with the cached connection for `appDB`, creating it on first use.
function getConnection(appDB){
    var deferred = Q.defer();
    var connectionDetails=global.dbconnections.find(item=>item.appDB==appDB)
    if(connectionDetails){
        deferred.resolve(connectionDetails.connection);
    }else{
        createConnection(appDB).then(function(connection){
            deferred.resolve(connection);
        }, function(err){
            // Propagate connect failures instead of leaving callers hanging.
            deferred.reject(err);
        })
    }
    return deferred.promise;
}

// Open a new connection, remember it in the global cache, resolve with it.
function createConnection(appDB){
    var deferred = Q.defer();
    mongodb.MongoClient.connect(connectionServer + appDB, (err,database)=>
    {
        if(err) {
            // BUG FIX: without this `return` the code below still pushed an
            // undefined connection into the cache and resolved the promise.
            deferred.reject(err.name + ': ' + err.message);
            return;
        }
        global.dbconnections.push({appDB: appDB, connection: database});
        deferred.resolve(database);
    })
    return deferred.promise;
}
In case anyone wants something that works in 2021 with Typescript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";

const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;

if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
  // BUG FIX: throw an Error object, not a bare string — strings carry no
  // stack trace and break `instanceof Error` handling upstream.
  throw new Error(
    "Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables."
  );
}

const client = new MongoClient(FILE_DB_HOST, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

// Static holder for the shared files collection, populated by init().
class Mongoose {
  static FilesCollection: Collection;

  static async init() {
    const connection = await client.connect();
    const FileDB = connection.db(FILE_DB_DATABASE);
    Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
  }
}

// BUG FIX: init() was a floating promise — a connection failure became an
// unhandled rejection. Fail loudly at startup instead.
Mongoose.init().catch((err) => {
  console.error("Mongo init failed:", err);
  process.exit(1);
});

export default Mongoose;
I believe if a request occurs too soon (before Mongo.init() has time to finish), an error will be thrown, since Mongoose.FilesCollection will be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";

// GET handler: fetch the file named "hello" from the shared collection.
export default async function GetFile(req: Request, res: Response, next: NextFunction) {
  try {
    const files = Mongoose.FilesCollection;
    const file = await files.findOne({ fileName: "hello" });
    res.send(file);
  } catch (err) {
    // BUG FIX: a rejection here (e.g. FilesCollection still undefined while
    // init() is in flight) previously became an unhandled rejection; route
    // it to Express' error middleware instead.
    next(err);
  }
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
npm i express mongoose
mongodb.js
const express = require('express');
const mongoose = require('mongoose');

const app = express();

mongoose.set('strictQuery', true);

// Open the default mongoose connection, then report the outcome.
const connectOptions = {
    useNewUrlParser: true,
    useUnifiedTopology: true
};
mongoose
    .connect('mongodb://localhost:27017/db_name', connectOptions)
    .then(() => console.log('MongoDB Connected...'))
    .catch((err) => console.log(err))

app.listen(3000, () => { console.log("Started on port 3000 !!!") })
node mongodb.js
Using below method you can easily manage as many as possible connection
var mongoose = require('mongoose');

// NOTE: the credential separator must be '@' (it was mangled to '#').
// Set up default mongoose connection.
const bankDB = ()=>{
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority',options);
}

// Set up second mongoose connection.
const myDB = ()=>{
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority',options);
}

// Create each connection exactly once and share the instances.
// BUG FIX: `module.exports = { bankDB(), myDB() }` was a syntax error, and
// calling the factories again would also have opened a *second* pair of
// connections on top of the ones used for logging below.
const bankConnection = bankDB();
const myConnection = myDB();

bankConnection.then(()=>console.log('Connected to mongoDB-Atlas bankApp...'))
    .catch((err)=>console.error('Could not connected to mongoDB',err));

myConnection.then(()=>console.log('Connected to mongoDB-Atlas connection 2...'))
    .catch((err)=>console.error('Could not connected to mongoDB',err));

module.exports = { bankDB: bankConnection, myDB: myConnection };