I'm trying this approach, but I'm not sure if that creates a new connection every time.
getMongoClient.js
const { MongoClient } = require('mongodb');

const serverURL = process.env['mongoServerURL'];

module.exports = async function () {
  const mongoClient = new MongoClient(serverURL);
  await mongoClient.connect();
  return mongoClient;
};
then in the app.js
const getMongoClient = require("./_helpers/getMongoClient.js")
module.exports = getMongoClient();
then in a database service.js
async function syncGuilds(client) {
  const mongoClient = await require("../app.js");
  // ... some database operations
}
module.exports = syncGuilds
Node modules are effectively singletons; you don't need to worry about them. Once a module has been evaluated, it won't be evaluated again, so you will always receive the same instance of the module, i.e. it won't create multiple instances of the Mongo connection.
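If you want to convince yourself, here is a quick, throwaway check (the file name is just illustrative) showing that repeated require() calls hand back the cached export rather than re-running the module:

// cache-demo.js -- throwaway check that two require() calls share one client
const p1 = require("./app.js");
const p2 = require("./app.js");

console.log(p1 === p2); // true: require() returned the same cached promise

(async () => {
  const [c1, c2] = await Promise.all([p1, p2]);
  console.log(c1 === c2); // true: one MongoClient, one connection pool
})();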
It won't create a new connection every time. If you want, you can specify the maximum connection pool size in the options (the default in the legacy driver was 5). Check the link below:
https://mongodb.github.io/node-mongodb-native/driver-articles/mongoclient.html#connection-pool-configuration
const mongodb = require("mongodb");

let client = new mongodb.MongoClient(
  "url",
  {
    tls: true,
    auth: { user: "myuser", password: "mypassword" },
    useNewUrlParser: true,
    useUnifiedTopology: true,
    poolSize: 1,    // legacy (driver 3.x) option name
    maxPoolSize: 1, // current (driver 4.x+) option name
  }
);
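Tying this back to the question's layout, here is a hedged sketch (the db and collection names are placeholders) of how the service module ends up reusing the single exported promise instead of opening a new connection per call:

// service.js -- "../app.js" exports the promise returned by getMongoClient()
const clientPromise = require("../app.js");

async function syncGuilds(guilds) {
  // Awaiting the cached promise returns the already-connected client.
  const mongoClient = await clientPromise;
  const collection = mongoClient.db("myDB").collection("guilds"); // placeholder names
  // ... some database operations, for example:
  // await collection.insertMany(guilds);
}

module.exports = syncGuilds;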
Related
I'm writing a Node.js CLI in which I have to read from one Mongo Atlas DB and write to another Mongo Atlas DB. I'll be reading documents from one DB and writing equivalent documents to the other DB, one document at a time. I have two separate connection files like this:
ReadDB.js:
require('dotenv').config();
const mongoose = require('mongoose');

const read_db_url = process.env.READDB_URI;

const readDB = async () => {
  try {
    await mongoose.connect(read_db_url, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
      dbName: "dbProd"
    });
  } catch (err) {
    console.error(err);
  }
}

module.exports = readDB
WriteDB.js:
require('dotenv').config();
const mongoose = require('mongoose');

const write_db_url = process.env.WRITEDB_URI;

const writeDB = async () => {
  try {
    await mongoose.connect(write_db_url, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
      dbName: "dbQA"
    });
  } catch (err) {
    console.error(err);
  }
}

module.exports = writeDB
This is what I have so far for the main application (cli.js):
cli.js:
require('dotenv').config();
const mongoose = require('mongoose');
const connectReadDB = require('./ReadDB.js');
const connectWriteDB = require('./WriteDB.js');
connectReadDB();
connectWriteDB();
const findProduct = async (productId) => {
  // Which of the two connections does this use?
  const products = await Products.find({ _id: productId });
}
I guess my confusion is how Node.js will know which DB to read from to begin with. Will I need a separate set of models, one for read and one for write? How can I establish two simultaneous connections in the same Node.js app?
Mongoose handles connections via a connection pool: http://mongoosejs.com/docs/connections.html
You can use the server: {poolSize: 5} option to increase/decrease the pool size (the number of parallel connections).
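In newer Mongoose versions the pool option is passed at the top level rather than nested under server, and with Mongoose 6+ (MongoDB driver 4.x) the option name is maxPoolSize, so a rough modern equivalent would be:

// Mongoose 6+ / driver 4.x: pool size as a top-level connect option
mongoose.connect(process.env.MONGODB_URI, { maxPoolSize: 5 });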
If you need connections to different databases, look here: Mongoose and multiple database in single node.js project
Example of multiple connections:
const mongoose = require('mongoose')
const connection = mongoose.createConnection('mongodb://localhost/db1');
const connection2 = mongoose.createConnection('mongodb://localhost/db2');
const Schema = new mongoose.Schema({})
const model1 = connection.model('User', Schema);
const model2 = connection2.model('Item', Schema);
model1.find({}, function() {
  console.log("this will print out last");
});

model2.find({}, function() {
  console.log("this will print out first");
});
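Applied to the question's setup, a hedged sketch (the env var names and dbName values come from the question; the Product schema shape is assumed) keeps one connection per Atlas cluster and registers a model on each connection, so reads and writes cannot get mixed up:

require('dotenv').config();
const mongoose = require('mongoose');

// One connection per cluster; each connection has its own model registry.
const readConn = mongoose.createConnection(process.env.READDB_URI, { dbName: 'dbProd' });
const writeConn = mongoose.createConnection(process.env.WRITEDB_URI, { dbName: 'dbQA' });

// Reuse your real Product schema here; strict: false is only a placeholder.
const productSchema = new mongoose.Schema({}, { strict: false });

const ReadProduct = readConn.model('Product', productSchema);
const WriteProduct = writeConn.model('Product', productSchema);

const copyProduct = async (productId) => {
  const doc = await ReadProduct.findById(productId).lean();
  if (doc) await WriteProduct.create(doc);
};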
I would like to connect to my Atlas cluster only once per instance running Cloud Functions.
Here is my code for an instance:
const MongoClient = require("mongodb").MongoClient;

const client = new MongoClient("myUrl", {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

exports.myHttpMethod = functions.region("europe-west1").runWith({
  memory: "128MB",
  timeoutSeconds: 20,
}).https.onCall((data, context) => {
  console.log("Data is: ", data);
  client.connect(() => {
    const testCollection = client.db("myDB").collection("test");
    testCollection.insertOne(data);
  });
});
And I would like to avoid calling client.connect() in each function call, which seems like too much.
I would like to do something like this:
const MongoClient = require("mongodb").MongoClient;

const client = await MongoClient.connect("myUrl", {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});
const db = client.db("myDB");

exports.myHttpMethod = functions.region("europe-west1").runWith({
  memory: "128MB",
  timeoutSeconds: 20,
}).https.onCall((data, context) => {
  console.log("Data is: ", data);
  const testCollection = db.collection("test");
  testCollection.insertOne(data);
});
But I can't await like this.
In my AWS Lambda functions (running in Python) I don't have this issue and I am able to connect only once per instance, so I guess there is an equivalent, but I don't know much JS / Node.js.
You can store your database client as a global variable. From the documentation,
Cloud Functions often recycles the execution environment of a previous invocation. If you declare a variable in global scope, its value can be reused in subsequent invocations without having to be recomputed.
Try refactoring the code as shown below:
import * as functions from "firebase-functions";
import { MongoClient } from "mongodb";

let client: MongoClient | null;

const getClient = async () => {
  if (!client) {
    const mClient = new MongoClient("[MONGODB_URI]", {});
    client = await mClient.connect();
    functions.logger.log("Connected to MongoDB");
  } else {
    functions.logger.log("Using existing MongoDB connection");
  }
  functions.logger.log("Returning client");
  return client;
};

export const helloWorld = functions.https.onRequest(
  async (request, response) => {
    const db = (await getClient()).db("[DATABASE]");
    const result = await db.collection("[COLLECTION]").findOne({});
    response.send("Hello from Firebase!");
  }
);
This should reuse the connection for that instance.
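Since the question's code uses CommonJS and an onCall handler, a hedged adaptation of the same lazy-initialization idea (URL, region and names taken from the question) might look like this:

const functions = require("firebase-functions");
const { MongoClient } = require("mongodb");

let clientPromise = null; // survives across invocations on the same instance

const getClient = () => {
  if (!clientPromise) {
    clientPromise = new MongoClient("myUrl", {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    }).connect(); // caching the promise also avoids racing parallel calls
  }
  return clientPromise;
};

exports.myHttpMethod = functions.region("europe-west1").runWith({
  memory: "128MB",
  timeoutSeconds: 20,
}).https.onCall(async (data, context) => {
  const client = await getClient(); // connects only on the first invocation
  await client.db("myDB").collection("test").insertOne(data);
});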
I am creating a blog website where you can create your own blog and update it at anytime. However, I want to add a log-in system so that users can have their own separate blog page. My problem is that I have yet to find a way to use both the login database and the blog database in my website. Right now I am using mongoose to connect to mongodb atlas, is there a way to use something like mongoose.connect for multiple databases?
You can just have two different Mongoose schemas, one for the blogs and the other for the login. They will be kept in separate collections within the same database; you don't need two separate databases.
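For example, a minimal sketch (the schema fields here are assumed; use whatever your real schemas are):

const mongoose = require('mongoose');

mongoose.connect(process.env.MONGODB_URI); // one database is enough

// Each model gets its own collection ("users" and "blogs") in that database.
const User = mongoose.model('User', new mongoose.Schema({
  email: { type: String, required: true, unique: true },
  passwordHash: String,
}));

const Blog = mongoose.model('Blog', new mongoose.Schema({
  author: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
  title: String,
  body: String,
}));

// A user's personal blog page is then just a query filtered by author:
// const posts = await Blog.find({ author: userId });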
Here is an example using the mongodb package.
I believe you can use the same logic with Mongoose.
import { Db, MongoClient, MongoError } from "mongodb";

// MongoDBsInterface (and the other *Interface types below) are custom types
// defined elsewhere in this project; "logger" is likewise an existing logger.
const mongoURIs: MongoDBsInterface = {
  global: {
    uri: process.env.MONGODB_URI_DEFAULT,
    db: process.env.MONGODB_DB_NAME,
  },
  secondary: {
    uri: process.env.MONGODB_URI_DEFAULT,
    db: "secondaryDatabaseInTheSameCluster",
  },
  tertiary: { // each key just needs to be unique; it names the connection
    uri: "secondClusterURI",
    db: "anotherDatabase",
  },
};
// Connection parameters
const param = {
  numberOfRetries: 20,
  auto_reconnect: true,
  useNewUrlParser: true,
  useUnifiedTopology: true,
};
// Function to start multiple databases
export const startDatabases = (): Promise<MongoDBsDetailInterface> => {
  return new Promise((resolve) => {
    const promises = [];
    logger.info("### Connect to MongoDBs");
    // Loop into each mongoURIs
    for (const lang in mongoURIs) {
      if (mongoURIs[lang]) {
        promises.push(connectToOneDB(lang, mongoURIs[lang]));
      }
    }
    Promise.all(promises).then((res: MongoDBDetailInterface[]) => {
      const dbs: MongoDBsDetailInterface = {};
      // tslint:disable-next-line: prefer-for-of
      for (let i: number = 0; i < res.length; i++) {
        dbs[res[i].lang] = res[i].db;
      }
      resolve(dbs);
    });
  });
};
export const connectToOneDB = (
  lang: string,
  dbValue: MongoDBInterface
): Promise<MongoDBDetailInterface> => {
  return new Promise(async (resolve) => {
    logger.info(` - Connect to ${lang} in db ${dbValue.db} (${dbValue.uri})`);
    // Connect to the db
    const client = new MongoClient(dbValue.uri, param);
    let db = null;
    try {
      await client.connect();
      db = client.db(dbValue.db);
    } catch (e) {
      logger.error(e.message);
      client.close();
      // you should change that to handle it for your usage
      process.exit(1);
    }
    logger.info(` - Connected to ${lang}`);
    return resolve({ lang, db });
  });
};
And then you can use
const dbs = await startDatabases();
And put it in your global context.
You then just need to select the collection you want
dbs.global.collection("...")
I've tried to develop a server with Node.js linked to a NoSQL DB, in particular MongoDB. I'm not sure I managed to connect it properly: if I run the code below, it should return the id of the inserted element, but it prints nothing. If I replace the async version with a normal callback version, it returns the following error:
MongoError: Topology is closed, please connect
I've also tried the lines of code in the comment below the main code, but they return another error.
I think I'm not connecting MongoDB to my server correctly, although I followed the MongoDB instructions step by step (create a free cluster, create a user, create a DB and then a collection, then connect using the information set up before). I say this because I refresh the cluster in MongoDB Atlas but I don't see any change.
const MongoClient = require('mongodb').MongoClient;
const password = encodeURIComponent('22');
const user=encodeURIComponent('id');
const dbs = encodeURIComponent('users');
const uri = "mongodb+srv://${user}:${password}#cluster/${dbs}?retryWrites=true&w=majority";
const client = new MongoClient(uri, { useNewUrlParser: true ,useUnifiedTopology:true, connectTimeoutMS: 30000 , keepAlive: 1});
(async () => {
  await client.connect();
  const databse = client.db("users");
  const collection = databse.collection("readers");
  const result = await collection.insertOne({
    "name": "Chocolate",
    "ingredients": [
      "eggs", "chocolates"
    ]
  });
  console.log(result.insertedId);
  //client.close();
});
/*
client.connect(err => {
  const collection2 = client.db("users").collection("readers");
  // perform actions on the collection object
  var myobj = { name: "Ajeet Kumar", age: "28", address: "Delhi" };
  collection2.insertOne(myobj, function(err, res) {
    if (err) throw err;
    console.log("1 record inserted");
    //client.close();
  });
});
*/
I see a few problems with the code.
There is no reason for you to use encodeURIComponent with static strings. But hopefully, that's just a placeholder for some real credentials that are going to come from an environment variable.
The variable uri is being assigned a static string that contains a templating pattern, but it uses double quotes instead of backticks, so it won't interpolate the variables, which in turn will generate an invalid connection string.
The same variable uri, even after replacing the double quotes " with backticks `, might still not work because of the placeholder hostname "cluster", which should also come from some sort of settings or env var.
The code seems to have the intent of using an Immediately Invoked Function Expression (IIFE for short), but it is in fact never calling the function, because the pair of parentheses is missing after the function block.
Assuming your credentials are correct the updated code below should do the trick for you:
const MongoClient = require('mongodb').MongoClient
const password = encodeURIComponent('22')
const user = encodeURIComponent('id')
const dbName = encodeURIComponent('users')
const host = process.env.MONGO_HOST || 'localhost' // or whatever is the hostname you want
const uri = `mongodb+srv://${user}:${password}#${host}/${dbName}?retryWrites=true&w=majority`
const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true, connectTimeoutMS: 30000, keepAlive: 1 })
;(async () => {
  await client.connect()
  const databse = client.db('users')
  const collection = databse.collection('readers')
  const doc = {
    name: 'Chocolate',
    ingredients: [
      'eggs', 'chocolates'
    ]
  }
  const result = await collection.insertOne(doc)
  console.log({ result })
  console.log(result.insertedId)
  client.close()
})()
I am trying to initialize a DB connection in a module and pass it to other modules.
My DB connection module (rdsconnection.js) is:
const { Pool } = require('pg');
let dbParams = require('./configparams').getDbParams();
let schema = (dbParams.db_schema === "" || dbParams.db_schema === undefined) ? "public." : dbParams.db_schema + ".";
// Connect to AWS RDS
let pool;
if (!pool)
{
  console.log("PG pool not initialized. Initializing now.");
  pool = new Pool({
    user: dbParams.username,
    host: dbParams.host,
    database: dbParams.dbname,
    password: dbParams.password,
    port: dbParams.port
  });
}
else
{
  console.log("PG already initialized.");
}
module.exports = { pool, schema }
I want to do
const { pool, schema } = require('../modules/rdsconnection');
in multiple modules and want to return only a single pool connection object.
I am assuming the if/else I am doing is the wrong way to go about this.
What is the proper way to initialize an object only once in a module?
Does the code in the module get run each time another module calls require()?
Thank you very much.
You're overcomplicating it. Node.js modules are only loaded once and then cached for reuse if they are require()'d multiple times in a program. You can simplify it to this:
const { Pool } = require('pg');
const dbParams = require('./configparams').getDbParams();
const schema = `${dbParams.db_schema || 'public'}.`;
// Connect to AWS RDS
const pool = new Pool({
  user: dbParams.username,
  host: dbParams.host,
  database: dbParams.dbname,
  password: dbParams.password,
  port: dbParams.port
});
module.exports = { pool, schema };
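As a quick usage sketch (the table and column names here are made up), any other module can then require the shared pool and query through it; every require() returns the exact same Pool instance:

// userservice.js -- hypothetical consumer module
const { pool, schema } = require('../modules/rdsconnection');

async function getUserById(id) {
  // The pool checks a client out and back in for simple one-off queries.
  const { rows } = await pool.query(
    `SELECT * FROM ${schema}users WHERE id = $1`,
    [id]
  );
  return rows[0];
}

module.exports = { getUserById };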