Generating Selection List with Cosmos DocumentDB - node.js

I am looking to generate selection lists with data from Cosmos DB; each list should be independent of the other. I am currently only able to generate one list, and the second list is a replica of the first; it won't read both. Any help will be appreciated, thanks!
App.js
const CosmosClient = require('@azure/cosmos').CosmosClient
const config = require('./config')
const TaskList = require('./routes/tasklist')
const TaskDao = require('./models/taskDao')
const express = require('express')
const path = require('path')
const logger = require('morgan')
const cookieParser = require('cookie-parser')
const bodyParser = require('body-parser')
const app = express()
// view engine setup
app.set('views', path.join(__dirname, 'views'))
app.set('view engine', 'jade')
// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'))
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({ extended: false }))
app.use(cookieParser())
app.use(express.static(path.join(__dirname, 'public')))
//Todo App:
const cosmosClient = new CosmosClient({
endpoint: config.host,
key: config.authKey
})
const taskDao = new TaskDao(cosmosClient, config.databaseId, config.containerId)
const taskDao2 = new TaskDao(cosmosClient, config.databaseId2, config.containerId2)
const taskList = new TaskList(taskDao,taskDao2)
taskDao
.init(err => {
console.error(err)
})
.catch(err => {
console.error(err)
console.error(
'Shutting down because there was an error setting up the database.'
)
process.exit(1)
})
taskDao2
.init(err => {
console.error(err)
})
.catch(err => {
console.error(err)
console.error(
'Shutting down because there was an error setting up the database.'
)
process.exit(1)
})
app.get('/', (req, res, next) => taskList.showClosurePlanList(req, res).catch(next))
app.get('/', (req, res, next) => taskList.showTestList2(req, res).catch(next))
app.post('/addtask', (req, res, next) => taskList.addTask(req, res).catch(next))
app.set('view engine', 'jade')
// catch 404 and forward to error handler
app.use(function(req, res, next) {
const err = new Error('Not Found')
err.status = 404
next(err)
})
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message
res.locals.error = req.app.get('env') === 'development' ? err : {}
// render the error page
res.status(err.status || 500)
res.render('error')
})
module.exports = app
taskDao.js
// @ts-check
const CosmosClient = require('@azure/cosmos').CosmosClient
const debug = require('debug')('todo:taskDao')
// For simplicity we'll set a constant partition key
const partitionKey = undefined
class TaskDao {
/**
* Manages reading, adding, and updating Tasks in Cosmos DB
* @param {CosmosClient} cosmosClient
* @param {string} databaseId
* @param {string} containerId
*/
constructor(cosmosClient, databaseId, containerId) {
this.client = cosmosClient
this.databaseId = databaseId
this.collectionId = containerId
this.database = null
this.container = null
}
async init() {
debug('Setting up the database...')
const dbResponse = await this.client.databases.createIfNotExists({
id: this.databaseId
})
this.database = dbResponse.database
debug('Setting up the database...done!')
debug('Setting up the container...')
const coResponse = await this.database.containers.createIfNotExists({
id: this.collectionId
})
this.container = coResponse.container
debug('Setting up the container...done!')
}
async find(querySpec, querySpec2) {
debug('Querying for items from the database')
if (!this.container) {
throw new Error('Collection is not initialized.')
}
const { resources } = await this.container.items.query(querySpec,querySpec2).fetchAll()
return resources
}
async addItem(item) {
debug('Adding an item to the database')
item.date = Date.now()
item.completed = false
const { resource: doc } = await this.container.items.create(item)
return doc
}
async getItem(itemId) {
debug('Getting an item from the database')
const { resource } = await this.container.item(itemId, partitionKey).read()
return resource
}
}
module.exports = TaskDao
tasklist.js
const TaskDao = require("../models/taskDao");
class TaskList {
/**
* Handles the various APIs for displaying and managing tasks
* @param {TaskDao} taskDao
*/
constructor(taskDao,taskDao2) {
this.taskDao = taskDao;
this.taskDao2 = taskDao2;
}
async showClosurePlanList(req, res) {
const querySpec = {
query: "SELECT * FROM root r WHERE r.ClosurePlanList=#ClosurePlanList",
parameters: [
{
name: "#ClosurePlanList",
value: "yes"
}
]
};
const items = await this.taskDao2.find(querySpec);
res.render("index", {
title: "Form",
tasks: items
});
}
async showTestList2(req, res) {
const querySpec2 = {
query: "SELECT * FROM root r WHERE r.completed=#completed",
parameters: [
{
name: "#completed",
value: true
}
]
};
const items2 = await this.taskDao.find(querySpec2);
res.render("index", {
title: "Form",
tasks: items2
});
}
async addTask(req, res) {
const item = req.body;
await this.taskDao.addItem(item);
res.redirect("/");
}
}
module.exports = TaskList;
So I have one list, Closure Plan List, that currently looks at the ClosurePlanList DocumentDB (which is perfect).
How do I get the second list, Test List 2, to look at the TestList2 DocumentDB?
I attempted to do something like this, but only one app.get is being read at a time. I'm thinking this is where my issue is: whichever app.get is on top seems to be the only one that is read.
app.get('/', (req, res, next) => taskList.showClosurePlanList(req, res).catch(next))
app.get('/', (req, res, next) => taskList.showTestList2(req, res).catch(next))
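(For context: Express only runs the first matching GET '/' handler that ends the response, so two handlers registered on the same path can never both render. A minimal, hedged sketch of one workaround, assuming each list may live on its own page, is to give the second list its own path; the '/testlist2' path below is a made-up name for illustration:)
app.get('/', (req, res, next) => taskList.showClosurePlanList(req, res).catch(next))
// hypothetical separate path so both handlers are reachable
app.get('/testlist2', (req, res, next) => taskList.showTestList2(req, res).catch(next))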
async find(querySpec, querySpec2) {
debug('Querying for items from the database')
if (!this.container) {
throw new Error('Collection is not initialized.')
}
const { resources } = await this.container.items.query(querySpec,querySpec2).fetchAll()
return resources
}
async showTestList2(req, res) {
const querySpec2 = {
query: "SELECT * FROM root r WHERE r.completed=#completed",
parameters: [
{
name: "#completed",
value: true
}
]
};
const items2 = await this.taskDao.find(querySpec2);
res.render("index", {
title: "Form",
tasks: items2
});
}
Example from here: https://learn.microsoft.com/en-us/azure/cosmos-db/sql-api-nodejs-application#_Toc395783181

If you want to get data and show it at http://localhost:3000, you should modify both views/index.jade and the showTasks function in routes/tasklist.js.
I implemented a simple demo for you. First, go to routes/tasklist.js and replace the showTasks function with the code below:
async showTasks(req, res) {
const querySpec = {
query: "SELECT * FROM root r WHERE r.completed=#completed",
parameters: [
{
name: "#completed",
value: false
}
]
};
const items = await this.taskDao.find(querySpec);
const itemsFromCollection1 = await this.taskDao.find("SELECT * FROM r");
const itemsFromCollection2 = await this.taskDao2.find("SELECT * FROM r");
res.render("index", {
title: "My ToDo List ",
tasks: items,
list1 : itemsFromCollection1,
list2 : itemsFromCollection2
});
}
Go to views/index.jade and append the code below to show your list1 and list2 data:
label data from collection1
select(id="demo1")
-for(var i = 0;i<list1.length;i++){
option(value="#{i}") #{list1[i].name}
-}
label data from collection2
select(id="demo2")
-for(var i = 0;i<list2.length;i++){
option(value="#{i}") #{list2[i].name}
-}
Once these steps are done, you can run the project and test it.
Hope it helps. Please mark it as an answer if it helps you. If you have any further questions, please post a new question and let me know the link.

Related

req is "undefined" in one middleware and not in another

I am trying to use sharp in my MERN application. I send a request from my frontend and req is undefined in my sharp middleware, but if I get rid of the sharp middleware, req is defined later on. If I log the request in createCountry, the body is defined; if I log it in convertToWebP, it is not.
The route is the one that says "/new" below:
const express = require("express");
const router = express.Router();
const { storage } = require("../imageupload/cloudinary.js");
const multer = require("multer");
const {
getCountry,
createCountry,
getCountries,
updateCountry,
deleteCountry,
getAllCountries,
} = require("../controllers/country.js");
const {convertToWebP} = require('../middlewares/toWebP')
const { isLoggedIn, authorizeCountry, validateCountry } = require("../middlewares/auth");
const catchAsync = require("../utils/catchAsync");
const ExpressError = require("../utils/ExpressError");
const upload = multer({ storage: storage });
router.get("/", getCountries);
router.get('/getAll', getAllCountries);
router.post("/new", isLoggedIn, converToWebP, upload.array("images"), createCountry);
router.get("/:countryId", getCountry);
router.patch("/:countryId", validateCountry, authorizeCountry, upload.array("images", 8), updateCountry);
router.delete("/:countryId", authorizeCountry, deleteCountry);
module.exports = router;
the code for create country is here:
exports.createCountry = async (req, res) => {
const { name, description, tags, location, cjLink } = req.body;
const creator = req.user._id;
const images = req.files.map((file) => {
return { image: file.path, publicId: file.filename };
});
try {
const geoData = await geocoder
.forwardGeocode({
query: req.body.location,
limit: 1,
})
.send();
const geometry = geoData.body.features[0].geometry;
const country = new Country({
name,
description,
tags,
creator,
location, //: //geometry
geometry,
url: '',
cjLink: cjLink,
});
const overall = new Overall({
name,
description,
tags,
creator,
location, //: //geometry
geometry,
url: '',
cjLink: cjLink,
});
country.images.push(...images);
country.headerImage.push(...images);
const data = await country.save();
overall.url = `/country/${data._id}`
data.url = `/country/${data._id}`
overall.save();
data.save();
return res.status(201).json(data);
} catch (error) {
return console.log("error during create country", error);
}
};
And lastly the code for the convertToWebP is here:
const sharp = require("sharp");
const { cloudinary } = require("../imageupload/cloudinary");
exports.convertToWebP = async (req, res, next) => {
try {
req.files = await Promise.all(req.files.map(async (file) => {
const buffer = await sharp(file.buffer)
.toFormat('webp')
.toBuffer();
return { ...file, buffer, originalname: `${file.originalname}.webp` };
}));
next();
} catch (error) {
res.status(500).json({ message: error.message });
}
};
Any help is appreciated! I tried console.log as described above, I tried changing the order of the middleware (that does not work either), and I tried logging req.body directly from the route; it came up as an empty object.
You cannot access req.files before you use the multer middleware.
You have to reorder:
router.post("/new", isLoggedIn, upload.array("images"), convertToWebP, createCountry);

The link to go to the `ejs` page does not work

I can't go to another ejs page.
Description
The link to go to the ejs page does not work
I have an existing project... Everything works in it...
Example: a transition is made from Index -> to a subpage.
In the current state of the project, the subpage is located at localhost:3000/0/articles/14
The subpage is filled with data from the database.
What I did:
created my own Index page;
the Index page opens;
created my own About subpage.
Result: switching from the Index page to About does not work.
Index.ejs
<h1>Index page</h1>
<a href="/about" >About-1. Описание</a> </br>
<a href="http://localhost:3000/About/" >About-2. Описание</a>
I added this to the routes.js file:
.get('/about', (req, res) => {
res.render('about');
})
The whole routes.js code:
const multer = require('multer');
const rand = require('randomstring');
const filesStorage = multer.diskStorage({
destination: (req, file, next) => {
next(null, 'static/uploads/files');
},
filename: (req, file, next) => {
const ext = file.originalname.split('.').pop();
next(null, rand.generate({
length: 32,
charset: 'alphabetic'
}) + '.' + ext);
}
});
const filesUpload = new multer({
storage: filesStorage
});
const site = {
main: require('./controllers/main')
};
const cms = {
articles: require('./controllers/cms/articles'),
files: require('./controllers/cms/files'),
lang: require('./controllers/cms/lang'),
slideshow: require('./controllers/cms/slideshow')
};
module.exports = (app, passport) => {
app
.get('/', site.main.lang)
.get('/video', site.main.video)
.get('/slideshow', site.main.slideshow)
.get('/:lang', site.main.index)
/*articles*/
.get('/:lang/articles', site.main.index)
.get('/:lang/articles/:id', site.main.article)
.get('/:lang/panomuseum', site.main.panomuseum)
.get('/:lang/panomuseum/2', site.main.panomuseum2)
.get('/:lang/panotheatre', site.main.panotheatre)
/*My*/
// .get('/:lang/articles', site.main.index)
.get('/Index', site.main.index)
.get('/history', site.main.history)
// .get('/history', (req, res) => {
// res.render('history');
// })
.get('/about', (req, res) => {
res.render('about');
})
;
app
.get('/cms/lang', cms.lang.index)
.post('/cms/lang', filesUpload.any(), cms.lang.save)
.get('/cms/:lang/articles', cms.articles.index)
.post('/cms/articles/saveOrder', cms.articles.saveOrder)
.get('/cms/:lang/articles/add', cms.articles.add)
.post('/cms/:lang/articles/add', filesUpload.any(), cms.articles.postAdd)
.get('/cms/:lang/articles/:id/edit', cms.articles.edit)
.post('/cms/:lang/articles/:id/edit', filesUpload.any(), cms.articles.postEdit)
.get('/cms/:lang/articles/:id/delete', cms.articles.delete)
.get('/cms/:lang/articles/:id', cms.articles.subArticle)
.get('/cms/:lang/articles/add/:id', cms.articles.add)
.post('/cms/files/delete', cms.files.delete)
.post('/cms/files/saveFile', filesUpload.single('file'), cms.files.saveFile)
.post('/cms/files/saveThumb', filesUpload.single('thumb'), cms.files.saveThumb)
.get('/cms/slideshow', cms.slideshow.index)
.post('/cms/slideshow/save', filesUpload.any(), cms.slideshow.save);
return app;
};
controllers\main.js
const db = require('../db');
const fs = require('fs');
const path = require('path');
const config = require('../config.js');
class Main {
async video(req, res, next) {
const videoFolder = './static/video'
let videos = []
fs.readdirSync(videoFolder).forEach((file) => {
let extension = path.extname(file)
let filename = path.basename(file, extension)
videos.push({
file,
filename: parseInt(filename),
})
})
videos = videos.sort((a, b) => {
return a.filename - b.filename
})
return res.render('video', {
domain: config.express.domain,
videos,
})
}
async panomuseum(req, res) {
const article = await db.article.getByID(req.params.lang);
const sub = await db.article.getRoot(req.params.lang);
const files = await db.files.getByOwnerId(req.params.lang);
const lang = await db.lang.getById(req.params.lang);
return res.render('panomuseum', {
article,
sub,
files,
lang
});
}
async panomuseum2(req, res) {
const article = await db.article.getByID(req.params.lang);
const sub = await db.article.getRoot(req.params.lang);
const files = await db.files.getByOwnerId(req.params.lang);
const lang = await db.lang.getById(req.params.lang);
return res.render('panomuseum2', {
article,
sub,
files,
lang
});
}
async panotheatre(req, res) {
const article = await db.article.getByID(req.params.lang);
const sub = await db.article.getRoot(req.params.lang);
const files = await db.files.getByOwnerId(req.params.lang);
const lang = await db.lang.getById(req.params.lang);
return res.render('panotheatre', {
article,
sub,
files,
lang
});
}
async index(req, res) {
const article = await db.article.getByID(req.params.lang);
const sub = await db.article.getRoot(req.params.lang);
const files = await db.files.getByOwnerId(req.params.lang);
const lang = await db.lang.getById(req.params.lang);
const timeout = await db.settings.getByID("timeout");
const caption = await db.settings.getByID("caption");
return res.render("index", {
article,
sub,
files,
lang,
timeout,
caption,
domain: req.app.get("domain"),
});
}
// async history(req, res) {
// // const article = await db.article.getByID(req.params.lang);
// // const sub = await db.article.getRoot(req.params.lang);
// // const files = await db.files.getByOwnerId(req.params.lang);
// // const lang = await db.lang.getById(req.params.lang);
// // const timeout = await db.settings.getByID("timeout");
// // const caption = await db.settings.getByID("caption");
// return res.render("history", {
// domain: req.app.get("domain")
// });
// }
async history(req, res) {
console.log('Request for history page received');
return res.render("history");
}
async about(req, res) {
console.log('Request for about page received');
return res.render("about");
}
async menu(req, res) {
return res.render("menu", {
domain: req.app.get("domain"),
});
}
async slideshow(req, res) {
const slideshow = await db.files.getSlideshow();
const timer = await db.settings.getByID("timer");
return res.render("slideshow", {
slideshow,
timer,
domain: req.app.get("domain"),
});
}
async slide(req, res) {
const slideshow = await db.files.getByID(req.params.id);
const timer = await db.settings.getByID("timer");
return res.render("slideshow", {
slideshow: [slideshow],
timer,
domain: req.app.get("domain"),
});
}
async article(req, res) {
const article = await db.article.getByID(req.params.id);
const sub = await db.article.getSub(req.params.id);
const files = await db.files.getByOwnerId(req.params.id);
const id = req.params.id;
const lang = await db.lang.getById(req.params.lang);
const timeout = await db.settings.getByID("timeout");
const caption = await db.settings.getByID("caption");
return res.render("index", {
id,
article,
sub,
files,
lang,
timeout,
caption,
domain: req.app.get("domain"),
});
}
async lang(req, res) {
const langs = await db.lang.getAll();
let activeCount = 0;
for (let lang of langs) {
if (lang.value == 1) {
activeCount++;
}
}
if (activeCount == 0) {
return res.redirect("/0");
} else if (activeCount == 1) {
for (let lang of langs) {
if (lang.value == 1) {
return res.redirect("/" + lang.id);
}
}
}
const timeout = await db.settings.getByID("timeout");
return res.render("lang", {
langs,
timeout,
});
}
async openSlide(req, res) {
console.log("openSlide");
let files = await db.files.getSyncSmartHome();
parentIO.sockets.in("client").emit("goToUrl", {
message: "/slide/" + files[parseInt(req.params.id)].id,
});
return res.json({
success: true,
});
}
async openSlideshow(req, res) {
console.log("open slideshow");
parentIO.sockets.in("client").emit("goToUrl", {
message: "/slideshow",
});
return res.json({
success: true,
});
}
}
module.exports = new Main();
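(One likely cause, offered as an assumption based on the route order shown: Express matches routes in registration order, so '/:lang' captures a request to /about before the '/about' handler is ever reached, and site.main.index runs with req.params.lang === 'about'. A minimal sketch of the usual fix is to register the static pages before the parameterized route:)
module.exports = (app, passport) => {
  app
    .get('/', site.main.lang)
    .get('/video', site.main.video)
    .get('/slideshow', site.main.slideshow)
    // static pages first, so '/:lang' does not swallow them
    .get('/about', (req, res) => res.render('about'))
    .get('/history', site.main.history)
    .get('/:lang', site.main.index);
    // ...remaining routes registered as before
  return app;
};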

node js rest api for crud application

I have started off creating a REST API using Node.js. I have designed create and read functions and tested the functionality using Postman. Can you suggest how to add update and delete functions to it?
My dboperations.js file
var config = require('./dbconfig');
const sql = require('mssql/msnodesqlv8');
async function getFilms(){
try{
let pool = await sql.connect(config);
let films = await pool.request().query("SELECT * from Film8node");
return films.recordsets;
}
catch (error){
console.log(error);
}
}
async function getFilm(Film_id){
try{
let pool = await sql.connect(config);
let films = await pool.request()
.input('input_parameter',sql.Int,Film_id)
.query("SELECT * from Film8node where Film_id = #input_parameter"); ;
return films.recordsets;
}
catch (error){
console.log(error);
}
}
async function addFilm(Film8node){
try{
let pool = await sql.connect(config);
let insertFilm = await pool.request()
.input('film_name',sql.VarChar,Film8node.film_name)
.input('actor',sql.VarChar,Film8node.actor)
.input('actress',sql.VarChar,Film8node.actress)
.input('pub_date',sql.VarChar,Film8node.pub_date)
.input('director',sql.VarChar,Film8node.director)
.input('producer',sql.VarChar,Film8node.producer)
.input('prod_cost',sql.VarChar,Film8node.prod_cost)
.input('dist_cost',sql.VarChar,Film8node.dist_cost)
.input('category',sql.VarChar,Film8node.category)
.input('cert_category',sql.VarChar,Film8node.cert_category)
.input('poster',sql.VarBinary,Film8node.poster)
.query("INSERT into Film8node(film_name,actor,actress,pub_date,director,producer,prod_cost,dist_cost,category,cert_category,poster) values(#film_name,#actor,#actress,#pub_date,#director,#producer,#prod_cost,#dist_cost,#category,#cert_category,#poster)");
return insertFilm.recordsets;
}
catch (error){
console.log(error);
}
}
async function updateFilm(Film8node,Film_id){
try{
let pool = await sql.connect(config);
let updFilm = await pool.request()
.input('Film_id',sql.Int,Film_id)
.input('film_name',sql.VarChar,Film8node.film_name)
.input('actor',sql.VarChar,Film8node.actor)
.input('actress',sql.VarChar,Film8node.actress)
.input('pub_date',sql.VarChar,Film8node.pub_date)
.input('director',sql.VarChar,Film8node.director)
.input('producer',sql.VarChar,Film8node.producer)
.input('prod_cost',sql.VarChar,Film8node.prod_cost)
.input('dist_cost',sql.VarChar,Film8node.dist_cost)
.input('category',sql.VarChar,Film8node.category)
.input('cert_category',sql.VarChar,Film8node.cert_category)
.input('poster',sql.VarBinary,Film8node.poster)
.query("UPDATE Film8node set film_name=#film_name,actor=#actor,actress=#actress,pub_date=#pub_date,director=#director,producer=#producer,prod_cost=#prod_cost,dist_cost=#dist_cost,category=#category,cert_category=#cert_category,poster=#poster where Film_id=#Film_id");
return updFilm.recordsets;
}
catch (error){
console.log(error);
}
}
module.exports = {
getFilms : getFilms,
getFilm : getFilm,
addFilm : addFilm,
updateFilm : updateFilm
}
My api.js file
var Db = require('./dboperations');
var Filmnode = require('./Film8node');
const dboperations = require('./dboperations');
var express = require('express');
var bodyParser = require('body-parser');
var cors = require('cors');
var app = express();
var router = express.Router();
app.use(bodyParser.urlencoded({extended: true}));
app.use(bodyParser.json());
app.use(cors());
app.use('/api',router);
router.use((request,response,next) => {
console.log('middleware');
next();
})
router.route('/Films').get((request,response) => {
dboperations.getFilms().then(result => {
response.json(result[0]);
})
})
router.route('/Films/:Film_id').get((request,response)=>{
dboperations.getFilm(request.params.Film_id).then(result => {
response.json(result[0]);
})
})
router.route('/Films').post((request,response) => {
let Film8node = {...request.body}
dboperations.addFilm(Film8node).then(result => {
response.status(201).json(result);
})
})
router.route('/Films/:Film_id').put((request,response)=>{
let Film8node = {...request.body}
dboperations.updateFilm(Film8node,request.params.Film_id).then(result => {
response.status(201).json(result);
})
})
var port = process.env.PORT || 8090;
app.listen(port);
console.log('Film API is running at ' + port);
dboperations.getFilms().then(result => {
console.log(result);
})
My Film8node.js file
class Filmnode{
constructor(Film_id,film_name,actor,actress,pub_date,director,producer,prod_cost,dist_cost,category,cert_category,poster){
this.Film_id=Film_id;
this.film_name=film_name;
this.actor=actor;
this.actress=actress;
this.pub_date=pub_date;
this.director=director;
this.producer=producer;
this.prod_cost=prod_cost;
this.dist_cost=dist_cost;
this.category=category;
this.cert_category=cert_category;
this.poster=poster;
}
}
module.exports = Filmnode;
Can anyone help me add update and delete functions to this?
Is my updateFilm method correct in the dboperations.js file?
And is the router.route('/Films/:Film_id').put((request, response) => ...) part in the api.js file correct?
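A minimal, hedged sketch of a delete operation in the same style, assuming the Film8node table and Film_id column used above (the deleteFilm name is made up for illustration). In dboperations.js:
async function deleteFilm(Film_id){
    try{
        let pool = await sql.connect(config);
        let delFilm = await pool.request()
            .input('Film_id', sql.Int, Film_id)
            .query("DELETE from Film8node where Film_id = @Film_id");
        return delFilm.rowsAffected;
    }
    catch (error){
        console.log(error);
    }
}
// remember to add deleteFilm to module.exports alongside updateFilm
And a matching route in api.js:
router.route('/Films/:Film_id').delete((request, response) => {
    dboperations.deleteFilm(request.params.Film_id).then(result => {
        response.status(200).json(result);
    })
})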

Post Request stuck on Postman

I am learning to build an API using Node.js, Express and MongoDB. I am in the early learning phase.
My code is as follows.
const express = require("express");
const app = express();
const mongojs = require("mongojs");
const { body, param, validationResult } = require("express-validator");
const db = mongojs("travel", ["records"]);
const bodyParser = require("body-parser");
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.get("/api/records", function (req, res) {
const options = req.query;
const sort = options.sort || {};
const filter = options.filter || {};
const limit = 2;
const page = parseInt(options.page) || 1;
const skip = (page - 1) * limit;
for (var i in sort) {
sort[i] = parseInt(sort[i]);
}
db.records
.find(filter)
.sort(sort)
.skip(skip)
.limit(limit, function (err, data) {
if (err) {
return res.sendStatus(500);
} else {
return res.status(200).json({
meta: {
filter,
sort,
skip,
limit,
page,
total: data.length,
},
data,
links: {
self: req.originalUrl,
},
});
}
});
});
app.post("/api/records", function (req, res) {
db.records.insert(req.body, function (err, data) {
if (err) {
return res.status(500);
}
const _id = data._id;
res.append("Location", "/api/records/" + _id);
return res.send(201).json({
meta: { _id },
data,
});
});
});
app.listen(8000, function () {
console.log("Server running at port 8000...");
});
When I try a GET request in Postman, it works fine. But when I try a POST request, it shows an error (error message screenshot link).
Please let me know what is wrong here.
Thanks in advance
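A hedged guess at the cause, since the error screenshot is not included: res.status(500) by itself never ends the response, so the request hangs whenever the insert errors (for example when req.body is empty or not JSON), and res.send(201) already sends a response, so the chained .json() then fails. A minimal sketch of the POST handler with those two calls adjusted:
app.post("/api/records", function (req, res) {
    db.records.insert(req.body, function (err, data) {
        if (err) {
            // sendStatus() actually ends the response; status(500) alone leaves Postman waiting
            return res.sendStatus(500);
        }
        const _id = data._id;
        res.append("Location", "/api/records/" + _id);
        // status(201) sets the status code, then json() ends the response
        return res.status(201).json({
            meta: { _id },
            data,
        });
    });
});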

Close MongoDB connection after retrieving data [duplicate]

I've been reading and reading and am still confused about the best way to share the same database (MongoDB) connection across a whole Node.js app. As I understand it, the connection should be opened when the app starts and reused between modules. My current idea of the best way is that server.js (the main file where everything starts) connects to the database and creates an object variable that is passed to the modules. Once connected, this variable is used by module code as necessary, and the connection stays open. E.g.:
var MongoClient = require('mongodb').MongoClient;
var mongo = {}; // this is passed to modules and code
MongoClient.connect("mongodb://localhost:27017/marankings", function(err, db) {
if (!err) {
console.log("We are connected");
// these tables will be passed to modules as part of mongo object
mongo.dbUsers = db.collection("users");
mongo.dbDisciplines = db.collection("disciplines");
console.log("aaa " + users.getAll()); // displays object and this can be used from inside modules
} else
console.log(err);
});
var users = new(require("./models/user"))(app, mongo);
console.log("bbb " + users.getAll()); // not connected at the very first time so displays undefined
Then another module, models/user, looks like this:
Users = function(app, mongo) {
Users.prototype.addUser = function() {
console.log("add user");
}
Users.prototype.getAll = function() {
return "all users " + mongo.dbUsers;
}
}
module.exports = Users;
Now I have a horrible feeling that this is wrong, so are there any obvious problems with this approach and, if so, how can I make it better?
You can create a mongoUtil.js module that has functions to both connect to mongo and return a mongo db instance:
const MongoClient = require( 'mongodb' ).MongoClient;
const url = "mongodb://localhost:27017";
var _db;
module.exports = {
connectToServer: function( callback ) {
MongoClient.connect( url, { useNewUrlParser: true }, function( err, client ) {
_db = client.db('test_db');
return callback( err );
} );
},
getDb: function() {
return _db;
}
};
To use it, you would do this in your app.js:
var mongoUtil = require( 'mongoUtil' );
mongoUtil.connectToServer( function( err, client ) {
if (err) console.log(err);
// start the rest of your app here
} );
And then, when you need access to mongo somewhere else, like in another .js file, you can do this:
var mongoUtil = require( 'mongoUtil' );
var db = mongoUtil.getDb();
db.collection( 'users' ).find();
The reason this works is that in Node, modules are only loaded/sourced once when they are require'd, so you will only ever end up with one instance of _db, and mongoUtil.getDb() will always return that same instance.
Note, code not tested.
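(A throwaway sketch of that require() caching behaviour, with made-up file names:)
// counter.js -- module-level state is created once, on the first require
let calls = 0;
module.exports = () => ++calls;
// a.js
const count = require('./counter');
console.log(count());                // 1
console.log(require('./counter')()); // 2 -- the second require returns the same cached module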
There are many ways this could be tweaked to accept configuration objects in places, but overall it's similar to how you have your code laid out, albeit with more modern JS syntax. Could easily be rewritten to prototypes and callbacks, if that's your requirement.
mongo.js
const { MongoClient } = require('mongodb');
const config = require('./config');
const Users = require('./Users');
const conf = config.get('mongodb');
class MongoBot {
constructor() {
const url = `mongodb://${conf.hosts.join(',')}`;
this.client = new MongoClient(url, conf.opts);
}
async init() {
await this.client.connect();
console.log('connected');
this.db = this.client.db(conf.db);
this.Users = new Users(this.db);
}
}
module.exports = new MongoBot();
Users.js
class User {
constructor(db) {
this.collection = db.collection('users');
}
async addUser(user) {
const newUser = await this.collection.insertOne(user);
return newUser;
}
}
module.exports = User;
app.js
const mongo = require('./mongo');
async function start() {
// other app startup stuff...
await mongo.init();
// other app startup stuff...
}
start();
someFile.js
const { Users } = require('./mongo');
async function someFunction(userInfo) {
const user = await Users.addUser(userInfo);
return user;
}
Here's how I do it with contemporary syntax, based on go-oleg's example. Mine is tested and functional.
I put some comments in the code.
./db/mongodb.js
const MongoClient = require('mongodb').MongoClient
const uri = 'mongodb://user:password@localhost:27017/dbName'
let _db
const connectDB = async (callback) => {
try {
MongoClient.connect(uri, (err, db) => {
_db = db
return callback(err)
})
} catch (e) {
throw e
}
}
const getDB = () => _db
const disconnectDB = () => _db.close()
module.exports = { connectDB, getDB, disconnectDB }
./index.js
// Load MongoDB utils
const MongoDB = require('./db/mongodb')
// Load queries & mutations
const Users = require('./users')
// Improve debugging
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at:', p, 'reason:', reason)
})
const seedUser = {
name: 'Bob Alice',
email: 'test@dev.null',
bonusSetting: true
}
// Connect to MongoDB and put server instantiation code inside
// because we start the connection first
MongoDB.connectDB(async (err) => {
if (err) throw err
// Load db & collections
const db = MongoDB.getDB()
const users = db.collection('users')
try {
// Run some sample operations
// and pass users collection into models
const newUser = await Users.createUser(users, seedUser)
const listUsers = await Users.getUsers(users)
const findUser = await Users.findUserById(users, newUser._id)
console.log('CREATE USER')
console.log(newUser)
console.log('GET ALL USERS')
console.log(listUsers)
console.log('FIND USER')
console.log(findUser)
} catch (e) {
throw e
}
const desired = true
if (desired) {
// Use disconnectDB for clean driver disconnect
MongoDB.disconnectDB()
process.exit(0)
}
// Server code anywhere above here inside connectDB()
})
./users/index.js
const ObjectID = require('mongodb').ObjectID
// Notice how the users collection is passed into the models
const createUser = async (users, user) => {
try {
const results = await users.insertOne(user)
return results.ops[0]
} catch (e) {
throw e
}
}
const getUsers = async (users) => {
try {
const results = await users.find().toArray()
return results
} catch (e) {
throw e
}
}
const findUserById = async (users, id) => {
try {
if (!ObjectID.isValid(id)) throw 'Invalid MongoDB ID.'
const results = await users.findOne(ObjectID(id))
return results
} catch (e) {
throw e
}
}
// Export garbage as methods on the Users object
module.exports = { createUser, getUsers, findUserById }
If you are using Express, then you can use the mongo-express-req module, which allows you to get the db connection from the request object.
Install
npm install --save mongo-express-req
server.js
var app = require('express')();
var mongoExpressReq = require('mongo-express-req');
app.use(mongoExpressReq('mongodb://localhost/test'));
routes/users.js
app.get('/', function (req, res, next) {
req.db // => Db object
});
Note: mongo-express-req is a fork of the unmaintained express-mongo-db.
A tested solution based on the accepted answer:
mongodbutil.js:
var MongoClient = require( 'mongodb' ).MongoClient;
var _db;
module.exports = {
connectToServer: function( callback ) {
MongoClient.connect( "<connection string>", function( err, client ) {
_db = client.db("<database name>");
return callback( err );
} );
},
getDb: function() {
return _db;
}
};
app.js:
var createError = require('http-errors');
var express = require('express');
var path = require('path');
var cookieParser = require('cookie-parser');
var logger = require('morgan');
var app = express();
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
var mongodbutil = require( './mongodbutil' );
mongodbutil.connectToServer( function( err ) {
//app goes online once this callback occurs
var indexRouter = require('./routes/index');
var usersRouter = require('./routes/users');
var companiesRouter = require('./routes/companies');
var activitiesRouter = require('./routes/activities');
var registerRouter = require('./routes/register');
app.use('/', indexRouter);
app.use('/users', usersRouter);
app.use('/companies', companiesRouter);
app.use('/activities', activitiesRouter);
app.use('/register', registerRouter);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
next(createError(404));
});
// error handler
app.use(function(err, req, res, next) {
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
res.status(err.status || 500);
res.render('error');
});
//end of calback
});
module.exports = app;
activities.js -- a route:
var express = require('express');
var router = express.Router();
var mongodbutil = require( '../mongodbutil' );
var db = mongodbutil.getDb();
router.get('/', (req, res, next) => {
db.collection('activities').find().toArray((err, results) => {
if (err) return console.log(err)
res.render('activities', {activities: results, title: "Activities"})
});
});
router.post('/', (req, res) => {
db.collection('activities').save(req.body, (err, result) => {
if (err) return console.log(err)
res.redirect('/activities')
})
});
module.exports = router;
Here is my setup in 2020:
./utils/database.js
const { MongoClient } = require('mongodb');
class Mongo {
constructor () {
this.client = new MongoClient("mongodb://127.0.0.1:27017/my-app", {
useNewUrlParser: true,
useUnifiedTopology: true
});
}
async main () {
await this.client.connect();
console.log('Connected to MongoDB');
this.db = this.client.db();
}
}
module.exports = new Mongo();
/app.js
const mongo = require('./utils/database');
const express = require('express');
const app = express();
const boot = async () => {
await mongo.main();
app.listen(3000);
};
boot();
go-oleg is basically right, but these days you (probably) don't want to use "mongodb" itself; rather, use some framework which will do a lot of the "dirty work" for you.
For example, mongoose is one of the most common. This is what we have in our initial server.js file:
const mongoose = require('mongoose');
const options = {server: {socketOptions: {keepAlive: 1}}};
mongoose.connect(config.db, options);
This is everything that is needed to set it up. Now use this anywhere in your code:
const mongoose = require('mongoose');
And you get the instance you set up with mongoose.connect.
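For completeness, a small hedged sketch of using that shared connection from another file (the User schema and file name here are assumptions, not part of the original answer):
// models/user.js
const mongoose = require('mongoose');
// registers the model on the default connection opened by mongoose.connect() in server.js
const User = mongoose.model('User', new mongoose.Schema({ name: String }));
async function getAll() {
    return User.find().lean();
}
module.exports = { getAll };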
I'm late to the party, but hopefully this answer will help someone; this is functional code:
db.js
const MongoClient = require("mongodb").MongoClient
const urlMongo = "mongodb://localhost:27017"
var db;
function connectToServer( callback ) {
MongoClient.connect(urlMongo, { useUnifiedTopology: true , useNewUrlParser: true }, function( err, client ) {
db = client.db('auth');
return callback( err );
})
}
function getDb() {
return db
}
module.exports = {connectToServer, getDb}
We export one function to connect to Mongo and another to get the instance of the connection.
app.js
const express = require('express')
const app = express()
const mongo = require('./db.js');
mongo.connectToServer( function( err) {
if (err) console.log(err);
const auth = require('./modulos')
app.post('/login', (req, res) => { auth.login(req, res)})
app.listen(3000, function () { console.log('Corriendo en puerto 3000')})
});
We must require the auth module after we initialize the connection; otherwise the getDb function will return undefined.
module.js
const db = require('../db.js').getDb()
const usuariosCollection = db.collection('usuarios')
function login(req, res){
usuariosCollection.find({ 'username': 'Fran' }).toArray(function (err, doc) {
...
})
}
As this is tagged with Express, I thought I would mention that Express has a built in feature to share data between routes. There is an object called app.locals. We can attach properties to it and access it from inside our routes. You simply instantiate your mongo connection in your app.js file.
var app = express();
MongoClient.connect('mongodb://localhost:27017/')
.then(client =>{
const db = client.db('your-db');
const collection = db.collection('your-collection');
app.locals.collection = collection;
});
// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection can now be accessed within your routes as below without the need for creating and requiring additional modules.
app.get('/', (req, res) => {
const collection = req.app.locals.collection;
collection.find({}).toArray()
.then(response => res.status(200).json(response))
.catch(error => console.error(error));
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require additional modules.
Initialize the connection as a promise:
const MongoClient = require('mongodb').MongoClient
const uri = 'mongodb://...'
const client = new MongoClient(uri)
const connection = client.connect() // initialized connection
And then call the connection whenever you wish to perform an action on the database:
// if I want to insert into the database...
const connect = connection
connect.then(() => {
const doc = { id: 3 }
const db = client.db('database_name')
const coll = db.collection('collection_name')
coll.insertOne(doc, (err, result) => {
if(err) throw err
})
})
Here's a suggestion using TypeScript and ES6 features and syntax:
db.ts
import { Db, MongoClient } from 'mongodb'
let client: MongoClient
let db: Db
const connectToDatabase = async () => {
client = new MongoClient('databaseURI')
await client.connect()
db = client.db('dbname')
}
export {
connectToDatabase,
client,
db,
}
index.ts
import express from 'express'
import { someRouter } from './routes/someRoute'
import { connectToDatabase } from './db'
connectToDatabase().then(() => {
const app = express()
app.use('/someRoute', someRouter)
const port = process.env.PORT || 5000
app.listen(port, () => {
console.log(`Server is listening on port ${port}`)
})
})
routes/someRoute.ts
import express from 'express'
import { db } from '../db'
const someRouter = express.Router()
someRouter.route('/')
.get(async (req, res) => {
const results = await db.collection('collectionName').find().toArray()
return res.send(results)
})
export {
someRouter,
}
We can create a db connection file like dbconnection.js:
const MongoClient = require('mongodb').MongoClient
const mongo_url = process.env.MONGO_URL;
module.exports = {
connect: async function(callback) {
var connection;
await new Promise((resolve, reject) => {
MongoClient.connect(mongo_url, {
useNewUrlParser: true
}, (err, database) => {
if (err)
reject();
else {
connection = database;
resolve();
}
});
});
return connection;
}
};
and then use this file in your app like this:
var connection = require('../dbconnection');
and then use it like this inside your async function:
db = await connection.connect();
Hope this will work.
I find this works well :)
mongoUtil.ts
import { MongoClient } from 'mongodb';
const uri =
'MONGOSTRING';
let connPoolPromise: any = null;
const mongoPoolPromise = () => {
if (connPoolPromise) return connPoolPromise;
connPoolPromise = new Promise((resolve, reject) => {
const conn = new MongoClient(uri, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
if (conn.isConnected()) {
return resolve(conn);
} else {
conn
.connect()
.then(() => {
return resolve(conn.db('DATABASENAME'));
})
.catch(err => {
console.log(err);
reject(err);
});
}
});
return connPoolPromise;
};
export = {
mongoPoolPromise,
};
anyFile.ts
const { mongoPoolPromise } = require('./mongoUtil');
async function getProducts() {
const db = await mongoPoolPromise();
const data = await db
.collection('myCollection')
.find({})
.toArray();
console.log(data);
return data;
}
export { getProducts };
I'm a bit late for this, but I'll add my solution too. It's a much noobier approach compared to the answers here.
Anyway, if you are using MongoDB version 4.0 and the Node.js driver 3.0 (or higher), you can use the isConnected() function from MongoClient.
const MongoClient = require('mongodb').MongoClient;
const uri = "<your connection url>";
const client = new MongoClient(uri, { useNewUrlParser: true });
if (client.isConnected()) {
execute();
} else {
client.connect().then(function () {
execute();
});
}
function execute() {
// Do anything here
// Ex: client.db("mydb").collection("mycol");
}
This worked fine for me. Hope it helps.
Based on the accepted answers, I use a simple approach. But use this only if you want to use db inside a function which will be executed after some time. For example, in Express route functions, it is the easiest approach you can take.
mongo.js
const MongoClient = require("mongodb").MongoClient
var db
const connectDb = (callback) => {
if (db) return callback()
MongoClient.connect( uri, {ops},
(err, database) => {
if (err) return console.log(err)
db = database.db("dbName")
console.log("Database Connected")
callback()
}
)
}
const getDb = (collectionToGet) => {
return db.collection(collectionToGet)
}
module.exports = {
connectDb,
getDb,
}
Now, in other files where you want the db object,
user.js
const { connectDb, getDb } = require('./mongo.js')
var db // store db object in this object
connectDb(() => ( db = getDb("user") ))
app.get('/', (req, res) => {
// do something with req
db.insert({})
// do something with res
})
If you opt for using Mongoose in your application, edit your app.js file with the following snippet:
app.js
const mongoose = require('mongoose');
mongoose.connect('mongodb://localhost:27017/Your_Data_Base_Name', {useNewUrlParser:true})
.then((res) => {
console.log(' ########### Connected to mongDB ###########');
})
.catch((err) => {
console.log('Error in connecting to mongoDb' + err);
});
Next Step:
Define Models for your application require them and perform CRUD operation directly for example
blogSchema.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const blogSchema = new Schema({
_id : mongoose.Schema.Types.ObjectId,
title : {
type : 'String',
unique : true,
required : true
},
description : String,
comments : [{type : mongoose.Schema.Types.ObjectId, ref: 'Comment'}]
});
module.exports = mongoose.model('Blog', blogSchema);
Usage
createBlog.js
const Blog = require('../models/blogSchema');
exports.createBlog = (req, res, next) => {
const blog = new Blog({
_id : new mongoose.Types.ObjectId,
title : req.body.title,
description : req.body.description,
});
blog.save((err, blog) => {
if(err){
console.log('Server Error save fun failed');
res.status(500).json({
msg : "Error occured on server side",
err : err
})
}else{
//do something....
}
});
};
You don't need to always connect to MongoDB...
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/';
var Pro1;
module.exports = {
DBConnection:async function()
{
Pro1 = new Promise(async function(resolve,reject){
MongoClient.connect(url, { useNewUrlParser: true },function(err, db) {
if (err) throw err;
resolve(db);
});
});
},
getDB:async function(Blockchain , Context)
{
bc = Blockchain;
contx = Context;
Pro1.then(function(_db)
{
var dbo = _db.db('dbname');
dbo.collection('collectionname').find().limit(1).skip(0).toArray(function(err,result) {
if (err) throw err;
console.log(result);
});
});
},
closeDB:async function()
{
Pro1.then(function(_db){
_db.close();
});
}
};
const express = require('express')
const server = express()
const mongoClient = require('./MongoDB.js').client
const port = 3000
;(async () => {
await mongoClient.connect()
server.listen(port, () => console.log(`Server is listening on port ${port}!`))
})().catch(console.error)
You can use the Singleton design pattern to achieve cross-file usage of your MongoDB connection.
Init.mjs
/* ################ Controller ################ */
import ctrlLib from '../../controller/lib.mjs';
/* ################ MongoDB ################ */
import mongodb from 'mongodb';
/* ################ Logs ################ */
import log from 'fancy-log';
import chalk from 'chalk';
/** Init MongoDB connection */
export class Init {
/**
* Check if this is the first-time usage of this class.
* If true, set the class instance to this so that we always get the same instance.
* Then get the MongoDB details from config.yml and set them as globals.
* In the last step we return the class instance.
*/
constructor() {
if (Init.instance == null) Init.instance = this;
const config = ctrlLib.getConfig();
this.MongoURL = config.MongoDB.url;
this.MongoName = config.MongoDB.dbname;
({MongoClient: this.MongoClient} = mongodb);
return Init.instance;
}; // constructor(){
/** Connect to Database and return connection */
async connect() {
try {
const client = await this.MongoClient.connect(
this.MongoURL, {useNewUrlParser: true, useUnifiedTopology: true},
);
this.connection = {'db': client.db(this.MongoName), 'client': client};
return this.connection;
} // try {
catch (e) {
log( `${chalk.red.bold('❌ ERROR')} while try to connect to MongoDB DB
${chalk.white.bold('Error:\n')} ${e}` );
} // catch (e) {
}; // async connect() {
/**
* Return connection for cross file usage
* @return {object}
*/
getConnection() {return this.connection;};
}; // export class Init {
app.mjs
Make sure to create your MongoDB connection once, anywhere inside your project, so that you can use it later in other files.
/* ################ Services ################ */
import {Init} from './Init.mjs';
(async ()=>{
await new Init().connect();
})().catch(e=>{log('app.mjs - Catch error: ' + e);});
anyOtherFile.mjs
/* ################ Services ################ */
import {Init} from './Init.mjs';
/** Subclass of Search which contains lib functions */
class Lib {
/**
* Find data by using search query and return result.
* @param {string} collection - Name of collection
* @param {object} query - Search query
*/
async findOne(collection, query) {
const connection = new Init().getConnection();
return await connection.db.collection(collection).findOne(query);
}; // async findOne() {
}; // class Lib {
Updated for 2022 with the new MongoClient updates
mongoUtil.js (for the database connection; returns the database instance)
const { MongoClient } = require('mongodb');
const uri = "your database connection url";
var _db;
module.exports = {
connectToServer: function (callback) {
MongoClient.connect(uri, { useNewUrlParser: true }, function (err, client) {
_db = client.db('testdb');
return callback(err);
});
},
getDb: function () { //this returns database instance
return _db;
}
};
app.js (you can use it in any route or js file by importing mongoUtil)
var mongoUtil = require('./mongoUtil');
mongoUtil.connectToServer(function (err, client) {
if (err) console.log(err);
console.log(`server is running`);
insertData(); //or do functions and db queries in any js
});
async function insertData() { //Functions should be async
var database = mongoUtil.getDb();
var movies = database.collection('movies');
const doc = {
title: "Movie title",
content: "Movie content",
}
const result = await movies.insertOne(doc);
console.log(`A document was inserted with the _id: ${result.insertedId}`);
}
I tried @go-oleg's answer and it works pretty well. Inside getDb(), I make sure _db is defined, and if it's not, I call connectToServer() so that it gets defined again. After this I don't have to call connectToServer() in app.js, which makes my code clean.
let getDb = async() => {
if(_db) {
return _db
} else {
// assumes connectToServer() has been adapted to return a Promise that resolves to the db
_db = await connectToServer()
return _db
}
}
And then I simply call getDb() everywhere. From what I observed, it takes about 64 ms on the first call; after that, it takes about 2-6 ms every time.
I answered here because I don't have enough reputation to comment.
After a long effort, this is the approach that ended up working for me. Please follow this link; it is also a good solution:
https://mrvautin.com/re-use-mongodb-database-connection-in-routes/
Folks, in 2022 there is no need for reconnection logic; the Node.js MongoDB driver (v4+) handles this all for you.
You can simply connect as described in the official docs. Put this in a db.js file, then you can import client or db anywhere in your app:
import { MongoClient, ServerApiVersion } from 'mongodb'
const uri = `mongodb+srv://...`;
// Create a new MongoClient
export const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 });
export const db = client.db('your_db');
When making queries, use try/catch to capture potential connection errors.
try {
const res = await db.collection("testdata").insertOne({test: Math.random()});
console.log('Inserted', res);
} catch(e) {
console.error('MONGO ERROR', e);
}
AFAIK, the Mongo driver will keep retrying forever if the connection is lost.
Try it yourself: put the above code in a setInterval, turn off your internet connection for a while, then turn it back on; Mongo will automatically reconnect, even after hours of downtime. It will even submit some queries that were made while the connection was down.
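For reference, a minimal sketch of that setInterval test, assuming the db export from the db.js snippet above:
import { db } from './db.js';
// repeat the insert every few seconds; per the answer above, queries attempted while
// the connection is down may be submitted once it comes back
setInterval(async () => {
    try {
        const res = await db.collection('testdata').insertOne({ test: Math.random() });
        console.log('Inserted', res.insertedId);
    } catch (e) {
        console.error('MONGO ERROR', e);
    }
}, 5000);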
Updated for 2023
MongoDB Connection
const { MongoClient, ServerApiVersion } = require('mongodb');
const dbconfig = require('./config');
module.exports = {
client: client = new MongoClient(dbconfig.uri, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 }),
connectToDB: async () => {
try {
await client.connect()
console.log('connected!')
} catch (err) {
console.log('Err', err)
}
}
}
In your controller
const db = require('../config/mongodb.connection')
const hha_data = db.client.db('hha-sit').collection('hnh-data')
exports.addNewCustomer = async (req, res) => {
try {
await db.connectToDB()
let result = await hha_data.findOne({}, { account_id: 'MDB829001337' })
console.log('result', result)
} catch (err) {
console.error('Connection Error !', err)
} finally {
await db.client.close()
}
res.send('Hi')
}
Please feel free to revise it if you have any suggestions. :)
This approach is correct, and it can be improved in the following ways:
1. Wrap the MongoClient connect function inside a module and export it as a singleton object to be used across your application. This way, you can make sure only one connection is established to the MongoDB server and is reused across your modules.
2. Add error handling to your code to handle potential issues like a connection failure.
3. Use the MongoDB native driver's connection pooling feature instead of maintaining a single connection throughout the application's lifetime, as this can lead to resource exhaustion and poor performance.
This is an example of an improved implementation:
const MongoClient = require('mongodb').MongoClient;
let _db;
const connectToDb = async (url) => {
if (_db) return _db;
let client;
try {
client = await MongoClient.connect(url, {
useNewUrlParser: true,
useUnifiedTopology: true
});
_db = client.db();
} catch (err) {
console.error('Error connecting to MongoDB: ', err);
process.exit(1);
}
return _db;
};
module.exports = connectToDb;
const connectToDb = require('./db');
const userModule = async (app) => {
const db = await connectToDb('mongodb://localhost:27017/marankings');
return {
addUser: () => console.log('add user'),
getAll: () => 'all users'
};
};
module.exports = userModule;
const userModule = require('./userModule');
(async () => {
const users = await userModule();
console.log(users.getAll());
})();
