This is my account.js file:
accounts.mongoConnect = function () {
  // `MongoClient`, `MongoConfig`, `database` and `collection` are assumed
  // to be declared elsewhere in this module.
  MongoClient.connect(MongoConfig.uri, { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
    if (error) {
      throw error;
    }
    database = client.db(MongoConfig.dbName);
    collection = database.collection(MongoConfig.collections[0]);
    console.log("Connected to `" + MongoConfig.dbName + "`!");
  });
};
And this is my companies.js file:
companies.mongoConnect = function () {
  MongoClient.connect(MongoConfig.uri, { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
    if (error) {
      throw error;
    }
    database = client.db(MongoConfig.dbName);
    console.log(MongoConfig.collections);
    collection = database.collection(MongoConfig.collections[1]);
    console.log("Companies.js Connected to `" + MongoConfig.dbName + "`!");
  });
};
When I run my app.js, I only see the console log from account.js; there is no console.log from companies.js.
There's nothing wrong with your code; it looks fine from my side.
Have you finished setting up the connections?
It should be like this, with MongoDB connected once your Node.js server starts:
httpsServer.listen(serverConfig.port, () => {
  accounts.mongoConnect();
  companies.mongoConnect();
});
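As a side note, since both files repeat the same connect logic, one possible refactor (the helper file and its shape are assumptions, not the poster's code) is a single shared connect function:

// db-helper.js (hypothetical): one shared connect used by accounts.js and companies.js.
const { MongoClient } = require("mongodb");
const MongoConfig = require("./mongo-config"); // assumed config module

function mongoConnect(collectionIndex, label, callback) {
  MongoClient.connect(MongoConfig.uri, { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
    if (error) return callback(error);
    const database = client.db(MongoConfig.dbName);
    const collection = database.collection(MongoConfig.collections[collectionIndex]);
    console.log(label + " connected to `" + MongoConfig.dbName + "`!");
    callback(null, { database, collection });
  });
}

module.exports = { mongoConnect };

accounts.js would then call mongoConnect(0, "accounts.js", cb) and companies.js mongoConnect(1, "companies.js", cb).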
Related
I am trying to share the Mongo connection with other modules in my Node.js project. I keep getting either undefined or "is not a function" when attempting to use the exported client. I also have a question about detecting whether the connection is actually open before performing operations on the database.
It seems like app.locals would be the proper way to share the connection, but I could not get that working either. Below is what I have at the moment. I've tried this many ways; most of what I can find online exports the Mongo Node driver's method rather than the connection itself. The idea is to connect once and never disconnect until the app shuts down.
const client = new MongoClient(uri, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

async function connect () {
  app.locals.dbConnected = false;
  try {
    await client.connect();
    app.locals.dbConnected = true;
    module.exports = client;
  } catch (e) {
    console.error(e);
  }
}
Then, in another module, do something like:
await client.db('syslogs').collection('production').insertOne(doc);
Is it possible to share the connection?
You could do something like this:
const client = new MongoClient(uri, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

let __inst = null;

export default new Promise((resolve, reject) => {
  if (__inst !== null) return resolve(__inst);
  // connect() is the key here; errors, close etc. can also be
  // handled through the client's events
  client.connect((err, mongoInst) => {
    if (err) return reject(err);
    __inst = mongoInst;
    resolve(__inst);
  });
});
Then in another module you can use the exported client as you like.
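A minimal consuming-module sketch, assuming the promise above is exported from a file called db.js (the file name is an assumption; the db/collection names are taken from the question):

// another-module.js (hypothetical): await the shared promise to get the client.
import clientPromise from "./db.js";

export async function insertDoc(doc) {
  const client = await clientPromise; // resolves once the connection is open
  return client.db("syslogs").collection("production").insertOne(doc);
}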
Thanks.
I just got it working using app.locals.
index.js
const express = require("express");
const { MongoClient } = require("mongodb");

// `uri`, `PORT` and `HOST` are assumed to be defined elsewhere
// (e.g. from environment variables).
const app = express();
const client = new MongoClient(uri, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

(async () => {
  app.locals.dbConnected = false;
  try {
    await client.connect();
    console.log("Connected to DB");
    app.locals.client = client;
    app.listen(PORT, HOST, () => {
      console.log(`Running on http://${HOST}:${PORT}`);
    });
  } catch (e) {
    console.error(e);
  }
})();
Then in my module:
async function index (req, res) {
  try {
    let db = req.app.locals.client.db("admin");
    await db.command({ ping: 1 });
    console.log("pinged admin database");
  } catch (err) {
    console.log(err);
  }
}
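For completeness, a sketch of how that handler might be mounted in index.js (the route path and module file are assumptions):

// index.js (continued): mount the handler above on a route.
const { index } = require("./routes/status"); // hypothetical module exporting the handler
app.get("/status", index);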
I'm trying to decouple my Express server start from the MongoDB connection process.
mongodb.connect(process.env.CONNECTIONSTRING, { useNewUrlParser: true, useUnifiedTopology: true }, function (err, client) {
  if (err) {
    throw new Error(err);
  }
  module.exports = client;
  const server = require("./server");
  server.start(opts, t => {
    console.log(`server is up 4000`);
  });
});
So instead of this single file, I would like to have two files: one for the MongoDB connection and another for starting the server. When I did this, I got an error related to MongoDB; I think the server started even before the MongoDB connection was established.
Any idea how to solve this?
Wrap it in a promise and call it wherever you want.
Create a file named db.js (or whatever else you want) and require it in the file where you need it. Then wrap the callback in a promise and export it for use outside the file. Example below.
function initMongo() {
  // Wrap the connect call in the promise (not the other way around),
  // so initMongo() actually returns something awaitable.
  return new Promise((resolve, reject) => {
    mongodb.connect(process.env.CONNECTIONSTRING, { useNewUrlParser: true, useUnifiedTopology: true }, function (err, client) {
      if (err) {
        return reject(err);
      }
      return resolve(client);
    });
  });
}

module.exports = { initMongo };
Then, in your init function, you could call:
const server = require("./server");
const mongoDb = require("./db");

async function init() {
  let client;
  try {
    client = await mongoDb.initMongo();
  } catch (e) {
    // could not connect to db, so don't start the server
    return;
  }
  server.start(opts, t => {
    console.log(`server is up 4000`);
  });
}
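If other modules also need the client, one hedged option (the holder file and its name are assumptions) is to stash it on a small shared object once initMongo() resolves:

// db-holder.js (hypothetical): a mutable holder any module can require.
const holder = { client: null };
module.exports = holder;

// Then inside init(), after a successful connect:
//   require("./db-holder").client = client;
// Consumers use holder.client.db(...) once startup has completed.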
I'm trying to build an application which, at the moment, simply console logs Mongo documents in the client browser. I understand Mongo change streams fine on the server side, and I have the following code to watch for inserts into the collection; when a change occurs, the dataset is reloaded:
MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
  const db = client.db('test');
  const collection = db.collection('options');
  const changeStream = collection.watch();
  changeStream.on('change', next => {
    // If there is a change in the collection, reload the data.
    reload();
  });
});
function reload() {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    const db = client.db('test');
    const collection = db.collection('options');
    collection.find({}).toArray(function (err, docs) {
      client.close();
      console.log(docs);
    });
  });
}
However, I'm struggling to replicate the same on the client side. So far I have created an XHR request in a client-side js file, which looks like this:
$(window).on('load', () => {
  function load_options() {
    var data = null;
    var xhr = new XMLHttpRequest();
    xhr.withCredentials = true;
    xhr.addEventListener("readystatechange", function () {
      if (this.readyState === 4 && this.responseText !== "") {
        data = $.parseJSON(this.responseText);
        $.each(data, function (i, item) {
          console.log(item);
        });
      }
    });
    xhr.open("GET", "/dashboard/load-options");
    xhr.setRequestHeader("cache-control", "no-cache");
    xhr.send(data);
  }
  load_options();
});
And I have changed the server-side code to look like this:
router.get('/load-options', (req, res) => {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    const db = client.db('test');
    const collection = db.collection('options');
    const changeStream = collection.watch();
    changeStream.on('change', next => {
      // If there is a change in the collection, reload the data.
      reload();
    });
  });

  function reload() {
    MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
      const db = client.db('test');
      const collection = db.collection('options');
      collection.find({}).toArray(function (err, docs) {
        client.close();
        res.send(docs);
      });
    });
  }
});
The desired outcome is that every time I insert a new document into the collection, the console logs the entire collection again with the new changes. This works for the first insert and the console logs the collection, but after that I get the following error:
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
I know this is because my server-side code now has
res.send(docs)
so I'm trying to send the headers again on every change after the first request. My issue is that I don't know how I should be sending the data from the server to the client to prevent this error.
Can anyone guide me in the right direction?
Sorry, this is too long an answer for a comment.
The error means that your response has already been sent with res.send(docs); once that is done, you cannot modify the response.
Some possible solutions are:
use a WebSocket
use a polling interval on /load-options (a sketch follows this list)
use a stream on the output to send data to the client from the Mongo change stream
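For the polling option, a minimal client-side sketch in the question's jQuery style (the 5-second interval is an assumption; the server route would then simply run the query and res.send the docs once per request):

// Poll the endpoint on an interval instead of holding one response open.
setInterval(() => {
  $.getJSON("/dashboard/load-options", (docs) => {
    $.each(docs, (i, item) => console.log(item));
  });
}, 5000); // assumed interval; tune to taste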
The last solution, which is not well known, would be:
const { Readable } = require('stream');

const out = new Readable({ read() {} }); // no-op read; data is pushed manually

const reload = () => {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    const db = client.db('test');
    const collection = db.collection('options');
    collection.find({}).toArray(function (err, docs) {
      client.close();
      // a plain Readable only accepts strings/Buffers, so serialize the docs
      out.push(JSON.stringify(docs));
    });
  });
};

changeStream.on('change', next => {
  reload();
});

res.type('application/json');
out.pipe(res); // pipe the stream into the response instead of send()
My advice would also be to avoid calling MongoClient.connect each time the data is modified; a sketch of that follows.
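A hedged sketch, assuming the route module can hold one client for its lifetime:

// Connect once at module startup and reuse the db handle inside reload()
// (a sketch, not the poster's code).
let sharedDb = null;
MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
  if (err) throw err;
  sharedDb = client.db('test');
});

const reload = () => {
  if (!sharedDb) return; // connection not ready yet
  sharedDb.collection('options').find({}).toArray((err, docs) => {
    if (!err) out.push(JSON.stringify(docs));
  });
};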
When I try to save a document in a collection, or list the collections in a MongoDB instance running remotely, it doesn't work, but it works when I connect to MongoDB locally. save() doesn't show whether it threw an error or succeeded.
Your help is appreciated.
Below is my code:
var mongoose = require('mongoose');
var dbName = 'OST';
var connectionString = 'mongodb://vm-1b98-f53f.nam.nsroot.net:32017/OST?ssl=true&sslverifycertificate=false';

mongoose.connect(connectionString);

mongoose.connection.on('error', function (err) {
  console.log('Mongoose default connection error: ' + err);
});

mongoose.connection.on('connected', function () {
  console.log('Mongoose default connection open to ' + connectionString);

  var Region = require('./Region.js');
  var r = new Region({ NodeID: 8687, RegionName: 'EMEA' });
  r.save(function (err, regionObj) {
    if (err) {
      console.log('error');
      throw err;
    } else {
      console.log('saved successfully:', regionObj);
    }
  });

  mongoose.connection.db.collectionNames(function (error, names) {
    if (error) {
      console.log(error);
    } else {
      names.map(function (cname) {
        console.log(cname.name);
      });
    }
  });
});

process.on('SIGINT', function () {
  mongoose.connection.close(function () {
    process.exit(0);
  });
});
The following code tries to load data from a CSV file into a MongoDB instance running on localhost.
Problem: it loads a different number of documents on every run (always fewer than the total number of records in the CSV).
var csv = require('csv');
// Server and Db come from the mongodb driver (imports assumed from context).
var Server = require('mongodb').Server;
var Db = require('mongodb').Db;

var server = new Server('localhost', 27017, { auto_reconnect: true, poolSize: 1 });
var db = new Db('test', server);

db.open(function (err, db) {
  if (!err) {
    // Database connection is established.
    db.collection('teststocks', function (err, collection) {
      if (!err) {
        // Stocks collection is connected; open the file and insert the docs.
        console.log("Trying to load from " + process.argv[2]);
        csv()
          .fromPath(process.argv[2], {
            columns: true
          })
          .on('data', function (data, index) {
            collection.insert(data, { safe: true }, function (error, collection) {
              if (error) { console.log("Error inserting record : " + error); }
            });
            console.log("Inserted data for " + index);
          })
          .on('error', function (error) {
            db.close();
            console.log("Error: " + error);
          })
          .on('end', function (count) {
            console.log("Finished all writing.");
            db.close();
          });
      }
    });
  }
});
P.S.: I am able to load the data using the mongoimport utility, but being a newcomer to Node.js and MongoDB, I'd like to understand the mistake I am making in the code above.
It's likely because you're closing the database connection after csv() finishes reading. But as collection.insert is asynchronous, there's no guarantee each call has finished before the CSV is done and db.close() is called.
One option is to collect the csv data into an Array, then insert all of them at once:
var docs = [];

csv()
  // ...
  .on('data', function (data, index) {
    docs.push(data); // or possibly: docs[index] = data;
  })
  // ...
  .on('end', function () {
    console.log("Finished reading CSV.");
    collection.insert(docs, { safe: true }, function (error, inserted) {
      console.log("Finished all writing.");
      db.close();
    });
  });
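Another option, in the same spirit but without buffering everything in memory, is to count in-flight inserts and close only once the CSV has ended and the counter drains. A sketch against the same csv()/collection objects as above:

// Track how many inserts are still running; close the db only when
// the CSV has finished AND every insert callback has fired.
var pending = 0;
var done = false;

function maybeClose() {
  if (done && pending === 0) {
    console.log("Finished all writing.");
    db.close();
  }
}

csv()
  // ...
  .on('data', function (data) {
    pending++;
    collection.insert(data, { safe: true }, function (error) {
      if (error) { console.log("Error inserting record: " + error); }
      pending--;
      maybeClose();
    });
  })
  .on('end', function () {
    done = true;
    maybeClose();
  });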