I'm trying to build an application which, at the moment, simply console-logs Mongo documents in the client browser. I understand Mongo change streams fine on the server side and have the following code to watch for inserts into the collection; when a change occurs, the dataset is reloaded:
MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
  const db = client.db('test');
  const collection = db.collection('options');
  const changeStream = collection.watch();
  changeStream.on('change', next => {
    // If there is a change in the collection, reload the data.
    reload();
  });
});

function reload() {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    const db = client.db('test');
    const collection = db.collection('options');
    collection.find({}).toArray(function (err, docs) {
      client.close();
      console.log(docs)
    });
  });
};
However, I'm struggling to replicate the same behaviour on the client side. So far I have created an XHR request in a client-side JS file, which looks like this:
$(window).on('load', () => {
  function load_options() {
    var data = null;
    var xhr = new XMLHttpRequest();
    xhr.withCredentials = true;
    xhr.addEventListener("readystatechange", function () {
      if (this.readyState === 4) {
        if (this.responseText === "") {
        } else {
          data = $.parseJSON(this.responseText);
          $.each(data, function (i, item) {
            console.log(item)
          });
        }
      }
    });
    xhr.open("GET", "/dashboard/load-options");
    xhr.setRequestHeader("cache-control", "no-cache");
    xhr.send(data);
  };
  load_options();
});
And I have changed the server side code to look like this:
router.get('/load-options', (req, res) => {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    const db = client.db('test');
    const collection = db.collection('options');
    const changeStream = collection.watch();
    changeStream.on('change', next => {
      // If there is a change in the collection, reload the data.
      reload();
    });
  });

  function reload() {
    MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
      const db = client.db('test');
      const collection = db.collection('options');
      collection.find({}).toArray(function (err, docs) {
        client.close();
        res.send(docs);
      });
    });
  };
});
The desired outcome is that every time I insert a new document into the collection, the console logs the entire collection again with the new changes. This works for the first insert and the console logs the collection, but after that I get the following error:
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
I know this is because my server side code now has
res.send(docs)
So I'm effectively trying to send a response again after the first request has already been answered. My issue is that I don't know how I should be sending the data from the server to the client to prevent this error.
Can anyone guide me in the right direction?
Sorry, this answer is too long for a comment.
The error means that your response has already been sent with res.send(docs); once that's done, you cannot send it again on the same request.
Some possible solutions are:
use a websocket
use a polling interval on /load-options (a minimal sketch follows this list)
use a stream on the output to send data to the client from the mongo stream.
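For the polling option, a minimal sketch (keeping your existing load_options and route names, and assuming a 5-second interval) could look like this: the route answers each request exactly once, so it never tries to send a second response, and the client simply re-fetches on a timer.

// Server side: respond exactly once per request, no change stream needed here.
router.get('/load-options', (req, res) => {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    if (err) return res.status(500).send(err.message);
    client.db('test').collection('options').find({}).toArray((err, docs) => {
      client.close();
      res.json(docs); // one response per request, so no "headers already sent" error
    });
  });
});

// Client side (inside the same load handler where load_options is defined):
setInterval(load_options, 5000);

Polling is the least efficient of the three options, but it is the smallest change to the code you already have.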
The last solution, which is less well known, would be:
const { Readable } = require('stream');

// Readable with a no-op read(); we push data into it manually.
const out = new Readable({ read() {} });

const reload = () => {
  MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
    const db = client.db('test');
    const collection = db.collection('options');
    collection.find({}).toArray(function (err, docs) {
      client.close();
      // A Readable can only push strings/Buffers unless objectMode is set.
      out.push(JSON.stringify(docs) + '\n');
    });
  });
};

changeStream.on('change', next => {
  reload();
});

res.type('application/json');
out.pipe(res); // pipe the stream instead of res.send(), so the response stays open
I would also advise against calling MongoClient.connect every time the data is modified.
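As a rough sketch of that advice (assuming the same MongoConnection.url, database, and collection as in the question), you can connect once when the module loads and reuse that client for both the change stream and every reload:

// Connect once at startup and keep the client around.
MongoClient.connect(MongoConnection.url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, client) => {
  if (err) throw err;
  const collection = client.db('test').collection('options');

  // Reuse the same client for every reload instead of reconnecting each time.
  const reload = () => {
    collection.find({}).toArray((err, docs) => {
      if (!err) console.log(docs);
    });
  };

  collection.watch().on('change', () => reload());
});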
Related
I am trying to share the Mongo connection with other modules in my Node.js project. I keep getting either undefined or "is not a function" errors when attempting to use the exported client. I also have a question around detecting whether the connection is in fact open before performing operations on the database.
It seems like using app.locals would be the proper way to share the connection, but I could not get that working either. Below is what I have at the moment; I've tried this many ways. Most of what I can find online seems to export the Mongo Node driver's method, not the connection itself. The idea is to connect once and never disconnect until the app shuts down.
const client = new MongoClient(uri, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

async function connect () {
  app.locals.dbConnected = false;
  try {
    await client.connect();
    app.locals.dbConnected = true;
    module.exports = client;
  } catch (e) {
    console.error(e);
  }
};
Then in another module I do something like:
await client.db('syslogs').collection('production').insertOne(doc);
Is it possible to share the connection?
You could do something like the below:
const client = new MongoClient(uri, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

let __inst = null;

export default new Promise((resolve, reject) => {
  if (__inst !== null) return resolve(__inst);
  // connect() is the key here: the promise resolves only once the
  // connection is open, and errors are surfaced through reject.
  client.connect((err, mongoInst) => {
    if (err) return reject(err);
    __inst = mongoInst;
    resolve(__inst);
  });
});
Then in other modules you can use the exported client however you want.
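For example (assuming the promise above is exported from a db.js module), a consuming module might await it before running a query:

// otherModule.js — hypothetical consumer of the exported promise
import clientPromise from './db';

async function insertLog(doc) {
  const client = await clientPromise; // resolves once the connection is open
  await client.db('syslogs').collection('production').insertOne(doc);
}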
Thanks.
I just got it working using app.locals.
index.js
const { MongoClient } = require("mongodb");

const client = new MongoClient(uri, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

(async () => {
  app.locals.dbConnected = false;
  try {
    await client.connect();
    console.log("Connected to DB");
    app.locals.client = client;
    app.listen(PORT, HOST, () => {
      console.log(`Running on http://${HOST}:${PORT}`);
    });
  } catch (e) {
    console.error(e);
  }
})();
Then in my module:
async function index (req, res) {
  try {
    let db = req.app.locals.client.db("admin");
    await db.command({ ping: 1 });
    console.log("pinged admin database");
  } catch (err) {
    console.log(err);
  }
}
This is my account.js file
accounts.mongoConnect = function () {
  MongoClient.connect(MongoConfig.uri, { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
    if (error) {
      throw error;
    }
    database = client.db(MongoConfig.dbName);
    collection = database.collection(MongoConfig.collections[0]);
    console.log("Connected to `" + MongoConfig.dbName + "`!");
    //console.log(collection)
  });
}
And this is my companies.js file:
companies.mongoConnect = function () {
  MongoClient.connect(MongoConfig.uri, { useNewUrlParser: true, useUnifiedTopology: true }, (error, client) => {
    if (error) {
      throw error;
    }
    database = client.db(MongoConfig.dbName);
    console.log(MongoConfig.collections);
    collection = database.collection(MongoConfig.collections[1]);
    console.log("Companies.js Connected to `" + MongoConfig.dbName + "`!");
  });
}
When I run my app.js there is a console log from account.js only; there is no console.log from companies.js.
There's nothing wrong with your code. I have checked it and it looks fine from my side.
Have you finished setting up the connection?
It should be like this, connecting MongoDB to your Node.js app:
httpsServer.listen(serverConfig.port, () => {
  accounts.mongoConnect();
  companies.mongoConnect();
});
I'm trying to decouple my Express server start from the MongoDB connection process.
mongodb.connect(process.env.CONNECTIONSTRING, { useNewUrlParser: true, useUnifiedTopology: true }, function (err, client) {
  if (err) {
    throw new Error(err)
  }
  module.exports = client
  const server = require("./server")
  server.start(opts, t => {
    console.log(`server is up 4000`)
  })
})
So instead of this single file I would like to have two files: one used for the MongoDB connection, and the other for starting the server. When I did this I got an error related to MongoDB, I think because the server started even before the MongoDB connection was established.
Any idea on how to solve this?
Wrap it in a promise and call it wherever you want.
Create a file named db.js (or whatever else you want) and require it in the file that needs it. Then wrap the callback in a promise and export it for use outside the file. Example below.
function initMongo() {
  // Wrap the callback-style connect in a promise and return it.
  return new Promise((resolve, reject) => {
    mongodb.connect(process.env.CONNECTIONSTRING, { useNewUrlParser: true, useUnifiedTopology: true }, function (err, client) {
      if (err) {
        return reject(err);
      }
      return resolve(client);
    });
  });
}

module.exports = { initMongo };
Then in your init function, you could call
const server = require("./server");
const mongoDb = require("./db");
async init() {
let client;
try {
client = await mongoDb.initMongo()
} catch(e) {
// could not connect to db
}
server.start(opts, t => {
console.log(`server is up 4000`)
})
}
I'm trying to create a script which automatically fetches data from an API every day at a specific time and stores it in a MongoDB collection. For that purpose I use node-schedule.
I just started exploring MongoDB and now I'm looking for a way to automatically receive data from the API and store it in MongoDB.
I saw that there is MongoDB-Cron.
This is part of the code that I use to receive data from the API:
var j = schedule.scheduleJob("*/55 20 * * *", function() {
  request(
    "GET",
    "http://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?CMC_PRO_API_KEY=API-KEY-HERE"
  )
    .then((r1) => {
      var x1 = JSON.parse(r1.target.responseText);
      var BTCdata = x1.data.find((d) => d.symbol === "BTC").quote.USD.volume_24h; // creating a variable to store a BTC request from API
      console.log(BTCdata);
    })
    .catch((err) => {
      console.log(err);
    });
});

function request(method, url) {
  return new Promise(function(resolve, reject) {
    var xhr = new XMLHttpRequest();
    xhr.open(method, url);
    xhr.onload = resolve;
    xhr.onerror = reject;
    xhr.send();
  });
}
Here is the part that inserts data into the MongoDB collection.
I don't quite understand how I can automate this insert function. Can I automate it so that it sends data to MongoDB every day at a specific time?
var url = "mongodb+srv://name:password#cluster0-1kunr.mongodb.net/<dbname>?retryWrites=true&w=majority";
MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, function(err, db) {
if (err) throw err;
var dbo = db.db("Crypto");
var myobj = { Name: "BTC", Volume: "BTCdata" };
dbo.collection("Crypto-Values").insertOne(myobj, function(err, res) {
if (err) throw err;
console.log("1 document inserted");
db.close();
});
});
EDIT1:
Is the code supposed to be like this?
var MongoClient = require('mongodb').MongoClient;
var MongoCron = require('mongodb-cron');

const saveToDatabase = (BTCdata) => {
  var url = "mongodb+srv://name:password#cluster0-1kunr.mongodb.net/<dbname>?retryWrites=true&w=majority";
  MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, function(err, db) {
    var j = schedule.scheduleJob("*/55 20 * * *", function() {
      request(
        "GET",
        "http://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?CMC_PRO_API_KEY=API-KEY-HERE"
      )
        .then((r1) => {
          var x1 = JSON.parse(r1.target.responseText);
          var BTCdata = x1.data.find((d) => d.symbol === "BTC").quote.USD.volume_24h; // creating a variable to store a BTC request from API
          console.log(BTCdata);
          // Saving to database
          saveToDatabase(BTCdata);
        })
        .catch((err) => {
          console.log(err);
        });
    });
  });
};
I don't think you need to trouble yourself with automating the DB insertion separately; you can include the insertion in the cron job itself. Perhaps encapsulate the data-persisting logic in a function:
const saveToDatabase = (BTCdata) => {
  var url = "mongodb+srv://name:password#cluster0-1kunr.mongodb.net/<dbname>?retryWrites=true&w=majority";
  MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, function(err, db) {
    if (err) throw err;
    var dbo = db.db("Crypto");
    var myobj = { Name: "BTC", Volume: BTCdata };
    dbo.collection("Crypto-Values").insertOne(myobj, function(err, res) {
      if (err) throw err;
      console.log("1 document inserted");
      db.close();
    });
  });
}
You should call the function with the BTCdata after the API call in the cron job. The program should be something like this:
const { MongoClient } = require('mongodb');
const schedule = require('node-schedule');
// XMLHttpRequest is not built into Node.js; this assumes a polyfill such as
// the xmlhttprequest package (or whichever one the original code already uses).
const { XMLHttpRequest } = require('xmlhttprequest');

const saveToDatabase = function (BTCdata) {
  const url = 'mongodb+srv://name:password#cluster0-1kunr.mongodb.net/<dbname>?retryWrites=true&w=majority';
  MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, (err, db) => {
    if (err) throw err;
    const dbo = db.db('Crypto');
    const myobj = { Name: 'BTC', Volume: BTCdata };
    dbo.collection('Crypto-Values').insertOne(myobj, (error, res) => {
      if (error) throw error;
      console.log('1 document inserted');
      db.close();
    });
  });
};

function request(method, url) {
  return new Promise(((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open(method, url);
    xhr.onload = resolve;
    xhr.onerror = reject;
    xhr.send();
  }));
}

const j = schedule.scheduleJob('*/55 20 * * *', () => {
  request(
    'GET',
    'http://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?CMC_PRO_API_KEY=API-KEY-HERE',
  )
    .then((r1) => {
      const x1 = JSON.parse(r1.target.responseText);
      const BTCdata = x1.data.find((d) => d.symbol === 'BTC').quote.USD.volume_24h; // creating a variable to store a BTC request from API
      console.log(BTCdata);
      // Saving to database
      saveToDatabase(BTCdata);
    })
    .catch((err) => {
      console.log(err);
    });
});
Remember to catch errors where necessary.
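For instance (just a sketch of that advice), instead of throwing inside the insert callback you could log and clean up, so a single failed insert does not crash the scheduled job:

dbo.collection('Crypto-Values').insertOne(myobj, (error, res) => {
  if (error) {
    console.error('Insert failed:', error); // report the error instead of throwing
    db.close();
    return;
  }
  console.log('1 document inserted');
  db.close();
});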
I am separating the connect function of MongoDB into a separate module, so that the MongoDB connection is reusable. The issue is that I could not get the client/DB variable outside the connect function; it shows undefined.
var MongoClient = require('mongodb').MongoClient;

var _client;
var mongoURL = "mongodb://localhost:27017/";

module.exports = {
  connectToMongoServer: (callback) => {
    MongoClient.connect(mongoURL, { useNewUrlParser: true }, function (err, client) {
      _client = client;
      return callback(err);
    });
  },
  getClient: () => {
    return _client;
  }
};
Within the connect function, _client contains the client details, but if I return it using getClient, it shows undefined.
MongoDB - v3.6.5
Node - v9.9.0
I've made up a snippet which should work the same as your code, and it works.
So I think the problem is how you are calling your function getClient(); are you sure you are calling it after it gets connected?
var _client;

function someAsyncFunc(callback) {
  setTimeout(() => callback(false, 'client'), 500);
}

const file = {
  connectToMongoServer: (callback) => {
    someAsyncFunc(function(err, client) {
      _client = client;
      return callback(err);
    });
  },
  getClient: () => {
    return _client;
  }
};

console.log('display one :', file.getClient());

file.connectToMongoServer((err) => {
  console.log('display error :', err);
  console.log('display two :', file.getClient());
});
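Applied back to your own module (assuming it is saved as mongo.js, and with placeholder database and collection names), the same rule means getClient() must only be called after the connect callback has fired:

const mongo = require('./mongo'); // hypothetical filename for the module in the question

mongo.connectToMongoServer((err) => {
  if (err) throw err;
  // _client is only populated once this callback runs.
  const client = mongo.getClient();
  client.db('mydb').collection('mycollection').find({}).toArray((err, docs) => {
    console.log(docs);
  });
});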