MongoDB change stream times out if database is down for some time - node.js

I am using a MongoDB change stream in Node.js. Everything works fine, but if the database goes down and takes more than 10-15 seconds to come back up, the change stream throws a timeout error. Here is my change stream watcher code:
Service.prototype.watcher = function( db ){
    let collection = db.collection('tokens');
    let changeStream = collection.watch({ fullDocument: 'updateLookup' });
    let resumeToken, newChangeStream;
    changeStream.on('change', next => {
        resumeToken = next._id;
        console.log('data is ', JSON.stringify(next))
        changeStream.close();
        // console.log('resumeToken is ', JSON.stringify(resumeToken))
        newChangeStream = collection.watch({ resumeAfter : resumeToken });
        newChangeStream.on('change', next => {
            console.log('insert called ', JSON.stringify( next ))
        });
    });
};
On the database end I have handled the disconnect/reconnect events with this code:
this.db.on('reconnected', function () {
    console.info('MongoDB reconnected!');
});
this.db.on('disconnected', function() {
    console.warn('MongoDB disconnected!');
});
But I am not able to handle the change stream watcher so that it stops when the database is down and starts again when the database reconnects. Is there a better way to do this?

What you want to do is to encapsulate the watch() call in a function. This function will then call itself on error, to rewatch the collection using a previously saved resume token. What is missing from the code you have is the error handler. For example:
const MongoClient = require('mongodb').MongoClient
const uri = 'mongodb://localhost:27017/test?replicaSet=replset'

var resume_token = null
run()

function watch_collection(con, db, coll) {
    console.log(new Date() + ' watching: ' + coll)
    con.db(db).collection(coll).watch({resumeAfter: resume_token})
        .on('change', data => {
            console.log(data)
            resume_token = data._id
        })
        .on('error', err => {
            console.log(new Date() + ' error: ' + err)
            watch_collection(con, db, coll)
        })
}

async function run() {
    const con = await MongoClient.connect(uri, {"useNewUrlParser": true})
    watch_collection(con, 'test', 'test')
}
Note that watch_collection() contains the watch() method along with its handler. On change, it will print the change and store the resume token. On error, it will call itself to rewatch the collection again.

This is the solution I developed. Add a stream.on('error') handler so the application does not crash when there is an error, and restart the stream when the database reconnects. Also save the resume token to a file for every event. This helps when the application crashes or is stopped: if x records were added while it was down, then on restart you just read the last resume token from the file and start the watcher from there, so it picks up every record inserted after that point and no record is missed. Here is the code:
var rsToken;
try {
    rsToken = await this.getResumetoken()
} catch (error) {
    rsToken = null;
}

if (!rsToken)
    changeStream = collection.watch({ fullDocument: 'updateLookup' });
else
    changeStream = collection.watch({ fullDocument: 'updateLookup', resumeAfter : rsToken });

changeStream.on('change', next => {
    resumeToken = next._id;
    this.saveTokenInfile(resumeToken)
    cs_processor.process( next )
});

changeStream.on('error', err => {
    console.log('changestream error ')
})
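The getResumetoken() and saveTokenInfile() helpers are not shown above; here is a minimal sketch of what they could look like, assuming the token is kept in a plain JSON file on disk and attached to the same Service prototype (the file path is an arbitrary choice, not part of the original answer):

const fs = require('fs');
const TOKEN_FILE = './resume_token.json';   // assumed location

Service.prototype.saveTokenInfile = function (token) {
    // Persist the latest resume token so a restarted process can resume the stream.
    fs.writeFileSync(TOKEN_FILE, JSON.stringify(token));
};

Service.prototype.getResumetoken = async function () {
    // Throws if the file does not exist yet; the caller above then falls back to null.
    return JSON.parse(fs.readFileSync(TOKEN_FILE, 'utf8'));
};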

Related

Query was already executed, MongooseError

I am making a transaction controller in NodeJS but when I send data through postman I get this error:
MongooseError: Query was already executed: Customer.updateOne({ name: 'Axel' }, { '$set': { balance: 98...
at model.Query._wrappedThunk [as _updateOne] (C:\Users\m4afy\Desktop\the spark foundation\Banking system\node_modules\mongoose\lib\helpers\query\wrapThunk.js:23:19)
at C:\Users\m4afy\Desktop\the spark foundation\Banking system\node_modules\kareem\index.js:494:25
at process.processTicksAndRejections (node:internal/process/task_queues:77:11) {
originalStack: 'Error\n' +
' at model.Query._wrappedThunk [as _updateOne] (C:\\Users\\m4afy\\Desktop\\the spark foundation\\Banking system\\node_modules\\mongoose\\lib\\helpers\\query\\wrapThunk.js:27:28)\n' +
' at C:\\Users\\m4afy\\Desktop\\the spark foundation\\Banking system\\node_modules\\kareem\\index.js:494:25\n' +
' at process.processTicksAndRejections (node:internal/process/task_queues:77:11)'
}
my Transaction code goes as follow:
const Transaction = require('../models/transaction')
const Customer = require('../models/customers')

const cashTransaction = async (req, res, next) => {
    const {from, to, amount} = req.body
    try {
        let sender = await Customer.findOne({'name' : `${from}`})
        let senderBalance = Number(sender.balance) - Number(amount)
        await Customer.updateOne({name : from}, {balance : senderBalance}, err => {
            if (err) {
                console.log(err)
                res.status(500).send('Could not update sender information')
            } else {
                console.log('Sender information updated');
            }
        })

        let receiver = await Customer.findOne({name : to})
        let receiverBalance = Number(receiver.balance) + Number(amount)
        await Customer.updateOne({name : to}, {balance : receiverBalance}, err => {
            if (err) {
                console.log(err);
                res.status(500).send('Could not update receiver information')
            } else {
                console.log('receiver information updated');
            }
        })

        const transaction = new Transaction({
            from,
            to,
            amount
        })
        await transaction.save()
        res.status(200).json({transaction : {transaction} , message : 'transaction saved'})
    } catch (error) {
        console.log(error);
        res.status(500).send('An Error occured');
    }
}
How can I run the update multiple times? It worked once, but then I started getting this error. Any help?
Using await and a callback simultaneously will result in the query executing twice.
The Model.updateOne method returns a query object. Passing a callback function causes the query to be immediately executed and then the callback is called. Await will likewise cause the query to be executed, and will return the result.
When you use both at the same time, both try to execute the query, but a specific instance of a query can only be executed once, hence the error.
You might try using await inside of a try/catch instead of a callback.
Each call to updateOne instantiates a new query object, so you should be able to perform both updates this way.
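For example, here is a minimal sketch of the same controller using only await (no callbacks), reusing the Customer and Transaction models from the question; error responses are simplified:

const cashTransaction = async (req, res, next) => {
    const { from, to, amount } = req.body
    try {
        const sender = await Customer.findOne({ name: from })
        const receiver = await Customer.findOne({ name: to })

        // Each updateOne call creates (and, once awaited, executes) its own query object,
        // so running both updates in sequence is fine.
        await Customer.updateOne({ name: from }, { balance: Number(sender.balance) - Number(amount) })
        await Customer.updateOne({ name: to }, { balance: Number(receiver.balance) + Number(amount) })

        const transaction = await new Transaction({ from, to, amount }).save()
        res.status(200).json({ transaction, message: 'transaction saved' })
    } catch (error) {
        console.log(error)
        res.status(500).send('An Error occured')
    }
}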

Mongodb native connections from nodejs return undefined databases list

I'm just starting to use Mongodb without mongoose (to get away from the schemas), and wanted to create a simple module with various exported functions to use in the rest of my app. I've pasted the code below.
The problem I'm having is that the databasesList.databases comes back as undefined, and I'm not sure why. There should be 2 databases on my cluster, and one collection in each database.
As a tangential question, I thought maybe I would check the collections instead (now commented out), but though I found this page (https://docs.mongodb.com/manual/reference/method/db.getCollectionNames/) the function getCollectionNames seems not to exist. Now I'm wondering if I'm using the wrong documentation and that is why my databases are coming back undefined.
const client = new MongoClient(uri)

const connection = client.connect( function (err, database) {
    if (err) throw err;
    else if (!database) console.log('Unknown error connecting to database');
    else {
        console.log('Connected to MongoDB database server');
    }
});

module.exports = {
    getDatabaseList: function() {
        console.log('start ' + client);
        databasesList = client.db().admin().listDatabases();
        //collectionList = client.db().getCollectionNames();
        //console.log("Collections: " + collectionList);
        console.log("Databases: " + databasesList.databases);
        //databasesList.databases.forEach(db => console.log(` - ${db.name}`));
    }
}
Your code is correct; you just need to change a few things.
module.exports = {
    getDatabaseList: async function() {
        console.log('start ' + client);
        databasesList = await client.db().admin().listDatabases();
        //collectionList = await client.db().getCollectionNames();
        //console.log("Collections: " + collectionList);
        console.log("Databases: " + databasesList.databases);
        databasesList.databases.forEach(db => console.log(` - ${db.name}`));
    }
}
You have to declare the function as async and use await on the listDatabases() call.
The async and await keywords enable asynchronous, promise-based behaviour to be written in a cleaner style, avoiding the need to explicitly configure promise chains.
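For instance, the caller also has to await the exported function, since it now returns a promise (a minimal usage sketch; the module path './db' is an assumption):

// caller.js (hypothetical file)
const db = require('./db');   // the module exporting getDatabaseList above

(async () => {
    await db.getDatabaseList();   // logs the database list once listDatabases() resolves
})();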
You can use this modular approach to build your database access code:
index.js: Runs your database application code, e.g. listing database names, listing collection names, and reading from a collection.
const connect = require('./database');
const dbFunctions = require('./dbFunctions');

const start = async function() {
    const connection = await connect();
    console.log('Connected...');
    const dbNames = await dbFunctions.getDbNames(connection);
    console.log(await dbNames.databases.map(e => e.name));
    const colls = await dbFunctions.getCollNames(connection, 'test');
    console.log(await colls.map(e => e.name));
    console.log(await dbFunctions.getDocs(connection, 'test', 'test'));
};

start();
database.js: Creates a connection object. This connection is used for all your database access code. In general, a single connection creates a connection pool, and this can be used throughout a small application.
const { MongoClient } = require('mongodb');
const url = 'mongodb://localhost:27017/';
const opts = { useUnifiedTopology: true };

async function connect() {
    console.log('Connecting to db server...');
    return await MongoClient.connect(url, opts);
}

module.exports = connect;
dbFunctions.js: Various functions to access database details, collection details and query a specific collection.
module.exports = {
    // return list of database names
    getDbNames: async function(conn) {
        return await conn.db().admin().listDatabases( { nameOnly: true } );
    },
    // return collections list as an array for a given database
    getCollNames: async function(conn, db) {
        return await conn.db(db).listCollections().toArray();
    },
    // return documents as an array for a given database and collection
    getDocs: async function(conn, db, coll) {
        return await conn.db(db).collection(coll).find().toArray();
    }
}

MongoError: pool destroyed when fetching all data without conditions

I am new to MongoDB and I am trying to query different collections. When I fetch data from the category collection, i.e. when I run the equivalent of select * on the collection, it throws MongoError: pool destroyed.
As per my understanding, this is because the find({}) call is using a connection pool that has been destroyed.
The code I am using inside the model is below:
const MongoClient = require('mongodb').MongoClient;
const dbConfig = require('../configurations/database.config.js');

export const getAllCategoriesApi = (req, res, next) => {
    return new Promise((resolve, reject) => {
        let finalCategory = []
        const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
        client.connect(err => {
            const collection = client.db(dbConfig.db).collection("categories");
            debugger
            if (err) throw err;
            let query = { CAT_PARENT: { $eq: '0' } };
            collection.find(query).toArray(function(err, data) {
                if (err) return next(err);
                finalCategory.push(data);
                resolve(finalCategory);
                // db.close();
            });
            client.close();
        });
    });
}
My finding here is that when I am using
let query = { CAT_PARENT: { $eq: '0' } };
collection.find(query).toArray(function(err, data) {})
find(query) returns data, but with {} or $gte/$gt it throws the pool error.
The code I have written in the controller is below:
import { getAllCategoriesListApi } from '../models/fetchAllCategory';
const redis = require("redis");
const client = redis.createClient(process.env.REDIS_PORT);

export const getAllCategoriesListData = (req, res, next, query) => {
    // Try fetching the result from Redis first in case we have it cached
    return client.get(`allstorescategory:${query}`, (err, result) => {
        // If that key exist in Redis store
        if (false) {
            res.send(result)
        } else {
            // Key does not exist in Redis store
            getAllCategoriesListApi(req, res, next).then(function (data) {
                const responseJSON = data;
                // Save the Wikipedia API response in Redis store
                client.setex(`allstorescategory:${query}`, 3600, JSON.stringify({ source: 'Redis Cache', responseJSON }));
                res.send(responseJSON)
            }).catch(function (err) {
                console.log(err)
            })
        }
    });
}
Can anyone tell me what mistake I am making here, and how I can fix the pool issue?
Thanks in advance.
I assume that toArray is asynchronous (i.e. it invokes the callback passed in as results become available, i.e. read from the network).
If this is true the client.close(); call is going to get executed prior to results having been read, hence likely yielding your error.
The close call needs to be done after you have finished iterating the results.
Separately from this, you should probably not be creating the client instance in the request handler like this. Client instances are expensive to create (they must talk to all of the servers in the deployment before they can actually perform queries) and generally should be created per running process rather than per request.
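For example, here is a minimal sketch of the model rewritten along those lines, creating the client once at module level and not closing it per request, so toArray() has finished before anything is torn down (the export shape is kept roughly as in the question):

const MongoClient = require('mongodb').MongoClient;
const dbConfig = require('../configurations/database.config.js');

// One client per process, reused by every request.
const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
const clientReady = client.connect();   // connect() returns a promise when no callback is given

export const getAllCategoriesApi = async (req, res, next) => {
    await clientReady;   // wait until the shared connection is established
    const collection = client.db(dbConfig.db).collection("categories");
    const query = { CAT_PARENT: { $eq: '0' } };
    return [await collection.find(query).toArray()];   // no client.close() per request
};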

Using NodeJS promise to query MongoDB

I am building a chatbot using the Watson API which returns artist data based on the user's input. I am trying to use a Node.js promise to query my DB and print out the data, since DB access is asynchronous.
The artpromise function takes the artist's name and queries the DB, saving the result in the 'result' variable. Then I try to print out the result (in the chatbot I actually print the result to the user).
However, I am not getting the result I want and keep getting a syntax error. Any help would be appreciated.
let arttistinfo;
function artpromise (artist) {
    return new Promise(function(resolve, reject) {
        const MongoClient = require("mongodb").MongoClient;
        const url = 'mongodb://majac.co.kr:27017/artbot';
        MongoClient.connect(url, function(err, db) {
            if (err) throw err;
            var dbo = db.db("artbot");
            var query = {name: artist};
            artistinfo = dbo.collection("artistdb").find(query)
                .toArray(function(err, result) {
                    if (err) throw reject(err);
                    resolve(result);
                });
            db.close();
        }
    });
)};

let artist = "Jan Tarasin";
artpormise.then(function(artist) {
    console.log(result);
});
I'd rewrite like so, I can see there were a small number of issues with your code, but this works for me now:
function artpromise (artist) {
    return new Promise(function(resolve, reject) {
        const MongoClient = require("mongodb").MongoClient;
        const url = 'mongodb://majac.co.kr:27017/artbot';
        MongoClient.connect(url, function(err, db) {
            if (err) throw err;
            var dbo = db.db("artbot");
            var query = {name: artist};
            artistinfo = dbo.collection("artistdb").find(query)
                .toArray(function(err, result) {
                    if (err) throw reject(err);
                    resolve(result);
                });
            db.close();
        });
    });
};

let artist = "Jan Tarasin";
artpromise(artist).then(function(result) {
    console.log(result);
});
I get the result below:
[{
    _id: 5abdbc18423795deaaff0d8e,
    nationality: 'Polish',
    art_link: 'https://media.mutualart.com/Images/2016_06/29/20/203606422/0532d043-71f6-47bc-945e-aeededd2d483_570.Jpeg',
    years: '1926',
    name: 'JanTarasin',
    art_title: '"Falujace watki I", 2003r.'
}]
The MongoDB Node driver natively supports promises from v3 on, so you can greatly simplify your code by using them.
Here is how I would approach your problem:
function artpromise (artist) {
    const MongoClient = require("mongodb").MongoClient;
    return MongoClient.connect('mongodb://majac.co.kr:27017') // connect to mongo server
        .then(mc => mc.db('artbot')           // get mongoClient object and connect to artbot db
            .collection('artistdb')           // connect to the artistdb collection
            .find({name: artist})             // perform your query
            .toArray()                        // convert the results into an array
            .then(as => (mc.close(), as)))    // close db and return array from query result
        .catch(e => console.log(e));          // catch errors
}

let artist = "Jan Tarasin";
artpromise(artist).then(as => as.forEach(a => console.log(a)));
let artist = "Jan Tarasin";
artpromise(artist).then(as => as.forEach(a => console.log(a)));
[nodemon] starting `node maeror.js`
{ _id: 5abdbc18423795deaaff0d8e,
nationality: 'Polish',
art_link: 'https://media.mutualart.com/Images/2016_06/29/20/203606422/0532d043-71f6-47bc-945e-aeededd2d483_570.Jpeg',
years: '1926',
name: 'Jan Tarasin',
art_title: ' "Falujące wątki I", 2003 r. ' }
[nodemon] clean exit - waiting for changes before restart
It might be useful to remember that cursor.toArray() returns a promise, since it has to iterate all the query results at once and construct the results array before resolving. Sometimes this operation can be time consuming and yield a delayed server response. Instead, you can use the cursor.forEach() method to process the documents returned by the query one by one, like a stream: process the first document, then iterate to the next one. Here is another example showing how this might be implemented.
function artpromise (artist) {
    const MongoClient = require("mongodb").MongoClient;
    return MongoClient.connect('mongodb://majac.co.kr:27017') // connect to mongo server
        .then(function(mc) {
            var cursor = mc.db('artbot')      // get mongoClient object and connect to artbot db
                .collection('artistdb')       // connect to the artistdb collection
                .find({name: artist});        // get the cursor
            return [mc, cursor];              // return mongoClient and cursor objects
        });
}

let artist = "Italian";
artpromise(artist).then(function([mc, docs]) {
    docs.forEach(doc => console.log(doc),     // process a document and then iterate to the next
        () => mc.close());                    // close db session when all documents are processed
})
.catch(e => console.log(e));                  // catch errors
[nodemon] starting `node maeror_v2.js`
{ _id: 5abdbc18423795deaafeff13,
nationality: 'Dutch',
art_link: 'https://media.mutualart.com/Images/2012_04/15/13/132154856/ddf14e9d-85b1-4b5a-b621-00583e013879_570.Jpeg',
years: '1839 - 1902',
name: 'Frederick Hendrik Kaemmerer',
art_title: ' A Beach Stroll ' }
[nodemon] clean exit - waiting for changes before restart

Firebase cloud-function multiple executions

I have a Firebase (Google) Cloud Function as follows:
// Initialize the Auth0 client
var AuthenticationClient = require('auth0').AuthenticationClient;
var auth0 = new AuthenticationClient({
    domain: 'familybank.auth0.com',
    clientID: 'REDACTED'
});

function getAccountBalance(app) {
    console.log('accessToken: ' + app.getUser().accessToken);
    auth0.getProfile(app.getUser().accessToken, function (err, userInfo) {
        if (err) {
            console.error('Error getting userProfile from Auth0: ' + err);
        }
        console.log('getAccountBalance userInfo:' + userInfo)
        let accountowner = app.getArgument(PARAM_ACCOUNT_OWNER);
        // query firestore based on user
        var transactions = db.collection('bank').doc(userInfo.email)
            .db.collection('accounts').doc(accountowner)
            .collection('transactions');
        var accountbalance = transactions.get()
            .then( snapshot => {
                var workingbalance = 0
                snapshot.forEach(doc => {
                    workingbalance = workingbalance + doc.data().amount;
                });
                app.tell(accountowner + " has a balance of $" + workingbalance)
            })
            .catch(err => {
                console.log('Error getting transactions', err);
                app.tell('I was unable to retrieve your balance at this time.')
            });
    });
}

actionMap.set(INTENT_ACCOUNT_BALANCE, getAccountBalance);
app.handleRequest(actionMap);
When this executes, the logs show that parts of the function are being executed multiple times, and that the second execution fails. If I close out the auth0.getProfile call right after logging userInfo, the function works, but obviously doesn't have userInfo.
Any idea why parts of this function are executing multiple times and why some calls would fail?
The userInfo is undefined at point (2) because there has been an error (reported on the line right beneath it, which was the previous logged message). Your error block does not leave the function, so it continues to run with an invalid userInfo object.
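If you keep the callback style, one option (a sketch, not from the original function) is to leave the callback in the error branch so the rest of the handler never runs with an undefined userInfo:

auth0.getProfile(app.getUser().accessToken, function (err, userInfo) {
    if (err) {
        console.error('Error getting userProfile from Auth0: ' + err);
        app.tell('I was unable to retrieve your balance at this time.');
        return;   // leave the callback; nothing below runs with an undefined userInfo
    }
    // ... continue querying Firestore with a valid userInfo, as in the original function
});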
But that doesn't explain why the callback is getting called twice - once with a valid userInfo and once with an err. The documentation (although not the example) for AuthenticationClient.getProfile() indicates that it returns a Promise (or undefined - although it doesn't say why it might return undefined), so I am wondering if this ends up calling the callback twice.
Since it returns a promise, you can omit the callback function and just handle it with something like this:
function getAccountBalance(app) {
    let accountowner = app.getArgument(PARAM_ACCOUNT_OWNER);
    console.log('accessToken: ' + app.getUser().accessToken);
    var accessToken = app.getUser().accessToken;
    auth0.getProfile( accessToken )
        .then( userInfo => {
            console.log('getAccountBalance userInfo:' + userInfo)
            // query firestore based on user
            var transactions = db.collection('bank').doc(userInfo.email)
                .db.collection('accounts').doc(accountowner)
                .collection('transactions');
            return transactions.get();
        })
        .then( snapshot => {
            var workingbalance = 0
            snapshot.forEach(doc => {
                workingbalance = workingbalance + doc.data().amount;
            });
            app.tell(accountowner + " has a balance of $" + workingbalance)
        })
        .catch( err => {
            console.error('Error:', err );
            app.tell('I was unable to retrieve your balance at this time.')
        });
}
