Node.js + Mongoose: find freezes Node when there are more than 100 results

I have a simple mongoose model. When I call find on it with a limit of at most 100, the done callback is called:
this.find({}).limit(100).exec(done);
The callback is never called if I change this line to a limit of 101 (or any higher number):
this.find({}).limit(101).exec(done);
There is no error anywhere, the database keeps working, but the Node app freezes and must be restarted.
If I SSH into the server and open the mongo shell against the same database, find({}) on the same collection returns all ~700 documents in under a second.
When I cloned the database to my local PC and ran the app against the local copy, everything worked; the app only freezes on the server when it connects to the database on that same server.
Any idea how to debug this one?
Edit 1: added the model file:
'use strict';

let mongoose = require('mongoose');
let Schema = mongoose.Schema;

let foodSchema = new Schema(
    {
        name: Object,
        type: String,
        description: Object,
        price: Number,
        priceBig: Number,
        active: Boolean
    },
    {
        collection: 'foods'
    }
);

let model = mongoose.model('food', foodSchema);

model.getAllFoods = function (done) {
    this.find({}, done);
};

model.getActiveFoods = function (done) {
    this.find({active: true}, done);
};

model.getFoodById = function (id, done) {
    this.findOne({_id: id}, done);
};

module.exports = model;
Usage:
foodModel.getAllFoods(function (err, docs) {
    if (err) {
        res.sendStatus(500);
        return;
    }
    res.send(docs);
});
getActiveFoods works just fine (it returns 96 docs).

After the tip from JohnnyK, I updated Mongoose from 4.1.11 to 4.3.7, and that fixed the issue.
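For anyone debugging a similar hang: Mongoose can log every operation it sends to MongoDB, which shows whether a query ever actually leaves the app. A minimal sketch:

    // Enable mongoose's built-in operation logging (collection, method, arguments).
    let mongoose = require('mongoose');
    mongoose.set('debug', true);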

Related

Memory issue with mongo in node

I am facing memory issues with my Node app. I took some heap dumps and saw a lot of Mongo objects being held in memory, which causes the Node app to run out of memory.
I have the following setup for my app.
MongoDB 3.4.13
Mongoose 4.11.10 (tried 4.13.11 and 5.0.7 also)
Node 8.9.4
config.js
const clientUID = require('./env').clientUID;

module.exports = {
    // Secret key for JWT signing and encryption
    secret: 'mysecret',
    // Database connection information
    database: `mongodb://localhost:27017/app_${clientUID}`,
    // Setting port for server
    port: process.env.PORT || 3000,
};
I have several models in the app. Every model is defined in the following manner (just listing one of the models here):
models/card.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const CardSchema = new Schema({
    name: {
        type: String,
        unique: true,
        required: true
    },
    macId: {
        type: String,
        unique: true,
        required: true
    },
    cardTypeId: {
        type: mongoose.Schema.Types.ObjectId,
        ref: 'CardType',
        required: true
    },
},
{
    timestamps: true
});

module.exports = mongoose.model('Card', CardSchema);
In the app I require the model and perform some actions as follows:
const mongoose = require('mongoose');
const moment = require('moment');
const Card = require('./models/card');
...require other models
const config = require('./config');

mongoose.connect(config.database);

function fetchCardByMacId(macId) {
    return Card.findOne({ macId }).lean().exec();
}

function updateTrackerByMacId(macId, x, y, nodeId) {
    const data = { x, y, lastNodeId: nodeId };
    fetchCardByMacId(macId)
        .then(card => {
            Tracker.findOneAndUpdate({ cardId: card._id }, data, { upsert: true, new: true }).exec((error, tracker) => {
                if (error) {
                    return console.log('update tracker error', error);
                }
                TrackerHistory.findOne({ trackerId: tracker._id }).exec((err, trackerHistory) => {
                    if (err) {
                        return console.log('fetch trackerHistory error', err);
                    }
                    if (trackerHistory) {
                        // Append the new sample and write the whole document back.
                        trackerHistory.trackers.push({ x, y, timestamp: moment().format(), nodeId });
                        TrackerHistory.findOneAndUpdate({ _id: trackerHistory._id }, trackerHistory, (er, trackerHis) => {
                            if (er) {
                                return console.log('trackerHistory change update error', er);
                            }
                        });
                    } else {
                        const trackerHistoryNew = new TrackerHistory({
                            trackerId: tracker._id,
                            trackers: [{ x, y, timestamp: moment().format(), nodeId }]
                        });
                        trackerHistoryNew.save((er, trackerHis) => {
                            if (er) {
                                return console.log('trackerHistory create error', er);
                            }
                        });
                    }
                });
            });
        })
        .catch(error => {
            console.log('updateTrackerByMacId error', error);
        });
}
There are many other functions like this that read and update data.
Every 5 seconds I get new data that needs to be inserted into the db (not more than a few hundred KB), and some of the old db data also gets updated based on this new data (fairly straightforward db ops: read, manipulate, and write back).
From index.js I spawn 2 child processes that take the load of processing this new data and updating the db based on the business logic. When new data is received in index.js via event listeners, I send it to child process 1 to insert/update the db. Child process 2 runs on a 10-second timer to read this updated data and then make some further updates to the db; a sketch of this layout follows below.
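A minimal sketch of that process layout (the module paths and the data source are hypothetical, not taken from the real app):

    // index.js: hypothetical sketch of the two-worker layout described above
    const { fork } = require('child_process');
    const EventEmitter = require('events');

    const writer = fork('./workers/writer.js'); // child process 1: inserts/updates the db with new data
    const reader = fork('./workers/reader.js'); // child process 2: re-reads on a 10s timer and does follow-up updates

    const dataFeed = new EventEmitter(); // stand-in for the real event source

    // Forward each incoming payload to the writer process over IPC.
    dataFeed.on('data', payload => writer.send(payload));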
Running this on my local MacBook Pro is no issue (the logged heap usage never goes above 40-50 MB). When I load it on a DO Ubuntu 16.04 server (4 GB / 2 CPUs), I am facing memory issues. The child processes are exiting after hitting the memory threshold for the process (~1.5 GB), which seems very odd to me.
I also tried doing this with Docker containers and see the same results: on the Mac it runs without issues, but on the server it eats up memory.
Generating heap dumps shows a lot of Mongo objects on the heap.
I would like some help understanding what I am doing wrong here and why Mongo is eating up this much memory on the server.
So there was a big issue with the way the TrackerHistory collection was modelled. TrackerHistory had an array, and every time a new object had to be added to that array, the whole TrackerHistory document was loaded into memory; at the given frequency of real-time updates, memory was bloating faster than it was being GC'd.
Fixed it by moving the trackers array out into a new collection whose documents hold a foreign-key reference back to TrackerHistory.
Reference article that helped me identify this issue:
https://www.mongodb.com/blog/post/6-rules-of-thumb-for-mongodb-schema-design-part-1
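A minimal sketch of that restructuring (the names here are hypothetical, not taken from the actual fix): each sample becomes its own small document pointing back at its parent, so appending is an insert of a fixed-size document instead of a read-modify-write of an ever-growing one:

    const mongoose = require('mongoose');
    const Schema = mongoose.Schema;

    // Hypothetical replacement for the embedded trackers array:
    // one document per sample, referencing the parent TrackerHistory.
    const TrackerSampleSchema = new Schema({
        trackerHistoryId: { type: Schema.Types.ObjectId, ref: 'TrackerHistory', required: true },
        x: Number,
        y: Number,
        nodeId: String,
        timestamp: { type: Date, default: Date.now }
    });

    module.exports = mongoose.model('TrackerSample', TrackerSampleSchema);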

Is it necessary to open a MongoDB connection every time I want to work with the DB?

The example I am working from contains this code:
// Let's require/import the mongodb native driver.
var mongodb = require('mongodb');

// We need to work with the "MongoClient" interface in order to connect to a mongodb server.
var MongoClient = mongodb.MongoClient;

// Connection URL. This is where your mongodb server is running.
var url = 'mongodb://localhost:27017/my_database_name';

// Use the connect method to connect to the server.
MongoClient.connect(url, function (err, db) {
    if (err) {
        console.log('Unable to connect to the mongoDB server. Error:', err);
    } else {
        // HURRAY!! We are connected. :)
        console.log('Connection established to', url);

        // Get the documents collection
        var collection = db.collection('users');

        // Create some users
        var user1 = {name: 'modulus admin', age: 42, roles: ['admin', 'moderator', 'user']};
        var user2 = {name: 'modulus user', age: 22, roles: ['user']};
        var user3 = {name: 'modulus super admin', age: 92, roles: ['super-admin', 'admin', 'moderator', 'user']};

        // Insert some users
        collection.insert([user1, user2, user3], function (err, result) {
            if (err) {
                console.log(err);
            } else {
                console.log('Inserted %d documents into the "users" collection. The documents inserted with "_id" are:', result.length, result);
            }
            // Close the connection
            db.close();
        });
    }
});
As you can see, he performs the operation inside the connect callback. I would like to keep it modular and separate the connection from the DB operations.
My suggestion would be to make the db variable a singleton and reuse it; at least that's what I would do in Java, which I am used to.
However, I am not sure, as the example doesn't suggest anything like that.
I would recommend against maintaining one connection if you want any kind of scalability.
There are a number of options for connection pooling, etc, but most folks who spend any time at all with Node and MongoDB end up moving to Mongoose at some point.
In addition to adding a nice schema layer, it offers connection abstraction so that you can default to a shared connection by calling mongoose.connect(), or you can create multiple connections or participate in connection pooling by calling mongoose.createConnection(). In both cases, you call it without a callback, and the mongoose machinery will defer subsequent calls to the module until after the connection is established, so that your code doesn't have to care.
Something like your use case might look like so:
// in your app.js or server.js file
var mongoose = require('mongoose');
mongoose.connect(config.db.url); // assuming you have some module that handles config variables
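For completeness, that config module might look something like this (a hypothetical sketch; the MONGO_URL variable name is an assumption):

    // config.js: hypothetical shape of the module referenced above
    module.exports = {
        db: {
            // MONGO_URL is an assumed environment variable name
            url: process.env.MONGO_URL || 'mongodb://localhost:27017/myapp'
        }
    };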
Then in ./models/user.js
const mongoose = require('mongoose'),
    Schema = mongoose.Schema;

const UserSchema = new Schema({
    name: String,
    age: Number,
    roles: [String]
});

mongoose.model('User', UserSchema);
Finally, in, say, a seed function to create your initial batch of users:
const mongoose = require('mongoose'),
    User = mongoose.model('User');

// create some users
var user1 = new User({name: 'modulus admin', age: 42, roles: ['admin', 'moderator', 'user']});
var user2 = new User({name: 'modulus user', age: 22, roles: ['user']});

user1.save(console.log);
user2.save(console.log);
I believe maintaining a single connection is best, as mentioned in another thread:
The primary committer on node-mongodb-native says:
You do MongoClient.connect once when your app boots up and reuse the db object. It's not a singleton connection pool; each .connect creates a new connection pool. So open it once and reuse it across all requests.
Say, on server start, you initiate the Mongo connection.
Server.js:
...
var db = require('./db'); // require db.js
db.openMongoConnection(function (error) {
    if (error) {
        console.log(error);
        console.log('cannot make the connection with database');
    } else {
        server.listen(7400); // say your server is listening on port 7400
    }
});
db.js
var db1;
var MongoClient = require('mongodb').MongoClient;

exports.openMongoConnection = function (callback) {
    MongoClient.connect(<YourUrl1>, function (err, dbInstance) {
        if (err) {
            callback(err);
        } else {
            db1 = dbInstance;
            callback(null);
        }
    });
};

exports.getCollection = function (collectionName, callback) {
    db1.collection(collectionName, function (err, collectionInstance) {
        if (err) {
            callback(err);
        } else {
            callback(null, collectionInstance);
        }
    });
};
Then you can call getCollection at any time by requiring db.js, as in the sketch below.
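A hypothetical usage from an Express route handler (the app object and the users route are assumptions for illustration):

    var db = require('./db');

    // Assumes openMongoConnection has already completed at server start.
    app.get('/users', function (req, res) {
        db.getCollection('users', function (err, users) {
            if (err) return res.status(500).send(err.message);
            users.find({}).toArray(function (err, docs) {
                if (err) return res.status(500).send(err.message);
                res.json(docs);
            });
        });
    });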

Mongoosastic - { [Error: No Living connections] message: 'No Living connections' }

I'm trying to use mongoosastic for search, but I keep getting a 'No Living connections' error and a mapping problem.
Here's the code:
var mongoose = require('mongoose');
var mongoosastic = require('mongoosastic');
var Schema = mongoose.Schema;

var JobSchema = new Schema({
    category: { type: Schema.Types.ObjectId, ref: 'Category', es_indexed: true },
    title: { type: String, es_indexed: true }
});

JobSchema.plugin(mongoosastic);

module.exports = mongoose.model('Job', JobSchema);
routes.js
var Job = require('../models/job');

Job.createMapping(function (err, mapping) {
    if (err) {
        console.log('error creating mapping (you can safely ignore this)');
        console.log(err);
    } else {
        console.log('mapping created!');
        console.log(mapping);
    }
});

app.post('/search', function (req, res, next) {
    Job.search({ query_string: { query: req.body.q } }, function (err, results) {
        if (err) return next(err);
        res.send(results);
    });
});
I keep getting this error. Can anyone with experience using mongoosastic tell me how to fix this problem?
When adding the plugin to your JobSchema model, you need to pass a second parameter describing how to connect to Elasticsearch:
JobSchema.plugin(mongoosastic, {
    hosts: [
        'localhost:9200'
    ]
});
You can also re-use an Elasticsearch client object if you have one ready.
var esClient = new elasticsearch.Client({ host: 'localhost:9200' });

JobSchema.plugin(mongoosastic, {
    esClient: esClient
});
I was also facing the same issue, and I solved it using the method below.
It's very simple: you have to install Elasticsearch locally (or wherever else you want to run it).
Download Elasticsearch: http://www.elastic.co/downloads/elasticsearch
Extract the folder.
From cmd or a terminal, go into the bin folder and run the Elasticsearch script.
Node.js code will connect to localhost:9200 by default, so there is no need to use new elasticsearch.Client({host: 'localhost:9200'}); but if your Elasticsearch is deployed somewhere else, then you have to use the client method above.
I think you have to create a client and pass it into the plugin, but use IP addresses; that is what worked for me.
// http://127.0.0.1:9200 == http://localhost:9200
var esClient = new elasticsearch.Client({ host: 'http://127.0.0.1:9200' });

JobSchema.plugin(mongoosastic, {
    esClient: esClient
});
Note: if you were not previously using Elasticsearch, you might have to re-index your documents (I had to, but I may have done something wrong).
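If you do need to re-index, mongoosastic provides a synchronize() method that streams existing documents into Elasticsearch; a minimal sketch, assuming the Job model from the question:

    var stream = Job.synchronize();
    var count = 0;

    stream.on('data', function (err, doc) {
        count++; // fired once per document indexed
    });
    stream.on('close', function () {
        console.log('indexed ' + count + ' documents');
    });
    stream.on('error', function (err) {
        console.log(err);
    });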
Referencing this answer: https://stackoverflow.com/a/30204512/1146562
From that answer, you can pass host straight into the plugin and do not have to create the client.
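That plugin-only form would look roughly like this (option names follow the mongoosastic docs; the host and port values are examples):

    JobSchema.plugin(mongoosastic, {
        host: '127.0.0.1', // the plugin builds its own client from these options
        port: 9200
    });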

node.js mongodb update error

I have a Node.js website. I am using Mongoose to connect to my MongoDB. Adding new records works fine, and find also works fine.
But when I update a record it throws the error below. I have a callback function but don't know what's wrong.
throw new Error("writeConcern requires callback")
^
Error: writeConcern requires callback
Below is my update code.
var newUser = new User();

newUser.update({ 'local.email': emailID }, { 'local.resetkey': ResetHash }, { multi: false }, function (err, res) {
    if (err) return handleError(err);
    console.log('The raw response from Mongo was ', raw);
});
This is my schema...
var mongoose = require('mongoose');
var bcrypt = require('bcrypt-nodejs');
var crypto = require('crypto');
var safe = { w: "0" };

// define the schema for our user model
    local : {
        email : String,
        password : String,
        resetkey : String,
        resetexpiry : String,
    },
});

module.exports = mongoose.model('User', userSchema);
newUser is a document, but you are calling update as it is defined for the model, so a wrong argument ends up in place of the callback.
Try User.update(...), as in the Mongoose API docs: Model.update(conditions, update, options, callback);
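Applied to the snippet above, a sketch of the corrected call (note the callback's raw parameter, which the original console.log referenced but never declared):

    User.update(
        { 'local.email': emailID },
        { 'local.resetkey': ResetHash },
        { multi: false },
        function (err, raw) {
            if (err) return handleError(err);
            console.log('The raw response from Mongo was ', raw);
        }
    );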
You show incomplete code for your schema.

Mongoose save callback doesn't fire

I'm new to mongoose and I'm having a hard time finding the issue within my code. I'm building a REST server using Sails.js and Mongoose. I have a node module (e.g. "sails-mongoose") for exporting mongoose, where I also connect to my database:
var mongoose = require('mongoose');
mongoose.connect('mongodb://#localhost:27017/fooria');
module.exports = mongoose;
And in my model.js:
var adapter = require('sails-mongoose');

var schema = new adapter.Schema({
    firstname: {
        type: String,
        required: true,
        trim: true
    }
});

module.exports = {
    schema: schema,
    model: adapter.model('Collection', schema)
};
In my controller's create method I have:
create: function (req, res, next) {
    var userData = { firstname: 'Test' };
    var users = new Users.model(userData);
    users.save(function (err, data) {
        if (err) return res.json(err, 400);
        res.json(data, 201);
    });
}
When the create method runs, the entry is saved to the MongoDB collection, but the callback is never reached. Can someone please help me on this? I found similar questions, but none of them helped me. Thanks!
I suppose you are using Express. According to the Express docs, you are calling res.json with its parameters in the wrong order.
Correct format:
res.json(code, data)
Example:
res.json(500, { error: 'message' })
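Applied to the save callback in the create method above, that would be (this is the Express 3 argument order shown in this answer; Express 4 deprecates it in favor of res.status(code).json(data)):

    users.save(function (err, data) {
        if (err) return res.json(400, err); // status code first, then body
        res.json(201, data);
    });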
