NodeJS connection to MongoDB (on AWS C9) returns undefined db object

I'm learning how to code on Amazon's Cloud9, and I'm trying to connect to a MongoDB database from NodeJS, but when I call the connect() function the db returned in the callback is undefined. However, the data exists (show dbs in the mongo shell confirms it), the mongo server is running without issue, and the connect() function itself doesn't throw any errors.
index.js: Here is my index.js file containing the MongoDB connection (this file executes when I run npm start in the terminal):
var MongoClient = require('mongodb').MongoClient;

// Connect to the db
MongoClient.connect("mongodb://localhost:27017/testdb", function (err, db) {
    if (err) {
        console.log(err);
    } else {
        console.log("Connected!");
        console.log(db.name);
    }
    db.close();
});
mongo shell confirms the database exists (sample is the name of the collection):
use testdb
switched to db testdb
db.sample.find()
{ "_id" : ObjectId("5aed5fc7a44ab7d8a4efce2f"), "name" : "Luckyfield" }
The mongo server is running, as it says "waiting for connections on port 27017", and whenever I run the index.js file the server records the connection opening and closing in the terminal:
connection accepted from 127.0.0.1:43820 #16 (1 connection now open)
2018-05-05T11:07:16.128+0000 I NETWORK [conn16] received client metadata from 127.0.0.1:43820 conn16: { driver: { name: "nodejs", version: "3.0.7" }, os: { type: "Linux", name: "linux", architecture: "x64", version: "4.9.91-40.57.amzn1.x86_64" }, platform: "Node.js v6.14.1, LE, mongodb-core: 3.0.7" }
2018-05-05T11:07:16.138+0000 I NETWORK [conn16] end connection 127.0.0.1:43820 (0 connections now open)
When I replace db.name with just db, the console shows a MongoClient object full of data, although I don't know what it means. I also tried inserting and querying documents, but again db seems to be undefined.
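Based on that MongoClient object and the driver version 3.0.7 in the server log, it may be that the 3.x driver passes a MongoClient (not a Db) to the callback; here is a minimal sketch under that assumption, using client.db() and databaseName (db.name does not exist on either object):

var MongoClient = require('mongodb').MongoClient;

// With driver 3.x the callback's second argument is a MongoClient,
// and a Db handle has to be requested explicitly via client.db()
MongoClient.connect("mongodb://localhost:27017/testdb", function (err, client) {
    if (err) {
        console.log(err);
    } else {
        var db = client.db("testdb");
        console.log("Connected!");
        console.log(db.databaseName); // prints "testdb"
        client.close();
    }
});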
In summary, when I run npm start in a separate terminal, the index.js file executes and prints undefined in the console, even though the connection is properly established and the data actually exists. Thank you for any help!

Related

AWS lambda with mongoose to Atlas - MongoNetworkError

I am trying to connect to MongoDB Atlas with mongoose from AWS Lambda, but I get a MongoNetworkError.
The same code was tested with serverless-offline and works perfectly; the problem appears only when I deploy it to AWS Lambda.
This is the code snippet:
'use strict';
const mongoose = require('mongoose');
const MongoClient = require('mongodb').MongoClient;

let dbuser = process.env.DB_USER;
let dbpass = process.env.DB_PASSWORD;

let opts = {
    bufferCommands: false,
    bufferMaxEntries: 0,
    socketTimeoutMS: 2000000,
    keepAlive: true,
    reconnectTries: 30,
    reconnectInterval: 500,
    poolSize: 10,
    ssl: true,
};

const uri = `mongodb+srv://${dbuser}:${dbpass}@carpoolingcluster0-bw91o.mongodb.net/awsmongotest?retryWrites=true&w=majority`;

// simple hello test
module.exports.hello = async (event, context, callback) => {
    const response = {
        body: JSON.stringify({ message: 'AWS Testing :: ' + `${dbuser} and ${dbpass}` }),
    };
    return response;
};
// connect using mongoose
module.exports.cn1 = async (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false;
    let conn = await mongoose.createConnection(uri, opts);
    const M = conn.models.Test || conn.model('Test', new mongoose.Schema({ name: String }));
    const doc = await M.find();
    const response = {
        body: JSON.stringify({ data: doc }),
    };
    return response;
};
// connect using mongodb
module.exports.cn2 = (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false;
    console.log("Connect to mongo using MongoClient");
    MongoClient.connect(uri).then(client => {
        console.log("Successfully connected to mongo DB");
        client.db('awsmongotest').collection('tests').find({}).toArray()
            .then((result) => {
                let response = {
                    body: JSON.stringify({ data: result }),
                };
                callback(null, response);
            });
    }).catch(err => {
        console.log('=> an error occurred: ', err);
        callback(err);
    });
};
In the CloudWatch logs I see this error:
{
    "errorType": "MongoNetworkError",
    "errorMessage": "failed to connect to server [carpoolingcluster0-shard-00-02-bw91o.mongodb.net:27017] on first connect [MongoNetworkError: connection 5 to carpoolingcluster0-shard-00-02-bw91o.mongodb.net:27017 closed]",
    "stack": [
        "MongoNetworkError: failed to connect to server [carpoolingcluster0-shard-00-02-bw91o.mongodb.net:27017] on first connect [MongoNetworkError: connection 5 to carpoolingcluster0-shard-00-02-bw91o.mongodb.net:27017 closed]",
        "    at Pool.<anonymous> (/var/task/node_modules/mongodb-core/lib/topologies/server.js:431:11)",
        "    at Pool.emit (events.js:189:13)",
        "    at connect (/var/task/node_modules/mongodb-core/lib/connection/pool.js:557:14)",
        "    at callback (/var/task/node_modules/mongodb-core/lib/connection/connect.js:109:5)",
        "    at runCommand (/var/task/node_modules/mongodb-core/lib/connection/connect.js:129:7)",
        "    at Connection.errorHandler (/var/task/node_modules/mongodb-core/lib/connection/connect.js:321:5)",
        "    at Object.onceWrapper (events.js:277:13)",
        "    at Connection.emit (events.js:189:13)",
        "    at TLSSocket.<anonymous> (/var/task/node_modules/mongodb-core/lib/connection/connection.js:350:12)",
        "    at Object.onceWrapper (events.js:277:13)",
        "    at TLSSocket.emit (events.js:189:13)",
        "    at _handle.close (net.js:597:12)",
        "    at TCP.done (_tls_wrap.js:388:7)"
    ],
    "name": "MongoNetworkError",
    "errorLabels": [
        "TransientTransactionError"
    ]
}
Here is an example on GitHub to reproduce the error:
https://github.com/rollrodrig/error-aws-mongo-atlas
Just clone it, run npm install, add your Mongo Atlas user and password, and push it to AWS.
Thanks.
Some extra steps are required to let Lambda call an external endpoint:
https://aws.amazon.com/premiumsupport/knowledge-center/internet-access-lambda-function/
Your Atlas cluster should also whitelist the IP addresses of the servers from which Lambda will connect.
Another option to consider is VPC peering between your Lambda VPC and Atlas.
I have some questions concerning your configuration:
Did you whitelist the AWS Lambda function's IP address in Atlas? Several posts on SO indicate that users get a MongoNetworkError like this if the IP is not whitelisted. [1][4]
Did you read the best-practices guide by Atlas, which states that MongoDB connections should be initiated outside the Lambda handler? [2][3] (See the sketch after these questions.)
Do you use a public Lambda function or a Lambda function inside a VPC? There is a substantial difference between them, and the latter is more error-prone since the VPC configuration (e.g. NAT) must be taken into account.
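To illustrate the second point, here is a minimal sketch of the connection-reuse pattern from the Atlas guide; the Test model mirrors the question's cn1 handler, while the environment variable and the caching details are my assumptions, not the asker's code:

'use strict';
const mongoose = require('mongoose');

const uri = process.env.MONGODB_URI;    // e.g. the Atlas SRV string from the question
const opts = { bufferCommands: false }; // options trimmed down for brevity

// Created at most once per Lambda container and reused across warm invocations
let conn = null;

module.exports.cn1 = async (event, context) => {
    // Don't keep the function alive waiting for open sockets in the pool
    context.callbackWaitsForEmptyEventLoop = false;
    if (conn == null) {
        conn = await mongoose.createConnection(uri, opts);
    }
    const Test = conn.models.Test || conn.model('Test', new mongoose.Schema({ name: String }));
    const docs = await Test.find();
    return { body: JSON.stringify({ data: docs }) };
};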
I was able to ping the instances in the Atlas cluster and was able to establish a connection on port 27017. However, when connecting via the mongo shell, I get the following error:
Unable to reach primary for set CarpoolingCluster0-shard-0.
Cannot reach any nodes for set CarpoolingCluster0-shard-0. Please check network connectivity and the status of the set. This has happened for 1 checks in a row.
When I use your GitHub sample from AWS lambda I get the exact same error message as described in the question.
As the error messages are not authentication-related but network-related, I assume that something is blocking the connection... Please double-check the three config questions above.
[1] What is a TransientTransactionError in Mongoose (or MongoDB)?
[2] https://docs.atlas.mongodb.com/best-practices-connecting-to-aws-lambda/
[3] https://blog.cloudboost.io/i-wish-i-knew-how-to-use-mongodb-connection-in-aws-lambda-f91cd2694ae5
[4] https://github.com/Automattic/mongoose/issues/5237
Well, thanks everyone. I finally found the solution with the help of Mongo support.
Here is the solution for anyone who needs it.
When you create a Mongo Atlas cluster, you are asked to add your local IP, and it is automatically added to the whitelist. You can see it under
Your cluster > Network Access > IP Whitelist; your IP will be in the list. This means that only people from YOUR network will be able to connect to your Mongo Atlas. AWS Lambda is NOT in your network, so AWS Lambda will never connect to your Mongo Atlas. That is why I got the MongoNetworkError.
Fix
You need to add the AWS Lambda IP to the Mongo Atlas IP whitelist:
go to Your cluster > Network Access > IP Whitelist
click the ADD IP ADDRESS button
click ALLOW ACCESS FROM ANYWHERE; it will add the IP 0.0.0.0/0 to the list; click confirm
Test your call from AWS Lambda and it will work.
FINALLY!
What you did is tell Mongo Atlas that ANYONE from ANYWHERE can connect to your Mongo Atlas.
Of course, this is not good practice. What you really need is to add only the AWS Lambda IP, and this is where a VPC comes into the picture.
Creating a VPC is a little complex and has many steps; there are good tutorials in the other comments.
But this small guide at least tackles the MongoNetworkError.

Error when attempting knex seed:run after successful knex migrate:latest for remote database

I'm getting the following error when attempting to run knex seed:run against my remote Postgres database (not localhost): Knex:Error Pool2 - Error: connect ECONNREFUSED 127.0.0.1:5432.
I am able to run knex migrate:latest successfully and can see that the tables are created on my Postgres server, but when I try to seed I get that error. I've run the same migrations/seed file against my local configuration and it worked without a problem, but when I attempt to seed my Heroku Postgres instance, it throws this error. (I'm not running my local pg service when seeding the new db, which is likely why the refused connection points at localhost.)
Any thoughts on why it is attempting to connect to localhost instead of the specified db? A sample of my file is provided below:
var User = require("./models/User");
var Project = require("./models/Project");

exports.seed = function (knex, Promise) {
    console.log(knex.client.config.connection); // This returns the correct db info.
    return knex('user').del()
        .then(function () {
            return knex('project').del();
        }).then(function () {
            return new User({ id: 1, firstName: "James", lastName: "Lee", phone: "123-456-2000", email: "test@test.com" }).save(null, { method: "insert" });
        }).then(function () {
            return new Project({ id: 1, name: "Test" }).save(null, { method: "insert" });
        });
};
This seems to have occurred due to how I was setting up the migrations/seeds. The configurations were actually pulling from two different places: one had the correct SSL settings in place, the other (the seed file) did not. Adding the correct settings in both places resolved the issue.
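For anyone hitting the same mismatch, a minimal sketch of a single shared knexfile.js that both migrate and seed read from, so the SSL settings cannot diverge (the connection values and directories are placeholders, not the poster's actual config):

// knexfile.js - one config for migrations and seeds alike
module.exports = {
    production: {
        client: 'pg',
        connection: {
            connectionString: process.env.DATABASE_URL,
            ssl: { rejectUnauthorized: false } // Heroku-style Postgres requires SSL
        },
        migrations: { directory: './migrations' },
        seeds: { directory: './seeds' }
    }
};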

mongoose connection as a separate module in nodejs app

In my project I want to make a separate module to get a mongoose connection, say connection.js:
var mongoose = require('mongoose');
mongoose.connect('mongodb://host:port/db');

// When the connection is open
mongoose.connection.on('connected', function () {
    console.log('Mongoose default connection open to localhost:27017');
});

// If the connection throws an error
mongoose.connection.on('error', function (err) {
    console.log('Mongoose default connection error: ' + err);
});

// When the connection is disconnected
mongoose.connection.on('disconnected', function () {
    console.log('Mongoose default connection disconnected');
});

module.exports = mongoose;
which I can import using require in another file, say file1.js, as
var connect_to_mongoose = require('./connection');
whenever necessary.
But I have come across a problem: since I/O in Node.js is async, how can I make sure that the connection is successful before I use the connect_to_mongoose variable for queries, insertions, deletions, etc.?
My second question: after handling the above scenario, how can I manage multiple connections for multiple databases? Because as far as I know (for practical reasons), in mongoose one connection is dedicated to one DB only.
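One way to make the readiness explicit is to wait for the 'connected' event the module already listens to (mongoose also buffers model operations until the connection opens, but the event makes it explicit). A minimal sketch, where the User model and query are illustrative only:

// file1.js - run queries only once the connection is open
var connect_to_mongoose = require('./connection');

connect_to_mongoose.connection.once('connected', function () {
    // Safe to run queries, insertions, deletions from here on
    var User = connect_to_mongoose.model('User', new connect_to_mongoose.Schema({ name: String }));
    User.find({}, function (err, users) {
        console.log(err || users);
    });
});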
I think you should consider these scenarios while working with MongoDB and mongoose:
mongoose.connect opens a default connection as soon as the app starts
you don't have to create a new connection every time if you are dealing with multiple tables / collections (whatever you call them)
if you are dealing with multiple databases, then you'd use a separate MongoDB URL for each, like mongoose.connect('mongodb://localhost/db1') and mongoose.connect('mongodb://localhost/db2')
But point no. 3 above would give you a warning: Trying to close an open connection
To solve that issue, just use the following:
var db = mongoose.createConnection('mongodb://localhost/db1')
And after all your tasks are completed, close the connection.
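A minimal sketch of that pattern for two databases (the names and schema are placeholders):

var mongoose = require('mongoose');

// One dedicated connection per database
var db1 = mongoose.createConnection('mongodb://localhost/db1');
var db2 = mongoose.createConnection('mongodb://localhost/db2');

// Models are bound to the connection they are created on
var userSchema = new mongoose.Schema({ name: String });
var UserInDb1 = db1.model('User', userSchema);
var UserInDb2 = db2.model('User', userSchema);

// Close both connections once all work is done
function shutdown() {
    db1.close();
    db2.close();
}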
Cheers :)

Mongoose not connecting on Ubuntu 14.04

I've got a node app built on Hapi using MongoDB and mongoose. Locally, I can use the app without issue. I can connect to the db, add data, find it, etc.
I've created an Ubuntu 14.04 x64 droplet on Digital Ocean.
I can ssh into my droplet and verify that my db is there with the correct name. I'm using dokku-alt to deploy and I have linked the db name to the app using dokku's mongodb:link appName mydb
I was having issues once I deployed the app: it would hang and eventually time out. After a lot of debugging and commenting out code, I found that any time I try to hit mongo like this, the app hangs:
var User = request.server.plugins.db.User;

User
    .findOne({ id: request.auth.credentials.profile.raw.id })
    .exec(function (err, user) {
        // do something
    });
Without this, the app loads fine, albeit without data. So my thought is that mongoose is never properly connecting.
I'm using grunt-shell-spawn to run a script which checks whether mongo is already running and starts it if not. I'm not 100% certain this is needed on the droplet, but I was having issues locally where mongo was already running... script:
/startMongoIfNotRunning.sh
if pgrep mongod; then
    echo running;
else
    mongod --quiet --dbpath db/;
fi
exit 0;
/Gruntfile.js
shell: {
    make_dir: {
        command: 'mkdir -p db'
    },
    mongodb: {
        command: './startMongoIfNotRunning.sh',
        options: {
            stdin: false,
        }
    }
},
And here's how I'm defining the database location:
/index.js
server.register([
    { register: require('./app/db'), options: { url: process.env.MONGODB_URL || 'mongodb://localhost:27017/mydb' } },
    ....
/app/db/index.js
var mongoose = require('mongoose');
var _ = require('lodash-node');
var models = require('require-all')(__dirname + '/models');

exports.register = function (plugin, options, next) {
    mongoose.connect(options.url, function () {
        next();
    });
    var db = mongoose.connection;
    plugin.expose('connection', db);
    _.forIn(models, function (value, key) {
        plugin.expose(key, value);
    });
};

exports.register.attributes = {
    name: 'db'
};
My app is looking for db files in db/. Could it be that dokku's mongodb:link appName mydb linked it to the wrong location? Perhaps process.env.MONGODB_URL is not being set correctly? I really don't know where to go from here.
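One thing worth checking (a hedged diagnostic, not the original code): the mongoose.connect callback in /app/db/index.js ignores its error argument, so a failed connect is silently swallowed and next() is called as if all were well. Passing the error through would make a bad MONGODB_URL fail loudly at startup:

// /app/db/index.js - surface a failed connect instead of swallowing it
mongoose.connect(options.url, function (err) {
    if (err) {
        console.error('mongoose failed to connect:', err);
    }
    next(err); // Hapi aborts plugin registration when err is set
});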
It turns out the solution to my problem was adding an entry to the hosts file of my droplet to point to the mongo db url:
127.0.0.1 mongodb.myurl.com
For some reason, linking the db to my app with Dokku didn't add this bit; I would have thought that it was automatic. The app container's hosts file did get a mongodb entry when I linked the db to the app.

Replica Set not working as expected

I have configured it like below, and my MongoDB doesn't need a username or password:
mongo: {
    module: 'sails-mongo',
    url: "mongodb://127.0.0.1:27017/mydb",
    replSet: {
        servers: [
            { host: "127.0.0.1", port: 27018 },
            { host: "127.0.0.1", port: 27019 }
        ],
        options: { connectWithNoPrimary: true, rs_name: "rs0" }
    }
}
It's working fine, meaning I do not get a connection error and I am able to query. But when I bring down 127.0.0.1:27017, 127.0.0.1:27018 becomes PRIMARY, as rs.status() confirms. After this, I am no longer able to run any query and keep getting the following:
Error: no open connections
I am sure that I set up the replica set on my local machine correctly, since I used the MongoDB native driver to test the same scenario (bring down PRIMARY, SECONDARY takes over as PRIMARY) and there was no problem:
var url = 'mongodb://127.0.0.1:27017,127.0.0.1:27018,127.0.0.1:27019/mydb?w=0&wtimeoutMS=5000&replicaSet=sg1&readPreference=secondary';

mongodb.MongoClient.connect(url, function (err, result) {
    if (err || result === undefined || result === null) {
        throw err;
    } else {
        db = result;
    }
});
OK, I found the answer. This message was emitted because of session.js. I commented out everything in the file and now it is working. The reason, I guess, is that session.js points to a single host, the original PRIMARY; when you bring down this MongoDB PRIMARY, session.js can no longer connect, so it throws an exception. I also tried putting all the replica set hosts into the MongoDB URL string in session.js (mongodb://127.0.0.1:27017,127.0.0.1:27018,127.0.0.1:27019/mydb), but then sails lift failed; with only a single host it is fine.
Now, if I need to store session info, I need to start another MongoDB instance and point session.js at this new instance.
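For that last step, a rough sketch of what config/session.js might look like pointed at the dedicated instance (this follows the Sails 0.x connect-mongo-style session config; the host, port, and db names are placeholders):

// config/session.js - sessions kept on a separate mongod instance
module.exports.session = {
    adapter: 'mongo',
    host: '127.0.0.1',
    port: 27020, // the extra instance dedicated to sessions
    db: 'sessions',
    collection: 'sessions'
};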
