I am connecting to MongoDB using the MongoDB client in my app. Here is my 'app.js' file:
var MongoClient = require('mongodb').MongoClient
MongoClient.connect('mongodb://localhost:27017/animals', function (err, db) {
  if (err) throw err
  // db.collection('mammals').find().toArray(function (err, result) {
  //   if (err) throw err
  //   console.log(result)
  // })
})
The issue is:
(node:16348) DeprecationWarning: current URL string parser is
deprecated, and will be removed in a future version. To use the new
parser, pass option { useNewUrlParser: true } to MongoClient.connect.
/home/amarjeet/Desktop/node2/node_modules/mongodb/lib/operations/mongo_client_ops.js:474
throw err;
^
MongoNetworkError: failed to connect to server [localhost:27017] on
first connect [MongoNetworkError: connect ECONNREFUSED 127.0.0.1:27017]
at Pool.<anonymous> (/home/amarjeet/Desktop/node2/node_modules/mongodb-
core/lib/topologies/server.js:564:11)
at Pool.emit (events.js:188:13)
at Connection.<anonymous>
(/home/amarjeet/Desktop/node2/node_modules/mongodb-core/lib/connection/pool.js:317:12)
at Object.onceWrapper (events.js:276:13)
at Connection.emit (events.js:188:13)
at Socket.<anonymous> (/home/amarjeet/Desktop/node2/node_modules/mongodb-core/lib/connection/connection.js:246:50)
at Object.onceWrapper (events.js:276:13)
at Socket.emit (events.js:188:13)
at emitErrorNT (internal/streams/destroy.js:82:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:50:3)
[nodemon] app crashed - waiting for file changes before starting...
This is the error shown in the terminal, and I have no idea how to fix it.
Just add { useNewUrlParser: true } to the connection options:
var MongoClient = require('mongodb').MongoClient
// In the 3.x driver the connect callback receives a MongoClient, so get the db from it.
MongoClient.connect('mongodb://localhost:27017/animals', { useNewUrlParser: true }, function (err, client) {
  if (err) throw err
  var db = client.db('animals')
  // db.collection('mammals').find().toArray(function (err, result) {
  //   if (err) throw err
  //   console.log(result)
  // })
})
You have to install the MongoDB database server on your system first and start it.
If it is already installed:
Check whether the server is running, and try connecting with the mongo shell.
And if the server is running as well:
Then just add { useNewUrlParser: true } as mentioned by @Vaghani Janak.
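If you want to double-check from Node itself that the server is reachable once it has been started, here is a minimal ping sketch (assuming the 3.x driver and the default localhost:27017; an ECONNREFUSED here still means mongod is not running or is listening elsewhere):

var MongoClient = require('mongodb').MongoClient

MongoClient.connect('mongodb://localhost:27017/animals', { useNewUrlParser: true }, function (err, client) {
  if (err) {
    // ECONNREFUSED means nothing is listening on that host/port.
    console.error('Cannot reach MongoDB:', err.message)
    return
  }
  client.db('animals').command({ ping: 1 }, function (err, result) {
    if (err) throw err
    console.log('MongoDB is up:', result)
    client.close()
  })
})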
Related
When I try to connect to my MongoDB instance that requires SSL, my Node.js app crashes on the following method:
conn = await mongoose.connect(process.env.DB_HOST, {
  tlsCAFile: __dirname + '/ca-certificate.crt',
  useNewUrlParser: true,
  useUnifiedTopology: true
})
and I get the following error in stderr.log:
events.js:377
throw er; // Unhandled 'error' event
^
Error: read EINVAL
at Pipe.onStreamRead (internal/stream_base_commons.js:209:20)
Emitted 'error' event on Socket instance at:
at emitErrorNT (internal/streams/destroy.js:106:8)
at emitErrorCloseNT (internal/streams/destroy.js:74:3)
at processTicksAndRejections (internal/process/task_queues.js:82:21) {
errno: -22,
code: 'EINVAL',
syscall: 'read'
}
The interesting thing is that this works just fine on my local Windows machine, but crashes when deployed to A2Hosting shared hosting.
Also, I am able to connect successfully (even on A2Hosting) when connecting without mongoose, like so:
// MongoClient comes from 'mongodb'; uri is the same connection string.
const { MongoClient } = require('mongodb');
const client = new MongoClient(uri);
try {
  await client.connect();
  const db = client.db('egomenu');
  console.log('connected successfully');
} finally {
  await client.close();
}
I am using mongoose: ^6.3.1 and node: 14.20.1 on A2hosting.
I believe that the error is generated when trying to read the .crt file during connection; however, I cannot figure out what is causing it.
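To test that theory, this is roughly the check I could run on the host before connecting (a hypothetical diagnostic, not part of my app; it only confirms the certificate file itself is readable):

const fs = require('fs');

// If the path or file permissions are the problem, this should fail with a
// clearer error than the socket-level EINVAL.
try {
  const ca = fs.readFileSync(__dirname + '/ca-certificate.crt');
  console.log('CA file readable,', ca.length, 'bytes');
} catch (e) {
  console.error('Cannot read CA file:', e.message);
}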
Any help would be greatly appreciated :)
I am using a postgres database for my express web server.
I am using the 'pg' library to execute queries on this database.
Here is my connection method:
const { Client } = require('pg')

const db = new Client({
  user: 'xxx',
  host: 'xxx',
  database: 'xxx',
  password: 'xxx',
  port: xxx,
})

db.connect(err => {
  if (err) {
    console.error('connection error', err.stack)
  } else {
    console.log('connected')
  }
})
Then to execute a request I do this:
db.query(MY_REQUEST, function (err, data) {
  if (err) throw err;
  res.render('hello/world', {
    title: 'Hello',
    data: data.rows
  });
});
It all works perfectly. But after several minutes without using my website, my connection to the db times out, and I get the following error:
node:events:355
throw er; // Unhandled 'error' event
^
Error: Connection terminated unexpectedly
at Connection.<anonymous> (/usr/src/app/node_modules/pg/lib/client.js:132:73)
at Object.onceWrapper (node:events:484:28)
at Connection.emit (node:events:378:20)
at Socket.<anonymous> (/usr/src/app/node_modules/pg/lib/connection.js:58:12)
at Socket.emit (node:events:378:20)
at TCP.<anonymous> (node:net:665:12)
Emitted 'error' event on Client instance at:
at Client._handleErrorEvent (/usr/src/app/node_modules/pg/lib/client.js:319:10)
at Connection.<anonymous> (/usr/src/app/node_modules/pg/lib/client.js:149:16)
at Object.onceWrapper (node:events:484:28)
[... lines matching original stack trace ...]
at TCP.<anonymous> (node:net:665:12)
How can I reconnect automatically when the connection is cut or when a request fails?
You should attach an error handler in order to prevent the unhandled error from crashing your app. It's as simple as:
db.on('error', e => {
  console.error('DB error', e);
});
As to why the error happens, we would need more details; it looks like it could be a connection reset due to an idle timeout.
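If it is an idle timeout, one option worth trying (a sketch, not a drop-in fix: it swaps your single Client for a Pool, reusing the placeholder config from your question) is to let the pool check out a live connection per query:

const { Pool } = require('pg');

// Same placeholder config as in the question.
const pool = new Pool({
  user: 'xxx',
  host: 'xxx',
  database: 'xxx',
  password: 'xxx',
  port: 5432, // illustrative port
});

// Idle clients in the pool can still be dropped by the server, so keep an
// error handler to stop the unhandled 'error' event from crashing the process.
pool.on('error', e => console.error('idle client error', e));

// pool.query checks out a live client, runs the query, then releases the client.
pool.query('SELECT NOW()', (err, data) => {
  if (err) return console.error('query error', err);
  console.log(data.rows);
});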
You can create a function that checks whether you are connected to the database before you continue with your main function.
That middle function handles the connection status, reconnects if necessary, and so on; before you run any database-related function, call it first and wait for its result, and only then continue using the database.
Preferably, make that middle function async so it returns a promise, and await it wherever it is used; a sketch of the idea follows below.
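A minimal sketch of that middle function, assuming the pg Client from the question (the names ensureConnected and runQuery are illustrative):

const { Client } = require('pg');

let db = null;

// Middle function: make sure there is a live connection before any query.
async function ensureConnected() {
  if (db) return db;
  db = new Client({ /* user, host, database, password, port */ });
  db.on('error', err => {
    // The connection was dropped (e.g. idle timeout): forget the client so
    // the next call reconnects instead of crashing the process.
    console.error('DB error', err.stack);
    db = null;
  });
  await db.connect();
  return db;
}

// Usage: await the middle function, then query as usual.
async function runQuery(text, params) {
  const client = await ensureConnected();
  return client.query(text, params);
}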
I was forced to move DB clusters, so I have a new connection string; no actual code changes were made in the application.
When I try to start the application I get the error message below. I've included both my old and new connection strings. The cluster is running on MongoDB Atlas as an M10 cluster.
Old
mongodb+srv://<myusername>:<mypassword>#production-m0-<account ID?>.mongodb.net
New
mongodb+srv://<myusername>:<mypassword>#production.<account ID?>.mongodb.net
Error message/log
/opt/node_app/node_modules/mongodb/lib/topologies/replset.js:346
throw err;
^
MongoError: database names cannot contain the character '/'
at Function.create (/opt/node_app/node_modules/mongodb-core/lib/error.js:43:12)
at validateDatabaseName (/opt/node_app/node_modules/mongodb/lib/operations/db_ops.js:728:24)
at new Db (/opt/node_app/node_modules/mongodb/lib/db.js:182:3)
at MongoClient.db (/opt/node_app/node_modules/mongodb/lib/mongo_client.js:237:14)
at /opt/node_app/node_modules/mongoose/lib/connection.js:564:62
at /opt/node_app/node_modules/mongodb/lib/utils.js:410:17
at executeCallback (/opt/node_app/node_modules/mongodb/lib/utils.js:402:9)
at /opt/node_app/node_modules/mongodb/lib/operations/mongo_client_ops.js:286:5
at connectCallback (/opt/node_app/node_modules/mongodb/lib/operations/mongo_client_ops.js:265:5)
at /opt/node_app/node_modules/mongodb/lib/operations/mongo_client_ops.js:417:5
at ReplSet.connectHandler (/opt/node_app/node_modules/mongodb/lib/topologies/replset.js:343:9)
at Object.onceWrapper (events.js:300:26)
at ReplSet.emit (events.js:210:5)
at /opt/node_app/node_modules/mongodb-core/lib/topologies/replset.js:786:18
at processTicksAndRejections (internal/process/task_queues.js:75:11) {
driver: true,
name: 'MongoError',
[Symbol(mongoErrorContextSymbol)]: {}
}
Code used in application
const dbString = `${config.db}/mydb?retryWrites=true&w=majority`
mongoose.connect(dbString, { autoReconnect: true, useNewUrlParser: true }, (err) => {
  if (!err) console.log('MongoDB has connected successfully.')
});
EDIT: as requested, the combined URI
mongodb+srv://<myusername>:<mypassword>#production.<account ID?>.mongodb.net/mydb?retryWrites=true&w=majority
UPDATE/SOLVED ... the problem was that the pgpass module was looking for $HOME/.pgpass, and $HOME wasn't defined on Lambda, which made join(undefined, '.pgpass') fail. Not really worth crediting myself with an answer for that ....
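For anyone who hits the same thing, the workaround is roughly this (a sketch; /tmp is just an illustrative choice of directory):

// pgpass builds its lookup path from process.env.HOME, so give it a HOME
// before 'pg' tries to read .pgpass (e.g. at the top of the handler module).
if (!process.env.HOME) {
  process.env.HOME = '/tmp';
}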
I have the following test code in a lambda handler (compiled from typescript):
// Pool comes from 'pg'; connection details come from the environment (see below).
const { Pool } = require('pg')

const db = new Pool()
const { rows } = await db.query('select 1 as x')
console.log('rows', JSON.stringify(rows))
The connection is created via PGHOST, PGPORT, PGUSER and PGDATABASE in the environment. I know the configuration is "almost right" because the first time I tried it I got:
{"errorMessage":"error: no pg_hba.conf entry for host \"10.1.1.249\",
user \"foo\", database \"bar\", SSL off\n at
Connection.parseE (/var/task/handler.js:9209:11)\n at
Connection.parseMessage (/var/task/handler.js:9034:19)\n at
Socket. (/var/task/handler.js:8777:22)\n at emitOne
(events.js:96:13)\n at Socket.emit (events.js:188:7)\n at
readableAddChunk (_stream_readable.js:176:18)\n at
Socket.Readable.push (_stream_readable.js:134:10)\n at TCP.onread
(net.js:547:20)"}
That seemed hopeful. I changed the Postgres config to allow connections from the subnet, and tried again. However, now I get:
2018-02-06 18:14:21.183 (-05:00) 76761ca5-0b93-11e8-8783-a74d098c9f4a select
2018-02-06 18:14:21.202 (-05:00) 76761ca5-0b93-11e8-8783-a74d098c9f4a TypeError: Path must be a string. Received undefined
at assertPath (path.js:7:11)
at Object.join (path.js:1211:7)
at Object.module.exports.getFileName (/var/task/handler.js:32434:16)
at module.exports (/var/task/handler.js:32355:23)
at Connection.<anonymous> (/var/task/handler.js:31255:9)
at emitOne (events.js:96:13)
at Connection.emit (events.js:188:7)
at Socket.<anonymous> (/var/task/handler.js:8781:12)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
END RequestId: 76761ca5-0b93-11e8-8783-a74d098c9f4a
What is going on? It seemed as if it had got to the database and connected the first time... Now it fails on the query. Is there some option I should be using?
First, you are not calling pool.connect to acquire a client from the pool.
Second, the current Node.js runtime 6.10 on AWS Lambda does not support async functions. (Unless you transpile your code down to ES5 using Babel directly or some other boilerplate that uses Babel such as es2017-lambda-boilerplate)
Having said that, you'll have to use either callbacks or promises (then/catch). For example:
const { Pool } = require("pg");

const db = new Pool({
  host: "localhost",
  user: "database-user",
  // ... rest of config options
});

db.connect((err, client, release) => {
  if (err) {
    console.error("Error acquiring client.", err.stack);
  } else {
    client.query("SELECT 1 AS x", (err, result) => {
      release();
      if (err) {
        console.error("Error executing query.", err.stack);
      } else {
        console.log(result.rows);
      }
    })
  }
})
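And the same flow with promises (then/catch), using the same db pool as above:

db.connect()
  .then((client) => {
    return client
      .query("SELECT 1 AS x")
      .then((result) => {
        client.release();
        console.log(result.rows);
      })
      .catch((err) => {
        client.release();
        console.error("Error executing query.", err.stack);
      });
  })
  .catch((err) => console.error("Error acquiring client.", err.stack));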
I am trying to read from a slave node but I get the following error.
MongoError: not master
at Function.MongoError.create (node_modules/mongodb-core/lib/error.js:31:11)
at node_modules/mongodb-core/lib/topologies/server.js:777:66
at Callbacks.emit (node_modules/mongodb-core/lib/topologies/server.js:95:3)
at null.messageHandler (node_modules/mongodb-core/lib/topologies/server.js:249:23)
at Socket.<anonymous> (node_modules/mongodb-core/lib/connection/connection.js:265:22)
at emitOne (events.js:78:13)
at Socket.emit (events.js:170:7)
at readableAddChunk (_stream_readable.js:147:16)
at Socket.Readable.push (_stream_readable.js:111:10)
at TCP.onread (net.js:524:20)
Here is my code
var MongoClient = require('mongodb').MongoClient
MongoClient.connect('mongodb://slave.example.com/?slaveOk=true', { slaveOk: true }, (err, db) => {
  if (err) { throw err }
  d = db.db('order', { server: { slaveOk: true } })
  d.eval('rs.slaveOk(); 1', (err, data) => {
    if (err) { throw err }
    console.log(data);
  })
})
I am providing the complete list of hosts in the connection string because the node that is running this query only has access to the secondary MongoDB nodes.
When using the command line interface I am able to make queries as long as I run rs.slaveOk() before executing my query.
You can simply use the code below:
var collection1 = db.collection(currentCollection, { readPreference: 'secondaryPreferred' });
For more details, check Scaling Read Query Load.
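If you want every read on the connection to prefer a secondary, the read preference can also go in the connection string (a sketch using the host and database from the question; the 'orders' collection name is illustrative):

var MongoClient = require('mongodb').MongoClient

// Read preference is set once in the URI instead of per collection.
MongoClient.connect('mongodb://slave.example.com/order?readPreference=secondaryPreferred', (err, db) => {
  if (err) { throw err }
  db.collection('orders').find().toArray((err, docs) => {
    if (err) { throw err }
    console.log(docs)
    db.close()
  })
})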