My problem is that I can't retrieve data from my MongoDB database, and I don't know why.
I am probably doing something wrong; here is a little sample which doesn't work.
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;
var db = new Db('akemichat', new Server('localhost', 27017), {w:1});
db.open(function (err, p_db) {
    db = p_db;
});
db.collection('rooms', function (err, collection) {
    if (!err) {
        collection.find().toArray(function(err, items) {
            items.forEach(function(room) {
                console.log('hello'); // Never called...
            });
        });
    } else {
        console.log(err);
    }
});
Notice that I have data in my database, as the following shows:
➜ akemichat git:(master) ✗ mongo
MongoDB shell version: 2.4.7
connecting to: test
> use akemichat
switched to db akemichat
> db.rooms.find()
{ "name" : "home", "_id" : ObjectId("527008e850305d1b7d000001") }
Thanks for your help!
Note: the example program never ends, and I don't know why. Maybe it's because the connection is never closed, but if I call db.close() in the toArray callback, it will never be reached because the callback never fires.
So many things in Node are asynchronous. Your connection finishes opening after you have already tried to read from your collection.
You should query the collection only after you know for sure that you are connected. Down and dirty:
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;
var db = new Db('akemichat', new Server('localhost', 27017), {w:1});
db.open(function (err, p_db) {
    db = p_db;
    db.collection('rooms', function (err, collection) {
        if (!err) {
            collection.find().toArray(function(err, items) {
                items.forEach(function(room) {
                    console.log('hello'); // Now fires for each room
                });
            });
        } else {
            console.log(err);
        }
    });
});
I ran this locally and got the "hello" message back. Also, your script never finishes because the Node process keeps running until it is closed or crashes; this is by design. It also means that you don't have to keep opening and closing your Mongo connections: you can open a connection when your application starts and close it when your application shuts down, as in the sketch below.
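Here is a minimal sketch of that pattern using the same driver API as above; the SIGINT shutdown hook and the reuse of the rooms collection are illustrative assumptions, not part of the original code:
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;

var db = new Db('akemichat', new Server('localhost', 27017), {w:1});

// Open the connection once when the application starts...
db.open(function (err, openedDb) {
    if (err) throw err;
    db = openedDb;

    // ...then reuse `db` for every query for the lifetime of the process.
    db.collection('rooms', function (err, collection) {
        if (err) return console.log(err);
        collection.find().toArray(function (err, items) {
            console.log(items);
        });
    });
});

// ...and close it only when the application shuts down.
process.on('SIGINT', function () {
    db.close();
    process.exit(0);
});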
Related
In my code below I wish to receive an "order" from the feed and store it in the database.
I understand that the class method marketEvent will be called whenever data is received and accordingly I would need my insert statement within this function.
It would be inefficient to open and close the db connection each time the class method is called, so I want to open the connection once and pass the db object through to marketEvent.
I am new to Node.js and WebSockets, so I can't work out how to put it together.
var pg = require("pg");
var conString = "postgres://myusername:mypassword@localhost/poloniex";
var client = new pg.Client(conString);
var autobahn = require('autobahn');
var wsuri = "wss://api.poloniex.com";
var connection = new autobahn.Connection({
    url: wsuri,
    realm: "realm1"
});
connection.onopen = function (session) {
    function marketEvent (args, kwargs) {
        client.query("INSERT INTO orderbook(order) values($1)", [args]);
    }
    session.subscribe('BTC_XMR', marketEvent);
};
connection.onclose = function () {
    console.log("Websocket connection closed");
};
client.connect();
connection.open();
I am not familiar with either "pg" or "autobahn", but from the documentation of the "pg" package, may I offer you a hint at a solution.
client.connect() is not meant to be an isolated call; it expects a callback to carry out the operation once the connection has been established:
connection.onopen = function (session) {
    function marketEvent (args, kwargs) {
        client.connect(function(err) {
            if (err) throw err;
            client.query("INSERT ...", [args]);
        });
    }
    session.subscribe('BTC_XMR', marketEvent);
};
As for using the connection efficiently, pooling is probably what you are asking for:
var pool = new pg.Pool(config);
pool.connect(function(err, client, done) {
    if (err) throw err;
    client.query('INSERT ...', function(err, result) {
        // call `done()` to release the client back to the pool
        done();
    });
});
Please refer to the original source of this suggestion for the exact usage:
https://www.npmjs.com/package/pg
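To tie this back to your marketEvent handler, here is a rough sketch of the wiring with a pool created once at startup. The pool config values, the JSON serialization of args, and the quoting of the reserved word "order" are my assumptions about your setup, not something taken from your code or the pg docs:
var pg = require('pg');
var autobahn = require('autobahn');

// Create the pool once when the app starts (connection details assumed).
var pool = new pg.Pool({
    host: 'localhost',
    database: 'poloniex',
    user: 'myusername',
    password: 'mypassword'
});

var connection = new autobahn.Connection({
    url: 'wss://api.poloniex.com',
    realm: 'realm1'
});

connection.onopen = function (session) {
    function marketEvent(args, kwargs) {
        // Check a client out of the pool for each event and release it afterwards.
        pool.connect(function (err, client, done) {
            if (err) return console.error(err);
            // "order" is quoted because it is a reserved word in SQL;
            // serializing args to JSON is an assumption about the column type.
            client.query('INSERT INTO orderbook("order") VALUES ($1)',
                         [JSON.stringify(args)],
                         function (err) {
                done();
                if (err) console.error(err);
            });
        });
    }
    session.subscribe('BTC_XMR', marketEvent);
};

connection.open();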
I have built an event API in PHP using Cassandra and phpcassa.
Recently, I wrote a Node.js + helenus replacement of the same API. After I finished, I started benchmarking the Node.js code with the ab tool. However, after 1000+ inserts into Cassandra, the connection can be lost and the failover code kicks in. I'm inserting into Cassandra asynchronously as each request occurs.
I instantiate the ConnectionPool object when the Node app starts. However, I call the pool.connect() function in each request.
Does pool.connect() start a new connection, or does it bind to the existing connections of the ConnectionPool object?
Notes:
In phpcassa, I didn't have such problems, probably because I'm inserting into Cassandra synchronously.
I'm using 5 Cassandra nodes.
Yes, pool.connect() creates a new connection to the pool every time it's called, so you usually only have to call it once when the application starts, and then pass that connection to any module or method that needs it.
I usually use a module to do this:
module.exports = function(app, cb) {
    var domain = require('domain'),
        d = domain.create(),
        helenus = require('helenus');
    d.run(function() {
        var pool = new helenus.ConnectionPool({
            hosts      : ['localhost:9160'],
            user       : "",
            password   : "",
            keyspace   : 'something',
            cqlVersion : '3.0.0'
        });
        pool.connect(function(err, keyspace) {
            if (err) {
                cb(err, null);
            } else {
                cb(null, pool);
            }
        });
        pool.on('error', function(err) {
            cb(err, null);
        });
    });
    d.on('error', function(err) {
        console.log('error', err.stack);
        cb(err, null);
    });
};
and call it in my app with something like this (really simplified):
var express = require('express');
var app = express();
var database = require('./db');
database(app, function(err, conn) {
    if (err == null) {
        // connected, do stuff
        conn.cql("SELECT * FROM table WHERE KEY = ?", [what], function(err, result) {
            if (err == null) {
                // get result
            }
        });
    }
});
I've used this with millions of records, with continuous inserts and a large number of lookups, without any issue; Cassandra performs better for me than any other DB I've tried.
I am starting to use AppFog services in order to host a Node application.
I am having trouble trying to use MongoDB in my application.
In your tutorial here: https://docs.appfog.com/services/mongodb#walkthrough it says to connect to MongoDB like this:
require('mongodb').connect(mongourl, ...
where mongourl is the URL generated by the generate_mongo_url function.
The problem is that I am using a newer API (I think) and I cannot pass a URL to the open method. This is how I am using MongoDB:
var mongoClient = new MongoClient(new Server('localhost', 27017));
mongoClient.open(function(err, mongoClient) { ...
Where and how can I use the generated mongourl? How can I pass the credentials and the mongo variable used in the generate_mongo_url function?
UPDATE
Following @mjhm's suggestion, this is my open function:
var mongoService = null;
if (process.env.VCAP_SERVICES) {
    var env = JSON.parse(process.env.VCAP_SERVICES);
    mongoService = env["mongodb-1.8"][0]["credentials"];
} else {
    mongoService = {
        "hostname": "localhost",
        "port": 27017,
        "isLocal": true,
        "username": "",
        "password": "",
        "name": ""
    };
}

this.mongoClient.open(function(err, mongoClient) {
    if (!err) {
        console.log("Open DB Success");
        var db = mongoClient.db(DB_NAME);
        if (!mongoService.isLocal) {
            db.authenticate(mongoService.username,
                            mongoService.password, function (err, result) {
                if (!err) {
                    console.log("Authenticate DB Success");
                    doAction();
                } else {
                    console.log("Authenticate DB Error: " + err);
                }
            });
        } else {
            doAction();
        }
    } else {
        console.log("Open DB Error: " + err);
    }
});
When I run this code on AppFog, I wait a long time (more than 20 seconds) and then I get:
$ curl myappname.eu01.aws.af.cm/list
curl: (52) Empty reply from server
Any idea what is wrong?
What you are looking for is the MongoClient.connect function
http://mongodb.github.com/node-mongodb-native/driver-articles/mongoclient.html#mongoclient-connect
It takes the URL you are talking about. For example:
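Here is a minimal sketch of what that looks like; the URL, database name, and collection name below are placeholders standing in for whatever generate_mongo_url produces for your AppFog service:
var MongoClient = require('mongodb').MongoClient;

// Placeholder URL; on AppFog it would be the one built from the
// VCAP_SERVICES credentials, e.g. "mongodb://user:pass@host:port/dbname".
var mongoUrl = 'mongodb://localhost:27017/test';

MongoClient.connect(mongoUrl, function (err, db) {
    if (err) return console.log('Open DB Error: ' + err);
    // The returned db is already connected (and authenticated, if the URL
    // carries credentials), so you can query right away.
    db.collection('mycollection').find().toArray(function (err, items) {
        console.log(items);
        db.close();
    });
});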
The URL your client/driver was connecting to was hard-coded to 'localhost'. I replaced it with a variable, mongoUrl:
var mongoClient = new MongoClient(new Server(mongoUrl, 27017));
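In case it helps, here is a rough sketch of how that mongoUrl (and the rest of the connection details) could be derived from the mongoService credentials object shown in the question's update; the exact credential field names and the db/name fallback are assumptions:
var Server = require('mongodb').Server,
    MongoClient = require('mongodb').MongoClient;

// mongoService is the credentials object from the question's update:
// parsed from VCAP_SERVICES on AppFog, with a localhost fallback.
var mongoService = process.env.VCAP_SERVICES
    ? JSON.parse(process.env.VCAP_SERVICES)["mongodb-1.8"][0]["credentials"]
    : { hostname: "localhost", port: 27017, isLocal: true,
        username: "", password: "", name: "test" };

var mongoUrl = mongoService.hostname;
var mongoClient = new MongoClient(new Server(mongoUrl, mongoService.port));

mongoClient.open(function (err, mongoClient) {
    if (err) return console.log('Open DB Error: ' + err);
    var db = mongoClient.db(mongoService.db || mongoService.name);
    // Remote services still need authentication against the database itself.
    if (!mongoService.isLocal) {
        db.authenticate(mongoService.username, mongoService.password, function (err) {
            if (err) return console.log('Authenticate DB Error: ' + err);
            // ready to query
        });
    }
});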
You need to authenticate after opening the database. The way to think of it is that authentication happens against the database, not the connection, so, as you discovered, the generate_mongo_url function isn't very useful.
For example:
var mongoClient = new MongoClient(new Server('localhost', 27017));
mongoClient.open(function(err, mongoClient) {
    var db = mongoClient.db('test');
    db.authenticate('me', 'mypwd', function (err, result) {
        var coll = db.collection('query_example3');
        coll.find().toArray(function(err, result) {
            console.log(result);
            process.exit(0);
        });
    });
});
I'm a big Node.js and Mongo newbie, so please be gentle.
So here's my Node.js app:
var mongo = require('mongodb');
var Server = mongo.Server;
var Db = mongo.Db;
var server = new Server('hostname.mongolab.com', 666, {auto_reconnect : true}, {w:0, native_parser: false});
var db = new Db('dbName', server, {safe:true});

db.open(function(err, client) {
    if(err) { return console.dir(err); }
    client.authenticate('mongolabUser', 'mongolabUserPassword', function(authErr, success) {
        if(authErr) { return console.dir(authErr); }
        var stream = client.collection('myCollection').find({}).stream();
        stream.on('data', function(item) { console.log("Do something with item"); });
        stream.on('end', function() { console.log("Empty!"); });
    });
    db.close();
});
Through prodigious use of debugger statements, I've come to the conclusion that client.authenticate doesn't seem to run. It looks like it's about to execute that line, but then it just leapfrogs over it and goes straight to db.close().
But that's just the first of my problems. At some point earlier, I was able to connect to the database and authenticate, but my user was not retrieving anything with the find({}) command. I tried all sorts of ways, and streams are my latest attempt before deciding to give up on it for now.
MongoLab seems to be on v2.0.7; my mongo installation is v2.2.1. When I use the command line tool to log in as mongolabUser and execute a command like db.myCollection.find(), I get everything in my collection, so it can't be an issue with permissions.
Any advice/suggestions?
client.authenticate() is asynchronous, so the line that calls it starts the authentication, but doesn't wait for the server to respond before moving on to executing the next line, db.close(). So by the time the server responds the connection has been closed by the client.
Does moving the db.close() inside the event handler for stream.end help?
var mongo = require('mongodb');
var Server = mongo.Server;
var Db = mongo.Db;
var server = new Server('hostname.mongolab.com', 666, {auto_reconnect : true}, {w:0, native_parser: false});
var db = new Db('dbName', server, {safe:true});

db.open(function(err, client) {
    if(err) { return console.dir(err); }
    client.authenticate('mongolabUser', 'mongolabUserPassword', function(authErr, success) {
        if(authErr) { return console.dir(authErr); }
        var stream = client.collection('myCollection').find({}).stream();
        stream.on('data', function(item) { console.log("Do something with item"); });
        stream.on('end', function() {
            console.log("Empty!");
            db.close();
        });
    });
});
I am trying to read from MongoDB and print the contents to a webpage. I am using the mongodb module to read from Mongo.
I am able to successfully read and print the data to a webpage but I am not able to figure out when to close the db and when to end the http connection. Hence my webpage prints results but keeps waiting for the server to send something.
I referred to the following questions but can't understand what I need to do in this specific scenario:
Looking for help with reading from MongoDB in Node.JS
When to close MongoDB database connection in Nodejs
How to close all connections to the MongoDB server
Here is my code:
/* Opens the secondary collection and goes through each entry */
var getClientIDs = function(collect, res) {
    db.collection(collect, function(err, collection) {
        var cursor = collection.find();
        cursor.each(function(err, item) {
            if (item != null) {
                console.log(item['_id'] + "\t" + item['name']);
                res.write(item['_id'].toString());
                res.write(" ");
                res.write(item['name'].toString());
                res.write("</br>");
            }
            /* else {
                res.end("The End");
                db.close();
            } Closes connection before other stuff is done. */
        });
    });
};

/* Opens the main collection and goes through each entry */
var openCollection = function(collect, res) {
    console.log(green);
    // Establish connection to db
    db.open(function(err, db) {
        // Open a collection
        db.collection(collect, function(err, collection) {
            // Create a cursor
            var cursor = collection.find();
            // Execute the each command, triggers for each document
            cursor.each(function(err, item) {
                if (item != null) {
                    getClientIDs(item['_id'], res);
                }
                /* else {
                    db.close();
                } This closes the connection before other stuff is done */
            });
        });
    });
};

/* Start Here */
var http = require('http');
var port = 8888;
http.createServer(function (req, res) {
    res.writeHead(200, {"Content-Type": "text/html; charset=utf-8"});
    openCollection('company', res);
}).listen(port);
The way the db is set up, there is a collection called 'company' which has a bunch of IDs in it, and there are other collections named after each of those IDs:
company = {{ _id: 'A001' }
           { _id: 'A002' }
           { _id: 'A003' }}

A001 = {{_id: "A001-01", "name":"foo"}
        {_id: "A001-02", "name":"bar"}}

A002 = {{_id: "A002-01", "name":"foo2"}
        {_id: "A002-02", "name":"bar2"}}
I did not create the collections this way; this is what I had to work with, and my task is to create a script which just prints the IDs and names on a webpage.
With my code, the webpage prints:
A001-01 foo
A001-02 bar
A002-01 foo2
A002-02 bar2
Thank you.
When you open a MongoDB connection with the native driver, you're actually opening a pool of 5 connections (by default). So it's best to open that pool when your app starts and just leave it open rather than open and close the pool on each request.
You're on the right track with closing out your HTTP response; just call res.end(); when the response is complete (i.e. when item is null in the cursor.each callback).
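Here is a rough sketch of both ideas applied to the code above: the connection pool is opened once before the server starts listening, and res.end() is called only after the last nested query has written its results. The use of toArray instead of cursor.each, and the pending counter used to know when all of the secondary collections are done, are my own simplifications rather than the only way to do it:
var http = require('http');
var mongo = require('mongodb');

var server = new mongo.Server('localhost', 27017, {auto_reconnect: true});
var db = new mongo.Db('mydb', server, {safe: true}); // db name is a placeholder

// Open the connection pool once, then start accepting requests.
db.open(function (err, db) {
    if (err) throw err;

    http.createServer(function (req, res) {
        res.writeHead(200, {"Content-Type": "text/html; charset=utf-8"});

        db.collection('company', function (err, collection) {
            collection.find().toArray(function (err, companies) {
                var pending = companies.length;
                if (pending === 0) return res.end();

                companies.forEach(function (companyDoc) {
                    db.collection(companyDoc._id, function (err, secondary) {
                        secondary.find().toArray(function (err, items) {
                            items.forEach(function (item) {
                                res.write(item._id + " " + item.name + "<br/>");
                            });
                            // End the response only once the last secondary
                            // collection has been written out.
                            if (--pending === 0) res.end();
                        });
                    });
                });
            });
        });
    }).listen(8888);
});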