Executing a find query to my MongoDB DB seems to reset the connection and make the node server crash.
My server handles socket events like this:
// Registers per-socket handlers.
// NOTE(review): this opens a NEW MongoDB connection for every socket that
// connects and never closes it — connect once at startup and share the
// db handle instead.
io.sockets.on('connection', function(socket) {
MongoClient.connect(url, function(err, db) {
// NOTE(review): no `return` after the error branch, so the rest of the
// callback still runs (and db is undefined) when the connect failed.
if (err)
console.log('Error');
console.log('Connection established to', url);
var collection = db.collection('spedizioni');
socket.on('adminReq', function() {
handlers.handleAdmin(collection, socket);
});
});
});
the handleAdmin function is:
// Emits the contents of the collection to the requesting admin socket.
// NOTE(review): collection.find(...) hands its callback a Cursor, not the
// documents — the `raw` logged below is the cursor object, and trying to
// serialize its circular structure over socket.io is what produces the
// "maximum call stack exceeded" crash. Use find().toArray(cb) instead.
function handleAdmin(collection, socket) {
console.log('Admin event');
collection.find(null, function(err, raw) {
console.log('Find function');
console.log(raw);
if (err){
socket.emit('err');
console.log('Error function');
}
// NOTE(review): missing `return` after the error branch — on error this
// still falls through and emits 'adminRes'/'adminNull' as well.
if (raw) {
socket.emit('adminRes', raw);
console.log('Response function');
} else {
socket.emit('adminNull');
console.log('Null function');
}
});
}
I want the query to return all items on the database; as per the MongoDB manual, I do that by executing a find query without a parameter.
I tried omitting null or using {} as first parameter but nothing changes.
Upon pressing the button that generates the adminReq event, the 'Connection to DB' string is printed to the console, and the Firefox console signals that a NEW socket connection was established; my client script connects once at document.load.
Below is the node console output after that; as you can see the query is executed; looking at the 'raw' output it seems failed attempts were made.
err is null and there is nothing else to output.
Looking at other answers about the 'maximum call stack' exceeded it seems it is caused by a recursive function usually, but it's not the case here.
http://pastebin.com/0xv1qcHn
Why is this the output and not the query result? Why is the connection reset?
A very similar function is working fine, and the syntax for returning the whole DB seems correct; it feels like I am missing something very obvious...
I am not sure whether you can use null, but I think an empty object should work.
you need to convert your result into an array
// Fetch every document and hand the materialised array to the socket.
// toArray() drains the cursor, so `raw` is a plain array that socket.io
// can serialize (emitting the cursor itself crashes the serializer).
collection.find({}).toArray(function(err, raw) {
  console.log('Find function');
  console.log(raw);
  if (err) {
    socket.emit('err');
    console.log('Error function');
    return; // don't fall through and emit a second event on error
  }
  if (raw) {
    socket.emit('adminRes', raw);
    console.log('Response function');
  } else {
    socket.emit('adminNull');
    console.log('Null function');
  }
});
Related
I have a node application that makes a call to mongoDB every 10 seconds, but looking at the output in my terminal, the connections just keep counting up and never seem to close:
My code to hit the db every 10 seconds:
const MongoClient = require("mongodb").MongoClient
// Polls the database every 10 seconds.
// NOTE(review): each tick opens a brand-new MongoClient connection and
// never calls client.close(), which is why the server-side connection
// count grows without bound — connect once, then poll (see answer below).
setInterval(function(){
MongoClient.connect(uri, (err, client) => {
// NOTE(review): missing `return` — on error the code below still runs.
if (err){
console.log(err);
}
// Implicit global `database` (no var/let/const) — shared with getData().
database = client.db(databaseName)
getData(function(data){
if(data.length > 0){
// NOTE(review): db_response and params are implicit globals here —
// presumably declared elsewhere; confirm.
db_response = data;
params["fieldA"] = db_response[0]['fieldA'];
}
})
})
}, 10000)
// Runs a fixed query against the shared `database` handle and passes the
// resulting array to `callback`.
// NOTE(review): `throw err` inside an async driver callback cannot be
// caught by any caller's try/catch — it will crash the process.
function getData(callback){
var query = { fieldA: "foo" };
database.collection(CollectionName).find(query).toArray(function(err, result){
if (err){
throw err;
}
callback(result);
})
}
(The vars uri, CollectionName and databaseName are declared earlier.) I guess what I need to do (and haven't yet figured out) is to connect to the DB once when the server starts, and then run the getData() function on successful connection. Does that mean the database variable needs to be a global var?
As you correctly identified, you only need to create your db connection once. So rather than wrapping the db connection creation with setInterval, wrap setInterval around the only function you want to repeat, in this case getData.
On your other question, the database variable doesn't need to global but you are right getData does need to use it. Therefore pass it as an argument along with your callback function.
If you want to close your connection use client.close(); inside MongoClient.connect
const MongoClient = require("mongodb").MongoClient
// Connect once at startup, then reuse the same client for every poll.
MongoClient.connect(uri, (err, client) => {
  if (err) {
    console.log(err);
    // Bail out: without this `return`, `client` is undefined below and
    // client.db() would throw.
    return;
  }
  const database = client.db(databaseName);
  // Re-run the query every 10 seconds over the single shared connection.
  setInterval(function () {
    getData(database, function (data) {
      if (data.length > 0) {
        // NOTE(review): db_response/params are implicit globals here —
        // presumably declared in the caller's scope; confirm.
        db_response = data;
        params["fieldA"] = db_response[0]['fieldA'];
      }
    });
  }, 10000);
});
// Queries the given db handle and passes the resulting array to `callback`.
// Taking `db` as a parameter (instead of a global) lets the single shared
// connection created above be reused for every poll.
// NOTE(review): `throw err` inside an async driver callback is uncatchable
// by callers and will crash the process.
function getData(db, callback){
var query = { fieldA: "foo" };
db.collection(CollectionName).find(query).toArray(function(err, result){
if (err){
throw err;
}
callback(result);
})
}
The 'GetUsers' function is running and it returns the correct values as expected. However, res.send(users) is running before the nested function has completed with the value so it is undefined. I have tried doing the res.send(users) from inside the nested function but it doesn't recognize the function. I have also tried using return users but can't get it to work. Any help would be appreciated!
// Express route intended to return all users from MongoDB.
// NOTE(review): two defects, both covered by the answers below —
//  1. the toArray callback names its result `res`, shadowing Express's
//     response object inside that callback;
//  2. res.send(users) at the bottom runs before the async query
//     finishes, so it always sends the initial empty string.
app.get('/GetUsers', function (req, res){
var url = 'mongodb://localhost:27017/users';
var users = "";
MongoClient.connect(url, function (err, db) {
if (err) {
console.log('Unable to connect to the mongoDB server. Error:', err);
} else {
console.log('Connection established to', url);
// Get the documents collection
var collection = db.collection('users');
// Get some users
// NOTE(review): this `res` parameter shadows the Express `res` above.
collection.find().toArray(function (err, res) {
if (err) {
console.log(err);
} else {
console.log('Got Information from db.');
}
//Close connection
db.close();
console.log('db closed.');
users = res;
console.log(users);
});
}
});
// NOTE(review): executes immediately, before toArray's callback fires.
res.send(users);
});
Just move res.send(users) inside your callback before/after you call console.log(users).
You reused the variable res in the toArray call, so please rename it there: ...toArray(function (err, res)... to ...toArray(function (err, result)... and also in users = res; to users = result;.
this is async call it will send the response and will not wait for the database connection function.
write res.send in call back function
write your res.send function where you are closing your data base connection.
here is detail How do I return the response from an asynchronous call?
Found the answer!!!
I needed to use res.send(users) in my callback after I had closed the database as pointed out by the users suggestions above. HOWEVER, that alone wasn't working as res.send(users) was erroring as it wasn't recognized as a function. I had to use res.send({'matched':users}) and then access my response objects on the client side like this: response.matched.objects etc.
Thanks for your help everyone!
i have kept insert & update code in 2 different files and based on condition
Insert should always execute first and then update, but somehow update executes first, then insert.
test.js : simplified code
i am using these packages :pg , uuid
// Simplified repro: an INSERT and an UPDATE fired back-to-back.
// NOTE(review): the two pg.connect() calls check out two different pooled
// clients and run concurrently — there is no ordering guarantee between
// the queries, which is exactly the behaviour observed in the output.
var pg = require('pg');
var uuid = require('node-uuid').v4;
var id = uuid().toString();
// NOTE(review): the '#' before the host is presumably a mangled '@'
// (user:password@host) — TODO confirm.
var conString = 'postgres://postgres:pass#127.0.0.1:5432/testdb';
// ------INSERT
pg.connect(conString, function(err, client, done) {
console.log('Executing Insert query');
client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)',[id,'hello','no'], function(err, result) {
done();
if(err) { return console.error('error running query', err); }
console.log('finished executing Insert query');
});
});
// ------UPDATE
pg.connect(conString, function(err, client, done) {
console.log('Executing update query');
client.query("update testdb set iscancelled = 'yes' where id = $1",[id], function(err, result) {
done();
if(err) { return console.error('error running query', err); }
console.log('finished executing Update query');
});
});
output
tom#tom:~$node test.js
Executing Insert query
Executing update query
finished executing Update query //WHY UPDATE FINISHES FIRST
finished executing Insert query
Note :
this problem could easily be solved by using async. But my insert code and update code are in different files, and the update code only runs in certain situations, so I don't want to use async.
Problem
Even though Insert query goes to execute first why does update finishes first in output
am i missing any thing ..?
Lets solve this question step by step
you stated that you "don't want to use async" libraries
solution 1 :
If PostgreSQL completes the update faster, the update will return its result before the insert. If you want the update query to start executing only after the insert has finished, then
you should set connection pool capacity to 1.
// Cap the pg pool at a single client so every query shares one connection
// and therefore executes strictly in sequence (must run before pg.connect).
pg.defaults.poolSize = 1
but you should do this before any pg.connect()
The connect method retrieves a Client from the client pool, or if all pooled clients are busy and the pool is not full, the connect method will create a new client passing its first argument directly to the Client constructor. In either case, your supplied callback will only be called when the Client is ready to issue queries or an error is encountered. The callback will be called once and only once for each invocation of connect.
Conclusion: your queries will execute in sequence. BUT this solution is BAD for scaling the app, as there will only ever be one connection serving all users. So while that one connection is serving one user, the other users have to wait for a response.
Solution 2 :
you also stated "i have kept insert & update code in 2 different files"
it looks like you need to design your code in such a way that you are able to use asynchronous libraries; that solves this problem
As I already mentioned, the only way to ensure that update function will be fired only after insert function is done, is to call it inside of insert function callback. That are the basics of asynchronous programming.
// Chain the UPDATE inside the INSERT's success path so it can only start
// after the insert has completed.
pg.connect(conString, function(err, client, done) {
  console.log('Executing Insert query');
  client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)',[id,'hello','no'], function(err, result) {
    done();
    if(err) { return console.error('error running query', err); }
    console.log('finished executing Insert query');
    // ------UPDATE
    pg.connect(conString, function(err, client, done) {
      console.log('Executing update query');
      client.query("update testdb set iscancelled = 'yes' where id = $1",[id], function(err, result) {
        done();
        if(err) { return console.error('error running query', err); }
        console.log('finished executing Update query');
      });
    });
  });
}); // closes the outer pg.connect callback — missing in the original snippet
You are missing the asynchronous nature of the pg.connect and also client.query. The call to these return a callback which passes the control to next expression before the completion of execution and hence non-blocking nature of nodejs. If you want to assure the correct flow, either call the successive ones inside the callback success
var pg = require('pg');
var uuid = require('node-uuid').v4;
var id = uuid().toString();
// ------INSERT
// NOTE(review): this sketches a separate module file; a bare top-level
// `return` only works inside a CommonJS module wrapper — presumably
// `module.exports = pg.connect` was intended. TODO confirm.
return pg.connect;
// ------UPDATE
// NOTE(review): same placeholder as above, for the update module file.
return pg.connect;
// your calling file
var insert = require('/path/to/insertfile');
var conString = 'postgres://postgres:pass#127.0.0.1:5432/testdb';
var update = require('/path/to/updatefile');
insert(conString, function (err, client, done) {
console.log('Executing Insert query');
client.query('insert into testdb (id,data,iscancelled) values ($1,$2,$3)',[id,'hello','no'], function (err, result) {
if (err) {
return console.error('error running query', err);
}
console.log('finished executing Insert query');
// The update starts only from the insert's success path, which is what
// guarantees the required insert-then-update ordering.
update(conString, function (error, client, done) {
console.log('Executing update query');
client.query("update testdb set iscancelled = 'yes' where id = $1",[id], function (err, result) {
if (err) {
return console.error('error running query', err);
}
console.log('finished executing Update query');
done();
});
});
// Releases the insert's pooled client back to the pool.
done();
});
});
But this is very prone to callback hell. So consider making all async call return a promise. Take a look at bluebird. If you want an ORM that has built in promise based call, you can take a look at sequelize. It might be handy for you.
It has syntax as easy as:
var Model1 = require('/path/to/model1');
var Model2 = require('/path/to/model2');

// Record to insert via the first model.
var insertObj = {
"someKey": "value"
};

// Insert, then look up the row to change, then apply the update — each
// step starts only once the previous promise has resolved.
Model1.create(insertObj)
.then(function (created) {
var lookup = {
where: {
"filter": "filterValue"
}
};
return Model2.findOne(lookup);
})
.then(function (doc) {
var changes = {
"fieldToUpdate": "value"
};
return doc.update(changes);
})
.then(null, function (err) {
// Single rejection handler for the whole chain.
console.log(err);
});
Trying to drop all docs in all collections before unit tests run...
// Intended: empty every collection before the unit tests run.
// NOTE(review): as pasted, the async.eachSeries(...) call is never closed —
// there is no final `});` nor a completion callback argument.
var collections = mongoose.connection.collections;
async.eachSeries(_.keys(collections), function(key, cb){
collections[key].remove(function(){
//Never gets here, doesn't drop the collection and doesn't error.
cb();
});
}
But the callback in remove() never gets fired.
I've logged out collections[key] and it does resolve to a collection.
No errors, but it times out as it never runs the callback.
Ive tried looping the Models as well and calling that remove but its the same issue.
What am I doing wrong here?? Any logs I could look at?
You could try using the drop method:
var async = require("async");
var _ = require("underscore");

// Drop every known collection in parallel; a missing namespace
// ("ns not found") counts as already dropped, not as a failure.
var collections = _.keys(mongoose.connection.collections);

async.forEach(collections, function (name, done) {
var coll = mongoose.connection.collections[name];
coll.drop(function (err) {
var alreadyGone = Boolean(err) && err.message == "ns not found";
if (!err || alreadyGone) {
done(null);
} else {
done(err);
}
});
}, callback);
EDIT: Try the following:
// Remove all documents from every collection, going through the raw
// driver handle (mongoose.connection.db) rather than the model wrappers.
var collections = mongoose.connection.collections;
async.eachSeries(_.keys(collections), function (key, cb) {
  mongoose.connection.db.collection(key, function (err, col) {
    // Propagate a lookup failure instead of silently hanging the series.
    if (err) { return cb(err); }
    col.remove({}, function () {
      cb();
    });
  });
}); // the original snippet never closed the eachSeries(...) call
Unrelated issue, it wasn't connecting to the DB in the test environment. But no errors were reported, and it had a valid list of collections due to mongoose models.
I added the following to the test set up to log out the errors and other info to help find these issues in the future...
// Log mongoose connection lifecycle events so silent connection failures
// (like the one behind this question) show up in the test output.
mongoose.connection.on('connected', () => {
console.log(`Mongoose default connection open to ${config.db.url}`);
});

mongoose.connection.on('error', (err) => {
console.log(`Mongoose default connection error: ${err}`);
});

mongoose.connection.on('disconnected', () => {
console.log('Mongoose default connection disconnected');
});
I reinstalled riak, node.js and riak-js on a new sever (Ubuntu 13.04) and suddenly when I use Riak-JS's db.keys() to get a list of keys in a bucket, it simply never returns.
Here's a test snippet that shows how it fails:
// Minimal repro: save a key, read it back, then list the bucket's keys.
var db = require('riak-js').getClient();
console.log('1');
db.save('bkt','x','1', function(err, data) {
console.log('2');
db.get('bkt','x', function(err, data) {
console.log('3:'+data);
// NOTE(review): per the accepted answer below, db.keys() does not pass
// the key list to this callback — it returns an event emitter that must
// be started, so this callback body never receives `list`.
db.keys('bkt', function(err, list) {
console.log('4');
// `for (key in list)` also leaks `key` as an implicit global.
for (key in list) { console.log('5:'+list[key]); }
});
});
});
Here's the output:
1
2
3:1
It doesn't output anything else. I installed the latest Riak (1.4.2), Node.js and Riak-JS (#latest).
Thanks!
According to documentation db.keys() returns event emitter instead of passing the result to the callback. It's because the key list may be very long.
In that case the callback serves only to handle errors.
So instead of passing the callback, you have to use the returned even emitter:
// db.keys() returns an event emitter: the callback only reports errors,
// the key list arrives via 'keys' events (possibly in batches), and no
// data flows until .start() is called.
db.keys('bkt', function(err) {
if (err) {
//handle error
}
})
.on('keys', function(list) {
//handle the keys list
//remember, keys may be delivered in batches
})
.on('end', function() {
//handle the end of keys stream
})
.start();