I have some server-side helper functions stored in the system.js collection, which are then used from node.js. However, sometimes they are undefined.
Here is the scenario:
I load these functions once, on server start:
db.eval('db.loadServerScripts()', function(err, result) { ... });
So this is called only once on start, not for every request.
From now on I can call e.g.:
db.eval('getNextSequence(\'test\')', function(err, seq){});
But sometimes I get an error that getNextSequence is undefined. I suspect those functions exist only in the scope of the current connection, so when node opens a new connection the functions are not set.
Is there any way to use those functions from node.js and have them reliably available at all times?
Example scenario:
//1./ this function is stored in system.js
db.system.js.save({
    _id: "getNextSequence",
    value: function (name) {
        var ret = db.counters.findAndModify({
            query: { _id: name },
            update: { $inc: { seq: 1 } },
            new: true,
            upsert: true
        });
        return ret ? ret.seq : null;
    }
});
//2./ this is called on node.js server startup (once per server lifetime)
var mongo = require('mongoskin');
var db = mongo.db("mongodb://localhost:27017/mydb", {native_parser:true});
//...
db.eval('db.loadServerScripts()', function(err, result) {
    //...crash if failed
});
//3./ this is used in node.js code, during request processing:
db.eval('getNextSequence(\'someNamespace\')', function(err, seq){
    // int seq is converted to string slug
    // a new entity with slugId is saved to collection
});
Related
I need help creating a simple function in node.js that shows all rows in some table from MongoDB.
The second time the function runs, it should get the data from a node.js cache instead of going to MongoDB.
Something like this idea:
var myCache;
function getData() {
    if (myCache == undefined) {
        // code that gets data from mongodb (i have it)
        // and inserts it into the node.js cache (TODO)
    } else {
        // code that gets data from the node.js cache (TODO)
    }
}
The general idea is to implement some form of asynchronous caching in which the cache object acts as key-value storage. Extending your idea, you could restructure your function to follow this pattern:
var myCache = {};

var getData = function (id, callback) {
    if (myCache.hasOwnProperty(id)) {
        if (myCache[id].hasOwnProperty("data")) { /* value is already in cache */
            return callback(null, myCache[id].data);
        }
        /* a fetch is already in flight, so queue the callback */
        return myCache[id].queue.push(callback);
    }
    /* cache miss: remember the waiting callback */
    myCache[id] = { "queue": [callback] };
    /* fetch data from MongoDB */
    collection.findOne({ "_id": id }, function (err, data) {
        var queue = myCache[id].queue;
        if (err) {
            /* drop the entry so a later call can retry, then fail all waiters */
            delete myCache[id];
            return queue.forEach(function (cb) { cb(err); });
        }
        /* store the value and flush every queued callback */
        myCache[id].data = data;
        delete myCache[id].queue;
        queue.forEach(function (cb) {
            cb(null, data);
        });
    });
};
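For example, two overlapping calls for the same id share a single findOne round trip; the second caller is queued and fired when the data arrives. This sketch assumes collection is an already-opened driver collection handle:
getData("someId", function (err, data) {
    if (err) return console.error(err);
    console.log("first caller:", data); // this call triggered the findOne
});
getData("someId", function (err, data) {
    if (err) return console.error(err);
    console.log("second caller:", data); // served from the queue, no extra query
});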
A while ago I managed to write a method to bulk upsert a lot of information into my database. Now what I am trying to write is a method to clean old records from the same database and collection.
raidSchema.statics.bulkUpsert = function (raids, callback) {
    var bulk = Raid.collection.initializeUnorderedBulkOp();
    for (var i = 0; i < raids.length; i++) {
        var raid = raids[i];
        var date = new Date();
        bulk.find({id: raid.id, hash: raid.hash}).upsert().update({
            $setOnInsert: {
                ...
            },
            $set: {
                ...
            }
        });
    }
    bulk.execute(callback);
};
This works perfectly. Then I wrote the following, in the hope that it would clean the old records that I don't need anymore:
raidSchema.statics.cleanOldRaids = function (callback) {
    var date = new Date();
    date.setMinutes(date.getMinutes() - 30);
    var bulk = Raid.collection.initializeUnorderedBulkOp();
    bulk.find({$or: [ { maxHealth: {$lte: 0} }, { isComplete: true }, { updatedOn: {$lte: date.getTime()} } ] }).remove();
    bulk.execute(callback);
};
And I am running this method with the following script, which tries to run it every 30 minutes:
var Raid = require('../models/raid');
var async = require('async');
var cleanInterval = 1000 * 60 * 30;

var cleanRaids = function () {
    console.log('cleanRaids: Starting cleaning');
    async.series([
        function (callback) {
            Raid.cleanOldRaids();
            callback(null, 'All servers');
        }],
        function (err, results) {
            if (err) throw err;
            console.log("cleanRaids: Done cleaning (" + results.join() + ")");
            setTimeout(cleanRaids, cleanInterval);
        });
};

cleanRaids();
But right after I run my server, it crashes saying that it cannot read property 'find' of undefined:
.../models/raid.js:104
bulk.find({$or: [ { maxHealth: {$lte: 0} }, { isComplete: true }, {updatedO
^
TypeError: Cannot read property 'find' of undefined
I am completely lost, since it works perfectly with the bulkUpsert method, which is run by very similar code.
Anyone has any idea as to why this might be happening?
Thanks a lot.
The problem here is that mongoose has not connected to the database yet, and therefore has no handle to the underlying driver object you are accessing via the .collection accessor.
The mongoose methods themselves perform a little "magic", essentially queuing all operations until the database connection is actually made, e.g.:
Model.find().exec(function(err,docs) { }); // <-- callback queues until connection is ready
However, if no connection is present, native methods will not return a collection object:
Model.collection.find({},function(err,docs) { }); // <-- collection is undefined
The bulk methods just return a structure that has not yet executed, so the error does not appear until you try to call a method on that structure.
The fix is easy, just wait for the connection before executing any code:
mongoose.connection.on("open", function() {
    // body of program in here
});
So though the "mongoose methods" do their own magic to "hide this away", this is needed when calling native methods. The only other way to get away with it is when you are absolutely sure that one of the "mongoose methods" has already fired, and that a connection has been made.
Better to be safe than sorry, so it is wise practice to put your main program's initialization and method calls within such a block as above.
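Applied to the cleaning script from the question, a minimal sketch would look like this (the connection string here is a placeholder; use however your project actually connects):
var mongoose = require('mongoose');
var Raid = require('../models/raid');

// hypothetical connection string -- substitute your own
mongoose.connect('mongodb://localhost/mydb');

mongoose.connection.on("open", function() {
    // Raid.collection now has a live driver handle, so the bulk ops are safe
    cleanRaids();
});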
I'm designing a client/server synchronization feature. The client sends a bunch of changed events to the server. The server creates, deletes, or modifies records depending on each item's status. After the database operations, the server needs to send a summary back to the client.
Below is an excerpt from my server-side code, built with mongoose and restify.
var EventModel = mongoose.model('Event', eventSchema);

server.post("/sync", function (req, res, next) {
    var events = req.params.events;
    var created = [], deleted = [], updated = [];
    events.forEach(function (elem) {
        if (elem.status == 0) {
            // Delete
            EventModel.remove({ _id: elem.uuid }, function (err, event) {
                if (!err) deleted.push({uuid: elem.uuid});
            });
        } else if (elem.status == 1) {
            // Create and update uuid
            var event = new EventModel(elem);
            event.save(function (err, doc) {
                if (!err) {
                    elem.uuid = event._doc._id;
                    created.push(elem);
                }
            });
        } else if (elem.status == 2) {
            // Update
            EventModel.findOne({ _id: elem.uuid }, function (err, event) {
                event.save(function (err, doc) {
                    if (!err) updated.push({uuid:elem.uuid});
                });
            });
        }
    });
    // Notify client what was processed.
    // PROBLEM: created, deleted, updated are always empty!
    res.send({processed: {created: created, deleted: deleted, updated: updated}});
});
Since mongoose does CRUD asynchronously, the created, deleted, and updated arrays in the response are always empty.
Is there any way to run the mongoose operations in series?
As stated in the comments you could use the npm async module.
Alternatively, you may prefer to nest callbacks (though this can lead to what is known as callback hell, i.e. many nested callbacks) or take advantage of the mongoose .then() method - see http://mongoosejs.com/docs/promises.html
Here you can do:
EventModel.remove(args).then((removeResponse) => {
    return EventModel.findOne(args);
}).then((findResponse) => {
    // etc
})
These requests will execute sequentially, one after the other.
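For instance, here is a minimal sketch of the /sync handler from the question rewritten with the async module's each; error handling is simplified and the status == 2 branch is omitted for brevity:
var async = require('async');

server.post("/sync", function (req, res, next) {
    var events = req.params.events;
    var created = [], deleted = [], updated = [];
    async.each(events, function (elem, done) {
        if (elem.status == 0) {
            EventModel.remove({ _id: elem.uuid }, function (err) {
                if (!err) deleted.push({ uuid: elem.uuid });
                done(err);
            });
        } else if (elem.status == 1) {
            new EventModel(elem).save(function (err, doc) {
                if (!err) {
                    elem.uuid = doc._id;
                    created.push(elem);
                }
                done(err);
            });
        } else {
            done(); // status == 2 handling omitted
        }
    }, function (err) {
        if (err) return next(err);
        // every operation has finished, so the arrays are populated now
        res.send({ processed: { created: created, deleted: deleted, updated: updated } });
    });
});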
I'm a bit confused about concurrency in SailsJS's waterline.
Currently I'm doing data retrieval like this:
var results = {};

// Get user by id 5
User.find('5', function(err, user) {
    results.user = user;
    // when it resolves, get messages
    Message.find({userId: '5'}, function(err, messages) {
        results.messages = messages;
        // when message query resolves, get other stuff
        OtherStuff.find({userId: '5'}, function(err, otherStuff) {
            results.otherStuff = otherStuff;
            res.view({results});
        });
    });
});
The problem is that the DB calls are not concurrent; each query is launched only after the previous one has completed. I'd like to launch all the queries at the same time, and then, once they have all been fulfilled, pass the results to the view.
How can I achieve this concurrency with DB requests?
Thanks!
Use async.auto. The async module is globalized in Sails:
async.auto({
    user: function(cb) {
        // Note--use findOne here, not find! "find" doesn't accept
        // an ID argument, only an object.
        User.findOne('5').exec(cb);
    },
    messages: function(cb) {
        Message.find({userId: '5'}).exec(cb);
    },
    otherStuff: function(cb) {
        OtherStuff.find({userId: '5'}).exec(cb);
    }
},
// This will be called when all queries are complete, or immediately
// if any of them returns an error
function allDone (err, results) {
    // If any of the queries returns an error,
    // it'll populate the "err" var
    if (err) {return res.serverError(err);}
    // Otherwise "results" will be an object whose keys are
    // "user", "messages" and "otherStuff", and whose values
    // are the results of those queries
    res.view(results);
});
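As an aside, async.auto can also express dependencies between tasks, which is what sets it apart from a plain parallel call. A sketch, assuming the async 1.x style that Sails globalized at the time (dependent task functions receive (cb, results)):
async.auto({
    user: function(cb) {
        User.findOne('5').exec(cb);
    },
    // "messages" waits for "user" to resolve, then can use its result
    messages: ['user', function(cb, results) {
        Message.find({userId: results.user.id}).exec(cb);
    }]
},
function (err, results) {
    if (err) {return res.serverError(err);}
    res.view(results);
});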
I am using the twitter API in my code along with mongodb. The code reflects the correct output in the database, but it does not terminate. I guess the problem is with the db.server.find({id:myid},cb); statement in the code below, but I don't know how to work it out.
var Twit = require('../lib/twitter'),
    conf = require('../config1');

var myid;
var twit = new Twit(conf);

var databaseUrl = "mydb2"; // "username:password@example.com/mydb"
var collections = ["server", "followers"];
var db = require("mongojs").connect(databaseUrl, collections);

twit.get('account/verify_credentials', function (err, reply) {
    myid = reply.id;
    function addToServer(myid, cb) {
        db.server.find({
            id: myid
        }, cb);
    };
    addToServer(myid, function (err, resp) {
        if (err) {
            console.log("err");
        } else if (resp.length > 0) {
            console.log("My Id already present in server");
        } else {
            console.log("New to the app. So updating server");
            db.server.insert({
                id: myid
            });
            db.followers.insert({
                id: myid,
                following: []
            })
        }
    });
});
P.S.: This is a part of my code. I have also used the process.exit(0) function, but it did not help.
I think your issue is related to this: https://github.com/mafintosh/mongojs/issues/15.
Here's the gist: if I call db.close() the program exits, and if I don't, it doesn't. So process.on('exit') must not be the right place to call it.
But the issue is that you have a persistent tcp connection open to the DB, and as long as that's running, the script won't shut down.
Is this a run-once script, or do you need to keep this thing running?
EDIT:
Since the script only needs to run once, I'd use callbacks on your two database queries and close the database down in the last callback.
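For example, the else branch from your code could chain the two inserts and close the handle in the innermost callback (a sketch; mongojs inserts accept a trailing callback):
db.server.insert({ id: myid }, function (err) {
    if (err) throw err;
    db.followers.insert({ id: myid, following: [] }, function (err) {
        if (err) throw err;
        // drops the persistent tcp connection so the process can exit
        db.close();
    });
});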