Multiple result display with MongoDB and Node.js

I was writing code for a cryptocurrency rates display backend, and I have two main problems.
First:
last: {
    rate: (idx, cb) => {
        models.master.find({}).where('id', idx).limit(10).exec((err, data) => {
            if (data && data.length > 0) cb(data);
            else cb(err);
        });
    }
}
Have a look at the code: the callback always receives only a single JSON object, when it should receive multiple.
Second:
How can I sort by the most recently added data (both ascending and descending)? I already added a time key for this purpose, but I don't know how to use it.
"time":"5-7-2017_12:05:43:PM"
Test MongoDB object:
[{"_id":"595c88bf4d451b206454434b","time":"5-7-2017_12:05:43:PM","id":"bitcoin","name":"Bitcoin","symbol":"BTC","rank":"1","price_usd":"2561.25","price_btc":"1.0","market_cap_usd":"42078648188.0","available_supply":"16428950.0","total_supply":"16428950.0","percent_change_1h":"0.2","percent_change_24h":"-2.42","percent_change_7d":"2.13","last_updated":"1499236459","price_inr":"165829.411875","market_cap_inr":"2724403116224","__v":0}]

Related

Google Datastore NodeJS combine (union) multiple sets of keys only results

I am working with NodeJS on Google App Engine with the Datastore database.
Note that this question is an extension of (not a duplicate of) my original post.
Because Datastore does not support the OR operator, I need to run multiple queries and combine the results.
Here is my approach (based on the selected answer from my original post):
1. Use keys-only queries in the first stage.
2. Combine the keys obtained into a single list (including de-duplication).
3. Obtain the entities simply by direct key lookup.
I have achieved #1 by running two separate queries with the async parallel module. I need help with step 2.
Question: How can I efficiently combine (union) two lists of entity keys into a single list, with de-duplication?
The code I have below successfully performs both the queries and returns two objects getEntities.requesterEntities and getEntities.dataownerEntities.
// Requirement: get entities for Transfer Requests that match either the Requester OR the Dataowner
async.parallel({
    requesterEntities: function(callback) {
        getEntitiesByUsername('TransferRequest', 'requester_username', loggedin_username, (treqs_by_requester) => {
            // Callback passes the response in as a parameter
            callback(null, treqs_by_requester);
        });
    },
    dataownerEntities: function(callback) {
        getEntitiesByUsername('TransferRequest', 'dataowner_username', loggedin_username, (treqs_by_dataowner) => {
            // Callback passes the response in as a parameter
            callback(null, treqs_by_dataowner);
        });
    }
}, function(err, getEntities) {
    console.log(getEntities.requesterEntities);
    console.log(getEntities.dataownerEntities);
    // ***HOW TO COMBINE (UNION) BOTH OBJECTS CONTAINING DATASTORE KEYS?*** //
});
function getEntitiesByUsername(kind, property_type, loggedin_username, getEntitiesCallback) {
    // Create datastore query
    const treq_history = datastore.createQuery(kind);
    // Set query conditions
    treq_history.filter(property_type, loggedin_username);
    treq_history.select('__key__');
    // Run datastore query
    datastore.runQuery(treq_history, function(err, entities) {
        if (err) {
            console.log('Transfer Request History JSON unable to return data results for Transfer Request. Error message: ', err);
        } else {
            getEntitiesCallback(entities);
        }
    });
}
I was able to combine the two separate sets of entity keys by iterating over the arrays, comparing the ID values of each entity key, and building a new array with the unique keys.
Note: The complete solution is posted as an answer to my original post.
// Union of two arrays of entity keys
function unionEntityKeys(arr1, arr2) {
    var arr3 = [];
    for (var i in arr1) {
        var shared = false;
        for (var j in arr2) {
            if (arr2[j][datastore.KEY]['id'] == arr1[i][datastore.KEY]['id']) {
                shared = true;
                break;
            }
        }
        if (!shared) {
            arr3.push(arr1[i]);
        }
    }
    arr3 = arr3.concat(arr2);
    return arr3;
}
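For larger result sets, the nested loops above do O(n × m) key comparisons. The same union can be sketched in linear time with a Map keyed on the entity key's ID; this assumes the same datastore.KEY symbol used above, and keys created with a name rather than a numeric id would need .name instead of .id:
// Drop-in alternative: union of two arrays of keys-only entities, de-duplicated by key ID.
function unionEntityKeysLinear(arr1, arr2) {
    const byId = new Map();
    arr1.concat(arr2).forEach(function(entity) {
        const id = entity[datastore.KEY].id;
        if (!byId.has(id)) {
            byId.set(id, entity); // first occurrence wins; duplicates are skipped
        }
    });
    return Array.from(byId.values());
}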

MongoDB, Updates and Controlling Document Expiration

I'm working on a node.js project. I'm trying to understand how MongoDB works. I'm obtaining data hourly via a cron file. I'd like the data to be unique, so I'm using update instead of insert. That works fine. I'd like to add the option that the data expires after three days. It's not clear to me how to do that.
In pseudocode:
Set up vars, URLs, a couple of global variables (lineNr = 1, end_index = #), including databaseUrl.
MongoClient.connect(databaseUrl, function(err, db) {
    assert.equal(null, err, "Database Connection Troubles: " + err);
    // (update) TTL index creation added here:
    db.collection('XYZ_Collection').createIndex({ "createdAt": 1 },
        { expireAfterSeconds: 120 }, function() {});
    s = fs.createReadStream(text_file_directory + 'master_index.txt')
        .pipe(es.split())
        .pipe(es.mapSync(function(line) {
                s.pause(); // pause the readstream
                lineNr += 1;
                getContentFunction(line, s);
                if (lineNr > end_index) {
                    s.end();
                }
            })
            .on('error', function() {
                console.log('Error while reading file.');
            })
            .on('end', function() {
                console.log('All done!');
            })
        );
    function getContentFunction(line, stream) {
        // (get content, format it, store it as flat JSON CleanedUpContent)
        var go = InsertContentToDB(db, CleanedUpContent, function() {
            stream.resume();
        });
    }
    function InsertContentToDB(db, data, callback) {
        // (expiration TTL code placed here generates errors too...)
        db.collection('XYZ_collection').update({
            'ABC': data.abc,
            'DEF': data.def
        }, {
            "createdAt": new Date(),
            'ABC': data.abc,
            'DEF': data.def,
            'Content': data.blah_blah
        }, {
            upsert: true
        }, function(err, results) {
            assert.equal(null, err, "MongoDB Troubles: " + err);
            callback();
        });
    }
});
So the db.collection('XYZ_collection').update() call, with two fields in the query, acts as a compound key to ensure the data is unique. upsert: true allows for insertion or update as appropriate. My data varies greatly: some content is unique, other content is an update of a prior submission. I think I have this unique insert-or-update function working correctly.
What I'd really like to add is an automatic expiration for the documents within the collection. I've seen lots of material about this, but I'm at a loss as to how to implement it.
If I try
db.collection('XYZ_collection')
    .ensureIndex({ "createdAt": 1 },
        { expireAfterSeconds: 259200 }); // three days
Error
/opt/rh/nodejs010/root/usr/lib/node_modules/mongodb/lib/mongodb/mongo_client.js:390
throw err
^
Error: Cannot use a writeConcern without a provided callback
at Db.ensureIndex (/opt/rh/nodejs010/root/usr/lib/node_modules/mongodb/lib/mongodb/db.js:1237:11)
at Collection.ensureIndex (/opt/rh/nodejs010/root/usr/lib/node_modules/mongodb/lib/mongodb/collection.js:1037:11)
at tempPrice (/var/lib/openshift/56d567467628e1717b000023/app-root/runtime/repo/get_options_prices.js:57:37)
at /opt/rh/nodejs010/root/usr/lib/node_modules/mongodb/lib/mongodb/mongo_client.js:387:15
at process._tickCallback (node.js:442:13)
If I try to use createIndex I get this error:
`TypeError: Cannot call method 'createIndex' of undefined`
Note that the database is totally empty, via db.XYZ_collection.drop(). So yeah, I'm new to the Mongo stuff. Does anybody understand what I need to do? One note: I'm very confused by something I read regarding not being able to create a TTL index if the indexed field is already in use by another index. I think I'm okay, but it's not clear to me.
There are some restrictions on choosing a TTL index: you can't create a TTL index if the indexed field is already used in another index; the index can't have multiple fields; and the indexed field should be a Date BSON type.
As always, many thanks for your help.
Update: I've added the createIndex code above. With an empty callback it runs without error, but the TTL system fails to remove entries at all, sigh.
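One thing worth double-checking in the code above: the index is created on 'XYZ_Collection' while the updates go to 'XYZ_collection', and MongoDB collection names are case-sensitive, so the TTL index may be sitting on a different collection than the documents. Also, because the replacement document sets createdAt: new Date() on every run, each hourly upsert pushes the expiration forward. Here is a minimal sketch of how the pieces could fit together, assuming the same driver and that data holds the cleaned-up content; $setOnInsert keeps the original createdAt across upserts:
var coll = db.collection('XYZ_collection'); // one spelling everywhere; names are case-sensitive

// TTL index: documents are removed roughly 259200 s (three days) after createdAt.
// createdAt must be a BSON Date, and the TTL monitor only sweeps about once a minute.
coll.createIndex({ createdAt: 1 }, { expireAfterSeconds: 259200 }, function(err) {
    if (err) return console.error(err);
    coll.update(
        { ABC: data.abc, DEF: data.def },            // match on the compound key
        {
            $set: { Content: data.blah_blah },       // refresh the content on every run
            $setOnInsert: { createdAt: new Date() }  // set only when first inserted
        },
        { upsert: true },
        function(err, results) {
            if (err) console.error(err);
        }
    );
});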

How to improve the view with map/reduce in CouchDB and Node.js

I'm using Node.js with the cradle module to interact with the CouchDB server. My question is about understanding the reduce process so I can improve the view query...
For example, to get a user's data from their ID, I use a view like this:
map: function (doc) { emit(null, doc); }
And in node.js (with cradle):
db.view('users/getUserByID', function (err, resp) {
    var found = false;
    resp.forEach(function (key, row, id) {
        if (id == userID) {
            found = true;
            userData = row;
        }
    });
    if (found) {
        // good, works
    }
});
As you can see, this is really bad for a large number of documents (users in the database), so I need to improve this view with a reduce, but I don't know how because I don't understand how reduce works. Thank you.
First of all, you're using views wrong. Views are indexes in the first place, and you shouldn't use them for full-scan operations; that's inefficient and wrong. Use the power of the B-tree index with the key, startkey and endkey query parameters, and emit the field you want to search on as the key value.
Second, your example could easily be transformed to:
db.get(userID, function (err, body) {
    if (!err) {
        // found!
    }
});
In your loop you're comparing each row's document ID against your userID value, so there is no need for that loop: you can request the document by its ID directly.
Third, if your userID value doesn't match the document's _id (i.e. it's stored as a separate field), your view should be:
function (doc) { emit(doc.userID, null); }
and your code will look like:
db.view('users/getUserByID', { key: userID }, function (err, resp) {
    if (!err) {
        // found!
    }
});
Simple. Effective. Fast. If you need the matched document, use the include_docs: true query parameter to fetch it.
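For completeness, a sketch of that last variant combined with include_docs: true; this assumes cradle exposes the raw CouchDB rows on resp.rows, where each row looks like { id, key, value, doc }:
db.view('users/getUserByID', { key: userID, include_docs: true }, function (err, resp) {
    if (err) return console.error(err);
    resp.rows.forEach(function (row) {
        // emit(doc.userID, null) leaves the row value null; with include_docs
        // the full document rides along in row.doc
        console.log('found user:', row.doc);
    });
});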

How to sort an array in node.js based on time in milliseconds using Redis database

I'm trying to sort an array which contains an Id and a Time, based on the time (milliseconds).
Map.sort(function(a, b) { return a.Time - b.Time });
for (keys in Map) {
    multi.hgetall(Map[key].id + ':List');
}
multi.exec(function(err, data) {
    res.send(data);
});
It's not displaying any data. There are 3 IDs in that array.
I don't know what res is in your code. And even though you name the variable keys, you'll get the array indexes in this statement:
for(keys in Map){...}
Try something like this, using a forEach iterator:
Map = [{ 'id': 'a', 'Time': 90 }, { 'id': 'c', 'Time': 30 }, { 'id': 'b', 'Time': 66 }];
Map.forEach(function(item) {
    console.log(item);
    multi.hgetall(item.id + ':List');
});
multi.exec(function(err, data) {
    console.log('data', data);
});
It should print some data if the keys a:List, b:List and c:List exist in your redis database.
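Putting the sort back in front of that iteration, a complete sketch might look like this; client is assumed to be a connected node_redis client, and the ids and times are the hypothetical values from above:
var list = [{ 'id': 'a', 'Time': 90 }, { 'id': 'c', 'Time': 30 }, { 'id': 'b', 'Time': 66 }];
// Sort ascending by Time in milliseconds; use b.Time - a.Time for descending.
list.sort(function(a, b) { return a.Time - b.Time; });
var multi = client.multi();
list.forEach(function(item) {
    multi.hgetall(item.id + ':List'); // queue one HGETALL per id, in sorted order
});
multi.exec(function(err, data) {
    if (err) return console.error(err);
    console.log(data); // replies come back in the same (sorted) order: c, b, a
});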

Creating incrementing numbers with MongoDB

We have an order system where every order has an ID. For accounting purposes we need a way to generate invoices with incrementing numbers. What is the best way to do this without using an SQL database?
We are using node to implement the application.
http://www.mongodb.org/display/DOCS/How+to+Make+an+Auto+Incrementing+Field
The first approach is keeping counters in a side document:
One can keep a counter of the current _id in a side document, in a
collection dedicated to counters. Then use FindAndModify to atomically
obtain an id and increment the counter.
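A minimal sketch of that counters-collection pattern with the Node.js driver; the 'counters' and 'orders' names are illustrative, and depending on the driver version the updated document comes back either directly or wrapped in result.value:
// Atomically fetch-and-increment a named counter; the upsert creates it on first use.
function getNextSequence(db, counterName, callback) {
    db.collection('counters').findAndModify(
        { _id: counterName },        // query: which counter to bump
        [],                          // sort: irrelevant for a single matching doc
        { $inc: { seq: 1 } },        // atomic increment
        { new: true, upsert: true }, // return the updated doc; create it if missing
        function(err, result) {
            if (err) return callback(err);
            var doc = result.value || result; // driver-version dependent shape
            callback(null, doc.seq);
        }
    );
}

// Usage: assign the next invoice number before inserting the order.
getNextSequence(db, 'invoices', function(err, seq) {
    if (err) return console.error(err);
    db.collection('orders').insert({ _id: seq /* plus the order fields */ }, function(err) {
        if (err) console.error(err);
    });
});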
The other approach is to loop optimistically and handle the duplicate key error (code 11000) by continuing and incrementing the id, for the edge case of collisions. That works well unless there are high-concurrency writes to a specific collection.
One can do it with an optimistic concurrency "insert if not present"
loop.
But be aware of the warning on that page:
Generally in MongoDB, one does not use an auto-increment pattern for
_id's (or other fields), as this does not scale up well on large database clusters. Instead one typically uses Object IDs.
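For contrast, the ObjectId approach that the warning recommends needs no coordination between writers at all; a minimal sketch with the Node.js driver:
var ObjectID = require('mongodb').ObjectID;

// ObjectIDs are generated client-side from a timestamp, machine/process bytes
// and a counter, so concurrent writers never collide (but the values are not
// human-readable invoice numbers).
db.collection('orders').insert({ _id: new ObjectID(), total: 42 }, function(err) {
    if (err) console.error(err);
});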
Other things to consider:
Timestamp - unique long but not incrementing (based on epoch)
Hybrid Approach - apps don't necessarily have to pick one storage option.
Come up with your own id mechanism based on things like customer, date/time parts, etc. that you generate and handle collisions for. Depending on the scheme, collisions can be much less likely. It's not necessarily incrementing, but it is unique and has a well-defined, readable pattern.
I did not find any working solution, so I implemented the "optimistic loop" in node.js to get auto-incrementing integer ID fields. It uses the async module to implement the while loop.
// Insert the document into the targetCollection. Use auto-incremented integer IDs instead of UIDs.
function insertDocument(targetCollection, document, callback) {
    var keepRunning = true;
    var seq = 1;
    // $type 16/18: integer values
    var isNumericQuery = { $or: [{ "_id": { $type: 16 } }, { "_id": { $type: 18 } }] };
    async.whilst(testFunction, mainFunction, afterFinishFunction);

    // Called before each execution of mainFunction(). Works like the stop criterion of a while loop.
    function testFunction() {
        return keepRunning;
    }

    // Called each time testFunction() passes. It is passed a function (next) which must be called after it has completed.
    function mainFunction(next) {
        findCursor(targetCollection, findCursorCallback, isNumericQuery, { _id: 1 });

        function findCursorCallback(cursor) {
            cursor.sort({ _id: -1 }).limit(1);
            cursor.each(cursorEachCallback);
        }

        function cursorEachCallback(err, doc) {
            if (err) console.error("ERROR: " + err);
            if (doc != null) {
                seq = doc._id + 1;
                document._id = seq;
                targetCollection.insert(document, insertCallback);
            }
            if (seq === 1) {
                document._id = 1;
                targetCollection.insert(document, insertCallback);
            }
        }

        function insertCallback(err, result) {
            if (err) {
                console.dir(err);
            } else {
                keepRunning = false;
            }
            next();
        }
    }

    // Called once after testFunction() fails and the loop has ended.
    function afterFinishFunction(err) {
        callback(err, null);
    }
}

// Call find() with optional query and projection criteria and return the cursor object.
function findCursor(collection, callback, optQueryObject, optProjectionObject) {
    if (optProjectionObject === undefined) {
        optProjectionObject = {};
    }
    var cursor = collection.find(optQueryObject, optProjectionObject);
    callback(cursor);
}
Call it with:
insertDocument(db.collection(collectionName), documentToSave, function(err) { if (err) console.error(err); });
