nodejs, pgSQL and Async queries - node.js

I am new to nodejs and I am trying to fetch some data from my PG server. I manage to get my data but not in the order I expected. I may not use it the proper way, can anyone help ?
Here is a sample of code :
var pg = require('pg');
var db = new pg.Client(conString);
var link = db.connect();
var data = {};
// -----
console.log(prefix+'Fetching categories');
db.query('SELECT DISTINCT category FROM cc WHERE category IS NOT NULL', function(err, data){
data.rows.forEach(function(row){
data[row.category] = {}; // initialise
});
console.log('1111111',data,'---------');
});
console.log('2222222',data,'---------');
for (var category in data)
{
console.log(prefix+'Listing values for on "'+category+'"');
var values = db.query('SELECT SUBSTRING(date::varchar, 1,7) AS month, sum(amount) FROM cc WHERE category = \''+category+'\' GROUP BY 1 ORDER BY 1', function(err, data){
console.log('Got values',data.rows);
});
}
// -----
console.log(prefix+'Ending connection to database');
// db.end();
// -----
console.log(prefix+'Ending transaction on server side');
response.end();
I get 222222 before 1111111 :/ so my result is sent as empty and then it is filled :( How do I have to do ?
Thanks for your time !

Take a look at promises :https://github.com/promises-aplus/promises-spec. (one good lib for nodejs is q - http://documentup.com/kriskowal/q/).they are really useful in keeping nodejs code coherent and neat in all those callbacks.a must for every nodejs developer.

Related

node js how to sort a collection

I have a mongodb collection which has a large quantity of entries. + 10000.
All of these have the same paramters:
Val_string
T1_string
T2_string
I am receiving this via a get request from my mobile app however the order is not correct.
How can I sort my collection by the Val string which is a number going from 1 to 10000 in this case.
I am using mongoose mongodb and node js.
Presently I query the collection and get 5 results send them and then loop another 5 until I have read all the results.
So I need to sort the collection out prior to sending this data.
app.get("/testmethod", function(req, res)
{
mongoose.connect("mongodb://localhost:27017/CTI", {useNewUrlParser: true},function(err, client){
if(err) {
console.log(err)
}
else
{
client.db.collection("datas").find().limit(items_to_send).skip(Sent_data).toArray(function(err, result) {
if (err) {throw err;}
//console.log(result);
Sent_data = Sent_data + items_to_send;
db.close();
var D1_T1 = result[0].T1_String;
var D1_T2 = result[0].T2_String;
var D2_T1 = result[1].T1_String;
var D2_T2 = result[1].T2_String;
var D3_T1 = result[2].T1_String;
var D3_T2 = result[2].T2_String;
var D4_T1 = result[3].T1_String;
var D4_T2 = result[3].T2_String;
var D5_T1 = result[4].T1_String;
var D5_T2 = result[4].T2_String;
res.status(200).send({
D1_T1: D1_T1,
D1_T2: D1_T2,
D2_T1: D2_T1,
D2_T2: D2_T2,
D3_T1: D3_T1,
D3_T2: D3_T2,
D4_T1: D4_T1,
D4_T2: D4_T2,
D5_T1: D5_T1,
D5_T2: D5_T2
});
});
}//End of else
});//End of connect
});
What I want is a sort to order the entire collection by the val string field which goes from 1-10000
You have as below
client.db.collection("datas").find().limit(items_to_send).skip(Sent_data)
You can add .sort({fieldName:sortOrder})
sort order: 1 / -1(desc)
client.db.collection("datas").find().limit(items_to_send)
.skip(Sent_data).sort({"val_string":1})
Sort follows lexicographical order so no problem of being string.

how to join two tables and get all data base on child in firebase

i wanted to get the data from two tables in firebase db
the 1st table was
from here i want to get the next data from table based on the hospital_fk
this is the result it got on my json
and here is my script for getting the data..
router.get('', function(req, res){
var booths = database.ref('booths');
var hospital = database.ref('hospitals');
booths.once('value', function (snapshot) {
var dataSet = [];
snapshot.forEach(function (childSnapshot) {
var childKey = childSnapshot.key;
var fk = snapshot.child(childKey).val();
hospital.child(childSnapshot.val().hospital_fk).on('value', hospital=>{
var childData = _.assign(fk, hospital.val());
dataSet.push({
childKey: childKey,
childData: childData
});
res.json(dataSet);
});
});
});
});
now my problem was only the first data is being returned and also getting an error.. says that FIREBASE WARNING: Exception was thrown by user callback. Error: Can't set headers after they are sent.
any idea on how to get all the records and what's the best approach on joining two tables.
When you call res.json(dataSet) it sets a header that your response is JSON, and sends the JSON. You can only set headers on the response before sending data, so the second time you make this call, Node.js will rightfully throw an error saying that it can't set the header.
What you'll need to do is first gather all joined data into a single JSON response, and then send it in one go after you've loaded all of them. To do this you use a list of promises and Promise.all():
router.get('', function(req, res){
var booths = database.ref('booths');
var hospital = database.ref('hospitals');
booths.once('value', function (snapshot) {
var promises = [];
snapshot.forEach(function (childSnapshot) {
var childKey = childSnapshot.key;
var fk = snapshot.child(childKey).val();
var promise = hospital.child(childSnapshot.val().hospital_fk).once('value');
promises.push(promise);
});
Promise.all(promises).then(function(snapshots) {
var dataSet = [];
snapshots.forEach(function(hospital) {
var childData = _.assign(hospital.key, hospital.val());
dataSet.push({
childKey: hospital.key,
childData: childData
});
});
res.json(dataSet);
});
});
});
Now the code only calls res.json(dataSet) once, after it's gotten all of the hospital data.
You'll note that I also changed your on('value' to once('value', since I doubt you'll want to keep the listener active for more than just one read.

how to send multiple values from angular2 form to nodejs server and query from mongodb on the basis of 6 different values?

I am working in angular 2 with back end nodejs with express. I am facing a problem that I have to send a form data to nodejs server and then find data from mongoDB database and get the response! Help me please. I can send only one value id but I have to search on the basis of different values. I don't know the id.
router.get('/task/:id', function(req, res, next){
var team = req.params.team;
var opposite = req.params.opposite;
var toss = req.params.toss;
var decision= req.params.decision;
var match = req.params.match;
var ground = req.params.ground;
var query = {};
query[team] = team;
query[Oteam] = Oteam;
query[TossWinner] = TossWinner;
query[Decision] = Decision;
query[Match] = Match;
query[NG] = NG;
db.pendingPredictions.findOne(query,function(err, pendingPrediction){
if(err)
{
res.send(err);
}
else {
res.json(pendingPrediction);
}
});
});

How to iterate mongodb database in node.js to send to Algolia?

In the documentation of Algolia, for the node.js part they specified to use MySQL for indexing but not MongoDB, I have another question regarding this issue but it is more a general question , check here
Some folks ask me to use mongo-connector but tried it and I got some unknown error, which got me to square one
My real question is, how do i iterate a list of collections in mongodb to algolia?
This is the Algolia's version of MySQL in Node.js
var _ = require('lodash');
var async = require('async');
var mysql = require('mysql');
var algoliasearch = require('algoliasearch');
var client = algoliasearch("RQGLD4LOQI", "••••••••••••••••••••••••••••••••");
var index = client.initIndex('YourIndexName');
var connection = mysql.createConnection({
host: 'localhost',
user: 'mysql_user',
password: 'mysql_password',
database: 'YourDatabaseName'
});
connection.query('SELECT * FROM TABLE_TO_IMPORT', function(err, results, fields) {
if (err) {
throw err;
}
// let's use table IDS as Algolia objectIDs
results = results.map(function(result) {
result.objectID = result.id;
return result;
});
// split our results into chunks of 5,000 objects, to get a good indexing/insert performance
var chunkedResults = _.chunk(results, 5000);
// for each chunk of 5,000 objects, save to algolia, in parallel. Call end() when finished
// or if any save produces an error
// https://github.com/caolan/async#eacharr-iterator-callback
async.each(chunkedResults, index.saveObjects.bind(index), end);
});
function end(err) {
if (err) {
throw err;
}
console.log('MySQL<>Algolia import done')
};
To be specific I'm using mongoose as my ORM, so I have no experience in other libraries. Please help me on this, so that I could some searching interface already :(.
You can use the following code to iterate over the whole MongoDB mydb.myCollection collection + create batches that will be sent to the Algolia index:
var Db = require('mongodb').Db,
Server = require('mongodb').Server,
algoliasearch = require('algoliasearch');
// init Algolia index
var client = algoliasearch("*********", "••••••••••••••••••••••••••••••••");
var index = client.initIndex('YourIndexName');
// init connection to MongoDB
var db = new Db('mydb', new Server('localhost', 27017));
db.open(function(err, db) {
// get the collection
db.collection('myCollection', function(err, collection) {
// iterate over the whole collection using a cursor
var batch = [];
collection.find().forEach(function(doc) {
batch.push(doc);
if (batch.length > 10000) {
// send documents by batch of 10000 to Algolia
index.addObjects(batch);
batch = [];
}
});
// last batch
if (batch.length > 0) {
index.addObjects(batch);
}
});
});

node-sqlite3 with iojs, koa and yield

I'm new to iojs and am trying to write a small web application with koa and node-sqlite3.
One thing I couldn't quite get my head around is how to use the 'yield' syntax on the node-sqlite3 callback based API.
I've googled around and all I've found was this stackoverflow post (synchronous sqlite transactions node), which says this is possible.
Can anyone please give me a pointer to more concrete examples?
Thanks in advance
After some more digging, I found some hint from the stackoverflow post Koa.js request with promises is hanging.
The trick is to use the native Promises.
Sample code below works with iojs v1.6.4 and Koa 0.19.0
var koa = require('koa');
var app = koa();
var route = require('koa-route');
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(':memory:');
function *query() {
var promise = new Promise(function(resolve, reject) {
var result = [];
db.serialize(function() {
db.run("DROP TABLE IF EXISTS lorem");
db.run("CREATE TABLE lorem (info TEXT)");
var stmt = db.prepare("INSERT INTO lorem VALUES (?)");
for (var i = 0; i < 10; i++) {
stmt.run("Ipsum " + i);
}
stmt.finalize();
db.all("SELECT rowid AS id, info FROM lorem", function(err, rows) {
resolve(rows);
});
});
});
return promise;
}
function *handler() {
this.body = yield query();
this.status = 200;
}
app.use(route.get('/list', handler));
app.listen(3000);
there is a nodejs module called co-sqlite3 here :
https://www.npmjs.com/package/co-sqlite3
promise based node-sqlite3 named co-sqlite3 for co or koa
Installing
npm install co-sqlite3
Usage
work with co
var co = require('co');
var sqlite3 = require('co-sqlite3');
co(function*() {
//connect a database
var db = yield sqlite3('test.db');
// create a table
yield db.run('CREATE TABLE IF NOT EXISTS testtable (id INT NOT NULL)');
var stmt = yield db.prepare('INSERT INTO testtable(id) VALUES( ? )');
for(var i =0 ; i < 100 ; i++){
yield stmt.run(i);
}
stmt.finalize();
var row = yield db.get('SELECT * FROM testtable WHERE id < ? ORDER BY ID DESC ' ,[50]);
console.log(row); // {id: 49}
var rows = yield db.all('SELECT * FROM testtable');
console.log(rows.length);
}).catch(function(err) {
console.log(err.stack);
});
work with koa
var koa = require('koa');
var sqlite3 = require('co-sqlite3');
var app = koa();
app.use(function*(next){
this.db = yield sqlite3('test.db');
yield next ;
});
app.use(function* (){
this.body = yield this.db.get('SELECT * FROM testtable WHERE id < ? ORDER BY ID DESC ' ,[50]);
})
app.listen(3000);
just as a promise
var sqlite3 = require('co-sqlite3');
sqlite3('test.db').then(function(db){
db.get('SELECT * FROM testtable WHERE id < ? ORDER BY ID DESC ' ,[50])
.then(function(row){
console.log(row);
});
});
While promises work, the major advantage of using koa is to leverage generators (the yield keyword). For that to happen, the library you're using needs to be prepared to work with generators.
I know you've specified you're using sqlite, but for an example of database access with yield, see how this mongodb package co-monk works:
yield users.insert({ name: 'Tobi', species: 'ferret' });
var res = yield users.findOne({ name: 'Tobi' });
res.name.should.equal('Tobi');
While you could use the co package for wrapping node-sqlite3 yourself, if you're starting you'll probably find it easier to use one of those existing co-based libraries.
There seems to be a generator-ready package for MySQL too, though I couldn't find any equivalent for sqlite.
In case that helps, here's a more complete blog post with examples of using co-monk: http://www.marcusoft.net/2014/04/koaExamples.html

Resources