node-sqlite3 with iojs, koa and yield - node.js

I'm new to iojs and am trying to write a small web application with koa and node-sqlite3.
One thing I couldn't quite get my head around is how to use the 'yield' syntax with node-sqlite3's callback-based API.
I've googled around, and all I found was this Stack Overflow post (synchronous sqlite transactions node), which says this is possible.
Can anyone please give me a pointer to more concrete examples?
Thanks in advance

After some more digging, I found a hint in the Stack Overflow post Koa.js request with promises is hanging.
The trick is to use native Promises.
The sample code below works with iojs v1.6.4 and Koa 0.19.0:
var koa = require('koa');
var app = koa();
var route = require('koa-route');
var sqlite3 = require('sqlite3').verbose();

var db = new sqlite3.Database(':memory:');

// Wrap the callback-based node-sqlite3 calls in a native Promise,
// which koa can yield directly.
function *query() {
  var promise = new Promise(function(resolve, reject) {
    db.serialize(function() {
      db.run("DROP TABLE IF EXISTS lorem");
      db.run("CREATE TABLE lorem (info TEXT)");
      var stmt = db.prepare("INSERT INTO lorem VALUES (?)");
      for (var i = 0; i < 10; i++) {
        stmt.run("Ipsum " + i);
      }
      stmt.finalize();
      db.all("SELECT rowid AS id, info FROM lorem", function(err, rows) {
        if (err) return reject(err);
        resolve(rows);
      });
    });
  });
  return promise;
}

function *handler() {
  this.body = yield query();
  this.status = 200;
}

app.use(route.get('/list', handler));
app.listen(3000);
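A small follow-up sketch (not part of the original answer): the same idea can be factored into a reusable helper that promisifies db.all, so any query can be yielded from a koa handler. The /list2 route name is just an illustration.
// Sketch: a reusable helper wrapping node-sqlite3's callback-based db.all in a native Promise
function all(sql, params) {
  return new Promise(function(resolve, reject) {
    db.all(sql, params || [], function(err, rows) {
      if (err) return reject(err);
      resolve(rows);
    });
  });
}

function *listHandler() {
  // assumes the lorem table created above already exists
  this.body = yield all("SELECT rowid AS id, info FROM lorem");
}

app.use(route.get('/list2', listHandler));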

There is a Node.js module called co-sqlite3:
https://www.npmjs.com/package/co-sqlite3
It is a promise-based version of node-sqlite3 for use with co or koa.
Installing
npm install co-sqlite3
Usage
Work with co:
var co = require('co');
var sqlite3 = require('co-sqlite3');

co(function*() {
  // connect a database
  var db = yield sqlite3('test.db');

  // create a table
  yield db.run('CREATE TABLE IF NOT EXISTS testtable (id INT NOT NULL)');

  var stmt = yield db.prepare('INSERT INTO testtable(id) VALUES( ? )');
  for (var i = 0; i < 100; i++) {
    yield stmt.run(i);
  }
  stmt.finalize();

  var row = yield db.get('SELECT * FROM testtable WHERE id < ? ORDER BY ID DESC', [50]);
  console.log(row); // {id: 49}

  var rows = yield db.all('SELECT * FROM testtable');
  console.log(rows.length);
}).catch(function(err) {
  console.log(err.stack);
});
Work with koa:
var koa = require('koa');
var sqlite3 = require('co-sqlite3');
var app = koa();

app.use(function*(next) {
  this.db = yield sqlite3('test.db');
  yield next;
});

app.use(function*() {
  this.body = yield this.db.get('SELECT * FROM testtable WHERE id < ? ORDER BY ID DESC', [50]);
});

app.listen(3000);
Use it just as a promise:
var sqlite3 = require('co-sqlite3');

sqlite3('test.db').then(function(db) {
  db.get('SELECT * FROM testtable WHERE id < ? ORDER BY ID DESC', [50])
    .then(function(row) {
      console.log(row);
    });
});

While promises work, the major advantage of using koa is to leverage generators (the yield keyword). For that to happen, the library you're using needs to be prepared to work with generators.
I know you've specified you're using sqlite, but for an example of database access with yield, see how this mongodb package co-monk works:
yield users.insert({ name: 'Tobi', species: 'ferret' });
var res = yield users.findOne({ name: 'Tobi' });
res.name.should.equal('Tobi');
While you could use the co package to wrap node-sqlite3 yourself, if you're just starting out you'll probably find it easier to use one of these existing co-based libraries.
There seems to be a generator-ready package for MySQL too, though I couldn't find an equivalent for sqlite.
In case it helps, here's a more complete blog post with examples of using co-monk: http://www.marcusoft.net/2014/04/koaExamples.html
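As a rough illustration of wrapping node-sqlite3 yourself (a sketch only, not from the answer above; it assumes the thunkify package, which turns callback-style functions into thunks that co and koa can yield):
var co = require('co');
var thunkify = require('thunkify');
var sqlite3 = require('sqlite3').verbose();

var db = new sqlite3.Database(':memory:');

// db.get and db.all take (sql, [params,] callback), so thunkify makes them yieldable
var get = thunkify(db.get.bind(db));
var all = thunkify(db.all.bind(db));

co(function *() {
  var rows = yield all("SELECT 1 AS answer");
  console.log(rows); // [ { answer: 1 } ]
}).catch(function(err) {
  console.error(err.stack);
});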

Related

Structure of a synchronous application in Node.js and MongoDb

I need to build an application that does these things (in order):
On load:
01- connect to MongoDB 'db'
02- create a collection 'cas'
03- check if a web page has updates; if yes go to step 04, if not go to step 07
04- do web scraping (using Cheerio) of the web site and get a $ variable like this: $ = cheerio.load(body);
05- process this object to extract only the information I'm interested in and organize it in a jsons array like this one:
var jsons = [
  {year: 2015, country: Germany, value: 51},
  {year: 2015, country: Austria, value: 12},
  {year: 2016, country: Germany, value: 84},
  {year: 2016, country: Bulgaria, value: 104},
  ...
];
06- insert each of these elements ({year: 2015, country: Germany, value: 51}, ...) into the collection 'cas' of database 'db'
07- download the data (for example to a CSV file)
08- create a web page to visualize these data using D3.js
09- disconnect from 'db'
If Node.js were synchronous, I could write something like this:
var url = 'http://...';
var jsons = [];

connectDb('db');
createCollection('db', 'cas');

if(checkForUpdates(url)) {
  var $ = scrape(url);
  jsons = elaborate($);
  for(var i = 0; i < jsons.length; i++) {
    saveDocumentOnDbIfNotExistsYet('db', 'cas', jsons[i]);
  }
}

downloadCollectionToFile('db', 'cas', './output/casData.csv');
createBarChart('./output/casData.csv');
disconnectDb('db');
But Node.js is asynchronous so this code would not work properly.
I've read that I can use Promises to get the code to run in a certain order.
I read the documentation about the Promise and some sites that showed simple tutorials.
The structure of a Promise is:
// some code (A)
var promise = new Promise(function(resolve, reject) {
  // some code (B)
});
promise.then(function() {
  // some code (C)
});
promise.catch(function() {
  // some code (D)
});
// some code (E)
// some code (E)
If I understood correctly, in this case the execution (if Node.js were synchronous) would be equivalent to:
// some code (A)
// some code (E)
if(some code (B) does not produce errors) {
  // some code (C)
}
else {
  // some code (D)
}
or (with A and E swapped, because they are asynchronous):
// some code (E)
// some code (A)
if(some code (B) does not produce errors) {
  // some code (C)
}
else {
  // some code (D)
}
So now I wonder what is the right structure for my application.
I thought about:
var cheerio = require('cheerio');
var express = require('express');
var fs = require('fs');
var MongoClient = require('mongodb').MongoClient;

var dbUrl = 'mongodb://localhost:27017/';
var dbName = 'db';
var collectionName = 'cas';

const app = express(); // run using > node app.js

// connect to db
var connect = function(url) {
  return new Promise(function(resolve, reject) {
    MongoClient.connect(url + dbName, function(err, db) {
      if(err) {
        reject(err);
      }
      else {
        console.log('Connected');
        resolve(db);
      }
    });
  });
}

// create collection
connect.then(function(db) {
  db.createCollection(collectionName, function(err, res) {
    if(err) {
      throw err;
    }
    else {
      console.log('Collection', collectionName, 'created!');
    }
  });
});

// connection error
connect.catch(function(err) {
  console.log('Error during connection...');
  throw err;
});
Is this right? If yes, how can I proceed with the other steps?
How can I improve my code?
EDIT 1
Following the example of Андрей Щербаков, I modified my code in this way:
app.js:
// my files
var db = require('./middlewares/db.js');

var url = 'mongodb://localhost:27017/';
var dbName = 'db';
var collectionName = 'cas';

const start = async function() {
  const connect = await db.connectToMongoDb(url, dbName);
  const cas = await connect.createYourCollection(collectionName);
  const isPageHasUpdates = oneMoreFunction(); // i don't know how you gonna check it
  if(isPageHasUpdates) {
    await step 4;
    await step 5;
    await step 6;
  }
  await step 7
  return something; // if you want
}

start()
  .then(res => console.log(res)) // here you can use result of your start function if you return something or skip this then
  .catch(err => console.log(err)); // do something with your error
middlewares/db.js:
var MongoClient = require('mongodb').MongoClient;

let dbInstance;
var methods = {};

methods.connectToMongoDb = function(url, dbName) {
  if(dbInstance) {
    return dbInstance;
  }
  else {
    MongoClient.connect(url + dbName, function(err, db) {
      if(!err) {
        dbInstance = db;
        return db;
      }
    });
  }
}

methods.createYourCollection = function(collectionName) {
  ?.createCollection(collectionName, function(err, res) {
    if(err) {
      throw err;
    }
  });
}

module.exports = methods;
But I'm not sure I'm doing this well.
How can I separate functions into different files? For example, I want to put all the db-related functions in middlewares/db.js, but I have a problem with the line ?.createCollection(collectionName, function(err, res).
If you are running Node version 7.6 or higher, a better way is to use async/await, which works with promises.
So your code will look like:
const start = async() => {
  const connect = await connectToMongoDb(url);
  const cas = await connect.createYourCollection();
  const isPageHasUpdates = oneMoreFunction(); // i don't know how you gonna check it
  if(isPageHasUpdates) {
    await step 4;
    await step 5;
    await step 6;
  }
  await step 7
  return something; // if you want
}

start()
  .then(res => console.log(res)) // here you can use result of your start function if you return something or skip this then
  .catch(err => console.log(err)); // do something with your error
Of course, any function you are going to await should be promisified, as you did with your connect function (but if you are using https://www.npmjs.com/package/mongodb, its functions are already promisified).
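For illustration only (checkForUpdates and its callback signature are hypothetical stand-ins for the question's step 3), promisifying a callback-based step looks roughly like this:
// hypothetical wrapper: turns a callback-style checkForUpdates(url, cb) into an awaitable function
const checkForUpdatesAsync = (url) => {
  return new Promise((resolve, reject) => {
    checkForUpdates(url, (err, hasUpdates) => {
      if (err) reject(err);
      else resolve(hasUpdates);
    });
  });
};

// inside the async start() function:
// const isPageHasUpdates = await checkForUpdatesAsync(url);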
Update
The best way would be to use mongoose, but if you want to work with the native mongodb driver you can write your db module like this: https://pastebin.com/BHHc0uVN (just an example).
You can expand this example as you want.
You can create a createCollection function:
const createCollection = (connection, collectionName) => {
  return connection.createCollection(collectionName); // actually i'm not sure that this function exists in mongodb driver
}
And usage will be:
const mongodbLib = require('./lib/mongodb'); // path to db.js file

mongodbLib.init()
  .then(connection => mongodbLib.createCollection(connection, 'cas'))
  .then(() => doSmthElse())
Or, if you are sure that init is done (you can do it once before your main script, like starting the server or whatever you are doing):
const mongodbLib = require('./lib/mongodb'); // path to db.js file
const connection = mongodbLib.getConnection();
Or, if you simply want to work with the collection as in step 6, add your cas collection (like the user collection in the example file). This also requires that your init function has completed.
So usage will be:
const mongodbLib = require('./lib/mongodb');
const cas = mongodbLib.collections.cas;

cas().insertMany(docs)
  .then()
  .catch()
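For reference, a minimal module of that shape might look roughly like the sketch below. This is only an assumption about the structure of the linked pastebin, not its actual contents; it uses the mongodb 2.x driver API from the question.
// lib/mongodb.js -- hypothetical sketch of an init/getConnection/collections module
const MongoClient = require('mongodb').MongoClient;

const url = 'mongodb://localhost:27017/';
const dbName = 'db';

let connection = null;

// connect once and cache the db object; connect() returns a promise when no callback is given
const init = () =>
  MongoClient.connect(url + dbName).then((db) => {
    connection = db;
    return db;
  });

const getConnection = () => connection;

// expose frequently used collections as functions, as in the usage example above
const collections = {
  cas: () => connection.collection('cas')
};

module.exports = { init, getConnection, collections };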

nodejs get sqlite3 query result using promise or wait

This is my first personal project in Nodejs. I'm trying to get it live soon.
I have a Nodejs server that uses sqlite3. There are only 3000 rows, each with a word, a transform and a precalculated value in the columns of the table, which is already populated.
I need to just lookup the word in the DB to be sure it is valid.
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");
db.get("SELECT * FROM tab WHERE w = ?", word, function(err, row) {
if(err) { console.log("Lookup:",word,", Error => ",err); return false; }
return true;
});
The problem is that the caller of this code has a lot of context and needs the operation to wait. So I tried this:
function dbLookup(db, w) {
  return function(cb) {
    var rows = [];
    db.exec('SELECT w FROM tab WHERE w = "'+w+'"')
      .on('row', function(r) {
        rows.push(r)
      })
      .on('result', function() {
        cb(rows);
      });
  }
}

async.each([word], function(w) {
  dbLookup(this.db, w);
}, function(err) {
  if(err) { console.log("...ERROR..."); return false; }
  else { console.log("...SUCCESS..."); return true; }
});
This doesn't solve the wait issue, as the callback can fire at its own pace.
I read that promises, using something like bluebird, can solve my problem,
but now I'm not able to get the value/result of the query out.
I've been pulling my hair out for so long. Please help me either get the async working or get the result back from the promise approach.
var async = require('async');
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");

function check(word, callback) {
  db.get("SELECT count(1) cnt FROM tab WHERE w = ?", word, callback)
}

async.map(words, check, function(err, results) {
  if (err)
    return console.log('Query error')
  var all_checked = results.filter(function(r) {
    return r.cnt > 0
  });
  ...
});
Or
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");
db.all("SELECT distinct w FROM tab", function(err, rows) {
var all_checked = words.filter(function (w) {
return rows.indexOf(w) != -1;
})
...
})
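If you want the promise approach the question asks about, a minimal sketch (using native Promises; the same shape works with bluebird) could be:
// sketch: wrap the callback-based db.get in a Promise so callers can wait for the result
function isValidWord(word) {
  return new Promise(function(resolve, reject) {
    db.get("SELECT 1 FROM tab WHERE w = ?", word, function(err, row) {
      if (err) return reject(err);
      resolve(row !== undefined); // true if the word exists
    });
  });
}

// usage
isValidWord("hello").then(function(valid) {
  console.log("valid?", valid);
});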

How to iterate mongodb database in node.js to send to Algolia?

In the Algolia documentation, the Node.js part shows how to index from MySQL but not from MongoDB. I have another question regarding this issue, but it is more general; check here.
Some folks asked me to use mongo-connector, but I tried it and got an unknown error, which put me back at square one.
My real question is: how do I iterate over a MongoDB collection and send it to Algolia?
This is Algolia's MySQL example in Node.js:
var _ = require('lodash');
var async = require('async');
var mysql = require('mysql');
var algoliasearch = require('algoliasearch');

var client = algoliasearch("RQGLD4LOQI", "••••••••••••••••••••••••••••••••");
var index = client.initIndex('YourIndexName');

var connection = mysql.createConnection({
  host: 'localhost',
  user: 'mysql_user',
  password: 'mysql_password',
  database: 'YourDatabaseName'
});

connection.query('SELECT * FROM TABLE_TO_IMPORT', function(err, results, fields) {
  if (err) {
    throw err;
  }
  // let's use table IDS as Algolia objectIDs
  results = results.map(function(result) {
    result.objectID = result.id;
    return result;
  });
  // split our results into chunks of 5,000 objects, to get a good indexing/insert performance
  var chunkedResults = _.chunk(results, 5000);
  // for each chunk of 5,000 objects, save to algolia, in parallel. Call end() when finished
  // or if any save produces an error
  // https://github.com/caolan/async#eacharr-iterator-callback
  async.each(chunkedResults, index.saveObjects.bind(index), end);
});

function end(err) {
  if (err) {
    throw err;
  }
  console.log('MySQL<>Algolia import done')
};
To be specific, I'm using mongoose as my ORM, so I have no experience with other libraries. Please help me with this, so that I can get some search interface up already :(.
You can use the following code to iterate over the whole MongoDB mydb.myCollection collection and create batches that will be sent to the Algolia index:
var Db = require('mongodb').Db,
    Server = require('mongodb').Server,
    algoliasearch = require('algoliasearch');

// init Algolia index
var client = algoliasearch("*********", "••••••••••••••••••••••••••••••••");
var index = client.initIndex('YourIndexName');

// init connection to MongoDB
var db = new Db('mydb', new Server('localhost', 27017));
db.open(function(err, db) {
  // get the collection
  db.collection('myCollection', function(err, collection) {
    // iterate over the whole collection using a cursor
    var batch = [];
    collection.find().forEach(function(doc) {
      batch.push(doc);
      if (batch.length > 10000) {
        // send documents by batch of 10000 to Algolia
        index.addObjects(batch);
        batch = [];
      }
    }, function(err) {
      // the cursor is exhausted: send the last (partial) batch
      if (batch.length > 0) {
        index.addObjects(batch);
      }
    });
  });
});
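Since the question mentions mongoose, here is a rough equivalent using a mongoose query stream (a sketch only; it assumes a MyCollection model is already defined elsewhere, so adjust the names to your schema):
var mongoose = require('mongoose');
var algoliasearch = require('algoliasearch');

var client = algoliasearch("*********", "••••••••••••••••••••••••••••••••");
var index = client.initIndex('YourIndexName');

mongoose.connect('mongodb://localhost:27017/mydb');

// MyCollection is assumed to be a mongoose model you have defined elsewhere
var MyCollection = mongoose.model('MyCollection');

var batch = [];
MyCollection.find().lean().stream()
  .on('data', function(doc) {
    batch.push(doc);
    if (batch.length >= 10000) {
      // send documents to Algolia in batches of 10000
      index.addObjects(batch);
      batch = [];
    }
  })
  .on('end', function() {
    // send the last (partial) batch
    if (batch.length > 0) {
      index.addObjects(batch);
    }
    console.log('MongoDB<>Algolia import done');
  });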

nodejs, pgSQL and Async queries

I am new to nodejs and I am trying to fetch some data from my PG server. I manage to get my data, but not in the order I expected. I may not be using it the proper way; can anyone help?
Here is a code sample:
var pg = require('pg');
var db = new pg.Client(conString);
var link = db.connect();
var data = {};

// -----
console.log(prefix+'Fetching categories');
db.query('SELECT DISTINCT category FROM cc WHERE category IS NOT NULL', function(err, data){
  data.rows.forEach(function(row){
    data[row.category] = {}; // initialise
  });
  console.log('1111111', data, '---------');
});

console.log('2222222', data, '---------');
for (var category in data)
{
  console.log(prefix+'Listing values for on "'+category+'"');
  var values = db.query('SELECT SUBSTRING(date::varchar, 1,7) AS month, sum(amount) FROM cc WHERE category = \''+category+'\' GROUP BY 1 ORDER BY 1', function(err, data){
    console.log('Got values', data.rows);
  });
}

// -----
console.log(prefix+'Ending connection to database');
// db.end();

// -----
console.log(prefix+'Ending transaction on server side');
response.end();
I get 2222222 before 1111111 :/ so my result is sent while still empty and only then gets filled :( What do I have to do?
Thanks for your time!
Take a look at promises: https://github.com/promises-aplus/promises-spec (one good lib for Node.js is q - http://documentup.com/kriskowal/q/). They are really useful for keeping Node.js code coherent and neat among all those callbacks. A must for every Node.js developer.
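For example, a rough sketch of the question's code using native Promises (the same idea applies with q; conString, prefix and response are assumed to exist as in the question):
var pg = require('pg');
var db = new pg.Client(conString);

// small helper that wraps the callback-based query() in a Promise
function queryAsync(sql) {
  return new Promise(function(resolve, reject) {
    db.query(sql, function(err, result) {
      if (err) reject(err);
      else resolve(result);
    });
  });
}

db.connect(function(err) {
  if (err) throw err;
  queryAsync('SELECT DISTINCT category FROM cc WHERE category IS NOT NULL')
    .then(function(result) {
      // only run the per-category queries after the first query has finished
      return Promise.all(result.rows.map(function(row) {
        return queryAsync("SELECT SUBSTRING(date::varchar, 1,7) AS month, sum(amount) FROM cc WHERE category = '" + row.category + "' GROUP BY 1 ORDER BY 1");
      }));
    })
    .then(function(perCategory) {
      perCategory.forEach(function(r) {
        console.log('Got values', r.rows);
      });
      response.end();
      db.end();
    })
    .catch(function(err) {
      console.error(err);
      db.end();
    });
});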

Making Mocha test work for my db module

I am new to Mocha, and have only a little experience with Node/Express. My DbProvider module (mongodb) works perfectly when I access it through my Express app, and now I want to test it. I have read the Mocha site and some tutorials I could find, but I'm having big trouble finding a real-world example out there that I could follow (any links much appreciated!).
Here is my unsuccessful attempt to write a test file:
var DbProvider = require('../db').DbProvider;
var assert = require('assert');
var dbProvider = new DbProvider('localhost', 27017, 'mydb');
var util = require('util');

console.log(util.inspect(dbProvider));

describe('DbProvider', function(){
  describe('findAllNotes', function(){
    it('should return some notes', function(){
      dbProvider.findAllNotes({}, function (err, result){
        assert(result.length > 0);
      });
    })
  })
})
The output I get is this:
$ mocha
{}
✖ 1 of 1 test failed:
1) DbProvider findAllNotes should return some notes:
TypeError: Cannot call method 'collection' of undefined
at DbProvider.doOperation (/Users/frode/Node/json/db.js:46:11)
at DbProvider.findAllNotes (/Users/frode/Node/json/db.js:56:8)
at Context.<anonymous> (/Users/frode/Node/json/test/test.js:15:18)
(cutting out the rest)
It seems that I am failing to create the dbProvider. This works perfectly in my app... How can I make this work? (And perhaps also: is the way I have set it up generally OK?)
Edit: Here is the db.js file:
// Database related
'use strict';

var MongoClient = require('mongodb').MongoClient;
var BSON = require('mongodb').BSONPure;
var ObjectID = require('mongodb').ObjectID;
var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
var Validator = require('validator').Validator
var fieldMaxLength = 1024;
//var util = require('util');

var DbProvider = function(host, port, database) {
  var dbUrl = "mongodb://"+host+":"+port+"/"+database;
  var self = this;
  MongoClient.connect(dbUrl, function(err, db) {
    self.db = db;
  });
};

// Do some basic validation on the data we get from the client/user
var validateParams = function(params, callback) {
  // Let's do a quick general sanity check on the length of all fields
  for(var key in params) {
    if(params[key].length > fieldMaxLength) callback(new Error('Field ' + key + ' is too long.'));
  }
  // and then let us check some specific fields better
  if (params._id) {
    if(checkForHexRegExp.test(params._id)) {
      // In case of '_id' we also need to convert it to BSON so that mongodb can use it.
      params._id = new BSON.ObjectID(params._id);
    } else {
      var err = {error: 'Wrong ID format'};
    }
  }
  if(err) callback(err);
}

// Generalized function for operations on the database
// Todo: Generalize even more when user authentication is implemented
DbProvider.prototype.doOperation = function(collection, operation, params, callback) {
  validateParams(params, callback);
  var operationCallback = function(err, result) {
    callback(err, result);
  };
  this.db.collection(collection, function(err, collection) {
    if(operation==='find') {
      collection.find().toArray(operationCallback);
    } else {
      collection[operation](params, operationCallback);
    }
  });
}

DbProvider.prototype.findAllNotes = function(params, callback) {
  this.doOperation('notes', 'find', params, callback);
};

DbProvider.prototype.findNoteById = function(params, callback) {
  this.doOperation('notes', 'findOne', params, callback);
};

DbProvider.prototype.saveNote = function(params, callback) {
  params.created_at = new Date();
  this.doOperation('notes', 'save', params, callback);
};

DbProvider.prototype.deleteNote = function(params, callback) {
  this.doOperation('notes', 'remove', params, callback);
};

DbProvider.prototype.findUser = function(params, callback) {
  this.doOperation('users', 'findOne', params, callback);
};

exports.DbProvider = DbProvider;
SOLUTION:
After Benjamin told me to handle the async nature of mongodb's connection to the database, and inspired by his suggestion on how to adapt the code, I split the constructor function DbProvider into two parts. The first part, the constructor DbProvider, now just saves the db parameters into a variable. The second part, a new function DbProvider.connect, does the actual async connection. See below.
var DbProvider = function(host, port, database) {
  this.dbUrl = "mongodb://"+host+":"+port+"/"+database;
};

DbProvider.prototype.connect = function(callback) {
  var self = this;
  MongoClient.connect(this.dbUrl, function(err, db) {
    self.db = db;
    callback();
  });
};
So I can now write a Mocha test like this (async tests also need done to be called, as you see in the code below):
var assert = require('assert');
var DbProvider = require('../db').DbProvider;
var dbProvider = new DbProvider('localhost', 27017, 'nki');

describe('DbProvider', function(){
  describe('findAllNotes', function(){
    it('should return some notes', function(done){
      dbProvider.connect(function(){
        dbProvider.findAllNotes({}, function (err, result){
          assert(result.length > 0);
          done();
        });
      });
    })
  })
})
Note that the actual test ("should return some notes") is nothing to be proud of. What I wanted here was to get set up so I am able to test something. Now that I finally can do that, I need to write good tests (something along the lines of having a test database, clearing it, testing that a document can be inserted, testing that a document can be found, and so on...).
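A rough sketch of that kind of setup (only an illustration, assuming a dedicated test database and reusing the saveNote/deleteNote/findAllNotes methods from db.js above) might look like:
var assert = require('assert');
var DbProvider = require('../db').DbProvider;

// use a dedicated test database so real data is never touched
var dbProvider = new DbProvider('localhost', 27017, 'nki_test');

describe('DbProvider', function() {
  before(function(done) {
    dbProvider.connect(done);
  });

  beforeEach(function(done) {
    // start every test from an empty 'notes' collection
    dbProvider.deleteNote({}, function() { done(); });
  });

  it('saves a note and finds it again', function(done) {
    dbProvider.saveNote({ title: 'test note' }, function(err) {
      assert.ifError(err);
      dbProvider.findAllNotes({}, function(err, result) {
        assert.ifError(err);
        assert.equal(result.length, 1);
        done();
      });
    });
  });
});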
And in my Express app, I used to set up the database like this:
var DbProvider = require('./db').DbProvider;

// Setup db instance
var dbProvider = new DbProvider(
  process.env.mongo_host || 'localhost',
  process.env.mongo_port || 27017,
  process.env.mongo_db || 'nki'
);
Now I do the same, but in addition, I call the new connect-function:
// Connect to db. I use (for now) 1 connection for the lifetime of this app.
// And I do not use a callback when connecting here (we do in the testing)
dbProvider.connect(function(){});
Benjamin actually pointed out that it may be OK, but not best practice, to set up the database like this in an Express app. Until I figure out what the best practice really is, I will leave this code as it is. Here are a couple of links regarding the subject that I found (but I have still not concluded how I will solve it myself):
What's the best practice for MongoDB connections on Node.js? and
[node-mongodb-native] MongoDB Best practices for beginner
If you like, you are very welcome to follow/fork/whatever this project on GitHub. My goal is to get it as production ready as I can. The link is:
https://github.com/frodefi/node-mongodb-json-server
MongoClient.connect is asynchronous.
From the docs:
callback (function) – this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the initialized db object or null if an error occured.
That means DbProvider.db isn't set yet in the test which is why you're getting undefined.
In here:
MongoClient.connect(dbUrl, function(err, db) {
self.db = db;
});
You're telling it "update self.db after the connection has happened", which is at least one event-loop tick after this one (but may be more). In your Mocha code you're executing your .describe and .it methods right after creating your DbProvider instance, which means it has not been initialized yet.
I suggest that you refactor DbProvider to accept a callback instead of being a constructor function. Maybe something along the lines of:
var getDbProvider = function(host, port, database, callback) {
  var dbUrl = "mongodb://"+host+":"+port+"/"+database;
  MongoClient.connect(dbUrl, function(err, db) {
    callback(db);
  });
};
Which also means moving all the DBProvider methods to an object (maybe the callback will return a dbprovider object and not just a db?).
Another bug solved by using Unit Tests :)
This is what I used: https://github.com/arunoda/mocha-mongo
It has a set of testing helpers for mongodb.
