How to setup postgis extension for knex? - node.js

I am new to node and am building a simple API to handle geographic data.
For this I am trying to implement knex-postgis
I have a connection file that I require in my queries.js like so:
const knex = require('./knex');
and use it
update(id, poi) {
return knex('poi').where('id', id).update(poi, '*');
The docs say to implement the extension like this:
const knex = require('knex');
const knexPostgis = require('knex-postgis');
const db = knex({
dialect: 'postgres'
});
// install postgis functions in knex.postgis;
const st = knexPostgis(db);
/* or:
* knexPostgis(db);
* const st = db.postgis;
*/
Can someone please explain where in my structure I implement this code? This is the first time I am using an extension. Do I put it in my knex.js file?
My knex.js looks like this:
// knex.js — builds one shared knex connection from the knexfile.
// NODE_ENV picks which knexfile section (development/staging/...) is used.
const environment = process.env.NODE_ENV || 'development';
const config = require('../knexfile');
const environmentConfig = config[environment];
const knex = require('knex');
// Single connection instance, exported so every module shares the same pool.
const connection = knex(environmentConfig);
module.exports = connection;
EDIT:
I tried putting this in my queries.js file
// `knex` here is the already-configured connection exported by ./knex.
const knex = require('./knex');
const knexPostgis = require('knex-postgis');
// st exposes the PostGIS helpers (st.geomFromText, ...) for that connection.
const st = knexPostgis(knex);
// NOTE(review): `knex` is a connection instance, not the factory, so calling
// it with a config object does not create a second configured instance —
// this `db` is likely not what's intended. The answers point out the queries
// below should be built on the configured instance, not on a stray one.
const db = knex({
dialect: 'postgres'
});
My create function:
create() {
const sql = knex.insert({
geom: st.geomFromText('Point(-71.064544 44.28787)', 4326)
}).into('poi').toString();
console.log(sql);
return sql
It console.logs valid SQL that works in pgAdmin, but in postman I get
"message": "queries.create(...).then is not a function",
And finally my route
// POST / — creates a poi row from the request body.
// NOTE(review): `.then` requires queries.create to return a promise (e.g. the
// knex query builder); returning a string triggers the ".then is not a
// function" error above. Also missing a .catch / next(err) for failures.
router.post('/', (req, res, next) => {
queries.create(req.body).then(poi => {
res.json(poi[0]);
});
});

You are returning a string from your create method, but expect a promise interface in your route handler.
And you are using knex instead of db for query building.
Try this
const builder = db.insert(/*same as above*/).into('tablename');
const sql = builder.toString();
console.log(sql);
// Return builder instance itself (promise) instead of string
return builder;

You're calling knex.insert. You should be calling db.insert.

Related

How to use cls-hooked unmanaged transactions?

I'm writing tests with jest and sequelize and I need to keep my database clean for every test, so I want to set a transaction for every test and then rollback at the end.
This is what I've got, but it wont pass the transaction to my tests:
// Open an unmanaged transaction before every test, with the intent of rolling
// it back afterwards so each test sees a clean database.
beforeEach(async () => {
// NOTE(review): `this` in an arrow function is not the jest test context, so
// the transaction is stashed on the module-level `this` — TODO confirm intent.
this.transaction = await db.sequelize.transaction();
});
test('Database should be clean', async () => {
// NOTE(review): create() is never told about this.transaction; with CLS a
// query only joins a transaction created via sequelize.transaction(callback),
// which is presumably why the data is not isolated — verify.
const role = await db.role.create({
name: 'someName',
});
expect(role.id).toBe(1);
});
afterEach(async () => {
// Undo whatever the test wrote.
await this.transaction.rollback();
});
Sequelize is already set up to use CLS:
// Sequelize wired up with cls-hooked so queries can implicitly share a
// transaction bound to the CLS namespace.
const Sequelize = require('sequelize');
const config = require('../../config/config.js');
const cls = require('cls-hooked');
const namespace = cls.createNamespace('testing-namespace');
// The namespace is registered before the Sequelize instance is created.
Sequelize.useCLS(namespace);
const sequelize = new Sequelize(config);
...
It would be really helpful if someone could explain to me how to use unmanaged transactions with cls-hooked.
I managed to keep my database clean by using umzug to run seeds programmatically; this is my code:
// Reset the database between tests by reverting and re-running all seeders
// with umzug (v2). Slower than a transaction rollback, but reliable.
const { db } = require('../models');
const Umzug = require('umzug');
const path = require('path');

const umzug = new Umzug({
  migrations: {
    // Seed files live in ../../seeders relative to this test helper.
    path: path.join(__dirname, './../../seeders'),
    // Each seeder receives the query interface as its parameter.
    params: [db.sequelize.getQueryInterface()],
  },
  logging: false,
});

beforeEach(async () => {
  // Revert every executed seeder, then re-apply them all for a clean slate.
  // (Return values were unused, so they are not captured.)
  await umzug.down({ to: 0 });
  await umzug.up();
});
I tried to use umzug v3 (beta), but it didn't work, so I used the stable version 2. This approach isn't as performant as I'd like, but gets the job done.

why is node-postgres only working with routes

I am currently trying to query some data from AWS RDS using postgresql and pg for node.js. For some reason, I found out I can only query data when I am using a route handler.
for example:
const data = require('../data/rand.json');
const db = require('../db');
const Router = require('express-promise-router');
const router = new Router();
module.exports = router;
// GET / — runs the query per request and logs the result.
// NOTE(review): the result is only logged, never sent in the response, and
// `data` above is unused in this snippet.
router.get('/', (req,res) => {
db.query(`SELECT * FROM rooms WHERE id < 10`)
.then((val) => console.log(val))
.catch(err => console.log(err))
})
However, when I try to query data like this:
const db = require('../db');
// NOTE(review): this query runs immediately at module load. The
// ERR_INVALID_ARG_TYPE ("key" ... Received null) suggests the Pool has no
// password at this point — presumably the env vars are loaded after this
// module executes (e.g. dotenv ordering) — TODO confirm.
db.query(`SELECT * FROM rooms WHERE id < 10`)
.then((val) => console.log(val))
.catch(err => console.log(err))
I get this error - TypeError [ERR_INVALID_ARG_TYPE]: The "key" argument must be of type string or an instance of Buffer, TypedArray, DataView, or KeyObject. Received null
I am fine to continue using routes to query, but was wondering why this is occurring and if it is normal. I couldn't find much info on the node-postgres docs. I am still new to this, thanks for any help.
edit: db file looks like this
// db.js — single pg connection pool; with no explicit config, Pool reads the
// standard PG* environment variables.
const { Pool } = require('pg');
//don't need to add obj w/ env var inside Pool
const pool = new Pool();
// Thin query helper so callers never touch the pool directly.
module.exports = {
query: (text, params) => pool.query(text, params),
}

Loopback get model from datasource with discoverSchemas

I've been testing loopback for a couple of hours now and everything is working fine when I'm creating models manually and modifying the generated model.json to match my Oracle DB column names.
But I'm getting stuck when I want to get a model from my Oracle DB, to avoid writing the 50 columns manually.
I made a test with a table called "atest" that contains the columns "name" and "id".
It creates atest.json, and adds this in model-config.json:
"Atest": {
"dataSource": "oracledb",
"public": true
}
But in my atest.json there is just "undefined".
My discover-models.js file :
'use strict';
const loopback = require('loopback');
const promisify = require('util').promisify;
const fs = require('fs');
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdirp = promisify(require('mkdirp'));
const DATASOURCE_NAME = 'oracledb';
const dataSourceConfig = require('./server/datasources.json');
const db = new loopback.DataSource(dataSourceConfig[DATASOURCE_NAME]);
discover().then(
success => process.exit(),
error => { console.error('UNHANDLED ERROR:\n', error); process.exit(1); },
);
// Discovers the ATEST table schema from Oracle, writes it as a loopback model
// definition, and registers the model in server/model-config.json.
async function discover() {
// It's important to pass the same "options" object to all calls
// of dataSource.discoverSchemas(), it allows the method to cache
// discovered related models
const options = { relations: false };
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
// NOTE(review): discoverSchemas keys its result as 'SCHEMA_NAME.ATEST', not
// plain 'ATEST' (see the answer below), so this lookup yields undefined —
// which is exactly what ends up serialized into atest.json.
// Create model definition files
await mkdirp('common/models');
var response = await writeFile(
'common/models/atest.json',
JSON.stringify(atestSchemas['ATEST'], null, 2)
);
// NOTE(review): the promisified fs.writeFile resolves with undefined, so this
// always logs undefined.
console.log(response);
// Expose models via REST API
const configJson = await readFile('server/model-config.json', 'utf-8');
console.log('MODEL CONFIG', configJson);
const config = JSON.parse(configJson);
// Register the discovered model against the oracle datasource and expose it.
config.Atest = { dataSource: DATASOURCE_NAME, public: true };
await writeFile(
'server/model-config.json',
JSON.stringify(config, null, 2)
);
}
My Oracle connection is working fine; I don't get it. Any ideas?
Add a console.log after you invoke discoverSchemas:
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(atestSchemas);
You should see that the key is not just 'ATEST', as referenced later with atestSchemas['ATEST']. The key is 'SCHEMA_NAME.ATEST' (SCHEMA_NAME will vary as per your environment).
If you target the appropriate key, you should get what you're looking for.

Add global plugin for Mongoose on runtime

I am currently trying to attach a global Mongoose plugin at runtime, with no luck. My plugin requires a few dependencies and options generated during my app's bootstrapping, so I need to add it sequentially. Mongoose seems to ignore everything wrapped within a closure.
const mongoose = require('mongoose');
const config = {};
const {DB_CONNECT} = process.env;
// Global plugin: should add mymethod to every schema compiled after it is
// registered.
const myPlugin = schema => {
console.log('done'); // this line is not logged at all
schema.methods.mymethod = () => {};
}
const connectAndAddPlugins = async () => {
await mongoose.connect(
DB_CONNECT,
{...config}
);
// NOTE(review): mongoose.plugin only affects schemas compiled *after* this
// call runs; any model compiled before it never receives the plugin — which
// is presumably why 'done' is never logged here.
mongoose.plugin(myPlugin)
};
connectAndAddPlugins();
Any help will be highly appreciated.
Apparently, once a model has been compiled and loaded by Mongoose, global plugins are no longer attached to it; models should therefore be registered after the plugins:
const mongoose = require('mongoose');
const config = {};
const {DB_CONNECT} = process.env;
const myPlugin = schema => {
console.log('done'); // logs once per schema compiled after registration
schema.methods.mymethod = () => {};
}
const connectAndAddPlugins = async () => {
await mongoose.connect(
DB_CONNECT,
{...config}
);
// Register the global plugin before any model is compiled.
mongoose.plugin(myPlugin)
};
// Compile models only after the plugin is in place.
const loadModels = () => {
const model = mongoose.model('Cat', { name: String });
}
connectAndAddPlugins();
// NOTE(review): connectAndAddPlugins is async, so loadModels() here runs
// before the awaited connect resolves — verify 'Cat' is really compiled
// after mongoose.plugin(); awaiting connectAndAddPlugins() first would make
// the ordering explicit.
loadModels();

How to use knexjs globally in a sails js application

How do I get the knex object in my controllers or any other model files if I am not using waterline?
for eg.:
in my api/models/Users.js
// api/models/Users.js — plain module (no waterline); each method is expected
// to build and return a knex query.
module.exports = {
find : function(id){
// my knex query
},
insert : function(data){
// my knex query again
}
}
So in my controllers I will just do:
var result = Users.find(id);
or
var result = Users.insert({username : 'sailsjs'});
or the knex object should be available globally without being used in the model files themselves, so that I can do the knex query in the controller itself:
// UsersController/index
index : function(req, res){
// my knex query
}
Thanks
Arif
//config/bootstrap.js
//config/bootstrap.js
// Runs once before the app lifts; builds a single shared knex instance and
// stashes it on sails.config so any controller/model can reach it.
module.exports.bootstrap = function (cb) {
  var Knex = require('knex');
  // Knex.initialize() is the legacy (pre-0.8) entry point; calling the
  // required factory directly is the supported equivalent in current knex.
  var knex = Knex({
    client : "mysql",
    connection : {
      host :'localhost',
      user :'root',
      database : 'sales_force',
      password : '*******'
    }
  });
  // Tag the instance so it is easy to verify the same connection/pool is
  // reused everywhere.
  knex.instanceId = new Date().getTime();
  sails.config.knex = knex;
  // It's very important to trigger this callback method when you are finished
  // with the bootstrap! (otherwise your server will never lift, since it's waiting on the bootstrap)
  cb();
};
// in the controller
var knex = sails.config.knex
this returns the knex object. the knex.instanceId shows that the same connection is used all over.
Please suggest if this might cause any problems.
Thanks
Arif
The best option for using Knex.js globally in Sails.js (tested for version 1+) is to create a file named knex.js inside the config directory, like this:
/**
* Knex Js, Alternate DB Adapter, In case needed, it is handy for doing migrations
* (sails.config.knex)
*
*
* For all available options, see:
* http://knexjs.org/
*/
const developmentDBConfig = require('./datastores');
const stagingDBConfig = require('./env/staging');
const productionDBConfig = require('./env/production');
// Resolve the datastore connection URL for the current NODE_ENV.
// Anything other than 'staging' or 'production' falls back to development.
function getConnectionString() {
  switch (process.env.NODE_ENV) {
    case 'staging':
      return stagingDBConfig.datastores.default.url;
    case 'production':
      return productionDBConfig.datastores.default.url;
    default:
      return developmentDBConfig.datastores.default.url;
  }
}
// Exported as sails.config.knex (config files key exports by filename), so it
// is reachable from helpers/controllers/views.
module.exports.knex = require('knex')({
client: 'postgresql',
connection: getConnectionString()
});
Now, in any file (helpers/controllers/views etc.) you can use knex like this:
// Now use this knex object for anything like:
let user = await sails.config.knex('user').select('*').first();

Resources