How do I get the Knex object in my controllers or other model files if I am not using Waterline?
For example, in my api/models/Users.js:
module.exports = {
  find: function (id) {
    // my knex query
  },
  insert: function (data) {
    // my knex query again
  }
};
So in my controllers I will just do:
var result = Users.find(id);
or
var result = Users.insert({username : 'sailsjs'});
Or the Knex object could be available globally, without being required in the model files at all, so that I can run the Knex query in the controller itself:
// UsersController/index
index: function (req, res) {
  // my knex query
}
Thanks
Arif
// config/bootstrap.js
module.exports.bootstrap = function (cb) {
  var Knex = require('knex');
  var knex = Knex.initialize({
    client: 'mysql',
    connection: {
      host: 'localhost',
      user: 'root',
      database: 'sales_force',
      password: '*******'
    }
  });
  knex.instanceId = new Date().getTime();
  sails.config.knex = knex;

  // It's very important to trigger this callback when you are finished
  // with the bootstrap! (Otherwise your server will never lift, since
  // it's waiting on the bootstrap.)
  cb();
};
// in the controller
var knex = sails.config.knex;
This returns the Knex object, and knex.instanceId confirms that the same instance is reused everywhere.
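For instance, a controller action can then run queries directly (a minimal sketch; the users table and the response handling are hypothetical):
// api/controllers/UsersController.js (hypothetical)
module.exports = {
  index: function (req, res) {
    var knex = sails.config.knex;
    // Knex query builders are thenable, so this returns a promise chain
    knex('users')
      .where('id', req.param('id'))
      .then(function (rows) {
        return res.json(rows);
      })
      .catch(function (err) {
        return res.serverError(err);
      });
  }
};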
Please suggest if this might cause any problems.
Thanks
Arif
The best option for using Knex.js globally in Sails.js (tested on version 1+) is to create a file named knex.js inside the config directory, like this:
/**
 * Knex.js, alternate DB adapter. In case needed, it is handy for doing migrations.
 * (sails.config.knex)
 *
 * For all available options, see:
 * http://knexjs.org/
 */
const developmentDBConfig = require('./datastores');
const stagingDBConfig = require('./env/staging');
const productionDBConfig = require('./env/production');
function getConnectionString() {
  let dbConnectionString = developmentDBConfig.datastores.default.url;
  if (process.env.NODE_ENV === 'staging') {
    dbConnectionString = stagingDBConfig.datastores.default.url;
  }
  if (process.env.NODE_ENV === 'production') {
    dbConnectionString = productionDBConfig.datastores.default.url;
  }
  return dbConnectionString;
}
module.exports.knex = require('knex')({
  client: 'postgresql',
  connection: getConnectionString()
});
Now, in any file (helpers/controllers/views etc.) you can use Knex as:
// Now use this knex object for anything like:
let user = await sails.config.knex('user').select('*').first();
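If a file uses Knex in several places, you can alias the instance once (a minimal sketch; the user table comes from the example above, and the active column is an assumption):
const knex = sails.config.knex;

// Every query-builder call returns a promise, so async/await works throughout:
let firstUser = await knex('user').select('*').first();
let activeUsers = await knex('user').where({ active: true });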
I'm having a problem with my MongoDB connection string in my Next.js CRUD application in production.
I followed this guide: https://www.mongodb.com/developer/how-to/nextjs-building-modern-applications/
And I read about environment variables here: https://nextjs.org/docs/basic-features/environment-variables, which gave me the idea that I should be able to safely store my connection string as an environment variable without exposing it to the browser, given that I should only need to use it server-side.
It works perfectly fine when I run the application locally. But in production (Azure App Service) the connection string appears undefined unless I expose it to the browser by adding the "NEXT_PUBLIC_" prefix to the variable.
Is it safe to expose this variable? Is there something I should do differently to make it work without exposing it? Or is there another approach that should be taken entirely?
My database.js:
import { MongoClient } from 'mongodb';
import nextConnect from 'next-connect';
const client = new MongoClient(process.env.DB_CONNECTION_STRING, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});
async function database(req, res, next) {
  await client.connect();
  req.dbClient = client;
  req.db = client.db('Loggen');
  return next();
}
const middleware = nextConnect();
middleware.use(database);
export default middleware;
You should not expose env variables.
A) Create a .env.local file in your project and set up your local env variables there (normally all .env files are git-ignored; check your .gitignore file).
B) Define your Vercel .env variables (with the same values for the connection).
C) As discussed here, you should follow this example and check how they manage the connection (it's an official example) to avoid connection duplication and errors.
D) Remember that your .env variables are accessible only server-side. You can transfer them to the client side if you like, but it's not recommended.
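For local development, the two variables used in the module below could be defined like this (a sketch; the values are placeholders):
# .env.local (git-ignored; placeholder values)
MONGODB_URI=mongodb+srv://user:password@cluster0.example.mongodb.net
MONGODB_DB=Loggen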
Your database.js (in the example: mongodb.js) should be:
import { MongoClient } from 'mongodb';

const MONGODB_URI = process.env.MONGODB_URI;
const MONGODB_DB = process.env.MONGODB_DB;

// check the MongoDB URI
if (!MONGODB_URI) {
  throw new Error('Define the MONGODB_URI environment variable');
}

// check the MongoDB DB
if (!MONGODB_DB) {
  throw new Error('Define the MONGODB_DB environment variable');
}

let cachedClient = null;
let cachedDb = null;

export async function connectToDatabase() {
  // load from cache if we already connected once
  if (cachedClient && cachedDb) {
    return {
      client: cachedClient,
      db: cachedDb,
    };
  }

  // set the connection options
  const opts = {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  };

  // connect to the cluster
  let client = new MongoClient(MONGODB_URI, opts);
  await client.connect();
  let db = client.db(MONGODB_DB);

  // set the cache
  cachedClient = client;
  cachedDb = db;

  return {
    client: cachedClient,
    db: cachedDb,
  };
}
Also, it's better not to use your next-connect approach; it will create a large number of connections.
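A hypothetical API route using this helper might look like the following (the users collection name and the file locations are assumptions):
// pages/api/users.js (hypothetical)
import { connectToDatabase } from '../../lib/mongodb';

export default async function handler(req, res) {
  const { db } = await connectToDatabase();
  // Reuses the cached client on warm invocations instead of reconnecting
  const users = await db.collection('users').find({}).limit(20).toArray();
  res.status(200).json(users);
}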
I am new to Node and am building a simple API to handle geographic data.
For this I am trying to implement knex-postgis.
I have a connection file that I require in my queries.js like so:
const knex = require('./knex');
and use it like this:
update(id, poi) {
  return knex('poi').where('id', id).update(poi, '*');
}
The docs say to implement the extension like this:
const knex = require('knex');
const knexPostgis = require('knex-postgis');
const db = knex({
  dialect: 'postgres'
});

// install postgis functions in knex.postgis
const st = knexPostgis(db);

/* or:
 * knexPostgis(db);
 * const st = db.postgis;
 */
Can someone please explain where in my structure I should implement this code? This is the first time I am using an extension. Do I put it in my knex.js file?
My knex.js looks like this:
const environment = process.env.NODE_ENV || 'development';
const config = require('../knexfile');
const environmentConfig = config[environment];
const knex = require('knex');
const connection = knex(environmentConfig);
module.exports = connection;
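For reference, the ../knexfile required above might look something like this (a sketch; the client and connection details are placeholders):
// knexfile.js (hypothetical)
module.exports = {
  development: {
    client: 'pg',
    connection: 'postgres://localhost/geo_dev'
  },
  production: {
    client: 'pg',
    connection: process.env.DATABASE_URL
  }
};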
EDIT:
I tried putting this in my queries.js file:
const knex = require('./knex');
const knexPostgis = require('knex-postgis');
const st = knexPostgis(knex);
const db = knex({
  dialect: 'postgres'
});
My create function:
create() {
  const sql = knex.insert({
    geom: st.geomFromText('Point(-71.064544 44.28787)', 4326)
  }).into('poi').toString();
  console.log(sql);
  return sql;
}
It console.logs valid SQL that works in pgAdmin, but in Postman I get:
"message": "queries.create(...).then is not a function",
And finally, my route:
router.post('/', (req, res, next) => {
  queries.create(req.body).then(poi => {
    res.json(poi[0]);
  });
});
You are returning a string from your create method, but your route handler expects a promise interface.
You are also using knex instead of db for the query building.
Try this:
const builder = db.insert(/*same as above*/).into('tablename');
const sql = builder.toString();
console.log(sql);
// Return builder instance itself (promise) instead of string
return builder;
You're calling knex.insert. You should be calling db.insert.
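Putting it together, queries.js could look like this (a minimal sketch; it reuses the ./knex connection module shown above rather than creating a second instance):
// queries.js (hypothetical consolidation)
const knex = require('./knex');              // the shared connection instance
const knexPostgis = require('knex-postgis'); // install the PostGIS helpers on it
const st = knexPostgis(knex);

module.exports = {
  create(poi) {
    // Return the builder (a promise), not a string; the point is
    // hardcoded here just as in the question's example.
    return knex.insert({
      geom: st.geomFromText('Point(-71.064544 44.28787)', 4326)
    }).into('poi').returning('*');
  }
};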
I have a Node.js application built on the Express.js framework.
const express = require('express');

const app = express();
require('./config')(app);
require('./services')(app);
In ./config/config.js we instantiate the config:
module.exports = function (app) {
  const conf = { APIKey: 1234567890, url: '<someurl>' };
  app.set('config', conf);
};
In ./services/APIService.js we create the service instance (a singleton):
module.exports = (app) => {
  app.set('apiService', new APIService(app));
};

function APIService(app) {
  this.app = app;
  const config = app.get('config');
  this.key = config.APIKey;
}

APIService.prototype.sendRequest = function () {
  const config = this.app.get('config');
  this._send(config.url, 'some text');
};
Or, service2:
module.exports = function (app) {
  const config = app.get('config');
  const myMod = require('myMod')(config.APIKey);
};
Cool, everything works correctly. But at some point an administrator will change some config data, so we create a new config and set it:
const newConf = { APIKey: 1234000000, url: '<some_new_url>' };
app.set('config', newConf);
APIService.sendRequest will now send requests to the CHANGED url, but APIService.key is still unchanged, and myMod was already instantiated with the old config data.
We would need to write setter methods like this:
// for APIService
APIService.prototype.setConfig = function () {
  const config = this.app.get('config');
  this.key = config.APIKey;
};

// for service2:
/* change const myMod to let myMod and create a method for overriding it */
Or, bang! Kill and restart the Node.js server process. Bad idea. Is there some method for this goal, something like app.restart(), for safely reinitializing the application (or parts of it)?
Did you try calling app.set('apiService', new APIService(app)); again? Or just have a getter and setter on the prototype for your params.
A better way would be to create a new APIService object on each request with a middleware, something like:
app.use(function (req, res, next) {
  req.api = new APIService(app);
  next();
});
And use req.api.
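Alternatively, if you want to keep the singleton, you can make it read the config lazily so it always sees the latest values (a minimal sketch; the property names match the question's config):
function APIService(app) {
  this.app = app;
}

// Reading the config on each access means app.set('config', newConf)
// is picked up automatically, with no setter methods needed.
Object.defineProperty(APIService.prototype, 'key', {
  get: function () {
    return this.app.get('config').APIKey;
  }
});

APIService.prototype.sendRequest = function () {
  const config = this.app.get('config');
  this._send(config.url, 'some text');
};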
I'm building a web app that I would like to use with two databases, selected based on a GET query. These two databases have the same schema; the only difference is that one has live data and the other has scrubbed (or test) data.
This works fine, but I'm wondering if this is the proper way to go about solving this problem.
I'm referencing a model with a schema: names_model.js
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var namesSchema = new Schema({
  name: String,
  createdAt: String
});
module.exports = mongoose.model('names', namesSchema);
And this is my main file. If the query param is 1 it will connect to the first db; otherwise it will connect to the second db.
var mongoose = require('mongoose');
var db = mongoose.createConnection('mongodb://localhost/database1');
var db2 = db.useDb('database2');
var NamesDB = require('./names_model.js');
var Connect = db.model('names', NamesDB);
var Connect2 = db2.model('names', NamesDB);
exports.getData = function (dbName, sendBack) {
  console.log(dbName);
  var Names;
  if (dbName == 1) {
    Names = Connect;
  } else {
    Names = Connect2;
  }
  Names.find({}, function (err, docs) {
    if (err) {
      console.log(err);
    } else {
      sendBack(docs);
    }
  });
};
Like I mentioned above, this does work, though I feel that I might be creating extra steps for myself, but I'm not quite sure. I'm hoping someone might be able to tell me if there's an easier way.
Thanks!
T
You could use an environment variable to define whether you are on the live/production site or the development one.
Define an env variable on your systems (usually it is NODE_ENV=<env-name>) and then use a condition on process.env.NODE_ENV to define what to use for each env:
var dbName;
if (process.env.NODE_ENV === 'development') {
  // Define the development db
  dbName = 'database1';
} else if (process.env.NODE_ENV === 'production') {
  // Define the production db
  dbName = 'database2';
}

var db = mongoose.createConnection('mongodb://localhost/' + dbName);
var NamesDB = require('./names_model.js');
var Connect = db.model('names', NamesDB);
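If you really do need to pick the database per request (as in the original question), a small lookup map keeps the handler tidy (a sketch based on the question's code, with Connect and Connect2 defined as in the question):
var models = {
  '1': Connect,  // database1
  '2': Connect2  // database2
};

exports.getData = function (dbName, sendBack) {
  // Fall back to the scrubbed/test db for unknown values
  var Names = models[dbName] || Connect2;
  Names.find({}, function (err, docs) {
    if (err) return console.log(err);
    sendBack(docs);
  });
};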
I'm writing a Loopback script that is supposed to be called by cron.
In order to obtain the app object, I do:
var app = require('./server/server');

// Script logic
console.log('done');
However, the script does not exit once it finishes execution. How do I make it exit?
Reference: http://docs.strongloop.com/display/public/LB/Working+with+LoopBack+objects
Finally found the cause of this issue.
The problem is that the database connection (in my case, MongoDB via loopback-connector-mongodb) is still open.
To disconnect the database connection, and subsequently exit the console script:
var app = require('./server/server');
app.dataSources.DATASOURCENAME.disconnect();
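A complete cron script might then look like this (a sketch; DATASOURCENAME and the Item model are placeholders for your own names):
// cron-script.js (hypothetical)
var app = require('./server/server');

app.models.Item.count(function (err, n) {
  if (err) console.error(err);
  else console.log('done,', n, 'items');
  // Disconnect so the process can exit
  app.dataSources.DATASOURCENAME.disconnect();
});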
In some places I've read that the issue is the http server preventing the script from shutting down.
I ended up with a module that does not even start an http server. I named it loopback-init.js, and I usually import it from migrations and scripts (the important part is the custom callback passed to boot()):
'use strict';

const Promise = require('bluebird');
const loopback = require('loopback');
const boot = require('loopback-boot');
const logger = require('logger');

const app = loopback();

boot(app, __dirname + '/../server', err => {
  if (err) throw err;
  logger.debug('Loopback initialized.');
  // Override start so nothing ever lifts the http server,
  // and expose a close helper for scripts.
  app.start = function () {};
  app.close = function (cb) {
    app.removeAllListeners('started');
    app.removeAllListeners('loaded');
    if (cb) cb();
  };
});

const autoMigrate = Promise.promisify(
  app.dataSources.db.automigrate,
  { context: app.dataSources.db }
);
app.autoMigrate = autoMigrate;

module.exports = app;
And my db-migrate scripts look like this:
'use strict';

var dbm;
var type;
var seed;

/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
  dbm = options.dbmigrate;
  type = dbm.dataType;
  seed = seedLink;
};

exports.up = function (db) {
  const lb = require('loopback-init');
  return lb.autoMigrate('Item')
    .then(lb.close, lb.close);
};

exports.down = function (db) {
  return db.dropTable('item');
};

exports._meta = {
  "version": 1
};
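With this in place, running db-migrate up performs the Loopback automigration and the process exits cleanly, because lb.close is invoked in both the success and failure branches of the promise.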