I am starting out with Node.js and have been writing code for a couple of days. Right now I am creating a new Winston transport so I can use MSSQL or OracleDB as the log destination.
I decided to create a DB class to manage connections, inserts, and so on.
My idea is to declare a connection (null) in the constructor and then have methods to create the connection, insert, etc.
However, since connecting is async and Winston calls the transport's log method, I need to create the connection, make the insert, and then either close the connection or leave it open.
So I went with this:
class db {
    constructor() {
        this.connection = null;
    }
    connect() {
        return new Promise(function (resolve, reject) {
            if (this.connection == null) {
                if (config.Logging.DB.type == 'mssql') {
                    const dbOptions = {
                        user: config.Logging.DB.user,
                        password: config.Logging.DB.password,
                        server: config.Logging.DB.mssql.server,
                        database: config.Logging.DB.mssql.database,
                        options: {
                            encrypt: config.Logging.DB.encrypt
                        }
                    };
                    this.connection = new mssql.ConnectionPool(dbOptions, err => {
                        if (err) reject('Can\'t establish a DB connection.');
                        this.connection.connect(err => {
                            if (err) reject(err);
                            resolve();
                        });
                    });
                }
            } else {
                resolve();
            }
        });
    }
    insert(query) {
        if (config.Logging.DB.type == 'mssql') {
            this.connection.request().query(query, err => {
                console.log(err);
            });
        }
    }
}
My idea is to create a db instance when the app starts and then reuse that open connection for every insert (each time Winston calls the log method), like this:
const db = require('./functions/db');
const db_instance = new db();

//Custom DB transport
class DBTransport extends Transport {
    constructor(opts) {
        super(opts);
        //db.getConnection();
    }
    log(info, callback) {
        //write
        db_instance.connect().then(() => {
            db_instance.insert(`insert into logs (message) values ("${info.message}")`);
        }).catch((err) => {
            console.log(err);
        });
        callback();
    }
}
But this isn't working at all. First of all, I get an error in the db class's connect method because I am trying to access 'this.connection' inside the promise callback and can't.
I also want to keep the connection open, as Winston may call the log method many times (I have a process that calls it over 5K times).
I did try some of the available winston-mssql transports out there... but they are all outdated and don't work with the current version of winston.
Can anyone help?
You'll need to save the promise outside of your connect function and return that memoized promise each time like this:
class DBTransport {
    connect() {
        if (!this._pool) {
            // Memoize (cache) the connection promise so the pool is only created once
            const pool = new mssql.ConnectionPool(dbOptions); // dbOptions as in your config
            this._pool = pool.connect().then(function () {
                return pool;
            });
        }
        return this._pool;
    }
    insert(query) {
        // Always re-fetch your connection; once connected, this resolves immediately
        return this.connect().then(function (pool) {
            return pool.request().query(query);
        });
    }
}
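For completeness, here is a hedged sketch of how your transport's log method could lean on that memoized pool (assumptions: mssql and Transport are imported as in your question, the memoized class above is exported from ./functions/db, and a logs table with a message column exists). A parameterized request also avoids interpolating the message straight into the SQL:
const db_instance = new (require('./functions/db'))();

class DBTransport extends Transport {
    log(info, callback) {
        db_instance.connect().then((pool) => {
            // bind the message instead of string-interpolating it into the query
            return pool.request()
                .input('message', mssql.NVarChar, info.message)
                .query('insert into logs (message) values (@message)');
        }).catch((err) => {
            console.log(err);
        });
        callback();
    }
}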
I need to set default session configurations, such as TIME_ZONE, NLS_DATE_LANGUAGE, and others, for each connection in an Oracle database connection pool. The Knex.js documentation provides the snippet below, but for PostgreSQL:
var knex = require('knex')({
    client: 'pg',
    connection: { host: '' },
    pool: {
        afterCreate: function (conn, done) {
            // in this example we use the pg driver's connection API
            conn.query('SET timezone="UTC";', function (err) {
                if (err) {
                    // first query failed, return error and don't try to make next query
                    done(err, conn);
                } else {
                    // do the second query...
                    conn.query('SELECT set_limit(0.01);', function (err) {
                        // if err is not falsy, the connection is discarded from the pool
                        // if the connection acquire was triggered by a query, the error is passed to the query promise
                        done(err, conn);
                    });
                }
            });
        }
    }
});
The oracledb way of doing that is to define a callback for the sessionCallback attribute:
async function init() {
    try {
        await oracledb.createPool({
            user: 'USER',
            password: 'PWD',
            connectString: 'HOST:1521/SERVICE',
            sessionCallback: initSession // my default settings go here
        });
    } catch (e) {
        console.error(e);
    }
}

async function initSession(connection, requestedTag, cb) {
    try {
        await connection.execute(`
            ALTER SESSION SET
                TIME_ZONE = 'UTC'
                NLS_LANGUAGE = 'BRAZILIAN PORTUGUESE'
                NLS_TERRITORY = 'BRAZIL'
                NLS_CURRENCY = 'R$'
                NLS_ISO_CURRENCY = 'BRAZIL'
                NLS_NUMERIC_CHARACTERS = ',.'
                NLS_CALENDAR = 'GREGORIAN'
                NLS_DATE_FORMAT = 'DD/MM/YYYY'
                NLS_DATE_LANGUAGE = 'BRAZILIAN PORTUGUESE'
                NLS_SORT = 'WEST_EUROPEAN'
                NLS_TIMESTAMP_FORMAT = 'DD/MM/YYYY HH24:MI:SS'
                NLS_DUAL_CURRENCY = 'R$'
        `);
        // signal the pool that the session is ready; the connection itself
        // stays open and is handed back to the caller, so don't close it here
        cb();
    } catch (e) {
        console.error(e);
        cb(e);
    }
}
I would like to translate the snippet above into a Knex.js implementation. How can I do that?
Does your Knex use Oracle DB's connection pool? https://github.com/tgriesser/knex/issues/2665 shows it as a future project.
Try the pg snippet you posted, but using the ALTER SESSION from your other example.
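Something along these lines might work; treat it as a sketch only, assuming the oracledb client and that Knex's pool.afterCreate hands you the raw oracledb connection, whose execute() accepts a Node-style callback:
var knex = require('knex')({
    client: 'oracledb',
    connection: {
        user: 'USER',
        password: 'PWD',
        connectString: 'HOST:1521/SERVICE'
    },
    pool: {
        afterCreate: function (conn, done) {
            // run the session defaults once per pooled connection
            conn.execute(
                "ALTER SESSION SET TIME_ZONE = 'UTC' NLS_DATE_FORMAT = 'DD/MM/YYYY' NLS_DATE_LANGUAGE = 'BRAZILIAN PORTUGUESE'",
                function (err) {
                    // a truthy err discards the connection from the pool
                    done(err, conn);
                }
            );
        }
    }
});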
Eight out of ten times everything connects fine. That said, I sometimes get a "MongoClient must be connected before calling MongoClient.prototype.db" error. How should I change my code so it works reliably (100%)?
I tried a code snippet from one of the creators of the Zeit Now platform.
My handler
const { send } = require('micro');
const { handleErrors } = require('../../../lib/errors');
const cors = require('../../../lib/cors')();
const qs = require('micro-query');
const mongo = require('../../../lib/mongo');
const { ObjectId } = require('mongodb');

const handler = async (req, res) => {
    let { limit = 5 } = qs(req);
    limit = parseInt(limit);
    limit = limit > 10 ? 10 : limit;
    const db = await mongo();
    const games = await db
        .collection('games_v3')
        .aggregate([
            {
                $match: {
                    removed: { $ne: true }
                }
            },
            { $sample: { size: limit } }
        ])
        .toArray();
    send(res, 200, games);
};

module.exports = handleErrors(cors(handler));
My mongo script that reuses the connection in case the lambda is still warm:
// Based on: https://spectrum.chat/zeit/now/now-2-0-connect-to-database-on-every-function-invocation~e25b9e64-6271-4e15-822a-ddde047fa43d?m=MTU0NDkxODA3NDExMg==
const MongoClient = require('mongodb').MongoClient;

if (!process.env.MONGODB_URI) {
    throw new Error('Missing env MONGODB_URI');
}

let client = null;

module.exports = function getDb(fn) {
    if (client && !client.isConnected) {
        client = null;
        console.log('[mongo] client discard');
    }

    if (client === null) {
        client = new MongoClient(process.env.MONGODB_URI, {
            useNewUrlParser: true
        });
        console.log('[mongo] client init');
    } else if (client.isConnected) {
        console.log('[mongo] client connected, quick return');
        return client.db(process.env.MONGO_DB_NAME);
    }

    return new Promise((resolve, reject) => {
        client.connect(err => {
            if (err) {
                client = null;
                console.error('[mongo] client err', err);
                return reject(err);
            }
            console.log('[mongo] connected');
            resolve(client.db(process.env.MONGO_DB_NAME));
        });
    });
};
I need my handler to be 100% reliable.
if (client && !client.isConnected) {
    client = null;
    console.log('[mongo] client discard');
}
This code can cause problems! Even though you're setting client to null, that client still exists: it will keep connecting to mongo, it won't be garbage collected, and its connection callback will still run. Inside that callback, though, client now refers to the next client that was created, which is not necessarily connected.
A common pattern for this kind of code is to only ever return a single promise from the getDB call:
let clientP = null;

function getDb(fn) {
    if (clientP) return clientP;

    clientP = new Promise((resolve, reject) => {
        // declare the client locally so it can't leak or get swapped out
        const client = new MongoClient(process.env.MONGODB_URI, {
            useNewUrlParser: true
        });
        client.connect(err => {
            if (err) {
                console.error('[mongo] client err', err);
                return reject(err);
            }
            console.log('[mongo] connected');
            resolve(client.db(process.env.MONGO_DB_NAME));
        });
    });

    return clientP;
}
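A short usage sketch (assuming the handler from the question, where send comes from micro): because getDb always returns the same promise, every invocation can simply await it, and concurrent calls share a single connection attempt:
const handler = async (req, res) => {
    const db = await getDb();   // resolves immediately once the first connection has succeeded
    const games = await db.collection('games_v3').find().limit(5).toArray();
    send(res, 200, games);
};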
I had the same issue. In my case it was caused by calling getDb() before a previous getDb() call had returned. In this case, I believe that 'client.isConnected' returns true, even though it is still connecting.
This was caused by forgetting to put an 'await' before the getDb() call in one location. I tracked down which one by printing a call stack from getDb using:
console.log(new Error().stack);
I don't see the same issue in the sample code in the question, though it could be triggered by another bit of code that isn't shown.
I have written an article about serverless, Lambda, and DB connections. It covers some concepts that could help you find the root cause of your problem, along with examples and use cases for mitigating connection pool issues.
Just by looking at your code, I can tell it is missing this:
context.callbackWaitsForEmptyEventLoop = false;
Serverless: Dynamodb x Mongodb x Aurora serverless
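For context, here is a minimal sketch of where that flag lives, assuming an AWS Lambda-style callback handler (the question runs on Zeit Now, so treat this purely as an illustration of the setting; ./lib/mongo is the cached-client module above). It tells the runtime not to wait for open sockets, such as a cached MongoDB connection, before sending the response:
const getDb = require('./lib/mongo');

exports.handler = (event, context, callback) => {
    // don't wait for the cached Mongo socket to close before returning the response
    context.callbackWaitsForEmptyEventLoop = false;

    getDb()
        .then(db => db.collection('games_v3').find().limit(5).toArray())
        .then(games => callback(null, { statusCode: 200, body: JSON.stringify(games) }))
        .catch(callback);
};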
In my code below I wish to receive an "order" from the feed and store it in the database.
I understand that the class method marketEvent will be called whenever data is received and accordingly I would need my insert statement within this function.
It would be inefficient to open and close the DB connection each time the method is called, so I want to open the connection once and pass the db object through to marketEvent.
I am new to Node.js and WebSockets, so I can't work out how to put it together.
var pg = require("pg")
var conString = "postgres://myusername:mypassword#localhost/poloniex";
var client = new pg.Client(conString);
var autobahn = require('autobahn');
var wsuri = "wss://api.poloniex.com";
var connection = new autobahn.Connection({
url: wsuri,
realm: "realm1"
});
connection.onopen = function (session) {
function marketEvent (args,kwargs) {
client.query("INSERT INTO orderbook(order) values($1)", [args]);
}
session.subscribe('BTC_XMR', marketEvent);
}
connection.onclose = function () {
console.log("Websocket connection closed");
}
client.connect();
connection.open();
I am unaware of "pg" and "autobahn" both. But from the documentation of "pg" package, may I offer you a hint of solution
client.connect() is not meant to be an isolated call. It expects a function delegate to carry out the operation.
connection.onopen = function (session) {
    function marketEvent(args, kwargs) {
        client.connect(function (err) {
            if (err) throw err;
            client.query("INSERT ...", [args]);
        });
    }
    session.subscribe('BTC_XMR', marketEvent);
};
As for using the connection efficiently, you are probably looking for pooling.
var pool = new pg.Pool(config);

pool.connect(function (err, client, done) {
    if (err) throw err;
    client.query('INSERT ...', function (err, result) {
        // call `done()` to release the client back to the pool
        done();
    });
});
Please refer to the original source of this suggestion for the exact usage:
https://www.npmjs.com/package/pg
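Putting the two ideas together, a rough sketch (only a sketch: the data column name, the JSON.stringify of args, and the pool options are assumptions, not part of your original code) would create the pool once at startup and borrow a client inside marketEvent:
var pg = require('pg');
var autobahn = require('autobahn');

// one pool for the whole process, created at startup
var pool = new pg.Pool({
    connectionString: 'postgres://myusername:mypassword@localhost/poloniex'
});

var connection = new autobahn.Connection({
    url: 'wss://api.poloniex.com',
    realm: 'realm1'
});

connection.onopen = function (session) {
    function marketEvent(args, kwargs) {
        // borrow an idle client from the pool, insert, then release it
        pool.connect(function (err, client, done) {
            if (err) { return console.error(err); }
            client.query('INSERT INTO orderbook(data) VALUES($1)', [JSON.stringify(args)], function (err) {
                done();
                if (err) { console.error(err); }
            });
        });
    }
    session.subscribe('BTC_XMR', marketEvent);
};

connection.open();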
I want to use gridfs-stream in a nodejs application.
A simple example is given in the documentation:
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

mongoose.connect('mongodb://localhost:27017/test');

// make sure the db instance is open before passing into `Grid`
mongoose.connection.once('open', function () {
    var gfs = Grid(mongoose.connection);
    // all set!
});
My problem is described by the comment:
make sure the db instance is open before passing into Grid
I try to use gfs in a POST request, but when the code gets initialized, the gfs variable is not defined yet.
api.post('/upload', function (req, res) {
    req.pipe(gfs.createWriteStream({
        filename: 'test'
    }).on('close', function (savedFile) {
        console.log('file saved', savedFile);
        return res.json({ file: savedFile });
    }));
});
Initializing my route from a callback seems kind of odd.
I read in this post (Asynchronous initialization of Node.js module) that require() is performed synchronously, and since I rely on the connection being established, I'm kind of forced to wait for it.
Basically, I'm not sure whether I should use an async pattern on startup, or whether I'm just missing a more elegant way to solve this.
I have a very similar problem with my server. In my case I am reading HTTPS certs and the software version from git asynchronously, and I want to have it all together by the time the user comes to log in, so I can pass the software version back in the login reply.
The solution is to use promises. Create the promises on startup for each activity. Then, in the code where you want to be sure it's all ready, just call .then() on either the promise itself or on Promise.all(arrayOfPromises).
Here is an example of what I am doing to read the SSL certs to start the server:
class Web {
    constructor(manager, logger) {
        var self = this;
        this.server = false;
        this.logger = logger;

        var key = new Promise((resolve, reject) => {
            fs.readFile(path.resolve(__dirname, 'key.pem'), (err, data) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            });
        });

        var cert = new Promise((resolve, reject) => {
            fs.readFile(path.resolve(__dirname, 'certificate.pem'), (err, data) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            });
        });

        Promise.all([key, cert]).then(values => {
            var certs = {
                key: values[0],
                cert: values[1],
            };
            return certs;
        }).then(certs => {
            self.server = require('http2').createServer(certs, (req, res) => {
                // NOW started and can do the rest of the stuff
            });
            self.server.listen(...);
        });
    }
}
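Applied back to the gridfs-stream question, a minimal sketch of the same pattern (assuming api is the Express-style router from the question) would export a promise that resolves with gfs once the connection opens, and wait on it inside the route:
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

mongoose.connect('mongodb://localhost:27017/test');

// resolves with a ready gfs instance once the connection is open
var gfsReady = new Promise(function (resolve, reject) {
    mongoose.connection.once('open', function () {
        resolve(Grid(mongoose.connection));
    });
    mongoose.connection.on('error', reject);
});

api.post('/upload', function (req, res) {
    gfsReady.then(function (gfs) {
        req.pipe(gfs.createWriteStream({ filename: 'test' }).on('close', function (savedFile) {
            console.log('file saved', savedFile);
            return res.json({ file: savedFile });
        }));
    });
});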
I'm working on a web app with Node.js, Express, and MongoDB.
In my 'main' file, where I listen for API calls, I include a Users class that has methods like Users.authenticate(userObject, callback) and Users.getById(userId, callback).
Sorry for this long code snippet. It's just a snippet of my users class.
function Users(db) {
    if (!db) {
        return {'message': 'creating an instance of Users requires a database'};
    } else {
        this.db = db;
        return this;
    }
}

Users.prototype.authenticate = function (user, callback) {
    if (!user.username) {
        return {'message': 'Users.authenticate(user, callback) requires user.username'};
    } else if (!user.password) {
        return {'message': 'Users.authenticate(user, callback) requires user.password'};
    } else if (!callback) {
        return {'message': 'Users.authenticate(user, callback) requires callback(err, user)'};
    }
    this.db.collection('users', function (err, collection) {
        if (err) { return {'message': 'could not open users collection'}; }
        /* query for the user argument */
        collection.findOne(user, function (err, doc) {
            if (!err) {
                if (!doc) {
                    callback({'message': 'user does not exist'}, null);
                } else {
                    callback(null, doc);
                }
            } else {
                callback({'message': 'error finding user'}, null);
            }
        });
    });
};

exports.Users = Users;
That's it
I pass an open DB connection to my Users class, and make a call like the following:
var server = new mongo.Server('localhost', '27017', {auto_reconnect: true});
var db = new mongo.Db('supportdash', server, {"fsync": true});

// open connection to db, init users
db.open(function (err, db) {
    var users = new Users(db);
    users.authenticate({"username": "admin", "password": "password"}, function (err, user) {
        // do something with the error, or user object
    });
});
Now for my questions
Should I be passing an open db connection, or should I be passing the info needed (localhost, port, database name) for the Users class to manage its own connection?
I tried to set up testing with jasmine-node, but I ran into a lot of problems with async database calls. I wanted to add a user and then test that Users.authenticate was working. I used Jasmine's runs() and waitsFor() async helpers, but I could not get them to work. I later ran into an issue (with a different class) that took me a while to debug, and testing would have saved me a lot of time. Any advice on how to test my classes that interact with a MongoDB database?
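For reference, here is roughly the shape I was attempting (jasmine-node with the Jasmine 1.x runs()/waitsFor() helpers and the mongo/Users setup from above; the timeout and assertions are just placeholders, not a working recipe):
describe('Users.authenticate', function () {
    it('finds a previously inserted user', function () {
        var result = null, error = null, finished = false;

        runs(function () {
            db.open(function (err, db) {
                var users = new Users(db);
                users.authenticate({"username": "admin", "password": "password"}, function (err, user) {
                    error = err;
                    result = user;
                    finished = true;
                });
            });
        });

        // block the spec until the async callback above has fired (or 5 seconds pass)
        waitsFor(function () { return finished; }, 'authenticate to complete', 5000);

        runs(function () {
            expect(error).toBeNull();
            expect(result.username).toBe('admin');
        });
    });
});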