OpenWhisk failing to call Compose PostgreSQL - node.js

I have tried for more hours than I care to admit to get an OpenWhisk function to call a PostgreSQL database on Compose.io. My latest incarnation is this:
function myAction(params) {
    return new Promise(function(resolve, reject) {
        console.log('Connecting to Compose database');
        // console.log('Params ---> ', params);
        var mysql = require('promise-mysql');
        var fs = require('fs');
        var pg = require('pg');
        var request = require('request');
        var Promise = require('promise/lib/es6-extensions');
        var connString = "postgres:xxxx";
        pg.connect(connString, function(err, client, done) {
            console.log("connecting..", err, client, done);
            if (err) {
                console.log('[connectToCompose] failed to fetch client from pool', err);
                reject(err);
            } else {
                params.client = client;
                params.done = done;
                console.log('[connectToCompose] obtained a Compose client');
                return (params);
            }
        });
        // params.client.done();
        // console.log("closing connection");
    });
}
exports.main = myAction;
exports.main = myAction;
I have a similar example where I connect to a different SQL database (not Compose) using promise-mysql rather than pg, and it works. What am I doing wrong?

To work with OpenWhisk and any database offering, you need to use promisified JavaScript code: the action has to return a Promise so the platform knows when the asynchronous work is done; you can't rely on the event loop staying alive the way an ordinary long-running Node.js process does. I have an example that uses pg-promise (taken more or less exactly from the project docs) and it works fine for me. Try something like this:
const promise = require('bluebird');
const initOptions = {
    promiseLib: promise // overriding the default (ES6 Promise)
};
const pgp = require('pg-promise')(initOptions);
const conn_info = {...connection info...};
const db = pgp(conn_info);

module.exports.main = function main(args) {
    return db.any('SELECT * FROM items')
        .then(data => {
            console.log('DATA:', data);
            // return whatever data you wanted
            return { message: 'success' };
        })
        .catch(error => {
            console.log('ERROR:', error);
            return { error: error.message };
        });
};
Not all of the dependencies here are available on OpenWhisk by default, so when you deploy the action, include both your *.js file and the whole of node_modules/ in a zip file, and upload that. It's definitely possible to use Compose PostgreSQL with OpenWhisk, if that helps to encourage you :)

Related

Mongodb native connections from nodejs return undefined databases list

I'm just starting to use Mongodb without mongoose (to get away from the schemas), and wanted to create a simple module with various exported functions to use in the rest of my app. I've pasted the code below.
The problem I'm having is that the databasesList.databases comes back as undefined, and I'm not sure why. There should be 2 databases on my cluster, and one collection in each database.
As a tangential question, I thought maybe I would check the collections instead (now commented out), but although I found this page (https://docs.mongodb.com/manual/reference/method/db.getCollectionNames/), the function getCollectionNames seems not to exist. Now I'm wondering if I'm using the wrong documentation and that is why my databases are coming back undefined.
const client = new MongoClient(uri);
const connection = client.connect(function (err, database) {
    if (err) throw err;
    else if (!database) console.log('Unknown error connecting to database');
    else {
        console.log('Connected to MongoDB database server');
    }
});

module.exports = {
    getDatabaseList: function() {
        console.log('start ' + client);
        databasesList = client.db().admin().listDatabases();
        //collectionList = client.db().getCollectionNames();
        //console.log("Collections: " + collectionList);
        console.log("Databases: " + databasesList.databases);
        //databasesList.databases.forEach(db => console.log(` - ${db.name}`));
    }
}
Your code is mostly correct; you just need to change a few things:
module.exports = {
    getDatabaseList: async function() {
        console.log('start ' + client);
        const databasesList = await client.db().admin().listDatabases();
        //const collectionList = await client.db().getCollectionNames();
        //console.log("Collections: " + collectionList);
        console.log("Databases: " + databasesList.databases);
        databasesList.databases.forEach(db => console.log(` - ${db.name}`));
    }
}
You have to declare the function as async and use await. The async and await keywords let asynchronous, promise-based behaviour be written in a cleaner style, avoiding the need to explicitly configure promise chains.
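For comparison, here is the same listDatabases call written first as an explicit promise chain and then with await (a minimal sketch, assuming the client from your snippet has connected):

// Promise chain style:
client.db().admin().listDatabases()
    .then(databasesList => console.log("Databases: " + databasesList.databases))
    .catch(err => console.log(err));

// async/await style, equivalent but flatter:
async function logDatabases() {
    const databasesList = await client.db().admin().listDatabases();
    console.log("Databases: " + databasesList.databases);
}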
You can use this modular approach to build your database access code:
index.js: runs your database application code, e.g. listing database names and collection names and reading from a collection.
const connect = require('./database');
const dbFunctions = require('./dbFunctions');

const start = async function() {
    const connection = await connect();
    console.log('Connected...');
    const dbNames = await dbFunctions.getDbNames(connection);
    console.log(dbNames.databases.map(e => e.name));
    const colls = await dbFunctions.getCollNames(connection, 'test');
    console.log(colls.map(e => e.name));
    console.log(await dbFunctions.getDocs(connection, 'test', 'test'));
};

start();
database.js: creates a connection object. This connection is used for all your database access code. In general, a single connection creates a connection pool, and this can be used throughout a small application.
const { MongoClient } = require('mongodb');
const url = 'mongodb://localhost:27017/';
const opts = { useUnifiedTopology: true };

async function connect() {
    console.log('Connecting to db server...');
    return await MongoClient.connect(url, opts);
}

module.exports = connect;
dbFunctions.js: various functions to access database details, collection details and query a specific collection.
module.exports = {
    // return list of database names
    getDbNames: async function(conn) {
        return await conn.db().admin().listDatabases({ nameOnly: true });
    },
    // return collections list as an array for a given database
    getCollNames: async function(conn, db) {
        return await conn.db(db).listCollections().toArray();
    },
    // return documents as an array for a given database and collection
    getDocs: async function(conn, db, coll) {
        return await conn.db(db).collection(coll).find().toArray();
    }
}

How to connect to Mongodb reliably in a serverless setup?

Eight out of ten times everything connects well. That said, I sometimes get a "MongoClient must be connected before calling MongoClient.prototype.db" error. How should I change my code so it works reliably (100%)?
I tried a code snippet from one of the creators of the Zeit Now platform.
My handler
const { send } = require('micro');
const { handleErrors } = require('../../../lib/errors');
const cors = require('../../../lib/cors')();
const qs = require('micro-query');
const mongo = require('../../../lib/mongo');
const { ObjectId } = require('mongodb');

const handler = async (req, res) => {
    let { limit = 5 } = qs(req);
    limit = parseInt(limit);
    limit = limit > 10 ? 10 : limit;
    const db = await mongo();
    const games = await db
        .collection('games_v3')
        .aggregate([
            {
                $match: {
                    removed: { $ne: true }
                }
            },
            { $sample: { size: limit } }
        ])
        .toArray();
    send(res, 200, games);
};

module.exports = handleErrors(cors(handler));
My mongo script that reuses the connection in case the lambda is still warm:
// Based on: https://spectrum.chat/zeit/now/now-2-0-connect-to-database-on-every-function-invocation~e25b9e64-6271-4e15-822a-ddde047fa43d?m=MTU0NDkxODA3NDExMg==
const MongoClient = require('mongodb').MongoClient;

if (!process.env.MONGODB_URI) {
    throw new Error('Missing env MONGODB_URI');
}

let client = null;

module.exports = function getDb(fn) {
    if (client && !client.isConnected) {
        client = null;
        console.log('[mongo] client discard');
    }
    if (client === null) {
        client = new MongoClient(process.env.MONGODB_URI, {
            useNewUrlParser: true
        });
        console.log('[mongo] client init');
    } else if (client.isConnected) {
        console.log('[mongo] client connected, quick return');
        return client.db(process.env.MONGO_DB_NAME);
    }
    return new Promise((resolve, reject) => {
        client.connect(err => {
            if (err) {
                client = null;
                console.error('[mongo] client err', err);
                return reject(err);
            }
            console.log('[mongo] connected');
            resolve(client.db(process.env.MONGO_DB_NAME));
        });
    });
};
I need my handler to be 100% reliable.
if (client && !client.isConnected) {
    client = null;
    console.log('[mongo] client discard');
}
This code can cause problems! Even though you set client to null, that client object still exists and will keep trying to connect to Mongo; it won't be garbage collected, and its connect callback will still run. Worse, inside that callback client now refers to the next client that was created, which is not necessarily connected.
A common pattern for this kind of code is to only ever return a single promise from the getDB call:
let clientP = null;

function getDb(fn) {
    if (clientP) return clientP;
    clientP = new Promise((resolve, reject) => {
        const client = new MongoClient(process.env.MONGODB_URI, {
            useNewUrlParser: true
        });
        client.connect(err => {
            if (err) {
                console.error('[mongo] client err', err);
                return reject(err);
            }
            console.log('[mongo] connected');
            resolve(client.db(process.env.MONGO_DB_NAME));
        });
    });
    return clientP;
}
I had the same issue. In my case it was caused by calling getDb() before a previous getDb() call had returned; I believe client.isConnected returns true in that situation, even though the client is still connecting.
This was caused by forgetting to put an await before the getDb() call in one location. I tracked down which call it was by logging a call stack from getDb using:
console.log(new Error().stack);
I don't see the same issue in the sample code in the question, though it could be triggered by another bit of code that isn't shown.
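For example, a hypothetical wrapper that logs every call site of getDb (the require path is only an assumption to match the handler above; adjust it to your project):

// traced-mongo.js -- drop-in replacement for the mongo helper while debugging
const getDb = require('../../../lib/mongo');

module.exports = function tracedGetDb() {
    // Logs the call stack of this invocation, so a call site that forgot `await`
    // shows up in the logs next to the [mongo] connection messages.
    console.log('[mongo] getDb called from:', new Error().stack);
    return getDb();
};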
I have written this article about serverless, Lambda and DB connections. It covers some concepts which could help you find the root cause of your problem, along with examples and use cases of how to mitigate connection pool issues.
Just by looking at your code I can tell it is missing this:
context.callbackWaitsForEmptyEventLoop = false;
Serverless: Dynamodb x Mongodb x Aurora serverless
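For reference, the flag goes at the top of an AWS Lambda-style handler that receives a context object (a minimal sketch; the micro-based handler in the question doesn't expose context, so this only applies if you run the code as a plain Lambda handler):

const mongo = require('../../../lib/mongo'); // same connection helper as in the question

exports.handler = async (event, context) => {
    // Tell Lambda not to wait for the open MongoDB socket to close before freezing
    // the container; otherwise a cached connection can make the invocation hang.
    context.callbackWaitsForEmptyEventLoop = false;

    const db = await mongo();
    const games = await db.collection('games_v3').find().limit(5).toArray();
    return { statusCode: 200, body: JSON.stringify(games) };
};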

Structure of a synchronous application in Node.js and MongoDb

I need to build an application that does these things (in order):
on load:
01- connect to MongoDB 'db'
02- creates a collection 'cas'
03- check if a web page has updates, if yes go to step 04, if not go to step 07
04- do web scraping (using Cheerio) of the web site and get a $ variable like this: $ = cheerio.load(body);
05- process this object to keep only the information I'm interested in and organize it in an array of objects (jsons) like this one:
var jsons = [
    {year: 2015, country: 'Germany', value: 51},
    {year: 2015, country: 'Austria', value: 12},
    {year: 2016, country: 'Germany', value: 84},
    {year: 2016, country: 'Bulgaria', value: 104},
    ...
];
06- insert each of these elements ({year: 2015, country: 'Germany', value: 51}, ...) in the collection 'cas' of database 'db'
07- download the data (for example in a csv file)
08- create a web page for data visualization of these data using D3.js
09- disconnect from 'db'
If Node.js were synchronous, I could write something like this:
var url = 'http://...';
var jsons = [];

connectDb('db');
createCollection('db', 'cas');

if (checkForUpdates(url)) {
    var $ = scrape(url);
    jsons = elaborate($);
    for (var i = 0; i < jsons.length; i++) {
        saveDocumentOnDbIfNotExistsYet('db', 'cas', jsons[i]);
    }
}

downloadCollectionToFile('db', 'cas', './output/casData.csv');
createBarChart('./output/casData.csv');
disconnectDb('db');
But Node.js is asynchronous, so this code would not work properly.
I've read that I can use Promises to get the code to run in a certain order.
I read the documentation about Promises and some sites with simple tutorials.
The structure of a Promise is:
// some code (A)

var promise = new Promise(function(resolve, reject) {
    // some code (B)
});

promise.then(function() {
    // some code (C)
});

promise.catch(function() {
    // some code (D)
});

// some code (E)
If I understood correctly, in this case the execution (if Node.js were synchronous) would be equivalent to:
// some code (A)
// some code (E)
if (some code (B) does not produce errors) {
    // some code (C)
}
else {
    // some code (D)
}
or (A and E swapped, because they are asynchronous):
// some code (E)
// some code (A)
if (some code (B) does not produce errors) {
    // some code (C)
}
else {
    // some code (D)
}
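A quick way to check the actual order is a small sketch that just logs from each spot (note that the executor, code B, runs synchronously while the Promise is being constructed):

console.log('A');
var promise = new Promise(function(resolve, reject) {
    console.log('B');
    resolve();
});
promise.then(function() {
    console.log('C');
});
promise.catch(function() {
    console.log('D');
});
console.log('E');
// prints: A, B, E, C -- D would only run if the promise were rejected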
So now I wonder what is the right structure for my application.
I thought about:
var cheerio = require('cheerio');
var express = require('express');
var fs = require('fs');
var MongoClient = require('mongodb').MongoClient;

var dbUrl = 'mongodb://localhost:27017/';
var dbName = 'db';
var collectionName = 'cas';

const app = express(); // run using > node app.js

// connect to db
var connect = function(url) {
    return new Promise(function(resolve, reject) {
        MongoClient.connect(url + dbName, function(err, db) {
            if (err) {
                reject(err);
            }
            else {
                console.log('Connected');
                resolve(db);
            }
        });
    });
}

// create collection
connect.then(function(db) {
    db.createCollection(collectionName, function(err, res) {
        if (err) {
            throw err;
        }
        else {
            console.log('Collection', collectionName, 'created!');
        }
    });
});

// connection error
connect.catch(function(err) {
    console.log('Error during connection...');
    throw err;
});
Is this right? If yes, how can I proceed with the other steps?
How can I improve my code?
EDIT 1
Following the example of Андрей Щербаков, I modified my code in this way:
app.js:
// my files
var db = require('./middlewares/db.js');
var url = 'mongodb://localhost:27017/';
var dbName = 'db';
var collectionName = 'cas';
const start = async function() {
    const connect = await db.connectToMongoDb(url, dbName);
    const cas = await connect.createYourCollection(collectionName);
    const isPageHasUpdates = oneMoreFunction(); // i don't know how you gonna check it
    if (isPageHasUpdates) {
        await step 4;
        await step 5;
        await step 6;
    }
    await step 7
    return something; // if you want
}

start()
    .then(res => console.log(res)) // here you can use result of your start function if you return something or skip this then
    .catch(err => console.log(err)); // do something with your error
middlewares/db.js:
var MongoClient = require('mongodb').MongoClient;

let dbInstance;
var methods = {};

methods.connectToMongoDb = function(url, dbName) {
    if (dbInstance) {
        return dbInstance;
    }
    else {
        MongoClient.connect(url + dbName, function(err, db) {
            if (!err) {
                dbInstance = db;
                return db;
            }
        });
    }
}

methods.createYourCollection = function(collectionName) {
    ?.createCollection(collectionName, function(err, res) {
        if (err) {
            throw err;
        }
    });
}

module.exports = methods;
But I'm not sure I'm doing this well.
How can I separate the functions into different files? For example, I want to put all the functions about the db in middlewares/db.js, but I have a problem with the line ?.createCollection(collectionName, function(err, res).
If you are running Node version 7.6 or higher, a better way is to use async/await, which works with promises.
So your code will look like:
const start = async () => {
    const connect = await connectToMongoDb(url);
    const cas = await connect.createYourCollection();
    const isPageHasUpdates = oneMoreFunction(); // i don't know how you gonna check it
    if (isPageHasUpdates) {
        await step 4;
        await step 5;
        await step 6;
    }
    await step 7
    return something; // if you want
}

start()
    .then(res => console.log(res)) // here you can use result of your start function if you return something or skip this then
    .catch(err => console.log(err)); // do something with your error
Of course, any function you are going to await should be promisified, as you did with your connect function (though if you are using https://www.npmjs.com/package/mongodb, the driver functions are already promisified).
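For example, a callback-style API can be promisified by hand (the same pattern as the connect function above) or with util.promisify; readFileP here is just an illustrative name:

const fs = require('fs');
const { promisify } = require('util');

// By hand: wrap the callback API in a Promise
function readFileP(filePath) {
    return new Promise(function(resolve, reject) {
        fs.readFile(filePath, 'utf8', function(err, data) {
            if (err) reject(err);
            else resolve(data);
        });
    });
}

// Or, more briefly, with util.promisify (Node 8+)
const readFile = promisify(fs.readFile);

// Either version can then be awaited inside start(), e.g.
// const csv = await readFile('./output/casData.csv', 'utf8');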
Update
The best way would be to use mongoose, but if you want to work with the native mongodb driver you can write your mongodb module like this: https://pastebin.com/BHHc0uVN (just an example).
You can expand this example as you want.
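I can't reproduce the pastebin here, but a minimal sketch of what such a lib/mongodb.js could look like, inferred from the usage below (init, getConnection and the collections helper are assumptions based on that usage; the linked example may differ):

// lib/mongodb.js -- hypothetical sketch, 3.x driver style
const MongoClient = require('mongodb').MongoClient;
const url = 'mongodb://localhost:27017/';
const dbName = 'db';

let connection = null;

const init = () =>
    MongoClient.connect(url).then(client => {
        connection = client.db(dbName);
        return connection;
    });

// Only valid after init() has resolved
const getConnection = () => connection;

// Lazily resolved collection handles, e.g. collections.cas().insertMany(docs)
const collections = {
    cas: () => connection.collection('cas')
};

module.exports = { init, getConnection, collections };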
You can create a createCollection function:
const createCollection = (connection, collectionName) => {
    return connection.createCollection(collectionName); // actually i'm not sure that this function exists in mongodb driver
}
And usage will be:
const mongodbLib = require('./lib/mongodb'); //path to db.js file

mongodbLib.init()
    .then(connection => mongodbLib.createCollection(connection, 'cas'))
    .then(() => doSmthElse())
Or, if you are sure that init is done (you can do it once before your main script, e.g. when starting the server or whatever you are doing):
const mongodbLib = require('./lib/mongodb'); //path to db.js file
const connection = mongodbLib.getConnection();
Or, if you simply want to work with a collection as in step 6, add your cas collection (like user in the example file). You can use this once your init function is done as well.
So usage will be:
const mongodbLib = require('./lib/mongodb');
const cas = mongodbLib.collections.cas;

cas().insertMany(docs)
    .then()
    .catch()

Initialization of db connection - nodejs

I want to use gridfs-stream in a nodejs application.
A simple example is given in the documentation:
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

mongoose.connect('mongodb://localhost:27017/test');

// make sure the db instance is open before passing into `Grid`
mongoose.connection.once('open', function () {
    var gfs = Grid(mongoose.connection);
    // all set!
})
My problem is described by the comment:
make sure the db instance is open before passing into Grid
I try to use gfs in a post request. Now when the code gets initialized, the gfs variable is not defined yet.
api.post('/upload', function(req, res) {
    req.pipe(gfs.createWriteStream({
        filename: 'test'
    }).on('close', function(savedFile) {
        console.log('file saved', savedFile);
        return res.json({ file: savedFile });
    }));
})
Initializing my route from a callback seems kind of odd.
I read in this post (Asynchronous initialization of Node.js module) that require('') is performed synchronously, and since I rely on the connection being established, I'm kind of forced to wait.
Basically I'm not sure if I should use an async pattern on startup, or if I'm just missing a more elegant way to solve this.
I have a very similar problem with my server. In my case I am reading HTTPS certs asynchronously and the software version from git asynchronously, and I want to make sure I have it all together by the time the user comes to log in, so I can pass the software version back as a reply to login.
The solution is to use promises. Create the promises at startup for each activity. Then, in the code where you want to be sure it's all ready, just call then on either the promise itself or on Promise.all(array of promises).
Here is an example of what I am doing to read the SSL certs to start the server:
const fs = require('fs');     // needed for fs.readFile below
const path = require('path'); // needed for path.resolve below

class Web {
    constructor(manager, logger) {
        var self = this;
        this.server = false;
        this.logger = logger;
        var key = new Promise((resolve, reject) => {
            fs.readFile(path.resolve(__dirname, 'key.pem'), (err, data) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            });
        });
        var cert = new Promise((resolve, reject) => {
            fs.readFile(path.resolve(__dirname, 'certificate.pem'), (err, data) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            });
        });
        Promise.all([key, cert]).then(values => {
            var certs = {
                key: values[0],
                cert: values[1],
            };
            return certs;
        }).then(certs => {
            self.server = require('http2').createServer(certs, (req, res) => {
                // NOW started and can do the rest of the stuff
            });
            self.server.listen(...);
        });
    }
}
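Applied to the gridfs-stream question, the same idea would look roughly like this (a sketch rather than a drop-in: it reuses the mongoose/Grid setup and the api route from the question):

var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;
mongoose.connect('mongodb://localhost:27017/test');

// Create the promise once at startup; it resolves when the connection is open.
var gfsReady = new Promise(function (resolve, reject) {
    mongoose.connection.once('open', function () {
        resolve(Grid(mongoose.connection));
    });
    mongoose.connection.on('error', reject);
});

// The route can be registered immediately and simply waits on the promise.
api.post('/upload', function (req, res) {
    gfsReady.then(function (gfs) {
        req.pipe(gfs.createWriteStream({ filename: 'test' })
            .on('close', function (savedFile) {
                res.json({ file: savedFile });
            }));
    }).catch(function (err) {
        res.status(500).json({ error: err.message });
    });
});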

Connect synchronously to mongodb

I would like to connect to mongodb first, then run everything else in my application.
To do it I have to write something like:
MongoClient.connect("mongodb://localhost/test", function(err, connection) {
    if (err) { console.error(err); }
    db = connection;
    var app = express();
    // Include API V1
    require("./apiv1.js")(app, db);
    app.listen(3000, function(err) {
        if (err) { console.error(err); } else { console.log("Started on *:3000"); }
    });
});
This forces my whole app to be indented inside the .connect function, which looks ugly and wastes space while I work on my project.
I think the best solution would be to make the MongoDB connection synchronous (after all, without the DB connection my app cannot work, so why should I do anything else while it's connecting?) and then run the rest of my code.
How can I do that?
You can't connect to MongoDB synchronously, but you can get rid of this ugly callback in your code.
The best way to do it is to adopt a wrapper around the node-mongodb-native driver.
Take a look at the following modules.
mongojs
var mongojs = require('mongojs');
var db = mongojs('localhost/test');
var mycollection = db.collection('mycollection');
mongoskin
var mongo = require('mongoskin');
var db = mongo.db("mongodb://localhost:27017/test", {native_parser:true});
monk
var monk = require('monk');
var db = monk('localhost/test');
var users = db.get('users')
Of course, internally all of them are establishing MongoDB connection asynchronously.
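Because the wrapper deals with connecting behind the scenes, you can issue queries straight away; for example with mongojs (a small sketch reusing the mycollection handle from above):

// The query runs once the lazily opened connection is ready.
mycollection.find({}, function (err, docs) {
    if (err) return console.error(err);
    console.log('Found %d documents', docs.length);
});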
Using the async library, you can alleviate some of these issues.
For example, in my server startup I do the following:
async.series([
    function(callback) {
        // Initialize the mongodb connection and callback on completion in init.
        db.init(function() {
            callback();
        });
    },
    function(callback) {
        // Listen on requests etc.
        webServer.init(function() {
            callback();
        });
    },
    function(callback) {
        // Set up anything else that I need
        callback();
    }
]);
If you are using Node 7.6 or later (which supports async/await), you can do something like this:
const MongoClient = require('mongodb').MongoClient;
const url = 'mongodb://localhost:27017/mydb';
let db = null;

getdb();
//your code

async function getdb() {
    db = await MongoClient.connect(url);
}
Bring in the mongodb library.
Declare the url constant.
Declare the variable db as null.
Call the getdb function.
Create the getdb function, which is declared with the async keyword.
Assign the result of the connection to the db variable using the await keyword.
You can do it with thunky. thunky executes an async function once and caches the result; subsequent calls are served from the cache.
const MongoClient = require('mongodb').MongoClient;
const thunky = require('thunky');

var connect = thunky(function(cb) {
    let url = 'mongodb://localhost:27017/test';
    MongoClient.connect(url, function(err, client) {
        console.log('connecting');
        cb(err, client);
    });
});

connect((err, client) => {
    console.log('connection 1');
});

connect((err, client) => {
    console.log('connection 2');
});

connect((err, client) => {
    console.log('connection 3');
    console.log('closing');
    client.close();
});
*Note: I am using the latest 3.x MongoDB driver.
