Redshift data access from node jdbc throwing errors - node.js

I am getting the error below when I try to read data from Redshift with the code shown here. I suspect it is because the jdbc module was not installed correctly.
/*
 * GET users listing.
 */
var jdbc = require('jdbc');
var express = require('express');

var config = {
  libpath: '../RedshiftJDBC41-1.1.6.1006.jar',
  drivername: 'com.amazon.redshift.jdbc41.Driver',
  url: 'jdbc:redshift://exampleCluster.abcdkcmf0ug1.us-west-2.redshift.amazonaws.com:5439/xyz',
  properties: [
    ['user', 'XXXXX'],
    ['password', 'XXXXXX']
  ]
};

console.log(config);

var hsqldb = new jdbc(config);
hsqldb.status();
hsqldb.initialize(config, function(err, res) {
  console.log("Connection opened successfully!");
  if (err) {
    console.log("Error during Initializing");
    console.log(err);
  }
});
exports.list = function(req, res) {
  hsqldb.open(function(err, conn) {
    if (conn) {
      // SELECT statements are called with executeQuery
      hsqldb.executeQuery("select * from schema.table", function(err, results) {
        if (err) {
          console.log(err);
        } else if (results) {
          console.log(results);
          res.type('text/plain'); // set content-type
          res.send(results);
        }
        hsqldb.close(function(err) {
          if (err) {
            console.log(err);
          } else {
            console.log("Connection closed successfully!");
          }
        });
      });
    }
  });
  // res.send("respond with a resource");
};
Error:
C:\Users\ABCD\node_modules\jdbc\lib\pool.js:97
return callback(null);
^
TypeError: object is not a function
at C:\Users\ABCD\node_modules\jdbc\lib\pool.js:97:12
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:52:16
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:363:13
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:52:16
at done (C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:243:17)
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:44:16
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:360:17
at C:\Users\ABCD\node_modules\jdbc\lib\pool.js:90:7
at C:\Users\ABCD\node_modules\jdbc\lib\pool.js:28:14
at C:\Users\ABCD\node_modules\jdbc\lib\drivermanager.js:26:18
Similar Post: Redshift data access from node jdbc
Could someone please let me know what went wrong here? Is it something wrong with the code, or were the modules not installed correctly? I suspect the modules were not installed correctly, since I saw a few errors while installing the jdbc module.
Please let me know the steps needed to install the jdbc module in Node.js.

The node-jdbc API has been completely reworked since version 0.1.1 was released, so any old documentation relating to older versions of node-jdbc, including my answer to the question linked above, is no longer helpful.
Using node-jdbc 0.1.1, the code to connect to your database should look something like the following:
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');

if (!jinst.isJvmCreated()) {
  jinst.addOption("-Xrs");
  jinst.setupClasspath(['../RedshiftJDBC41-1.1.6.1006.jar']);
}

var config = {
  drivername: 'com.amazon.redshift.jdbc41.Driver',
  url: 'jdbc:redshift://exampleCluster.abcdkcmf0ug1.us-west-2.redshift.amazonaws.com:5439/xyz',
  properties: {
    "user": "XXXXX",
    "password": "XXXXX"
  }
};

var hsqldb = new JDBC(config);
hsqldb.status();
hsqldb.initialize(function(err, res) {
  /* ... */
});
In particular:
The classpath is now configured in a jinst object and has moved out of the configuration.
properties is now an object, not an array of 2-element arrays as it was in version 0.0.15.
The initialize method now takes only one parameter, the callback. It no longer takes the configuration as a parameter as that was provided in the constructor.
node-jdbc supposedly supports specifying the user and password directly in the config object, which would avoid putting them in properties. However, I would recommend avoiding this, as it does not work: if you try it, you may well get errors about the username and password not being valid, even though you typed them correctly.
The reason you are getting the somewhat unintelligible error is that you were passing the configuration object to the initialize method, which expects only a callback. The mistake isn't noticed until Node tries to 'call' the configuration object as if it were a function, which of course fails.
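In case it is useful, here is a minimal sketch of how a query might be run once initialization succeeds, assuming the pool-based reserve/release API described in the node-jdbc 0.1.x README (the exact method names may differ slightly in the version you have installed):
hsqldb.initialize(function(err) {
  if (err) {
    return console.log(err);
  }
  // Borrow a connection from the pool.
  hsqldb.reserve(function(err, connObj) {
    if (err) {
      return console.log(err);
    }
    var conn = connObj.conn;
    conn.createStatement(function(err, statement) {
      if (err) {
        return console.log(err);
      }
      statement.executeQuery("select * from schema.table", function(err, resultset) {
        if (err) {
          return console.log(err);
        }
        // Convert the JDBC result set into plain JavaScript objects.
        resultset.toObjArray(function(err, results) {
          console.log(results);
          // Always return the connection to the pool when finished.
          hsqldb.release(connObj, function(err) {
            if (err) {
              console.log(err);
            }
          });
        });
      });
    });
  });
});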

Related

Initialization of db connection - nodejs

I want to use gridfs-stream in a nodejs application.
A simple example is given in the documentation:
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

mongoose.connect('mongodb://localhost:27017/test');

// make sure the db instance is open before passing into `Grid`
mongoose.connection.once('open', function () {
  var gfs = Grid(mongoose.connection);
  // all set!
});
My problem is described by the comment:
make sure the db instance is open before passing into Grid
I am trying to use gfs in a POST request, but by the time the route code is initialized, the gfs variable is not defined yet.
api.post('/upload', function(req, res) {
  req.pipe(gfs.createWriteStream({
    filename: 'test'
  }).on('close', function(savedFile) {
    console.log('file saved', savedFile);
    return res.json({file: savedFile});
  }));
});
Initializing my route from a callback seems kind of odd.
I read in this post (Asynchronous initialization of Node.js module) that require('') is performed synchronously, and since I rely on the connection being established first, I'm kind of forced to wait.
Basically I'm not sure whether I should use an async pattern on startup, or whether I'm just missing a more elegant way to solve this.
I have a very similar problem with my server. In my case I am reading HTTPS certs asynchronously and the software version from git asynchronously, and I want to make sure I have it all together by the time the user comes to log in, so I can pass the software version back in the reply to the login.
The solution is to use promises. Create the promises at server start-up, one for each activity. Then, in the code where you need to be sure it is all ready, just call .then() on either the promise itself or on Promise.all([array of promises]).
Here is an example of what I am doing to read the SSL certs and start the server:
const fs = require('fs');
const path = require('path');

class Web {
  constructor(manager, logger) {
    var self = this;
    this.server = false;
    this.logger = logger;
    var key = new Promise((resolve, reject) => {
      fs.readFile(path.resolve(__dirname, 'key.pem'), (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    });
    var cert = new Promise((resolve, reject) => {
      fs.readFile(path.resolve(__dirname, 'certificate.pem'), (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    });
    Promise.all([key, cert]).then(values => {
      var certs = {
        key: values[0],
        cert: values[1],
      };
      return certs;
    }).then(certs => {
      self.server = require('http2').createServer(certs, (req, res) => {
        // NOW started and can do the rest of the stuff
      });
      self.server.listen(...);
    });
  }
}
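Applying the same idea to the gridfs-stream question above, one possibility is to wrap the mongoose 'open' event in a promise and have the route wait on it. This is only a minimal sketch reusing the setup from the question; api stands for the Express router used there, and error handling is kept to a minimum:
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

mongoose.connect('mongodb://localhost:27017/test');

// Resolves with the gfs handle once the connection is open.
var gfsPromise = new Promise(function (resolve, reject) {
  mongoose.connection.once('open', function () {
    resolve(Grid(mongoose.connection));
  });
  mongoose.connection.on('error', reject);
});

// Routes can be registered immediately; each request just waits on the promise.
api.post('/upload', function (req, res) {
  gfsPromise.then(function (gfs) {
    req.pipe(gfs.createWriteStream({ filename: 'test' })
      .on('close', function (savedFile) {
        res.json({ file: savedFile });
      }));
  }).catch(function (err) {
    res.status(500).json({ error: err.message });
  });
});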

provide data for protractor test

I'm new to Node.js and am currently working on automating functional tests, doing BDD with Cucumber and Protractor.
For some of my steps, I need to send a query to an Oracle database and then use the result to make a search in the website under test.
1- I'm trying this with oracledb, which returns the expected result; I put it in a variable, but that variable is not available in my steps. WebDriver's sendKeys method puts "undefined" in the input.
2- I'm also wondering whether there's another way, because oracledb was hard to install and the next task is to set up Jenkins builds.
Here is my code:
var dbQueryContract = function() {
  var oracledb = require('oracledb');

  //Database communication
  oracledb.getConnection(
    {
      user          : "xxx",
      password      : "xxx",
      connectString : "xxx"
    },
    function(err, connection) {
      if (err) {
        console.error(err.message);
        return;
      }
      connection.execute(
        "select xxx, xxx " +
        "FROM xxxxx " +
        "where xxx is not null and rownum < 5",
        {
          resultSet: true
        },
        // bind value for :id [110],
        function(err, result) {
          if (err) { console.error(err.message); return; }
          console.log(result.rows[0][0]);
        });
      criteria = result.rows[0][0];
      // the connection is ok and i can log result.rows[0][0] i want to use for search
    });
};

module.exports = new dbQueryContract();

************************************************************************************************
// Use the external Chai As Promised to deal with resolving promises in
// expectations.
var chai = require('chai');
var chaiAsPromised = require('chai-as-promised');
chai.use(chaiAsPromised);
var expect = chai.expect;

var page1 = require('../page1.js');
var page2 = require('../page2.js');
var dbQueryContract = require('../dbQueryContract.js');

var EC = protractor.ExpectedConditions;

// Chai expect().to.exist syntax makes default jshint unhappy.
// jshint expr:true
module.exports = function() {
  this.Given(/^thanx for help$/, function(next) {
    browser.ignoreSynchronization = true;
    browser.get('toto.com');
    page1.login.sendKeys('login');
    page1.password.sendKeys('P#ssword');
    page1.validateButton.click();
    browser.ignoreSynchronization = false;
    page2.searchLink.click();
    browser.waitForAngular();
    browser.sleep(5000);
    // console.log(dbQueryContract.numabo.result.rows[0][0]);
    dbQueryContract().then(function(criteria) {
      page2.searchInput.sendKeys(criteria, protractor.Key.ENTER);
    });
    next();
  });

  this.When(/^i learn more$/, function(next) {
    browser.sleep(5000);
    next();
  });
};
This code doesn't look right. Your module is exporting the result of invoking new dbQueryContract(). However, that function isn't really a constructor function: you're not setting any properties on this or adding any properties to the prototype. Also, you're not returning anything, which is probably why you're getting undefined later.
The next problem is that you're chaining a then call, assuming that the driver uses promises - it doesn't (at least not yet). You'd need to have your function return a deferred/promise which you'd resolve at the right time yourself. There's some discussion here about adding a more robust JavaScript layer:
https://github.com/oracle/node-oracledb/pull/321
If we go that route, I'd like to see all async methods return a promise by default, which would make things easier...
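To make the step definition's dbQueryContract().then(...) call work, the module needs to export a function that returns a promise. Here is a minimal sketch of what that could look like, reusing the callback-style oracledb calls from the question (credentials and SQL are placeholders, error handling is minimal):
// dbQueryContract.js
var oracledb = require('oracledb');

module.exports = function dbQueryContract() {
  return new Promise(function (resolve, reject) {
    oracledb.getConnection(
      { user: "xxx", password: "xxx", connectString: "xxx" },
      function (err, connection) {
        if (err) { return reject(err); }
        connection.execute(
          "select xxx, xxx FROM xxxxx where xxx is not null and rownum < 5",
          [], // no bind values
          function (err, result) {
            // Release the connection whether the query succeeded or not.
            connection.release(function () {});
            if (err) { return reject(err); }
            // Resolve with the first column of the first row.
            resolve(result.rows[0][0]);
          });
      });
  });
};
With that in place, dbQueryContract().then(function(criteria) { ... }) in the step definition receives the value read from the database before sendKeys is called.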

Mock function return in node.js test

I'm new to testing in node.js and I would like to mock the return of a specific function call in a process that looks like the following.
doSomething(function(err, res) {
  callAnotherOne(res, function(err, result) {
    getDataFromDB(result, function(err, docs) {
      //some logic over the docs here
    });
  });
});
The function that I want to mock is the getDataFromDB() and specifically the documents (using MongoDB) that it returns.
How could I do something like this with mocha?
Part of the code, stripped of the logic in between, is the following:
filterTweets(item, input, function(err, item) {
  //Some filtering and logging here
  db.getTwitterReplies(item, function(err, result) {
    if (err) {
      return callback('Failed to retrieve tweet replies');
    }
    //Do some work here on the item using the result (tweet replies)
    /***** Here I want to test that the result is the expected ****/
    db.storeTweets(item, function (err, result) {
      //error checks, logging
      callback();
    });
  });
});
Based on the number of Twitter replies (the getTwitterReplies call), I will modify my object accordingly (I didn't include that code). I want to see whether, for different reply results, my object is constructed as expected.
P.S. After some searching I also looked into sinon.js, and I managed to mock the return of a callback (by writing some test code outside my project), but not the return of a callback of a nested function call.
Here's how I would approach this category of problem:
First create a "config.js" that wraps the dependencies that you'd like to inject. This will become your container.
var db = {
  doSomeDbWork: function(callback) {
    callback("db data");
  }
};

module.exports = {
  db: db
};
From there, you can call config dependencies like so:
var config = require('./index/config');

config.db.doSomeDbWork(function(data) {
  res.render('index', { title: 'Express', data: data });
});
And in your tests, inject a mock/spy easily:
var config = require('../routes/index/config');
config.db = {
  doSomeDbWork: function(callback) {
    callback("fake db data");
  }
};

var indexRouter = require('../routes/index');
indexRouter.get('/');
Because the require call refers to the same config module exports, the changes made to the config in the spec will be reflected wherever it is imported via require().
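Applied to the code in the question, a mocha/sinon sketch could stub the two db calls directly. The file paths and the processTweets name below are placeholders for however your project exposes the function that contains the filterTweets logic:
var sinon = require('sinon');
var assert = require('assert');

// Placeholder paths: adjust to your project layout.
var db = require('../lib/db');
var processTweets = require('../lib/processTweets');

describe('processTweets', function () {
  afterEach(function () {
    db.getTwitterReplies.restore();
    db.storeTweets.restore();
  });

  it('builds the item from the mocked replies', function (done) {
    var fakeReplies = [{ text: 'reply 1' }, { text: 'reply 2' }];

    // Make getTwitterReplies invoke its callback with fake data,
    // and stub storeTweets so nothing touches the real database.
    sinon.stub(db, 'getTwitterReplies').yields(null, fakeReplies);
    sinon.stub(db, 'storeTweets').yields(null);

    processTweets({ id: 1 }, 'input', function (err) {
      assert.ifError(err);
      // Assert on whatever your logic derives from the replies,
      // e.g. the item that ends up being passed to storeTweets.
      assert.strictEqual(db.storeTweets.calledOnce, true);
      done();
    });
  });
});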

What will be the best way to reuse a CouchBase connection

I am using the following code to connect to Couchbase:
couchbase.connect(config.CouchBaseConnector, function (err, CouchBaseDB) {
  if (err) {
    throw (err);
  }
  CouchBaseDB.set(keyPush, docPush, function (err, meta) {
    if (err) { console.log(err); }
  });
});
But it is creating a large number of connections.
Can someone help me fix this? Basically I want something like a connection pool that I can keep reusing.
I came across a doc from Couchbase about this, but I'm not able to figure out how exactly it works or the steps to deploy it on Windows 7 64-bit.
Update:
I think moxi-server has not been released for Windows as of now.
The Couchbase Node SDK is a connection pool itself. It is responsible for managing connections to the cluster and for reacting to any change in the server topology (added/removed/failed nodes).
This is why, most of the time, you put your code in a global callback and reuse the connection:
var express = require('express'),
    driver = require('couchbase'),
    routes = require('./routes');

var dbConfiguration = {
  "hosts": ["my-couchbase-server:8091"],
  "bucket": "bucket"
};

driver.connect(dbConfiguration, function(err, cb) {
  if (err) {
    throw (err);
  }
  // put your application logic here
});
If you want to use a global variable you need to wait for the callback and be sure that the connection is established before using it.
I found that the following code works for me.
If anyone has a better solution, please post it; I always welcome that.
GLOBAL.CouchBaseDBConnection = undefined;

function OpenCouchBase(callback) {
  if (CouchBaseDBConnection == undefined) {
    couchbase.connect(config.CouchBaseConnector, function (err, couchbaseOpenCon) {
      if (err)
        return console.log("Failed to connect to the CouchBase");
      else {
        CouchBaseDBConnection = couchbaseOpenCon;
        callback(null, couchbaseOpenCon);
      }
    });
  }
  else { callback(null, CouchBaseDBConnection); }
}

module.exports.OpenPoolCouchBase = OpenCouchBase;
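For completeness, here is a usage sketch, assuming the helper above is saved as couchbasePool.js (a hypothetical file name) and that couchbase and config are required inside it:
var couchbasePool = require('./couchbasePool');

couchbasePool.OpenPoolCouchBase(function (err, db) {
  if (err) { return console.log(err); }
  // Every caller after the first reuses the cached connection.
  db.set('someKey', { hello: 'world' }, function (err, meta) {
    if (err) { console.log(err); }
  });
});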
You can use the generic resource pool module for Node: node-pool.
It is a generic pool, so you can adapt it to your needs.
EDIT:
Here is example code:
var poolModule = require('generic-pool');

var pool = poolModule.Pool({
  name: 'couch',
  create: function(callback) {
    couchbase.connect(config.CouchBaseConnector, function (err, couchbaseOpenCon) {
      if (err)
        return console.log("Failed to connect to the CouchBase");
      else {
        CouchBaseDBConnection = couchbaseOpenCon;
        callback(null, couchbaseOpenCon);
      }
    });
  },
  destroy: function(client) { client.end(); },
  max: 10,
  // specifies how long a resource can stay idle in pool before being removed
  idleTimeoutMillis: 30000,
  // if true, logs via console.log - can also be a function
  log: true
});

// acquire connection - callback function is called once a resource becomes available
pool.acquire(function(err, client) {
  if (err) {
    // handle error - this is generally the err from your
    // factory.create function
  } else {
    console.log("do whatever you want with the client ...");
    pool.release(client);
  }
});
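As a usage sketch, the pool could then back an Express route; the route path and key are placeholders, and the set call mirrors the one in the question:
var express = require('express');
var app = express();

app.post('/push', function (req, res) {
  // Borrow a pooled Couchbase connection for this request only.
  pool.acquire(function (err, client) {
    if (err) { return res.status(500).send('No Couchbase connection available'); }
    client.set('someKey', { hello: 'world' }, function (err, meta) {
      // Return the connection to the pool as soon as the work is done.
      pool.release(client);
      if (err) { return res.status(500).send(err.message); }
      res.send('stored');
    });
  });
});

app.listen(3000);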

MongoDB (Node.js Native driver) large gridfs upload fails

Mongodb seems to be arbitrarily returning early when I try to upload a large file (1.8GB) to GridFS. Smaller files work just fine.
I'm using the node.js native driver. The code (with a few things omitted for brevity) is as follows:
var objectId = new ObjectID(),
    gridStore = new GridStore(db, objectId, filename /*declared elsewhere*/, "w", { "content_type": contentType /* declared elsewhere */ }),
    obj = {};

gridStore.open(function (err, gs) {
  console.log("gridStore open");
  gs.writeFile(tempFile, function (err, doc) {
    if (err) {
      throw err;
    }
    console.log("file written");
    obj.fileId = doc._id;

    // double check the md5 of the uploaded file against what was uploaded
    // (md5 variable declared elsewhere)
    if (doc.md5 !== md5) {
      console.log(doc);
      console.log(doc.md5);
      console.log(md5);

      //delete bad file
      GridStore.unlink(db, doc.filename, function (err, gridStore) {
        if (err) {
          throw err;
        }
      });
    } else {
      // do the desired stuff
    }
  });
});
The "doc" object always seems to return with a different length (and obviously a different md5).
The issue was apparently with earlier versions of Node's stream implementation. 10gen's Node driver team wrote the newer versions of the driver against the newer Node streams implementation, so upgrading Node and the native client driver fixed this issue.
