I am using Node.js v6 LTS and the npm jdbc package to connect to Apache Phoenix on Hortonworks.
My code is as below:
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');
var jar1='/home/ashish/seed-project-phoenix/public/drivers/phoenix-core-4.7.0.2.5.6.0-40.jar';
var jar2='/home/ashish/seed-project-phoenix/public/drivers/phoenix-queryserver-client-4.7.0.2.5.6.0-40.jar';
var jar3='/home/ashish/seed-project-phoenix/public/drivers/hbase-client-1.1.2.2.5.6.0-40.jar';
if (!jinst.isJvmCreated()) {
  // Add all java options required by your project here. You get one
  // chance to set up the options before the first java call.
  jinst.addOption("-Xrs");
  // Add all jar files required by your project here. You get one chance to
  // set up the classpath before the first java call.
  jinst.setupClasspath([jar1, jar2, jar3]); // adding jars
}
var config = {
  url: 'jdbc:phoenix:ZK1,ZK2,ZK3:2181:/hbase-unsecure',
  drivername: 'org.apache.phoenix.jdbc.PhoenixDriver'
};
var phoenixHbase = new JDBC(config);
phoenixHbase.initialize(function(err) {
  if (err) {
    console.log(err);
  } else {
    console.log("connected to Apache phoenix ...");
  }
});
But I am getting the following errors:
{ Error: Could not find class org.apache.phoenix.jdbc.PhoenixDriver
java.lang.NoClassDefFoundError: org/apache/commons/logging/LogFactory
at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.<clinit>(PhoenixEmbeddedDriver.java:74)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.logging.LogFactory
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 1 more
at Error (native)
at JDBC.Pool.initialize (/home/ashish/seed-project-phoenix/node_modules/jdbc/lib/pool.js:135:10)
at /home/ashish/seed-project-phoenix/routes/app.js:71:18
at Layer.handle [as handle_request] (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/layer.js:95:5)
at next (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/route.js:131:13)
at Route.dispatch (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/route.js:112:3)
at Layer.handle [as handle_request] (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/layer.js:95:5)
at /home/ashish/seed-project-phoenix/node_modules/express/lib/router/index.js:277:22
at Function.process_params (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/index.js:330:12)
at next (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/index.js:271:10) cause: nodeJava_java_lang_NoClassDefFoundError {} }
{ Error: Error running static method
java.sql.SQLException: No suitable driver found for jdbc:phoenix:ZK1,ZK2,ZK3:2181:/hbase-unsecure
at java.sql.DriverManager.getConnection(DriverManager.java:689)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at Error (native) cause: nodeJava_java_sql_SQLException {} }
As far as I can tell there is no problem with the connection string, and all the jar files were added to match the installed versions.
Phoenix version: 4.7.0.2.5.6.0-40
HBase version: 1.1.2.2.5.6.0-40
If anybody has done anything similar, please give some direction.
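For what it's worth, the first stack trace shows the root failure: org.apache.commons.logging.LogFactory is not on the classpath, so the static initializer of PhoenixEmbeddedDriver dies, and DriverManager then reports "No suitable driver" for the URL as a knock-on effect. A minimal sketch of the likely fix, assuming a commons-logging jar is available next to the other drivers (the filename below is hypothetical; use whichever version your HDP stack ships):
var jar4 = '/home/ashish/seed-project-phoenix/public/drivers/commons-logging-1.2.jar'; // hypothetical path
if (!jinst.isJvmCreated()) {
  jinst.addOption("-Xrs");
  // commons-logging must be on the classpath before the first java call,
  // otherwise the Phoenix driver class fails with NoClassDefFoundError.
  jinst.setupClasspath([jar1, jar2, jar3, jar4]);
}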
Related
I am trying to connect to a Hive instance from a Node.js application. I found an example using jdbc and I am trying to get it configured.
// Node index.js
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');
var asyncjs = require('async');
var util = require('util');
// Create a JVM and specify the jars required on the classpath and other JVM parameters
if (!jinst.isJvmCreated()) {
  jinst.addOption("-Xrs");
  jinst.setupClasspath(['./hive-jdbc-1.2.2-standalone.jar',
                        './hadoop-common-2.7.4.jar']);
}
//read the input arguments
var server = 'lvshdc2en00.myserver.com';
var port = 10025;
var schema = 'default';
var principal = 'hive/_HOST#HDP.LOCAL';
// Specify the hive connection parameters
var conf = {
  url: 'jdbc:hive2://' + server + ':' + port + '/' + schema + ';principal=' + principal,
  drivername: 'org.apache.hive.jdbc.HiveDriver',
  properties: {
  }
};
var hive = new JDBC(conf);
// Initialize the connection
hive.initialize(function (err) {
  if (err) {
    console.log(err);
  }
});
When I run this test app, I get the following error:
node index5.js
Jun 05, 2019 6:56:57 AM org.apache.hive.jdbc.Utils parseURL
INFO: Supplied authorities: lvshdc2en00.myserver.com:10025
Jun 05, 2019 6:56:57 AM org.apache.hive.jdbc.Utils parseURL
INFO: Resolved authority: lvshdc2en00.myserver.com:10025
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
{ [Error: Error running static method
java.lang.NoClassDefFoundError: org/apache/commons/configuration/Configuration
at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<init>(DefaultMetricsSystem.java:38)
at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<clinit>(DefaultMetricsSystem.java:36)
at org.apache.hadoop.security.UserGroupInformation$UgiMetrics.create(UserGroupInformation.java:122)
at org.apache.hadoop.security.UserGroupInformation.<clinit>(UserGroupInformation.java:238)
at org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.createClientWithConf(HadoopThriftAuthBridge.java:85)
at org.apache.hive.service.auth.KerberosSaslHelper.getKerberosTransport(KerberosSaslHelper.java:55)
at org.apache.hive.jdbc.HiveConnection.createBinaryTransport(HiveConnection.java:436)
at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:203)
at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:178)
at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:105)
at java.sql.DriverManager.getConnection(DriverManager.java:664)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.configuration.Configuration
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 16 more
] cause: nodeJava_java_lang_NoClassDefFoundError {} }
I am unclear what this error is referring to.
Do I actually need Hadoop/Hive installed on the client machine making the connection, or is that what the standalone jars are supposed to provide?
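For reference, the stack trace points at a missing jar rather than at a missing Hadoop installation: hadoop-common's UserGroupInformation pulls in org.apache.commons.configuration.Configuration, which neither of the two jars above bundles. A minimal sketch of the likely fix (the commons-configuration filename below is an assumption; Hadoop 2.7.x was built against 1.6):
if (!jinst.isJvmCreated()) {
  jinst.addOption("-Xrs");
  jinst.setupClasspath(['./hive-jdbc-1.2.2-standalone.jar',
                        './hadoop-common-2.7.4.jar',
                        // Assumed filename; supply the commons-configuration
                        // jar that matches your Hadoop release.
                        './commons-configuration-1.6.jar']);
}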
I generated a new application with JHipster, choosing Gradle and MongoDB.
Gradle compiles fine:
c:\webs\workspace-jhipster\jpoc>gradle clean compileJava compileTestJava
:clean
:cleanResources UP-TO-DATE
:bootBuildInfo
:nodeSetup SKIPPED
:npmSetup SKIPPED
:webpackBuildDev SKIPPED
:processResources
:compileJava
:classes
:compileTestJava
BUILD SUCCESSFUL
Total time: 6.704 secs
The problem arises when I want to run a single test:
gradle test --tests com.jpoc.service.UserServiceIntTest
which outputs :
com.jpoc.service.UserServiceIntTest > assertThatUserMustExistToResetPassword FAILED
java.lang.IllegalStateException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.BeanCreationException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.BeanCreationException
Caused by: org.springframework.beans.BeanInstantiationException
Caused by: de.flapdoodle.embed.process.exceptions.DistributionException
Caused by: java.io.IOException
Caused by: java.net.SocketTimeoutException
I'm pretty sure this is a misconfiguration problem, but I don't see which one.
I use the latest JHipster, 4.2.0.
Thank you.
To set up the MongoDB download proxy in the test suite with Spring Boot, I put, for instance:
@BeforeClass
public static void setup_mongo() throws UnknownHostException, IOException {
    String proxyHost = "proxy.priv.atos.fr";
    String proxyPort = "3128";
    String proxy = System.getenv("http_proxy");
    System.out.println("Proxy URL : " + proxy);
    if (proxy != null) {
        if (proxyHost == null && proxyPort == null) {
            // Fall back to the host/port from the http_proxy environment variable
            URL proxyurl = new URL(proxy);
            proxyHost = proxyurl.getHost();
            proxyPort = String.valueOf(proxyurl.getPort());
        }
    }
    MongodStarter starter;
    System.out.println("Proxy Host : " + proxyHost);
    System.out.println("Proxy Port : " + proxyPort);
    if (proxyHost != null && proxyPort != null) {
        // Route the flapdoodle artifact download through the HTTP proxy
        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder().defaults(Command.MongoD)
                .artifactStore(
                        new ArtifactStoreBuilder().defaults(Command.MongoD)
                                .download(
                                        new DownloadConfigBuilder()
                                                .defaultsForCommand(Command.MongoD)
                                                .proxyFactory(
                                                        new HttpProxyFactory(
                                                                proxyHost,
                                                                Integer.parseInt(proxyPort)))
                                                .build()).build()).build();
        starter = MongodStarter.getInstance(runtimeConfig);
    } else {
        starter = MongodStarter.getDefaultInstance();
    }
    IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Version.Main.PRODUCTION)
            .net(new Net(0, Network.localhostIsIPv6())).build();
    MongodExecutable mongodExecutable = null;
    mongodExecutable = starter.prepare(mongodConfig);
    mongodExecutable.start();
}
Like this, it downloads the MongoDB server and tries to run it. The next problem is that I don't have permission to run that executable from inside the JVM.
I am trying to run the following GQL query:
function gqlExample(callback) {
  datastore.runQuery({
    gqlQuery: {
      queryString: 'SELECT * FROM Person',
    }
  }).execute(function(err, result) {
    if (!err) {
      // Iterate over the results and return the entities.
      result = (result.batch.entityResults || []).map(
        function(entityResult) {
          return entityResult.entity;
        });
    }
    callback(err, result);
  });
}
I found the above example on the following page:
http://ec2-54-66-129-240.ap-southeast-2.compute.amazonaws.com/httrack/docs/cloud.google.com/datastore/docs/concepts/gql.html
It states that it is a valid node.js example but on running the query I get the following error:
/Users/xxxx/relay-fullstack/server/data/campaign-datastore.js:245
}).execute(function (err, result) {
^
TypeError: ds.runQuery(...).execute is not a function
at Object.campaignSearchGql (campaign-datastore.js:265:6)
at Test.<anonymous> (datastore-test.js:29:16)
at Test.bound [as _cb] (/Users/xxxx/relay-fullstack/node_modules/tape/lib/test.js:63:32)
at Test.run (/Users/xxxx/relay-fullstack/node_modules/tape/lib/test.js:82:10)
at Test.bound [as run] (/Users/xxxx/relay-fullstack/node_modules/tape/lib/test.js:63:32)
at Immediate.next [as _onImmediate] (/Users/xxxx/relay-fullstack/node_modules/tape/lib/results.js:70:15)
at tryOnImmediate (timers.js:534:15)
at processImmediate [as _immediateCallback] (timers.js:514:5)
I am using the latest version of google cloud for node:
"google-cloud": "^0.38.3",
Has anyone got GQL queries executing correctly with gcloud and node? Any help will be greatly appreciated.
That is apparently a cached version of the Datastore docs. Those Node.js snippets are using a different library called googleapis: https://github.com/google/google-api-nodejs-client
The other library, gcloud-node, doesn't support GQL at this time.
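As a sketch of the supported route, the equivalent of SELECT * FROM Person can be written with gcloud-node's query builder instead of GQL (method names per the google-cloud 0.x docs; the project ID below is a placeholder):
var gcloud = require('google-cloud')({ projectId: 'your-project-id' }); // placeholder project
var datastore = gcloud.datastore();

// Equivalent of the GQL 'SELECT * FROM Person'
var query = datastore.createQuery('Person');

datastore.runQuery(query, function(err, entities) {
  if (err) {
    return console.error(err);
  }
  console.log(entities); // array of Person entities
});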
I'm trying to build a catch-all route for a subset of my site that contains multiple directories/pages that need to be processed by EJS but don't need specific routes.
[N.B., I included the sync version in the example below to remove any question of timing... the analogous implementation of fs.stat (and of fs.access) results in the same behavior]
router.get('/foobar/:page(*)', function(req, res, next) {
  var fs = require('fs');
  var path = require('path');
  var viewsPath = req.app.get('views');
  try
  {
    // see if there's an ejs file corresponding to the request
    var stats = fs.statSync(path.join(viewsPath, 'foobar', req.params.page + '.ejs'));
    console.log(stats); // stats object seems accurate
    // THROWS WHEN CALLING openSync
    res.render('foobar/' + req.params.page, {title: 'foo'});
  }
  catch (e)
  {
    var err = new Error();
    err.status = 404;
    next(err);
  }
});
The 404 path works just fine, but when I request a page that actually exists, the render call throws:
Unexpected error code undefined has occurred. Please retry your request
at Error (native)
at Object.fs.openSync (fs.js:500:18)
at Object.fs.readFileSync (fs.js:352:15)
at includeSource (C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:194:17)
at C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:528:26
at Array.forEach (native)
at Object.Template.generateSource (C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:505:15)
at Object.Template.compile (C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:427:12)
at Object.compile (C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:288:16)
at handleCache (C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:147:16)
at View.exports.renderFile [as engine] (C:\Users\Jim\Documents\myProject\node_modules\ejs\lib\ejs.js:350:14)
at View.render (C:\Users\Jim\Documents\myProject\node_modules\express\lib\view.js:126:8)
at tryRender (C:\Users\Jim\Documents\myProject\node_modules\express\lib\application.js:639:10)
at EventEmitter.render (C:\Users\Jim\Documents\myProject\node_modules\express\lib\application.js:591:3)
at ServerResponse.render (C:\Users\Jim\Documents\myProject\node_modules\express\lib\response.js:961:7)
at C:\Users\Jim\Documents\myProject\routes\power-essentials.js:32:7
at Layer.handle [as handle_request] (C:\Users\Jim\Documents\myProject\node_modules\express\lib\router\layer.js:95:5)
at next (C:\Users\Jim\Documents\myProject\node_modules\express\lib\router\route.js:131:13)
at Route.dispatch (C:\Users\Jim\Documents\myProject\node_modules\express\lib\router\route.js:112:3)
at Layer.handle [as handle_request] (C:\Users\Jim\Documents\myProject\node_modules\express\lib\router\layer.js:95:5)
at C:\Users\Jim\Documents\myProject\node_modules\express\lib\router\index.js:277:22
at param (C:\Users\Jim\Documents\myProject\node_modules\express\lib\router\index.js:349:14)
It's behaving almost like the fs.statSync call has placed a lock on the file that render chokes on when trying to open the view file.
Occam's razor: the test page I was working with had an invalid include path inside it! EJS wasn't complaining about the template I was trying to load, but about one referenced within it.
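For illustration (filenames hypothetical), the failure mode looks like this: the requested view exists, but an old-style include inside it resolves to a file that doesn't, so the error surfaces from ejs's includeSource/readFileSync rather than from the top-level render, exactly as in the trace above:
<!-- views/foobar/demo.ejs (hypothetical) -->
<% include partials/headr %>   <!-- typo: should be partials/header -->
<h1><%= title %></h1>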
I am not able to publish a message using Spring Integration Kafka, though my plain Kafka Java client works fine.
The Java code is running on Windows and Kafka is running on Linux box.
KafkaProducerContext<String, String> kafkaProducerContext = new KafkaProducerContext<String, String>();
ProducerMetadata<String, String> producerMetadata = new ProducerMetadata<String, String>("test-cass");
producerMetadata.setValueClassType(String.class);
producerMetadata.setKeyClassType(String.class);
Encoder<String> encoder = new StringEncoder<String>();
producerMetadata.setValueEncoder(encoder);
producerMetadata.setKeyEncoder(encoder);
ProducerFactoryBean<String, String> producer = new ProducerFactoryBean<String, String>(producerMetadata, "172.16.1.42:9092");
ProducerConfiguration<String, String> config = new ProducerConfiguration<String, String>(producerMetadata, producer.getObject());
kafkaProducerContext.setProducerConfigurations(Collections.singletonMap("test-cass", config));
KafkaProducerMessageHandler<String, String> handler = new KafkaProducerMessageHandler<String, String>(kafkaProducerContext);
handler.handleMessage(MessageBuilder.withPayload("foo")
        .setHeader("messagekey", "3")
        .setHeader("topic", "test-cass")
        .build());
I am getting the following error:
"C:\Program Files\Java\jdk1.7.0_71\bin\java" -Didea.launcher.port=7542 "-Didea.launcher.bin.path=C:\Program Files (x86)\JetBrains\IntelliJ IDEA 13.1.6\bin" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.7.0_71\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jce.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jfxrt.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\resources.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\rt.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\zipfs.jar;C:\projects\SpringCassandraInt\target\classes;C:\Users\hs\.m2\repository\org\springframework\data\spring-data-cassandra\1.1.2.RELEASE\spring-data-cassandra-1.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\data\spring-cql\1.1.2.RELEASE\spring-cql-1.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-context\4.1.4.RELEASE\spring-context-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-aop\4.1.4.RELEASE\spring-aop-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\aopalliance\aopalliance\1.0\aopalliance-1.0.jar;C:\Users\hs\.m2\repository\org\springframework\spring-beans\4.0.9.RELEASE\spring-beans-4.0.9.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-core\4.1.2.RELEASE\spring-core-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\commons-logging\commons-logging\1.1.3\commons-logging-1.1.3.jar;C:\Users\hs\.m2\repository\org\springframework\spring-expression\4.1.2.RELEASE\spring-expression-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-tx\4.1.4.RELEASE\spring-tx-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\data\spring-data-commons\1.9.2.RELEASE\spring-data-commons-1.9.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\slf4j\slf4j-api\1.7.10\slf4j-api-1.7.10.jar;C:\Users\hs\.m2\repository\org\slf4j\jcl-over-slf4j\1.7.10\jcl-over-slf4j-1.7.10.jar;C:\Users\hs\.m2\repository\com\datastax\cassandra\cassandra-driver-dse\2.0.4\cassandra-driver-dse-2.0.4.jar;C:\Users\hs\.m2\repository\com\datastax\cassandra\cassandra-driver-core\2.0.4\cassandra-driver-core-2.0.4.jar;C:\Users\hs\.m2\repository\io\netty\netty\3.9.0.Final\netty-3.9.0.Final.jar;C:\Users\hs\.m2\repository\com\codahale\metrics\metrics-core\3.0.2\metrics-core-3.0.2.jar;C:\Users\hs\.m2\repository\com\google\guava\guava\15.0\guava-15.0.jar;C:\Users\hs\.m2\repository\org\liquibase\liquibase-core\3.1.1\liquibase-core-3.1.1.jar;C:\Users\hs\.m2\repository\org\yaml\snakeyaml\1.13\snakeyaml-1.13.jar;C:\Users\hs\.m2\repository\ch\qos\logback\logback-classic\1.1.2\logback-classic-1.1.2.jar;C:\Users\hs\.m2\repository\ch\qos\logback\logback-core\1.1.2\logback-core-1.1.2.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-core\4.1.2.RELEASE\spring-integration-core-4.1.2.RELEASE.jar;C:\Users\hs\.m
2\repository\org\projectreactor\reactor-core\1.1.4.RELEASE\reactor-core-1.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\com\goldmansachs\gs-collections\5.0.0\gs-collections-5.0.0.jar;C:\Users\hs\.m2\repository\com\goldmansachs\gs-collections-api\5.0.0\gs-collections-api-5.0.0.jar;C:\Users\hs\.m2\repository\com\lmax\disruptor\3.2.1\disruptor-3.2.1.jar;C:\Users\hs\.m2\repository\io\gatling\jsr166e\1.0\jsr166e-1.0.jar;C:\Users\hs\.m2\repository\org\springframework\retry\spring-retry\1.1.1.RELEASE\spring-retry-1.1.1.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-messaging\4.1.4.RELEASE\spring-messaging-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-stream\4.1.2.RELEASE\spring-integration-stream-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-xml\4.1.2.RELEASE\spring-integration-xml-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-oxm\4.1.4.RELEASE\spring-oxm-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\ws\spring-xml\2.2.0.RELEASE\spring-xml-2.2.0.RELEASE.jar;C:\Users\hs\.m2\repository\com\jayway\jsonpath\json-path\1.2.0\json-path-1.2.0.jar;C:\Users\hs\.m2\repository\net\minidev\json-smart\2.1.0\json-smart-2.1.0.jar;C:\Users\hs\.m2\repository\net\minidev\asm\1.0.2\asm-1.0.2.jar;C:\Users\hs\.m2\repository\asm\asm\3.3.1\asm-3.3.1.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-kafka\1.0.0.RELEASE\spring-integration-kafka-1.0.0.RELEASE.jar;C:\Users\hs\.m2\repository\org\apache\avro\avro-compiler\1.7.6\avro-compiler-1.7.6.jar;C:\Users\hs\.m2\repository\org\apache\avro\avro\1.7.6\avro-1.7.6.jar;C:\Users\hs\.m2\repository\org\codehaus\jackson\jackson-core-asl\1.9.13\jackson-core-asl-1.9.13.jar;C:\Users\hs\.m2\repository\org\codehaus\jackson\jackson-mapper-asl\1.9.13\jackson-mapper-asl-1.9.13.jar;C:\Users\hs\.m2\repository\com\thoughtworks\paranamer\paranamer\2.3\paranamer-2.3.jar;C:\Users\hs\.m2\repository\org\xerial\snappy\snappy-java\1.0.5\snappy-java-1.0.5.jar;C:\Users\hs\.m2\repository\org\apache\commons\commons-compress\1.4.1\commons-compress-1.4.1.jar;C:\Users\hs\.m2\repository\org\tukaani\xz\1.0\xz-1.0.jar;C:\Users\hs\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;C:\Users\hs\.m2\repository\org\apache\velocity\velocity\1.7\velocity-1.7.jar;C:\Users\hs\.m2\repository\commons-collections\commons-collections\3.2.1\commons-collections-3.2.1.jar;C:\Users\hs\.m2\repository\com\yammer\metrics\metrics-annotation\2.2.0\metrics-annotation-2.2.0.jar;C:\Users\hs\.m2\repository\com\yammer\metrics\metrics-core\2.2.0\metrics-core-2.2.0.jar;C:\Users\hs\.m2\repository\org\apache\kafka\kafka_2.10\0.8.1.1\kafka_2.10-0.8.1.1.jar;C:\Users\hs\.m2\repository\org\apache\zookeeper\zookeeper\3.3.4\zookeeper-3.3.4.jar;C:\Users\hs\.m2\repository\log4j\log4j\1.2.15\log4j-1.2.15.jar;C:\Users\hs\.m2\repository\javax\mail\mail\1.4\mail-1.4.jar;C:\Users\hs\.m2\repository\javax\activation\activation\1.1\activation-1.1.jar;C:\Users\hs\.m2\repository\javax\jms\jms\1.1\jms-1.1.jar;C:\Users\hs\.m2\repository\com\sun\jdmk\jmxtools\1.2.1\jmxtools-1.2.1.jar;C:\Users\hs\.m2\repository\com\sun\jmx\jmxri\1.2.1\jmxri-1.2.1.jar;C:\Users\hs\.m2\repository\jline\jline\0.9.94\jline-0.9.94.jar;C:\Users\hs\.m2\repository\net\sf\jopt-simple\jopt-simple\3.2\jopt-simple-3.2.jar;C:\Users\hs\.m2\repository\org\scala-lang\scala-library\2.10.1\scala-library-2.10.1.jar;C:\Users\hs\.m2\repository\com\101tec\zkclient\0.3\zkclient-0.3.jar;C:\Program 
Files (x86)\JetBrains\IntelliJ IDEA 13.1.6\lib\idea_rt.jar" com.intellij.rt.execution.application.AppMain com.agillic.dialogue.kafka.outbound.SpringKafkaTest
15:39:11.736 [main] INFO o.s.i.k.support.ProducerFactoryBean - Using producer properties => {metadata.broker.list=172.16.1.42:9092, compression.codec=0}
2015-02-19 15:39:12 INFO VerifiableProperties:68 - Verifying properties
2015-02-19 15:39:12 INFO VerifiableProperties:68 - Property compression.codec is overridden to 0
2015-02-19 15:39:12 INFO VerifiableProperties:68 - Property metadata.broker.list is overridden to 172.16.1.42:9092
15:39:12.164 [main] INFO o.s.b.f.config.PropertiesFactoryBean - Loading properties file from URL [jar:file:/C:/Users/hs/.m2/repository/org/springframework/integration/spring-integration-core/4.1.2.RELEASE/spring-integration-core-4.1.2.RELEASE.jar!/META-INF/spring.integration.default.properties]
15:39:12.208 [main] DEBUG o.s.i.k.o.KafkaProducerMessageHandler - org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler#5204db6b received message: GenericMessage [payload=foo, headers={timestamp=1424356752208, id=00c483d9-ecf8-2937-4a2c-985bd3afcae4, topic=test-cass, messagekey=3}]
Exception in thread "main" org.springframework.messaging.MessageHandlingException: error occurred in message handler [org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler#5204db6b]; nested exception is java.lang.NullPointerException
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:84)
at com.agillic.dialogue.kafka.outbound.SpringKafkaTest.main(SpringKafkaTest.java:40)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:134)
Caused by: java.lang.NullPointerException
at org.springframework.integration.kafka.support.KafkaProducerContext.getTopicConfiguration(KafkaProducerContext.java:58)
at org.springframework.integration.kafka.support.KafkaProducerContext.send(KafkaProducerContext.java:190)
at org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler.handleMessageInternal(KafkaProducerMessageHandler.java:81)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:78)
... 6 more
Process finished with exit code 1
Actually, when we introduced KafkaHeaders we made the appropriate documentation changes: https://github.com/spring-projects/spring-integration-kafka/blob/master/README.md. See the important note:
Since the last Milestone, we have introduced the KafkaHeaders interface with constants. The messageKey and topic default headers now require a kafka_ prefix. When migrating from an earlier version, you need to specify message-key-expression="headers.messageKey" and topic-expression="headers.topic" on the <int-kafka:outbound-channel-adapter>, or simply change the headers upstream to the new headers from KafkaHeaders using a <header-enricher> or MessageBuilder. Or, of course, configure them on the adapter if you are using constant values.
UPDATE
Regarding the NullPointerException: it really is an issue. Feel free to raise a JIRA ticket and we'll take care of it. Contributions are welcome, too!
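For illustration, a minimal sketch of the corresponding code change, using the KafkaHeaders constants (which carry the kafka_ prefix) instead of the bare messagekey/topic header names from the question; package names are per the spring-integration-kafka 1.x line:
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;

// KafkaHeaders.MESSAGE_KEY and KafkaHeaders.TOPIC resolve to the
// kafka_-prefixed header names the outbound handler now expects.
handler.handleMessage(MessageBuilder.withPayload("foo")
        .setHeader(KafkaHeaders.MESSAGE_KEY, "3")
        .setHeader(KafkaHeaders.TOPIC, "test-cass")
        .build());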