I am trying to connect to a Hive instance from a Node.js application. I have found an example using JDBC and I am trying to get it configured.
// Node index.js
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');
var asyncjs = require('async');
var util = require('util');
// create a JVM and specify the jars required on the classpath and other JVM parameters
if (!jinst.isJvmCreated()) {
  jinst.addOption("-Xrs");
  jinst.setupClasspath(['./hive-jdbc-1.2.2-standalone.jar',
                        './hadoop-common-2.7.4.jar']);
}
// read the input arguments
var server = 'lvshdc2en00.myserver.com';
var port = 10025;
var schema = 'default';
var principal = 'hive/_HOST@HDP.LOCAL';
// specify the Hive connection parameters
var conf = {
  url: 'jdbc:hive2://' + server + ':' + port + '/' + schema + ';principal=' + principal,
  drivername: 'org.apache.hive.jdbc.HiveDriver',
  properties: {
  }
};
var hive = new JDBC(conf);
// initialize the connection
hive.initialize(function (err) {
  if (err) {
    console.log(err);
  }
});
When I run this test app, I get the following error:
node index5.js
Jun 05, 2019 6:56:57 AM org.apache.hive.jdbc.Utils parseURL
INFO: Supplied authorities: lvshdc2en00.myserver.com:10025
Jun 05, 2019 6:56:57 AM org.apache.hive.jdbc.Utils parseURL
INFO: Resolved authority: lvshdc2en00.myserver.com:10025
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
{ [Error: Error running static method
java.lang.NoClassDefFoundError: org/apache/commons/configuration/Configuration
at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<init>(DefaultMetricsSystem.java:38)
at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<clinit>(DefaultMetricsSystem.java:36)
at org.apache.hadoop.security.UserGroupInformation$UgiMetrics.create(UserGroupInformation.java:122)
at org.apache.hadoop.security.UserGroupInformation.<clinit>(UserGroupInformation.java:238)
at org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.createClientWithConf(HadoopThriftAuthBridge.java:85)
at org.apache.hive.service.auth.KerberosSaslHelper.getKerberosTransport(KerberosSaslHelper.java:55)
at org.apache.hive.jdbc.HiveConnection.createBinaryTransport(HiveConnection.java:436)
at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:203)
at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:178)
at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:105)
at java.sql.DriverManager.getConnection(DriverManager.java:664)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.configuration.Configuration
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 16 more
] cause: nodeJava_java_lang_NoClassDefFoundError {} }
I am unclear about what this error is referring to.
Do I actually need Hadoop/Hive installed on the client machine that's making the connection, or is that what the standalone jars are supposed to provide?
Related
I am trying to use the HDFS Kafka connector to send protobuf messages from Kafka to HDFS. My connector config looks like the following:
{
  "name": "hdfs3-connector-test",
  "config": {
    "connector.class": "io.confluent.connect.hdfs3.Hdfs3SinkConnector",
    "tasks.max": "1",
    "topics": "test-topic",
    "hdfs.url": "hdfs://10.8.0.1:9000",
    "flush.size": "3",
    "key.converter": "org.apache.kafka.connect.storage.StringConverter",
    "value.converter": "io.confluent.connect.protobuf.ProtobufConverter",
    "value.converter.schema.registry.url": "http://10.8.0.1:8081",
    "confluent.topic.bootstrap.servers": "10.8.0.1:9092",
    "confluent.topic.replication.factor": "1"
  }
}
In order to test this, I am trying to send protobuf-serialized messages from a small Node application. Here are my files:
// data.proto
syntax = "proto3";
package awesomepackage;
message SearchRequest {
  string query = 1;
  int32 page = 2;
}
and my node app
const { Kafka } = require('kafkajs')
const protobuf = require('protobufjs')
const kafka = new Kafka({
  clientId: 'my-app',
  brokers: ['10.8.0.1:9092']
})
const producer = kafka.producer()
const run = async () => {
  await producer.connect()
  protobuf.load('data.proto', async (err, root) => {
    console.log("TESTING")
    console.log(err)
    let SearchRequest = root.lookupType('awesomepackage.SearchRequest')
    let payload = {query: "test", page: 2}
    var errMsg = SearchRequest.verify(payload);
    console.log(errMsg)
    let msg = SearchRequest.create(payload)
    var buffer = SearchRequest.encode(msg).finish();
    console.log(buffer)
    await producer.send({
      topic: 'test-topic',
      messages: [
        {key: 'key1', value: buffer}
      ]
    })
  })
}
run()
However, when I run this I get the following errors:
Failed to deserialize data for topic test-topic to Protobuf
Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing Protobuf message for id -1
Caused by: org.apache.kafka.common.errors.SerializationException: Unknown magic byte!
How do I fix this? My guess is that my protobuf schema is not registered in the Kafka Schema Registry, but I am unsure. If that is the case, is there a way to send the schema to the registry from Node?
io.confluent.connect.protobuf.ProtobufConverter requires the Schema Registry wire format, not plain serialized Protobuf. In other words, you're missing the Schema Registry part (or the manual construction of a "wrapped" Proto message) in the Node code.
Refer to Wire Format - Confluent.
If you would rather not use the Schema Registry, you can use the BlueApron Protobuf Converter, but it seems you are using one, so it's best to go with the Confluent converter.
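One way to produce registry-compatible messages from Node is to register the .proto schema and let a registry client add the wire-format framing for you. Below is a minimal sketch assuming the @kafkajs/confluent-schema-registry package (not part of the original setup); the broker, registry URL and topic mirror the question:
const { Kafka } = require('kafkajs')
const { SchemaRegistry, SchemaType } = require('@kafkajs/confluent-schema-registry')
const fs = require('fs')

const kafka = new Kafka({ clientId: 'my-app', brokers: ['10.8.0.1:9092'] })
const registry = new SchemaRegistry({ host: 'http://10.8.0.1:8081' })

const run = async () => {
  // Register the schema under the subject the sink's converter will look up (TopicNameStrategy)
  const { id } = await registry.register(
    { type: SchemaType.PROTOBUF, schema: fs.readFileSync('data.proto', 'utf-8') },
    { subject: 'test-topic-value' }
  )
  // encode() prepends the magic byte and schema id described in the Wire Format docs
  const value = await registry.encode(id, { query: 'test', page: 2 })

  const producer = kafka.producer()
  await producer.connect()
  await producer.send({ topic: 'test-topic', messages: [{ key: 'key1', value }] })
  await producer.disconnect()
}

run()
If you'd rather not pull in a registry client, you can build the same framing by hand (magic byte 0, the 4-byte schema id and, for Protobuf, a message-indexes list before the payload), as described on the Wire Format page.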
I have Java Spark code that reads certain properties files. These files are being passed with spark-submit like:
spark-submit \
--master yarn \
--deploy-mode cluster \
--files /home/aiman/SalesforceConn.properties,/home/aiman/columnMapping.prop,/home/aiman/sourceTableColumns.prop \
--class com.sfdc.SaleforceReader \
--verbose \
--jars /home/ebdpbuss/aiman/Salesforce/ojdbc-7.jar /home/aiman/spark-salesforce-0.0.1-SNAPSHOT-jar-with-dependencies.jar SalesforceConn.properties columnMapping.prop sourceTableColumns.prop
The code that I have written is:
SparkSession spark = SparkSession.builder().master("yarn").config("spark.submit.deployMode","cluster").getOrCreate();
JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
Configuration config = jsc.hadoopConfiguration();
FileSystem fs = FileSystem.get(config);
// args[] holds the file names that are passed as arguments.
String connDetailsFile = args[0];
String mapFile = args[1];
String sourceColumnsFile = args[2];
String connFile = SparkFiles.get(connDetailsFile);
String mappingFile = SparkFiles.get(mapFile);
String srcColsFile = SparkFiles.get(sourceColumnsFile);
Properties prop = loadProperties(fs,connFile);
Properties mappings = loadProperties(fs,mappingFile);
Properties srcColProp = loadProperties(fs,srcColsFile);
The loadProperties() method I used above:
private static Properties loadProperties(FileSystem fs, String path)
{
  Properties prop = new Properties();
  FSDataInputStream is = null;
  try {
    is = fs.open(new Path(path));
    prop.load(is);
  } catch (Exception e) {
    e.printStackTrace();
    System.exit(1);
  }
  return prop;
}
And it's giving me an exception:
Exception in thread "main" org.apache.spark.SparkException: Application application_1550650871366_125913 finished with failed status
at org.apache.spark.deploy.yarn.Client.run(Client.scala:1187)
at org.apache.spark.deploy.yarn.Client$.main(Client.scala:1233)
at org.apache.spark.deploy.yarn.Client.main(Client.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:782)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/03/01 14:34:00 INFO ShutdownHookManager: Shutdown hook called
When you pass files using --files, they are stored in a temporary local directory for each executor. So if the file names do not change, you can just use them as follows instead of using the full path provided in the arguments.
String connDetailsFile = "SalesforceConn.properties";
String mapFile = "columnMapping.prop";
String sourceColumnsFile = "sourceTableColumns.prop";
If the file names do change each time, then you have to strip off the path and use just the file name. This is because Spark doesn't recognize the argument as a path; it treats the whole string as a file name.
For example, /home/aiman/SalesforceConn.properties will be treated as a file name, and Spark will give you an exception saying it can't find a file named /home/aiman/SalesforceConn.properties.
So your code should be something like this:
String connDetailsFile = args[0].substring(args[0].lastIndexOf('/') + 1);
String mapFile = args[1].substring(args[1].lastIndexOf('/') + 1);
String sourceColumnsFile = args[2].substring(args[2].lastIndexOf('/') + 1);
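If you prefer not to split strings by hand, java.nio.file.Paths can take the base name for you; a minimal equivalent sketch (the variable name mirrors the question):
// import java.nio.file.Paths;
String connDetailsFile = Paths.get(args[0]).getFileName().toString(); // e.g. "SalesforceConn.properties"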
I am using Node.js v6 LTS and the npm jdbc package to connect to Apache Phoenix on Hortonworks.
My code is as below:
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');
var jar1='/home/ashish/seed-project-phoenix/public/drivers/phoenix-core-4.7.0.2.5.6.0-40.jar';
var jar2='/home/ashish/seed-project-phoenix/public/drivers/phoenix-queryserver-client-4.7.0.2.5.6.0-40.jar';
var jar3='/home/ashish/seed-project-phoenix/public/drivers/hbase-client-1.1.2.2.5.6.0-40.jar';
if (!jinst.isJvmCreated()) {
  // Add all java options required by your project here. You get one
  // chance to set up the options before the first java call.
  jinst.addOption("-Xrs");
  // Add all jar files required by your project here. You get one chance to
  // set up the classpath before the first java call.
  jinst.setupClasspath([jar1, jar2, jar3]); // adding jars
}
var config = {
  url: 'jdbc:phoenix:ZK1,ZK2,ZK3:2181:/hbase-unsecure',
  drivername: 'org.apache.phoenix.jdbc.PhoenixDriver'
};
var phoenixHbase = new JDBC(config);
phoenixHbase.initialize(function(err) {
  if (err) {
    console.log(err);
  } else {
    console.log("connected to Apache phoenix ...");
  }
});
But I am getting the following errors:
{ Error: Could not find class org.apache.phoenix.jdbc.PhoenixDriver
java.lang.NoClassDefFoundError: org/apache/commons/logging/LogFactory
at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.<clinit>(PhoenixEmbeddedDriver.java:74)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.logging.LogFactory
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 1 more
at Error (native)
at JDBC.Pool.initialize (/home/ashish/seed-project-phoenix/node_modules/jdbc/lib/pool.js:135:10)
at /home/ashish/seed-project-phoenix/routes/app.js:71:18
at Layer.handle [as handle_request] (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/layer.js:95:5)
at next (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/route.js:131:13)
at Route.dispatch (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/route.js:112:3)
at Layer.handle [as handle_request] (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/layer.js:95:5)
at /home/ashish/seed-project-phoenix/node_modules/express/lib/router/index.js:277:22
at Function.process_params (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/index.js:330:12)
at next (/home/ashish/seed-project-phoenix/node_modules/express/lib/router/index.js:271:10) cause: nodeJava_java_lang_NoClassDefFoundError {} }
{ Error: Error running static method
java.sql.SQLException: No suitable driver found for jdbc:phoenix:ZK1,ZK2,ZK3:2181:/hbase-unsecure
at java.sql.DriverManager.getConnection(DriverManager.java:689)
at java.sql.DriverManager.getConnection(DriverManager.java:208)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at Error (native) cause: nodeJava_java_sql_SQLException {} }
As far as I can tell there should be no problem with the connection string, and all the jar files have been added to match the versions.
Phoenix version: 4.7.0.2.5.6.0-40
HBase version: 1.1.2.2.5.6.0-40
If anybody has done anything similar, please give some direction.
I generated a new application with JHipster, choosing Gradle and MongoDB.
Gradle compiles fine:
c:\webs\workspace-jhipster\jpoc>gradle clean compileJava compileTestJava
:clean
:cleanResources UP-TO-DATE
:bootBuildInfo
:nodeSetup SKIPPED
:npmSetup SKIPPED
:webpackBuildDev SKIPPED
:processResources
:compileJava
:classes
:compileTestJava
BUILD SUCCESSFUL
Total time: 6.704 secs
The problem appears when I try to run a single test:
gradle test --tests com.jpoc.service.UserServiceIntTest
which outputs:
com.jpoc.service.UserServiceIntTest > assertThatUserMustExistToResetPassword FAILED
java.lang.IllegalStateException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.BeanCreationException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException
Caused by: org.springframework.beans.factory.BeanCreationException
Caused by: org.springframework.beans.BeanInstantiationException
Caused by: de.flapdoodle.embed.process.exceptions.DistributionException
Caused by: java.io.IOException
Caused by: java.net.SocketTimeoutException
I'm pretty sure this is a misconfiguration problem, but I don't see which one.
I use the latest JHipster, 4.2.0.
Thank you.
To set up the MongoDB proxy in a test suite with Spring Boot, I use, for instance:
@BeforeClass
public static void setup_mongo() throws UnknownHostException, IOException {
    String proxyHost = "proxy.priv.atos.fr";
    String proxyPort = "3128";
    String proxy = System.getenv("http_proxy");
    System.out.println("Proxy URL : " + proxy);
    if (proxy != null) {
        if (proxyHost == null && proxyPort == null) {
            URL proxyurl = new URL(proxy);
            proxyHost = proxyurl.getHost();
            proxyPort = String.valueOf(proxyurl.getPort());
        }
    }
    MongodStarter starter;
    System.out.println("Proxy Host : " + proxyHost);
    System.out.println("Proxy Port : " + proxyPort);
    if (proxyHost != null && proxyPort != null) {
        IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder().defaults(Command.MongoD)
                .artifactStore(
                        new ArtifactStoreBuilder().defaults(Command.MongoD)
                                .download(
                                        new DownloadConfigBuilder()
                                                .defaultsForCommand(Command.MongoD)
                                                .proxyFactory(
                                                        new HttpProxyFactory(
                                                                proxyHost,
                                                                Integer.parseInt(proxyPort)))
                                                .build()).build()).build();
        starter = MongodStarter.getInstance(runtimeConfig);
    } else {
        starter = MongodStarter.getDefaultInstance();
    }
    IMongodConfig mongodConfig = new MongodConfigBuilder()
            .version(Version.Main.PRODUCTION)
            .net(new Net(0, Network.localhostIsIPv6())).build();
    MongodExecutable mongodExecutable = null;
    mongodExecutable = starter.prepare(mongodConfig);
    mongodExecutable.start();
}
This way, it downloads the MongoDB server and tries to run it. The next problem is that I don't have permission to run this executable inside the JVM.
I am not able to publish a message using Spring Integration Kafka, though my Kafka Java client works fine.
The Java code is running on Windows and Kafka is running on a Linux box.
KafkaProducerContext<String, String> kafkaProducerContext = new KafkaProducerContext<String, String>();
ProducerMetadata<String, String> producerMetadata = new ProducerMetadata<String, String>("test-cass");
producerMetadata.setValueClassType(String.class);
producerMetadata.setKeyClassType(String.class);
Encoder<String> encoder = new StringEncoder<String>();
producerMetadata.setValueEncoder(encoder);
producerMetadata.setKeyEncoder(encoder);
ProducerFactoryBean<String, String> producer = new ProducerFactoryBean<String, String>(producerMetadata, "172.16.1.42:9092");
ProducerConfiguration<String, String> config = new ProducerConfiguration<String, String>(producerMetadata, producer.getObject());
kafkaProducerContext.setProducerConfigurations(Collections.singletonMap("test-cass", config));
KafkaProducerMessageHandler<String, String> handler = new KafkaProducerMessageHandler<String, String>(kafkaProducerContext);
handler.handleMessage(MessageBuilder.withPayload("foo")
        .setHeader("messagekey", "3")
        .setHeader("topic", "test-cass")
        .build());
I am getting the following error:
"C:\Program Files\Java\jdk1.7.0_71\bin\java" -Didea.launcher.port=7542 "-Didea.launcher.bin.path=C:\Program Files (x86)\JetBrains\IntelliJ IDEA 13.1.6\bin" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.7.0_71\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jce.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jfxrt.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\resources.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\rt.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.7.0_71\jre\lib\ext\zipfs.jar;C:\projects\SpringCassandraInt\target\classes;C:\Users\hs\.m2\repository\org\springframework\data\spring-data-cassandra\1.1.2.RELEASE\spring-data-cassandra-1.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\data\spring-cql\1.1.2.RELEASE\spring-cql-1.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-context\4.1.4.RELEASE\spring-context-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-aop\4.1.4.RELEASE\spring-aop-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\aopalliance\aopalliance\1.0\aopalliance-1.0.jar;C:\Users\hs\.m2\repository\org\springframework\spring-beans\4.0.9.RELEASE\spring-beans-4.0.9.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-core\4.1.2.RELEASE\spring-core-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\commons-logging\commons-logging\1.1.3\commons-logging-1.1.3.jar;C:\Users\hs\.m2\repository\org\springframework\spring-expression\4.1.2.RELEASE\spring-expression-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-tx\4.1.4.RELEASE\spring-tx-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\data\spring-data-commons\1.9.2.RELEASE\spring-data-commons-1.9.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\slf4j\slf4j-api\1.7.10\slf4j-api-1.7.10.jar;C:\Users\hs\.m2\repository\org\slf4j\jcl-over-slf4j\1.7.10\jcl-over-slf4j-1.7.10.jar;C:\Users\hs\.m2\repository\com\datastax\cassandra\cassandra-driver-dse\2.0.4\cassandra-driver-dse-2.0.4.jar;C:\Users\hs\.m2\repository\com\datastax\cassandra\cassandra-driver-core\2.0.4\cassandra-driver-core-2.0.4.jar;C:\Users\hs\.m2\repository\io\netty\netty\3.9.0.Final\netty-3.9.0.Final.jar;C:\Users\hs\.m2\repository\com\codahale\metrics\metrics-core\3.0.2\metrics-core-3.0.2.jar;C:\Users\hs\.m2\repository\com\google\guava\guava\15.0\guava-15.0.jar;C:\Users\hs\.m2\repository\org\liquibase\liquibase-core\3.1.1\liquibase-core-3.1.1.jar;C:\Users\hs\.m2\repository\org\yaml\snakeyaml\1.13\snakeyaml-1.13.jar;C:\Users\hs\.m2\repository\ch\qos\logback\logback-classic\1.1.2\logback-classic-1.1.2.jar;C:\Users\hs\.m2\repository\ch\qos\logback\logback-core\1.1.2\logback-core-1.1.2.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-core\4.1.2.RELEASE\spring-integration-core-4.1.2.RELEASE.jar;C:\Users\hs\.m
2\repository\org\projectreactor\reactor-core\1.1.4.RELEASE\reactor-core-1.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\com\goldmansachs\gs-collections\5.0.0\gs-collections-5.0.0.jar;C:\Users\hs\.m2\repository\com\goldmansachs\gs-collections-api\5.0.0\gs-collections-api-5.0.0.jar;C:\Users\hs\.m2\repository\com\lmax\disruptor\3.2.1\disruptor-3.2.1.jar;C:\Users\hs\.m2\repository\io\gatling\jsr166e\1.0\jsr166e-1.0.jar;C:\Users\hs\.m2\repository\org\springframework\retry\spring-retry\1.1.1.RELEASE\spring-retry-1.1.1.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-messaging\4.1.4.RELEASE\spring-messaging-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-stream\4.1.2.RELEASE\spring-integration-stream-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-xml\4.1.2.RELEASE\spring-integration-xml-4.1.2.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\spring-oxm\4.1.4.RELEASE\spring-oxm-4.1.4.RELEASE.jar;C:\Users\hs\.m2\repository\org\springframework\ws\spring-xml\2.2.0.RELEASE\spring-xml-2.2.0.RELEASE.jar;C:\Users\hs\.m2\repository\com\jayway\jsonpath\json-path\1.2.0\json-path-1.2.0.jar;C:\Users\hs\.m2\repository\net\minidev\json-smart\2.1.0\json-smart-2.1.0.jar;C:\Users\hs\.m2\repository\net\minidev\asm\1.0.2\asm-1.0.2.jar;C:\Users\hs\.m2\repository\asm\asm\3.3.1\asm-3.3.1.jar;C:\Users\hs\.m2\repository\org\springframework\integration\spring-integration-kafka\1.0.0.RELEASE\spring-integration-kafka-1.0.0.RELEASE.jar;C:\Users\hs\.m2\repository\org\apache\avro\avro-compiler\1.7.6\avro-compiler-1.7.6.jar;C:\Users\hs\.m2\repository\org\apache\avro\avro\1.7.6\avro-1.7.6.jar;C:\Users\hs\.m2\repository\org\codehaus\jackson\jackson-core-asl\1.9.13\jackson-core-asl-1.9.13.jar;C:\Users\hs\.m2\repository\org\codehaus\jackson\jackson-mapper-asl\1.9.13\jackson-mapper-asl-1.9.13.jar;C:\Users\hs\.m2\repository\com\thoughtworks\paranamer\paranamer\2.3\paranamer-2.3.jar;C:\Users\hs\.m2\repository\org\xerial\snappy\snappy-java\1.0.5\snappy-java-1.0.5.jar;C:\Users\hs\.m2\repository\org\apache\commons\commons-compress\1.4.1\commons-compress-1.4.1.jar;C:\Users\hs\.m2\repository\org\tukaani\xz\1.0\xz-1.0.jar;C:\Users\hs\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;C:\Users\hs\.m2\repository\org\apache\velocity\velocity\1.7\velocity-1.7.jar;C:\Users\hs\.m2\repository\commons-collections\commons-collections\3.2.1\commons-collections-3.2.1.jar;C:\Users\hs\.m2\repository\com\yammer\metrics\metrics-annotation\2.2.0\metrics-annotation-2.2.0.jar;C:\Users\hs\.m2\repository\com\yammer\metrics\metrics-core\2.2.0\metrics-core-2.2.0.jar;C:\Users\hs\.m2\repository\org\apache\kafka\kafka_2.10\0.8.1.1\kafka_2.10-0.8.1.1.jar;C:\Users\hs\.m2\repository\org\apache\zookeeper\zookeeper\3.3.4\zookeeper-3.3.4.jar;C:\Users\hs\.m2\repository\log4j\log4j\1.2.15\log4j-1.2.15.jar;C:\Users\hs\.m2\repository\javax\mail\mail\1.4\mail-1.4.jar;C:\Users\hs\.m2\repository\javax\activation\activation\1.1\activation-1.1.jar;C:\Users\hs\.m2\repository\javax\jms\jms\1.1\jms-1.1.jar;C:\Users\hs\.m2\repository\com\sun\jdmk\jmxtools\1.2.1\jmxtools-1.2.1.jar;C:\Users\hs\.m2\repository\com\sun\jmx\jmxri\1.2.1\jmxri-1.2.1.jar;C:\Users\hs\.m2\repository\jline\jline\0.9.94\jline-0.9.94.jar;C:\Users\hs\.m2\repository\net\sf\jopt-simple\jopt-simple\3.2\jopt-simple-3.2.jar;C:\Users\hs\.m2\repository\org\scala-lang\scala-library\2.10.1\scala-library-2.10.1.jar;C:\Users\hs\.m2\repository\com\101tec\zkclient\0.3\zkclient-0.3.jar;C:\Program 
Files (x86)\JetBrains\IntelliJ IDEA 13.1.6\lib\idea_rt.jar" com.intellij.rt.execution.application.AppMain com.agillic.dialogue.kafka.outbound.SpringKafkaTest
15:39:11.736 [main] INFO o.s.i.k.support.ProducerFactoryBean - Using producer properties => {metadata.broker.list=172.16.1.42:9092, compression.codec=0}
2015-02-19 15:39:12 INFO VerifiableProperties:68 - Verifying properties
2015-02-19 15:39:12 INFO VerifiableProperties:68 - Property compression.codec is overridden to 0
2015-02-19 15:39:12 INFO VerifiableProperties:68 - Property metadata.broker.list is overridden to 172.16.1.42:9092
15:39:12.164 [main] INFO o.s.b.f.config.PropertiesFactoryBean - Loading properties file from URL [jar:file:/C:/Users/hs/.m2/repository/org/springframework/integration/spring-integration-core/4.1.2.RELEASE/spring-integration-core-4.1.2.RELEASE.jar!/META-INF/spring.integration.default.properties]
15:39:12.208 [main] DEBUG o.s.i.k.o.KafkaProducerMessageHandler - org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler#5204db6b received message: GenericMessage [payload=foo, headers={timestamp=1424356752208, id=00c483d9-ecf8-2937-4a2c-985bd3afcae4, topic=test-cass, messagekey=3}]
Exception in thread "main" org.springframework.messaging.MessageHandlingException: error occurred in message handler [org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler#5204db6b]; nested exception is java.lang.NullPointerException
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:84)
at com.agillic.dialogue.kafka.outbound.SpringKafkaTest.main(SpringKafkaTest.java:40)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:134)
Caused by: java.lang.NullPointerException
at org.springframework.integration.kafka.support.KafkaProducerContext.getTopicConfiguration(KafkaProducerContext.java:58)
at org.springframework.integration.kafka.support.KafkaProducerContext.send(KafkaProducerContext.java:190)
at org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler.handleMessageInternal(KafkaProducerMessageHandler.java:81)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:78)
... 6 more
Process finished with exit code 1
Actually, when we introduced KafkaHeaders we made the appropriate documentation changes: https://github.com/spring-projects/spring-integration-kafka/blob/master/README.md. See the Important note:
Since the last Milestone, we have introduced the KafkaHeaders interface with constants. The messageKey and topic default headers now require a kafka_ prefix. When migrating from an earlier version, you need to specify message-key-expression="headers.messageKey" and topic-expression="headers.topic" on the <int-kafka:outbound-channel-adapter>, or simply change the headers upstream to the new headers from KafkaHeaders using a <header-enricher> or MessageBuilder. Or, of course, configure them on the adapter if you are using constant values.
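Applied to the code in the question, that means using the kafka_-prefixed header names; a minimal sketch, assuming the KafkaHeaders constants from org.springframework.integration.kafka.support:
// Use the KafkaHeaders constants (kafka_messageKey / kafka_topic) instead of the old names
handler.handleMessage(MessageBuilder.withPayload("foo")
        .setHeader(KafkaHeaders.MESSAGE_KEY, "3")
        .setHeader(KafkaHeaders.TOPIC, "test-cass")
        .build());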
UPDATE
Regarding the NullPointerException: it's really an issue. Feel free to raise a JIRA ticket and we'll take care of it. A contribution is welcome as well!