Connecting to a Cassandra Docker Container Using Cassandra Spring Data

So I am running into issues connecting to a single-node Cassandra cluster using spring-data-cassandra. I am using the docker image found at https://hub.docker.com/_/cassandra/, started via docker-compose with the following service definition:
cassandra_n1:
  image: cassandra:latest
  ports:
    - "9042:9042"
    - "9160:9160"
  hostname: cassandra_n1
  environment:
    CASSANDRA_CLUSTER_NAME: "mycluster"
    CASSANDRA_ENDPOINT_SNITCH: "PropertyFileSnitch"
    CASSANDRA_DC: "DC1"
    CASSANDRA_RACK: "R1"
Once this is up, I try to connect to it from my Spring Boot application, which is as simple as:
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class MvcApplication {
    public static void main(String[] args) {
        SpringApplication.run(MvcApplication.class);
    }
}
which picks up the following configuration class:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.data.cassandra.config.CassandraClusterFactoryBean;
import org.springframework.data.cassandra.config.CassandraSessionFactoryBean;
import org.springframework.data.cassandra.config.SchemaAction;
import org.springframework.data.cassandra.convert.CassandraConverter;
import org.springframework.data.cassandra.convert.MappingCassandraConverter;
import org.springframework.data.cassandra.core.CassandraOperations;
import org.springframework.data.cassandra.core.CassandraTemplate;
import org.springframework.data.cassandra.mapping.BasicCassandraMappingContext;
import org.springframework.data.cassandra.mapping.CassandraMappingContext;
import org.springframework.data.cassandra.repository.config.EnableCassandraRepositories;

@Configuration
@PropertySource(value = { "classpath:cassandra.properties" })
@EnableCassandraRepositories(basePackages = { "myproject.repository" })
public class CassandraConfig {

    private static final Logger LOG = LoggerFactory.getLogger(CassandraConfig.class);

    @Autowired
    private Environment env;

    @Bean
    public CassandraClusterFactoryBean cluster() {
        CassandraClusterFactoryBean cluster = new CassandraClusterFactoryBean();
        cluster.setContactPoints(env.getProperty("cassandra.contactpoints"));
        cluster.setPort(Integer.parseInt(env.getProperty("cassandra.port")));
        return cluster;
    }

    @Bean
    public CassandraMappingContext mappingContext() {
        return new BasicCassandraMappingContext();
    }

    @Bean
    public CassandraConverter converter() {
        return new MappingCassandraConverter(mappingContext());
    }

    @Bean
    public CassandraSessionFactoryBean session() throws Exception {
        CassandraSessionFactoryBean session = new CassandraSessionFactoryBean();
        session.setCluster(cluster().getObject());
        session.setKeyspaceName(env.getProperty("cassandra.keyspace"));
        session.setConverter(converter());
        session.setSchemaAction(SchemaAction.NONE);
        return session;
    }

    @Bean
    public CassandraOperations cassandraTemplate() throws Exception {
        return new CassandraTemplate(session().getObject());
    }
}
It reads the property file cassandra.properties, which is:
cassandra.contactpoints=192.168.99.100
cassandra.port=9042
cassandra.keyspace=mykeyspace
I am using docker-machine as the Docker daemon, which has an address of 192.168.99.100.
In my pom I am using these dependencies:
<dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-cassandra</artifactId>
    <version>1.0.0.RELEASE</version>
    <exclusions>
        <exclusion>
            <groupId>org.springframework</groupId>
            <artifactId>spring-expression</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-expression</artifactId>
    <version>4.1.7.RELEASE</version>
</dependency>
After I build and run my application, it fails to connect to Cassandra with this message:
Caused by: com.datastax.driver.core.exceptions.NoHostAvailableException:
All host(s) tried for query failed (tried: /192.168.99.100:9042 (com.datastax.driver.core.ConnectionException:
[/192.168.99.100:9042] Unexpected error during transport initialization (com.datastax.driver.core.TransportException:
[/192.168.99.100:9042] Unexpected exception triggered (java.lang.IndexOutOfBoundsException:
Not enough readable bytes - Need 4, maximum is 0))))
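For what it's worth, here is a minimal standalone check against the container using the DataStax driver directly (a hypothetical test class, not part of my app), to see whether the failure is in Spring Data or in the driver/Cassandra handshake:

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;

// Hypothetical standalone check: connects with the raw DataStax driver,
// bypassing Spring Data entirely.
public class PlainDriverCheck {
    public static void main(String[] args) {
        Cluster cluster = Cluster.builder()
                .addContactPoint("192.168.99.100") // docker-machine address
                .withPort(9042)
                .build();
        Session session = cluster.connect();
        System.out.println("Connected to: " + cluster.getMetadata().getClusterName());
        session.close();
        cluster.close();
    }
}

If this fails with the same "Not enough readable bytes" error, that usually points to a native-protocol mismatch between an older driver (spring-data-cassandra 1.0.0 pulls in a 2.0.x-era cassandra-driver-core, if I read its pom correctly) and a newer Cassandra, rather than to the Spring configuration itself.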
I have tried setting the listen_address, broadcast_address, and rpc_address to the Docker daemon IP, but have not had success.
Any help would be appreciated.

Related

How do I set up Dropwizard metrics for the Cassandra Java driver?

I would like to set up the Java driver to collect JMX metrics and view them with jConsole or jmxterm. How do I go about exposing those MBean metrics in the Cassandra Java driver? In this case, I'm using the 4.14 Java driver.
Here's a good link that helped me with the 4.14 driver:
https://docs.datastax.com/en/developer/java-driver/4.14/manual/core/metrics/
Ultimately, the JMX metrics show up under the driver's domain in jConsole (screenshot not reproduced here).
And here's my application code:
pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.example.cassandra</groupId>
    <artifactId>testing-connection</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>testing-connection</name>
    <properties>
        <java.version>1.8</java.version>
        <driver.version>4.14.1</driver.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>
    <dependencies>
        <dependency>
            <groupId>com.datastax.oss</groupId>
            <artifactId>java-driver-core</artifactId>
            <version>${driver.version}</version>
        </dependency>
        <dependency>
            <groupId>io.dropwizard.metrics</groupId>
            <artifactId>metrics-jmx</artifactId>
            <version>4.1.2</version>
        </dependency>
    </dependencies>
</project>
application.conf:
datastax-java-driver {
  basic {
    contact-points = [ "10.101.36.152:9042" ]
    load-balancing-policy {
      local-datacenter = "SearchGraphAnalytics"
    }
  }
  advanced.metrics {
    session.enabled = [
      bytes-sent, bytes-received, connected-nodes, cql-requests,
      cql-client-timeouts, cql-prepared-cache-size, throttling.delay,
      throttling.queue-size, throttling.errors, continuous-cql-requests,
      graph-requests, graph-client-timeouts
    ]
    node.enabled = [
      pool.open-connections, pool.available-streams, pool.in-flight,
      pool.orphaned-streams, bytes-sent, bytes-received, cql-messages,
      errors.request.unsent, errors.request.aborted, errors.request.write-timeouts,
      errors.request.read-timeouts, errors.request.unavailables, errors.request.others,
      retries.total, retries.aborted, retries.read-timeout, retries.write-timeout,
      retries.unavailable, retries.other, ignores.total, ignores.aborted,
      ignores.read-timeout, ignores.write-timeout, ignores.unavailable, ignores.other,
      speculative-executions, errors.connection.init, errors.connection.auth,
      graph-messages
    ]
  }
}
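As far as I can tell, the same metric lists can also be enabled programmatically instead of through application.conf; here is a minimal sketch assuming driver 4.x (the class name and the metric subset are just examples):

import java.util.Arrays;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;

// Sketch: enable a subset of session/node metrics without application.conf.
// The option values mirror the config keys shown above.
public class ProgrammaticMetricsConfig {
    public static void main(String[] args) {
        DriverConfigLoader loader = DriverConfigLoader.programmaticBuilder()
                .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED,
                        Arrays.asList("cql-requests", "connected-nodes"))
                .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED,
                        Arrays.asList("pool.open-connections", "pool.in-flight"))
                .build();
        try (CqlSession session = CqlSession.builder()
                .withConfigLoader(loader)
                .build()) {
            System.out.println(session.getMetrics().isPresent()
                    ? "Metrics enabled" : "Metrics disabled");
        }
    }
}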
Main class
package com.example.cassandra;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import com.datastax.oss.driver.api.core.metadata.Metadata;
import com.datastax.oss.driver.api.core.cql.ResultSet;
import java.util.ArrayList;
import com.codahale.metrics.jmx.JmxReporter;
import com.codahale.metrics.MetricRegistry;

public class TestingConnections {
    private CqlSession session;
    static String keyspace = "keyspace1";
    static String table = "names";

    public void connect() {
        CqlSessionBuilder builder = CqlSession.builder();
        session = builder.build();
        Metadata metadata = session.getMetadata();
        System.out.printf("Connected to cluster: %s\n", metadata.getClusterName());
    }

    public CqlSession getSession() {
        return this.session;
    }

    public void getData(String keyspace, String table) {
        ResultSet results = session.execute("select * from " + keyspace + "." + table);
        ArrayList<String> first_names = new ArrayList<String>();
        results.forEach(row -> first_names.add(row.getString("first")));
        first_names.forEach(first_name -> System.out.println(first_name));
    }

    public void close() {
        session.close();
    }

    public void registerJMX() {
        MetricRegistry registry = session.getMetrics()
                .orElseThrow(() -> new IllegalStateException("Metrics are disabled"))
                .getRegistry();
        JmxReporter reporter = JmxReporter.forRegistry(registry)
                .inDomain("com.datastax.oss.driver")
                .build();
        reporter.start();
    }

    public static void main(String[] args) {
        System.out.printf("Connecting to client");
        TestingConnections client = new TestingConnections();
        client.connect();
        client.registerJMX();
        client.getData(keyspace, table);
    }
}
That did work to generate the metrics as shown in the first screenshot. I haven't tried metric packages other than Dropwizard (which is the default).
Also note, I am not calling client.close() because I want my session to stay open so that I can connect jConsole to my Java application.

JHipster Mongock migration

I am migrating off of Mongobee to Mongock so we can use Atlas. I've followed the commits on the suggested changes that have been merged into master, and have modified the CloudDatabaseConfiguration, DatabaseConfiguration, and InitialSetupMigration classes. I've also updated the pom to import the Mongock 4.1.17 dependencies.
Running the app, there seem to be no issues. I've tested the change log and everything operates as it should. When I run my tests, however, I get an error stating it cannot find the class org/springframework/data/mongodb/MongoDatabaseFactory.
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'mongockInitializingBeanRunner' defined in class path resource [com/ioi/helpdesk/gateway/config/DatabaseConfiguration.class]: Invocation of init method failed; nested exception is java.lang.NoClassDefFoundError: org/springframework/data/mongodb/MongoDatabaseFactory
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1796)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:595)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:517)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:323)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:226)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:321)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:202)
<dependency>
    <groupId>com.github.cloudyrock.mongock</groupId>
    <artifactId>mongock-spring-v5</artifactId>
    <version>4.1.17</version>
</dependency>
<dependency>
    <groupId>com.github.cloudyrock.mongock</groupId>
    <artifactId>mongodb-springdata-v3-driver</artifactId>
    <version>4.1.17</version>
</dependency>
I have not changed the starter data dependency:
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-mongodb</artifactId>
</dependency>
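Since mongodb-springdata-v3-driver targets Spring Data MongoDB 3.x, and MongoDatabaseFactory only exists from spring-data-mongodb 3.0 onwards, I wrote a quick probe (hypothetical helper, not part of the app) to run under the test classpath and see which version the tests actually resolve:

// Hypothetical probe: run with the test classpath to check whether
// spring-data-mongodb 3.x (which introduced MongoDatabaseFactory) is resolved.
public class MongoFactoryClasspathProbe {
    public static void main(String[] args) {
        try {
            Class.forName("org.springframework.data.mongodb.MongoDatabaseFactory");
            System.out.println("spring-data-mongodb 3.x is on the classpath");
        } catch (ClassNotFoundException e) {
            System.out.println("MongoDatabaseFactory not found; spring-data-mongodb predates 3.0");
        }
    }
}

For reference, here is my DatabaseConfiguration: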
@Configuration
@EnableMongoRepositories("com.ioi.helpdesk.gateway.repository")
@Profile("!" + JHipsterConstants.SPRING_PROFILE_CLOUD)
@Import(value = MongoAutoConfiguration.class)
@EnableMongoAuditing(auditorAwareRef = "springSecurityAuditorAware")
public class DatabaseConfiguration {

    private final Logger log = LoggerFactory.getLogger(DatabaseConfiguration.class);

    @Bean
    public ValidatingMongoEventListener validatingMongoEventListener() {
        return new ValidatingMongoEventListener(validator());
    }

    @Bean
    public LocalValidatorFactoryBean validator() {
        return new LocalValidatorFactoryBean();
    }

    @Bean
    public MongoCustomConversions customConversions() {
        List<Converter<?, ?>> converters = new ArrayList<>();
        converters.add(DateToZonedDateTimeConverter.INSTANCE);
        converters.add(ZonedDateTimeToDateConverter.INSTANCE);
        return new MongoCustomConversions(converters);
    }

    @Bean
    public MongockSpring5.MongockInitializingBeanRunner mongockInitializingBeanRunner(
            ApplicationContext springContext,
            MongoTemplate mongoTemplate,
            @Value("${mongock.lockAcquiredForMinutes:5}") long lockAcquiredForMinutes,
            @Value("${mongock.maxWaitingForLockMinutes:3}") long maxWaitingForLockMinutes,
            @Value("${mongock.maxTries:3}") int maxTries) {
        try {
            log.info("INITIALIZING MONGOCK!");
            SpringDataMongo3Driver driver = SpringDataMongo3Driver.withLockSetting(
                    mongoTemplate, lockAcquiredForMinutes, maxWaitingForLockMinutes, maxTries);
            MongockSpring5.MongockInitializingBeanRunner runner = MongockSpring5.builder()
                    .setDriver(driver)
                    .addChangeLogsScanPackage("com.ioi.helpdesk.gateway.config.dbmigrations")
                    .setSpringContext(springContext)
                    .buildInitializingBeanRunner();
            log.info("MONGOCK INITIALIZED!");
            return runner;
        } catch (Exception e) {
            log.info("Error during Mongock initialization - " + ExceptionUtils.getStackTrace(e));
        }
        return null;
    }
}
Am I missing a test dependency or incorrectly included one?

com.datastax.driver.core.OperationTimedOutException

I created the following test class using the Cassandra Java driver.
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Host;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Session;

public class SampleB {
    private static String server_ip = "127.0.0.1";
    private static String keyspace = "hr";
    private static Cluster cluster = null;
    private static Session session = null;

    public static void main(String[] args) {
        if (cluster != null) return;
        cluster = Cluster.builder().addContactPoints(server_ip).withPort(9042).build();
        final Metadata metadata = cluster.getMetadata();
        String msg = String.format("Connected to cluster: %s", metadata.getClusterName());
        System.out.println(msg);
        System.out.println("List of hosts");
        for (final Host host : metadata.getAllHosts()) {
            msg = String.format("Datacenter: %s; Host: %s; Rack: %s",
                    host.getDatacenter(),
                    host.getAddress(),
                    host.getRack());
            System.out.println(msg);
        }
        session = cluster.connect(keyspace);
    }
}
It fails with com.datastax.driver.core.OperationTimedOutException; the (truncated) stack trace ends with:
(Cluster.java:407)
at samples.SampleB.main(SampleB.java:28)
After googling, I tried all the proposed solutions without success. Could you please help me solve this issue? Thanks a lot.
You should verify these dependencies:
<!-- Apache Cassandra Datastax's CQL driver. -->
<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-core</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-mapping</artifactId>
    <version>3.1.0</version>
</dependency>
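Beyond the dependency versions: OperationTimedOutException is the driver's client-side read timeout, so raising it while you investigate can help. A sketch with the 3.x driver (the timeout value is just an example):

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.SocketOptions;

// Sketch: raise the client-side read timeout (the 3.x driver defaults to
// 12 seconds) so slow replies do not immediately surface as timeouts.
public class TimeoutTuning {
    public static void main(String[] args) {
        SocketOptions socketOptions = new SocketOptions()
                .setReadTimeoutMillis(30000); // example value
        Cluster cluster = Cluster.builder()
                .addContactPoint("127.0.0.1")
                .withPort(9042)
                .withSocketOptions(socketOptions)
                .build();
        System.out.println(cluster.getMetadata().getClusterName());
        cluster.close();
    }
}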
HTH

Kafka Spark Streaming data not getting written into Cassandra, zero rows inserted

While writing data to Cassandra from Spark, the data is not getting written.
The background is: I am doing a Kafka - Spark Streaming - Cassandra integration.
I am reading Kafka messages and trying to put them in a Cassandra table CREATE TABLE TEST_TABLE(key INT PRIMARY KEY, value TEXT).
Kafka to Spark Streaming runs fine, but from Spark to Cassandra there is some issue; data is not getting written to the table.
I am able to create a connection with Cassandra, but the data is not getting inserted into the Cassandra table. The output shows it getting connected and the next second getting disconnected.
All of the System.out.println() strings do appear in the output:
+++++++++++cassandra connector created++++++++++++++++++++++++++++
+++++++++++++streaming Connection done!+++++++++++++++++++++++++++
++++++++++++++++JavaDStream<TestTable> created++++++++++++++++++++++++++++
Cassandra shell shows 0 rows.
The full code, logs, and dependencies are below:
public class SparkStream {
    static int key = 0;

    public static void main(String args[]) throws Exception {
        if (args.length != 3) {
            System.out.println("parameters not given properly");
            System.exit(1);
        }

        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);

        Map<String, Integer> topicMap = new HashMap<String, Integer>();
        String[] topic = args[2].split(",");
        for (String t : topic) {
            topicMap.put(t, new Integer(3));
        }

        /* Connection to Spark */
        SparkConf conf = new SparkConf();
        conf.set("spark.cassandra.connection.host", "localhost");
        JavaSparkContext sc = new JavaSparkContext("local[4]", "SparkStream", conf);
        JavaStreamingContext jssc = new JavaStreamingContext(sc, new Duration(5000));

        /* Connection to Cassandra */
        CassandraConnector connector = CassandraConnector.apply(sc.getConf());
        System.out.println("+++++++++++cassandra connector created++++++++++++++++++++++++++++");

        /* Receive Kafka streaming inputs */
        JavaPairReceiverInputDStream<String, String> messages =
                KafkaUtils.createStream(jssc, args[0], args[1], topicMap);
        System.out.println("+++++++++++++streaming Connection done!+++++++++++++++++++++++++++");

        /* Create DStream */
        JavaDStream<TestTable> data = messages.map(new Function<Tuple2<String, String>, TestTable>() {
            public TestTable call(Tuple2<String, String> message) {
                return new TestTable(new Integer(++key), message._2());
            }
        });
        System.out.println("++++++++++++++++JavaDStream<TestTable> created++++++++++++++++++++++++++++");

        /* Write to Cassandra */
        javaFunctions(data).writerBuilder("testkeyspace", "test_table", mapToRow(TestTable.class)).saveToCassandra();

        jssc.start();
        jssc.awaitTermination();
    }
}

class TestTable implements Serializable {
    Integer key;
    String value;

    public TestTable() {}

    public TestTable(Integer k, String v) {
        key = k;
        value = v;
    }

    public Integer getKey() {
        return key;
    }

    public void setKey(Integer k) {
        key = k;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String v) {
        value = v;
    }

    public String toString() {
        return MessageFormat.format("TestTable'{'key={0}, value={1}'}'", key, value);
    }
}
The log is:
+++++++++++cassandra connector created++++++++++++++++++++++++++++
+++++++++++++streaming Connection done!+++++++++++++++++++++++++++
++++++++++++++++JavaDStream<TestTable> created++++++++++++++++++++++++++++
14/12/09 12:07:33 INFO core.Cluster: New Cassandra host localhost/127.0.0.1:9042 added
14/12/09 12:07:33 INFO cql.CassandraConnector: Connected to Cassandra cluster: Test Cluster
14/12/09 12:07:33 INFO cql.LocalNodeFirstLoadBalancingPolicy: Adding host 127.0.0.1 (datacenter1)
14/12/09 12:07:33 INFO cql.LocalNodeFirstLoadBalancingPolicy: Adding host 127.0.0.1 (datacenter1)
14/12/09 12:07:34 INFO cql.CassandraConnector: Disconnected from Cassandra cluster: Test Cluster
14/12/09 12:07:45 INFO core.Cluster: New Cassandra host localhost/127.0.0.1:9042 added
14/12/09 12:07:45 INFO cql.CassandraConnector: Connected to Cassandra cluster: Test Cluster
14/12/09 12:07:45 INFO cql.LocalNodeFirstLoadBalancingPolicy: Adding host 127.0.0.1 (datacenter1)
14/12/09 12:07:45 INFO cql.LocalNodeFirstLoadBalancingPolicy: Adding host 127.0.0.1 (datacenter1)
14/12/09 12:07:46 INFO cql.CassandraConnector: Disconnected from Cassandra cluster: Test Cluster
The pom.xml dependencies are:
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka_2.10</artifactId>
    <version>1.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming_2.10</artifactId>
    <version>1.1.0</version>
</dependency>
<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector_2.10</artifactId>
    <version>1.1.0</version>
</dependency>
<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector-java_2.10</artifactId>
    <version>1.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.10</artifactId>
    <version>1.1.1</version>
</dependency>
<dependency>
    <groupId>com.msiops.footing</groupId>
    <artifactId>footing-tuple</artifactId>
    <version>0.2</version>
</dependency>
<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-core</artifactId>
    <version>2.1.3</version>
</dependency>
Is there something wrong with the code, or with the Cassandra configuration?
Solved the issue.
The column mapper wasn't able to access the getters and setters of class TestTable, so I changed the access modifier to public.
But then I had two public classes in one file, which is an error, so I created another Java file TestTable.java with the class as:
public class TestTable implements Serializable {
    //code
}
Now the messages are being read from Kafka and stored in the Cassandra table.
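For completeness, the extracted TestTable.java looks roughly like this (same members as above, now in its own file with the class made public):

import java.io.Serializable;
import java.text.MessageFormat;

// TestTable.java: the mapped class moved to its own file so it can be public,
// letting the connector's column mapper reach its getters and setters.
public class TestTable implements Serializable {
    Integer key;
    String value;

    public TestTable() {}

    public TestTable(Integer k, String v) {
        key = k;
        value = v;
    }

    public Integer getKey() { return key; }
    public void setKey(Integer k) { key = k; }
    public String getValue() { return value; }
    public void setValue(String v) { value = v; }

    public String toString() {
        return MessageFormat.format("TestTable'{'key={0}, value={1}'}'", key, value);
    }
}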

Kie (Drools) + Spring + Camel + Excel decision table

I currently have working Excel decision tables being read into Camel routes with the following libraries:
Drools 5.6.0.Final
Camel 2.14.0
Spring Boot 1.0.2
JDK 7
We are in the process of upgrading Spring Boot to 1.1.5, and after the upgrade the Drools decision table compilation started to crash, even with 2 GB of PermGen space assigned.
It seems the new Spring Boot version causes some sort of classloader loop that eats all the PermGen it can find.
If we use JDK 8, the problem "goes away", but the startup process eats 1.6 GB of memory during Drools decision table compilation.
I tried upgrading Drools to 6.1.0 (KIE), but then I am not able to use Excel-based decision tables anymore, as there seems to be no way to configure the Spring/Camel/Drools beans. At least http://docs.jboss.org/drools/release/6.1.0.Final/drools-docs/html/ch.kie.spring.html#d0e12957 doesn't provide any clues on how to achieve this.
Does anyone know a way to either make Drools 5.6.0 work more efficiently under JDK 7, OR to allow Excel decision tables under JDK 8 with Drools/KIE 6+?
Exception stack trace on the JDK 7 PermGen error:
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'excelDTABLE_KBase': Invocation of init method failed; nested exception is java.lang.OutOfMemoryError: PermGen space
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1554)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:539)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:475)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:228)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:298)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:193)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:687)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:762)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:482)
at org.springframework.boot.context.embedded.EmbeddedWebApplicationContext.refresh(EmbeddedWebApplicationContext.java:109)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:691)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:320)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:952)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:941)
at my.domain.Application.main(Application.java:54)
Caused by: java.lang.OutOfMemoryError: PermGen space
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:800)
at org.drools.rule.JavaDialectRuntimeData$PackageClassLoader.fastFindClass(JavaDialectRuntimeData.java:624)
at org.drools.util.CompositeClassLoader$CachingLoader.load(CompositeClassLoader.java:254)
at org.drools.util.CompositeClassLoader$CachingLoader.load(CompositeClassLoader.java:237)
at org.drools.util.CompositeClassLoader.loadClass(CompositeClassLoader.java:88)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.drools.rule.JavaDialectRuntimeData.wire(JavaDialectRuntimeData.java:434)
at org.drools.rule.JavaDialectRuntimeData.wire(JavaDialectRuntimeData.java:429)
at org.drools.rule.JavaDialectRuntimeData.onBeforeExecute(JavaDialectRuntimeData.java:257)
at org.drools.rule.DialectRuntimeRegistry.onBeforeExecute(DialectRuntimeRegistry.java:139)
at org.drools.compiler.PackageBuilder.reloadAll(PackageBuilder.java:1202)
at org.drools.compiler.PackageBuilder.compileAllRules(PackageBuilder.java:951)
at org.drools.compiler.PackageBuilder.addPackage(PackageBuilder.java:938)
at org.drools.compiler.PackageBuilder.addPackageFromDecisionTable(PackageBuilder.java:451)
at org.drools.compiler.PackageBuilder.addKnowledgeResource(PackageBuilder.java:715)
at org.drools.builder.impl.KnowledgeBuilderImpl.add(KnowledgeBuilderImpl.java:51)
at org.drools.builder.impl.KnowledgeBuilderImpl.add(KnowledgeBuilderImpl.java:40)
at org.drools.container.spring.beans.KnowledgeBaseBeanFactory.afterPropertiesSet(KnowledgeBaseBeanFactory.java:110)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1613)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1550)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:539)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:475)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:228)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:298)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:193)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:687)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:762)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:482)
at org.springframework.boot.context.embedded.EmbeddedWebApplicationContext.refresh(EmbeddedWebApplicationContext.java:109)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:691)
The solution provided is for a Drools decision table using KIE with Spring Boot.
Reference for the decision table, FYI: be mindful that an object shared across conditions must have merged cells across those conditions.
@Configuration
public class DroolConfig {

    private KieServices kieServices = KieServices.Factory.get();

    private KieFileSystem getKieFileSystem() throws IOException {
        KieFileSystem kieFileSystem = kieServices.newKieFileSystem();
        String xls[] = new String[] { "wallet.xls", "offer.xls" }; // path to file
        for (int i = 0; i < xls.length; i++) {
            kieFileSystem.write(ResourceFactory.newClassPathResource(xls[i]));
        }
        return kieFileSystem;
    }

    @Bean
    public KieContainer getKieContainer() throws IOException {
        System.out.println("Container created...");
        getKieRepository();
        KieBuilder kb = kieServices.newKieBuilder(getKieFileSystem());
        kb.buildAll();
        KieModule kieModule = kb.getKieModule();
        KieContainer kContainer = kieServices.newKieContainer(kieModule.getReleaseId());
        return kContainer;
    }

    private void getKieRepository() {
        final KieRepository kieRepository = kieServices.getRepository();
        kieRepository.addKieModule(new KieModule() {
            public ReleaseId getReleaseId() {
                return kieRepository.getDefaultReleaseId();
            }
        });
    }

    @Bean
    public KieSession getKieSession() throws IOException {
        System.out.println("session created...");
        return getKieContainer().newKieSession();
    }
}
@RestController
public class MegaOfferController {

    @Autowired
    private KieSession session;

    @PostMapping("/order")
    public Order orderNow(@RequestBody Order order) {
        session.insert(order);
        session.fireAllRules();
        return order;
    }
}
<properties>
    <drools.version>6.4.0.Final</drools.version>
</properties>
<dependencies>
    <dependency>
        <groupId>org.kie</groupId>
        <artifactId>kie-api</artifactId>
        <version>${drools.version}</version>
    </dependency>
    <dependency>
        <groupId>org.drools</groupId>
        <artifactId>drools-core</artifactId>
        <version>${drools.version}</version>
    </dependency>
    <dependency>
        <groupId>org.drools</groupId>
        <artifactId>drools-compiler</artifactId>
        <version>${drools.version}</version>
    </dependency>
    <dependency>
        <groupId>org.drools</groupId>
        <artifactId>drools-decisiontables</artifactId>
        <version>${drools.version}</version>
    </dependency>
    <dependency>
        <groupId>org.kie</groupId>
        <artifactId>kie-ci</artifactId>
        <version>${drools.version}</version>
    </dependency>
</dependencies>
