Exception when using @Query annotation - "org.springframework.dao.InvalidDataAccessApiUsageException: declarative query methods are a todo" - spring-data-cassandra

I'm new to Spring Data Cassandra. My sample code looks like this:
Customer.java
@Table
public class Customer {

    @PrimaryKey
    private String id;

    private String firstName;
    private String lastName;

    @CassandraType(type = Name.BIGINT)
    private Long age;

    protected Customer() {}

    public Customer(String id, String firstName, String lastName, Long age) {
        this.firstName = firstName;
        this.lastName = lastName;
        this.id = id;
        this.age = age;
    }

    public String getId() {
        return id;
    }

    public Long getAge() {
        return age;
    }

    public String getFirstName() {
        return firstName;
    }

    public String getLastName() {
        return lastName;
    }

    @Override
    public String toString() {
        return String.format(
                "Customer[id=%s, firstName='%s', lastName='%s', age=%d]",
                id, firstName, lastName, age);
    }
}
CustomerRepository.java
public interface CustomerRepository extends CrudRepository<Customer, Long> {

    @Query("select * from customer where lastname = :lName")
    List<Customer> findByLastName(@Param("lName") String lName);

    List<Customer> findAll();
}
Application.java
@SpringBootApplication
public class Application implements CommandLineRunner {

    @Autowired
    CustomerRepository repository;

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

    @Override
    public void run(String... strings) throws Exception {
        repository.save(new Customer("0", "Jack", "Bauer", 20L));
        repository.save(new Customer("1", "Chloe", "O'Brian", 21L));
        repository.save(new Customer("2", "Kim", "Bauer", 22L));
        repository.save(new Customer("3", "David", "Palmer", 23L));
        repository.save(new Customer("4", "Michelle", "Dessler", 24L));

        // fetch all customers
        System.out.println("Customers found with findAll():");
        System.out.println("-------------------------------");
        for (Customer customer : repository.findAll()) {
            System.out.println(customer);
        }

        // fetch customers by last name
        System.out.println("Customer found with findByLastName('Bauer'):");
        System.out.println("--------------------------------------------");
        for (Customer bauer : repository.findByLastName("Bauer")) {
            System.out.println(bauer);
        }

        System.out.println("Done.");
    }
}
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <groupId>mine.samples</groupId>
    <artifactId>spring-data-cassandra-test</artifactId>
    <version>0.1.0</version>
    <packaging>jar</packaging>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>1.2.5.RELEASE</version>
    </parent>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-cassandra</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-jpa</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-orm</artifactId>
        </dependency>
        <dependency>
            <groupId>javax.transaction</groupId>
            <artifactId>javax.transaction-api</artifactId>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
I get the following exception when I try to run the code:
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'application': Injection of autowired dependencies failed; nested exception is org.springframework.beans.factory.BeanCreationException: Could not autowire field: mine.samples.CustomerRepository mine.samples.Application.repository; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'customerRepository': Invocation of init method failed; nested exception is org.springframework.dao.InvalidDataAccessApiUsageException: declarative query methods are a todo
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessPropertyValues(AutowiredAnnotationBeanPostProcessor.java:334)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1210)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:537)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:476)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:303)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:299)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:755)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:757)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:480)
at org.springframework.boot.context.embedded.EmbeddedWebApplicationContext.refresh(EmbeddedWebApplicationContext.java:118)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:686)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:320)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:957)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:946)
at mine.samples.Application.main(Application.java:15)
Caused by: org.springframework.beans.factory.BeanCreationException: Could not autowire field: mine.samples.CustomerRepository mine.samples.Application.repository; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'customerRepository': Invocation of init method failed; nested exception is org.springframework.dao.InvalidDataAccessApiUsageException: declarative query methods are a todo
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:561)
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:88)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessPropertyValues(AutowiredAnnotationBeanPostProcessor.java:331)
... 16 common frames omitted
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'customerRepository': Invocation of init method failed; nested exception is org.springframework.dao.InvalidDataAccessApiUsageException: declarative query methods are a todo
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1574)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:539)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:476)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:303)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:299)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.findAutowireCandidates(DefaultListableBeanFactory.java:1120)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1044)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:942)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:533)
... 18 common frames omitted
Caused by: org.springframework.dao.InvalidDataAccessApiUsageException: declarative query methods are a todo
at org.springframework.data.cassandra.repository.support.CassandraRepositoryFactory$CassandraQueryLookupStrategy.resolveQuery(CassandraRepositoryFactory.java:115)
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.<init>(RepositoryFactorySupport.java:369)
at org.springframework.data.repository.core.support.RepositoryFactorySupport.getRepository(RepositoryFactorySupport.java:192)
at org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport.initAndReturn(RepositoryFactoryBeanSupport.java:239)
at org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport.afterPropertiesSet(RepositoryFactoryBeanSupport.java:225)
at org.springframework.data.cassandra.repository.support.CassandraRepositoryFactoryBean.afterPropertiesSet(CassandraRepositoryFactoryBean.java:62)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1633)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1570)
... 28 common frames omitted
The gist of the exception is that "declarative query methods are a todo". The documentation is a little unclear on what is and what isn't supported. Could anyone shed light on this? Thanks.

I was importing the wrong @Query in CustomerRepository.java. I fixed it by changing the import to:
import org.springframework.data.cassandra.repository.Query;
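For reference, the repository with the corrected import. The clashing annotation was most likely org.springframework.data.jpa.repository.Query, pulled in via the spring-data-jpa dependency in the POM above; this sketch assumes the rest of the interface is unchanged:

import java.util.List;

import org.springframework.data.cassandra.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;

public interface CustomerRepository extends CrudRepository<Customer, Long> {

    // Resolved as a declarative Cassandra query now that the
    // Cassandra-specific @Query annotation is used
    @Query("select * from customer where lastname = :lName")
    List<Customer> findByLastName(@Param("lName") String lName);

    List<Customer> findAll();
}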

Related

Injecting FacesContext with CDI

I have my Bean:
import java.io.Serializable;

import javax.enterprise.context.SessionScoped;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.inject.Inject;
import javax.inject.Named;
import javax.persistence.EntityManager;

import br.com.dropper.web.dao.UsuarioDAO;
import br.com.dropper.web.model.Usuario;
import br.com.dropper.web.util.JpaUtil;

@Named
@SessionScoped
public class LoginBean implements Serializable {

    private static final long serialVersionUID = 1L;

    @Inject
    private FacesContext context;

    @Inject
    private Usuario usuario;

    // TODO: persistence and transactions managed by EJB
    private EntityManager em = new JpaUtil().getEntityManager();
    private UsuarioDAO usuarioDAO = new UsuarioDAO(em);

    public Usuario getUsuario() {
        return usuario;
    }

    public String autenticar() {
        Usuario usuario = usuarioDAO.obterUsuarioPorEmail(this.usuario);
        if (usuario == null) {
            context.addMessage(null, new FacesMessage("Usuario não encontrado."));
            context.getExternalContext().getFlash().setKeepMessages(true);
            return "login?faces-redirect=true";
        } else {
            context.getExternalContext().getSessionMap().put("usuarioLogado", usuario);
            return "dashboardImagem.xhtml?faces-redirect=true";
        }
    }

    public String cadastrarUsuario() {
        System.out.println("Redirecionando para cadastroUsuario.xhtml");
        return "cadastroUsuario.xhtml?faces-redirect=true";
    }

    public String logout() {
        context.getExternalContext().getSessionMap().remove("usuarioLogado");
        context.getExternalContext().invalidateSession();
        return "login.xhtml?faces-redirect=true";
    }
}
And my Factory:
package br.com.dropper.web.factory;

import java.io.Serializable;

import javax.enterprise.inject.Produces;
import javax.faces.context.FacesContext;
import javax.faces.view.ViewScoped;

public class FacesContextFactory implements Serializable {

    private static final long serialVersionUID = 1L;

    @Produces
    @ViewScoped
    public FacesContext getFacesContext() {
        return FacesContext.getCurrentInstance();
    }
}
When I run my application, I get this exception:
Caused by: org.jboss.weld.exceptions.IllegalProductException: WELD-000053: Producers cannot declare passivating scope and return a non-serializable class: Producer for Producer Method [FacesContext] with qualifiers [@Any @Default] declared as [[BackedAnnotatedMethod] @Produces @ViewScoped public br.com.dropper.web.factory.FacesContextFactory.getFacesContext()] declared on Managed Bean [class br.com.dropper.web.factory.FacesContextFactory] with qualifiers [@Any @Default]
at br.com.dropper.web.factory.FacesContextFactory.getFacesContext(FacesContextFactory.java:16)
And when I change the producer method to @RequestScoped, my FacesContext is injected only once, and on my second XHTML page I get a NullPointerException =(
UPDATE: pom
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <groupId>br.com.dropper</groupId>
    <artifactId>dropper-web</artifactId>
    <version>0.1</version>
    <packaging>war</packaging>

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>javax</groupId>
                <artifactId>javaee-api</artifactId>
                <version>7.0</version>
                <scope>provided</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>
        <dependency>
            <groupId>javax</groupId>
            <artifactId>javaee-api</artifactId>
            <scope>provided</scope>
        </dependency>
        <!-- <dependency>
            <groupId>dropper-web</groupId>
            <artifactId>postgresql</artifactId>
            <version>9.4.1212</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/src/main/webapp/WEB-INF/lib/postgresql-9.4.1212.jre6.jar</systemPath>
        </dependency> -->
        <dependency>
            <groupId>dropper-web</groupId>
            <artifactId>bootstrap</artifactId>
            <version>1.0.10</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/src/main/webapp/WEB-INF/lib/bootstrap-1.0.10.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>dropper-web</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.5</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/src/main/webapp/WEB-INF/lib/commons-io-2.5.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>dropper-web</groupId>
            <artifactId>cupertino</artifactId>
            <version>1.0.10</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/src/main/webapp/WEB-INF/lib/cupertino-1.0.10.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>dropper-web</groupId>
            <artifactId>primefaces-6.0</artifactId>
            <version>6.0</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/src/main/webapp/WEB-INF/lib/primefaces-6.0.jar</systemPath>
        </dependency>
    </dependencies>

    <build>
        <finalName>dropper-web</finalName>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
The FacesContext itself is indeed not Serializable. And, more importantly, it is definitely not @ViewScoped; its lifespan is actually shorter than @RequestScoped. But until JSF 2.3 is released, which comes with improved CDI support so you can just @Inject FacesContext without a custom producer, you can more or less safely use @Produces @RequestScoped instead.
@Produces
@RequestScoped
public FacesContext getFacesContext() {
    return FacesContext.getCurrentInstance();
}
One case where this would still fail is when you explicitly perform a forward within the same request using RequestDispatcher#forward() or ExternalContext#dispatch(). You will then face java.lang.IllegalStateException at com.sun.faces.context.FacesContextImpl.assertNotReleased. This is however a very rare case and usually only performed in a badly designed (ajax) exception handler. See also a.o. using ExternalContext.dispatch in JSF error handler causes corrupt page rendering.
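With the producer above in place, any CDI-managed bean can then inject the context instead of calling FacesContext.getCurrentInstance() by hand. A minimal sketch mirroring the LoginBean above (the bean and method names here are illustrative):

import java.io.Serializable;

import javax.enterprise.context.SessionScoped;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.inject.Inject;
import javax.inject.Named;

@Named
@SessionScoped
public class SomeBean implements Serializable {

    private static final long serialVersionUID = 1L;

    // CDI injects a client proxy here; each call resolves the
    // current request's FacesContext via the @RequestScoped producer.
    @Inject
    private FacesContext context;

    public void action() {
        context.addMessage(null, new FacesMessage("It works."));
    }
}

Because @RequestScoped is a normal scope, injecting the product into a longer-lived @SessionScoped bean is safe: the proxy delegates to whichever FacesContext belongs to the request currently being processed.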

Facing issues with Cassandra proof of concept: Exception - com.datastax.driver.core.DataType.asJavaClass()Ljava/lang/Class;

I am trying a few proofs of concept to test it for time-series data. I took the sample from the Spring website and configured it in Eclipse.
Below are a few details:
Cassandra version - 3.0.9
Spring Data Cassandra version - 1.0.0.RELEASE
DataStax Java driver core - cassandra-driver-core-3.1.2
I am executing the below code:
Cluster cluster = Cluster.builder().addContactPoints("127.0.0.1").withPort(9042).build();
Session session = cluster.connect("axiaglobal");
CassandraOperations cassandraOps = new CassandraTemplate(session);
cassandraOps.insert(new Person("1234567890", "David", 40));
and Person.java is below:
@Table
public class Person {

    @PrimaryKey
    private String id;

    private String name;
    private int age;

    public Person(String id, String name, int age) {
        this.id = id;
        this.name = name;
        this.age = age;
    }

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public int getAge() {
        return age;
    }

    @Override
    public String toString() {
        return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
    }
}
While executing the code, I get the below exception:
Exception in thread "main" java.lang.NoSuchMethodError: com.datastax.driver.core.DataType.asJavaClass()Ljava/lang/Class;
at org.springframework.data.cassandra.mapping.CassandraSimpleTypeHolder.<clinit>(CassandraSimpleTypeHolder.java:62)
at org.springframework.data.cassandra.mapping.BasicCassandraMappingContext.<init>(BasicCassandraMappingContext.java:73)
at org.springframework.data.cassandra.convert.MappingCassandraConverter.<init>(MappingCassandraConverter.java:77)
at org.springframework.data.cassandra.core.CassandraTemplate.<init>(CassandraTemplate.java:75)
at com.axia.global.dao.cassandra.service.CassandraApp.main(CassandraApp.java:26)
Can someone please help me out with suggestions or pointers?
Changing the POM file as below solved my issue:
<dependency>
    <groupId>org.cassandraunit</groupId>
    <artifactId>cassandra-unit-spring</artifactId>
    <version>3.1.1.0</version>
    <scope>test</scope>
    <exclusions>
        <exclusion>
            <groupId>org.cassandraunit</groupId>
            <artifactId>cassandra-unit</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.cassandraunit</groupId>
    <artifactId>cassandra-unit</artifactId>
    <classifier>shaded</classifier>
    <version>3.1.1.0</version>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-core</artifactId>
    <version>3.1.1</version>
    <optional>true</optional>
    <!--<exclusions>-->
    <!--<exclusion>-->
    <!--<groupId>com.google.guava</groupId>-->
    <!--<artifactId>guava</artifactId>-->
    <!--</exclusion>-->
    <!--</exclusions>-->
</dependency>
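For background: the NoSuchMethodError here is a binary incompatibility. DataType.asJavaClass() existed in the 2.x DataStax driver but was removed in the 3.x line, while Spring Data Cassandra 1.0.0 builds against the 2.x API, so the driver and Spring Data Cassandra versions have to agree. If in doubt about which driver jar actually ends up on the runtime classpath, a small sketch using only standard JDK reflection can tell you (the class name is a hypothetical placeholder):

import com.datastax.driver.core.Cluster;

public class DriverVersionCheck {
    public static void main(String[] args) {
        // Implementation-Version from the driver jar's manifest
        // (may print null if the jar was repackaged without it).
        System.out.println(Cluster.class.getPackage().getImplementationVersion());
        // The jar the Cluster class was actually loaded from
        // (code source may be null under some classloaders).
        System.out.println(Cluster.class.getProtectionDomain().getCodeSource().getLocation());
    }
}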

JSF does not convert LinkedHashMap to the correct object (javax.el.MethodNotFoundException)

I have a JSF application, and everything is working fine, except for calling methods in the managed bean that take an object as a parameter:
<p:dataTable value="#{sContr.privileges}" var="privilege">
    <p:column>
        <h:outputText value="#{privilege.name}"/>
    </p:column>
    <p:column>
        <h:outputText value="#{privilege.description}"/>
    </p:column>
    <p:column>
        <h:commandButton action="#{sContr.deletePrivilege(privilege)}" image="somePath"/>
    </p:column>
</p:dataTable>
In the sContr:
public void deletePrivilege(Privilege privilege) {
    System.out.println("test");
}
All methods that do not take my own defined objects work (e.g. Maps, Strings, Lists, ...), and accessing the objects and even sub-objects in the XHTML works like a charm.
However, calling this deletePrivilege results in:
javax.servlet.ServletException: javax.el.MethodNotFoundException: [...]privileges.xhtml #31,138 action="#{sContr.deletePrivilege(privilege)}": Method not found: [...]sContrl#604b7816.deletePrivilege(java.util.LinkedHashMap)
javax.faces.webapp.FacesServlet.service(FacesServlet.java:659)
org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
[...]
The objects are defined in another module than the JSF application and are normal java objects (no specific JSF annotations).
So, for some reason, JSF can't auto-convert the LinkedHashMap back to an instance of the class that was used to render the page.
faces-config.xml:
<?xml version='1.0' encoding='UTF-8'?>
<faces-config xmlns="http://xmlns.jcp.org/xml/ns/javaee"
              xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
              xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee
                                  http://xmlns.jcp.org/xml/ns/javaee/web-facesconfig_2_2.xsd"
              version="2.2">

    <managed-bean>
        <managed-bean-name>currentDate</managed-bean-name>
        <managed-bean-class>java.util.Date</managed-bean-class>
        <managed-bean-scope>request</managed-bean-scope>
    </managed-bean>

    <application>
        <el-resolver>org.springframework.web.jsf.el.SpringBeanFacesELResolver</el-resolver>
    </application>
</faces-config>
The maven includes for JSF and primefaces:
<dependency>
    <groupId>com.sun.faces</groupId>
    <artifactId>jsf-api</artifactId>
    <version>2.2.6</version>
</dependency>
<dependency>
    <groupId>com.sun.faces</groupId>
    <artifactId>jsf-impl</artifactId>
    <version>2.2.6</version>
</dependency>
<dependency>
    <groupId>org.primefaces</groupId>
    <artifactId>primefaces</artifactId>
    <version>5.1</version>
</dependency>
<dependency>
    <groupId>org.primefaces.extensions</groupId>
    <artifactId>primefaces-extensions</artifactId>
    <version>2.1.0</version>
</dependency>
<dependency>
    <groupId>org.primefaces.themes</groupId>
    <artifactId>all-themes</artifactId>
    <version>1.0.10</version>
</dependency>
My object:
@Entity
public class Privilege implements IEntity {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Column()
    private Long id;

    @Column()
    private String name;

    @Column()
    private String description;

    public Privilege(String name) {
        this.name = name;
    }

    public Privilege(Long id, String name) {
        this.id = id;
        this.name = name;
    }

    public Privilege(Long id, String name, String description) {
        this(id, name);
        this.description = description;
    }

    public Privilege() {
    }

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(final String description) {
        this.description = description;
    }
}
Also to clarify: the LinkedHashMap contains the id, the name and the description (so it should have everything to build the correct object)
What could be the problem here? Do I need to define the package somehow in faces-config?
Using: Mojarra 2.2.6, primefaces 5.1, primefaces-extensions 2.1.0
This depends on how you are resolving the Privilege, i.e. how it gets deserialized. I had the same issue: I was deserializing a Device model from JSON using Jackson,

class Device {}

devices = objectMapper.readValue(responseBody, List.class);

and it gave exactly the same error. I solved it by deserializing like this:

devices = objectMapper.readValue(responseBody, new TypeReference<List<Device>>() {});

and it worked perfectly.
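For reference, a self-contained sketch of the difference (assuming Jackson's ObjectMapper is on the classpath; Device here is a hypothetical stand-in for your own model class):

import java.util.List;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DeserializationDemo {

    static class Device {
        public String name; // public field so Jackson can bind it without getters
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = "[{\"name\":\"router\"}]";

        // A raw List.class gives Jackson no element type, so each
        // JSON object is bound to a LinkedHashMap.
        List<?> raw = mapper.readValue(json, List.class);
        System.out.println(raw.get(0).getClass());   // class java.util.LinkedHashMap

        // A TypeReference carries the full generic type, so the
        // elements come back as Device instances.
        List<Device> typed = mapper.readValue(json, new TypeReference<List<Device>>() {});
        System.out.println(typed.get(0).getClass()); // class DeserializationDemo$Device
    }
}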

Spark Cassandra Connector Error

I am trying to connect to Cassandra from spark-shell and spark-submit, but both throw the same error.
Spark version: 1.2.0
Apache Cassandra version 2.1.1, connecting with Spark 1.2.0 using the DataStax Cassandra driver and connector (versions are listed in the POM file). Scala and Java programs that don't touch Cassandra work fine. Could someone please help resolve this error?
Error:
java.lang.AbstractMethodError
at org.apache.spark.Logging$class.log(Logging.scala:52)
at com.datastax.spark.connector.cql.CassandraConnector$.log(CassandraConnector.scala:144)
at org.apache.spark.Logging$class.logDebug(Logging.scala:63)
at com.datastax.spark.connector.cql.CassandraConnector$.logDebug(CassandraConnector.scala:144)
at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:151)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:151)
at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:36)
at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:61)
at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:73)
at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:98)
at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:109)
at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:131)
at com.datastax.spark.connector.rdd.CassandraRDD.tableDef$lzycompute(CassandraRDD.scala:206)
at com.datastax.spark.connector.rdd.CassandraRDD.tableDef(CassandraRDD.scala:205)
at com.datastax.spark.connector.rdd.CassandraRDD.<init>(CassandraRDD.scala:212)
at com.datastax.spark.connector.SparkContextFunctions.cassandraTable(SparkContextFunctions.scala:48)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:25)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
at $iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
at $iwC$$iwC$$iwC.<init>(<console>:42)
at $iwC$$iwC.<init>(<console>:44)
at $iwC.<init>(<console>:46)
at <init>(<console>:48)
at .<init>(<console>:52)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)
at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:828)
at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:873)
at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:785)
at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:628)
at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:636)
at org.apache.spark.repl.SparkILoop.loop(SparkILoop.scala:641)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:968)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Commands I tried in spark-shell:
scala> import com.datastax.spark.connector._
scala> val conf = new SparkConf()
scala> conf.set("cassandra.connection.host", "node1.pc.datastax.com")
scala> val sc = new SparkContext("local[2]", "Cassandra Connector Test", conf)
scala> val table = sc.cassandraTable("keyspace", "table")
scala> table.count
Java Code:
package com.madhes;
import com.datastax.driver.core.Session;
import com.datastax.spark.connector.cql.CassandraConnector;
import com.google.common.base.Optional;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import scala.Tuple2;
import java.io.Serializable;
import java.math.BigDecimal;
import java.text.MessageFormat;
import java.util.*;
import static com.datastax.spark.connector.CassandraJavaUtil.*;
public class App implements Serializable {
private transient SparkConf conf;
private App(SparkConf conf) {
this.conf = conf;
}
private void run() {
JavaSparkContext sc = new JavaSparkContext(conf);
generateData(sc);
compute(sc);
showResults(sc);
sc.stop();
}
private void generateData(JavaSparkContext sc) {
CassandraConnector connector = CassandraConnector.apply(sc.getConf());
// Prepare the schema
try (Session session = connector.openSession()) {
// session.execute("DROP KEYSPACE IF EXISTS java_api");
// session.execute("CREATE KEYSPACE java_api WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}");
// session.execute("CREATE TABLE java_api.products (id INT PRIMARY KEY, name TEXT, parents LIST<INT>)");
// session.execute("CREATE TABLE java_api.sales (id UUID PRIMARY KEY, product INT, price DECIMAL)");
// session.execute("CREATE TABLE java_api.summaries (product INT PRIMARY KEY, summary DECIMAL)");
}
// Prepare the products hierarchy
List<Product> products = Arrays.asList(
new Product(0, "All products", Collections.<Integer>emptyList()),
new Product(1, "Product A", Arrays.asList(0)),
new Product(4, "Product A1", Arrays.asList(0, 1)),
new Product(5, "Product A2", Arrays.asList(0, 1)),
new Product(2, "Product B", Arrays.asList(0)),
new Product(6, "Product B1", Arrays.asList(0, 2)),
new Product(7, "Product B2", Arrays.asList(0, 2)),
new Product(3, "Product C", Arrays.asList(0)),
new Product(8, "Product C1", Arrays.asList(0, 3)),
new Product(9, "Product C2", Arrays.asList(0, 3))
);
JavaRDD<Product> productsRDD = sc.parallelize(products);
javaFunctions(productsRDD, Product.class).saveToCassandra("java_api", "products");
JavaRDD<Sale> salesRDD = productsRDD.filter(new Function<Product, Boolean>() {
@Override
public Boolean call(Product product) throws Exception {
return product.getParents().size() == 2;
}
}).flatMap(new FlatMapFunction<Product, Sale>() {
@Override
public Iterable<Sale> call(Product product) throws Exception {
Random random = new Random();
List<Sale> sales = new ArrayList<>(1000);
for (int i = 0; i < 1000; i++) {
sales.add(new Sale(UUID.randomUUID(), product.getId(), BigDecimal.valueOf(random.nextDouble())));
}
return sales;
}
});
javaFunctions(salesRDD, Sale.class).saveToCassandra("java_api", "sales");
}
private void compute(JavaSparkContext sc) {
JavaPairRDD<Integer, Product> productsRDD = javaFunctions(sc)
.cassandraTable("java_api", "products", Product.class)
.keyBy(new Function<Product, Integer>() {
@Override
public Integer call(Product product) throws Exception {
return product.getId();
}
});
JavaPairRDD<Integer, Sale> salesRDD = javaFunctions(sc)
.cassandraTable("java_api", "sales", Sale.class)
.keyBy(new Function<Sale, Integer>() {
@Override
public Integer call(Sale sale) throws Exception {
return sale.getProduct();
}
});
JavaPairRDD<Integer, Tuple2<Sale, Product>> joinedRDD = salesRDD.join(productsRDD);
JavaPairRDD<Integer, BigDecimal> allSalesRDD = joinedRDD.flatMapToPair(new PairFlatMapFunction<Tuple2<Integer, Tuple2<Sale, Product>>, Integer, BigDecimal>() {
@Override
public Iterable<Tuple2<Integer, BigDecimal>> call(Tuple2<Integer, Tuple2<Sale, Product>> input) throws Exception {
Tuple2<Sale, Product> saleWithProduct = input._2();
List<Tuple2<Integer, BigDecimal>> allSales = new ArrayList<>(saleWithProduct._2().getParents().size() + 1);
allSales.add(new Tuple2<>(saleWithProduct._1().getProduct(), saleWithProduct._1().getPrice()));
for (Integer parentProduct : saleWithProduct._2().getParents()) {
allSales.add(new Tuple2<>(parentProduct, saleWithProduct._1().getPrice()));
}
return allSales;
}
});
JavaRDD<Summary> summariesRDD = allSalesRDD.reduceByKey(new Function2<BigDecimal, BigDecimal, BigDecimal>() {
@Override
public BigDecimal call(BigDecimal v1, BigDecimal v2) throws Exception {
return v1.add(v2);
}
}).map(new Function<Tuple2<Integer, BigDecimal>, Summary>() {
@Override
public Summary call(Tuple2<Integer, BigDecimal> input) throws Exception {
return new Summary(input._1(), input._2());
}
});
javaFunctions(summariesRDD, Summary.class).saveToCassandra("java_api", "summaries");
}
private void showResults(JavaSparkContext sc) {
JavaPairRDD<Integer, Summary> summariesRdd = javaFunctions(sc)
.cassandraTable("java_api", "summaries", Summary.class)
.keyBy(new Function<Summary, Integer>() {
@Override
public Integer call(Summary summary) throws Exception {
return summary.getProduct();
}
});
JavaPairRDD<Integer, Product> productsRdd = javaFunctions(sc)
.cassandraTable("java_api", "products", Product.class)
.keyBy(new Function<Product, Integer>() {
@Override
public Integer call(Product product) throws Exception {
return product.getId();
}
});
List<Tuple2<Product, Optional<Summary>>> results = productsRdd.leftOuterJoin(summariesRdd).values().toArray();
for (Tuple2<Product, Optional<Summary>> result : results) {
System.out.println(result);
}
}
public static void main(String[] args) {
if (args.length != 2) {
System.err.println("Syntax: com.datastax.spark.demo.JavaDemo <Spark Master URL> <Cassandra contact point>");
System.exit(1);
}
SparkConf conf = new SparkConf();
conf.setAppName("Java API demo");
conf.setMaster(args[0]);
conf.set("spark.cassandra.connection.host", args[1]);
App app = new App(conf);
app.run();
}
public static class Product implements Serializable {
private Integer id;
private String name;
private List<Integer> parents;
public Product() { }
public Product(Integer id, String name, List<Integer> parents) {
this.id = id;
this.name = name;
this.parents = parents;
}
public Integer getId() { return id; }
public void setId(Integer id) { this.id = id; }
public String getName() { return name; }
public void setName(String name) { this.name = name; }
public List<Integer> getParents() { return parents; }
public void setParents(List<Integer> parents) { this.parents = parents; }
@Override
public String toString() {
return MessageFormat.format("Product'{'id={0}, name=''{1}'', parents={2}'}'", id, name, parents);
}
}
public static class Sale implements Serializable {
private UUID id;
private Integer product;
private BigDecimal price;
public Sale() { }
public Sale(UUID id, Integer product, BigDecimal price) {
this.id = id;
this.product = product;
this.price = price;
}
public UUID getId() { return id; }
public void setId(UUID id) { this.id = id; }
public Integer getProduct() { return product; }
public void setProduct(Integer product) { this.product = product; }
public BigDecimal getPrice() { return price; }
public void setPrice(BigDecimal price) { this.price = price; }
@Override
public String toString() {
return MessageFormat.format("Sale'{'id={0}, product={1}, price={2}'}'", id, product, price);
}
}
public static class Summary implements Serializable {
private Integer product;
private BigDecimal summary;
public Summary() { }
public Summary(Integer product, BigDecimal summary) {
this.product = product;
this.summary = summary;
}
public Integer getProduct() { return product; }
public void setProduct(Integer product) { this.product = product; }
public BigDecimal getSummary() { return summary; }
public void setSummary(BigDecimal summary) { this.summary = summary; }
@Override
public String toString() {
return MessageFormat.format("Summary'{'product={0}, summary={1}'}'", product, summary);
}
}
}
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <groupId>com.madhes</groupId>
    <artifactId>App</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <!-- Spark Cassandra Connector -->
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.10</artifactId>
            <version>1.0.0</version>
        </dependency>
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector-java_2.10</artifactId>
            <version>1.0.0</version>
        </dependency>
        <!-- Spark -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>1.2.0</version>
        </dependency>
        <dependency>
            <groupId>net.jpountz.lz4</groupId>
            <artifactId>lz4</artifactId>
            <version>1.3.0</version>
        </dependency>
        <dependency>
            <groupId>com.datastax.cassandra</groupId>
            <artifactId>cassandra-driver-core</artifactId>
            <version>2.1.0</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.thrift</groupId>
                    <artifactId>libthrift</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
            <version>0.9.1</version>
        </dependency>
    </dependencies>
</project>
I faced the same issue and solved it by adding the following Maven dependency:
<dependency>
    <groupId>org.apache.cassandra</groupId>
    <artifactId>cassandra-all</artifactId>
    <version>1.2.6</version>
    <exclusions>
        <!-- <exclusion> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId>
        </exclusion> -->
        <exclusion>
            <groupId>com.googlecode.concurrentlinkedhashmap</groupId>
            <artifactId>concurrentlinkedhashmap-lru</artifactId>
        </exclusion>
        <exclusion>
            <groupId>com.ning</groupId>
            <artifactId>compress-lzf</artifactId>
        </exclusion>
        <exclusion>
            <groupId>io.netty</groupId>
            <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
            <groupId>jline</groupId>
            <artifactId>jline</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.cassandra.deps</groupId>
            <artifactId>avro</artifactId>
        </exclusion>
    </exclusions>
</dependency>

Running Liquibase with CDI on Wildfly 8

I am trying to run Liquibase scripts using CDI on WildFly 8.1.0.Final and I am getting this error:
Unsatisfied dependencies for type ResourceAccessor with qualifiers @LiquibaseType
My POM has these dependencies:
<dependencies>
    <dependency>
        <groupId>org.liquibase</groupId>
        <artifactId>liquibase-core</artifactId>
        <version>3.3.0</version>
    </dependency>
    <dependency>
        <groupId>org.liquibase</groupId>
        <artifactId>liquibase-cdi</artifactId>
        <version>3.3.0</version>
    </dependency>
    <dependency>
        <groupId>com.mattbertolini</groupId>
        <artifactId>liquibase-slf4j</artifactId>
        <version>1.2.1</version>
    </dependency>
</dependencies>
My CDI Bean is as follows:
import javax.annotation.Resource;
import javax.enterprise.inject.Produces;
import javax.sql.DataSource;

import liquibase.integration.cdi.CDILiquibaseConfig;
import liquibase.integration.cdi.annotations.LiquibaseType;
import liquibase.resource.ClassLoaderResourceAccessor;
import liquibase.resource.ResourceAccessor;

public class LiquibaseStarter {

    @Produces
    @LiquibaseType
    public CDILiquibaseConfig createConfig() {
        CDILiquibaseConfig config = new CDILiquibaseConfig();
        config.setChangeLog("liquibase/parser/core/xml/simpleChangeLog.xml");
        return config;
    }

    @Resource(name = "java:jboss/datasources/ExampleDS")
    private DataSource ds;

    @Produces
    @LiquibaseType
    public DataSource createDataSource() {
        return ds;
    }

    @Produces
    @LiquibaseType
    public ResourceAccessor create() {
        return new ClassLoaderResourceAccessor(getClass().getClassLoader());
    }
}
My project is a simple WAR. What am I doing wrong?
LiquibaseStarter has no bean-defining annotation. Add @Dependent at class level.
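A minimal sketch of the fix, assuming the rest of the class stays exactly as posted:

import javax.enterprise.context.Dependent;

@Dependent
public class LiquibaseStarter {
    // ... the three @Produces @LiquibaseType methods as above ...
}

In an implicit bean archive (no beans.xml, or one with bean-discovery-mode="annotated"), CDI only discovers classes that carry a bean-defining annotation, so without @Dependent the @Produces methods are never seen and the @LiquibaseType ResourceAccessor dependency stays unsatisfied.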
