Cannot create second Cassandra keyspace with network replication using Spring Data Cassandra

I have two keyspaces. One of them is created perfectly, but the other one is only created correctly with SimpleReplication. When I create it with network replication, the keyspace is created, but when I run
describe keyspace in cqlsh it returns:
'NoneType' object has no attribute 'export_for_schema'
However, the same keyspace with network replication can be created from cqlsh directly.
In the Spring logs everything looks OK:
[CREATE KEYSPACE IF NOT EXISTS a_events_local WITH replication = { 'class' : 'NetworkTopologyStrategy', 'dс1' : 3, 'dс2' : 3 }
My Cassandra Config
public abstract class CassandraConfig extends AbstractCassandraConfiguration {
#Value("${cassandra.host}")
private String host;
#Value("${cassandra.port}")
private int port;
#Value("${cassandra.user}")
private String usr;
#Value("${cassandra.psswd}")
private String psswd;
#Value("${cassandra.keyspace.a}")
private String keyspaceA;
#Value("${cassandra.keyspace.b}")
private String keyspaceB;
#Override
#Bean
public CassandraClusterFactoryBean cluster() {
CassandraClusterFactoryBean cluster =
new CassandraClusterFactoryBean();
cluster.setContactPoints(host);
cluster.setPort(port);
cluster.setUsername(usr);
cluster.setPassword(psswd);
cluster.setKeyspaceCreations(getKeyspaceCreations());
cluster.setJmxReportingEnabled(false);
return cluster;
}
@Override
protected String getContactPoints() {
return host;
}
@Override
protected int getPort() {
return port;
}
@Override
public SchemaAction getSchemaAction() {
return SchemaAction.NONE;
}
@Override
protected List<CreateKeyspaceSpecification> getKeyspaceCreations() {
return Arrays.asList(getAKeySpaceSpecification(), getBKeySpaceSpecification());
}
private CreateKeyspaceSpecification getAKeySpaceSpecification() {
return CreateKeyspaceSpecification.createKeyspace(keyspaceA)
.ifNotExists(true)
//.withSimpleReplication(3);
.withNetworkReplication(DataCenterReplication.of("dс1", 3) , DataCenterReplication.of("dс2", 3));
}
private CreateKeyspaceSpecification getBKeySpaceSpecification() {
return CreateKeyspaceSpecification.createKeyspace(keyspaceB)
.ifNotExists(true)
.withNetworkReplication(DataCenterReplication.of("dc1", 3), DataCenterReplication.of("dc2", 3));
}
}
Keyspace A config
@Configuration
@EnableCassandraRepositories(
cassandraTemplateRef = "keyspaceACassandraTemplate")
public class CassandraDDConfig extends CassandraConfig {
@Value("${cassandra.keyspace.a}")
private String keyspace;
@Value("${cassandra.a-entities-package}")
private String aEntityPackage;
@Override
@Bean("keyspaceDDSession")
public CassandraSessionFactoryBean session() {
CassandraSessionFactoryBean session = new CassandraSessionFactoryBean();
session.setCluster(cluster().getObject());
session.setConverter(cassandraConverter());
session.setKeyspaceName(getKeyspaceName());
session.setSchemaAction(getSchemaAction());
session.setStartupScripts(getStartupScripts());
session.setShutdownScripts(getShutdownScripts());
return session;
}
@Override
@Bean("keyspaceACassandraTemplate")
public CassandraAdminTemplate cassandraTemplate() throws Exception {
return new CassandraAdminTemplate(this.session().getObject(), cassandraConverter());
}
@Override
protected String getKeyspaceName() {
return keyspace;
}
@Override
public String[] getEntityBasePackages() {
return new String[] {aEntityPackage};
}
}
Keyspace B config
@Configuration
@EnableCassandraRepositories(
cassandraTemplateRef = "keyspaceBCassandraTemplate")
public class CassandraSparkConfig extends CassandraConfig {
@Value("${cassandra.keyspace.b}")
private String keyspace;
@Value("${cassandra.b-entities-package}")
private String dcEntityPackage;
@Override
@Bean("keyspaceBSession")
public CassandraSessionFactoryBean session() {
CassandraSessionFactoryBean session = new CassandraSessionFactoryBean();
session.setCluster(cluster().getObject());
session.setConverter(cassandraConverter());
session.setKeyspaceName(getKeyspaceName());
session.setSchemaAction(getSchemaAction());
session.setStartupScripts(getStartupScripts());
session.setShutdownScripts(getShutdownScripts());
return session;
}
@Override
@Bean("keyspaceBCassandraTemplate")
public CassandraAdminTemplate cassandraTemplate() throws Exception {
return new CassandraAdminTemplate(this.session().getObject(), cassandraConverter());
}
@Override
protected String getKeyspaceName() {
return keyspace;
}
@Override
public String[] getEntityBasePackages() {
return new String[] { dcEntityPackage };
}
}

In the end I used getStartupScripts to create the keyspaces, and it works: data is written to and read from the corresponding tables.
@Override
@Bean
public CassandraClusterFactoryBean cluster() {
CassandraClusterFactoryBean cluster =
new CassandraClusterFactoryBean();
cluster.setContactPoints(host);
cluster.setPort(port);
cluster.setUsername(usr);
cluster.setPassword(psswd);
cluster.setStartupScripts(getStartupScripts());
cluster.setJmxReportingEnabled(false);
cluster.setMetricsEnabled(false);
return cluster;
}
@Override
protected List<String> getStartupScripts() {
final String scriptCreateAkeyspace =
"CREATE KEYSPACE IF NOT EXISTS "
+ keyspaceA
+ " WITH durable_writes = true"
+ " AND replication = {'class' : 'NetworkTopologyStrategy', 'dc1' : 3, 'dc2' : 1 };";
final String scriptCreateSparkKeyspace = "CREATE KEYSPACE IF NOT EXISTS "
+ keyspaceB
+ " WITH durable_writes = true"
+ " AND replication = {'class' : 'NetworkTopologyStrategy', 'dc1' : 1, 'dc2' : 3 };";
return Arrays.asList(scriptCreateAkeyspace, scriptCreateSparkKeyspace);
}

Related

Connect to 2 Cassandra clusters in Spring Data

I am trying to connect to 2 different Cassandra clusters using Spring Data Cassandra, but it always uses only the first cluster's config. The second one is not taking effect. Any idea what I am doing wrong? This is the config I am using:
First cassandra cluster config:
@Configuration
@EnableCassandraRepositories(
basePackageClasses = SourceRepository.class
)
public class SourceCassandraConfig extends AbstractCassandraConfiguration {
@Override
public String getContactPoints() {
return "localhost";
}
@Override
public int getPort() {
return 9051;
}
@Override
protected String getKeyspaceName() {
return "source_keyspace";
}
}
Second cassandra cluster config:
@Configuration
@EnableCassandraRepositories(
basePackageClasses = TargetRepository.class,
cassandraTemplateRef = "targetCassandraTemplate"
)
public class TargetCassandraConfig extends AbstractCassandraConfiguration {
@Override
public String getContactPoints() {
return "localhost";
}
@Override
public int getPort() {
return 9052;
}
@Override
protected String getKeyspaceName() {
return "target_keyspace";
}
@Override
@Bean("targetSession")
public CassandraSessionFactoryBean session() throws ClassNotFoundException {
final CassandraSessionFactoryBean session = super.session();
session.setKeyspaceName(getKeyspaceName());
session.setCluster(cluster().getObject());
return session;
}
@Override
public CassandraCqlClusterFactoryBean cluster() {
CassandraCqlClusterFactoryBean cluster = super.cluster();
cluster.setContactPoints(contactPoints);
cluster.setPort(port);
return cluster;
}
#Bean("targetCassandraTemplate")
public CassandraAdminOperations cassandraTemplate(
#Qualifier("targetSession") final CassandraSessionFactoryBean session) throws Exception {
return new CassandraAdminTemplate(session.getObject(), cassandraConverter());
}
}
I always see that only the first cluster node is getting added
com.datastax.driver.core.Cluster : New Cassandra host localhost/127.0.0.1:9051 added
What am I doing wrong?
I spent 2 days debugging this and 10 mins after posting this question, I found the fix :)
I wasn't properly using the cluster bean that I created in the session bean. So I did the following and it worked:
@Override
@Bean("targetCassandraCluster")
public CassandraCqlClusterFactoryBean cluster() {
CassandraCqlClusterFactoryBean cluster = super.cluster();
cluster.setContactPoints(contactPoints);
cluster.setPort(port);
return cluster;
}
#Bean("targetCassandraSession")
public CassandraSessionFactoryBean session(
#Qualifier("targetCassandraCluster") final CassandraCqlClusterFactoryBean cluster
) throws ClassNotFoundException {
final CassandraSessionFactoryBean session = super.session();
session.setKeyspaceName(getKeyspaceName());
session.setCluster(cluster.getObject());
return session;
}
#Bean("targetCassandraTemplate")
public CassandraAdminOperations cassandraTemplate(
#Qualifier("targetCassandraSession") final CassandraSessionFactoryBean session) throws Exception {
return new CassandraAdminTemplate(session.getObject(), cassandraConverter());
}

SFTP @Poller not triggering polling, nothing happens

I am trying to set up a Spring Boot application which will poll a CSV file over SFTP. I do not see any activity happening in the Spring Boot application nor on the FileZilla SFTP server, but if I change the same code to FTP then it works.
@Component
@EnableIntegration
public class IntegrationConfiguration {
@Autowired
FTPConfigProperties ftpConfigProperties;
@Autowired
private BeanFactory beanFactory;
@Value("classpath:certificate.crt")
Resource certficateFile;
@Bean
public SessionFactory<ChannelSftp.LsEntry> ftpSessionFactory() {
DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory();
factory.setHost("127.0.0.1");
factory.setPort(990);
factory.setUser("abhinav");
factory.setPassword("nssdw");
factory.setPrivateKey(certficateFile);
factory.setAllowUnknownKeys(true);
return new CachingSessionFactory<ChannelSftp.LsEntry>(factory, 100000);
}
@Bean
public SftpInboundFileSynchronizer ftpInboundFileSynchronizer() {
SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(ftpSessionFactory());
fileSynchronizer.setDeleteRemoteFiles(false);
fileSynchronizer.setRemoteDirectory("/");
fileSynchronizer.setFilter(filter());
fileSynchronizer.setDeleteRemoteFiles(false);
fileSynchronizer.setPreserveTimestamp(true);
fileSynchronizer.setBeanFactory(beanFactory);
return fileSynchronizer;
}
//here the poller is configured
@Bean
@InboundChannelAdapter(channel = "fromSftpChannel", poller = @Poller(fixedDelay = "10000"))
public MessageSource<File> ftpMessageSource() throws Exception {
SftpInboundFileSynchronizingMessageSource source = new SftpInboundFileSynchronizingMessageSource(
ftpInboundFileSynchronizer());
source.setLocalDirectory(new File("ftp-inbound"));
source.setAutoCreateLocalDirectory(true);
source.setMaxFetchSize(1);
source.setBeanFactory(beanFactory);
source.setUseWatchService(true);
return source;
}
public CompositeFileListFilter<ChannelSftp.LsEntry> filter() {
CompositeFileListFilter<ChannelSftp.LsEntry> filter = new CompositeFileListFilter<ChannelSftp.LsEntry>();
filter.addFilter(new SftpSimplePatternFileListFilter("*.csv"));
filter.addFilter(acceptOnceFilter());
filter.addFilter(new LastModifiedLsEntryFileListFilter());
return filter;
}
@Bean
public SftpPersistentAcceptOnceFileListFilter acceptOnceFilter() {
SftpPersistentAcceptOnceFileListFilter filter = new SftpPersistentAcceptOnceFileListFilter(metadataStore(),"ftpPersistentAcceptOnce");
filter.setFlushOnUpdate(true);
return filter;
}
@Bean
public ConcurrentMetadataStore metadataStore() {
PropertiesPersistingMetadataStore propertiesPersistingMetadataStore = new PropertiesPersistingMetadataStore();
propertiesPersistingMetadataStore.setBaseDirectory("./metastore");
propertiesPersistingMetadataStore.setFileName("ftpStream.properties");
return propertiesPersistingMetadataStore;
}
@Bean
@ServiceActivator(inputChannel = "jobChannel", outputChannel = "nullChannel")
protected JobLaunchingMessageHandler launcher(JobLauncher jobLauncher) {
return new JobLaunchingMessageHandler(jobLauncher);
}
}
Here is the next piece, where I trigger the Spring Batch job; from there it goes to the service activator:
@Component
public class FileToJobTransformer implements ApplicationContextAware {
private ApplicationContext context;
@Autowired
private Job job;
@Transformer(inputChannel = "fromSftpChannel", outputChannel = "jobChannel")
public JobLaunchRequest transform(File aFile) throws Exception {
String fileName = aFile.getName();
JobParameters jobParameters = new JobParametersBuilder().addString(
"input.file", aFile.getAbsolutePath()).toJobParameters();
JobLaunchRequest request = new JobLaunchRequest(job, jobParameters);
return request;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.context = applicationContext;
}
}
The custom filter code is as follows:
public class LastModifiedLsEntryFileListFilter implements FileListFilter<ChannelSftp.LsEntry> {
private static final long DEFAULT_AGE = 60;
private volatile long age = DEFAULT_AGE;
public long getAge() {
return this.age;
}
public void setAge(long age) {
setAge(age, TimeUnit.SECONDS);
}
public void setAge(long age, TimeUnit unit) {
this.age = unit.toSeconds(age);
}
@Override
public List<ChannelSftp.LsEntry> filterFiles(ChannelSftp.LsEntry[] files) {
System.out.println("files = [" + files.length + "]");
List<ChannelSftp.LsEntry> list = new ArrayList<ChannelSftp.LsEntry>();
long now = System.currentTimeMillis() / 1000;
for (ChannelSftp.LsEntry file : files) {
if (file.getAttrs()
.isDir()) {
continue;
}
int lastModifiedTime = file.getAttrs()
.getMTime();
if (lastModifiedTime + this.age <= now) {
list.add(file);
}
}
Collections.reverse(list);
if (list.isEmpty()) {
return list;
}
ArrayList<ChannelSftp.LsEntry> oneElementList = new ArrayList<ChannelSftp.LsEntry>(1);
oneElementList.add(list.get(0));
return oneElementList;
}
}

Spring 4.1 to 4.2 migration: why does the persistence not work?

I used Spring 4.1.0 with Hibernate 4.3.6 and all was OK.
After migrating Spring to 4.2.8, persistence no longer works.
There is no exception and no trace: the persist method of the entity manager is called, but nothing reaches the database.
It's as if the transaction manager were not working.
This is my persistence configuration:
@Configuration
@EnableTransactionManagement
public class PersistenceConfiguration {
@Bean
public BasicDataSource driverManagerDataSource() {
final BasicDataSource dataSource = new BasicDataSource();
dataSource.setDriverClassName("com.mysql.jdbc.Driver");
dataSource.setUrl("jdbc:mysql://localhost:3306/xxx");
dataSource.setUsername("root");
dataSource.setPassword("root");
dataSource.setValidationQuery("SELECT 1");
dataSource.setDefaultAutoCommit(false);
dataSource.setInitialSize(10);
dataSource.setMaxActive(20);
dataSource.setMaxIdle(10);
return dataSource;
}
@Bean
public LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean() {
final LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
localContainerEntityManagerFactoryBean.setDataSource(driverManagerDataSource());
localContainerEntityManagerFactoryBean.setPersistenceUnitName("xxxPersistenceUnitName");
localContainerEntityManagerFactoryBean.setPackagesToScan("org.xxx.model");
localContainerEntityManagerFactoryBean.setJpaVendorAdapter(new org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter());
final HashMap<String, String> map = new HashMap<>();
map.put("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect");
map.put("hibernate.hbm2ddl.auto", "update");
map.put("hibernate.show_sql", "false");
map.put("hibernate.format_sql", "false");
localContainerEntityManagerFactoryBean.setJpaPropertyMap(map);
localContainerEntityManagerFactoryBean.setJpaDialect(new org.springframework.orm.jpa.vendor.HibernateJpaDialect());
return localContainerEntityManagerFactoryBean;
}
@Bean
public JpaTransactionManager transactionManager() {
final JpaTransactionManager jpaTransactionManager = new JpaTransactionManager();
jpaTransactionManager.setEntityManagerFactory(localContainerEntityManagerFactoryBean().getNativeEntityManagerFactory());
return jpaTransactionManager;
}
}
Dependency injection:
@Configuration
@Import({PersistenceConfiguration.class, UserConfiguration.class, SecurityConfiguration.class})
@ComponentScan(basePackages = "org.xxx")
@EnableWebMvc
public class XxxProjectConfiguration {
private static Logger LOG = Logger.getLogger(XxxProjectConfiguration.class);
@Autowired
private Environment env;
@PostConstruct
public void initApp() {
LOG.debug("Looking for Spring profiles...");
if (env.getActiveProfiles().length == 0) {
LOG.info("No Spring profile configured, running with default configuration.");
} else {
for (String profile : env.getActiveProfiles()) {
LOG.info("Detected Spring profile: {}" + profile);
}
}
}
@Autowired
private UserConfiguration userConfiguration;
// DAO
@Bean
public RelationshipDAO relationshipDAO() {
return new RelationshipDAOImpl();
}
@Bean
public RelationshipStatusDAO relationshipStatusDAO() {
return new RelationshipStatusDAOImpl();
}
@Bean
public MessageDAO messageDAO() {
return new MessageDAOImpl();
}
// Services
@Bean
public UserServiceImpl userService() {
return new UserServiceImpl(userConfiguration.userDAO(), relationshipDAO(), relationshipStatusDAO(), messageDAO());
}
}
And
@Configuration
@Import(PersistenceConfiguration.class)
public class UserConfiguration {
@Bean
public UserDAO userDAO() {
return new UserDAOImpl();
}
}
The service :
@Transactional(propagation=Propagation.SUPPORTS)
public class UserServiceImpl implements UserService, Serializable {
private static final long serialVersionUID = 1L;
private UserDAO userDAO;
private RelationshipDAO relationshipDAO;
private RelationshipStatusDAO relationshipStatusDAO;
private MessageDAO messageDAO;
public UserServiceImpl(final UserDAO userDAO, final RelationshipDAO relationshipDAO, final RelationshipStatusDAO relationshipStatusDAO, final MessageDAO messageDAO) {
this.userDAO = userDAO;
this.relationshipDAO = relationshipDAO;
this.relationshipStatusDAO = relationshipStatusDAO;
this.messageDAO = messageDAO;
}
@Override
@Transactional(propagation = Propagation.REQUIRED, rollbackFor = UserServiceException.class)
public RelationshipStatus wantsRelationship(final long fromUserId, final long toUserId) throws UserServiceException {
try {
final Relationship relationship = new Relationship(new Date());
User fromUser = userDAO.get(fromUserId);
User toUser = new User(toUserId);
relationship.getUsers().add(fromUser);
fromUser.getRelationships().add(relationship);
relationship.getUsers().add(toUser);
toUser.getRelationships().add(relationship);
relationship.setWantsFromUserId(fromUserId);
final Message message = new Message(fromUserId, "Hi ! My name is " + fromUser.getFirstName() + ", I want to meet you");
relationship.getMessages().add(message);
relationship.setStatus(new RelationshipStatus(Status.WANTS));
relationshipDAO.persist(relationship);
return relationship.getStatus();
} catch (Exception e) {
throw new UserServiceException(e);
}
}
...
}
I do not understand anything...
The missing code is:
@Bean
public PlatformTransactionManager transactionManager(EntityManagerFactory emf){
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(emf);
return transactionManager;
}

Spring Data support to create tables from Java entities

I am using Spring Data Cassandra to connect to a Cassandra database, with a configuration file extending AbstractCassandraConfiguration and overriding these methods:
@Override
public SchemaAction getSchemaAction() {
return SchemaAction.RECREATE_DROP_UNUSED;
}
@Override
public String[] getEntityBasePackages() {
return new String[] {"com.example"};
}
My aim is to create the tables automatically in Cassandra from the entities in the com.example package annotated with @Table.
For example:
package com.example;
import org.springframework.data.cassandra.mapping.PrimaryKey;
import org.springframework.data.cassandra.mapping.Table;
#Table(value="goal")
public class Goal {
@PrimaryKey
private int id;
private String description;
public Goal(int id, String description) {
this.id = id;
this.description = description;
}
public Goal() {
}
public int getId() {
return id;
}
public String getDescription() {
return description;
}
public void setId(int id) {
this.id = id;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public String toString() {
return "Goals [id=" + id + ", description=" + description + "]";
}
}
For this entity, with the given configuration, a table should get created during Spring initialization, but it fails to do so.
There is no exception; it just doesn't create anything in Cassandra.
Any help would be appreciated. Thanks.
I also faced a similar requirement and the following works for me:
@Bean
public CassandraClusterFactoryBean cluster() {
CassandraClusterFactoryBean cluster = new CassandraClusterFactoryBean();
cluster.setContactPoints("127.0.0.1");
cluster.setPort(9042);
return cluster;
}
public String[] getEntityBasePackages() {
return new String[] { "com.user.entity" };
}
protected String getKeyspaceName() {
return "user_db";
}
public SchemaAction getSchemaAction() {
return SchemaAction.CREATE_IF_NOT_EXISTS;
}
Once your context is up, you can check the table in Cassandra.
It is assumed your keyspace is already created in Cassandra.
Let me know if you also want to create the keyspace on the fly; a possible approach is sketched below.
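For reference, one way to create the keyspace on the fly is to override getKeyspaceCreations() in the same configuration class, as the first question in this thread does, and hand the result to the cluster factory bean. A minimal sketch under that assumption (the keyspace name user_db comes from the snippet above; SimpleStrategy with replication factor 1 is only an illustrative single-node choice, not taken from the question):
@Override
protected List<CreateKeyspaceSpecification> getKeyspaceCreations() {
// runs before schema creation, so the generated tables have a keyspace to land in
CreateKeyspaceSpecification spec = CreateKeyspaceSpecification.createKeyspace("user_db")
.ifNotExists(true)
.withSimpleReplication(1);
return Arrays.asList(spec);
}
For this to take effect, the cluster() bean also has to call cluster.setKeyspaceCreations(getKeyspaceCreations()) before returning, as the first configuration in this thread shows.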

Why is Cassandra throwing com.datastax.driver.core.exceptions.InvalidQueryException: Multiple definitions found for column?

Context:
I am running a JUnit test in Eclipse using embedded Cassandra to test my DAO class, which uses an Astyanax client configured over the Java driver. When the DAO inserts an object instance into Cassandra, I get this exception: com.datastax.driver.core.exceptions.InvalidQueryException: Multiple definitions found for column ..columnname
TestClass
public class LeaderBoardDaoTest {
private static LeaderBoardDao dao;
public static CassandraCQLUnit cassandraCQLUnit;
private String hostIp = "127.0.0.1";
private int port = 9142;
public Session session;
public Cluster cluster;
@BeforeClass
public static void startCassandra() throws IOException, TTransportException, ConfigurationException, InterruptedException {
System.setProperty("archaius.deployment.applicationId", "leaderboardapi");
System.setProperty("archaius.deployment.environment", "test");
EmbeddedCassandraServerHelper.startEmbeddedCassandra("cassandra.yaml");
// cassandraCQLUnit = new CassandraCQLUnit(new
// ClassPathCQLDataSet("simple.cql", "lbapi"), "cassandra.yaml");
Injector injector = Guice.createInjector(new TestModule());
dao = injector.getInstance(LeaderBoardDao.class);
}
@Before
public void load() {
cluster = new Cluster.Builder().withClusterName("leaderboardcassandra").addContactPoints(hostIp).withPort(port).build();
session = cluster.connect();
CQLDataLoader dataLoader = new CQLDataLoader(session);
dataLoader.load(new ClassPathCQLDataSet("simple.cql", "lbapi"));
session = dataLoader.getSession();
}
@Test
public void test() {
ResultSet result = session.execute("select * from mytable WHERE id='myKey01'");
Assert.assertEquals(result.iterator().next().getString("value"), "myValue01");
}
@Test
public void testInsert() {
LeaderBoard lb = new LeaderBoard();
lb.setName("name-1");
lb.setDescription("description-1");
lb.setActivityType(ActivityType.FUEL);
lb.setImage("http:/");
lb.setLbId(UUID.fromString("3F2504E0-4F89-41D3-9A0C-0305E82C3301"));
lb.setStartTime(new Date());
lb.setEndTime(new Date());
dao.insert(lb);
ResultSet resultSet = session.execute("select * from leaderboards WHERE leaderboardid='3F2504E0-4F89-41D3-9A0C-0305E82C3301'");
}
@After
public void clearCassandra() {
EmbeddedCassandraServerHelper.cleanEmbeddedCassandra();
}
@AfterClass
public static void stopCassandra() {
EmbeddedCassandraServerHelper.stopEmbeddedCassandra();
}
}
Class under test
@Singleton
public class LeaderBoardDao {
private static final Logger log = LoggerFactory.getLogger(LeaderBoardDao.class);
@Inject
private AstyanaxMutationsJavaDriverClient client;
private static final String END_TIME = "end_time";
private static final String START_TIME = "start_time";
private static final String IMAGE = "image";
private static final String ACTIVITY_TYPE = "activity_type";
private static final String DESCRIPTION = "description";
private static final String NAME = "name";
private static final String LEADERBOARD_ID = "leaderboardID";
private static final String COLUMN_FAMILY_NAME = "leaderboards";
private ColumnFamily<UUID, String> cf;
public LeaderBoardDao() throws ConnectionException {
cf = ColumnFamily.newColumnFamily(COLUMN_FAMILY_NAME, UUIDSerializer.get(), StringSerializer.get());
}
/**
* Writes the Leaderboard to the database.
*
* @param lb
*/
public void insert(LeaderBoard lb) {
try {
MutationBatch m = client.getKeyspace().prepareMutationBatch();
cf.describe(client.getKeyspace());
m.withRow(cf, lb.getLbId()).putColumn(LEADERBOARD_ID, UUIDUtil.asByteArray(lb.getLbId()), null).putColumn(NAME, lb.getName(), null).putColumn(DESCRIPTION, lb.getDescription(), null)
.putColumn(ACTIVITY_TYPE, lb.getActivityType().name(), null).putColumn(IMAGE, lb.getImage()).putColumn(START_TIME, lb.getStartTime()).putColumn(END_TIME, lb.getEndTime());
m.execute();
} catch (ConnectionException e) {
Throwables.propagate(e);
}
}
/**
* Reads leaderboard from database
*
* @param id
* @return {@link LeaderBoard}
*/
public LeaderBoard read(UUID id) {
OperationResult<ColumnList<String>> result;
LeaderBoard lb = null;
try {
result = client.getKeyspace().prepareQuery(cf).getKey(id).execute();
ColumnList<String> cols = result.getResult();
if (!cols.isEmpty()) {
lb = new LeaderBoard();
lb.setLbId(cols.getUUIDValue(LEADERBOARD_ID, null));
lb.setName(cols.getStringValue(NAME, null));
lb.setActivityType(ActivityType.valueOf(cols.getStringValue(ACTIVITY_TYPE, null)));
lb.setDescription(cols.getStringValue(DESCRIPTION, null));
lb.setEndTime(cols.getDateValue(END_TIME, null));
lb.setStartTime(cols.getDateValue(START_TIME, null));
lb.setImage(cols.getStringValue(IMAGE, null));
} else {
log.warn("read: is empty: no record found for " + id);
}
return lb;
} catch (ConnectionException e) {
log.error("failed to read from C*", e);
throw new RuntimeException("failed to read from C*", e);
}
}
}
When the Java driver throws an InvalidQueryException, it's rethrowing an error from Cassandra. The error "Multiple definitions found for column..." indicates that a column is mentioned more than once in an update statement. You can simulate it in cqlsh:
cqlsh> create table test(i int primary key);
cqlsh> insert into test (i, i) values (1, 2);
code=2200 [Invalid query] message="Multiple definitions found for column i"
I'm not familiar with Astyanax, but my guess is that it already adds the id to the query when you call withRow, so you don't need to add it again with putColumn. Try removing that call (second line in reformatted sample below):
m.withRow(cf, lb.getLbId())
.putColumn(LEADERBOARD_ID, UUIDUtil.asByteArray(lb.getLbId()), null)
... // other putColumn calls
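For illustration, the question's insert method with that single call removed might look like this; an untested sketch that reuses only the calls already shown above:
public void insert(LeaderBoard lb) {
try {
MutationBatch m = client.getKeyspace().prepareMutationBatch();
cf.describe(client.getKeyspace());
// the row key already carries the leaderboard id, so it is not written again as a column
m.withRow(cf, lb.getLbId())
.putColumn(NAME, lb.getName(), null)
.putColumn(DESCRIPTION, lb.getDescription(), null)
.putColumn(ACTIVITY_TYPE, lb.getActivityType().name(), null)
.putColumn(IMAGE, lb.getImage())
.putColumn(START_TIME, lb.getStartTime())
.putColumn(END_TIME, lb.getEndTime());
m.execute();
} catch (ConnectionException e) {
Throwables.propagate(e);
}
}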
