How do I insert into a table with a frozen type with the Java driver? - cassandra

I am trying to write a query to insert data in the table "users". How do I write the query to insert into users? And how do I write the code in Java?
CREATE TYPE address (
street text,
city text,
zip int
);
CREATE TABLE users (
login text PRIMARY KEY,
first_name text,
last_name text,
email text,
addresses map<text, address>
);
And how do I write the code below correctly in java?
// NOTE(review): this is the asker's non-compiling draft, reproduced verbatim.
public void insertHotels(String login, String first_name, String last_name, String email, addresses) { // BUG: parameter 'addresses' has no declared type
Cluster cluster = Cluster.builder()
.addContactPoints(serverIP)
.build();
try (Session session = cluster.connect("hotel_db")) {
String insert_query = "INSERT INTO users (login, first_name, last_name, email, address) VALUES (?, ?, ?, ?, ?);" // BUG: missing ';' statement terminator; also the column is 'addresses' in the schema, not 'address'
PreparedStatement pStatement = session.prepare(insert_query);
BoundStatement bStatement = pStatement.bind(login, first_name, last_name, email, address); // BUG: 'address' is undefined — the parameter is named 'addresses'
session.execute(bStatement);
}
}

I'm assuming you're using 4.15.0 version of Java driver here. I'm using DataStax Astra DB for my demonstration.
The table looks like,
CREATE TABLE payloadtest.users (
login text PRIMARY KEY,
addresses map<text, frozen<address>>,
email text,
first_name text,
last_name text
)...;
Your code will look like,
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.BoundStatement;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
import com.datastax.oss.driver.api.core.data.UdtValue;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
/**
 * Demonstrates inserting a row whose 'addresses' column is a
 * map&lt;text, frozen&lt;address&gt;&gt; using the DataStax Java driver 4.x
 * UdtValue API against an Astra DB keyspace.
 */
public class UdtValueExample {

    private static final String KEYSPACE = "payloadtest";
    private static final String UDT_ADDRESSES = "addresses";

    public static void main(String... args) {
        // Create the CqlSession object; try-with-resources closes it on exit.
        try (CqlSession session = CqlSession.builder()
                .withCloudSecureConnectBundle(Paths.get("./src/main/resources/secure-connect-payloadtest.zip"))
                //Org Admin Role
                .withAuthCredentials("CHANGE_ME", "AstraCS:CHANGE_ME")
                .build()) {

            // Resolve the user-defined type 'address' from the cluster metadata;
            // fail fast if the keyspace does not define it.
            UserDefinedType addressType = session.getMetadata()
                    .getKeyspace(KEYSPACE)
                    .flatMap(ks -> ks.getUserDefinedType("address"))
                    .orElseThrow(() -> new IllegalArgumentException("Missing UDT definition"));

            // Build the frozen<address> value and wrap it in the map the column expects.
            UdtValue address = addressType.newValue("some_street", "some_city", 12345);
            Map<String, UdtValue> addressesByKey = new HashMap<>();
            addressesByKey.put("key1", address);

            PreparedStatement insert = session.prepare(
                    "INSERT INTO payloadtest.users(login,first_name,last_name,email,addresses) VALUES (?,?,?,?,?)");
            BoundStatement bound = insert.boundStatementBuilder()
                    .setString("login", "some_login")
                    .setString("email", "some#email.com")
                    .setString("first_name", "first")
                    .setString("last_name", "last")
                    .setMap(UDT_ADDRESSES, addressesByKey, String.class, UdtValue.class)
                    .build();
            session.execute(bound);
        }
        System.out.println("COMPLETED");
    }
}
and results would look like,
token#cqlsh:payloadtest> select * from users;
login | addresses | email | first_name | last_name
------------+------------------------------------------------------------------+----------------+------------+-----------
some_login | {'key1': {street: 'some_street', city: 'some_city', zip: 12345}} | some#email.com | first | last
(1 rows)
Read References:
Documentation
Example repo

Related

Cassandra throwing CodecNotFoundException [bigint <-> java.util.Date]

I have a Java entity that has an attribute of Date type and I have a database table which stores the date attribute in a bigint column, but when I run the code it gives me this error:
com.datastax.driver.core.exceptions.CodecNotFoundException: Codec not found for requested operation: [bigint <-> java.util.Date]
Can you please help me with the exception cassandra is throwing and solution for that?
You are inserting java.util.Date to bigint column, that's why you are getting this error.
Use the getTime() method to get the time in milliseconds, which is a long, to insert into the bigint column.
Example :
Date date = ; // you have the date
long timeInMilis = date.getTime();
Use timeInMilis to insert into cassandra
or
you can change the column type bigint to timestamp, then you can insert java.util.Date directly, don't have to get times in milliseconds,
-------------------------------------
| CQL3 data type | Java type |
|-------------------|----------------|
| bigint | long |
|-------------------|----------------|
| timestamp | java.util.Date |
--------------------------------------
More on CQL - Java Mapping : https://docs.datastax.com/en/developer/java-driver/3.1/manual/#cql-to-java-type-mapping
I believe the issue is that you are attempting to store a java.util.Date object in a cql bigint. The type that maps to bigint in the java driver is a long (see 'CQL to Java type mapping' section of the docs).
Assuming you mean to store the epoch milliseconds in this column you have a few options.
Change the column type to timestamp which maps to java.util.Date (and is set/accessed via setTimestamp/getTimestamp).
Use setLong in conjunction with Date.getTime() to convert the Date to a long representing epoch milliseconds.
Create and register a custom codec that maps java.util.Date to bigint, i.e.:
import com.datastax.driver.core.*;
import java.util.Date;
public class CodecTest {
static class DateToBigintCodec extends MappingCodec<Date, Long> {
DateToBigintCodec() {
// creates a mapping from bigint <-> Date.
super(TypeCodec.bigint(), Date.class);
}
#Override
protected Date deserialize(Long value) {
return new Date(value);
}
#Override
protected Long serialize(Date value) {
return value.getTime();
}
}
public static void main(String args[]) {
TypeCodec<Date> codec = new DateToBigintCodec();
Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
try {
// register custom codec
cluster.getConfiguration().getCodecRegistry().register(codec);
Date date = new Date();
Session session = cluster.connect();
// insert Date value into column v, which is a bigint.
// schema:
// CREATE TABLE simple.tbl (k int PRIMARY KEY, v bigint)
PreparedStatement prepared = session.prepare("insert into simple.tbl (k, v) values (?, ?)");
BoundStatement bound = prepared.bind();
bound.setInt("k", 0);
bound.setTimestamp("v", date);
session.execute(bound);
// Retrieve column v as a Date.
Row row = session.execute("select v from simple.tbl").one();
System.out.println(row.getTimestamp("v"));
} finally {
cluster.close();
}
}
}
I have a tried and tested solution to this problem
Following is the code to insert or retrieve timestamp in Cassandra
import java.sql.Timestamp;
import com.datastax.driver.core.Session;
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Timestamp timeStamp= Timestamp.valueOf(df.format(new Date()));
PreparedStatement ps = session.prepare("SELECT id,time from keySpace.tableName where
timestamp >=? order by timestamp allow filtering");
BoundStatement bs = ps.bind(timeStamp);
ResultSet rs = session.execute(bs);
if (rs != null) {
for (Row row : rs) {
row.getTimestamp(time);
row.getString(id);
}}
Note:- session.execute(bs), if you guys want to know from where this session came refer the following link
How to connect Cassandra using Java class
https://stackoverflow.com/a/16871984/9292502

Cassandra UDT mapping issue

I am not able to map data which has UDT type.
The table is definition is the following.
CREATE TABLE IF NOT EXISTS members_data.Test(
priority int,
name text,
test_links list<frozen<TestLinks>>,
PRIMARY KEY(name)
);
The model is the following.
#JsonAutoDetect
#JsonSerialize
#Table(keyspace="members_data", caseSensitiveKeyspace=false, caseSensitiveTable=false, name="Test")
public class Test{
#Column(name="name", caseSensitive=false)
private String name;
#Column(name="priority", caseSensitive=false)
private int priority;
#Frozen
#Column(name="test_links", caseSensitive=false)
private List<TestLinks> test_links;
#JsonAutoDetect
#JsonSerialize
#UDT(keyspace = "members_data", name = "Testlinks")
public class TestLinks {
#Field(name = "test_link")
private String test_link;
#Field(name = "link_title")
private String link_title;
The mapper usage.
MappingManager manager = new MappingManager(sessionManager.getSession());
manager.udtCodec(TestLinks.class);
Mapper<Test> mapper = manager.mapper(Test.class);
Result<Test> result = mapper.map(testResultSet);
test = result.one(); //test object would be null here
the cassandra-driver-mapping is 3.1.0.
Mapper is not throwing any error and not even mapping data to the model. Could someone tell me what the problem is?

How to LeftJoin to the same table twice using ServiceStack OrmLite?

I have table structures that look like below:
table Tenant: Id[PK], etc
table Contact: Id[PK], FirstName, LastName etc
table Sale: Id[PK], TenantId[FK], SellerId[FK], BuyerId[FK], etc
SellerId is a FK to Contact.Id
BuyerId is a FK to Contact.Id
TenantId is a FK to Tenant.Id
I want to use OrmLite to generate SQL similar to below:
select sale.*
, buyer.FirstName 'BuyerFirstName'
, buyer.LastName 'BuyerLastName'
, seller.FirstName 'SellerFirstName'
, seller.LastName 'SellerLastName'
from sale
left join
contact seller
on sale.SellerId = seller.Id
left join
contact buyer
on sale.BuyerId = buyer.Id
where tenantId = 'guid' -- this is being filtered at a global level
Because I want to have a strongly typed global filter to filter out result by tenantId (on database side) I have code looks like below
// Applies the global tenant filter to the given query and executes it, so every
// caller is automatically scoped to the current tenant on the database side.
// TIn:  the query's source table type (must expose a TenantId via IHaveTenantId).
// TOut: the POCO type each result row is projected into.
public List<TOut> Exec<TIn, TOut>(SqlExpression<TIn> exp) where TIn : IHaveTenantId
{
exp.Where(x => x.TenantId == _tenantId);
return _conn.Select<TOut>(exp);
}
The poco of Sale looks like below:
// Sale row: references Contact twice (buyer and seller), which is why the
// query needs to join the same Contact table under two different aliases.
public class Sale : IHaveTenantId
{
public Guid Id { get; set; }
[ForeignKey(typeof(Contact), OnDelete = "CASCADE")]
public Guid BuyerId { get; set; }
[ForeignKey(typeof(Contact), OnDelete = "CASCADE")]
public Guid SellerId { get; set; }
//etc
}
And I'm trying to use strongly typed LeftJoin syntax like below:
// Projection POCO the joined result set is mapped into; property names must
// match the aliased columns (e.g. BuyerFirstName) produced by the SELECT.
public class SaleView
{
public Guid Id { get; set; }
public string BuyerFirstName { get; set; }
public string SellerLastName { get; set; }
//etc
}
var result = Exec<SaleView, Sale>(_conn
.From<Sale>()
.LeftJoin<Contact>((sale, seller) => sale.SellerId == seller.Id)
.LeftJoin<Contact>((sale, buyer) => sale.BuyerId == buyer.Id));
I couldn't figure out how to join the same table multiple times and have an alias per join (e.g. left join contact as 'seller', hence I can select seller.FirstName, buyer.FirstName) and I don't want to use parameterised raw sql.
Is this possible at all with OrmLite?
Support for typed JOIN aliases were added in v4.0.62, e.g:
var q = db.From<Sale>()
.LeftJoin<ContactIssue>((s,c) => s.SellerId == c.Id, db.JoinAlias("seller"))
.LeftJoin<ContactIssue>((s,c) => s.BuyerId == c.Id, db.JoinAlias("buyer"))
.Select<Sale, ContactIssue>((s,c) => new {
s,
BuyerFirstName = Sql.JoinAlias(c.FirstName, "buyer"),
BuyerLastName = Sql.JoinAlias(c.LastName, "buyer"),
SellerFirstName = Sql.JoinAlias(c.FirstName, "seller"),
SellerLastName = Sql.JoinAlias(c.LastName, "seller"),
});
Prior to v4.0.62 you can continue to use a Typed SqlExpression with custom SQL for this, e.g:
var q = db.From<Sale>()
.CustomJoin("LEFT JOIN Contact seller ON (Sale.SellerId = seller.Id)")
.CustomJoin("LEFT JOIN Contact buyer ON (Sale.BuyerId = buyer.Id)")
.Select(#"Sale.*
, buyer.FirstName AS BuyerFirstName
, buyer.LastName AS BuyerLastName
, seller.FirstName AS SellerFirstName
, seller.LastName AS SellerLastName");
The benefit of which is that it still leaves a Typed API where you can add additional filters like a global TenantId filter, e.g:
q.Where(x => x.TenantId == tenantId);
And then project it into your Custom POCO with:
var sales = db.Select<SaleView>(q);
The new CustomJoin API is available from v4.0.37+ that's now available on MyGet.

How to use static columns from astyanax

Is there a way to create and use following column family with Astyanax:
CREATE TABLE mytest ( id text, subid text, clustering text, static_value text static, value text, PRIMARY KEY((id, subid), clustering));
If not, what are the best options for static columns?
The Astyanax Getting Started section contains a section on how to ensure proper serialization by annotating key fields with the "ordinal" keyword:
// Annotated composite class
// NOTE(review): excerpt from the Astyanax docs, kept verbatim. The scrape
// mangled '@' into '#' (these should read @Component) and capitalized the
// 'class' keyword; method bodies were elided ('...') in the original excerpt.
Class SessionEvent{
private #Component(ordinal=0) String sessiondId;
private #Component(ordinal=1) UUID timestamp;
public SessionEvent() {
}
public int hashCode() { ... }
public boolean equals(Object o) { ... }
public int compareTo(Object o) { ... }
}
Otherwise, the Astyanax repo also has an example showing how to work directly with CQL3. To create your CF:
String CREATE_STATEMENT = "CREATE TABLE mytest ( id text, subid text, clustering text, static_value text static, value text, PRIMARY KEY((id, subid), clustering))";
try {
#SuppressWarnings("unused")
OperationResult<CqlResult<Integer, String>> result = keyspace
.prepareQuery(EMP_CF)
.withCql(CREATE_STATEMENT)
.execute();
} catch (ConnectionException e) {
logger.error("failed to create CF", e);
throw new RuntimeException("failed to create CF", e);
}
The (CQL3) link above also contains example methods that demonstrate reading and inserting as well.

How to execute ranged query in cassandra with astyanax and composite column

I am developing a blog using cassandra and astyanax. It is only an exercise of course.
I have modelled the CF_POST_INFO column family in this way:
private static class PostAttribute {
#Component(ordinal = 0)
UUID postId;
#Component(ordinal = 1)
String category;
#Component
String name;
public PostAttribute() {}
private PostAttribute(UUID postId, String category, String name) {
this.postId = postId;
this.category = category;
this.name = name;
}
public static PostAttribute of(UUID postId, String category, String name) {
return new PostAttribute(postId, category, name);
}
}
private static AnnotatedCompositeSerializer<PostAttribute> postSerializer = new AnnotatedCompositeSerializer<>(PostAttribute.class);
private static final ColumnFamily<String, PostAttribute> CF_POST_INFO =
ColumnFamily.newColumnFamily("post_info", StringSerializer.get(), postSerializer);
And a post is saved in this way:
MutationBatch m = keyspace().prepareMutationBatch();
ColumnListMutation<PostAttribute> clm = m.withRow(CF_POST_INFO, "posts")
.putColumn(PostAttribute.of(post.getId(), "author", "id"), post.getAuthor().getId().get())
.putColumn(PostAttribute.of(post.getId(), "author", "name"), post.getAuthor().getName())
.putColumn(PostAttribute.of(post.getId(), "meta", "title"), post.getTitle())
.putColumn(PostAttribute.of(post.getId(), "meta", "pubDate"), post.getPublishingDate().toDate());
for(String tag : post.getTags()) {
clm.putColumn(PostAttribute.of(post.getId(), "tags", tag), (String) null);
}
for(String category : post.getCategories()) {
clm.putColumn(PostAttribute.of(post.getId(), "categories", category), (String)null);
}
the idea is to have some row like bucket of some time (one row per month or year for example).
Now if I want to get the last 5 posts for example, how can I do a range query for that? I can execute a range query based on the post id (UUID) but I don't know the available post ids without doing another query to get them. What are the Cassandra best practices here?
Any suggestion about the data model is welcome of course, I'm very newbie to cassandra.
If your use case works the way I think it works you could modify your PostAttribute so that the first component is a TimeUUID that way you can store it as time series data and you'd easily be able to pull the oldest 5 or newest 5 using the standard techniques. Anyway...here's a sample of what it would look like to me since you don't really need to make multiple columns if you're already using composites.
public class PostInfo {
#Component(ordinal = 0)
protected UUID timeUuid;
#Component(ordinal = 1)
protected UUID postId;
#Component(ordinal = 2)
protected String category;
#Component(ordinal = 3)
protected String name;
#Component(ordinal = 4)
protected UUID authorId;
#Component(ordinal = 5)
protected String authorName;
#Component(ordinal = 6)
protected String title;
#Component(ordinal = 7)
protected Date published;
public PostInfo() {}
private PostInfo(final UUID postId, final String category, final String name, final UUID authorId, final String authorName, final String title, final Date published) {
this.timeUuid = TimeUUIDUtils.getUniqueTimeUUIDinMillis();
this.postId = postId;
this.category = category;
this.name = name;
this.authorId = authorId;
this.authorName = authorName;
this.title = title;
this.published = published;
}
public static PostInfo of(final UUID postId, final String category, final String name, final UUID authorId, final String authorName, final String title, final Date published) {
return new PostInfo(postId, category, name, authorId, authorName, title, published);
}
}
private static AnnotatedCompositeSerializer<PostInfo> postInfoSerializer = new AnnotatedCompositeSerializer<>(PostInfo.class);
private static final ColumnFamily<String, PostInfo> CF_POSTS_TIMELINE =
ColumnFamily.newColumnFamily("post_info", StringSerializer.get(), postInfoSerializer);
You should save it like this:
MutationBatch m = keyspace().prepareMutationBatch();
ColumnListMutation<PostInfo> clm = m.withRow(CF_POSTS_TIMELINE, "all" /* or whatever makes sense for you such as year or month or whatever */)
.putColumn(PostInfo.of(post.getId(), post.getCategory(), post.getName(), post.getAuthor().getId(), post.getAuthor().getName(), post.getTitle(), post.getPublishedOn()), /* maybe just null bytes as column value */)
m.execute();
Then you could query like this:
OperationResult<ColumnList<PostInfo>> result = getKeyspace()
.prepareQuery(CF_POSTS_TIMELINE)
.getKey("all" /* or whatever makes sense like month, year, etc */)
.withColumnRange(new RangeBuilder()
.setLimit(5)
.setReversed(true)
.build())
.execute();
ColumnList<PostInfo> columns = result.getResult();
for (Column<PostInfo> column : columns) {
// do what you need here
}

Resources