I followed the example given in the docs but the following fails with Codec not found for requested operation: [varchar <-> java.util.UUID]. How is one supposed to provide a custom Cassandra codec with Quill?
import java.util.UUID
import javax.inject.Inject
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import io.getquill.{CassandraAsyncContext, LowerCase, MappedEncoding}
import scala.concurrent.duration._
object Main extends App {
case class MyOrder(id: String, uuid: UUID)
trait OrderRepository {
implicit val encodeUUID: MappedEncoding[UUID, String] = MappedEncoding[UUID, String](_.toString)
implicit val decodeUUID: MappedEncoding[String, UUID] = MappedEncoding[String, UUID](UUID.fromString)
def list(): Future[Iterable[MyOrder]]
def get(id: String): Future[Option[MyOrder]]
}
class OrderRepositoryImpl(db: CassandraAsyncContext[LowerCase]) extends OrderRepository {
import db._
override def list(): Future[Iterable[MyOrder]] = {
run(quote(query[MyOrder]))
}
override def get(id: String): Future[Option[MyOrder]] = {
val q = quote {
(id: String) =>
query[MyOrder].filter(_.id == id)
}
val res = run(q(lift(id.toString)))
res.map(_.headOption)
}
}
val db = new CassandraAsyncContext[LowerCase]("db")
val repo = new OrderRepositoryImpl(db)
val maybeOrder = Await.result(repo.get("id1"), 2.seconds)
println(s"maybeOrder $maybeOrder")
}
and the Cassandra DDL:
CREATE TABLE myorder (
id text,
uuid text,
PRIMARY KEY (id)
);
INSERT INTO myorder (
id,
uuid
) values (
'id1',
'a8a8a416-2436-4e18-82b3-d5881c8fec1a'
);
Obviously I can just create a class whose uuid field is of type String, but the goal here is to figure out how to use custom decoders.
I have a test task to create a weather app that shows the weather in the user's city by geolocation.
I use the following stack: Dagger, Room, Retrofit, and Coroutines.
I get an error in my DAO class.
I created a database because I want the app to remember the weather state when searching for a city (not implemented yet).
I don't know how to fix the error, so I hope you can help me.
package di
import Repository.ForecastRepositoryImpl
import android.content.Context
import android.os.Build
import androidx.annotation.RequiresApi
import androidx.room.Room
import com.google.android.gms.location.FusedLocationProviderClient
import const.URL_CURRENT
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.android.qualifiers.ApplicationContext
import dagger.hilt.components.SingletonComponent
import db.CurrentWeatherDao
import db.ForecastDb
import db.FutureWeatherDao
import locationprovider.LocationProvider
import locationprovider.LocationProviderImpl
import okhttp3.OkHttpClient
import retrofit.*
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import viewmodel.WeatherForeCastViewModels.FactoryCurrentVM
import viewmodel.WeatherForeCastViewModels.ViewmodelFutureWeather.FutureListViewModelFactory
import javax.inject.Singleton
@Module
@InstallIn(SingletonComponent::class)
object WeatherModule {

    @Provides
    @Singleton
    fun providedaocurrent(appdatabase: ForecastDb): CurrentWeatherDao {
        return appdatabase.currentWeatherDao()
    }

    @RequiresApi(Build.VERSION_CODES.M)
    @Provides
    @Singleton
    fun providesOkHttpClient(connectivityInterceptorImpl: ConnectivityInterceptorImpl) =
        OkHttpClient
            .Builder()
            .addInterceptor(connectivityInterceptorImpl)
            .build()

    @Provides
    @Singleton
    fun provideRetrofit(okHttpClient: OkHttpClient) = Retrofit.Builder()
        .addConverterFactory(GsonConverterFactory.create())
        .baseUrl(URL_CURRENT)
        .client(okHttpClient)
        .build()

    @Provides
    @Singleton
    fun provideWeatherApiService(retrofit: Retrofit) =
        retrofit.create(WeatherServiceAPI::class.java)

    @Provides
    @Singleton
    fun provideweathernetworkdatasource(weatherNetworkDataSource: WeatherNetworkDataSource): WeatherNetworkDataSource {
        return weatherNetworkDataSource
    }

    @Provides
    @Singleton
    fun providerepository(repositoryImpl: ForecastRepositoryImpl): ForecastRepositoryImpl {
        return repositoryImpl
    }

    @Provides
    @Singleton
    fun providevm(vm: FactoryCurrentVM): FactoryCurrentVM {
        return vm
    }

    @Provides
    @Singleton
    fun providelocationprovider(locationProviderImpl: LocationProviderImpl): LocationProvider {
        return locationProviderImpl
    }

    @Provides
    @Singleton
    fun provideforecastdb(@ApplicationContext appContext: Context): ForecastDb {
        return Room.databaseBuilder(
            appContext,
            ForecastDb::class.java,
            "forecast.db"
        ).build()
    }

    @Provides
    @Singleton
    fun providefusedlocationproviderclient(fusedLocationProviderClient: FusedLocationProviderClient): FusedLocationProviderClient {
        return fusedLocationProviderClient
    }

    @Provides
    @Singleton
    fun providefutureweatherdao(appdb: ForecastDb): FutureWeatherDao {
        return appdb.futureweatherdao()
    }

    @Provides
    @Singleton
    fun providevmfactoryfuture(vm: FutureListViewModelFactory): FutureListViewModelFactory {
        return vm
    }
}
package db

import android.content.Context
import androidx.room.*
import db.entities.Current
import db.entities.FutureWeatherEntry
import db.entities.WeatherLocation

@Database(entities = [Current::class, WeatherLocation::class, FutureWeatherEntry::class], version = 1)
@TypeConverters(LocaleDateConverter::class)
abstract class ForecastDb : RoomDatabase() {
    abstract fun futureweatherdao(): FutureWeatherDao
    abstract fun currentWeatherDao(): CurrentWeatherDao
    abstract fun weatherLocationDao(): WeatherLocationDao

    companion object {
        @Volatile private var instance: ForecastDb? = null
        private val lock = Any()

        operator fun invoke(context: Context) = instance ?: synchronized(lock) {
            instance ?: builddb(context).also { instance = it }
        }

        private fun builddb(context: Context) =
            Room.databaseBuilder(context.applicationContext, ForecastDb::class.java, "forecast.db").build()
    }
}
package db

import androidx.lifecycle.LiveData
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import db.entities.FutureWeatherEntry
import db.unitlocalized.future.MetricSimpleFutureWeatherEntry
import java.time.LocalDate

@Dao
interface FutureWeatherDao {
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun insert(futureweatherEntries: List<FutureWeatherEntry>)

    @Query("select * from future_weather where date >= :startDate")
    fun getsimpleweatherforecastmetric(startDate: LocalDate): LiveData<List<MetricSimpleFutureWeatherEntry>> // error on this line

    @Query("select count(*) from future_weather where date >= :startDate")
    fun countfutureweather(startDate: LocalDate): Int

    @Query("delete from future_weather where date < :firstDateToKeep")
    fun deleteOldEntries(firstDateToKeep: LocalDate)
}

@Dao
interface CurrentWeatherDao {
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun upsert(weather: Current)

    @Query("select * from current_weather where id = $Current_Weather_id and isDay = :date")
    fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather> // error here
}
const val Current_Weather_id = 0

@Entity(tableName = "current_weather")
data class Current(
    val location: String,
    val cloud: Int,
    @Embedded(prefix = "condition_")
    val condition: Condition,
    @SerializedName("feelslike_c")
    val feelslikeC: Double,
    @SerializedName("feelslike_f")
    val feelslikeF: Double,
    @SerializedName("gust_kph")
    val gustKph: Double,
    @SerializedName("gust_mph")
    val gustMph: Double,
    val humidity: Int,
    @SerializedName("is_day")
    val isDay: Int,
    @SerializedName("last_updated")
    val lastUpdated: String,
    @SerializedName("last_updated_epoch")
    val lastUpdatedEpoch: Int,
    @SerializedName("precip_in")
    val precipIn: Double,
    @SerializedName("precip_mm")
    val precipMm: Double,
    @SerializedName("pressure_in")
    val pressureIn: Double,
    @SerializedName("pressure_mb")
    val pressureMb: Double,
    @SerializedName("temp_c")
    val tempC: Double,
    @SerializedName("temp_f")
    val tempF: Double,
    val uv: Double,
    @SerializedName("vis_km")
    val visKm: Double,
    @SerializedName("vis_miles")
    val visMiles: Double,
    @SerializedName("wind_degree")
    val windDegree: Int,
    @SerializedName("wind_dir")
    val windDir: String,
    @SerializedName("wind_kph")
    val windKph: Double,
    @SerializedName("wind_mph")
    val windMph: Double
) {
    @PrimaryKey
    var id: Int = Current_Weather_id
}
#Entity(tableName = "future_weather", indices = [Index(value = ["date"], unique = true )])
data class FutureWeatherEntry(
#PrimaryKey(autoGenerate = false)
val id: Int? = null,
val date: String,
#Embedded
val day: Day
)
data class CurrentWeatherResponse(
#SerializedName("current")
val current: Current,
val location: WeatherLocation
)
data class Day(
val avghumidity: Double,
#SerializedName("avgtemp_c")
val avgtempC: Double,
#SerializedName("avgtemp_f")
val avgtempF: Double,
#SerializedName("avgvis_km")
val avgvisKm: Double,
#SerializedName("avgvis_miles")
val avgvisMiles: Double,
#Embedded(prefix = "condition_")
val condition: Condition,
#SerializedName("daily_chance_of_rain")
val dailyChanceOfRain: Int,
#SerializedName("daily_chance_of_snow")
val dailyChanceOfSnow: Int,
#SerializedName("daily_will_it_rain")
val dailyWillItRain: Int,
#SerializedName("daily_will_it_snow")
val dailyWillItSnow: Int,
#SerializedName("maxtemp_c")
val maxtempC: Double,
#SerializedName("maxtemp_f")
val maxtempF: Double,
#SerializedName("maxwind_kph")
val maxwindKph: Double,
#SerializedName("maxwind_mph")
val maxwindMph: Double,
#SerializedName("mintemp_c")
val mintempC: Double,
#SerializedName("mintemp_f")
val mintempF: Double,
#SerializedName("totalprecip_in")
val totalprecipIn: Double,
#SerializedName("totalprecip_mm")
val totalprecipMm: Double,
#SerializedName("totalsnow_cm")
val totalsnowCm: Double,
val uv: Double
)
const val weather_location_id = 0
#Entity(tableName = "weather_location")
data class WeatherLocation(
val country: String,
val lat: Double,
val localtime: String,
#SerializedName("localtime_epoch")
val localtimeEpoch: Long,
val lon: Double,
val name: String,
val region: String,
#SerializedName("tz_id")
val tzId: String
){
#PrimaryKey(autoGenerate = false)
var id : Int= weather_location_id
val zonedDateTime:ZonedDateTime
#RequiresApi(Build.VERSION_CODES.O)
get() {
val instant = Instant.ofEpochSecond(localtimeEpoch)
val zoneId = ZoneId.of(tzId)
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
ZonedDateTime.ofInstant(instant,zoneId)
} else {
TODO("VERSION.SDK_INT < O")
}
}
}
You are trying to retrieve a MetricCurrentWeather object, which, according to Room, requires values for the fields date and temp from output columns so named.
As it stands, the query
@Query("select * from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather> // error here
will extract data with fields sufficient to create a Current object (i.e. the Current class, aka the current_weather table, has columns and thus fields temp_c, temp_f and so on).
So you could use:
@Query("select * from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<Current>
If you need MetricCurrentWeather to be returned, then you need to alter the query to output the additional columns date and temp with values that are suitable for how they are defined.
You could use
#Query("select *, 0 AS date, 0 AS temp from current_weather where id = $Current_Weather_id and isDay =:date")
fun getWeatherMetric(date:String) : LiveData<MetricCurrentWeather>
This would add the two additional columns to the output. However, 0 for both date and temp could be useless, and perhaps even harmful.
You need to understand what values the date and temp fields of the MetricCurrentWeather should hold and how those values should be obtained.
E.g. it could be that last_updated is the date and temp_c the temp (both fields in the Current object, and thus columns in the output), in which case the query could be:
@Query("select *, last_updated AS date, temp_c AS temp from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather>
That is, the output will be all the columns of the current_weather table (* representing all columns) plus two additional columns:
date (AS date names the column to the required name), which will be the value of the last_updated column, and
temp (AS temp names the new column to the required name), which will be the value of the temp_c column.
I am trying to add a generic Slick Entity / Table helper class:
trait HasSlickProfile {
val profile: JdbcProfile
}
trait SlickedDBConfig
extends HasSlickProfile {
lazy val profile: JdbcProfile = ???
}
abstract class EntityHelper[E]
extends HasSlickProfile {
import profile.api._
type T <: Table[E]
def table: TableQuery[_ <: Table[E]]
// TONS OF USEFULL ENTITY / TABLE STUFF
}
but it does not compile in:
object ValueOfHelper
extends EntityHelper[ValueOf]
with SlickedDBConfig {
import profile.api._
type T = ValueOfTable
val table = ValueOfTable
}
since two errors are found:
[error] /.../ValueOfHelper.scala:22: overriding type T in class EntityHelper with bounds <: fp.model.ValueOfHelper.profile.api.Table[fp.model.Tables.ValueOf];
[error] type T has incompatible type
[error] type T = ValueOfTable
[error] ^
[error] /.../ValueOfHelper.scala:23: overriding method table in class EntityHelper of type => fp.model.ValueOfHelper.profile.api.TableQuery[_ <: fp.model.ValueOfHelper.profile.api.Table[fp.model.Tables.ValueOf]];
[error] value table has incompatible type
[error] val table = ValueOfTable
[error] ^
The autogenerated Tables object looks like this:
import slicked.codegen.SlickedDBConfig
object Tables extends {
} with Tables
with SlickedDBConfig
trait Tables {
val profile: slick.jdbc.JdbcProfile
import profile.api._
// Custom imports start
import slicked.SlickedRow
import slicked.SlickedTable
import slicked.SlickMappers._
// Custom imports end
import slick.model.ForeignKeyAction
import slick.jdbc.{GetResult => GR}
lazy val schema: profile.SchemaDescription = ValueOfTable.schema
case class ValueOf(of: String, in: String, timestamp: org.joda.time.DateTime, `val`: Int, info: Option[String] = None) extends SlickedRow
implicit def GetResultValueOf(implicit e0: GR[String], e1: GR[org.joda.time.DateTime], e2: GR[Int], e3: GR[Option[String]]): GR[ValueOf] = GR{
prs => import prs._
ValueOf.tupled((<<[String], <<[String], <<[org.joda.time.DateTime], <<[Int], <<?[String]))
}
class ValueOfTable(_tableTag: Tag) extends profile.api.Table[ValueOf](_tableTag, Some("forex"), "VALUE_OF") with SlickedTable {
def * = (of, in, timestamp, `val`, info) <> (ValueOf.tupled, ValueOf.unapply)
def ? = (Rep.Some(of), Rep.Some(in), Rep.Some(timestamp), Rep.Some(`val`), info).shaped.<>({r=>import r._; _1.map(_=> ValueOf.tupled((_1.get, _2.get, _3.get, _4.get, _5)))}, (_:Any) => throw new Exception("Inserting into ? projection not supported."))
val of: Rep[String] = column[String]("OF", O.Length(16,varying=true))
val in: Rep[String] = column[String]("IN", O.Length(16,varying=true))
val timestamp: Rep[org.joda.time.DateTime] = column[org.joda.time.DateTime]("TIMESTAMP")
val `val`: Rep[Int] = column[Int]("VAL")
val info: Rep[Option[String]] = column[Option[String]]("INFO", O.Default(None))
val index1 = index("VALUEOF_TIMESTAMP_OF_IN_pk", (timestamp, of, in), unique=true)
}
lazy val ValueOfTable = new TableQuery(tag => new ValueOfTable(tag))
}
How do you write it to be profile-agnostic?
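For orientation: the two errors are path-dependent-type mismatches. ValueOfTable is generated against the profile of the Tables instance, while EntityHelper[E] compiles against its own profile member, and Scala treats the two Table types as unrelated. One way to keep every reference on a single profile path is to define the helper inside the same cake as the generated Tables trait. A rough, untested sketch along those lines (names assumed, not from the original thread):
trait EntityHelpers { this: Tables =>
  import profile.api._

  abstract class EntityHelper[E] {
    type T <: Table[E]
    def table: TableQuery[T]

    // helper methods now compile against the one shared profile
    def all: DBIO[Seq[E]] = table.result
  }
}

// mixed into the same instance as the generated code, e.g.:
// object Tables extends Tables with SlickedDBConfig with EntityHelpers {
//   object ValueOfHelper extends EntityHelper[ValueOf] {
//     type T = ValueOfTable
//     val table = ValueOfTable
//   }
// }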
I have a DAO helper trait that provides common functionality to DAOs. It needs to be able to access the table query, and run actions. I'm having trouble defining or otherwise providing the query type to the helper trait.
Below is some code, also available in a short demo project on GitHub, in the action branch.
First, db is defined in trait DBComponent:
trait DBComponent {
import slick.driver.JdbcProfile
val driver: JdbcProfile
import driver.api._
val db: Database
}
The classes to be persisted extend HasId:
trait HasId {
def id: Option[Int] = None
}
Here is one such class to be persisted:
case class BankInfo(
owner: String,
branches: Int,
bankId: Int,
override val id: Option[Int] = None
) extends HasId
The problem is that I don't know how to set QueryType in the following DAO helper trait; I expect that most of the errors that follow are a consequence of the improper type that I used:
/** Handles all actions pertaining to HasId or that do not require parameters */
trait DbAction[T <: HasId] { this: DBComponent =>
import driver.api._ // defines DBIOAction
type QueryType <: slick.lifted.TableQuery[Table[T]] // this type is wrong! What should it be?
def tableQuery: QueryType
// Is this defined correctly?
def run[R](action: DBIOAction[R, NoStream, Nothing]): Future[R] = db.run { action }
def deleteById(id: Option[Long]): Unit =
for { i <- id } run { tableQuery.filter(_.id === id).delete } // id is unknown because QueryType is wrong
def findAll: Future[List[T]] = run { tableQuery.to[List].result } // also b0rked
// Remaining methods shown on GitHub
}
FYI, here is how the above will be used. First, the trait that defines the table query:
trait BankInfoTable extends BankTable { this: DBComponent =>
import driver.api._
class BankInfoTable(tag: Tag) extends Table[BankInfo](tag, "bankinfo") {
val id = column[Int]("id", O.PrimaryKey, O.AutoInc)
val owner = column[String]("owner")
val bankId = column[Int]("bank_id")
val branches = column[Int]("branches")
def bankFK = foreignKey("bank_product_fk", bankId, bankTableQuery)(_.id)
def * = (owner, branches, bankId, id.?) <> (BankInfo.tupled, BankInfo.unapply)
}
val tableQuery = TableQuery[BankInfoTable]
def autoInc = tableQuery returning tableQuery.map(_.id)
}
It all comes together here:
trait BankInfoRepositoryLike extends BankInfoTable with DbAction[BankInfo]
{ this: DBComponent =>
import driver.api._
@inline def updateAsync(bankInfo: BankInfo): Future[Int] =
run { tableQuery.filter(_.id === bankInfo.id.get).update(bankInfo) }
@inline def getByIdAsync(id: Int): Future[Option[BankInfo]] =
run { tableQuery.filter(_.id === id).result.headOption }
}
Suggestions?
Full working example:
package com.knol.db.repo
import com.knol.db.connection.DBComponent
import com.knol.db.connection.MySqlDBComponent
import scala.concurrent.{Await, Future}
import concurrent.duration.Duration
trait LiftedHasId {
def id: slick.lifted.Rep[Int]
}
trait HasId {
def id: Option[Int]
}
trait GenericAction[T <: HasId] { this: DBComponent =>
  import driver.api._

  type QueryType <: slick.lifted.TableQuery[_ <: Table[T] with LiftedHasId]
  val tableQuery: QueryType

  @inline def deleteAsync(id: Int): Future[Int] = db.run { tableQuery.filter(_.id === id).delete }
  @inline def delete(id: Int): Int = Await.result(deleteAsync(id), Duration.Inf)
  @inline def deleteAllAsync(): Future[Int] = db.run { tableQuery.delete }
  @inline def deleteAll(): Int = Await.result(deleteAllAsync(), Duration.Inf)
  @inline def getAllAsync: Future[List[T]] = db.run { tableQuery.to[List].result }
  @inline def getAll: List[T] = Await.result(getAllAsync, Duration.Inf)
  @inline def getByIdAsync(id: Int): Future[Option[T]] =
    db.run { tableQuery.filter(_.id === id).result.headOption }
  @inline def getById(id: Int): Option[T] = Await.result(getByIdAsync(id), Duration.Inf)
  @inline def deleteById(id: Option[Int]): Unit =
    db.run { tableQuery.filter(_.id === id).delete }
  @inline def findAll: Future[List[T]] = db.run { tableQuery.to[List].result }
}
trait BankInfoRepository extends BankInfoTable with GenericAction[BankInfo] { this: DBComponent =>
import driver.api._
type QueryType = TableQuery[BankInfoTable]
val tableQuery = bankInfoTableQuery
def create(bankInfo: BankInfo): Future[Int] = db.run { bankTableInfoAutoInc += bankInfo }
def update(bankInfo: BankInfo): Future[Int] = db.run { bankInfoTableQuery.filter(_.id === bankInfo.id.get).update(bankInfo) }
/**
* Get bank and info using foreign key relationship
*/
def getBankWithInfo(): Future[List[(Bank, BankInfo)]] =
db.run {
(for {
info <- bankInfoTableQuery
bank <- info.bank
} yield (bank, info)).to[List].result
}
/**
* Get all banks and their info. It is possible that some banks do not have info.
*/
def getAllBankWithInfo(): Future[List[(Bank, Option[BankInfo])]] =
db.run {
bankTableQuery.joinLeft(bankInfoTableQuery).on(_.id === _.bankId).to[List].result
}
}
private[repo] trait BankInfoTable extends BankTable{ this: DBComponent =>
import driver.api._
class BankInfoTable(tag: Tag) extends Table[BankInfo](tag, "bankinfo") with LiftedHasId {
val id = column[Int]("id", O.PrimaryKey, O.AutoInc)
val owner = column[String]("owner")
val bankId = column[Int]("bank_id")
val branches = column[Int]("branches")
def bank = foreignKey("bank_product_fk", bankId, bankTableQuery)(_.id)
def * = (owner, branches, bankId, id.?) <> (BankInfo.tupled, BankInfo.unapply)
}
protected val bankInfoTableQuery = TableQuery[BankInfoTable]
protected def bankTableInfoAutoInc = bankInfoTableQuery returning bankInfoTableQuery.map(_.id)
}
object BankInfoRepository extends BankInfoRepository with MySqlDBComponent
case class BankInfo(owner: String, branches: Int, bankId: Int, id: Option[Int] = None) extends HasId
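As a quick smoke test of the example above (an illustrative sketch, same package assumed; it needs the MySQL connection that MySqlDBComponent configures):
object GenericActionDemo extends App {
  val repo = BankInfoRepository                  // the object defined above
  val all: List[BankInfo] = repo.getAll          // blocking wrapper from GenericAction
  println(s"found ${all.size} bank infos")
  repo.deleteById(all.headOption.flatMap(_.id))  // delete the first row, if any
}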
You're trying to abstract over the result type with HasId but your code doesn't actually care about that. The id values that you're using are the ones from the lifted type, i.e. the table row class, so you need an abstraction at this level:
trait LiftedHasId {
def id: slick.lifted.Rep[Int]
}
Then in DbAction:
type QueryType <: slick.lifted.TableQuery[_ <: Table[T] with LiftedHasId]
And BankInfoTable must define a concrete type for it:
type QueryType = slick.lifted.TableQuery[BankInfoTable]
Or you could add it as a second type parameter to DbAction (just like Query has two type parameters for the lifted type and the result type).
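For completeness, a rough, untested sketch of that second-type-parameter variant (not from the original answer, and it assumes the same imports as the snippets above). Note that the bound must use a profile-independent table type such as slick.profile.RelationalProfile#Table (slick.relational.RelationalProfile in Slick 3.2+), because the path-dependent driver.api.Table cannot appear in the trait's type-parameter list:
trait DbAction2[T <: HasId, LT <: slick.profile.RelationalProfile#Table[T] with LiftedHasId] { this: DBComponent =>
  import driver.api._

  // the query type is now fully determined by the two type parameters
  val tableQuery: TableQuery[LT]

  def getByIdAsync(id: Int): Future[Option[T]] =
    db.run { tableQuery.filter(_.id === id).result.headOption }

  def deleteAsync(id: Int): Future[Int] =
    db.run { tableQuery.filter(_.id === id).delete }
}

// usage, analogous to the QueryType version:
// trait BankInfoRepository extends BankInfoTable with DbAction2[BankInfo, BankInfoTable]
// { this: DBComponent => /* ... */ val tableQuery = bankInfoTableQuery /* ... */ }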
Coming from JPA and Hibernate, I find using Slick pretty straightforward, apart from some joins and aggregate queries.
Are there any best practices I could employ when defining my tables and the case classes they map to? I currently have a single class that holds all the queries, the table definitions, and the case classes that I map to when I query the database. This class deals with, say, 10 to 15 tables, and the file has become quite big.
I'm now wondering if I should split this into different packages for readability. What do you think?
You can separate the Slick mapping tables from the Slick queries: put each mapping table into a trait, and mix that trait in wherever you want to write queries and joins against the table. For example:
package com.knol.db.repo
import com.knol.db.connection.DBComponent
import scala.concurrent.Future
trait BankRepository extends BankTable { this: DBComponent =>
import driver.api._
def create(bank: Bank): Future[Int] = db.run { bankTableAutoInc += bank }
def update(bank: Bank): Future[Int] = db.run { bankTableQuery.filter(_.id === bank.id.get).update(bank) }
def getById(id: Int): Future[Option[Bank]] = db.run { bankTableQuery.filter(_.id === id).result.headOption }
def getAll(): Future[List[Bank]] = db.run { bankTableQuery.to[List].result }
def delete(id: Int): Future[Int] = db.run { bankTableQuery.filter(_.id === id).delete }
}
private[repo] trait BankTable { this: DBComponent =>
import driver.api._
private[BankTable] class BankTable(tag: Tag) extends Table[Bank](tag,"bank") {
val id = column[Int]("id", O.PrimaryKey, O.AutoInc)
val name = column[String]("name")
def * = (name, id.?) <> (Bank.tupled, Bank.unapply)
}
protected val bankTableQuery = TableQuery[BankTable]
protected def bankTableAutoInc = bankTableQuery returning bankTableQuery.map(_.id)
}
case class Bank(name: String, id: Option[Int] = None)
For joining two tables:
package com.knol.db.repo
import com.knol.db.connection.DBComponent
import scala.concurrent.Future
trait BankInfoRepository extends BankInfoTable { this: DBComponent =>
import driver.api._
def create(bankInfo: BankInfo): Future[Int] = db.run { bankTableInfoAutoInc += bankInfo }
def update(bankInfo: BankInfo): Future[Int] = db.run { bankInfoTableQuery.filter(_.id === bankInfo.id.get).update(bankInfo) }
def getById(id: Int): Future[Option[BankInfo]] = db.run { bankInfoTableQuery.filter(_.id === id).result.headOption }
def getAll(): Future[List[BankInfo]] = db.run { bankInfoTableQuery.to[List].result }
def delete(id: Int): Future[Int] = db.run { bankInfoTableQuery.filter(_.id === id).delete }
def getBankWithInfo(): Future[List[(Bank, BankInfo)]] =
db.run {
(for {
info <- bankInfoTableQuery
bank <- info.bank
} yield (bank, info)).to[List].result
}
def getAllBankWithInfo(): Future[List[(Bank, Option[BankInfo])]] =
db.run {
bankTableQuery.joinLeft(bankInfoTableQuery).on(_.id === _.bankId).to[List].result
}
}
private[repo] trait BankInfoTable extends BankTable { this: DBComponent =>
import driver.api._
private[BankInfoTable] class BankInfoTable(tag: Tag) extends Table[BankInfo](tag,"bankinfo") {
val id = column[Int]("id", O.PrimaryKey, O.AutoInc)
val owner = column[String]("owner")
val bankId = column[Int]("bank_id")
val branches = column[Int]("branches")
def bank = foreignKey("bank_product_fk", bankId, bankTableQuery)(_.id)
def * = (owner, branches, bankId, id.?) <> (BankInfo.tupled, BankInfo.unapply)
}
protected val bankInfoTableQuery = TableQuery[BankInfoTable]
protected def bankTableInfoAutoInc = bankInfoTableQuery returning bankInfoTableQuery.map(_.id)
}
case class BankInfo(owner: String, branches: Int, bankId: Int, id: Option[Int] = None)
For more explanation, see the blog post and the GitHub project. I hope it helps!
I'm trying to create a user-defined type in Spark SQL, but I receive:
com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType, even when using their example. Has anyone made this work?
My code:
test("udt serialisation") {
val points = Seq(new ExamplePoint(1.3, 1.6), new ExamplePoint(1.3, 1.8))
val df = SparkContextForStdout.context.parallelize(points).toDF()
}
@SQLUserDefinedType(udt = classOf[ExamplePointUDT])
case class ExamplePoint(val x: Double, val y: Double)
/**
* User-defined type for [[ExamplePoint]].
*/
class ExamplePointUDT extends UserDefinedType[ExamplePoint] {
override def sqlType: DataType = ArrayType(DoubleType, false)
override def pyUDT: String = "pyspark.sql.tests.ExamplePointUDT"
override def serialize(obj: Any): Seq[Double] = {
obj match {
case p: ExamplePoint =>
Seq(p.x, p.y)
}
}
override def deserialize(datum: Any): ExamplePoint = {
datum match {
case values: Seq[_] =>
val xy = values.asInstanceOf[Seq[Double]]
assert(xy.length == 2)
new ExamplePoint(xy(0), xy(1))
case values: util.ArrayList[_] =>
val xy = values.asInstanceOf[util.ArrayList[Double]].asScala
new ExamplePoint(xy(0), xy(1))
}
}
override def userClass: Class[ExamplePoint] = classOf[ExamplePoint]
}
The useful part of the stack trace is this:
com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType
java.lang.ClassCastException: com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType
at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:316)
at org.apache.spark.sql.SQLContext$implicits$.rddToDataFrameHolder(SQLContext.scala:254)
It seems that the UDT needs to be used as the type of a field inside another class to work: toDF derives the schema from the element type, and for a class annotated with @SQLUserDefinedType the derived schema is the UDT itself rather than a StructType, hence the failed cast. One solution to use it directly is to wrap it in a Tuple1:
test("udt serialisation") {
val points = Seq(new Tuple1(new ExamplePoint(1.3, 1.6)), new Tuple1(new ExamplePoint(1.3, 1.8)))
val df = SparkContextForStdout.context.parallelize(points).toDF()
df.collect().foreach(println(_))
}
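If the Tuple1 feels opaque, an equivalent trick is a small case class wrapper (a sketch reusing the same test scaffolding; PointHolder is a made-up name), which also gives the column a readable name:
// defined at the top level, next to ExamplePoint
case class PointHolder(point: ExamplePoint)

test("udt serialisation via case class wrapper") {
  val points = Seq(PointHolder(new ExamplePoint(1.3, 1.6)), PointHolder(new ExamplePoint(1.3, 1.8)))
  val df = SparkContextForStdout.context.parallelize(points).toDF()
  df.collect().foreach(println(_))
}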