The columns returned by the query does not have the fields [date,temp] in db.unitlocalized.future.MetricSimpleFutureWeatherEntry - retrofit2

I have a test task to create a weather app that shows the weather in the user's city by geolocation.
I use the following stack: Dagger, Room, Retrofit, Coroutines.
I get an error in a DAO class.
I created the db because I want the app to remember the weather state when the user searches for a city (not implemented yet).
I don't know how to fix it, so I hope you can help me.
package di
import Repository.ForecastRepositoryImpl
import android.content.Context
import android.os.Build
import androidx.annotation.RequiresApi
import androidx.room.Room
import com.google.android.gms.location.FusedLocationProviderClient
import const.URL_CURRENT
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.android.qualifiers.ApplicationContext
import dagger.hilt.components.SingletonComponent
import db.CurrentWeatherDao
import db.ForecastDb
import db.FutureWeatherDao
import locationprovider.LocationProvider
import locationprovider.LocationProviderImpl
import okhttp3.OkHttpClient
import retrofit.*
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import viewmodel.WeatherForeCastViewModels.FactoryCurrentVM
import viewmodel.WeatherForeCastViewModels.ViewmodelFutureWeather.FutureListViewModelFactory
import javax.inject.Singleton
@Module
@InstallIn(SingletonComponent::class)
object WeatherModule {

    @Provides
    @Singleton
    fun providedaocurrent(appdatabase: ForecastDb): CurrentWeatherDao {
        return appdatabase.currentWeatherDao()
    }

    @RequiresApi(Build.VERSION_CODES.M)
    @Provides
    @Singleton
    fun providesOkHttpClient(connectivityInterceptorImpl: ConnectivityInterceptorImpl) =
        OkHttpClient
            .Builder()
            .addInterceptor(connectivityInterceptorImpl)
            .build()

    @Provides
    @Singleton
    fun provideRetrofit(okHttpClient: OkHttpClient) = Retrofit.Builder()
        .addConverterFactory(GsonConverterFactory.create())
        .baseUrl(URL_CURRENT)
        .client(okHttpClient)
        .build()

    @Provides
    @Singleton
    fun provideWeatherApiService(retrofit: Retrofit) =
        retrofit.create(WeatherServiceAPI::class.java)

    @Provides
    @Singleton
    fun provideweathernetworkdatasource(weatherNetworkDataSource: WeatherNetworkDataSource): WeatherNetworkDataSource {
        return weatherNetworkDataSource
    }

    @Provides
    @Singleton
    fun providerepository(repositoryImpl: ForecastRepositoryImpl): ForecastRepositoryImpl {
        return repositoryImpl
    }

    @Provides
    @Singleton
    fun providevm(vm: FactoryCurrentVM): FactoryCurrentVM {
        return vm
    }

    @Provides
    @Singleton
    fun providelocationprovider(locationProviderImpl: LocationProviderImpl): LocationProvider {
        return locationProviderImpl
    }

    @Provides
    @Singleton
    fun provideforecastdb(@ApplicationContext appContext: Context): ForecastDb {
        return Room.databaseBuilder(
            appContext,
            ForecastDb::class.java,
            "forecast.db"
        ).build()
    }

    @Provides
    @Singleton
    fun providefusedlocationproviderclient(fusedLocationProviderClient: FusedLocationProviderClient): FusedLocationProviderClient {
        return fusedLocationProviderClient
    }

    @Provides
    @Singleton
    fun providefutureweatherdao(@ApplicationContext appdb: ForecastDb): FutureWeatherDao {
        return appdb.futureweatherdao()
    }

    @Provides
    @Singleton
    fun providevmfactoryfuture(vm: FutureListViewModelFactory): FutureListViewModelFactory {
        return vm
    }
}
package db

import android.content.Context
import androidx.room.*
import db.entities.Current
import db.entities.FutureWeatherEntry
import db.entities.WeatherLocation

@Database(entities = [Current::class, WeatherLocation::class, FutureWeatherEntry::class], version = 1)
@TypeConverters(LocaleDateConverter::class)
abstract class ForecastDb : RoomDatabase() {
    abstract fun futureweatherdao(): FutureWeatherDao
    abstract fun currentWeatherDao(): CurrentWeatherDao
    abstract fun weatherLocationDao(): WeatherLocationDao

    companion object {
        @Volatile private var instance: ForecastDb? = null
        private val lock = Any()

        operator fun invoke(context: Context) = instance ?: synchronized(lock) {
            instance ?: builddb(context)
        }

        private fun builddb(context: Context) =
            Room.databaseBuilder(context.applicationContext, RoomDatabase::class.java, "forecast.db").build()
    }
}
package db

import androidx.lifecycle.LiveData
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import db.entities.FutureWeatherEntry
import db.unitlocalized.future.MetricSimpleFutureWeatherEntry
import java.time.LocalDate

@Dao
interface FutureWeatherDao {
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun insert(futureweatherEntries: List<FutureWeatherEntry>)

    @Query("select * from future_weather where date >= :startDate")
    fun getsimpleweatherforecastmetric(startDate: LocalDate):
            LiveData<List<MetricSimpleFutureWeatherEntry>> // error on this line

    @Query("select * from future_weather where date >= :startDate")
    fun countfutureweather(startDate: LocalDate): Int

    @Query("delete from future_weather where date < :firstDateToKeep")
    fun deleteOldEntries(firstDateToKeep: LocalDate)
}

@Dao
interface CurrentWeatherDao {
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun upsert(weather: Current)

    @Query("select * from current_weather where id = $Current_Weather_id and isDay = :date")
    fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather> // error here
}
const val Current_Weather_id = 0

@Entity(tableName = "current_weather")
data class Current(
    val location: String,
    val cloud: Int,
    @Embedded(prefix = "condition_")
    val condition: Condition,
    @SerializedName("feelslike_c")
    val feelslikeC: Double,
    @SerializedName("feelslike_f")
    val feelslikeF: Double,
    @SerializedName("gust_kph")
    val gustKph: Double,
    @SerializedName("gust_mph")
    val gustMph: Double,
    val humidity: Int,
    @SerializedName("is_day")
    val isDay: Int,
    @SerializedName("last_updated")
    val lastUpdated: String,
    @SerializedName("last_updated_epoch")
    val lastUpdatedEpoch: Int,
    @SerializedName("precip_in")
    val precipIn: Double,
    @SerializedName("precip_mm")
    val precipMm: Double,
    @SerializedName("pressure_in")
    val pressureIn: Double,
    @SerializedName("pressure_mb")
    val pressureMb: Double,
    @SerializedName("temp_c")
    val tempC: Double,
    @SerializedName("temp_f")
    val tempF: Double,
    val uv: Double,
    @SerializedName("vis_km")
    val visKm: Double,
    @SerializedName("vis_miles")
    val visMiles: Double,
    @SerializedName("wind_degree")
    val windDegree: Int,
    @SerializedName("wind_dir")
    val windDir: String,
    @SerializedName("wind_kph")
    val windKph: Double,
    @SerializedName("wind_mph")
    val windMph: Double
) {
    @PrimaryKey
    var id: Int = Current_Weather_id
}
#Entity(tableName = "future_weather", indices = [Index(value = ["date"], unique = true )])
data class FutureWeatherEntry(
#PrimaryKey(autoGenerate = false)
val id: Int? = null,
val date: String,
#Embedded
val day: Day
)
data class CurrentWeatherResponse(
#SerializedName("current")
val current: Current,
val location: WeatherLocation
)
data class Day(
    val avghumidity: Double,
    @SerializedName("avgtemp_c")
    val avgtempC: Double,
    @SerializedName("avgtemp_f")
    val avgtempF: Double,
    @SerializedName("avgvis_km")
    val avgvisKm: Double,
    @SerializedName("avgvis_miles")
    val avgvisMiles: Double,
    @Embedded(prefix = "condition_")
    val condition: Condition,
    @SerializedName("daily_chance_of_rain")
    val dailyChanceOfRain: Int,
    @SerializedName("daily_chance_of_snow")
    val dailyChanceOfSnow: Int,
    @SerializedName("daily_will_it_rain")
    val dailyWillItRain: Int,
    @SerializedName("daily_will_it_snow")
    val dailyWillItSnow: Int,
    @SerializedName("maxtemp_c")
    val maxtempC: Double,
    @SerializedName("maxtemp_f")
    val maxtempF: Double,
    @SerializedName("maxwind_kph")
    val maxwindKph: Double,
    @SerializedName("maxwind_mph")
    val maxwindMph: Double,
    @SerializedName("mintemp_c")
    val mintempC: Double,
    @SerializedName("mintemp_f")
    val mintempF: Double,
    @SerializedName("totalprecip_in")
    val totalprecipIn: Double,
    @SerializedName("totalprecip_mm")
    val totalprecipMm: Double,
    @SerializedName("totalsnow_cm")
    val totalsnowCm: Double,
    val uv: Double
)
const val weather_location_id = 0

@Entity(tableName = "weather_location")
data class WeatherLocation(
    val country: String,
    val lat: Double,
    val localtime: String,
    @SerializedName("localtime_epoch")
    val localtimeEpoch: Long,
    val lon: Double,
    val name: String,
    val region: String,
    @SerializedName("tz_id")
    val tzId: String
) {
    @PrimaryKey(autoGenerate = false)
    var id: Int = weather_location_id

    val zonedDateTime: ZonedDateTime
        @RequiresApi(Build.VERSION_CODES.O)
        get() {
            val instant = Instant.ofEpochSecond(localtimeEpoch)
            val zoneId = ZoneId.of(tzId)
            return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                ZonedDateTime.ofInstant(instant, zoneId)
            } else {
                TODO("VERSION.SDK_INT < O")
            }
        }
}

You are trying to retrieve a MetricCurrentWeather object which, according to Room, requires values for its fields date and temp to come from output columns with those names.
As it stands, the query
@Query("select * from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather> // error here
will extract data whose fields are sufficient to build a Current object (i.e. the Current entity, aka the current_weather table, has columns and thus fields such as tempC, feelslikeC and so on).
So you could use:
@Query("select * from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<Current>
If you need a MetricCurrentWeather to be returned, then you need to alter the query so that it outputs the additional columns date and temp, with values that suit how those fields are defined.
You could use
@Query("select *, 0 AS date, 0 AS temp from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather>
This would add the two extra columns to the output. However, 0 for both date and temp could be useless and perhaps damaging.
You need to understand what values the date and temp fields of MetricCurrentWeather should hold and how those values should be obtained.
For example, it could be that lastUpdated supplies the date and tempC the temp (both are fields in the Current entity and thus columns in the table), in which case the query could be:
@Query("select *, lastUpdated AS date, tempC AS temp from current_weather where id = $Current_Weather_id and isDay = :date")
fun getWeatherMetric(date: String): LiveData<MetricCurrentWeather>
That is, the output will be all the columns of the current_weather table (* representing all columns) plus two additional columns:
date (AS date gives the column the required name), which takes the value of the lastUpdated column, and
temp (AS temp gives the new column the required name), which takes the value of the tempC column.
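For reference, the MetricCurrentWeather class itself is not shown in the question, but based on the error message it presumably looks something like the sketch below; the field names come from the error, while the types and column mappings are assumptions:
import androidx.room.ColumnInfo

// Hypothetical sketch of the projection class Room is complaining about.
// Room can only populate it if the query's result set contains columns
// named "date" and "temp" (hence the AS aliases suggested above).
data class MetricCurrentWeather(
    @ColumnInfo(name = "date") val date: String,  // e.g. aliased from lastUpdated
    @ColumnInfo(name = "temp") val temp: Double   // e.g. aliased from tempC
)
With the aliased query, Room can map both extra columns onto these fields by name; without them it raises the "columns returned by the query does not have the fields [date,temp]" error.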

Related

Retrieve nested Data from Firebase Database android

Snapshot of my firebase realtime database
I want to extract all the data under the "Orders" node. Please tell me how I should model my data classes for Android in Kotlin.
I tried this type of modeling,
after getting a reference to (Orders/uid/):
Order.kt
data class Order(
val items:ArrayList<Myitems>=ArrayList(),
val timeStamp:Long=0,
val totalCost:Int=0
)
MyItems.kt
data class MyItems(
val Item:ArrayList<Menu>=ArrayList()
)
Menu.kt
data class Menu(
val menCategory:String="",
val menName:String="",
val menImage:String="",
val menId:String="",
val menQuantity:Int=0,
val menCost:Int=0
)
After a lot of thinking and research online, I was finally able to model my classes and attach a value event listener. Here it goes:
Order.kt
data class Order(
val items: ArrayList<HashMap<String, Any>> = ArrayList(),
val timeStamp: Long = 0,
val totalCost: Int = 0
)
OItem.kt
data class OItem(
val menCategory: String = "",
val menId: String = "",
val menImage: String = "",
val menName: String = "",
val menPrice: Int = 0,
var menQuantity: Int = 0
)
MainActivity.kt
val uid = FirebaseAuth.getInstance().uid
val ref = FirebaseDatabase.getInstance().getReference("Orders/$uid")
ref.addListenerForSingleValueEvent(object : ValueEventListener {
override fun onCancelled(error: DatabaseError) {
//
}
override fun onDataChange(p0: DataSnapshot) {
p0.children.forEach {
val order = it.getValue(Order::class.java)
ordList.add(order!!)
}
Log.d("hf", ordList.toString())
}
})
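Note that order.items comes back as a list of raw HashMap<String, Any> rather than typed OItem objects. If typed items are needed, one option (a sketch only, assuming each map carries the OItem fields by name) is to convert them manually:
// Sketch: turn the raw maps stored under "items" into OItem objects.
// Firebase stores numbers as Long, hence the Number -> Int conversion.
fun toOItems(order: Order): List<OItem> =
    order.items.map { map ->
        OItem(
            menCategory = map["menCategory"] as? String ?: "",
            menId = map["menId"] as? String ?: "",
            menImage = map["menImage"] as? String ?: "",
            menName = map["menName"] as? String ?: "",
            menPrice = (map["menPrice"] as? Number)?.toInt() ?: 0,
            menQuantity = (map["menQuantity"] as? Number)?.toInt() ?: 0
        )
    }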

Same value was passed as the nextKey in two sequential Pages loaded from a PagingSource in Paging Library 3 Android

I migrated from Paging 2 to Paging 3 and tried to re-implement my Paging 2 ItemKeyedDataSource with the Paging 3 library. The problem I was facing is that the same value (currentJodId) was passed as the nextKey for two sequential pages loaded, and after that the app crashes. If I add "keyReuseSupported = true" in the DataSource, the app does not crash, but it keeps passing the same item id as the nextKey.
JobSliderRestApi.kt
@GET("job/list/slides")
fun getDetailOfSelectedJob(
    @Query("current_job") currentJodId: Int?,
    @Query("limit") jobLimit: Int?,
    @Query("search_in") fetchType: String?
): Single<Response<JobViewResponse>>
JobViewResponse.kt
data class JobViewResponse(
    @SerializedName("data") val data: ArrayList<JobDetail>?
) : BaseResponse()
JobDetail.kt
data class JobDetail(
    @SerializedName("job_id") val jobId: Int,
    @SerializedName("tuition_type") val jobType: String?,
    @SerializedName("class_image") val jobImage: String,
    @SerializedName("salary") val salary: String,
    @SerializedName("no_of_student") val noOfStudent: Int,
    @SerializedName("student_gender") val studentGender: String,
    @SerializedName("tutor_gender") val preferredTutor: String,
    @SerializedName("days_per_week") val daysPerWeek: String?,
    @SerializedName("other_req") val otherReq: String?,
    @SerializedName("latitude") val latitude: Double?,
    @SerializedName("longitude") val longitude: Double?,
    @SerializedName("area") val area: String,
    @SerializedName("tutoring_time") val tutoringTime: String?,
    @SerializedName("posted_date") val postedDate: String?,
    @SerializedName("subjects") val subjects: String,
    @SerializedName("title") val title: String
)
JodSliderDataSource.kt
class JodSliderDataSource @Inject constructor(
    private val jobSliderRestApi: JobSliderRestApi
) : RxPagingSource<Int, JobDetail>() {

    // override val keyReuseSupported = true

    @ExperimentalPagingApi
    override fun getRefreshKey(state: PagingState<Int, JobDetail>): Int? {
        return state.anchorPosition?.let {
            state.closestItemToPosition(it)?.jobId
        }
    }

    override fun loadSingle(params: LoadParams<Int>): Single<LoadResult<Int, JobDetail>> {
        return jobSliderRestApi.getDetailOfSelectedJob(42673, 2, "next").toSingle()
            .subscribeOn(Schedulers.io())
            .map { jobResponse -> toLoadResult(jobResponse.data) }
            .onErrorReturn { LoadResult.Error(it) }
    }

    private fun toLoadResult(data: ArrayList<JobDetail>): LoadResult<Int, JobDetail> {
        return LoadResult.Page(data = data, prevKey = null, nextKey = data.lastOrNull()?.jobId)
    }
}
I was getting the same error and this is what worked for me. Paging 3 refuses to load two sequential pages with the same key (unless keyReuseSupported is overridden), so in the JodSliderDataSource class's toLoadResult method, build the nextKey from the response data and advance it by one instead of reusing the last key as-is:
private fun toLoadResult(
    data: ArrayList<JobDetail>
): LoadResult<Int, JobDetail> {
    return LoadResult.Page(
        data = data,
        prevKey = null,
        nextKey = data.lastOrNull()?.jobId?.plus(1) // advance past the last item's id so the same key is never returned twice
    )
}

Spark AccumulatorV2 with HashMap

I am trying to create a custom AccumulatorV2 backed by a hash map: the input would be a HashMap and the output would be a map of HashMaps.
My intention is to have K -> (K1, V), where the value increments. I am confused by the Scala syntax for overriding AccumulatorV2 for Map; has anyone had any luck with this?
class CustomAccumulator extends AccumulatorV2[java.util.Map[String, String], java.util.Map[String,java.util.Map[String, Double]]]
I'm assuming that this is the scenario that needs to be implemented.
Input:
HashMap<String, String>
Output:
Should output a HashMap<String, HashMap<String, Double>>, where the second hashmap contains the count of values corresponding to the keys.
Example:
Inputs (the following HashMaps are added to the accumulator):
Input HashMap1 -> {"key1", "value1"}, {"key2", "value1"}, {"key3", "value3"}
Input HashMap2 -> {"key1", "value1"}, {"key2", "value1"}
Input HashMap3 -> {"key2", "value1"}
Output:
{"key1", {"value1", 2}}, {"key2", {"value1", 3}}, {"key3", {"value3", 1}}
Code below:
import java.util
import java.util.Map.Entry
import java.util.{HashMap, Map}
import java.util.function.{BiFunction, Consumer}
import scala.collection.JavaConversions._
import org.apache.spark.util.AccumulatorV2
class CustomAccumulator extends AccumulatorV2[Map[String, String], Map[String, Map[String,Double]]] {
private var hashmap : Map[String, Map[String, Double]] = new HashMap[String, Map[String, Double]];
override def isZero: Boolean = {
return hashmap.size() == 0
}
override def copy(): AccumulatorV2[util.Map[String, String], util.Map[String, util.Map[String, Double]]] = {
var customAccumulatorcopy = new CustomAccumulator()
customAccumulatorcopy.merge(this)
return customAccumulatorcopy
}
override def reset(): Unit = {
this.hashmap = new HashMap[String, Map[String, Double]];
}
override def add(v: util.Map[String, String]): Unit = {
v.foreach(kv => {
val unitValueDouble : Double = 1;
if(this.hashmap.containsKey(kv._1)){
val innerMap = this.hashmap.get(kv._1)
innerMap.merge(kv._2, unitValueDouble, addFunction)
}
else {
val innerMap : Map[String, Double] = new HashMap[String, Double]()
innerMap.put(kv._2, unitValueDouble)
this.hashmap.put(kv._1, innerMap)
}
}
)
}
override def merge(otherAccumulator: AccumulatorV2[util.Map[String, String], util.Map[String, util.Map[String, Double]]]): Unit = {
otherAccumulator.value.foreach(kv => {
this.hashmap.merge(kv._1, kv._2, mergeMapsFunction)
})
}
override def value: util.Map[String, util.Map[String, Double]] = {
return this.hashmap
}
val mergeMapsFunction = new BiFunction[Map[String, Double], Map[String, Double], Map[String, Double]] {
override def apply(oldMap: Map[String, Double], newMap: Map[String, Double]): Map[String, Double] = {
newMap.foreach(kv => {
oldMap.merge(kv._1, kv._2, addFunction);
})
oldMap
}
}
val addFunction = new BiFunction[Double, Double, Double] {
override def apply(oldValue: Double, newValue: Double): Double = oldValue + newValue
}
}
Thanks!!!

Cassandra quill Codec not found for requested operation

I followed the example given in the docs but the following fails with Codec not found for requested operation: [varchar <-> java.util.UUID]. How is one supposed to provide a custom Cassandra codec with Quill?
import java.util.UUID
import javax.inject.Inject
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import io.getquill.{CassandraAsyncContext, LowerCase, MappedEncoding}
import scala.concurrent.duration._
object Main extends App {
case class MyOrder(id: String, uuid: UUID)
trait OrderRepository {
implicit val encodeUUID: MappedEncoding[UUID, String] = MappedEncoding[UUID, String](_.toString)
implicit val decodeUUID: MappedEncoding[String, UUID] = MappedEncoding[String, UUID](UUID.fromString)
def list(): Future[Iterable[MyOrder]]
def get(id: String): Future[Option[MyOrder]]
}
class OrderRepositoryImpl(db: CassandraAsyncContext[LowerCase]) extends OrderRepository {
import db._
override def list(): Future[Iterable[MyOrder]] = {
run(quote(query[MyOrder]))
}
override def get(id: String): Future[Option[MyOrder]] = {
val q = quote {
(id: String) =>
query[MyOrder].filter(_.id == id)
}
val res = run(q(lift(id.toString)))
res.map(_.headOption)
}
}
val db = new CassandraAsyncContext[LowerCase]("db")
val repo = new OrderRepositoryImpl(db)
val maybeOrder = Await.result(repo.get("id1"), 2 seconds)
println(s"maybeOrder $maybeOrder")
}
and the Cassandra ddl:
CREATE TABLE myorder (
id text,
uuid text,
PRIMARY KEY (id)
);
INSERT INTO myorder (
id,
uuid
) values (
'id1',
'a8a8a416-2436-4e18-82b3-d5881c8fec1a'
);
Obviously I can just create a class whose uuid field is of type String but the goal here is to figure out how to use custom decoders.

Exception when using UDT in Spark DataFrame

I'm trying to create a user defined type in spark sql, but I receive:
com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType even when using their example. Has anyone made this work?
My code:
test("udt serialisation") {
val points = Seq(new ExamplePoint(1.3, 1.6), new ExamplePoint(1.3, 1.8))
val df = SparkContextForStdout.context.parallelize(points).toDF()
}
@SQLUserDefinedType(udt = classOf[ExamplePointUDT])
case class ExamplePoint(val x: Double, val y: Double)
/**
* User-defined type for [[ExamplePoint]].
*/
class ExamplePointUDT extends UserDefinedType[ExamplePoint] {
override def sqlType: DataType = ArrayType(DoubleType, false)
override def pyUDT: String = "pyspark.sql.tests.ExamplePointUDT"
override def serialize(obj: Any): Seq[Double] = {
obj match {
case p: ExamplePoint =>
Seq(p.x, p.y)
}
}
override def deserialize(datum: Any): ExamplePoint = {
datum match {
case values: Seq[_] =>
val xy = values.asInstanceOf[Seq[Double]]
assert(xy.length == 2)
new ExamplePoint(xy(0), xy(1))
case values: util.ArrayList[_] =>
val xy = values.asInstanceOf[util.ArrayList[Double]].asScala
new ExamplePoint(xy(0), xy(1))
}
}
override def userClass: Class[ExamplePoint] = classOf[ExamplePoint]
}
The useful part of the stack trace is this:
com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType
java.lang.ClassCastException: com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType
at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:316)
at org.apache.spark.sql.SQLContext$implicits$.rddToDataFrameHolder(SQLContext.scala:254)
It seems that the UDT needs to be used inside of another class to work (as the type of a field). One solution to use it directly is to wrap it into a Tuple1:
test("udt serialisation") {
val points = Seq(new Tuple1(new ExamplePoint(1.3, 1.6)), new Tuple1(new ExamplePoint(1.3, 1.8)))
val df = SparkContextForStdout.context.parallelize(points).toDF()
df.collect().foreach(println(_))
}
