object DataService {
val categories = listOf(
Category("JACKET", "jacket_collection"),
Category("SHIRT", "shirt_collection"),
Category("PANT", "pant_collection"),
Category("SHOE", "shoe_collection")
    )
}
How can I extract the string values "JACKET", "SHIRT", "PANT" and "SHOE"?
You can extract those values like this:
val stringResource = categories.map { it.type } // substitute the property name from your Category class
Try this,
val list = listOf(
    Category("JACKET", "jacket_collection"),
    Category("SHIRT", "shirt_collection"),
    Category("PANT", "pant_collection"),
    Category("SHOE", "shoe_collection")
)
val resource = list.map { it.type } // [JACKET, SHIRT, PANT, SHOE]
Your Category class:
data class Category(var type: String, var desc: String)
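If "string resource" here means Android R.string entries rather than plain strings, one option is to keep a resource id in the model instead. This is only a sketch: the titleRes property, the R.string.jacket-style names, and the context reference are assumptions, not part of the original question.
import androidx.annotation.StringRes

// Sketch: keep a string-resource id alongside the collection name.
data class Category(@StringRes val titleRes: Int, val desc: String)

val categories = listOf(
    Category(R.string.jacket, "jacket_collection"), // assumed resource names
    Category(R.string.shirt, "shirt_collection")
)

// Resolve the ids to display strings with a Context:
val titles = categories.map { context.getString(it.titleRes) }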
[Image: snapshot of my Firebase Realtime Database]
I want to extract all of the data under the "Orders" node. How should I model my data classes for Android in Kotlin?
I tried this kind of modeling, after getting the reference to (Orders/uid/):
Order.kt
data class Order(
    val items: ArrayList<MyItems> = ArrayList(),
    val timeStamp: Long = 0,
    val totalCost: Int = 0
)
MyItems.kt
data class MyItems(
    val Item: ArrayList<Menu> = ArrayList()
)
Menu.kt
data class Menu(
    val menCategory: String = "",
    val menName: String = "",
    val menImage: String = "",
    val menId: String = "",
    val menQuantity: Int = 0,
    val menCost: Int = 0
)
After a lot of thinking and research online, I was finally able to model my classes and attach a value event listener. Here it goes:
Order.kt
data class Order(
val items: ArrayList<HashMap<String, Any>> = ArrayList(),
val timeStamp: Long = 0,
val totalCost: Int = 0
)
OItem.kt
data class OItem(
val menCategory: String = "",
val menId: String = "",
val menImage: String = "",
val menName: String = "",
val menPrice: Int = 0,
var menQuantity: Int = 0
)
MainActivity.kt
val ordList = ArrayList<Order>()
val uid = FirebaseAuth.getInstance().uid
val ref = FirebaseDatabase.getInstance().getReference("Orders/$uid")
ref.addListenerForSingleValueEvent(object : ValueEventListener {
    override fun onCancelled(error: DatabaseError) {
        // Handle the cancelled read (log the error, show a message, etc.)
    }

    override fun onDataChange(p0: DataSnapshot) {
        // Each child under Orders/$uid is one complete order.
        p0.children.forEach {
            val order = it.getValue(Order::class.java)
            ordList.add(order!!)
        }
        Log.d("hf", ordList.toString())
    }
})
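With this model the items field deserializes as raw maps. If you want typed OItem objects back, one option is to convert each map by hand. This is a sketch, not part of the original answer; it assumes the map keys match the OItem property names exactly.
// Sketch: turn each raw HashMap entry into a typed OItem.
// Firebase Realtime Database returns integers as Long, hence the casts.
fun toOItem(map: HashMap<String, Any>): OItem = OItem(
    menCategory = map["menCategory"] as? String ?: "",
    menId = map["menId"] as? String ?: "",
    menImage = map["menImage"] as? String ?: "",
    menName = map["menName"] as? String ?: "",
    menPrice = (map["menPrice"] as? Long)?.toInt() ?: 0,
    menQuantity = (map["menQuantity"] as? Long)?.toInt() ?: 0
)

val typedItems = ordList.flatMap { it.items }.map { toOItem(it) }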
How to search for data in RecyclerView?
I have a model like this, and I get the data with Retrofit. I have to build a RecyclerView from this model. How do I add search to the RecyclerView?
import com.google.gson.annotations.SerializedName

data class ListArticle(
    @SerializedName("status") val status: String,
    @SerializedName("totalResults") val totalResults: Int,
    @SerializedName("articles") val articles: List<Articles>
)

data class Source(
    @SerializedName("id") val id: String,
    @SerializedName("name") val name: String
)

data class Articles(
    @SerializedName("source") val source: Source,
    @SerializedName("author") val author: String,
    @SerializedName("title") val title: String,
    @SerializedName("description") val description: String,
    @SerializedName("url") val url: String,
    @SerializedName("urlToImage") val urlToImage: String,
    @SerializedName("publishedAt") val publishedAt: String,
    @SerializedName("content") val content: String
)
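For context, the Retrofit call that produces this model might look like the following. This is only a sketch assuming Retrofit 2.6+ with coroutines and the Gson converter; the NewsApi name, endpoint path, and base URL are assumptions, not from the question.
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.http.GET

// Sketch: a minimal Retrofit service that returns the model above.
interface NewsApi {
    @GET("top-headlines") // assumed endpoint path
    suspend fun getArticles(): ListArticle
}

val api: NewsApi = Retrofit.Builder()
    .baseUrl("https://example.com/v2/") // assumed base URL
    .addConverterFactory(GsonConverterFactory.create())
    .build()
    .create(NewsApi::class.java)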
I had to solve this myself, and I'm sorry the question wasn't complete. Just add this to your adapter:
fun getFilter(): Filter = articleFilter

private val articleFilter: Filter = object : Filter() {
    override fun performFiltering(constraint: CharSequence?): FilterResults {
        val filteredList: MutableList<Articles> = ArrayList()
        if (constraint.isNullOrEmpty()) {
            // Empty query: show the full list.
            filteredList.addAll(articles)
        } else {
            val filterPattern = constraint.toString().toLowerCase().trim()
            for (item in articles) {
                // Match case-insensitively on the article title.
                if (item.title.toLowerCase().contains(filterPattern)) {
                    filteredList.add(item)
                }
            }
        }
        val results = FilterResults()
        results.values = filteredList
        return results
    }

    override fun publishResults(constraint: CharSequence?, results: FilterResults) {
        @Suppress("UNCHECKED_CAST")
        searchArticles = results.values as List<Articles>
        notifyDataSetChanged()
    }
}

fun setData(article: List<Articles>) {
    searchArticles = article
    notifyDataSetChanged()
}
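To drive the filter from the UI, forward query changes from a SearchView to the adapter. A minimal sketch, assuming a searchView widget and an adapter reference, neither of which is named in the original:
// Sketch: pipe SearchView text changes into the adapter's filter.
searchView.setOnQueryTextListener(object : SearchView.OnQueryTextListener {
    override fun onQueryTextSubmit(query: String?): Boolean = false

    override fun onQueryTextChange(newText: String?): Boolean {
        adapter.getFilter().filter(newText)
        return true
    }
})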
I'm trying to access a class value by using a variable previously defined in Dart, but I keep getting the error: the operator [] isn't defined for the class.
In JavaScript I would access an object value using a variable like this:
let movie = {
movieTitle : 'Toy Story',
actor: 'Tom Hanks'
}
let actorName = 'actor';
console.log(movie[actorName]); // <- what I'm trying to replicate in dart
// expected output: Tom Hanks
Here is what I've tried, and it throws that error:
class Movie {
String name;
String actor;
String producer;
}
void main() {
  var movieTitle = new Movie();
  movieTitle.name = 'Toy Story';
  movieTitle.actor = 'Tom Hanks';
  print(movieTitle.actor); // <- prints Tom Hanks as expected
  var actorName = 'actor';
  print(movieTitle[actorName]); // <- throws the error
}
I expect to be able to use a variable on the fly to access the value.
A trivial use case for me: if I had a list of Movie objects where some actors and producers are null, I would like to filter on non-null actors or producers with a function like so:
List values = movieList.where((i) => i.actor != "null").toList(); // returns all Movies in movieList whose actor value isn't the string "null"
var actorIsNull = 'actor';
List values = movieList.where((i) => i[actorIsNull] != "null").toList(); // throws the error
You can create a toMap() function in your Movie class and access properties using the [] operator:
class Movie {
String name;
String actor;
String producer;
Map<String, dynamic> toMap() {
return {
'name': name,
'actor' : actor,
'producer' : producer,
};
}
}
Now the Movie class properties can be accessed as:
Movie movie = Movie();
movie.toMap()['name'];
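Applied to the original example, the variable lookup then works:
// The key can now come from a variable, as in the JavaScript version.
Movie movie = Movie()
  ..name = 'Toy Story'
  ..actor = 'Tom Hanks';
var actorName = 'actor';
print(movie.toMap()[actorName]); // Tom Hanks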
You cannot access class members by a string containing their name. (Except with mirrors - outside the scope of this answer.)
You could remove the class altogether and just use a Map<String, String>.
Map<String, String> movie = {
  'movieTitle': 'Toy Story',
  'actor': 'Tom Hanks',
};
You could add some bool methods on the class.
bool hasNoActor() => actor == null;
...
List values = movieList.where((m) => !m.hasNoActor()).toList();
Or, you could pass a lambda to your mapper.
Movie movieTitle = Movie()
..name = 'Toy Story'
..actor = 'Tom Hanks';
Function hasActor = (Movie m) => m.actor != null;
List values = movieList.where(hasActor).toList();
I have a DataFrame, let's say:
val someDF = Seq(
(8, "bat"),
(64, "mouse"),
(-27, "horse")
).toDF("number", "word")
I want to send that DataFrame to a Kafka topic using Avro serialization and a schema registry. I believe I'm almost there, but I can't seem to get past the Task not serializable error. I understand there is a Kafka sink, but it doesn't communicate with the schema registry, which is a requirement.
object Holder extends Serializable{
def prop(): java.util.Properties = {
val props = new Properties()
props.put("schema.registry.url", schemaRegistryURL)
props.put("key.serializer", classOf[KafkaAvroSerializer].getCanonicalName)
props.put("value.serializer", classOf[KafkaAvroSerializer].getCanonicalName)
props.put("bootstrap.servers", brokers)
props
}
def vProps(props: java.util.Properties): kafka.utils.VerifiableProperties = {
val vProps = new kafka.utils.VerifiableProperties(props)
vProps
}
def messageSchema(vProps: kafka.utils.VerifiableProperties): org.apache.avro.Schema = {
val ser = new KafkaAvroEncoder(vProps) // note: never used below
val avro_schema = new RestService(schemaRegistryURL).getLatestVersion(subjectValueName)
val messageSchema = new Schema.Parser().parse(avro_schema.getSchema)
messageSchema
}
def avroRecord(messageSchema: org.apache.avro.Schema): org.apache.avro.generic.GenericData.Record = {
val avroRecord = new GenericData.Record(messageSchema)
avroRecord
}
def ProducerRecord(avroRecord:org.apache.avro.generic.GenericData.Record): org.apache.kafka.clients.producer.ProducerRecord[org.apache.avro.generic.GenericRecord,org.apache.avro.generic.GenericRecord] = {
val record = new ProducerRecord[GenericRecord, GenericRecord](topicWrite, avroRecord)
record
}
def producer(props: java.util.Properties): KafkaProducer[GenericRecord, GenericRecord] = {
val producer = new KafkaProducer[GenericRecord, GenericRecord](props)
producer
}
}
val prod: (String, String) => String = (
    number: String,
    word: String
) => {
val prop = Holder.prop()
val vProps = Holder.vProps(prop)
val mSchema = Holder.messageSchema(vProps)
val aRecord = Holder.avroRecord(mSchema)
aRecord.put("number", number)
aRecord.put("word", word)
val record = Holder.ProducerRecord(aRecord)
val producer = Holder.producer(prop)
producer.send(record)
"sent"
}
val prodUDF: org.apache.spark.sql.expressions.UserDefinedFunction =
    udf((
        number: String,
        word: String
    ) => prod(number, word))
val testDF = someDF.withColumn("sent", prodUDF(col("number"), col("word")))
KafkaProducer is not serializable.
Create the KafkaProducer inside prod() instead of creating it outside, so each executor constructs its own instance and nothing non-serializable has to be shipped from the driver.
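One common way to avoid rebuilding the producer on every row is to hold it in a lazy val inside an object, so it is created once per executor JVM and is never serialized. This is a sketch built on the Holder helpers above, not part of the original answer:
// Sketch: a lazy val in an object is initialized on first use on each
// executor, so the KafkaProducer itself never has to be serialized.
object ProducerHolder extends Serializable {
  lazy val producer: KafkaProducer[GenericRecord, GenericRecord] =
    Holder.producer(Holder.prop())
}

val prod: (String, String) => String = (number: String, word: String) => {
  val schema = Holder.messageSchema(Holder.vProps(Holder.prop()))
  val avroRecord = Holder.avroRecord(schema)
  avroRecord.put("number", number)
  avroRecord.put("word", word)
  ProducerHolder.producer.send(Holder.ProducerRecord(avroRecord))
  "sent"
}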
I'm trying to create a user-defined type in Spark SQL, but I receive:
com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType, even when using their example. Has anyone made this work?
My code:
test("udt serialisation") {
val points = Seq(new ExamplePoint(1.3, 1.6), new ExamplePoint(1.3, 1.8))
val df = SparkContextForStdout.context.parallelize(points).toDF()
}
@SQLUserDefinedType(udt = classOf[ExamplePointUDT])
case class ExamplePoint(val x: Double, val y: Double)
/**
* User-defined type for [[ExamplePoint]].
*/
class ExamplePointUDT extends UserDefinedType[ExamplePoint] {
override def sqlType: DataType = ArrayType(DoubleType, false)
override def pyUDT: String = "pyspark.sql.tests.ExamplePointUDT"
override def serialize(obj: Any): Seq[Double] = {
obj match {
case p: ExamplePoint =>
Seq(p.x, p.y)
}
}
override def deserialize(datum: Any): ExamplePoint = {
datum match {
case values: Seq[_] =>
val xy = values.asInstanceOf[Seq[Double]]
assert(xy.length == 2)
new ExamplePoint(xy(0), xy(1))
case values: util.ArrayList[_] =>
val xy = values.asInstanceOf[util.ArrayList[Double]].asScala
new ExamplePoint(xy(0), xy(1))
}
}
override def userClass: Class[ExamplePoint] = classOf[ExamplePoint]
}
The useful part of the stack trace is this:
com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType
java.lang.ClassCastException: com.ubs.ged.risk.stdout.spark.ExamplePointUDT cannot be cast to org.apache.spark.sql.types.StructType
at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:316)
at org.apache.spark.sql.SQLContext$implicits$.rddToDataFrameHolder(SQLContext.scala:254)
It seems that the UDT needs to be used inside another class to work (as the type of a field). One way to use it directly is to wrap it in a Tuple1:
test("udt serialisation") {
val points = Seq(new Tuple1(new ExamplePoint(1.3, 1.6)), new Tuple1(new ExamplePoint(1.3, 1.8)))
val df = SparkContextForStdout.context.parallelize(points).toDF()
df.collect().foreach(println(_))
}