org.apache.kafka.common.serialization.Serdes Scala Examples

The following examples show how to use org.apache.kafka.common.serialization.Serdes in Scala. Each example notes the project it was taken from and that project's license.
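For orientation, here is a minimal sketch of the two usual entry points: the built-in factory methods, and Serdes.serdeFrom, which pairs a Serializer and a Deserializer into a single Serde. It assumes only kafka-clients on the classpath.

import org.apache.kafka.common.serialization.{Serde, Serdes}

object SerdesBasics {
  // Built-in serdes for common types.
  val stringSerde: Serde[String] = Serdes.String()
  val longSerde: Serde[java.lang.Long] = Serdes.Long()

  // Pair an existing Serializer/Deserializer into a single Serde.
  val bytesSerde: Serde[Array[Byte]] =
    Serdes.serdeFrom(Serdes.ByteArray().serializer(), Serdes.ByteArray().deserializer())
}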
Example 1
Source File: ExampleKafkaStreamsSpec.scala, from scalatest-embedded-kafka (MIT License)
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.Codecs._
import net.manub.embeddedkafka.ConsumerExtensions._
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.{Matchers, WordSpec}

class ExampleKafkaStreamsSpec
    extends WordSpec
    with Matchers
    with EmbeddedKafkaStreamsAllInOne {

  import net.manub.embeddedkafka.Codecs.stringKeyValueCrDecoder

  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  val (inTopic, outTopic) = ("in", "out")

  val stringSerde: Serde[String] = Serdes.String()

  "A Kafka streams test" should {
    "be easy to run with streams and consumer lifecycle management" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        publishToKafka(inTopic, "baz", "yaz")
        withConsumer[String, String, Unit] { consumer =>
          val consumedMessages: Stream[(String, String)] =
            consumer.consumeLazily(outTopic)
          consumedMessages.take(2) should be(
            Seq("hello" -> "world", "foo" -> "bar"))
          consumedMessages.drop(2).head should be("baz" -> "yaz")
        }
      }
    }

    "allow support creating custom consumers" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        val consumer = newConsumer[String, String]()
        consumer.consumeLazily[(String, String)](outTopic).take(2) should be(
          Seq("hello" -> "world", "foo" -> "bar"))
        consumer.close()
      }
    }

    "allow for easy string based testing" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreamsWithStringConsumer(Seq(inTopic, outTopic),
                                   streamBuilder.build()) { consumer =>
        publishToKafka(inTopic, "hello", "world")
        consumer.consumeLazily[(String, String)](outTopic).head should be(
          "hello" -> "world")
      }
    }
  }
} 
Example 2
Source File: CirceSerdes.scala, from kafka-streams-circe (Apache License 2.0)
package com.goyeau.kafka.streams.circe

import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.{Decoder, Encoder}
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serdes, Serializer}

object CirceSerdes {

  implicit def serializer[T: Encoder]: Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def serialize(topic: String, caseClass: T): Array[Byte] =
        Encoder[T].apply(caseClass).noSpaces.getBytes(StandardCharsets.UTF_8)
      override def close(): Unit = ()
    }

  implicit def deserializer[T: Decoder]: Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def deserialize(topic: String, data: Array[Byte]): T =
        Option(data).fold(null.asInstanceOf[T]) { data =>
          decode[T](new String(data, StandardCharsets.UTF_8))
            .fold(error => throw new SerializationException(error), identity)
        }
      override def close(): Unit = ()
    }

  implicit def serde[CC: Encoder: Decoder]: Serde[CC] = Serdes.serdeFrom(serializer, deserializer)
} 
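A hypothetical usage sketch for CirceSerdes: with circe's generic derivation supplying the Encoder and Decoder, a Serde for a case class is a one-liner. The User type and topic name are illustrative, not part of the library.

import com.goyeau.kafka.streams.circe.CirceSerdes
import io.circe.generic.auto._
import org.apache.kafka.common.serialization.Serde

object CirceSerdesUsage {
  case class User(name: String, age: Int) // illustrative type, not part of the library

  val userSerde: Serde[User] = CirceSerdes.serde[User]

  // Round-trip a value through JSON bytes.
  val bytes: Array[Byte] = userSerde.serializer().serialize("users", User("ada", 36))
  val user: User = userSerde.deserializer().deserialize("users", bytes)
}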
Example 3
Source File: AggregationExampleWithSAM.scala, from kafka-streams-scala-examples (Apache License 2.0)
package com.knoldus.kafka.examples

import java.util.Properties

import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream._
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

import scala.collection.JavaConverters._


object AggregationExampleWithSAM {
  def main(args: Array[String]): Unit = {
    val config = {
      val properties = new Properties()
      properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-application")
      properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
      properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties
    }

    val stringSerde = Serdes.String()
    val longSerde = Serdes.Long()

    val builder = new KStreamBuilder()
    val originalStream = builder.stream("SourceTopic")

    // Works only with Scala 2.12.x, where Scala lambdas convert to Kafka's
    // single-method Java interfaces (SAM conversion).
    val mappedStream: KTable[String, java.lang.Long] =
      originalStream
        .flatMapValues((value: String) => value.toLowerCase.split("\\W+").toIterable.asJava)
        .groupBy((_, word) => word)
        .count("Counts")
    mappedStream.to(stringSerde, longSerde, "SinkTopic")

    val streams = new KafkaStreams(builder, config)
    streams.start()
  }
} 
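KStreamBuilder and count(String) above belong to the pre-1.0 Streams API and were removed in later releases. For comparison, here is a hedged sketch of the same word count against the kafka-streams-scala DSL (assuming the package layout of Kafka 2.1 through 2.5), where serdes are resolved implicitly:

import java.util.Properties

import org.apache.kafka.streams.scala.ImplicitConversions._
import org.apache.kafka.streams.scala.Serdes._
import org.apache.kafka.streams.scala.StreamsBuilder
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

object AggregationExampleModern {
  def main(args: Array[String]): Unit = {
    val properties = new Properties()
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-application")
    properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")

    val builder = new StreamsBuilder
    builder
      .stream[String, String]("SourceTopic")
      .flatMapValues(_.toLowerCase.split("\\W+")) // Iterable of words per line
      .groupBy((_, word) => word)
      .count()                                    // materialized from the implicit serdes
      .toStream
      .to("SinkTopic")

    val streams = new KafkaStreams(builder.build(), properties)
    streams.start()
  }
}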
Example 4
Source File: AggregationExample.scala, from kafka-streams-scala-examples (Apache License 2.0)
package com.knoldus.kafka.examples

import java.util.Properties

import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream.{KStreamBuilder, KeyValueMapper, ValueMapper}
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

import scala.collection.JavaConverters._


object AggregationExample {
  def main(args: Array[String]): Unit = {
    val config = {
      val properties = new Properties()
      properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-application")
      properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
      properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties
    }

    val stringSerde = Serdes.String()
    val longSerde = Serdes.Long()

    val builder = new KStreamBuilder()
    val originalStream = builder.stream("SourceTopic")

    val mappedStream =
      originalStream.flatMapValues[String] {
        new ValueMapper[String, java.lang.Iterable[java.lang.String]]() {
          override def apply(value: String): java.lang.Iterable[java.lang.String] = {
            value.toLowerCase.split("\\W+").toIterable.asJava
          }
        }
      }.groupBy {
        new KeyValueMapper[String, String, String]() {
          override def apply(key: String, word: String): String = word
        }
      }.count("Counts")
    mappedStream.to(stringSerde, longSerde, "SinkTopic")

    val streams = new KafkaStreams(builder, config)
    streams.start()
  }
} 
Example 5
Source File: ExampleKafkaStreamsSpec.scala, from embedded-kafka (MIT License)
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.Codecs._
import net.manub.embeddedkafka.ConsumerExtensions._
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import net.manub.embeddedkafka.streams.EmbeddedKafkaStreams._
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ExampleKafkaStreamsSpec extends AnyWordSpec with Matchers {
  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  val (inTopic, outTopic) = ("in", "out")

  val stringSerde: Serde[String] = Serdes.String()

  "A Kafka streams test" should {
    "be easy to run with streams and consumer lifecycle management" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        publishToKafka(inTopic, "baz", "yaz")
        withConsumer[String, String, Assertion] { consumer =>
          val consumedMessages =
            consumer.consumeLazily[(String, String)](outTopic)
          consumedMessages.take(2).toList should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
          val h :: _ = consumedMessages.drop(2).toList
          h should be("baz" -> "yaz")
        }
      }
    }

    "allow support creating custom consumers" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")

        withConsumer[String, String, Assertion] { consumer =>
          consumer.consumeLazily[(String, String)](outTopic).take(2) should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
        }
      }
    }

    "allow for easy string based testing" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build())(
        withConsumer[String, String, Assertion]({ consumer =>
          publishToKafka(inTopic, "hello", "world")
          val h :: _ = consumer.consumeLazily[(String, String)](outTopic).toList
          h should be("hello" -> "world")
        })
      )(config)

    }
  }
} 
Example 6
Source File: ModelStateStore.scala, from kafka-with-akka-streams-kafka-streams-tutorial (Apache License 2.0)
package com.lightbend.scala.kafkastreams.store.store.custom

import com.lightbend.java.configuration.kafka.ApplicationKafkaParameters
import com.lightbend.scala.modelServer.model._
import com.lightbend.scala.kafkastreams.store.StoreState
import com.lightbend.scala.kafkastreams.store.store.ModelStateSerde
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.processor.{ProcessorContext, StateRestoreCallback, StateStore}
import org.apache.kafka.streams.state.internals.StateStoreProvider
import org.apache.kafka.streams.state.{QueryableStoreType, StateSerdes}


class ModelStateStore(override val name: String, loggingEnabled: Boolean) extends StateStore with ReadableModelStateStore {

  import ApplicationKafkaParameters._

  var state = new StoreState
  val changelogKey = STORE_ID
  var changeLogger: ModelStateStoreChangeLogger[Integer, StoreState] = _
  var open = false

  override def init(context: ProcessorContext, root: StateStore): Unit = {
    val serdes = new StateSerdes[Integer, StoreState](name, Serdes.Integer, new ModelStateSerde)
    changeLogger = new ModelStateStoreChangeLogger[Integer, StoreState](name, context, serdes)
    if (root != null && loggingEnabled)
      context.register(root, loggingEnabled, new StateRestoreCallback() {
        override def restore(key: Array[Byte], value: Array[Byte]): Unit = {
          if (value == null) state.zero()
          else state = serdes.valueFrom(value)
        }
      })
    open = true
  }

  override def flush(): Unit = {
    if (loggingEnabled) changeLogger.logChange(changelogKey, state)
  }

  override def close(): Unit = {
    open = false
  }

  override def persistent: Boolean = false

  override def isOpen: Boolean = open

  def getCurrentModel: Model = state.currentModel.orNull

  def setCurrentModel(currentModel: Model): Unit = {
    state.currentModel = Some(currentModel)
  }

  def getNewModel: Model = state.newModel.orNull

  def setNewModel(newModel: Model): Unit = {
    state.newModel = Some(newModel)
  }

  override def getCurrentServingInfo: ModelToServeStats = state.currentState.getOrElse(ModelToServeStats.empty)

  def setCurrentServingInfo(currentServingInfo: ModelToServeStats): Unit = {
    state.currentState = Some(currentServingInfo)
  }

  def getNewServingInfo: ModelToServeStats = state.newState.getOrElse(ModelToServeStats.empty)

  def setNewServingInfo(newServingInfo: ModelToServeStats): Unit = {
    state.newState = Some(newServingInfo)
  }
}

class ModelStateStoreType extends QueryableStoreType[ReadableModelStateStore] {

  override def accepts(stateStore: StateStore): Boolean =
    stateStore.isInstanceOf[ModelStateStore]

  override def create(provider: StateStoreProvider, storeName: String): ReadableModelStateStore =
    provider.stores(storeName, this).get(0)
} 
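ModelStateStoreType is the piece that makes the store reachable through interactive queries. A minimal lookup sketch, assuming a running KafkaStreams instance and the pre-2.5 store(String, QueryableStoreType) signature; the store name "model-store" is an assumption for illustration:

import com.lightbend.scala.kafkastreams.store.store.custom.{ModelStateStoreType, ReadableModelStateStore}
import com.lightbend.scala.modelServer.model.ModelToServeStats
import org.apache.kafka.streams.KafkaStreams

object ModelStateStoreUsage {
  def currentStats(streams: KafkaStreams): ModelToServeStats = {
    // "model-store" is illustrative; use the name the topology registered.
    val store: ReadableModelStateStore =
      streams.store("model-store", new ModelStateStoreType)
    store.getCurrentServingInfo
  }
}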
Example 7
Source File: AvroSerdes.scala, from embedded-kafka-schema-registry (MIT License)
package net.manub.embeddedkafka.schemaregistry.avro

import io.confluent.kafka.serializers.{
  AbstractKafkaSchemaSerDeConfig,
  KafkaAvroDeserializerConfig,
  KafkaAvroDeserializer => ConfluentKafkaAvroDeserializer,
  KafkaAvroSerializer => ConfluentKafkaAvroSerializer
}
import net.manub.embeddedkafka.schemaregistry.EmbeddedKafkaConfig
import org.apache.avro.generic.GenericRecord
import org.apache.avro.specific.SpecificRecord
import org.apache.kafka.common.serialization.{Serde, Serdes}

import scala.jdk.CollectionConverters._

@deprecated(
  "Avro-related classes will be removed soon",
  since = "5.5.0"
)
object AvroSerdes {

  protected def configForSchemaRegistry(
      implicit config: EmbeddedKafkaConfig
  ): Map[String, Object] =
    Map(
      AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -> s"http://localhost:${config.schemaRegistryPort}"
    )

  protected def specificAvroReaderConfigForSchemaRegistry(
      implicit config: EmbeddedKafkaConfig
  ): Map[String, Object] =
    configForSchemaRegistry ++ Map(
      KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG -> true.toString
    )

  def specific[T <: SpecificRecord](
      isKey: Boolean = false,
      extraConfig: Map[String, Object] = Map.empty
  )(
      implicit config: EmbeddedKafkaConfig
  ): Serde[T] =
    serdeFrom[T](
      configForSchemaRegistry ++ extraConfig,
      specificAvroReaderConfigForSchemaRegistry ++ extraConfig, // needed to support SpecificRecord
      isKey
    )

  def generic(
      isKey: Boolean = false,
      extraConfig: Map[String, Object] = Map.empty
  )(
      implicit config: EmbeddedKafkaConfig
  ): Serde[GenericRecord] =
    serdeFrom[GenericRecord](
      configForSchemaRegistry ++ extraConfig,
      configForSchemaRegistry ++ extraConfig,
      isKey
    )

  private def serdeFrom[T](
      serConfig: Map[String, Object],
      deserConfig: Map[String, Object],
      isKey: Boolean
  ): Serde[T] = {
    val ser = new ConfluentKafkaAvroSerializer
    ser.configure(serConfig.asJava, isKey)
    val deser = new ConfluentKafkaAvroDeserializer
    deser.configure(deserConfig.asJava, isKey)

    Serdes.serdeFrom(ser, deser).asInstanceOf[Serde[T]]
  }
}
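A hypothetical usage sketch: the implicit EmbeddedKafkaConfig supplies the schema registry port, and the resulting Serde can be handed to producers, consumers, or a Streams topology. The no-argument config relies on the library's default ports, which is an assumption here; note the object is deprecated as of 5.5.0.

import net.manub.embeddedkafka.schemaregistry.EmbeddedKafkaConfig
import net.manub.embeddedkafka.schemaregistry.avro.AvroSerdes
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.common.serialization.Serde

object AvroSerdesUsage {
  // Library default ports are assumed; override them as needed.
  implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

  val genericSerde: Serde[GenericRecord] = AvroSerdes.generic()
}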