org.apache.kafka.common.serialization.Deserializer Scala Examples

The following examples show how to use org.apache.kafka.common.serialization.Deserializer in Scala. Each example is an excerpt from the open-source project named in the header above its code.
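Before the project examples, here is a minimal sketch of the Deserializer contract itself: a Deserializer[T] turns the raw bytes of a record key or value back into T, and an instance can be handed directly to a KafkaConsumer constructor. This sketch is not taken from any of the projects listed below; the broker address, group id, topic and class names are placeholders.

import java.nio.charset.StandardCharsets.UTF_8
import java.util.{Collections, Properties}

import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.serialization.Deserializer

// A trivial Deserializer that decodes the raw bytes as a UTF-8 String.
class Utf8Deserializer extends Deserializer[String] {
  override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): String =
    if (data == null) null else new String(data, UTF_8)
  override def close(): Unit = ()
}

object Utf8DeserializerUsage extends App {
  val props = new Properties()
  props.put("bootstrap.servers", "localhost:9092") // placeholder broker
  props.put("group.id", "example-group")           // placeholder group id

  // Deserializer instances can be passed to the constructor instead of being
  // configured by class name via the key/value.deserializer properties.
  val consumer = new KafkaConsumer[String, String](props, new Utf8Deserializer, new Utf8Deserializer)
  consumer.subscribe(Collections.singletonList("example-topic"))
}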
Example 1
Source File: CirceSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdecirce

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import io.circe.{ Decoder, Encoder, Printer }
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait CirceSupport {
  implicit def toSerializer[T >: Null](implicit encoder: Encoder[T],
                                       printer: Printer = Printer.noSpaces): Serializer[T] =
    new Serializer[T] {
      import io.circe.syntax._
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try printer.pretty(data.asJson).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null](implicit decoder: Decoder[T]): Deserializer[T] =
    new Deserializer[T] {
      import io.circe._
      import cats.syntax.either._

      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          parser
            .parse(new String(data, UTF_8))
            .valueOr(e => throw new SerializationException(e))
            .as[T]
            .valueOr(e => throw new SerializationException(e))
    }

  implicit def toSerde[T >: Null](implicit encoder: Encoder[T],
                                  printer: Printer = Printer.noSpaces,
                                  decoder: Decoder[T]): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object CirceSupport extends CirceSupport 
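A short usage sketch for the trait above (not part of the kafka-serde-scala sources; the User case class and topic name are made up, and circe-generic is assumed as an extra dependency): with a circe Encoder and Decoder in scope, the implicit conversions materialize a Serde on demand.

import io.circe.generic.auto._
import io.github.azhur.kafkaserdecirce.CirceSupport._
import org.apache.kafka.common.serialization.Serde

case class User(name: String, age: Int)

object CirceSupportUsage extends App {
  // toSerde assembles a Serde from the Encoder/Decoder derived by circe-generic.
  val userSerde: Serde[User] = toSerde[User]

  val bytes = userSerde.serializer().serialize("users-topic", User("Jane", 30))
  println(userSerde.deserializer().deserialize("users-topic", bytes)) // prints User(Jane,30)
}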
Example 2
Source File: ConfigureSerializationSpec.scala    From scala-kafka-client   with MIT License
package cakesolutions.kafka

import java.util
import com.typesafe.config.ConfigFactory
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

class ConfigureSerializationSpec extends KafkaIntSpec {

  private class MockDeserializer() extends Deserializer[String] {
    var configuration: String = _
    var isKeyDeserializer: Boolean = _

    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
      configuration = configs.get("mock.config").toString
      isKeyDeserializer = isKey
    }

    override def close(): Unit = { }

    override def deserialize(topic: String, data: Array[Byte]): String = new String(data)
  }

  private class MockSerializer() extends Serializer[String] {
    var configuration: String = _
    var isKeySerializer: Boolean = _

    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
      configuration = configs.get("mock.config").toString
      isKeySerializer = isKey
    }

    override def serialize(topic: String, data: String): Array[Byte] = data.getBytes

    override def close(): Unit = { }
  }

  "Producer" should "configure the serializers" in {
    val keySerializer = new MockSerializer
    val valueSerializer = new MockSerializer

    val conf = KafkaProducer.Conf(
      ConfigFactory.parseString(
        s"""
           | bootstrap.servers = "localhost:$kafkaPort",
           | mock.config = "mock_value"
         """.stripMargin
      ), keySerializer, valueSerializer)

    val producer = KafkaProducer(conf)
    producer.close()

    keySerializer.configuration shouldEqual "mock_value"
    keySerializer.isKeySerializer shouldEqual true
    valueSerializer.configuration shouldEqual "mock_value"
    valueSerializer.isKeySerializer shouldEqual false
  }

  "Consumer" should "configure the deserializers" in {
    val keyDeserializer = new MockDeserializer
    val valueDeserializer = new MockDeserializer

    val conf = KafkaConsumer.Conf(
      ConfigFactory.parseString(
        s"""
           | bootstrap.servers = "localhost:$kafkaPort",
           | mock.config = "mock_value"
         """.stripMargin
      ), keyDeserializer, valueDeserializer)

    val consumer = KafkaConsumer(conf)
    consumer.close()

    keyDeserializer.configuration shouldEqual "mock_value"
    keyDeserializer.isKeyDeserializer shouldEqual true
    valueDeserializer.configuration shouldEqual "mock_value"
    valueDeserializer.isKeyDeserializer shouldEqual false
  }
} 
Example 3
Source File: AvroSerde.scala    From event-sourcing-kafka-streams   with MIT License
package org.amitayh.invoices.common.serde

import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.time.Instant
import java.util
import java.util.UUID

import com.sksamuel.avro4s._
import org.amitayh.invoices.common.domain._
import org.amitayh.invoices.common.serde.UuidConverters.{fromByteBuffer, toByteBuffer}
import org.apache.avro.Schema
import org.apache.avro.Schema.Field
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

object AvroSerde {
  implicit val instantToSchema: ToSchema[Instant] = new ToSchema[Instant] {
    override val schema: Schema = Schema.create(Schema.Type.STRING)
  }

  implicit val instantToValue: ToValue[Instant] = new ToValue[Instant] {
    override def apply(value: Instant): String = value.toString
  }

  implicit val instantFromValue: FromValue[Instant] = new FromValue[Instant] {
    override def apply(value: Any, field: Field): Instant =
      Instant.parse(value.toString)
  }

  implicit val uuidToSchema: ToSchema[UUID] = new ToSchema[UUID] {
    override val schema: Schema = Schema.create(Schema.Type.BYTES)
  }

  implicit val uuidToValue: ToValue[UUID] = new ToValue[UUID] {
    override def apply(value: UUID): ByteBuffer = toByteBuffer(value)
  }

  implicit val uuidFromValue: FromValue[UUID] = new FromValue[UUID] {
    override def apply(value: Any, field: Field): UUID =
      fromByteBuffer(value.asInstanceOf[ByteBuffer])
  }

  val CommandSerde: Serde[Command] = serdeFor[Command]

  val CommandResultSerde: Serde[CommandResult] = serdeFor[CommandResult]

  val SnapshotSerde: Serde[InvoiceSnapshot] = serdeFor[InvoiceSnapshot]

  val EventSerde: Serde[Event] = serdeFor[Event]

  def toBytes[T: SchemaFor: ToRecord](data: T): Array[Byte] = {
    val baos = new ByteArrayOutputStream
    val output = AvroOutputStream.binary[T](baos)
    output.write(data)
    output.close()
    baos.toByteArray
  }

  def fromBytes[T: SchemaFor: FromRecord](data: Array[Byte]): T = {
    val input = AvroInputStream.binary[T](data)
    input.iterator.next()
  }

  private def serdeFor[T: SchemaFor: ToRecord: FromRecord]: Serde[T] = new Serde[T] {
    override val serializer: Serializer[T] = new Serializer[T] {
      override def serialize(topic: String, data: T): Array[Byte] = toBytes(data)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def close(): Unit = ()
    }
    override val deserializer: Deserializer[T] = new Deserializer[T] {
      override def deserialize(topic: String, data: Array[Byte]): T = fromBytes(data)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def close(): Unit = ()
    }
    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
    override def close(): Unit = ()
  }
} 
Example 4
Source File: UuidSerde.scala    From event-sourcing-kafka-streams   with MIT License
package org.amitayh.invoices.common.serde

import java.util
import java.util.UUID

import org.amitayh.invoices.common.serde.UuidConverters.{fromBytes, toBytes}
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

object UuidSerializer extends Serializer[UUID] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, uuid: UUID): Array[Byte] = toBytes(uuid)
  override def close(): Unit = ()
}

object UuidDeserializer extends Deserializer[UUID] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): UUID = fromBytes(data)
  override def close(): Unit = ()
}

object UuidSerde extends Serde[UUID] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override val serializer: Serializer[UUID] = UuidSerializer
  override val deserializer: Deserializer[UUID] = UuidDeserializer
  override def close(): Unit = ()
} 
Example 5
Source File: Config.scala    From event-sourcing-kafka-streams   with MIT License
package org.amitayh.invoices.common

import org.amitayh.invoices.common.serde.{AvroSerde, UuidSerde}
import org.apache.kafka.clients.admin.NewTopic
import org.apache.kafka.common.config.TopicConfig
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

import scala.collection.JavaConverters._
import scala.concurrent.duration._

object Config {
  val BootstrapServers = sys.env("BOOTSTRAP_SERVERS")

  object Stores {
    val Snapshots = "invoices.store.snapshots"
  }

  object Topics {
    sealed trait CleanupPolicy
    object CleanupPolicy {
      case object Compact extends CleanupPolicy
    }

    case class Topic[K, V](name: String,
                           keySerde: Serde[K],
                           valueSerde: Serde[V],
                           numPartitions: Int = 4,
                           replicationFactor: Short = 1,
                           retention: Option[Duration] = None,
                           cleanupPolicy: Option[CleanupPolicy] = None) {

      val keySerializer: Serializer[K] = keySerde.serializer

      val keyDeserializer: Deserializer[K] = keySerde.deserializer

      val valueSerializer: Serializer[V] = valueSerde.serializer

      val valueDeserializer: Deserializer[V] = valueSerde.deserializer

      def toNewTopic: NewTopic = {
        val emptyConfigs = Map.empty[String, String]
        val withRetention = retentionConfig.foldLeft(emptyConfigs)(_ + _)
        val withCleanupPolicy = cleanupPolicyConfig.foldLeft(withRetention)(_ + _)
        new NewTopic(name, numPartitions, replicationFactor)
          .configs(withCleanupPolicy.asJava)
      }

      private def retentionConfig: Option[(String, String)] = retention.map { retention =>
        val millis = if (retention.isFinite) retention.toMillis else -1
        TopicConfig.RETENTION_MS_CONFIG -> millis.toString
      }

      private def cleanupPolicyConfig: Option[(String, String)] = cleanupPolicy.map {
        case CleanupPolicy.Compact =>
          TopicConfig.CLEANUP_POLICY_CONFIG ->
            TopicConfig.CLEANUP_POLICY_COMPACT
      }

    }

    val Events = Topic(
      "invoices.topic.events",
      UuidSerde,
      AvroSerde.EventSerde,
      retention = Some(Duration.Inf))

    val Commands = Topic(
      "invoices.topic.commands",
      UuidSerde,
      AvroSerde.CommandSerde,
      retention = Some(5.minutes))

    val CommandResults = Topic(
      "invoices.topic.command-results",
      UuidSerde,
      AvroSerde.CommandResultSerde,
      retention = Some(5.minutes))

    val Snapshots = Topic(
      "invoices.topic.snapshots",
      UuidSerde,
      AvroSerde.SnapshotSerde,
      cleanupPolicy = Some(CleanupPolicy.Compact))

    val All = Set(Events, Commands, CommandResults, Snapshots)
  }
} 
Example 6
Source File: EmbeddedKafkaTest.scala    From apache-spark-test   with Apache License 2.0
package com.github.dnvriend.spark.kafka

import com.github.dnvriend.TestSpec
import net.manub.embeddedkafka.EmbeddedKafka
import org.apache.kafka.common.serialization.{ Deserializer, Serializer }

class EmbeddedKafkaTest extends TestSpec with EmbeddedKafka {
  final val TopicName = "MyTopic"

  def publish[T: Serializer](msg: T): Unit = publishToKafka(TopicName, msg)
  def consume[T: Deserializer]: T = consumeFirstMessageFrom(TopicName)

  import net.manub.embeddedkafka.Codecs._
  it should "setup and embedded kafka, create a topic, send a message and receive a message from the same topic" in withRunningKafka {
    publish("foo")
    consume[String] shouldBe "foo"
    publish("bar".getBytes)
    consume[Array[Byte]] shouldBe "bar".getBytes()

  }
} 
Example 7
Source File: ModelStateSerde.scala    From kafka-with-akka-streams-kafka-streams-tutorial   with Apache License 2.0
package com.lightbend.scala.kafkastreams.store.store

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import java.util

import com.lightbend.model.modeldescriptor.ModelDescriptor
import com.lightbend.scala.modelServer.model.PMML.PMMLModel
import com.lightbend.scala.modelServer.model.tensorflow.TensorFlowModel
import com.lightbend.scala.modelServer.model.{ModelToServeStats, ModelWithDescriptor}
import com.lightbend.scala.kafkastreams.store.StoreState
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}


class ModelStateSerde extends Serde[StoreState] {

  private val mserializer = new ModelStateSerializer()
  private val mdeserializer = new ModelStateDeserializer()

  override def deserializer() = mdeserializer

  override def serializer() = mserializer

  override def configure(configs: util.Map[String, _], isKey: Boolean) = {}

  override def close() = {}
}

object ModelStateDeserializer {
  val factories = Map(
    ModelDescriptor.ModelType.PMML.index -> PMMLModel,
    ModelDescriptor.ModelType.TENSORFLOW.index -> TensorFlowModel
  )
}

class ModelStateDeserializer extends Deserializer[StoreState] {

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}

  override def deserialize(topic: String, data: Array[Byte]): StoreState = {
    if(data != null) {
      val input = new DataInputStream(new ByteArrayInputStream(data))
      new StoreState(ModelWithDescriptor.readModel(input), ModelWithDescriptor.readModel(input),
        ModelToServeStats.readServingInfo(input), ModelToServeStats.readServingInfo(input))
    }
    else new StoreState()
  }

  override def close(): Unit = {}

}

class ModelStateSerializer extends Serializer[StoreState] {

  private val bos = new ByteArrayOutputStream()

  override def serialize(topic: String, state: StoreState): Array[Byte] = {
    bos.reset()
    val output = new DataOutputStream(bos)
    ModelWithDescriptor.writeModel(output, state.currentModel.orNull)
    ModelWithDescriptor.writeModel(output, state.newModel.orNull)
    ModelToServeStats.writeServingInfo(output, state.currentState.orNull)
    ModelToServeStats.writeServingInfo(output, state.newState.orNull)
    try {
      output.flush()
      output.close()
    } catch {
      case t: Throwable =>
    }
    bos.toByteArray
  }

  override def close(): Unit = {}

  override def configure(configs: util.Map[String, _], isKey: Boolean) = {}
} 
Example 8
Source File: JacksonJsonSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdejackson

import java.util

import com.fasterxml.jackson.databind.ObjectMapper
import Jackson.typeReference
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.reflect.runtime.universe._
import scala.util.control.NonFatal

trait JacksonJsonSupport {
  implicit def toSerializer[T <: AnyRef](implicit mapper: ObjectMapper): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try mapper.writeValueAsBytes(data)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      tt: TypeTag[T]
  ): Deserializer[T] =
    new Deserializer[T] {
      private val tr                                                             = typeReference[T]
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try mapper.readValue[T](data, tr)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      tt: TypeTag[T]
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object JacksonJsonSupport extends JacksonJsonSupport 
Example 9
Source File: JacksonFormatSchemaSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdejackson

import java.util

import scala.reflect.runtime.universe._

import com.fasterxml.jackson.core.FormatSchema
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import Jackson.typeReference

import scala.language.implicitConversions
import scala.reflect.ClassTag
import scala.util.control.NonFatal

trait JacksonFormatSchemaSupport {
  implicit def toSerializer[T <: AnyRef](implicit mapper: ObjectMapper,
                                         schema: FormatSchema): Serializer[T] =
    new Serializer[T] {
      private val writer                                                         = mapper.writer(schema)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try writer.writeValueAsBytes(data)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      schema: FormatSchema,
      tt: TypeTag[T]
  ): Deserializer[T] =
    new Deserializer[T] {
      private val reader                                                         = mapper.readerFor(typeReference[T]).`with`(schema)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try reader.readValue[T](data)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      schema: FormatSchema,
      ct: TypeTag[T]
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object JacksonFormatSchemaSupport extends JacksonFormatSchemaSupport 
Example 10
Source File: PlayJsonSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdeplayjson

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import io.github.azhur.kafkaserdeplayjson.PlayJsonSupport.PlayJsonError
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import play.api.libs.json.{ JsError, JsValue, Json, Reads, Writes }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait PlayJsonSupport {
  implicit def toSerializer[T <: AnyRef](
      implicit writes: Writes[T],
      printer: JsValue => String = Json.stringify
  ): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try printer(writes.writes(data)).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef: Manifest](
      implicit reads: Reads[T]
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          reads
            .reads(Json.parse(new String(data, UTF_8)))
            .recoverTotal { e =>
              throw new SerializationException(PlayJsonError(e))
            }
    }

  implicit def toSerde[T >: Null <: AnyRef: Manifest](
      implicit writes: Writes[T],
      reads: Reads[T],
      printer: JsValue => String = Json.stringify
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object PlayJsonSupport extends PlayJsonSupport {
  final case class PlayJsonError(error: JsError) extends RuntimeException {
    override def getMessage: String =
      JsError.toJson(error).toString()
  }
} 
Example 11
Source File: Avro4sBinarySupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdeavro4s

import java.io.ByteArrayOutputStream
import java.util

import com.sksamuel.avro4s.{
  AvroBinaryInputStream,
  AvroOutputStream,
  FromRecord,
  SchemaFor,
  ToRecord
}
import org.apache.avro.file.SeekableByteArrayInput
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.{ Failure, Success }
import scala.util.control.NonFatal

trait Avro4sBinarySupport {
  implicit def toSerializer[T >: Null](implicit schemaFor: SchemaFor[T],
                                       toRecord: ToRecord[T]): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else {
          val baos = new ByteArrayOutputStream()
          try {
            val output = AvroOutputStream.binary[T](baos)
            try {
              output.write(data)
            } finally {
              output.close()
            }
            baos.toByteArray
          } catch {
            case NonFatal(e) => throw new SerializationException(e)
          } finally {
            baos.close()
          }
        }
    }

  implicit def toDeserializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else {
          val it = new AvroBinaryInputStream[T](new SeekableByteArrayInput(data),
                                                schemas.writerSchema,
                                                schemas.readerSchema).tryIterator
          if (it.hasNext) {
            it.next() match {
              case Success(record) => record
              case Failure(err)    => throw new SerializationException(err)
            }
          } else {
            throw new SerializationException("Empty avro4s binary iterator")
          }
        }

    }

  implicit def toSerde[T >: Null](
      implicit schemaFor: SchemaFor[T],
      toRecord: ToRecord[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object Avro4sBinarySupport extends Avro4sBinarySupport 
Example 12
Source File: Avro4sDataSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdeavro4s

import java.io.ByteArrayOutputStream
import java.util

import com.sksamuel.avro4s.{
  AvroDataInputStream,
  AvroDataOutputStream,
  FromRecord,
  SchemaFor,
  ToRecord
}
import org.apache.avro.file.{ CodecFactory, SeekableByteArrayInput }
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }

trait Avro4sDataSupport {
  implicit def toSerializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      toRecord: ToRecord[T],
      codec: CodecFactory = CodecFactory.nullCodec()
  ): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else {
          val baos = new ByteArrayOutputStream()
          try {
            val output = AvroDataOutputStream[T](baos, codec)
            try {
              output.write(data)
            } finally {
              output.close()
            }
            baos.toByteArray
          } catch {
            case NonFatal(e) => throw new SerializationException(e)
          } finally {
            baos.close()
          }
        }
    }

  implicit def toDeserializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else {
          val it = new AvroDataInputStream[T](new SeekableByteArrayInput(data),
                                              schemas.writerSchema,
                                              schemas.readerSchema).tryIterator
          if (it.hasNext) {
            it.next() match {
              case Success(record) => record
              case Failure(err)    => throw new SerializationException(err)
            }
          } else {
            throw new SerializationException("Empty avro4s data iterator")
          }
        }

    }

  implicit def toSerde[T >: Null](implicit schemaFor: SchemaFor[T],
                                  toRecord: ToRecord[T],
                                  fromRecord: FromRecord[T],
                                  codec: CodecFactory = CodecFactory.nullCodec()): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object Avro4sDataSupport extends Avro4sDataSupport 
Example 13
Source File: Avro4sJsonSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdeavro4s

import java.io.ByteArrayOutputStream
import java.util

import com.sksamuel.avro4s.{
  AvroJsonInputStream,
  AvroOutputStream,
  FromRecord,
  SchemaFor,
  ToRecord
}
import org.apache.avro.file.SeekableByteArrayInput
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }

trait Avro4sJsonSupport {
  implicit def toSerializer[T >: Null](implicit schemaFor: SchemaFor[T],
                                       toRecord: ToRecord[T]): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else {
          val baos = new ByteArrayOutputStream()
          try {
            val output = AvroOutputStream.json[T](baos)
            try {
              output.write(data)
            } finally {
              output.close()
            }
            baos.toByteArray
          } catch {
            case NonFatal(e) => throw new SerializationException(e)
          } finally {
            baos.close()
          }
        }
    }

  implicit def toDeserializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          new AvroJsonInputStream[T](new SeekableByteArrayInput(data),
                                     schemas.writerSchema,
                                     schemas.readerSchema).singleEntity match {
            case Success(json)  => json
            case Failure(error) => throw new SerializationException(error)
          }
    }

  implicit def toSerde[T >: Null](
      implicit schemaFor: SchemaFor[T],
      toRecord: ToRecord[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object Avro4sJsonSupport extends Avro4sJsonSupport 
Example 14
Source File: UpickleSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdeupickle

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import upickle.default.{ Reader, Writer, read, write }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait UpickleSupport {
  implicit def toSerializer[T >: Null](implicit writer: Writer[T]): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try write(data).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null](implicit reader: Reader[T]): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try read(new String(data, UTF_8))
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null](implicit reader: Reader[T], writer: Writer[T]): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object UpickleSupport extends UpickleSupport 
Example 15
Source File: JsoniterScalaSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdejsoniterscala

import java.util

import com.github.plokhotnyuk.jsoniter_scala.core._
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait JsoniterScalaSupport {
  implicit def toSerializer[T >: Null](
      implicit codec: JsonValueCodec[T],
      writerConfig: WriterConfig = WriterConfig()
  ): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try writeToArray(data, writerConfig)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null](
      implicit codec: JsonValueCodec[T],
      readerConfig: ReaderConfig = ReaderConfig()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try readFromArray(data, readerConfig)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null](
      implicit codec: JsonValueCodec[T],
      writerConfig: WriterConfig = WriterConfig(),
      readerConfig: ReaderConfig = ReaderConfig()
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object JsoniterScalaSupport extends JsoniterScalaSupport 
Example 16
Source File: Json4sSupport.scala    From kafka-serde-scala   with Apache License 2.0
package io.github.azhur.kafkaserdejson4s

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import org.json4s.{ Formats, Serialization }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait Json4sSupport {
  implicit def toSerializer[T <: AnyRef](implicit serialization: Serialization,
                                         formats: Formats): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try serialization.write[T](data).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef: Manifest](
      implicit serialization: Serialization,
      formats: Formats
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try serialization.read[T](new String(data, UTF_8))
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null <: AnyRef: Manifest](implicit serialization: Serialization,
                                                      formats: Formats): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit                                                 = {}
      override def serializer(): Serializer[T]                                   = toSerializer[T]
      override def deserializer(): Deserializer[T]                               = toDeserializer[T]
    }
}

object Json4sSupport extends Json4sSupport 
Example 17
Source File: avroMarshallers.scala    From scalatest-embedded-kafka   with MIT License
package net.manub.embeddedkafka.avro

import java.io.ByteArrayOutputStream

import kafka.utils.VerifiableProperties
import org.apache.avro.Schema
import org.apache.avro.io._
import org.apache.avro.specific.{
  SpecificDatumReader,
  SpecificDatumWriter,
  SpecificRecord
}
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

class KafkaAvroDeserializer[T <: SpecificRecord](schema: Schema)
    extends Deserializer[T]
    with NoOpConfiguration
    with NoOpClose {

  private val reader = new SpecificDatumReader[T](schema)

  override def deserialize(topic: String, data: Array[Byte]): T = {
    val decoder = DecoderFactory.get().binaryDecoder(data, null)
    reader.read(null.asInstanceOf[T], decoder)
  }
}

class KafkaAvroSerializer[T <: SpecificRecord]()
    extends Serializer[T]
    with NoOpConfiguration
    with NoOpClose {

  private def toBytes(nullableData: T): Array[Byte] =
    Option(nullableData).fold[Array[Byte]](null) { data =>
      val writer: DatumWriter[T] = new SpecificDatumWriter[T](data.getSchema)
      val out = new ByteArrayOutputStream()
      val encoder = EncoderFactory.get.binaryEncoder(out, null)

      writer.write(data, encoder)
      encoder.flush()
      out.close()

      out.toByteArray
    }

  override def serialize(topic: String, data: T): Array[Byte] =
    toBytes(data)
}

sealed trait NoOpConfiguration {
  def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
}

sealed trait NoOpClose {
  def close(): Unit = ()
} 
Example 18
Source File: ConsumerBuilder.scala    From asura   with MIT License
package asura.kafka

import akka.actor.ActorSystem
import akka.kafka.scaladsl.Consumer
import akka.kafka.scaladsl.Consumer.Control
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.stream.scaladsl.Source
import io.confluent.kafka.serializers.{AbstractKafkaAvroSerDeConfig, KafkaAvroDeserializer, KafkaAvroDeserializerConfig}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

import scala.collection.JavaConverters._

object ConsumerBuilder {

  def buildAvroSource[V](
                          brokerUrl: String,
                          schemaRegisterUrl: String,
                          group: String,
                          topics: Set[String],
                          resetType: String = "latest",
                        )(implicit system: ActorSystem): Source[ConsumerRecord[String, V], Control] = {

    val kafkaAvroSerDeConfig = Map[String, Any](
      AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG -> schemaRegisterUrl,
      KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG -> true.toString
    )
    val consumerSettings: ConsumerSettings[String, V] = {
      val kafkaAvroDeserializer = new KafkaAvroDeserializer()
      kafkaAvroDeserializer.configure(kafkaAvroSerDeConfig.asJava, false)
      val deserializer = kafkaAvroDeserializer.asInstanceOf[Deserializer[V]]

      ConsumerSettings(system, new StringDeserializer, deserializer)
        .withBootstrapServers(brokerUrl)
        .withGroupId(group)
        .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, resetType)
    }
    Consumer.plainSource(consumerSettings, Subscriptions.topics(topics))
  }
} 
Example 19
Source File: GenericSerde.scala    From avro4s   with Apache License 2.0
package com.sksamuel.avro4s.kafka

import java.io.ByteArrayOutputStream

import com.sksamuel.avro4s.{AvroFormat, AvroInputStream, AvroOutputStream, AvroSchema, BinaryFormat, DataFormat, Decoder, Encoder, JsonFormat, SchemaFor}
import org.apache.avro.Schema
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}


class GenericSerde[T >: Null : SchemaFor : Encoder : Decoder](avroFormat: AvroFormat = BinaryFormat) extends Serde[T]
  with Deserializer[T]
  with Serializer[T]
  with Serializable {

  val schema: Schema = AvroSchema[T]

  override def serializer(): Serializer[T] = this

  override def deserializer(): Deserializer[T] = this

  override def deserialize(topic: String, data: Array[Byte]): T = {
    if (data == null) null else {

      val avroInputStream = avroFormat match {
        case BinaryFormat => AvroInputStream.binary[T]
        case JsonFormat => AvroInputStream.json[T]
        case DataFormat => AvroInputStream.data[T]
      }

      val input = avroInputStream.from(data).build(schema)
      val result = input.iterator.next()
      input.close()
      result
    }
  }

  override def close(): Unit = ()

  override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()

  override def serialize(topic: String, data: T): Array[Byte] = {
    val baos = new ByteArrayOutputStream()

    val avroOutputStream = avroFormat match {
      case BinaryFormat => AvroOutputStream.binary[T]
      case JsonFormat => AvroOutputStream.json[T]
      case DataFormat => AvroOutputStream.data[T]
    }

    val output = avroOutputStream.to(baos).build()
    output.write(data)
    output.close()
    baos.toByteArray
  }
} 
Example 20
Source File: KafkaConsumer.scala    From aecor   with MIT License
package aecor.kafkadistributedprocessing.internal

import java.time.Duration
import java.util.Properties
import java.util.concurrent.Executors

import cats.effect.{ Async, ContextShift, Resource }
import cats.~>
import org.apache.kafka.clients.consumer.{ Consumer, ConsumerRebalanceListener, ConsumerRecords }
import org.apache.kafka.common.PartitionInfo
import org.apache.kafka.common.serialization.Deserializer

import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration

private[kafkadistributedprocessing] final class KafkaConsumer[F[_], K, V](
  withConsumer: (Consumer[K, V] => *) ~> F
) {

  def subscribe(topics: Set[String], listener: ConsumerRebalanceListener): F[Unit] =
    withConsumer(_.subscribe(topics.asJava, listener))

  def subscribe(topics: Set[String]): F[Unit] =
    withConsumer(_.subscribe(topics.asJava))

  val unsubscribe: F[Unit] =
    withConsumer(_.unsubscribe())

  def partitionsFor(topic: String): F[Set[PartitionInfo]] =
    withConsumer(_.partitionsFor(topic).asScala.toSet)

  def close: F[Unit] =
    withConsumer(_.close())

  def poll(timeout: FiniteDuration): F[ConsumerRecords[K, V]] =
    withConsumer(_.poll(Duration.ofNanos(timeout.toNanos)))
}

private[kafkadistributedprocessing] object KafkaConsumer {
  final class Create[F[_]] {
    def apply[K, V](
      config: Properties,
      keyDeserializer: Deserializer[K],
      valueDeserializer: Deserializer[V]
    )(implicit F: Async[F], contextShift: ContextShift[F]): Resource[F, KafkaConsumer[F, K, V]] = {
      val create = F.suspend {

        val executor = Executors.newSingleThreadExecutor()

        def eval[A](a: => A): F[A] =
          contextShift.evalOn(ExecutionContext.fromExecutor(executor)) {
            F.async[A] { cb =>
              executor.execute(new Runnable {
                override def run(): Unit =
                  cb {
                    try Right(a)
                    catch {
                      case e: Throwable => Left(e)
                    }
                  }
              })
            }
          }

        eval {
          val original = Thread.currentThread.getContextClassLoader
          Thread.currentThread.setContextClassLoader(null)
          val consumer = new org.apache.kafka.clients.consumer.KafkaConsumer[K, V](
            config,
            keyDeserializer,
            valueDeserializer
          )
          Thread.currentThread.setContextClassLoader(original)
          val withConsumer = new ((Consumer[K, V] => *) ~> F) {
            def apply[A](f: Consumer[K, V] => A): F[A] =
              eval(f(consumer))
          }
          new KafkaConsumer[F, K, V](withConsumer)
        }
      }
      Resource.make(create)(_.close)
    }
  }
  def create[F[_]]: Create[F] = new Create[F]
} 
Example 21
Source File: TestSerdes.scala    From haystack-traces   with Apache License 2.0
package com.expedia.www.haystack.trace.indexer.integration.serdes

import java.util

import com.expedia.open.tracing.Span
import com.expedia.open.tracing.buffer.SpanBuffer
import com.expedia.www.haystack.trace.commons.packer.Unpacker
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

class SpanProtoSerializer extends Serializer[Span] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, data: Span): Array[Byte] = {
    data.toByteArray
  }
  override def close(): Unit = ()
}

class SnappyCompressedSpanBufferProtoDeserializer extends Deserializer[SpanBuffer] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  override def deserialize(topic: String, data: Array[Byte]): SpanBuffer = {
    if(data == null) {
      null
    } else {
      Unpacker.readSpanBuffer(data)
    }
  }

  override def close(): Unit = ()
} 
Example 22
Source File: InternalKafkaAvroSerde.scala    From affinity   with Apache License 2.0
package io.amient.affinity.kafka

import io.amient.affinity.avro.record.AvroRecord
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

import scala.reflect.runtime.universe._


class InternalKafkaAvroSerde[T: TypeTag] extends Serde[T] {
  val schema = AvroRecord.inferSchema[T]

  override def configure(configs: java.util.Map[String, _], isKey: Boolean) = ()
  override def close() = ()

  override def deserializer() = new Deserializer[T] {
    override def configure(configs: java.util.Map[String, _], isKey: Boolean) = ()
    override def close() = ()
    override def deserialize(topic: String, data: Array[Byte]) = AvroRecord.read(data, schema)
  }

  override def serializer() = new Serializer[T] {
    override def configure(configs: java.util.Map[String, _], isKey: Boolean) = ()
    override def close() = ()
    override def serialize(topic: String, data: T) = AvroRecord.write(data, schema)
  }
}
Example 23
Source File: KafkaAvroDeserializer.scala    From affinity   with Apache License 2.0
package io.amient.affinity.kafka

import java.util

import com.typesafe.config.ConfigFactory
import io.amient.affinity.avro.record.AvroSerde
import org.apache.kafka.common.serialization.Deserializer

class KafkaAvroDeserializer extends Deserializer[Any] {

  var isKey: Boolean = false
  var serde: AvroSerde = null

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
    val config = ConfigFactory.parseMap(configs).getConfig("schema").atKey("schema").atPath(AvroSerde.AbsConf.Avro.path)
    this.serde = AvroSerde.create(config)
    this.isKey = isKey
  }

  override def close(): Unit = if (serde != null) serde.close()

  override def deserialize(topic: String, data: Array[Byte]): Any = {
    require(serde != null, "AvroSerde not configured")
    //val subject = s"$topic-${if (isKey) "key" else "value"}"
    serde.fromBytes(data)
  }
} 
Example 24
Source File: KafkaJsonSerializer.scala    From ticket-booking-aecor   with Apache License 2.0
package ru.pavkin.payment.kafka
import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.Encoder
import org.apache.kafka.common.serialization.{ Deserializer, Serializer, StringSerializer }
import ru.pavkin.payment.event.PaymentReceived

class PaymentReceivedEventSerializer extends Serializer[PaymentReceived] {
  private val stringSerializer = new StringSerializer

  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  def serialize(topic: String, data: PaymentReceived): Array[Byte] =
    stringSerializer.serialize(topic, Encoder[PaymentReceived].apply(data).noSpaces)

  def close(): Unit = ()
}

class PaymentReceivedEventDeserializer extends Deserializer[PaymentReceived] {
  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  def close(): Unit = ()

  def deserialize(topic: String, data: Array[Byte]): PaymentReceived =
    if (data ne null)
      decode[PaymentReceived](new String(data, StandardCharsets.UTF_8)).fold(throw _, identity)
    else null

} 
Example 25
Source File: SimpleKafkaConsumer.scala    From remora   with MIT License
import java.util.Properties

import com.fasterxml.jackson.databind.KeyDeserializer
import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}
import org.apache.kafka.common.serialization.Deserializer
import net.manub.embeddedkafka.Codecs.stringDeserializer
import net.manub.embeddedkafka.ConsumerExtensions._

class SimpleKafkaConsumer[K,V](consumerProps : Properties,
                               topic : String,
                               keyDeserializer: Deserializer[K],
                               valueDeserializer: Deserializer[V],
                               function : ConsumerRecords[K, V] => Unit,
                               poll : Long = 2000) {

  private var running = false

  private val consumer = new KafkaConsumer[K, V](consumerProps, keyDeserializer, valueDeserializer)


  private val thread = new Thread {
    import scala.collection.JavaConverters._

    override def run: Unit = {
      consumer.subscribe(List(topic).asJava)
      consumer.partitionsFor(topic)

      while (running) {
        val record: ConsumerRecords[K, V] = consumer.poll(poll)
        function(record)
      }
    }
  }

  def start(): Unit = {
    if(!running) {
      running = true
      thread.start()
    }
  }

  def stop(): Unit = {
    if(running) {
      running = false
      thread.join()
      consumer.close()
    }
  }
} 
Example 26
Source File: WindowedMetricSerde.scala    From haystack-trends   with Apache License 2.0
package com.expedia.www.haystack.trends.kstream.serde

import java.util

import com.expedia.www.haystack.commons.entities.Interval
import com.expedia.www.haystack.commons.metrics.MetricsSupport
import com.expedia.www.haystack.trends.aggregation.metrics.{AggregationType, CountMetricFactory, HistogramMetricFactory, Metric}
import com.expedia.www.haystack.trends.aggregation.{TrendMetric, WindowedMetric}
import com.expedia.www.haystack.trends.aggregation.entities.TimeWindow
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}
import org.msgpack.core.MessagePack
import org.msgpack.value.ValueFactory

import scala.collection.JavaConverters._
import scala.collection.mutable


object WindowedMetricSerde extends Serde[WindowedMetric] with MetricsSupport {

  private val SERIALIZED_METRIC_KEY = "serializedMetric"
  private val START_TIME_KEY = "startTime"
  private val END_TIME_KEY = "endTime"

  private val aggregationTypeKey = "aggregationType"
  private val metricsKey = "metrics"

  override def close(): Unit = ()

  override def serializer(): Serializer[WindowedMetric] = {
    new Serializer[WindowedMetric] {
      override def configure(map: util.Map[String, _], b: Boolean): Unit = ()

      override def serialize(topic: String, windowedMetric: WindowedMetric): Array[Byte] = {
        val packer = MessagePack.newDefaultBufferPacker()

        val serializedMetrics = windowedMetric.windowedMetricsMap.map {
          case (timeWindow, metric) =>
            ValueFactory.newMap(Map(
              ValueFactory.newString(START_TIME_KEY) -> ValueFactory.newInteger(timeWindow.startTime),
              ValueFactory.newString(END_TIME_KEY) -> ValueFactory.newInteger(timeWindow.endTime),
              ValueFactory.newString(SERIALIZED_METRIC_KEY) -> ValueFactory.newBinary(windowedMetric.getMetricFactory.getMetricSerde.serialize(metric))
            ).asJava)
        }
        val windowedMetricMessagePack = Map(
          ValueFactory.newString(metricsKey) -> ValueFactory.newArray(serializedMetrics.toList.asJava),
          ValueFactory.newString(aggregationTypeKey) -> ValueFactory.newString(windowedMetric.getMetricFactory.getAggregationType.toString)
        )
        packer.packValue(ValueFactory.newMap(windowedMetricMessagePack.asJava))
        packer.toByteArray
      }

      override def close(): Unit = ()
    }
  }

  override def deserializer(): Deserializer[WindowedMetric] = {
    new Deserializer[WindowedMetric] {
      override def configure(map: util.Map[String, _], b: Boolean): Unit = ()

      // The deserialize body is omitted from this excerpt; the original source presumably
      // reads back the msgpack structure written by the serializer above.
      override def deserialize(topic: String, data: Array[Byte]): WindowedMetric = ???

      override def close(): Unit = ()
    }
  }

  override def configure(map: util.Map[String, _], b: Boolean): Unit = ()
} 
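Being a Serde, the object above can be handed to Kafka Streams wherever a Serde[WindowedMetric] is expected; a minimal sketch (the topic name is hypothetical and not necessarily how haystack-trends wires it):

import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.Consumed

val builder = new StreamsBuilder()
// `with` needs backticks in Scala because it is a reserved word.
val windowedMetrics = builder.stream[String, WindowedMetric](
  "windowed-metrics",
  Consumed.`with`(Serdes.String(), WindowedMetricSerde)
)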
Example 27
Source File: KafkaService.scala    From ws_to_kafka   with MIT License 5 votes vote down vote up
package com.pkinsky


import akka.actor.ActorSystem
import akka.stream.scaladsl.{Source, Flow, Sink}
import com.softwaremill.react.kafka.{ConsumerProperties, ProducerProperties, ProducerMessage, ReactiveKafka}
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import play.api.libs.json.{Json, Reads, Writes}

case class KafkaServiceConf(bootstrapServers: String)

class KafkaService(kafkaClient: ReactiveKafka, conf: KafkaServiceConf) {
  
  def consume[T](topic: String, groupId: String)(implicit writes: Reads[T], actorSystem: ActorSystem): Source[T, Unit] =
    Source.fromPublisher(kafkaClient.consume(
      ConsumerProperties(
        bootstrapServers = conf.bootstrapServers, // IP and port of local Kafka instance
        topic = topic, // topic to consume messages from
        groupId = groupId, // consumer group
        valueDeserializer = KafkaService.deserializer[T]
      )
    )).map(_.value())
}


object KafkaService {
  def serializer[T: Writes] = new Serializer[T] {
    override def serialize(topic: String, data: T): Array[Byte] = {
      val js = Json.toJson(data)
      js.toString().getBytes("UTF-8")
    }

    override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
    override def close(): Unit = ()
  }

  def deserializer[T: Reads] = new Deserializer[T] {
    override def deserialize(topic: String, data: Array[Byte]): T = {
      val s = new String(data, "UTF-8")
      Json.fromJson(Json.parse(s)).get //throw exception on error ¯\_(ツ)_/¯ (consider returning JsResult[T])
    }

    override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
    override def close(): Unit = ()
  }
} 
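A minimal round-trip sketch for the JSON codecs above, assuming a hypothetical Event case class with a Play JSON Format in implicit scope (a Format supplies both the Reads and Writes these methods require):

import play.api.libs.json.{Format, Json}

case class Event(id: String, value: Double) // hypothetical payload type
object Event {
  implicit val format: Format[Event] = Json.format[Event]
}

val bytes = KafkaService.serializer[Event].serialize("events", Event("abc", 1.5))
val event = KafkaService.deserializer[Event].deserialize("events", bytes)
// event == Event("abc", 1.5)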
Example 28
Source File: EventAggregationSpec.scala    From spark-summit-2018   with GNU General Public License v3.0 5 votes vote down vote up
package com.twilio.open.streaming.trend.discovery

import java.util

import com.twilio.open.protocol.Calls.CallEvent
import com.twilio.open.protocol.Metrics
import com.twilio.open.streaming.trend.discovery.streams.EventAggregation
import org.apache.kafka.common.serialization.{Deserializer, Serializer, StringDeserializer, StringSerializer}
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.apache.spark.sql._
import org.apache.spark.sql.kafka010.KafkaTestUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}

class EventAggregationSpec extends KafkaBackedTest[String, CallEvent] {
  override val testUtils = new KafkaTestUtils[String, CallEvent] {
    override val keySerializer: Serializer[String] = new StringSerializer
    override val keyDeserializer: Deserializer[String] = new StringDeserializer
    override val valueSerializer: Serializer[CallEvent] = new CallEventSerializer
    override val valueDeserializer: Deserializer[CallEvent] = new CallEventDeserializer
  }
  override protected val kafkaTopic = "spark.summit.call.events"
  override protected val partitions = 8

  private val pathToTestScenarios = "src/test/resources/scenarios"

  val log: Logger = LoggerFactory.getLogger(classOf[EventAggregation])

  lazy val session: SparkSession = sparkSql

  override def conf: SparkConf = {
    new SparkConf()
      .setMaster("local[*]")
      .setAppName("aggregation-test-app")
      .set("spark.ui.enabled", "false")
      .set("spark.app.id", appID)
      .set("spark.driver.host", "localhost")
      .set("spark.sql.shuffle.partitions", "32")
      .set("spark.executor.cores", "4")
      .set("spark.executor.memory", "1g")
      .set("spark.ui.enabled", "false")
      .setJars(SparkContext.jarOfClass(classOf[EventAggregation]).toList)
  }

  test("Should aggregate call events") {
    import session.implicits._
    val appConfig = appConfigForTest()
    val scenario = TestHelper.loadScenario[CallEvent](s"$pathToTestScenarios/pdd_events.json")
    val scenarioIter = scenario.toIterator
    scenario.nonEmpty shouldBe true

    testUtils.createTopic(kafkaTopic, partitions, overwrite = true)
    sendNextMessages(scenarioIter, 30, _.getEventId, _.getLoggedEventTime)

    val trendDiscoveryApp = new TrendDiscoveryApp(appConfigForTest(), session)
    val eventAggregation = EventAggregation(appConfig)

    eventAggregation.process(trendDiscoveryApp.readKafkaStream())(session)
      .writeStream
      .queryName("calleventaggs")
      .format("memory")
      .outputMode(eventAggregation.outputMode)
      .start()
      .processAllAvailable()

    val df = session.sql("select * from calleventaggs")
    df.printSchema()
    df.show

    val res = session
      .sql("select avg(stats.p99) from calleventaggs")
      .collect()
      .map { r =>
        r.getAs[Double](0) }
      .head

    DiscoveryUtils.round(res) shouldEqual 7.13

  }


}

class CallEventSerializer extends Serializer[CallEvent] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
  override def serialize(topic: String, data: CallEvent): Array[Byte] = data.toByteArray
  override def close(): Unit = {}
}

class CallEventDeserializer extends Deserializer[CallEvent] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
  override def deserialize(topic: String, data: Array[Byte]): CallEvent = CallEvent.parseFrom(data)
  override def close(): Unit = {}
} 
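The serializer/deserializer pair above can also be bundled into a single Serde when some other API asks for one; a small sketch using the stock Serdes.serdeFrom helper (not something this test needs, just a convenient way to reuse the pair):

import com.twilio.open.protocol.Calls.CallEvent
import org.apache.kafka.common.serialization.{Serde, Serdes}

val callEventSerde: Serde[CallEvent] =
  Serdes.serdeFrom(new CallEventSerializer, new CallEventDeserializer)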
Example 29
Source File: TestJsonDeserializer.scala    From embedded-kafka   with MIT License 5 votes vote down vote up
package net.manub.embeddedkafka.serializers

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.Deserializer

import scala.reflect.ClassTag


class TestJsonDeserializer[T](implicit tag: ClassTag[T], ev: Null <:< T)
    extends Deserializer[T] {
  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

  override def deserialize(topic: String, bytes: Array[Byte]): T =
    Option(bytes).map { _ =>
      try mapper.readValue(
        bytes,
        tag.runtimeClass.asInstanceOf[Class[T]]
      )
      catch {
        case e: Exception => throw new SerializationException(e)
      }
    }.orNull
} 
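A quick usage sketch, assuming a small case class that Jackson's Scala module can bind to; the Person type and topic name are made up for illustration:

case class Person(name: String, age: Int) // hypothetical payload type

val deserializer = new TestJsonDeserializer[Person]
val person = deserializer.deserialize("people", """{"name":"Ada","age":36}""".getBytes("UTF-8"))
// person == Person("Ada", 36)

// Null payloads come back as null thanks to the `Null <:< T` evidence.
val missing = deserializer.deserialize("people", null)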
Example 30
Source File: CirceSerdes.scala    From kafka-streams-circe   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kafka.streams.circe

import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.{Decoder, Encoder}
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serdes, Serializer}

object CirceSerdes {

  implicit def serializer[T: Encoder]: Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def serialize(topic: String, caseClass: T): Array[Byte] =
        Encoder[T].apply(caseClass).noSpaces.getBytes(StandardCharsets.UTF_8)
      override def close(): Unit = ()
    }

  implicit def deserializer[T: Decoder]: Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def deserialize(topic: String, data: Array[Byte]): T =
        Option(data).fold(null.asInstanceOf[T]) { data =>
          decode[T](new String(data, StandardCharsets.UTF_8))
            .fold(error => throw new SerializationException(error), identity)
        }
      override def close(): Unit = ()
    }

  implicit def serde[CC: Encoder: Decoder]: Serde[CC] = Serdes.serdeFrom(serializer, deserializer)
} 
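A round-trip sketch, assuming circe-generic's automatic derivation is on the classpath to supply the Encoder and Decoder for a hypothetical case class:

import com.goyeau.kafka.streams.circe.CirceSerdes
import io.circe.generic.auto._

case class Reading(sensor: String, celsius: Double) // hypothetical payload type

val serde = CirceSerdes.serde[Reading]
val bytes = serde.serializer().serialize("readings", Reading("s-1", 21.4))
val back  = serde.deserializer().deserialize("readings", bytes)
// back == Reading("s-1", 21.4)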
Example 31
Source File: KafkaSinkTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.kafka

import java.util
import java.util.{Properties, UUID}

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema.{Field, StringType, StructType}
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.collection.JavaConverters._
import scala.util.Try

class KafkaSinkTest extends FlatSpec with Matchers with BeforeAndAfterAll {

  implicit val kafkaConfig = EmbeddedKafkaConfig(
    kafkaPort = 6001,
    zooKeeperPort = 6000
  )
  Try {
    EmbeddedKafka.start()
  }

  val schema = StructType(
    Field("name", StringType, nullable = true),
    Field("location", StringType, nullable = true)
  )

  val ds = DataStream.fromValues(
    schema,
    Seq(
      Vector("clint eastwood", UUID.randomUUID().toString),
      Vector("elton john", UUID.randomUUID().toString)
    )
  )

  "KafkaSink" should "support default implicits" ignore {

    val topic = "mytopic-" + System.currentTimeMillis()

    val properties = new Properties()
    properties.put("bootstrap.servers", s"localhost:${kafkaConfig.kafkaPort}")
    properties.put("group.id", "test")
    properties.put("auto.offset.reset", "earliest")

    val producer = new KafkaProducer[String, Row](properties, StringSerializer, RowSerializer)
    val sink = KafkaSink(topic, producer)

    val consumer = new KafkaConsumer[String, String](properties, StringDeserializer, StringDeserializer)
    consumer.subscribe(util.Arrays.asList(topic))

    ds.to(sink)
    producer.close()

    val records = consumer.poll(4000)
    records.iterator().asScala.map(_.value).toList shouldBe ds.collect.map {
      case Row(_, values) => values.mkString(",")
    }.toList
  }
}

object RowSerializer extends Serializer[Row] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, data: Row): Array[Byte] = data.values.mkString(",").getBytes
  override def close(): Unit = ()
}

object StringSerializer extends Serializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def serialize(topic: String, data: String): Array[Byte] = data.getBytes
}

object StringDeserializer extends Deserializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): String = new String(data)
} 
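One caveat with the StringDeserializer above: new String(data) throws a NullPointerException if Kafka ever hands it a null payload (e.g. a tombstone record). A null-safe variant, reusing the same imports as the file above, is a one-line change:

object NullSafeStringDeserializer extends Deserializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): String =
    if (data == null) null else new String(data)
}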
Example 32
Source File: Consumers.scala    From scalatest-embedded-kafka   with MIT License 5 votes vote down vote up
package net.manub.embeddedkafka

import org.apache.kafka.clients.consumer.{
  ConsumerConfig,
  KafkaConsumer,
  OffsetResetStrategy
}
import org.apache.kafka.common.serialization.Deserializer


trait Consumers {

  def newConsumer[K: Deserializer, V: Deserializer]()(
      implicit config: EmbeddedKafkaConfig): KafkaConsumer[K, V] = {
    import scala.collection.JavaConverters._

    val consumerConfig = Map[String, Object](
      ConsumerConfig.GROUP_ID_CONFIG -> UUIDs.newUuid().toString,
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> s"localhost:${config.kafkaPort}",
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> OffsetResetStrategy.EARLIEST.toString.toLowerCase
    )
    new KafkaConsumer[K, V](consumerConfig.asJava,
                            implicitly[Deserializer[K]],
                            implicitly[Deserializer[V]])
  }
}
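A usage sketch, assuming an embedded broker is already running and a single implicit Deserializer[String] serves as both key and value codec; this would live inside a test class that mixes in the Consumers trait, and the topic name is hypothetical:

import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()
implicit val stringDeserializer: Deserializer[String] = new StringDeserializer

val consumer = newConsumer[String, String]()
consumer.subscribe(java.util.Collections.singletonList("example-topic"))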