kafka.utils.VerifiableProperties Scala Examples

The following examples show how to use kafka.utils.VerifiableProperties. Each example lists the source file and the open-source project it was taken from, so you can consult the original project for full context.
Example 1
Source File: avroMarshallers.scala — from the scalatest-embedded-kafka project (MIT License)
package net.manub.embeddedkafka.avro

import java.io.ByteArrayOutputStream

import kafka.utils.VerifiableProperties
import org.apache.avro.Schema
import org.apache.avro.io._
import org.apache.avro.specific.{
  SpecificDatumReader,
  SpecificDatumWriter,
  SpecificRecord
}
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

/** Kafka [[Deserializer]] that decodes Avro binary payloads into
  * [[SpecificRecord]] instances of type `T`, using the supplied schema.
  */
class KafkaAvroDeserializer[T <: SpecificRecord](schema: Schema)
    extends Deserializer[T]
    with NoOpConfiguration
    with NoOpClose {

  // One reader per deserializer instance, reused across deserialize calls.
  private val datumReader = new SpecificDatumReader[T](schema)

  /** Decodes `data` as a single Avro binary-encoded record of type `T`. */
  override def deserialize(topic: String, data: Array[Byte]): T = {
    val binaryDecoder = DecoderFactory.get.binaryDecoder(data, null)
    datumReader.read(null.asInstanceOf[T], binaryDecoder)
  }
}

/** Kafka [[Serializer]] that encodes a [[SpecificRecord]] of type `T` to
  * Avro binary. A `null` record serializes to `null`.
  */
class KafkaAvroSerializer[T <: SpecificRecord]()
    extends Serializer[T]
    with NoOpConfiguration
    with NoOpClose {

  // Encodes one record to Avro binary; null in, null out.
  private def toBytes(nullableData: T): Array[Byte] =
    Option(nullableData).map { record =>
      val datumWriter: DatumWriter[T] = new SpecificDatumWriter[T](record.getSchema)
      val buffer = new ByteArrayOutputStream()
      val binaryEncoder = EncoderFactory.get.binaryEncoder(buffer, null)

      datumWriter.write(record, binaryEncoder)
      // flush the encoder so all buffered bytes reach the stream
      binaryEncoder.flush()
      buffer.close()

      buffer.toByteArray
    }.orNull

  /** Serializes `data` for `topic` (topic is unused by the Avro encoding). */
  override def serialize(topic: String, data: T): Array[Byte] = toBytes(data)
}

/** Mixin supplying a no-op `configure` for (de)serializers that take no config. */
sealed trait NoOpConfiguration {
  def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = {}
}

/** Mixin supplying a no-op `close` for (de)serializers holding no resources. */
sealed trait NoOpClose {
  def close(): Unit = {}
}
Example 2
Source File: AvroDecoder.scala — from the cuesheet project (Apache License 2.0)
package com.kakao.cuesheet.convert

import java.util.Arrays.copyOfRange

import kafka.serializer.Decoder
import kafka.utils.VerifiableProperties
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}


/** Base Kafka [[Decoder]] that parses Avro binary payloads into [[GenericRecord]]s.
  *
  * Reads its configuration from [[VerifiableProperties]]:
  *  - `Avro.SCHEMA`: the Avro schema string (required)
  *  - `Avro.SKIP_BYTES`: leading bytes to drop before decoding (defaults to 0)
  */
sealed trait AvroDecoder[T] extends Decoder[T] {

  def props: VerifiableProperties

  protected val schema = new Schema.Parser().parse(props.getString(Avro.SCHEMA))
  protected val skipBytes = props.getInt(Avro.SKIP_BYTES, 0)

  protected val reader = new GenericDatumReader[GenericRecord](schema)
  protected val decoder = Avro.recordDecoder(reader)

  // Drops the first `size` bytes of `bytes`; empty array when nothing is left.
  private def skip(bytes: Array[Byte], size: Int): Array[Byte] =
    if (bytes.length - size > 0) copyOfRange(bytes, size, bytes.length)
    else new Array[Byte](0)

  /** Decodes the payload (after optionally skipping a fixed-size prefix). */
  def parse(bytes: Array[Byte]): GenericRecord =
    decoder(if (skipBytes == 0) bytes else skip(bytes, skipBytes))
}

/** Decodes Avro bytes directly into a [[GenericRecord]]. */
class AvroRecordDecoder(val props: VerifiableProperties) extends AvroDecoder[GenericRecord] {
  override def fromBytes(bytes: Array[Byte]): GenericRecord = {
    parse(bytes)
  }
}

/** Decodes Avro bytes into a Scala `Map[String, Any]` via `Avro.toMap`. */
class AvroMapDecoder(val props: VerifiableProperties) extends AvroDecoder[Map[String, Any]] {
  override def fromBytes(bytes: Array[Byte]): Map[String, Any] = {
    val record = parse(bytes)
    Avro.toMap(record)
  }
}

/** Decodes Avro bytes into a JSON string via `Avro.toJson`. */
class AvroJsonDecoder(val props: VerifiableProperties) extends AvroDecoder[String] {
  override def fromBytes(bytes: Array[Byte]): String = {
    val record = parse(bytes)
    Avro.toJson(record)
  }
}
Example 3
Source File: KafkaJsonProducer.scala — from the coral project (Apache License 2.0)
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.producer.{KeyedMessage, ProducerConfig, Producer}
import kafka.serializer.Encoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.jackson.JsonMethods._

object KafkaJsonProducer {
	/** Kafka encoder specialized to json4s [[JValue]] payloads. */
	type KafkaEncoder = Encoder[JValue]

	/** Producer wired with the default [[JsonEncoder]]. */
	def apply() = new KafkaJsonProducer(classOf[JsonEncoder])

	/** Producer wired with a caller-supplied encoder class. */
	def apply[T <: KafkaEncoder](encoder: Class[T]) = new KafkaJsonProducer(encoder)
}

/** Factory for [[KafkaSender]]s that publish JSON messages serialized by
  * `encoderClass`.
  */
class KafkaJsonProducer[T <: KafkaEncoder](encoderClass: Class[T]) {

	/** Builds a sender for `topic`; the caller's `properties` are not mutated. */
	def createSender(topic: String, properties: Properties): KafkaSender = {
		// Clone before adding the serializer entry so the input stays untouched.
		val senderProps = properties.clone.asInstanceOf[Properties]
		senderProps.put("serializer.class", encoderClass.getName)
		new KafkaSender(topic, createProducer(senderProps))
	}

	/** Builds the underlying producer; overridable for testing. */
	def createProducer(props: Properties): Producer[String, JValue] =
		new Producer[String, JValue](new ProducerConfig(props))
}

/** Thin wrapper that publishes [[JObject]] messages to a fixed Kafka topic. */
class KafkaSender(topic: String, producer: Producer[String, JValue]) {

	/** Sends `message` to the topic, keyed when `key` is defined. */
	def send(key: Option[String], message: JObject) = {
		val keyedMessage = key.fold(new KeyedMessage[String, JValue](topic, message)) { k =>
			new KeyedMessage[String, JValue](topic, k, message)
		}
		producer.send(keyedMessage)
	}
}

/** Default encoder: renders a [[JValue]] as compact JSON, UTF-8 encoded. */
class JsonEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = {
		val compactJson = compact(value)
		compactJson.getBytes("UTF-8")
	}
}
Example 4
Source File: KafkaJsonProducerSpec.scala — from the coral project (Apache License 2.0)
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.scalatest.{Matchers, WordSpec}
import org.json4s.jackson.JsonMethods._
import kafka.producer.{ProducerConfig, KeyedMessage, Producer}
import org.mockito.{Mockito, ArgumentCaptor}
import org.mockito.Mockito._
import scala.collection.mutable

// Specs for KafkaJsonProducer, KafkaSender, and JsonEncoder. The producer
// tests use MyKafkaJsonProducer/MyEncoder (defined below) as test doubles;
// the sender tests capture the KeyedMessage passed to a mocked Producer.
class KafkaJsonProducerSpec extends WordSpec with Matchers {
	"A KafkaJsonProducer" should {
		"create a KafkaJsonProducer with the JsonEncoder" in {
			val producer = KafkaJsonProducer()
			assert(producer.getClass == classOf[KafkaJsonProducer[JsonEncoder]])
		}

		"create a KafkaJsonProducer with the specified Encoder" in {
			val producer = KafkaJsonProducer(classOf[MyEncoder])
			assert(producer.getClass == classOf[KafkaJsonProducer[MyEncoder]])
		}

		"create a sender" in {
			// MyKafkaJsonProducer records the Properties handed to createProducer,
			// letting us verify that createSender injected the encoder class.
			val producer = new MyKafkaJsonProducer
			producer.createSender("topic", new Properties)
			val serializer = producer.receivedProperties.get("serializer.class")
			assert(serializer == classOf[MyEncoder].getName)
		}
	}

	"A KafkaSender" should {
		"send the JSON provided without a key to Kafka" in {
			val messageJson = """{"key1": "value1", "key2": "value2"}"""

			val keyedMessage = sendMessage(None, messageJson)

			assert(keyedMessage.topic == "test")
			assert(keyedMessage.hasKey == false)
			assert(keyedMessage.message == parse(messageJson))
		}

		"send the JSON provided with a key to Kafka" in {
			val messageJson = """{"key3": "value3", "key4": "value4"}"""

			val keyedMessage = sendMessage(Some("key"), messageJson)

			assert(keyedMessage.key == "key")
			assert(keyedMessage.topic == "test")
			assert(keyedMessage.message == parse(messageJson))
		}
	}

	"A JsonEncoder" should {
		"encode the provided json" in {
			val json = """{"key1": "value1"}"""
			val encoder = new JsonEncoder(new VerifiableProperties)
			val result = encoder.toBytes(parse(json))
			// Round-trip through parse so key ordering/whitespace don't matter.
			assert(parse(new String(result, "UTF-8")) == parse(json))
		}
	}

	// Sends `messageJson` through a KafkaSender backed by a mocked Producer and
	// returns the single KeyedMessage the producer received.
	private def sendMessage(key: Option[String], messageJson: String): KeyedMessage[String, JValue] = {
		val producer = Mockito.mock(classOf[Producer[String, JValue]])
		val sender = new KafkaSender("test", producer)
		sender.send(key, parse(messageJson).asInstanceOf[JObject])

		val argumentCaptor = ArgumentCaptor.forClass(classOf[KeyedMessage[String, JValue]])
		verify(producer).send(argumentCaptor.capture())

		val keyedMessages = argumentCaptor.getAllValues
		assert(keyedMessages.size == 1)

		// The following construction is necessary because capturing of parameters
		// with Mockito, Scala type interference, and multiple arguments
		// don't work together without explicit casts.
		keyedMessages.get(0).asInstanceOf[mutable.WrappedArray.ofRef[KeyedMessage[String, JValue]]](0)
	}
}

/** Stub encoder for the spec: ignores the value and emits zero bytes. */
class MyEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = Array()
}

// Test double: captures the Properties passed to createProducer and returns
// a Mockito mock so no real Kafka connection is made.
class MyKafkaJsonProducer extends KafkaJsonProducer(classOf[MyEncoder]) {
	// Last Properties seen by createProducer; null until the first call.
	var receivedProperties: Properties = _

	override def createProducer(props: Properties): Producer[String, JValue] = {
		receivedProperties = props
		Mockito.mock(classOf[Producer[String, JValue]])
	}
}
Example 5
package packt.ch05

import java.util

import kafka.utils.VerifiableProperties
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.Partitioner
import org.apache.kafka.common.Cluster

object SimplePartitioner {

  // NOTE(review): this field is never read or written anywhere in this file —
  // presumably leftover from the original book example; confirm before removing.
  private var producer: KafkaProducer[String, String] = _
}

/** Partitioner that derives the partition from the numeric suffix of the key:
  * the digits after the last '.' (e.g. "host.example.12" -> 12 % numPartitions).
  * Keys without a usable suffix map to partition 0.
  */
class SimplePartitioner(props: VerifiableProperties) extends Partitioner {

  /** Computes the partition for `key` out of `a_numPartitions` partitions.
    *
    * Returns 0 when the key is null, has no '.'-separated suffix, or the
    * suffix is not a valid integer (previously those last two cases threw).
    */
  def partition(key: AnyRef, a_numPartitions: Int): Int = {
    val partitionKey = key.asInstanceOf[String]
    if (partitionKey == null) {
      // Null keys previously caused a NullPointerException; fall back to 0.
      0
    } else {
      val offset = partitionKey.lastIndexOf('.')
      if (offset > 0) {
        try {
          java.lang.Integer.parseInt(partitionKey.substring(offset + 1)) % a_numPartitions
        } catch {
          // Non-numeric suffix: degrade to partition 0 instead of failing the send.
          case _: NumberFormatException => 0
        }
      } else {
        0
      }
    }
  }

  override def partition(topic: String,
                         key: AnyRef,
                         keyBytes: Array[Byte],
                         value: AnyRef,
                         valueBytes: Array[Byte],
                         cluster: Cluster): Int = {
    // BUG FIX: the original hard-coded 10 partitions; use the actual partition
    // count from the cluster metadata, guarding against empty metadata.
    val numPartitions = cluster.partitionsForTopic(topic).size
    partition(key, if (numPartitions > 0) numPartitions else 1)
  }

  override def close(): Unit = {}

  override def configure(configs: util.Map[String, _]): Unit = {}
}