kafka.server.KafkaServer Scala Examples

The following examples show how to use kafka.server.KafkaServer. Each example notes its original project, source file, and license.
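
All four examples wrap a broker started through new KafkaServer(new KafkaConfig(props)). As a point of reference, here is a minimal, self-contained sketch of that bare lifecycle; the property values, the ZooKeeper address, and the log directory are illustrative assumptions, not taken from any of the projects below.

import java.util.Properties

import kafka.server.{KafkaConfig, KafkaServer}

object KafkaServerSketch extends App {
  // Assumption: a ZooKeeper instance is already listening on localhost:2181.
  val props = new Properties()
  props.setProperty(KafkaConfig.ZkConnectProp, "localhost:2181")
  props.setProperty(KafkaConfig.BrokerIdProp, "1")
  props.setProperty(KafkaConfig.PortProp, "9092")
  props.setProperty(KafkaConfig.LogDirProp, "/tmp/kafka-server-sketch-logs")

  // Same constructor and lifecycle calls used by the examples below.
  val broker = new KafkaServer(new KafkaConfig(props))
  broker.startup()

  // ... produce and consume against localhost:9092 here ...

  broker.shutdown()
  broker.awaitShutdown()
}
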
Example 1
Source File: servers.scala    From scalatest-embedded-kafka   with MIT License
package net.manub.embeddedkafka.schemaregistry

import io.confluent.kafka.schemaregistry.RestApp
import kafka.server.KafkaServer
import net.manub.embeddedkafka.{
  EmbeddedServer,
  EmbeddedServerWithKafka,
  EmbeddedZ
}

import scala.reflect.io.Directory


// Class declaration assumed from the RestApp import and the `app: EmbeddedSR`
// field below; the original listing omitted it.
case class EmbeddedSR(app: RestApp) extends EmbeddedServer {
  override def stop(clearLogs: Boolean = false): Unit = app.stop()
}

case class EmbeddedKWithSR(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    app: EmbeddedSR,
    logsDirs: Directory)(implicit config: EmbeddedKafkaConfigWithSchemaRegistry)
    extends EmbeddedServerWithKafka {

  override def stop(clearLogs: Boolean): Unit = {
    app.stop()

    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) logsDirs.deleteRecursively()
  }
} 
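
Note the teardown order in stop: the schema registry is stopped first, then the broker (shutdown followed by awaitShutdown), then the optional ZooKeeper factory, and the log directories are deleted only at the end, so each component goes down before the service it depends on.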
Example 2
Source File: EmbeddedServer.scala    From embedded-kafka   with MIT License
package net.manub.embeddedkafka

import java.nio.file.Path

import kafka.server.KafkaServer
import org.apache.zookeeper.server.ServerCnxnFactory

import scala.reflect.io.Directory


// Class declaration reconstructed from the companion object's apply method
// below; the original listing omitted it.
case class EmbeddedK(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    logsDirs: Path,
    config: EmbeddedKafkaConfig
) extends EmbeddedServerWithKafka {

  override def stop(clearLogs: Boolean): Unit = {
    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) {
      val _ = Directory(logsDirs.toFile).deleteRecursively
    }
  }
}

object EmbeddedK {
  def apply(
      broker: KafkaServer,
      logsDirs: Path,
      config: EmbeddedKafkaConfig
  ): EmbeddedK =
    EmbeddedK(factory = None, broker, logsDirs, config)
} 
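
A hedged usage sketch of the companion apply above; the broker, path, and config values are assumed to be already in scope and are not part of the original listing.

// Assumption: broker: KafkaServer, logsDir: java.nio.file.Path and
// config: EmbeddedKafkaConfig have been created elsewhere.
val embedded = EmbeddedK(broker, logsDir, config) // no ZooKeeper factory managed here
embedded.stop(clearLogs = true)                   // shuts the broker down and deletes logsDir
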
Example 3
Source File: EmbeddedKafkaCluster.scala    From ksql-jdbc-driver   with Apache License 2.0
package com.github.mmolimar.ksql.jdbc.embedded

import java.io.File
import java.util.Properties

import com.github.mmolimar.ksql.jdbc.utils.TestUtils
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.Logging
import kafka.zk.AdminZkClient

import scala.collection.Seq

class EmbeddedKafkaCluster(zkConnection: String,
                           ports: Seq[Int] = Seq(TestUtils.getAvailablePort),
                           baseProps: Properties = new Properties) extends Logging {

  private val actualPorts: Seq[Int] = ports.map(resolvePort)

  private var brokers: Seq[KafkaServer] = Seq.empty
  private var logDirs: Seq[File] = Seq.empty

  private lazy val zkClient = TestUtils.buildZkClient(zkConnection)
  private lazy val adminZkClient = new AdminZkClient(zkClient)

  def startup(): Unit = {
    info("Starting up embedded Kafka brokers")

    for ((port, i) <- actualPorts.zipWithIndex) {
      val logDir: File = TestUtils.makeTempDir("kafka-local")

      val properties: Properties = new Properties(baseProps)
      properties.setProperty(KafkaConfig.ZkConnectProp, zkConnection)
      properties.setProperty(KafkaConfig.ZkSyncTimeMsProp, i.toString)
      properties.setProperty(KafkaConfig.BrokerIdProp, (i + 1).toString)
      properties.setProperty(KafkaConfig.HostNameProp, "localhost")
      properties.setProperty(KafkaConfig.AdvertisedHostNameProp, "localhost")
      properties.setProperty(KafkaConfig.PortProp, port.toString)
      properties.setProperty(KafkaConfig.AdvertisedPortProp, port.toString)
      properties.setProperty(KafkaConfig.LogDirProp, logDir.getAbsolutePath)
      properties.setProperty(KafkaConfig.NumPartitionsProp, 1.toString)
      properties.setProperty(KafkaConfig.AutoCreateTopicsEnableProp, true.toString)
      properties.setProperty(KafkaConfig.DeleteTopicEnableProp, true.toString)
      properties.setProperty(KafkaConfig.LogFlushIntervalMessagesProp, 1.toString)
      properties.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, 1.toString)

      info(s"Local directory for broker ID ${i + 1} is ${logDir.getAbsolutePath}")

      brokers :+= startBroker(properties)
      logDirs :+= logDir
    }

    info(s"Started embedded Kafka brokers: $getBrokerList")
  }

  def shutdown(): Unit = {
    brokers.foreach(broker => TestUtils.swallow(broker.shutdown))
    logDirs.foreach(logDir => TestUtils.swallow(TestUtils.deleteFile(logDir)))
  }

  def getPorts: Seq[Int] = actualPorts

  def getBrokerList: String = actualPorts.map("localhost:" + _).mkString(",")

  def createTopic(topic: String, numPartitions: Int = 1, replicationFactor: Int = 1): Unit = {
    info(s"Creating topic $topic")
    adminZkClient.createTopic(topic, numPartitions, replicationFactor)
  }

  def deleteTopic(topic: String): Unit = {
    info(s"Deleting topic $topic")
    adminZkClient.deleteTopic(topic)
  }

  def deleteTopics(topics: Seq[String]): Unit = topics.foreach(deleteTopic)

  def existTopic(topic: String): Boolean = zkClient.topicExists(topic)

  def listTopics: Set[String] = zkClient.getAllTopicsInCluster

  private def resolvePort(port: Int) = if (port <= 0) TestUtils.getAvailablePort else port

  private def startBroker(props: Properties): KafkaServer = {
    val server = new KafkaServer(new KafkaConfig(props))
    server.startup()
    server
  }

  override def toString: String = {
    val sb: StringBuilder = StringBuilder.newBuilder
    sb.append("Kafka{")
    sb.append("brokerList='").append(getBrokerList).append('\'')
    sb.append('}')

    sb.toString
  }

} 
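
A minimal hedged usage sketch of the cluster above; the ZooKeeper connection string is an assumption, and TestUtils belongs to the same test module as the class itself.

// Assumption: a ZooKeeper instance is reachable at localhost:2181.
val cluster = new EmbeddedKafkaCluster(zkConnection = "localhost:2181")
cluster.startup()

cluster.createTopic("test-topic", numPartitions = 1, replicationFactor = 1)
// Point producers and consumers at cluster.getBrokerList, e.g. "localhost:<port>".

cluster.deleteTopic("test-topic")
cluster.shutdown()
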
Example 4
Source File: servers.scala    From embedded-kafka-schema-registry   with MIT License
package net.manub.embeddedkafka.schemaregistry

import java.nio.file.Path

import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication
import kafka.server.KafkaServer
import net.manub.embeddedkafka.{
  EmbeddedServer,
  EmbeddedServerWithKafka,
  EmbeddedZ
}

import scala.reflect.io.Directory


// Class declaration assumed from the SchemaRegistryRestApplication import and
// the `app: EmbeddedSR` field below; the original listing omitted it.
case class EmbeddedSR(app: SchemaRegistryRestApplication) extends EmbeddedServer {
  override def stop(clearLogs: Boolean = false): Unit = app.stop()
}

case class EmbeddedKWithSR(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    app: EmbeddedSR,
    logsDirs: Path,
    config: EmbeddedKafkaConfig
) extends EmbeddedServerWithKafka {
  override def stop(clearLogs: Boolean): Unit = {
    app.stop()

    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) {
      val _ = Directory(logsDirs.toFile).deleteRecursively
    }
  }
}
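
Example 4 is the same wrapper as Example 1, taken from the embedded-kafka-schema-registry project: the schema registry is driven through Confluent's SchemaRegistryRestApplication instead of RestApp, the log directory is passed as a java.nio.file.Path rather than a scala.reflect.io.Directory, and the config is a plain constructor parameter instead of an implicit, but the stop sequence is identical.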