Java Code Examples for org.apache.kafka.streams.StreamsBuilder#build()

The following examples show how to use org.apache.kafka.streams.StreamsBuilder#build(). Each example is taken from an open-source project; the source file and license are noted above it.
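All of the examples below follow the same basic lifecycle: register processing steps on a StreamsBuilder, call build() to obtain the immutable Topology, and pass that topology to KafkaStreams. As a point of reference, here is a minimal, self-contained sketch of that pattern; the application id and topic names are placeholders, not taken from any of the projects below.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;

public class BuildExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "build-example");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        // Define processing steps first: build() captures whatever has been
        // registered on the builder up to the point it is called.
        builder.stream("input-topic").to("output-topic");

        Topology topology = builder.build();
        KafkaStreams streams = new KafkaStreams(topology, props);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}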
Example 1
Source File: AbstractKafkaRepository.java    From SkaETL with Apache License 2.0
public AbstractKafkaRepository(String name, Serde<V> valueSerde, Function<V,String> keyFunction, KafkaAdminService kafkaAdminService, KafkaConfiguration kafkaConfiguration) {
    this.repositoryName = name + "-db";
    this.keyFunction = keyFunction;
    this.producer = KafkaUtils.kafkaProducer(kafkaConfiguration.getBootstrapServers(), StringSerializer.class, JsonNodeSerialializer.class);
    kafkaAdminService.createTopic(kafkaAdminService.buildTopicInfo(repositoryName, TopicConfig.CLEANUP_POLICY_COMPACT));

    Properties props = KafkaUtils.createKStreamProperties(repositoryName + "-stream" + UUID.randomUUID().toString(), kafkaConfiguration.getBootstrapServers());
    StreamsBuilder builder = new StreamsBuilder();

    final GlobalKTable<String, V> globalKTable = builder.globalTable(repositoryName, materialize(valueSerde));

    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();
    producer.flush();
    // Note: fetching the store right after start() can throw
    // InvalidStateStoreException if the store is not yet initialized.
    keyValueStore = streams.store(getStoreName(), QueryableStoreTypes.keyValueStore());

    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

}
 
Example 2
Source File: StreamingWordCount.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(final String[] args) {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "StreamingWordCount");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.STATE_DIR_CONFIG, "state-store");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    logger.info("Start Reading Messages");
    StreamsBuilder streamBuilder = new StreamsBuilder();
    KStream<String, String> KS0 = streamBuilder.stream("streaming-word-count");

    KStream<String, String> KS1 = KS0.flatMapValues(value ->
        Arrays.asList(value.toLowerCase().split(" ")));

    KGroupedStream<String, String> KGS2 = KS1.groupBy((key, value) -> value);

    KTable<String, Long> KTS3 = KGS2.count();

    KTS3.toStream().peek(
        (k, v) -> logger.info("Key = " + k + " Value = " + v.toString())
    );

    KafkaStreams streams = new KafkaStreams(streamBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
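The example above only logs the running counts via peek. To persist them, the count KTable could also be written to an output topic; note that the value serde must be Long because count() produces longs. A small sketch under that assumption (the output topic name here is made up, not part of the original project):

    KTS3.toStream().to(
        "streaming-word-count-output",
        Produced.with(Serdes.String(), Serdes.Long()));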
 
Example 3
Source File: KafkaDenormalizer.java    From cqrs-eventsourcing-kafka with Apache License 2.0
@Override
public void start() throws Exception {
    Predicate<String, EventEnvelope> inventoryItemCreated = (k, v) -> k.equals(InventoryItemCreated.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemRenamed =  (k, v) -> k.equals(InventoryItemRenamed.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemDeactivated = (k, v) -> k.equals(InventoryItemDeactivated.class.getSimpleName());

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, EventEnvelope>[] filteredStreams = builder
            .stream(INVENTORY_ITEM_TOPIC, Consumed.with(Serdes.String(), initializeEnvelopeSerde()))
            .selectKey((k, v) -> v.eventType)
            .branch(inventoryItemCreated, inventoryItemRenamed, inventoryItemDeactivated);

    filteredStreams[0].process(InventoryItemCreatedHandler::new);
    filteredStreams[1].process(InventoryItemRenamedHandler::new);
    filteredStreams[2].process(InventoryItemDeactivatedHandler::new);

    kafkaStreams = new KafkaStreams(builder.build(), getProperties());
    kafkaStreams.cleanUp(); // -- only because we are using in-memory
    kafkaStreams.start();
}
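The array-returning branch(...) used above was deprecated in Kafka Streams 2.8 in favor of split(). Assuming a 2.8+ client, the same predicates, and handler suppliers compatible with that version's Processor API, the routing could be written roughly like this (Named and Branched come from org.apache.kafka.streams.kstream):

    Map<String, KStream<String, EventEnvelope>> branches = builder
            .stream(INVENTORY_ITEM_TOPIC, Consumed.with(Serdes.String(), initializeEnvelopeSerde()))
            .selectKey((k, v) -> v.eventType)
            .split(Named.as("inventory-"))
            .branch(inventoryItemCreated, Branched.as("created"))
            .branch(inventoryItemRenamed, Branched.as("renamed"))
            .branch(inventoryItemDeactivated, Branched.as("deactivated"))
            .noDefaultBranch();

    // Map keys are the branch names prefixed with the Named value from split().
    branches.get("inventory-created").process(InventoryItemCreatedHandler::new);
    branches.get("inventory-renamed").process(InventoryItemRenamedHandler::new);
    branches.get("inventory-deactivated").process(InventoryItemDeactivatedHandler::new);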
 
Example 4
Source File: NameJoinGlobalKTable.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> inputStream =
            builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long()));

    final GlobalKTable<Long, String> joinTable = builder.globalTable(NAME_INPUT);

    inputStream
            .join(joinTable,
                    (id, valueId) -> valueId,
                    (id, name) -> name)
            .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String()));

    return builder.build();
}
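builder.globalTable(NAME_INPUT) above relies on the application's default serdes. If the defaults are not Long/String, the serdes can be stated explicitly via Consumed, for instance:

    final GlobalKTable<Long, String> joinTable =
            builder.globalTable(NAME_INPUT, Consumed.with(Serdes.Long(), Serdes.String()));

A GlobalKTable is fully replicated to every application instance, which is why this join does not require the input stream and the table topic to be co-partitioned.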
 
Example 5
Source File: StreamsStarterApp.java    From kafka-streams-machine-learning-examples with Apache License 2.0
public static void main(String[] args) {

		Properties config = new Properties();
		config.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-starter-app");
		config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
		config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
		config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
		config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

		StreamsBuilder builder = new StreamsBuilder();

		KStream<String, String> kStream = builder.stream("streams-file-input");
		// do stuff
		kStream.to("streams-wordcount-output");

		KafkaStreams streams = new KafkaStreams(builder.build(), config);
		streams.cleanUp(); // only do this in dev - not in prod
		streams.start();

		// print the local thread metadata (runtime state of the stream threads)
		System.out.println(streams.localThreadsMetadata().toString());

		// shutdown hook to correctly close the streams application
		Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

	}
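localThreadsMetadata() above reports the runtime state of the stream threads, not the topology itself. To print the actual processing topology, Topology#describe() is the standard tool; a brief sketch of how the example could be adapted:

		Topology topology = builder.build();
		System.out.println(topology.describe());
		KafkaStreams streams = new KafkaStreams(topology, config);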
 
Example 6
Source File: AggregatingSum.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply SUM aggregation
      .reduce(Integer::sum)
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Integer()));

  return builder.build();
}
 
Example 7
Source File: MetricImporter.java    From SkaETL with Apache License 2.0
private KafkaStreams feedMergeTopic(String id, String mergeTopic, String destId) {
    StreamsBuilder builder = new StreamsBuilder();
    Properties properties = createProperties(kafkaConfiguration.getBootstrapServers());
    String inputTopic = id + TOPIC_TREAT_PROCESS;
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, inputTopic + "merger-stream-" + destId);

    KStream<String, JsonNode> stream = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    stream.to(mergeTopic, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));

    final KafkaStreams streams = new KafkaStreams(builder.build(), properties);
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    return streams;
}
 
Example 8
Source File: CountingSessionApp.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    props.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreName);

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    KStream<String, UserClicks> KS0 = streamsBuilder.stream(
        AppConfigs.posTopicName,
        Consumed.with(AppSerdes.String(), AppSerdes.UserClicks())
            .withTimestampExtractor(new AppTimestampExtractor())
    );

    KGroupedStream<String, UserClicks> KS1 = KS0.groupByKey(
        Grouped.with(AppSerdes.String(),
            AppSerdes.UserClicks()));

    SessionWindowedKStream<String, UserClicks> KS2 = KS1.windowedBy(
        SessionWindows.with(Duration.ofMinutes(5))
            .grace(Duration.ofMinutes(1))
    );

    KTable<Windowed<String>, Long> KT3 = KS2.count(
        //Materialized is not needed if you don't want to override defaults
        Materialized.<String, Long, SessionStore<Bytes, byte[]>>as("clicks-by-user-session")
    );

    KT3.toStream().foreach(
        (kWindowed, v) -> logger.info(
            "UserID: " + kWindowed.key() +
                " Window Start: " + utcTimeString(kWindowed.window().start()) +
                " Window End: " + utcTimeString(kWindowed.window().end()) +
                " Count: " + v
        ));

    KafkaStreams streams = new KafkaStreams(streamsBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
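Because the count above is materialized as "clicks-by-user-session", the per-session counts are also queryable from the running application. A hedged sketch, assuming Kafka Streams 2.5+ (for StoreQueryParameters) and a hypothetical user id:

    ReadOnlySessionStore<String, Long> store = streams.store(
        StoreQueryParameters.fromNameAndType(
            "clicks-by-user-session",
            QueryableStoreTypes.sessionStore()));

    try (KeyValueIterator<Windowed<String>, Long> sessions = store.fetch("user-1")) {
        sessions.forEachRemaining(kv ->
            logger.info("Session " + kv.key + " count " + kv.value));
    }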
 
Example 9
Source File: MatchApplication.java    From football-events with MIT License
@Bean
public KafkaStreams kafkaStreams() {
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    DomainUpdater snapshotBuilder = new DomainUpdater(leagueRepository());
    Topology topology = streamsBuilder.build();
    snapshotBuilder.init(topology);
    KafkaStreamsStarter starter = new KafkaStreamsStarter(kafkaBootstrapAddress, topology, APP_ID);
    starter.setKafkaTimeout(kafkaTimeout);
    starter.setStreamsStartupTimeout(streamsStartupTimeout);
    return starter.start();
}
 
Example 10
Source File: KStreamAggDemo.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    props.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreName);

    StreamsBuilder streamsBuilder = new StreamsBuilder();
    KStream<String, Employee> KS0 = streamsBuilder.stream(AppConfigs.topicName,
        Consumed.with(AppSerdes.String(), AppSerdes.Employee()));

    KGroupedStream<String, Employee> KGS1 = KS0.groupBy(
        (k, v) -> v.getDepartment(),
        Serialized.with(AppSerdes.String(),
            AppSerdes.Employee()));

    KTable<String, DepartmentAggregate> KT2 = KGS1.aggregate(
        //Initializer
        () -> new DepartmentAggregate()
            .withEmployeeCount(0)
            .withTotalSalary(0)
            .withAvgSalary(0D),
        //Aggregator
        (k, v, aggV) -> new DepartmentAggregate()
            .withEmployeeCount(aggV.getEmployeeCount() + 1)
            .withTotalSalary(aggV.getTotalSalary() + v.getSalary())
            .withAvgSalary((aggV.getTotalSalary() + v.getSalary()) / (aggV.getEmployeeCount() + 1D)),
        //Materialized: state store name and serdes
        Materialized.<String, DepartmentAggregate, KeyValueStore<Bytes, byte[]>>as("agg-store")
            .withKeySerde(AppSerdes.String())
            .withValueSerde(AppSerdes.DepartmentAggregate())
    );

    KT2.toStream().foreach(
        (k, v) -> System.out.println("Key = " + k + " Value = " + v.toString()));

    KafkaStreams streams = new KafkaStreams(streamsBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

}
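Serialized.with in the grouping step above was deprecated in Kafka 2.1 and replaced by Grouped (the same applies to the KGroupedTable example further down). On a newer client the grouping would look roughly like this; Grouped can additionally name the repartition topic:

    KGroupedStream<String, Employee> KGS1 = KS0.groupBy(
        (k, v) -> v.getDepartment(),
        Grouped.with(AppSerdes.String(), AppSerdes.Employee()));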
 
Example 11
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_multiple_span_from_stream_input_topic_whenSharingDisabled() {
  String inputTopic = testName.getMethodName() + "-input";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic).foreach((k, v) -> {
  });
  Topology topology = builder.build();

  KafkaStreamsTracing kafkaStreamsTracing = KafkaStreamsTracing.newBuilder(tracing)
    .singleRootSpanOnReceiveBatch(false)
    .build();
  KafkaStreams streams = kafkaStreamsTracing.kafkaStreams(topology, streamsProperties());

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));
  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));
  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  for (int i = 0; i < 3; i++) {
    MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
    assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);
  }

  streams.close();
  streams.cleanUp();
}
 
Example 12
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filter("filter-2", (key, value) -> false))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");

  // the filter transformer returns false so record is dropped

  streams.close();
  streams.cleanUp();
}
 
Example 13
Source File: AgeCountDemo.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    props.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreLocation);

    StreamsBuilder streamsBuilder = new StreamsBuilder();
    KTable<String, String> KT0 = streamsBuilder.table(
        AppConfigs.topicName,
        Consumed.with(Serdes.String(), Serdes.String()));

    KGroupedTable<String, String> KGT1 = KT0.groupBy(
        (person, age) -> KeyValue.pair(age, "1"),
        Serialized.with(Serdes.String(), Serdes.String())
    );

    KGT1.count()
        .toStream().peek((k, v) -> logger.info("Age=" + k + " Count=" + v));

    KafkaStreams myStream = new KafkaStreams(streamsBuilder.build(), props);
    myStream.start();

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        logger.info("Stopping Stream");
        myStream.close();
    }));
}
 
Example 14
Source File: BasicViewApplication.java    From football-events with MIT License
@Bean
public KafkaStreams kafkaStreams() {
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    new StatisticsBuilder(streamsBuilder).build();
    Topology topology = streamsBuilder.build();
    KafkaStreamsStarter starter = new KafkaStreamsStarter(kafkaBootstrapAddress, topology, APP_ID);
    starter.setKafkaTimeout(kafkaTimeout);
    starter.setStreamsStartupTimeout(streamsStartupTimeout);
    return starter.start();
}
 
Example 15
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_foreach() {
  String inputTopic = testName.getMethodName() + "-input";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .process(kafkaStreamsTracing.foreach("foreach-1", (key, value) -> {
      try {
        Thread.sleep(100L);
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);

  streams.close();
  streams.cleanUp();
}
 
Example 16
Source File: FilterEvents.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<Publication> publicationSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), publicationSerde))
      .filter((name, publication) -> "George R. R. Martin".equals(publication.getName()))
      .to(outputTopic, Produced.with(Serdes.String(), publicationSerde));

  return builder.build();
}
 
Example 17
Source File: WordCountProcessorApplicationTests.java    From spring-cloud-stream-samples with Apache License 2.0
/**
 * Set up the Streams topology:
 * add a KStream based on the @StreamListener annotation,
 * and a to(topic) based on the @SendTo annotation.
 */
@Before
public void setup() {
    final StreamsBuilder builder = new StreamsBuilder();
    KStream<Bytes, String> input = builder.stream(INPUT_TOPIC, Consumed.with(nullSerde, stringSerde));
    KafkaStreamsWordCountApplication.WordCountProcessorApplication app = new KafkaStreamsWordCountApplication.WordCountProcessorApplication();
    final Function<KStream<Bytes, String>, KStream<Bytes, KafkaStreamsWordCountApplication.WordCount>> process = app.process();

    final KStream<Bytes, KafkaStreamsWordCountApplication.WordCount> output = process.apply(input);

    output.to(OUTPUT_TOPIC, Produced.with(nullSerde, countSerde));

    testDriver = new TopologyTestDriver(builder.build(), getStreamsConfiguration());
}
 
Example 18
Source File: StreamingApp.java    From Apache-Kafka-1-Cookbook with MIT License
public static void main(String[] args) throws Exception {

   Properties props = new Properties();
   props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streaming_app_id"); // 1
   props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // 2

   StreamsConfig config = new StreamsConfig(props); // 3
   StreamsBuilder builder = new StreamsBuilder(); // 4

   KStream<String, String> simpleFirstStream = builder.stream("src-topic"); // 5

   KStream<String, String> upperCasedStream = simpleFirstStream.mapValues(String::toUpperCase); // 6

   upperCasedStream.to("out-topic"); // 7

   // Build the topology only after all processing steps have been defined;
   // calling builder.build() before the steps above would yield an empty topology.
   Topology topology = builder.build();

   KafkaStreams streams = new KafkaStreams(topology, config);

   System.out.println("Streaming App Started");
   streams.start();
   Thread.sleep(30000); // 8
   System.out.println("Shutting down the Streaming App");
   streams.close();
 }
 
Example 19
Source File: PressureDatetimeExtractorTest.java    From kafka-tutorials with Apache License 2.0
@Before
public void setUp() throws Exception {
    this.pressureSerde = makePressureAlertSerde();

    Consumed<String, PressureAlert> consumedPressure = Consumed
            .with(Serdes.String(), pressureSerde)
            .withTimestampExtractor(timestampExtractor);

    Produced<String, PressureAlert> producedPressure = Produced.with(Serdes.String(), pressureSerde);

    StreamsBuilder builder = new StreamsBuilder();

    builder.stream(this.inputTopic, consumedPressure).to(this.outputTopic, producedPressure);

    this.topologyTestDriver = new TopologyTestDriver(builder.build(), WindowFinalResult.buildProperties(config));
}
 
Example 20
Source File: AlarmMessageLogger.java    From phoebus with Eclipse Public License 1.0
@Override
public void run() {
    logger.info("Starting the alarm messages stream consumer for " + topic);

    Properties props = new Properties();
    props.putAll(PropertiesHelper.getProperties());
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-"+topic+"-alarm-messages");

    if (!props.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    }
    
    
    final String indexDateSpanUnits = props.getProperty("date_span_units");
    final Integer indexDateSpanValue = Integer.parseInt(props.getProperty("date_span_value"));

    try {
        stateIndexNameHelper = new IndexNameHelper(topic + STATE_INDEX_FORMAT, indexDateSpanUnits, indexDateSpanValue);
        configIndexNameHelper = new IndexNameHelper(topic + CONFIG_INDEX_FORMAT , indexDateSpanUnits, indexDateSpanValue);
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Time based index creation failed.", ex);
    }
    
    // Attach a message time stamp.
    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, AlarmMessage> alarms = builder.stream(topic,
            Consumed.with(Serdes.String(), alarmMessageSerde).withTimestampExtractor(new TimestampExtractor() {

                @Override
                public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
                    return record.timestamp();
                }
            }));

    alarms = alarms.filter((k, v) -> v != null);

    alarms = alarms.map((key, value) -> {
        // Parenthesize the ternaries: '+' binds tighter than '!=', so without
        // the parentheses the null checks never apply to key/value themselves.
        logger.config("Processing alarm message with key : "
                + (key != null ? key : "null") + " "
                + (value != null ? value.toString() : "null"));
        value.setKey(key);
        return new KeyValue<String, AlarmMessage>(key, value);
    });

    @SuppressWarnings("unchecked")
    KStream<String, AlarmMessage>[] alarmBranches = alarms.branch((k,v) -> k.startsWith("state"),
                                                                  (k,v) -> k.startsWith("config"),
                                                                  (k,v) -> false
                                                                 );

    processAlarmStateStream(alarmBranches[0], props);
    processAlarmConfigurationStream(alarmBranches[1], props);

    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    final CountDownLatch latch = new CountDownLatch(1);

    // attach shutdown handler to catch control-c
    Runtime.getRuntime().addShutdownHook(new Thread("streams-"+topic+"-alarm-messages-shutdown-hook") {
        @Override
        public void run() {
            streams.close(10, TimeUnit.SECONDS);
            System.out.println("\nShutting streams Done.");
            latch.countDown();
        }
    });

    try {
        streams.start();
        latch.await();
    } catch (Throwable e) {
        System.exit(1);
    }
    System.exit(0);
}
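One version note on the shutdown hook above: streams.close(10, TimeUnit.SECONDS) uses an overload that was deprecated in Kafka 2.0; on newer clients the java.time.Duration variant is the replacement:

    streams.close(Duration.ofSeconds(10));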