Java Code Examples for org.apache.kafka.streams.KafkaStreams#close()

The following examples show how to use org.apache.kafka.streams.KafkaStreams#close(). Each example is taken from an open-source project; the source file, originating project, and license are noted above each snippet.
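Before the project examples, here is a minimal, self-contained sketch of the usual close() lifecycle. It is not taken from any of the projects below; the application id, broker address, and topic names are placeholders, and the 30-second timeout is an arbitrary choice. The no-argument close() blocks until all stream threads have stopped; newer clients (Kafka 2.0 and later) also provide close(Duration) to bound the wait.

import java.time.Duration;
import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;

public class KafkaStreamsCloseSketch {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "close-sketch-app");      // placeholder application id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");     // placeholder broker address
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input-topic").to("output-topic");                        // placeholder topics
        Topology topology = builder.build();

        KafkaStreams streams = new KafkaStreams(topology, props);
        streams.start();

        // Close the instance when the JVM shuts down. close(Duration) waits at most
        // the given time for all stream threads to stop.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            streams.close(Duration.ofSeconds(30));
            // cleanUp() deletes this instance's local state directory; call it only
            // when local state should not be reused on the next start.
            streams.cleanUp();
        }));
    }
}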
Example 1
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_span_from_stream_input_topic_using_kafka_client_supplier() {
  String inputTopic = testName.getMethodName() + "-input";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic).foreach((k, v) -> {
  });
  Topology topology = builder.build();

  KafkaStreams streams =
    new KafkaStreams(topology, streamsProperties(), kafkaStreamsTracing.kafkaClientSupplier());

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  streams.close();
  streams.cleanUp();
}
 
Example 2
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_foreach() {
  String inputTopic = testName.getMethodName() + "-input";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .process(kafkaStreamsTracing.foreach("foreach-1", (key, value) -> {
      try {
        Thread.sleep(100L);
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);

  streams.close();
  streams.cleanUp();
}
 
Example 3
Source File: LocalQueryExecutor.java    From rya with Apache License 2.0
@Override
protected void shutDown() throws Exception {
    log.info("Local Query Executor shutting down. Stopping all jobs...");

    // Stop all of the running queries.
    for(final KafkaStreams job : byQueryId.values()) {
        job.close();
    }

    log.info("Local Query Executor shut down.");
}
 
Example 4
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_not_filtered_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transformValues(kafkaStreamsTracing.markAsNotFiltered("filterNot-1", (key, value) -> true))
    .filterNot((k, v) -> Objects.isNull(v))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");

  // the filterNot transformer returns true so record is dropped

  streams.close();
  streams.cleanUp();
}
 
Example 5
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_input_and_output_topics() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic).to(outputTopic);
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  Consumer<String, String> consumer = createTracingConsumer(outputTopic);

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanInput);

  streams.close();
  streams.cleanUp();
  consumer.close();
}
 
Example 6
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_map() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.map("map-1", (key, value) -> {
      try {
        Thread.sleep(100L);
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
      return KeyValue.pair(key, value);
    }))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
 
Example 7
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_not_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filterNot("filterNot-1", (key, value) -> true))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");

  // the filterNot transformer returns true so record is dropped

  streams.close();
  streams.cleanUp();
}
 
Example 8
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filter("filter-1", (key, value) -> true))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the filter transformer returns true so record is not dropped

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
 
Example 9
Source File: StockPerformanceStreamsAndProcessorMultipleValuesApplication.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        Serde<String> stringSerde = Serdes.String();
        Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
        Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();


        StreamsBuilder builder = new StreamsBuilder();

        String stocksStateStore = "stock-performance-store";
        double differentialThreshold = 0.05;

        TransformerSupplier<String, StockTransaction, KeyValue<String, List<KeyValue<String, StockPerformance>>>> transformerSupplier =
                () -> new StockPerformanceMultipleValuesTransformer(stocksStateStore, differentialThreshold);

        KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
        StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

        builder.addStateStore(storeBuilder);

        builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
                .transform(transformerSupplier, stocksStateStore).flatMap((dummyKey,valueList) -> valueList)
                .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));
                //.to(stringSerde, stockPerformanceSerde, "stock-performance");


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
        System.out.println("Stock Analysis KStream/Process API App Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        Thread.sleep(70000);
        System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 10
Source File: WikipediaStreamDemo.java    From hello-kafka-streams with Apache License 2.0
public static void main(String[] args) throws Exception {

        final String bootstrapServers = args.length == 1 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;

        //1. Launch Embedded Connect Instance for ingesting Wikipedia IRC feed into wikipedia-raw topic
        ConnectEmbedded connect = createWikipediaFeedConnectInstance(bootstrapServers);
        connect.start();

        //2. Launch Kafka Streams Topology
        KafkaStreams streams = createWikipediaStreamsInstance(bootstrapServers);
        try {
            streams.start();
        } catch (Throwable e) {
            log.error("Stopping the application due to streams initialization error ", e);
            connect.stop();
        }
        
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                connect.stop();
            }
        });

        try {
            connect.awaitStop();
            log.info("Connect closed cleanly...");
        } finally {
            streams.close();
            log.info("Streams closed cleanly...");
        }
    }
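Example 10 closes the streams instance only after the embedded Connect instance has stopped. Another common arrangement, roughly following the pattern used in the official Kafka Streams demos, is to close the instance from the shutdown hook itself and keep the main thread alive with a CountDownLatch. The helper below is an illustrative sketch only; the class and method names are invented and it is not part of the hello-kafka-streams project.

import java.util.concurrent.CountDownLatch;

import org.apache.kafka.streams.KafkaStreams;

final class RunUntilShutdown {

    // Start the (already configured) KafkaStreams instance and block the calling
    // thread until the JVM begins to shut down, then close the instance cleanly.
    static void runUntilShutdown(KafkaStreams streams) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            streams.close();   // blocks until all stream threads have stopped
            latch.countDown(); // release the waiting thread below
        }));
        streams.start();
        latch.await();
    }
}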
 
Example 11
Source File: StockPerformanceStreamsAndProcessorApplication.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        Serde<String> stringSerde = Serdes.String();
        Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
        Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();


        StreamsBuilder builder = new StreamsBuilder();

        String stocksStateStore = "stock-performance-store";
        double differentialThreshold = 0.02;

        KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
        StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

        builder.addStateStore(storeBuilder);

        builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
                .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
                .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));

        //Uncomment this line and comment out the line above for writing to a topic
        //.to(stringSerde, stockPerformanceSerde, "stock-performance");


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
        System.out.println("Stock Analysis KStream/Process API App Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        Thread.sleep(70000);
        System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 12
Source File: StreamingApp.java    From Apache-Kafka-1-Cookbook with MIT License
public static void main(String[] args) throws Exception { 

   Properties props = new Properties(); 
   props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streaming_app_id");// 1 
   props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); //2 

   StreamsConfig config = new StreamsConfig(props); // 3 
   StreamsBuilder builder = new StreamsBuilder(); //4 

   KStream<String, String> simpleFirstStream = builder.stream("src-topic"); //5 

   KStream<String, String> upperCasedStream = simpleFirstStream.mapValues(String::toUpperCase); //6 

   upperCasedStream.to("out-topic"); //7 

   // Build the topology only after all processing steps are defined so they are included in it
   Topology topology = builder.build(); 

   KafkaStreams streams = new KafkaStreams(topology, config); 
   

   System.out.println("Streaming App Started"); 
   streams.start(); 
   Thread.sleep(30000);  //8 
   System.out.println("Shutting down the Streaming App"); 
   streams.close(); 
 }
 
Example 13
Source File: KafkaStreamsJoinsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        StreamsBuilder builder = new StreamsBuilder();


        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<String> stringSerde = Serdes.String();

        KeyValueMapper<String, Purchase, KeyValue<String,Purchase>> custIdCCMasking = (k, v) -> {
            Purchase masked = Purchase.builder(v).maskCreditCard().build();
            return new KeyValue<>(masked.getCustomerId(), masked);
        };


        Predicate<String, Purchase> coffeePurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> electronicPurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int COFFEE_PURCHASE = 0;
        int ELECTRONICS_PURCHASE = 1;

        KStream<String, Purchase> transactionStream = builder.stream( "transactions", Consumed.with(Serdes.String(), purchaseSerde)).map(custIdCCMasking);

        KStream<String, Purchase>[] branchesStream = transactionStream.selectKey((k,v)-> v.getCustomerId()).branch(coffeePurchase, electronicPurchase);

        KStream<String, Purchase> coffeeStream = branchesStream[COFFEE_PURCHASE];
        KStream<String, Purchase> electronicsStream = branchesStream[ELECTRONICS_PURCHASE];

        ValueJoiner<Purchase, Purchase, CorrelatedPurchase> purchaseJoiner = new PurchaseJoiner();
        JoinWindows twentyMinuteWindow =  JoinWindows.of(60 * 1000 * 20);

        KStream<String, CorrelatedPurchase> joinedKStream = coffeeStream.join(electronicsStream,
                                                                              purchaseJoiner,
                                                                              twentyMinuteWindow,
                                                                              Joined.with(stringSerde,
                                                                                          purchaseSerde,
                                                                                          purchaseSerde));

        joinedKStream.print(Printed.<String, CorrelatedPurchase>toSysOut().withLabel("joined KStream"));

        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        LOG.info("Starting Join Examples");
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Join Examples now");
        kafkaStreams.close();
        MockDataProducer.shutdown();


    }
 
Example 14
Source File: StreamsBuilderSmokeTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Integration test validates that streams can be used against KafkaTestServer.
 */
@Test
void testStreamConsumer() throws Exception {
    // Define topic to test with.
    final String inputTopic = "stream-input-topic" + System.currentTimeMillis();
    final String outputTopic = "stream-output-topic" + System.currentTimeMillis();

    // Define how many records
    final int numberOfRecords = 25;
    final int partitionId = 0;

    // Tracks how many records the Stream consumer has processed.
    final AtomicInteger recordCounter = new AtomicInteger(0);

    // Create our test server instance.
    try (final KafkaTestServer kafkaTestServer = new KafkaTestServer()) {
        // Start it and create our topic.
        kafkaTestServer.start();

        // Create test utils instance.
        final KafkaTestUtils kafkaTestUtils = new KafkaTestUtils(kafkaTestServer);

        // Create topics
        kafkaTestUtils.createTopic(inputTopic, 1, (short) 1);
        kafkaTestUtils.createTopic(outputTopic, 1, (short) 1);

        // Produce random data into input topic
        kafkaTestUtils.produceRecords(numberOfRecords, inputTopic, partitionId);

        // Define stream consumer properties.
        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "testStreamProcessor");
        config.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaTestServer.getKafkaConnectString());
        config.put("group.id", "test-stream-group");
        config.put("auto.offset.reset", "earliest");

        // Build the stream
        final StreamsBuilder streamsBuilder = new StreamsBuilder();
        streamsBuilder
            // Read from input topic.
            .stream(inputTopic)

            // For each record processed, increment our counter
            .map((key, word) -> {
                recordCounter.incrementAndGet();
                return new KeyValue<>(word, word);
            })

            // Write to output topic.
            .to(outputTopic);

        // Create stream
        final KafkaStreams kafkaStreams = new KafkaStreams(streamsBuilder.build(), new StreamsConfig(config));
        try {
            // Start the stream consumer
            kafkaStreams.start();

            // Since stream processing is async, we need to wait for the Stream processor to start, consume messages
            // from the input topic, and process them. We'll wait up to 10 seconds for it to do its thing.
            for (int timeoutCounter = 0; timeoutCounter <= 10; timeoutCounter++) {
                // If we've processed all of our records
                if (recordCounter.get() >= numberOfRecords) {
                    // Break out of sleep loop.
                    break;
                }
                // Otherwise, we need to wait longer, sleep 1 second.
                Thread.sleep(1000L);
            }
        } finally {
            // Close the stream consumer.
            kafkaStreams.close();
        }

        // Validation.
        Assertions.assertEquals(numberOfRecords, recordCounter.get(), "Should have 25 records processed");

        // Consume records from output topic.
        final List<ConsumerRecord<String, String>> outputRecords =
            kafkaTestUtils.consumeAllRecordsFromTopic(outputTopic, StringDeserializer.class, StringDeserializer.class);

        // Validate we got the correct number of records.
        Assertions.assertEquals(numberOfRecords, outputRecords.size());
    }
}
 
Example 15
Source File: PopsHopsApplication.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        Deserializer<BeerPurchase> beerPurchaseDeserializer = new JsonDeserializer<>(BeerPurchase.class);
        Serde<String> stringSerde = Serdes.String();
        Deserializer<String> stringDeserializer = stringSerde.deserializer();
        Serializer<String> stringSerializer = stringSerde.serializer();
        Serializer<BeerPurchase> beerPurchaseSerializer = new JsonSerializer<>();

        Topology toplogy = new Topology();

        String domesticSalesSink = "domestic-beer-sales";
        String internationalSalesSink = "international-beer-sales";
        String purchaseSourceNodeName = "beer-purchase-source";
        String purchaseProcessor = "purchase-processor";


        BeerPurchaseProcessor beerProcessor = new BeerPurchaseProcessor(domesticSalesSink, internationalSalesSink);

        toplogy.addSource(LATEST,
                          purchaseSourceNodeName,
                          new UsePreviousTimeOnInvalidTimestamp(),
                          stringDeserializer,
                          beerPurchaseDeserializer,
                          Topics.POPS_HOPS_PURCHASES.topicName())
                .addProcessor(purchaseProcessor,
                              () -> beerProcessor,
                              purchaseSourceNodeName);

                //Uncomment these two lines and comment out the printer lines for writing to topics
               // .addSink(internationalSalesSink,"international-sales", stringSerializer, beerPurchaseSerializer, purchaseProcessor)
               // .addSink(domesticSalesSink,"domestic-sales", stringSerializer, beerPurchaseSerializer, purchaseProcessor);

        //You'll have to comment these lines out if you want to write to topics as they have the same node names
        toplogy.addProcessor(domesticSalesSink,
                            new KStreamPrinter("domestic-sales"),
                            purchaseProcessor );

        toplogy.addProcessor(internationalSalesSink,
                             new KStreamPrinter("international-sales"),
                             purchaseProcessor );

        KafkaStreams kafkaStreams = new KafkaStreams(toplogy, streamsConfig);
        MockDataProducer.produceBeerPurchases(5);
        System.out.println("Starting Pops-Hops Application now");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        Thread.sleep(70000);
        System.out.println("Shutting down Pops-Hops Application  now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 16
Source File: StockPerformanceInteractiveQueryApplication.java    From kafka-streams-in-action with Apache License 2.0
private static void shutdown(KafkaStreams kafkaStreams, InteractiveQueryServer queryServer) {
    LOG.info("Shutting down the Stock Analysis Interactive Query App Started now");
    kafkaStreams.close();
    queryServer.stop();
}
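The helper above uses the no-argument close(), which blocks until all stream threads have stopped. On newer clients (Kafka 2.0 and later) close(Duration) can be used instead to bound the wait and report whether shutdown completed in time. The sketch below is illustrative and not part of the original project; the class name and the 30-second timeout are assumptions.

import java.time.Duration;

import org.apache.kafka.streams.KafkaStreams;

final class BoundedShutdown {

    // Close the instance, but give up after the timeout instead of blocking indefinitely.
    // close(Duration) returns false when the stream threads did not stop in time.
    static void shutdown(KafkaStreams kafkaStreams) {
        boolean closed = kafkaStreams.close(Duration.ofSeconds(30));
        if (!closed) {
            System.err.println("KafkaStreams instance did not shut down within 30 seconds");
        }
    }
}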
 
Example 17
Source File: Kafka_Streams_TensorFlow_Keras_Example_IntegrationTest.java    From kafka-streams-machine-learning-examples with Apache License 2.0
@Test
public void shouldPredictValues() throws Exception {

	// ########################################################
	// Step 1: Load Keras Model using DeepLearning4J API
	// ########################################################
	String simpleMlp = new ClassPathResource("generatedModels/Keras/simple_mlp.h5").getFile().getPath();
	System.out.println(simpleMlp.toString());

	MultiLayerNetwork model = KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp);

	// Create test data which is sent from Kafka Producer into Input Topic
	List<String> inputValues = Arrays.asList("256,100");

	// ####################################################################
	// Step 2: Configure and start the Kafka Streams processor topology.
	// ####################################################################

	Properties streamsConfiguration = new Properties();
	streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test");
	streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());

	// Configure Kafka Streams Application
	// Specify default (de)serializers for record keys and for record
	// values.
	streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic, where
	// message values represent lines of text (for the sake of this example, we
	// ignore whatever may be stored in the message keys).
	final KStream<String, String> inputEvents = builder.stream(inputTopic);

	// ###############################################################
	// THIS IS WHERE WE DO REAL TIME MODEL INFERENCE FOR EACH EVENT
	// ###############################################################
	inputEvents.foreach((key, value) -> {

		// Transform input values (list of Strings) to expected DL4J parameters (two
		// Integer values):
		String[] valuesAsArray = value.split(",");
		INDArray input = Nd4j.create(Integer.parseInt(valuesAsArray[0]), Integer.parseInt(valuesAsArray[1]));

		// Apply the analytic model:
		output = model.output(input);
		prediction = output.toString();

	});

	// Transform message: Add prediction result
	KStream<String, Object> transformedMessage = inputEvents.mapValues(value -> "Prediction => " + prediction);

	// Send prediction result to Output Topic
	transformedMessage.to(outputTopic);

	// Start Kafka Streams Application to process new incoming messages from
	// Input Topic
	final KafkaStreams streams = new TestKafkaStreams(builder.build(), streamsConfiguration);
	streams.cleanUp();
	streams.start();
	System.out.println("Prediction Microservice is running...");
	System.out.println("Input to Kafka Topic " + inputTopic + "; Output to Kafka Topic " + outputTopic);

	// ########################################################
	// Step 3: Produce some input data to the input topic.
	// ########################################################

	Properties producerConfig = new Properties();
	producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
	producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
	producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig, new MockTime());

	// ########################################################
	// Step 4: Verify the application's output data.
	// ########################################################

	Properties consumerConfig = new Properties();
	consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test-standard-consumer");
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	List<KeyValue<String, String>> response = IntegrationTestUtils
			.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, 1);
	streams.close();

	System.out.println("VALUE: " + response.get(0).value);

	assertThat(response).isNotNull();
	assertThat(response.get(0).value).doesNotMatch("Value => unknown");
	assertThat(response.get(0).value).contains("0.1000,    0.1000,    0.1000");
}
 
Example 18
Source File: KStreamVsKTableExample.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        StreamsBuilder builder = new StreamsBuilder();


        KTable<String, StockTickerData> stockTickerTable = builder.table(STOCK_TICKER_TABLE_TOPIC);
        KStream<String, StockTickerData> stockTickerStream = builder.stream(STOCK_TICKER_STREAM_TOPIC);

        stockTickerTable.toStream().print(Printed.<String, StockTickerData>toSysOut().withLabel("Stocks-KTable"));
        stockTickerStream.print(Printed.<String, StockTickerData>toSysOut().withLabel( "Stocks-KStream"));

        int numberCompanies = 3;
        int iterations = 3;

        MockDataProducer.produceStockTickerData(numberCompanies, iterations);

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        LOG.info("KTable vs KStream output started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        Thread.sleep(15000);
        LOG.info("Shutting down KTable vs KStream Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();

    }
 
Example 19
Source File: ZMartKafkaStreamsAdvancedReqsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();


        // previous requirements
        KStream<String,Purchase> purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print( Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));



           // selecting a key for storage and filtering out low dollar purchases


        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.print(Printed.<Long, Purchase>toSysOut().withLabel("purchases"));
        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));



         // branching stream for separating out purchases in new departments to their own topics

        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to( "coffee", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[coffee].print(Printed.<String, Purchase>toSysOut().withLabel( "coffee"));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[electronics].print(Printed.<String, Purchase>toSysOut().withLabel("electronics"));




         // security Requirements to record transactions for certain employee
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) ->
                SecurityDBService.saveRecord(purchase.getPurchaseDate(), purchase.getEmployeeId(), purchase.getItemPurchased());

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);


        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("ZMart Advanced Requirements Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 20
Source File: ZMartKafkaStreamsAddStateApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String,Purchase> purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));



     // adding State to processor
    String rewardsStateStoreName = "rewardsPointsStore";
    RewardsStreamPartitioner streamPartitioner = new RewardsStreamPartitioner();

    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(rewardsStateStoreName);
    StoreBuilder<KeyValueStore<String, Integer>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), Serdes.Integer());

    builder.addStateStore(storeBuilder);

    KStream<String, Purchase> transByCustomerStream = purchaseKStream.through( "customer_transactions", Produced.with(stringSerde, purchaseSerde, streamPartitioner));


    KStream<String, RewardAccumulator> statefulRewardAccumulator = transByCustomerStream.transformValues(() ->  new PurchaseRewardTransformer(rewardsStateStoreName),
            rewardsStateStoreName);

    statefulRewardAccumulator.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
    statefulRewardAccumulator.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));



    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    
    LOG.info("Starting Adding State Example");
    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
    LOG.info("ZMart Adding State Application Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Add State Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}