Java Code Examples for org.apache.kafka.streams.StreamsBuilder#stream()

The following examples show how to use org.apache.kafka.streams.StreamsBuilder#stream(). They are extracted from open source projects, and you can go to the original project or source file by following the link above each example.
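
Before the project examples, here is a minimal sketch of the two overloads of stream() that the examples below rely on: one that falls back on the default serdes from the application configuration, and one that passes explicit serdes through a Consumed instance. The topic names, application id, and broker address in this sketch are placeholders for illustration, not taken from any of the projects below.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class StreamOverloadsSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Placeholder application id and broker address
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-overloads-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // Default serdes, used by the single-argument overload of stream()
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        final Serde<String> stringSerde = Serdes.String();
        StreamsBuilder builder = new StreamsBuilder();

        // Overload 1: stream(topic) uses the default key/value serdes from the config
        KStream<String, String> withDefaults = builder.stream("input-topic-a");

        // Overload 2: stream(topic, Consumed) names the serdes explicitly,
        // which is what most of the examples below do
        KStream<String, String> withExplicitSerdes =
            builder.stream("input-topic-b", Consumed.with(stringSerde, stringSerde));

        withDefaults.merge(withExplicitSerdes)
            .to("output-topic", Produced.with(stringSerde, stringSerde));

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}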
Example 1
Source File: WordCount.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count(Materialized.as("count"));

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
 
Example 2
Source File: TimeCheckDemo.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);

    StreamsBuilder streamsBuilder = new StreamsBuilder();
    KStream<String, PosInvoice> KS0 = streamsBuilder.stream(AppConfigs.posTopicName,
            Consumed.with(PosSerdes.String(), PosSerdes.PosInvoice())
                    .withTimestampExtractor(new InvoiceTimeExtractor())
    );

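    // Note: the ValueTransformer below is used only for its logging side effect; each
    // record passes through unchanged and the transformed stream is not consumed further.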
    KS0.transformValues(() -> new ValueTransformer<PosInvoice, PosInvoice>() {
        private ProcessorContext context;

        @Override
        public void init(ProcessorContext processorContext) {
            this.context = processorContext;
        }

        @Override
        public PosInvoice transform(PosInvoice invoice) {
            logger.info("Invoice Time: " + new Timestamp(invoice.getCreatedTime()) +
                    " Event Time: " + new Timestamp(context.timestamp()));
            return invoice;
        }

        @Override
        public void close() {
        }
    });

    logger.info("Starting Kafka Streams");
    KafkaStreams myStream = new KafkaStreams(streamsBuilder.build(), props);
    myStream.start();

    Runtime.getRuntime().addShutdownHook(new Thread(myStream::close));
}
 
Example 3
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void feedStream(String consumerId, ProcessReferential processReferential, String topicMerge) {
    String topicSource = consumerId + TOPIC_PARSED_PROCESS;
    log.info("creating {} Process Merge for topicsource {}", consumerId, topicSource);
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, JsonNode> streamToMerge = builder.stream(topicSource, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    streamToMerge.to(topicMerge, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(processReferential.getIdProcess() + "_" + consumerId + "-_merge-topic", kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    runningMergeProcess.get(processReferential).add(streams);
    streams.start();
}
 
Example 4
Source File: ErrorImporter.java    From SkaETL with Apache License 2.0
public void activate() {
    log.info("Activating error importer");
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<ErrorData> errorDataSerde = Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(ErrorData.class));

    KStream<String, ErrorData> streamToES = builder.stream(kafkaConfiguration.getErrorTopic(), Consumed.with(Serdes.String(), errorDataSerde));

    streamToES.process(() -> elasticsearchProcessor);

    errorStream = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(INPUT_PROCESS_ERROR, kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(errorStream::close));

    errorStream.start();
}
 
Example 5
Source File: KStreamAggDemo.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    props.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreName);

    StreamsBuilder streamsBuilder = new StreamsBuilder();
    KStream<String, Employee> KS0 = streamsBuilder.stream(AppConfigs.topicName,
        Consumed.with(AppSerdes.String(), AppSerdes.Employee()));

    KGroupedStream<String, Employee> KGS1 = KS0.groupBy(
        (k, v) -> v.getDepartment(),
        Serialized.with(AppSerdes.String(),
            AppSerdes.Employee()));

    KTable<String, DepartmentAggregate> KT2 = KGS1.aggregate(
        //Initializer
        () -> new DepartmentAggregate()
            .withEmployeeCount(0)
            .withTotalSalary(0)
            .withAvgSalary(0D),
        //Aggregator
        (k, v, aggV) -> new DepartmentAggregate()
            .withEmployeeCount(aggV.getEmployeeCount() + 1)
            .withTotalSalary(aggV.getTotalSalary() + v.getSalary())
            .withAvgSalary((aggV.getTotalSalary() + v.getSalary()) / (aggV.getEmployeeCount() + 1D)),
        //Materialized: configures the state store and its serdes
        Materialized.<String, DepartmentAggregate, KeyValueStore<Bytes, byte[]>>as("agg-store")
            .withKeySerde(AppSerdes.String())
            .withValueSerde(AppSerdes.DepartmentAggregate())
    );

    KT2.toStream().foreach(
        (k, v) -> System.out.println("Key = " + k + " Value = " + v.toString()));

    KafkaStreams streams = new KafkaStreams(streamsBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

}
 
Example 6
Source File: DynamicOutputTopic.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();
    final String orderInputTopic = envProps.getProperty("input.topic.name");
    final String orderOutputTopic = envProps.getProperty("output.topic.name");
    final String specialOrderOutput = envProps.getProperty("special.order.topic.name");

    final Serde<Long> longSerde = getPrimitiveAvroSerde(envProps, true);
    final Serde<Order> orderSerde = getSpecificAvroSerde(envProps);
    final Serde<CompletedOrder> completedOrderSerde = getSpecificAvroSerde(envProps);

    final ValueMapper<Order, CompletedOrder> orderProcessingSimulator = v -> {
       double amount = v.getQuantity() * FAKE_PRICE;
       return CompletedOrder.newBuilder().setAmount(amount).setId(v.getId() + "-" + v.getSku()).setName(v.getName()).build();
    };

    final TopicNameExtractor<Long, CompletedOrder> orderTopicNameExtractor = (key, completedOrder, recordContext) -> {
          final String compositeId = completedOrder.getId();
          // Picks the SKU prefix out of the composite id; note the fixed end index 5
          // yields the 3-letter prefix only when the order id before the '-' is one character
          final String skuPart = compositeId.substring(compositeId.indexOf('-') + 1, 5);
          final String outTopic;
          if (skuPart.equals("QUA")) {
              outTopic = specialOrderOutput;
          } else {
              outTopic = orderOutputTopic;
          }
          return outTopic;
    };

    final KStream<Long, Order> exampleStream = builder.stream(orderInputTopic, Consumed.with(longSerde, orderSerde));

    exampleStream.mapValues(orderProcessingSimulator).to(orderTopicNameExtractor, Produced.with(longSerde, completedOrderSerde));

    return builder.build();
}
 
Example 7
Source File: CountingSessionApp.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    props.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreName);

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    KStream<String, UserClicks> KS0 = streamsBuilder.stream(
        AppConfigs.posTopicName,
        Consumed.with(AppSerdes.String(), AppSerdes.UserClicks())
            .withTimestampExtractor(new AppTimestampExtractor())
    );

    KGroupedStream<String, UserClicks> KS1 = KS0.groupByKey(
        Grouped.with(AppSerdes.String(),
            AppSerdes.UserClicks()));

    SessionWindowedKStream<String, UserClicks> KS2 = KS1.windowedBy(
        SessionWindows.with(Duration.ofMinutes(5))
            .grace(Duration.ofMinutes(1))
    );

    KTable<Windowed<String>, Long> KT3 = KS2.count(
        //Materialized is not needed if you don't want to override defaults
        Materialized.<String, Long, SessionStore<Bytes, byte[]>>as("clicks-by-user-session")
    );

    KT3.toStream().foreach(
        (kWindowed, v) -> logger.info(
            "UserID: " + kWindowed.key() +
                " Window Start: " + utcTimeString(kWindowed.window().start()) +
                " Window End: " + utcTimeString(kWindowed.window().end()) +
                " Count: " + v
        ));

    KafkaStreams streams = new KafkaStreams(streamsBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
 
Example 8
Source File: StreamsIngest.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<City> citySerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  KStream<String, City> citiesNoKey = builder.stream(inputTopic, Consumed.with(Serdes.String(), citySerde));
  final KStream<Long, City> citiesKeyed = citiesNoKey.map((k, v) -> new KeyValue<>(v.getCityId(), v));
  citiesKeyed.to(outputTopic, Produced.with(Serdes.Long(), citySerde));

  return builder.build();
}
 
Example 9
Source File: KafkaStreamsLiveTest.java    From tutorials with MIT License
@Test
@Ignore("requires a Kafka broker running on localhost")
public void shouldTestKafkaStreams() throws InterruptedException {
    // given
    String inputTopic = "inputTopic";

    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    // when
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> textLines = builder.stream(inputTopic);
    Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);

    KTable<String, Long> wordCounts = textLines
        .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
        .groupBy((key, word) -> word)
        .count();

    wordCounts.toStream().foreach((word, count) -> System.out.println("word: " + word + " -> " + count));

    String outputTopic = "outputTopic";
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    wordCounts.toStream().to(outputTopic, Produced.with(stringSerde, longSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();

    // then
    Thread.sleep(30000);
    streams.close();
}
 
Example 10
Source File: SchemaKStreamTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
  StreamsBuilder builder = new StreamsBuilder();
  kStream = builder.stream(ksqlStream.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(Serdes.String(), ksqlStream.getKsqlTopic()
          .getKsqlTopicSerDe().getGenericRowSerde(null, new KsqlConfig(Collections.emptyMap())
              , false, new MockSchemaRegistryClient())));
}
 
Example 11
Source File: EventSourcedConsumer.java    From simplesource with Apache License 2.0
static <K, C> KStream<K, CommandResponse<K>> commandResponseStream(TopologyContext<K, C, ?, ?> ctx, final StreamsBuilder builder) {
    return builder.stream(ctx.topicName(COMMAND_RESPONSE), ctx.commandResponseConsumed());
}
 
Example 12
Source File: AppTopology.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
static void withBuilder(StreamsBuilder builder) {

        KStream<String, AdImpression> KS0 = builder.stream(
            AppConfigs.impressionTopic,
            Consumed.with(AppSerdes.String(), AppSerdes.AdImpression())
        );

        KTable<String, Long> adImpressionCount = KS0.groupBy(
            (k, v) -> v.getCampaigner(),
            Grouped.with(AppSerdes.String(),
                AppSerdes.AdImpression()))
            .count();

        KStream<String, AdClick> KS1 = builder.stream(
            AppConfigs.clicksTopic,
            Consumed.with(AppSerdes.String(), AppSerdes.AdClick())
        );

        KTable<String, Long> adClickCount = KS1.groupBy(
            (k, v) -> v.getCampaigner(),
            Grouped.with(AppSerdes.String(),
                AppSerdes.AdClick()))
            .count();

        KTable<String, CampaignPerformance> campaignPerformance =
            adImpressionCount.leftJoin(adClickCount,
                (impCount, clkCount) -> new CampaignPerformance()
                    .withAdImpressions(impCount)
                    .withAdClicks(clkCount)
            ).mapValues((k, v) -> v.withCampaigner(k),
                Materialized.<String, CampaignPerformance, KeyValueStore<Bytes, byte[]>>
                    as(AppConfigs.stateStoreNameCP)
                    .withKeySerde(AppSerdes.String())
                    .withValueSerde(AppSerdes.CampaignPerformance())
            );

        campaignPerformance.toStream().to(
            AppConfigs.outputTopic,
            Produced.with(AppSerdes.String(), AppSerdes.CampaignPerformance())
        );

        /*
        campaignPerformance.toStream()
            .foreach((k, v) -> logger.info("inside = " + v));
            */
    }
 
Example 13
Source File: EventSourcedConsumer.java    From simplesource with Apache License 2.0
static <K, C> KStream<K, CommandRequest<K, C>> commandRequestStream(TopologyContext<K, C, ?, ?> ctx, final StreamsBuilder builder) {
    return builder.stream(ctx.topicName(COMMAND_REQUEST), ctx.commandRequestConsumed());
}
 
Example 14
Source File: RunningAverageTest.java    From kafka-tutorials with Apache License 2.0
@Before
public void setUp() throws IOException, RestClientException {

  final Properties mockProps = new Properties();
  mockProps.put("application.id", "kafka-movies-test");
  mockProps.put("bootstrap.servers", "DUMMY_KAFKA_CONFLUENT_CLOUD_9092");
  mockProps.put("schema.registry.url", "DUMMY_SR_CONFLUENT_CLOUD_8080");
  mockProps.put("default.topic.replication.factor", "1");
  mockProps.put("offset.reset.policy", "latest");
  mockProps.put("specific.avro.reader", true);

  final RunningAverage streamsApp = new RunningAverage();
  final Properties streamsConfig = streamsApp.buildStreamsProperties(mockProps);

  StreamsBuilder builder = new StreamsBuilder();

  // workaround https://stackoverflow.com/a/50933452/27563
  final String tempDirectory = Files.createTempDirectory("kafka-streams")
      .toAbsolutePath()
      .toString();
  streamsConfig.setProperty(StreamsConfig.STATE_DIR_CONFIG, tempDirectory);

  final Map<String, String> mockSerdeConfig = RunningAverage.getSerdeConfig(streamsConfig);

  SpecificAvroSerde<CountAndSum> countAndSumSerde = new SpecificAvroSerde<>(new MockSchemaRegistryClient());
  countAndSumSerde.configure(mockSerdeConfig, false);

  // MockSchemaRegistryClient doesn't require connection to Schema Registry which is perfect for unit test
  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  ratingSpecificAvroSerde = new SpecificAvroSerde<>(client);
  client.register(RATINGS_TOPIC_NAME + "-value", Rating.SCHEMA$);
  ratingSpecificAvroSerde.configure(mockSerdeConfig, false);

  KStream<Long, Rating> ratingStream = builder.stream(RATINGS_TOPIC_NAME,
                                                      Consumed.with(Serdes.Long(), ratingSpecificAvroSerde));

  final KTable<Long, Double> ratingAverageTable = RunningAverage.getRatingAverageTable(ratingStream,
                                                                                       AVERAGE_RATINGS_TOPIC_NAME,
                                                                                       countAndSumSerde);

  final Topology topology = builder.build();
  testDriver = new TopologyTestDriver(topology, streamsConfig);

}
 
Example 15
Source File: CampaignPerformanceApp.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties properties = new Properties();
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG,
        AppConfigs.applicationID);
    properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
        AppConfigs.bootstrapServers);
    properties.put(StreamsConfig.STATE_DIR_CONFIG,
        AppConfigs.stateStoreName);

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    KStream<String, AdImpression> KS0 = streamsBuilder.stream(
        AppConfigs.impressionTopic,
        Consumed.with(AppSerdes.String(), AppSerdes.AdImpression())
    );

    KTable<String, Long> adImpressionCount = KS0.groupBy(
        (k, v) -> v.getCampaigner(),
        Grouped.with(AppSerdes.String(),
            AppSerdes.AdImpression()))
        .count();

    KStream<String, AdClick> KS1 = streamsBuilder.stream(
        AppConfigs.clicksTopic,
        Consumed.with(AppSerdes.String(), AppSerdes.AdClick())
    );

    KTable<String, Long> adClickCount = KS1.groupBy(
        (k, v) -> v.getCampaigner(),
        Grouped.with(AppSerdes.String(),
            AppSerdes.AdClick()))
        .count();

    KTable<String, CampaignPerformance> campaignPerformance =
        adImpressionCount.leftJoin(adClickCount,
            (impCount, clkCount) -> new CampaignPerformance()
                .withAdImpressions(impCount)
                .withAdClicks(clkCount))
            .mapValues((k, v) -> v.withCampaigner(k));

    campaignPerformance.toStream()
        .foreach((k, v) -> logger.info(v));

    KafkaStreams streams = new KafkaStreams(streamsBuilder.build(), properties);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
 
Example 16
Source File: Transformer.java    From apicurio-registry with Apache License 2.0
public static void main(String[] args) {
    Properties properties = new Properties();
    for (String arg : args) {
        String[] split = arg.split("=");
        properties.put(split[0], split[1]);
    }

    String appId = properties.getProperty(StreamsConfig.APPLICATION_ID_CONFIG);
    if (appId == null) {
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "apicurio-registry-transformer");
    }

    String inputTopic = properties.getProperty("input-topic");
    if (inputTopic == null) {
        throw new IllegalArgumentException("Missing input topic!");
    }

    String outputTopic = properties.getProperty("output-topic");
    if (outputTopic == null) {
        throw new IllegalArgumentException("Missing output topic!");
    }

    String fnType = properties.getProperty("type");
    if (fnType == null) {
        throw new IllegalArgumentException("Missing transformation type!");
    }
    Type type = Type.valueOf(fnType);

    log.info(String.format("Transforming: %s --> %s [%s]", inputTopic, outputTopic, type));

    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, byte[]> input = builder.stream(
        inputTopic,
        Consumed.with(Serdes.String(), Serdes.ByteArray())
    );

    input.transformValues(() -> new ValueTransformer<byte[], byte[]>() {
        @Override
        public void init(ProcessorContext context) {
        }

        @Override
        public byte[] transform(byte[] value) {
            return type.apply(value);
        }

        @Override
        public void close() {
        }
    }).to(outputTopic, Produced.with(Serdes.String(), Serdes.ByteArray()));

    Topology topology = builder.build(properties);
    KafkaStreams streams = new KafkaStreams(topology, properties);

    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

    streams.start();
}
 
Example 17
Source File: Kafka_Streams_TensorFlow_Keras_Example_IntegrationTest.java    From kafka-streams-machine-learning-examples with Apache License 2.0
@Test
public void shouldPredictValues() throws Exception {

	// ########################################################
	// Step 1: Load Keras Model using DeepLearning4J API
	// ########################################################
	String simpleMlp = new ClassPathResource("generatedModels/Keras/simple_mlp.h5").getFile().getPath();
	System.out.println(simpleMlp.toString());

	MultiLayerNetwork model = KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp);

	// Create test data which is sent from Kafka Producer into Input Topic
	List<String> inputValues = Arrays.asList("256,100");

	// ####################################################################
	// Step 2: Configure and start the Kafka Streams processor topology.
	// ####################################################################

	Properties streamsConfiguration = new Properties();
	streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test");
	streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());

	// Configure Kafka Streams Application
	// Specify default (de)serializers for record keys and for record
	// values.
	streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic, where
	// message values represent lines of text (for the sake of this example, we
	// ignore whatever may be stored in the message keys).
	final KStream<String, String> inputEvents = builder.stream(inputTopic);

	// ###############################################################
	// THIS IS WHERE WE DO REAL TIME MODEL INFERENCE FOR EACH EVENT
	// ###############################################################
	inputEvents.foreach((key, value) -> {

		// Transform input values (list of Strings) to expected DL4J parameters (two
		// Integer values):
		String[] valuesAsArray = value.split(",");
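		// Note: Nd4j.create(int, int) allocates a zero-filled matrix whose shape is
		// given by the two parsed values; they act as dimensions here, as in the
		// original example, rather than as cell contents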
		INDArray input = Nd4j.create(Integer.parseInt(valuesAsArray[0]), Integer.parseInt(valuesAsArray[1]));

		// Apply the analytic model:
		output = model.output(input);
		prediction = output.toString();

	});

	// Transform message: Add prediction result
	KStream<String, Object> transformedMessage = inputEvents.mapValues(value -> "Prediction => " + prediction);

	// Send prediction result to Output Topic
	transformedMessage.to(outputTopic);

	// Start Kafka Streams Application to process new incoming messages from
	// Input Topic
	final KafkaStreams streams = new TestKafkaStreams(builder.build(), streamsConfiguration);
	streams.cleanUp();
	streams.start();
	System.out.println("Prediction Microservice is running...");
	System.out.println("Input to Kafka Topic " + inputTopic + "; Output to Kafka Topic " + outputTopic);

	// ########################################################
	// Step 3: Produce some input data to the input topic.
	// ########################################################

	Properties producerConfig = new Properties();
	producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
	producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
	producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig, new MockTime());

	// ########################################################
	// Step 4: Verify the application's output data.
	// ########################################################

	Properties consumerConfig = new Properties();
	consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test-standard-consumer");
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	List<KeyValue<String, String>> response = IntegrationTestUtils
			.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, 1);
	streams.close();

	System.out.println("VALUE: " + response.get(0).value);

	assertThat(response).isNotNull();
	assertThat(response.get(0).value).doesNotMatch("Value => unknown");
	assertThat(response.get(0).value).contains("0.1000,    0.1000,    0.1000");
}
 
Example 18
Source File: PosFanOutApp.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, FanOutConfigs.applicationID);
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, FanOutConfigs.bootstrapServers);

        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, PosInvoice> KS0 = builder.stream(FanOutConfigs.posTopicName,
            Consumed.with(PosSerdes.String(), PosSerdes.PosInvoice()));

        //Requirement 1 - Produce to shipment
        KStream<String, PosInvoice> KS1 = KS0.filter((key, value) ->
            value.getDeliveryType()
                .equalsIgnoreCase(FanOutConfigs.DELIVERY_TYPE_HOME_DELIVERY));

        KS1.to(FanOutConfigs.shipmentTopicName,
            Produced.with(PosSerdes.String(), PosSerdes.PosInvoice()));

        //Requirement 2 - Produce to loyaltyHadoopRecord
        KStream<String, PosInvoice> KS3 = KS0.filter((key, value) ->
            value.getCustomerType()
                .equalsIgnoreCase(FanOutConfigs.CUSTOMER_TYPE_PRIME));

        KStream<String, Notification> KS4 = KS3.mapValues(
            invoice -> RecordBuilder.getNotification(invoice)
        );

        KS4.to(FanOutConfigs.notificationTopic,
            Produced.with(PosSerdes.String(), PosSerdes.Notification()));

        //Requirement 3 - Produce to Hadoop
        KStream<String, PosInvoice> KS6 = KS0.mapValues(
            invoice -> RecordBuilder.getMaskedInvoice(invoice)
        );

        KStream<String, HadoopRecord> KS7 = KS6.flatMapValues(
            invoice -> RecordBuilder.getHadoopRecords(invoice)
        );

        KS7.to(FanOutConfigs.hadoopTopic,
            Produced.with(PosSerdes.String(), PosSerdes.HadoopRecord()));

        Topology posFanOutTopology = builder.build();

        logger.info("Starting the following topology");
        logger.info(posFanOutTopology.describe().toString());

        KafkaStreams myStream = new KafkaStreams(posFanOutTopology, props);
        myStream.start();

        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            logger.info("Stopping Stream");
            myStream.close();
        }));
    }
 
Example 19
Source File: CountingWindowApp.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    props.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreName);

    StreamsBuilder streamsBuilder = new StreamsBuilder();
    KStream<String, SimpleInvoice> KS0 = streamsBuilder.stream(AppConfigs.posTopicName,
        Consumed.with(AppSerdes.String(), AppSerdes.SimpleInvoice())
            .withTimestampExtractor(new InvoiceTimeExtractor())
    );

    KGroupedStream<String, SimpleInvoice> KS1 = KS0.groupByKey(
        Grouped.with(AppSerdes.String(),
            AppSerdes.SimpleInvoice()));

    TimeWindowedKStream<String, SimpleInvoice> KS2 = KS1.windowedBy(
        TimeWindows.of(Duration.ofMinutes(5))
        //.grace(Duration.ofMillis(100))
    );

    KTable<Windowed<String>, Long> KT3 = KS2.count(
        //Materialized is not needed if you don't want to override defaults
        Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("invoice-count")
        //.withRetention(Duration.ofHours(6))
    );

    //Suppress is only available in 2.1, Checkout 2.1 branch
    //.suppress(untilWindowCloses(unbounded()));


    KT3.toStream().foreach(
        (kWindowed, v) -> logger.info(
            "StoreID: " + kWindowed.key() +
                " Window start: " +
                Instant.ofEpochMilli(kWindowed.window().start())
                    .atOffset(ZoneOffset.UTC) +
                " Window end: " +
                Instant.ofEpochMilli(kWindowed.window().end())
                    .atOffset(ZoneOffset.UTC) +
                " Count: " + v +
                " Window#: " + kWindowed.window().hashCode()

        ));

    KafkaStreams streams = new KafkaStreams(streamsBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
 
Example 20
Source File: KafkaStreamsYellingApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        //Used only to produce data for this application, not typical usage
        MockDataProducer.produceRandomTextData();

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "yelling_app_id");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        StreamsConfig streamsConfig = new StreamsConfig(props);

        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();

        KStream<String, String> simpleFirstStream = builder.stream("src-topic", Consumed.with(stringSerde, stringSerde));


        KStream<String, String> upperCasedStream = simpleFirstStream.mapValues(String::toUpperCase);

        upperCasedStream.to( "out-topic", Produced.with(stringSerde, stringSerde));
        upperCasedStream.print(Printed.<String, String>toSysOut().withLabel("Yelling App"));


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("Hello World Yelling App Started");
        kafkaStreams.start();
        Thread.sleep(35000);
        LOG.info("Shutting down the Yelling APP now");
        kafkaStreams.close();
        MockDataProducer.shutdown();

    }