org.apache.kafka.streams.Consumed Java Examples

The following examples show how to use org.apache.kafka.streams.Consumed. Each snippet is taken from an open source project; the source file, project, and license are noted above each example.
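Before the project examples, here is a minimal sketch (not taken from any of the projects below) of the core Consumed factory method plus its optional builder methods; the topic name and serdes are placeholders:

// A minimal sketch, assuming a topic named "input-topic" with String keys and values.
private static KStream<String, String> openStream(StreamsBuilder builder) {
    return builder.stream("input-topic",
            Consumed.with(Serdes.String(), Serdes.String())                      // key/value serdes
                    .withOffsetResetPolicy(Topology.AutoOffsetReset.EARLIEST)    // per-source reset policy
                    .withTimestampExtractor(new WallclockTimestampExtractor())); // custom timestamp extraction
}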
Example #1
Source File: MetricImporter.java    From SkaETL with Apache License 2.0
private KafkaStreams feedMergeTopic(String id, String mergeTopic, String destId) {
    StreamsBuilder builder = new StreamsBuilder();
    Properties properties = createProperties(kafkaConfiguration.getBootstrapServers());
    String inputTopic = id + TOPIC_TREAT_PROCESS;
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, inputTopic + "merger-stream-" + destId);

    KStream<String, JsonNode> stream = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    stream.to(mergeTopic, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));

    final KafkaStreams streams = new KafkaStreams(builder.build(), properties);
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    return streams;
}
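Example #1 delegates to a createProperties helper that is not shown on this page. A minimal sketch of what such a helper might contain (the property choices below are assumptions, not SkaETL's actual code):

private Properties createProperties(String bootstrapServers) {
    Properties properties = new Properties();
    // APPLICATION_ID_CONFIG is set by the caller, so only the connection and
    // default serdes are configured here (assumed values).
    properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    return properties;
}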
 
Example #2
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
private void createStreamInput(String inputTopic, String outputTopic) {
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> streamInput = builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()));

    KStream<String, String> streamParsed = streamInput.mapValues((value) -> {
        Metrics.counter("skaetl_nb_read_kafka_count", Lists.newArrayList(Tag.of("processConsumerName", getProcessConsumer().getName()))).increment();
        return getGenericParser().apply(value, getProcessConsumer());
    }).filter((key, value) -> StringUtils.isNotBlank(value));

    final Serde<String> stringSerdes = Serdes.String();

    streamParsed.to(outputTopic, Produced.with(stringSerdes, stringSerdes));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.INPUT_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #3
Source File: PlayerCommandConnector.java    From football-events with MIT License
public void build(StreamsBuilder builder) {
    KStream<byte[], JsonNode> playerSourceStream = builder.stream(
            CONNECT_PLAYERS_TOPIC, Consumed.with(Serdes.ByteArray(), new JsonNodeSerde()))
            .filter((id, json) -> creationOrSnapshot(json));

    playerSourceStream.foreach(this::debug);

    KStream<String, PlayerStartedCareer> playerReadyStream = playerSourceStream
            .map((id, json) -> {
                PlayerStartedCareer event = createEvent(json);
                return KeyValue.pair(event.getAggId(), event);
            });

    playerReadyStream.to(PLAYER_STARTED_CAREER_TOPIC, Produced.with(
            Serdes.String(), new JsonPojoSerde<>(PlayerStartedCareer.class)));
}
 
Example #4
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamEmail(String inputTopic, ParameterOutput parameterOutput) {
    String email = parameterOutput.getEmail();
    if (email != null) {
        String template = parameterOutput.getTemplate();
        StreamsBuilder builder = new StreamsBuilder();

        if (template != null) {
            builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeEmailProcessor(email, template, emailService));
        } else {
            builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeEmailProcessor(email, emailService));
        }

        KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.EMAIL_PROCESS, getBootstrapServer()));
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
        addStreams(streams);
    } else {
        log.error("destinationEmail is null and it's not normal");
    }
}
 
Example #5
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamSlack(String inputTopic, ParameterOutput parameterOutput) {
    String webHookURL = parameterOutput.getWebHookURL();
    if (webHookURL != null) {
        String template = parameterOutput.getTemplate();
        StreamsBuilder builder = new StreamsBuilder();

        if (template != null) {
            builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeSlackProcessor(webHookURL, template));
        } else {
            builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeSlackProcessor(webHookURL));
        }

        KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.SLACK_PROCESS, getBootstrapServer()));
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
        addStreams(streams);
    } else {
        log.error("webHookURL is null and it's not normal");
    }
}
 
Example #6
Source File: SimulateStreamService.java    From SkaETL with Apache License 2.0
private void createStreamSimulate(String topic) {
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> streamInput = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));

    KStream<String, SimulateData> streamParsed = streamInput.map((key, value) -> {
        String resultParsing = getGenericParser().apply(value, getProcessConsumer());
        ObjectNode resultTransformation = getGenericTransformator().apply(JSONUtils.getInstance().parseObj(resultParsing), getProcessConsumer());
        ValidateData item = getGenericValidator().process(resultTransformation, getProcessConsumer());
        if (item.success) {
            return callFilter(value, item);
        } else {
            return new KeyValue<>("input", generateFromValidateData(value, item));
        }
    });
    final Serde<String> stringSerdes = Serdes.String();
    final Serde<SimulateData> simulateDataSerde = Serdes.serdeFrom(new SimulateDataSerializer(), new SimulateDataDeserializer());
    streamParsed.to(SIMULATE_OUTPUT, Produced.with(stringSerdes, simulateDataSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), createKStreamProperties(SIMULATE_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
}
 
Example #7
Source File: StructuredDataSourceNode.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private KTable createKTable(
    StreamsBuilder builder, final Topology.AutoOffsetReset autoOffsetReset,
    final KsqlTable ksqlTable,
    final Serde<GenericRow> genericRowSerde,
    final Serde<GenericRow> genericRowSerdeAfterRead
) {
  if (ksqlTable.isWindowed()) {
    return table(
        builder.stream(
            ksqlTable.getKsqlTopic().getKafkaTopicName(),
            Consumed.with(windowedSerde, genericRowSerde).withOffsetResetPolicy(autoOffsetReset)
        ).mapValues(windowedMapper).transformValues(new AddTimestampColumn()),
        windowedSerde,
        genericRowSerdeAfterRead
    );
  } else {
    return table(
        builder.stream(
            ksqlTable.getKsqlTopic().getKafkaTopicName(),
            Consumed.with(Serdes.String(), genericRowSerde).withOffsetResetPolicy(autoOffsetReset)
        ).mapValues(nonWindowedValueMapper).transformValues(new AddTimestampColumn()),
        Serdes.String(),
        genericRowSerdeAfterRead
    );
  }
}
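The withOffsetResetPolicy call in this example is the main reason to prefer Consumed over plain configuration: it scopes the reset policy to a single source, whereas auto.offset.reset in the Streams properties applies to every source topic. A short sketch contrasting the two (topic name is a placeholder):

// Per-source policy via Consumed: only this source starts from the earliest offset
builder.stream("replay-topic",
        Consumed.with(Serdes.String(), Serdes.String())
                .withOffsetResetPolicy(Topology.AutoOffsetReset.EARLIEST));

// Global default via configuration: applies to every source without an explicit policy
properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");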
 
Example #8
Source File: StatisticsKeeper.java    From football-events with MIT License
private <T> void updateStoreAndDashboard(Class<T> viewType, String store) {
    JsonPojoSerde<T> serde = new JsonPojoSerde<>(viewType);
    streamsBuilder.stream(Topics.viewTopicName(viewType), Consumed.with(Serdes.String(), serde))
            .peek(this::updateDashboard)
            .groupByKey()
            .reduce((aggValue, newValue) -> newValue, StreamsUtils.materialized(store, serde));
}
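The reduce above materializes the latest value per key into the named store, which a running KafkaStreams instance can expose through interactive queries. A minimal sketch, assuming the pre-2.5 store(String, QueryableStoreType) signature and a kafkaStreams variable in RUNNING state:

ReadOnlyKeyValueStore<String, Object> view =
        kafkaStreams.store(store, QueryableStoreTypes.keyValueStore());
Object latest = view.get("some-key"); // key is a placeholder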
 
Example #9
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void feedStream(String consumerId, ProcessReferential processReferential, String topicMerge) {
    String topicSource = consumerId + TOPIC_PARSED_PROCESS;
    log.info("creating {} Process Merge for topicsource {}", consumerId, topicSource);
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, JsonNode> streamToMerge = builder.stream(topicSource, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    streamToMerge.to(topicMerge, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(processReferential.getIdProcess() + "_" + consumerId + "-_merge-topic", kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    runningMergeProcess.get(processReferential).add(streams);
    streams.start();
}
 
Example #10
Source File: TopScorersBuilder.java    From football-events with MIT License
public void build() {
    KTable<String, TopPlayers> top10Table = builder
            .stream(PLAYER_GOALS_TOPIC, Consumed.with(Serdes.String(), playerGoalsSerde))
            // create a single record that includes the top scorers
            .groupBy((playerId, playerGoals) -> "topPlayers", Serialized.with(Serdes.String(), playerGoalsSerde))
            .aggregate(() -> new TopPlayers(10), (playerId, playerStat, top10) -> top10.aggregate(playerStat),
                materialized(TOP_SCORERS_STORE, topSerde));

    top10Table.toStream().to(TOP_SCORERS_TOPIC, Produced.with(String(), topSerde));
}
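Serialized.with(...), used in the groupBy above, was deprecated in Kafka 2.1 in favor of Grouped, which can also name the repartition topic. On a newer client the grouping might read as follows (a sketch reusing the example's serdes; the repartition name is an assumption):

.groupBy((playerId, playerGoals) -> "topPlayers",
        Grouped.with("top-players-repartition", Serdes.String(), playerGoalsSerde))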
 
Example #11
Source File: StockPerformanceStreamsAndProcessorApplication.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.02;

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
            .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));

    //Uncomment this line and comment out the line above for writing to a topic
    //.to(stringSerde, stockPerformanceSerde, "stock-performance");

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis KStream/Process API App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
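The commented-out to(keySerde, valueSerde, topic) call is an older KStream overload that later clients no longer accept; with the current API the same sink is expressed through Produced, e.g.:

builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
        .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
        .to("stock-performance", Produced.with(stringSerde, stockPerformanceSerde));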
 
Example #12
Source File: StockPerformanceStreamsAndProcessorMultipleValuesApplication.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.05;

    TransformerSupplier<String, StockTransaction, KeyValue<String, List<KeyValue<String, StockPerformance>>>> transformerSupplier =
            () -> new StockPerformanceMultipleValuesTransformer(stocksStateStore, differentialThreshold);

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(transformerSupplier, stocksStateStore).flatMap((dummyKey, valueList) -> valueList)
            .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));
            //.to(stringSerde, stockPerformanceSerde, "stock-performance");

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis KStream/Process API App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
 
Example #13
Source File: SqlPredicateTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Before
public void init() {
  metaStore = MetaStoreFixture.getNewMetaStore();
  functionRegistry = new FunctionRegistry();
  ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
  StreamsBuilder builder = new StreamsBuilder();
  kStream = builder.stream(ksqlStream.getKsqlTopic().getKafkaTopicName(), Consumed.with(Serdes.String(),
                           ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
                                                 null, new KsqlConfig(Collections.emptyMap()),
                                                 false, new MockSchemaRegistryClient()
                                                 )));
}
 
Example #14
Source File: SchemaKTableTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlTable = (KsqlTable) metaStore.getSource("TEST2");
  StreamsBuilder builder = new StreamsBuilder();
  kTable = builder.table(ksqlTable.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(Serdes.String(),
          ksqlTable.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(null,
              new KsqlConfig(Collections.emptyMap()), false, new MockSchemaRegistryClient())));
}
 
Example #15
Source File: SchemaKStreamTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
  StreamsBuilder builder = new StreamsBuilder();
  kStream = builder.stream(ksqlStream.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(Serdes.String(), ksqlStream.getKsqlTopic()
          .getKsqlTopicSerDe().getGenericRowSerde(null, new KsqlConfig(Collections.emptyMap())
              , false, new MockSchemaRegistryClient())));
}
 
Example #16
Source File: RetryImporter.java    From SkaETL with Apache License 2.0
public void activate() {
    log.info("Activating retry importer");
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<ValidateData> validateDataSerdes = Serdes.serdeFrom(new ValidateDataSerializer(), new ValidateDataDeserializer());

    KStream<String, ValidateData> streamToES = builder.stream(kafkaConfiguration.getRetryTopic(), Consumed.with(Serdes.String(), validateDataSerdes));
    streamToES.process(() -> elasticSearchProcessor);

    retryStream = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(INPUT_PROCESS_RETRY, kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(retryStream::close));
    retryStream.start();
}
 
Example #17
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
private void createStreamValidAndTransformAndFilter(String inputTopic, String outputTopic) {
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, JsonNode> streamInput = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    String applicationId = getProcessConsumer().getIdProcess() + ProcessConstants.VALIDATE_PROCESS;
    Counter counter = Metrics.counter("skaetl_nb_transformation_validation_count", Lists.newArrayList(Tag.of("processConsumerName", getProcessConsumer().getName())));
    KStream<String, ValidateData> streamValidation = streamInput.mapValues((value) -> {
        ObjectNode resultTransformer = getGenericTransformator().apply(value, getProcessConsumer());
        ValidateData item = getGenericValidator().process(resultTransformer, getProcessConsumer());
        counter.increment();
        return item;
    }).filter((key, value) -> {
        //Validation
        if (!value.success) {
            //produce to errorTopic
            esErrorRetryWriter.sendToErrorTopic(applicationId, value);
            return false;
        }
        //FILTER
        return processFilter(value);
    });

    KStream<String, JsonNode> streamOfJsonNode = streamValidation.mapValues(value -> value.getJsonValue());
    streamOfJsonNode.to(outputTopic, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(applicationId, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #18
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamEs(String inputTopic, ParameterOutput parameterOutput) {
    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, JsonNode> streamToES = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    streamToES.process(() -> applicationContext.getBean(JsonNodeToElasticSearchProcessor.class, parameterOutput.getElasticsearchRetentionLevel(), parameterOutput.getIndexShape()));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.ES_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #19
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamSystemOut(String inputTopic) {
    StreamsBuilder builder = new StreamsBuilder();

    builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new LoggingProcessor<>());

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.SYSOUT_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #20
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamKafka(String inputTopic, ParameterOutput parameterOutput) {
    StreamsBuilder builder = new StreamsBuilder();

    builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()))
            // forward the JSON value as a plain string
            .mapValues(value -> value.toString())
            .to(parameterOutput.getTopicOut(), Produced.with(Serdes.String(), Serdes.String()));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.KAFKA_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #21
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamSnmp(String inputTopic, ParameterOutput parameterOutput) {
    StreamsBuilder builder = new StreamsBuilder();
    builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeSnmpProcessor(snmpService));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.SNMP_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #22
Source File: SimulateStreamService.java    From SkaETL with Apache License 2.0
public void createStreamSystemOut(String topicToConsume) {
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<SimulateData> simulateDataSerde = Serdes.serdeFrom(new SimulateDataSerializer(), new SimulateDataDeserializer());

    builder.stream(topicToConsume, Consumed.with(Serdes.String(), simulateDataSerde)).process(() -> new LoggingProcessor<>());

    KafkaStreams streams = new KafkaStreams(builder.build(), createKStreamProperties(SYSOUT_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
}
 
Example #23
Source File: ErrorImporter.java    From SkaETL with Apache License 2.0
public void activate() {
    log.info("Activating error importer");
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<ErrorData> errorDataSerde = Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(ErrorData.class));

    KStream<String, ErrorData> streamToES = builder.stream(kafkaConfiguration.getErrorTopic(), Consumed.with(Serdes.String(), errorDataSerde));

    streamToES.process(() -> elasticsearchProcessor);

    errorStream = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(INPUT_PROCESS_ERROR, kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(errorStream::close));

    errorStream.start();
}
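One caveat worth noting in this example and Example #16: the supplier lambda () -> elasticsearchProcessor returns the same processor instance on every call. Kafka Streams invokes the ProcessorSupplier once per stream task, so a shared instance is only safe if the processor is stateless and thread-safe; the conventional form returns a fresh instance each time, e.g. (the class and constructor here are hypothetical):

streamToES.process(() -> new ElasticsearchErrorProcessor()); // hypothetical class; a new instance per task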
 
Example #24
Source File: StockPerformanceStreamsProcessorTopology.java    From kafka-streams-in-action with Apache License 2.0
public static Topology build() {
    
    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();


    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.02;

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
            .to("stock-performance", Produced.with(stringSerde, stockPerformanceSerde));

    return builder.build();
}
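Because this example returns a Topology rather than starting KafkaStreams itself, it can be exercised with the test harness. A minimal sketch, assuming kafka-streams-test-utils 2.4+ (the TestInputTopic API) and that StockTransaction has an accessible constructor:

Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "stock-performance-test");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

try (TopologyTestDriver driver = new TopologyTestDriver(StockPerformanceStreamsProcessorTopology.build(), props)) {
    TestInputTopic<String, StockTransaction> input = driver.createInputTopic(
            "stock-transactions",
            Serdes.String().serializer(),
            StreamsSerdes.StockTransactionSerde().serializer());
    input.pipeInput("AAPL", new StockTransaction()); // constructor usage is an assumption
}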
 
Example #25
Source File: ZMartTopology.java    From kafka-streams-in-action with Apache License 2.0
public static Topology build() {
    
    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    KStream<String,Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));


    KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());


    rewardsKStream.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

    purchaseKStream.to("purchases", Produced.with(Serdes.String(), purchaseSerde));

    return streamsBuilder.build();
}
 
Example #26
Source File: CountingWindowingAndKtableJoinExample.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<String> stringSerde = Serdes.String();
    Serde<StockTransaction> transactionSerde = StreamsSerdes.StockTransactionSerde();
    Serde<TransactionSummary> transactionKeySerde = StreamsSerdes.TransactionSummarySerde();

    StreamsBuilder builder = new StreamsBuilder();
    long twentySeconds = 1000 * 20;
    long fifteenMinutes = 1000 * 60 * 15;
    long fiveSeconds = 1000 * 5;

    KTable<Windowed<TransactionSummary>, Long> customerTransactionCounts =
            builder.stream(STOCK_TRANSACTIONS_TOPIC, Consumed.with(stringSerde, transactionSerde).withOffsetResetPolicy(LATEST))
            .groupBy((noKey, transaction) -> TransactionSummary.from(transaction),
                    Serialized.with(transactionKeySerde, transactionSerde))
            // Session window; comment out the line below and uncomment one of the
            // alternatives that follow for a different window example.
            .windowedBy(SessionWindows.with(twentySeconds).until(fifteenMinutes)).count();

            // Tumbling window with a 15-minute retention:
            //.windowedBy(TimeWindows.of(twentySeconds).until(fifteenMinutes)).count();

            // Tumbling window with the default 24-hour retention:
            //.windowedBy(TimeWindows.of(twentySeconds)).count();

            // Hopping window advancing every five seconds:
            //.windowedBy(TimeWindows.of(twentySeconds).advanceBy(fiveSeconds).until(fifteenMinutes)).count();

    customerTransactionCounts.toStream().print(Printed.<Windowed<TransactionSummary>, Long>toSysOut().withLabel("Customer Transactions Counts"));

    KStream<String, TransactionSummary> countStream = customerTransactionCounts.toStream().map((window, count) -> {
        TransactionSummary transactionSummary = window.key();
        String newKey = transactionSummary.getIndustry();
        transactionSummary.setSummaryCount(count);
        return KeyValue.pair(newKey, transactionSummary);
    });

    KTable<String, String> financialNews = builder.table("financial-news", Consumed.with(EARLIEST));

    ValueJoiner<TransactionSummary, String, String> valueJoiner = (txnct, news) ->
            String.format("%d shares purchased %s related news [%s]", txnct.getSummaryCount(), txnct.getStockTicker(), news);

    KStream<String, String> joined = countStream.leftJoin(financialNews, valueJoiner, Joined.with(stringSerde, transactionKeySerde, stringSerde));

    joined.print(Printed.<String, String>toSysOut().withLabel("Transactions and News"));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);

    kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
        LOG.error("had exception ", e);
    });
    CustomDateGenerator dateGenerator = CustomDateGenerator.withTimestampsIncreasingBy(Duration.ofMillis(750));

    DataGenerator.setTimestampGenerator(dateGenerator::get);

    MockDataProducer.produceStockTransactions(2, 5, 3, false);

    LOG.info("Starting CountingWindowing and KTableJoins Example");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the CountingWindowing and KTableJoins Example Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
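The millisecond-based window calls above (SessionWindows.with(long) and until(long)) were deprecated in Kafka 2.1 in favor of Duration arguments, with retention configured on the store via Materialized rather than until. A rough modern equivalent of the session window (grace-period semantics differ slightly from until):

.windowedBy(SessionWindows.with(Duration.ofSeconds(20)).grace(Duration.ofMinutes(15)))
.count();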
 
Example #27
Source File: StockPerformanceInteractiveQueryApplication.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) {
    if (args.length < 2) {
        LOG.error("Need to specify host, port");
        System.exit(1);
    }

    String host = args[0];
    int port = Integer.parseInt(args[1]);
    final HostInfo hostInfo = new HostInfo(host, port);

    Properties properties = getProperties();
    properties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, host + ":" + port);

    StreamsConfig streamsConfig = new StreamsConfig(properties);
    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
    WindowedSerializer<String> windowedSerializer = new WindowedSerializer<>(stringSerde.serializer());
    WindowedDeserializer<String> windowedDeserializer = new WindowedDeserializer<>(stringSerde.deserializer());
    Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);
    Serde<CustomerTransactions> customerTransactionsSerde = StreamsSerdes.CustomerTransactionsSerde();

    Aggregator<String, StockTransaction, Integer> sharesAggregator = (k, v, i) -> v.getShares() + i;

    StreamsBuilder builder = new StreamsBuilder();

    // data is already coming in keyed
    KStream<String, StockTransaction> stockTransactionKStream = builder.stream(MockDataProducer.STOCK_TRANSACTIONS_TOPIC, Consumed.with(stringSerde, stockTransactionSerde)
            .withOffsetResetPolicy(Topology.AutoOffsetReset.LATEST));

    stockTransactionKStream.map((k, v) -> KeyValue.pair(v.getSector(), v))
            .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .count(Materialized.as("TransactionsBySector"))
            .toStream()
            .peek((k, v) -> LOG.info("Transaction count for {} {}", k, v))
            .to("sector-transaction-counts", Produced.with(stringSerde, longSerde));

    stockTransactionKStream.map((k, v) -> KeyValue.pair(v.getCustomerId(), v))
            .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .windowedBy(SessionWindows.with(TimeUnit.MINUTES.toMillis(60)).until(TimeUnit.MINUTES.toMillis(120)))
            .aggregate(CustomerTransactions::new, (k, v, ct) -> ct.update(v),
                    (k, ct, other) -> ct.merge(other),
                    Materialized.<String, CustomerTransactions, SessionStore<Bytes, byte[]>>as("CustomerPurchaseSessions")
                            .withKeySerde(stringSerde).withValueSerde(customerTransactionsSerde))
            .toStream()
            .peek((k, v) -> LOG.info("Session info for {} {}", k, v))
            .to("session-transactions", Produced.with(windowedSerde, customerTransactionsSerde));

    stockTransactionKStream.groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .windowedBy(TimeWindows.of(10000))
            .aggregate(() -> 0, sharesAggregator,
                    Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("NumberSharesPerPeriod")
                            .withKeySerde(stringSerde)
                            .withValueSerde(Serdes.Integer()))
            .toStream().peek((k, v) -> LOG.info("key is {} value is {}", k, v))
            .to("transaction-count", Produced.with(windowedSerde, Serdes.Integer()));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    InteractiveQueryServer queryServer = new InteractiveQueryServer(kafkaStreams, hostInfo);
    StateRestoreHttpReporter restoreReporter = new StateRestoreHttpReporter(queryServer);

    queryServer.init();

    kafkaStreams.setGlobalStateRestoreListener(restoreReporter);

    kafkaStreams.setStateListener((newState, oldState) -> {
        if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
            LOG.info("Setting the query server to ready");
            queryServer.setReady(true);
        } else if (newState != KafkaStreams.State.RUNNING) {
            LOG.info("State not RUNNING, disabling the query server");
            queryServer.setReady(false);
        }
    });

    kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
        LOG.error("Thread {} had a fatal error {}", t, e, e);
        shutdown(kafkaStreams, queryServer);
    });

    Runtime.getRuntime().addShutdownHook(new Thread(() -> shutdown(kafkaStreams, queryServer)));

    LOG.info("Stock Analysis KStream Interactive Query App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
}
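The stores materialized above (for example "TransactionsBySector") are what the InteractiveQueryServer serves. A minimal sketch of a local lookup, assuming the pre-2.5 store(...) signature and that the instance has reached RUNNING:

ReadOnlyKeyValueStore<String, Long> sectorCounts =
        kafkaStreams.store("TransactionsBySector", QueryableStoreTypes.keyValueStore());
Long count = sectorCounts.get("ENERGY"); // sector key is a placeholder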
 
Example #28
Source File: KafkaStreamsJoinsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    StreamsBuilder builder = new StreamsBuilder();

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<String> stringSerde = Serdes.String();

    KeyValueMapper<String, Purchase, KeyValue<String, Purchase>> custIdCCMasking = (k, v) -> {
        Purchase masked = Purchase.builder(v).maskCreditCard().build();
        return new KeyValue<>(masked.getCustomerId(), masked);
    };

    Predicate<String, Purchase> coffeePurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
    Predicate<String, Purchase> electronicPurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

    int COFFEE_PURCHASE = 0;
    int ELECTRONICS_PURCHASE = 1;

    KStream<String, Purchase> transactionStream = builder.stream("transactions", Consumed.with(Serdes.String(), purchaseSerde)).map(custIdCCMasking);

    KStream<String, Purchase>[] branchesStream = transactionStream.selectKey((k, v) -> v.getCustomerId()).branch(coffeePurchase, electronicPurchase);

    KStream<String, Purchase> coffeeStream = branchesStream[COFFEE_PURCHASE];
    KStream<String, Purchase> electronicsStream = branchesStream[ELECTRONICS_PURCHASE];

    ValueJoiner<Purchase, Purchase, CorrelatedPurchase> purchaseJoiner = new PurchaseJoiner();
    JoinWindows twentyMinuteWindow = JoinWindows.of(60 * 1000 * 20);

    KStream<String, CorrelatedPurchase> joinedKStream = coffeeStream.join(electronicsStream,
                                                                          purchaseJoiner,
                                                                          twentyMinuteWindow,
                                                                          Joined.with(stringSerde,
                                                                                      purchaseSerde,
                                                                                      purchaseSerde));

    joinedKStream.print(Printed.<String, CorrelatedPurchase>toSysOut().withLabel("joined KStream"));

    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    LOG.info("Starting Join Examples");
    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Join Examples now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
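JoinWindows.of(60 * 1000 * 20) above also predates the Duration overloads added in Kafka 2.1; on a newer client the same twenty-minute window would be:

JoinWindows twentyMinuteWindow = JoinWindows.of(Duration.ofMinutes(20));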
 
Example #29
Source File: StructuredDataSourceNode.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public SchemaKStream buildStream(
    final StreamsBuilder builder,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient,
    final FunctionRegistry functionRegistry,
    final Map<String, Object> props,
    final SchemaRegistryClient schemaRegistryClient
) {
  if (!(getTimestampExtractionPolicy() instanceof MetadataTimestampExtractionPolicy)) {
    ksqlConfig.put(KsqlConfig.KSQL_TIMESTAMP_COLUMN_INDEX,
        getTimeStampColumnIndex());
  }
  KsqlTopicSerDe ksqlTopicSerDe = getStructuredDataSource()
      .getKsqlTopic().getKsqlTopicSerDe();
  Serde<GenericRow> genericRowSerde =
      ksqlTopicSerDe.getGenericRowSerde(
          SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(
              getSchema()), ksqlConfig, false, schemaRegistryClient);

  if (getDataSourceType() == StructuredDataSource.DataSourceType.KTABLE) {
    final KsqlTable table = (KsqlTable) getStructuredDataSource();

    final KTable kTable = createKTable(
        builder,
        getAutoOffsetReset(props),
        table,
        genericRowSerde,
        table.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
            getSchema(), ksqlConfig, true, schemaRegistryClient)
    );
    return new SchemaKTable(
        getSchema(),
        kTable,
        getKeyField(),
        new ArrayList<>(),
        table.isWindowed(),
        SchemaKStream.Type.SOURCE,
        functionRegistry,
        schemaRegistryClient
    );
  }

  return new SchemaKStream(
      getSchema(),
      builder.stream(
          getStructuredDataSource().getKsqlTopic().getKafkaTopicName(),
          Consumed.with(Serdes.String(), genericRowSerde)
      ).mapValues(nonWindowedValueMapper).transformValues(new AddTimestampColumn()),
      getKeyField(), new ArrayList<>(),
      SchemaKStream.Type.SOURCE, functionRegistry, schemaRegistryClient
  );
}
 
Example #30
Source File: KafkaStreamsYellingApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    // Used only to produce data for this application, not typical usage
    MockDataProducer.produceRandomTextData();

    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "yelling_app_id");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

    StreamsConfig streamsConfig = new StreamsConfig(props);

    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, String> simpleFirstStream = builder.stream("src-topic", Consumed.with(stringSerde, stringSerde));

    KStream<String, String> upperCasedStream = simpleFirstStream.mapValues(String::toUpperCase);

    upperCasedStream.to("out-topic", Produced.with(stringSerde, stringSerde));
    upperCasedStream.print(Printed.<String, String>toSysOut().withLabel("Yelling App"));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    LOG.info("Hello World Yelling App Started");
    kafkaStreams.start();
    Thread.sleep(35000);
    LOG.info("Shutting down the Yelling APP now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
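Most of these demo applications stop themselves with Thread.sleep followed by close(). For a long-running service, the usual pattern is to block on a latch that a shutdown hook releases; a minimal sketch:

CountDownLatch latch = new CountDownLatch(1);
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    kafkaStreams.close();
    latch.countDown();
}));
kafkaStreams.start();
latch.await(); // block until the JVM begins shutting down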