org.apache.kafka.streams.kstream.Produced Java Examples

The following examples show how to use org.apache.kafka.streams.kstream.Produced. Each example is extracted from an open source project; the source file, project, and license are noted above it.
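
Before diving in, a quick orientation: a Produced instance bundles the key serde, the value serde, and optionally a StreamPartitioner for a sink operation such as KStream#to, overriding the application's default serdes for that sink only. A minimal sketch of the common pattern (the class and topic names here are hypothetical, not taken from the examples below):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class ProducedSketch {
    public Topology build() {
        final StreamsBuilder builder = new StreamsBuilder();
        // Hypothetical input topic; the application's default serdes apply when none are given.
        final KStream<String, Long> stream = builder.stream("sketch-input");
        // Produced.with overrides the default key/value serdes for this sink only.
        stream.to("sketch-output", Produced.with(Serdes.String(), Serdes.Long()));
        return builder.build();
    }
}
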
Example #1
Source File: UserClicksPerMinute.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.inputTopic);

    final KTable<Windowed<Integer>, Long> counts = clickEvents
            .groupByKey()
            .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))
            .count();

    counts.toStream()
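            // unwrap the windowed key and carry the window start time into the output value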
            .map((key, value) -> KeyValue.pair(
                    key.key(),
                    new ClickOutput(key.key(), value, key.window().start())))
            .to(this.outputTopic, Produced.with(Serdes.Integer(), new JsonSerde<>(ClickOutput.class)));

    return builder.build();
}
 
Example #2
Source File: NameJoinGlobalKTable.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopologyWithIntermediateTopic() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> inputStream =
            builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long()));

    builder.stream(NAME_INPUT, Consumed.with(Serdes.Long(), Serdes.String()))
            .mapValues(name -> name.toUpperCase())
            .to(INTERMEDIATE_TOPIC);

    final GlobalKTable<Long, String> joinTable = builder.globalTable(INTERMEDIATE_TOPIC);

    inputStream
            .join(joinTable,
                    (id, valueId) -> valueId,
                    (id, name) -> name)
            .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String()));

    return builder.build();
}
 
Example #3
Source File: FkJoinTableToTable.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();
    final String albumTopic = envProps.getProperty("album.topic.name");
    final String userTrackPurchaseTopic = envProps.getProperty("tracks.purchase.topic.name");
    final String musicInterestTopic = envProps.getProperty("music.interest.topic.name");

    final Serde<Long> longSerde = getPrimitiveAvroSerde(envProps, true); // the flag configures the serde for record keys
    final Serde<MusicInterest> musicInterestSerde = getSpecificAvroSerde(envProps);
    final Serde<Album> albumSerde = getSpecificAvroSerde(envProps);
    final Serde<TrackPurchase> trackPurchaseSerde = getSpecificAvroSerde(envProps);

    final KTable<Long, Album> albums = builder.table(albumTopic, Consumed.with(longSerde, albumSerde));

    final KTable<Long, TrackPurchase> trackPurchases = builder.table(userTrackPurchaseTopic, Consumed.with(longSerde, trackPurchaseSerde));
    final MusicInterestJoiner trackJoiner = new MusicInterestJoiner();

    final KTable<Long, MusicInterest> musicInterestTable = trackPurchases.join(albums,
                                                                         TrackPurchase::getAlbumId,
                                                                         trackJoiner);

    musicInterestTable.toStream().to(musicInterestTopic, Produced.with(longSerde, musicInterestSerde));

    return builder.build();
}
 
Example #4
Source File: JoinStreamToTable.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();
    final String movieTopic = envProps.getProperty("movie.topic.name");
    final String rekeyedMovieTopic = envProps.getProperty("rekeyed.movie.topic.name");
    final String ratingTopic = envProps.getProperty("rating.topic.name");
    final String ratedMoviesTopic = envProps.getProperty("rated.movies.topic.name");
    final MovieRatingJoiner joiner = new MovieRatingJoiner();

    KStream<String, Movie> movieStream = builder.<String, Movie>stream(movieTopic)
            .map((key, movie) -> new KeyValue<>(movie.getId().toString(), movie));

    movieStream.to(rekeyedMovieTopic);

    KTable<String, Movie> movies = builder.table(rekeyedMovieTopic);

    KStream<String, Rating> ratings = builder.<String, Rating>stream(ratingTopic)
            .map((key, rating) -> new KeyValue<>(rating.getId().toString(), rating));

    KStream<String, RatedMovie> ratedMovie = ratings.join(movies, joiner);

    ratedMovie.to(ratedMoviesTopic, Produced.with(Serdes.String(), ratedMovieAvroSerde(envProps)));

    return builder.build();
}
 
Example #5
Source File: WordCountStream.java    From micronaut-kafka with Apache License 2.0
@Singleton
@Named(MY_STREAM)
KStream<String, String> myStream(
        @Named(MY_STREAM) ConfiguredStreamBuilder builder) {

    // set default serdes
    Properties props = builder.getConfiguration();
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KStream<String, String> source = builder.stream(NAMED_WORD_COUNT_INPUT);
    KTable<String, Long> counts = source
            .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split(" ")))
            .groupBy((key, value) -> value)
            .count();

    // need to override value serde to Long type
    counts.toStream().to(NAMED_WORD_COUNT_OUTPUT, Produced.with(Serdes.String(), Serdes.Long()));
    return source;
}
 
Example #6
Source File: WordCountStream.java    From micronaut-kafka with Apache License 2.0
@Singleton
@Named(STREAM_WORD_COUNT)
KStream<String, String> wordCountStream(ConfiguredStreamBuilder builder) {
    // set default serdes
    Properties props = builder.getConfiguration();
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KStream<String, String> source = builder
            .stream(INPUT);

    KTable<String, Long> groupedByWord = source
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .groupBy((key, word) -> word, Grouped.with(Serdes.String(), Serdes.String()))
            //Store the result in a store for lookup later
            .count(Materialized.as(WORD_COUNT_STORE));

    groupedByWord
            //convert to stream
            .toStream()
            //send to output using specific serdes
            .to(OUTPUT, Produced.with(Serdes.String(), Serdes.Long()));

    return source;
}
 
Example #7
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
private void createStreamInput(String inputTopic, String outputTopic) {
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> streamInput = builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()));

    KStream<String, String> streamParsed = streamInput.mapValues((value) -> {
        Metrics.counter("skaetl_nb_read_kafka_count", Lists.newArrayList(Tag.of("processConsumerName", getProcessConsumer().getName()))).increment();
        return getGenericParser().apply(value, getProcessConsumer());
    }).filter((key, value) -> StringUtils.isNotBlank(value));

    final Serde<String> stringSerdes = Serdes.String();

    streamParsed.to(outputTopic, Produced.with(stringSerdes, stringSerdes));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.INPUT_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example #8
Source File: MetricImporter.java    From SkaETL with Apache License 2.0
private KafkaStreams feedMergeTopic(String id, String mergeTopic, String destId) {
    StreamsBuilder builder = new StreamsBuilder();
    Properties properties = createProperties(kafkaConfiguration.getBootstrapServers());
    String inputTopic = id + TOPIC_TREAT_PROCESS;
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, inputTopic + "merger-stream-" + destId);

    KStream<String, JsonNode> stream = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    stream.to(mergeTopic, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));

    final KafkaStreams streams = new KafkaStreams(builder.build(), properties);
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    return streams;
}
 
Example #9
Source File: SimulateStreamService.java    From SkaETL with Apache License 2.0
private void createStreamSimulate(String topic) {
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> streamInput = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));

    KStream<String, SimulateData> streamParsed = streamInput.map((key, value) -> {
        String resultParsing = getGenericParser().apply(value, getProcessConsumer());
        ObjectNode resultTransformation = getGenericTransformator().apply(JSONUtils.getInstance().parseObj(resultParsing), getProcessConsumer());
        ValidateData item = getGenericValidator().process(resultTransformation, getProcessConsumer());
        if (item.success) {
            return callFilter(value, item);
        } else {
            return new KeyValue<>("input", generateFromValidateData(value, item));
        }
    });
    final Serde<String> stringSerdes = Serdes.String();
    final Serde<SimulateData> simulateDataSerde = Serdes.serdeFrom(new SimulateDataSerializer(), new SimulateDataDeserializer());
    streamParsed.to(SIMULATE_OUTPUT, Produced.with(stringSerdes, simulateDataSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), createKStreamProperties(SIMULATE_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
}
 
Example #10
Source File: PlayerCommandConnector.java    From football-events with MIT License
public void build(StreamsBuilder builder) {
    KStream<byte[], JsonNode> playerSourceStream = builder.stream(
            CONNECT_PLAYERS_TOPIC, Consumed.with(Serdes.ByteArray(), new JsonNodeSerde()))
            .filter((id, json) -> creationOrSnapshot(json));

    playerSourceStream.foreach(this::debug);

    KStream<String, PlayerStartedCareer> playerReadyStream = playerSourceStream
            .map((id, json) -> {
                PlayerStartedCareer event = createEvent(json);
                return KeyValue.pair(event.getAggId(), event);
            });

    playerReadyStream.to(PLAYER_STARTED_CAREER_TOPIC, Produced.with(
            Serdes.String(), new JsonPojoSerde<>(PlayerStartedCareer.class)));
}
 
Example #11
Source File: StatisticsBuilder.java    From football-events with MIT License
private void buildPlayerStatistics(KStream<String, GoalScored> goalStream) {
    KTable<String, PlayerStartedCareer> playerTable = builder
            .table(PLAYER_STARTED_TOPIC, with(String(), playerSerde));

    KTable<String, PlayerGoals> playerGoalsTable = goalStream
            .selectKey((matchId, goal) -> goal.getScorerId())
            .leftJoin(playerTable, (goal, player) -> new PlayerGoals(player).goal(goal),
                with(String(), goalScoredSerde, playerSerde))
            .groupByKey(Serialized.with(String(), playerGoalsSerde))
            .reduce(PlayerGoals::aggregate, materialized(PLAYER_GOALS_STORE, playerGoalsSerde));

    KTable<String, PlayerCards> playerCardsTable = builder
            .stream(CARD_RECEIVED_TOPIC, with(String(), cardReceivedSerde))
            .selectKey((matchId, card) -> card.getReceiverId())
            .leftJoin(playerTable, (card, player) -> new PlayerCards(player).card(card),
                with(String(), cardReceivedSerde, playerSerde))
            .groupByKey(Serialized.with(String(), playerCardsSerde))
            .reduce(PlayerCards::aggregate, materialized(PLAYER_CARDS_STORE, playerCardsSerde));

    // publish changes to a view topic
    playerCardsTable.toStream().to(PLAYER_CARDS_TOPIC, Produced.with(String(), playerCardsSerde));

    KStream<String, PlayerGoals> playerGoalsStream = playerGoalsTable.toStream();
    playerGoalsStream.to(PLAYER_GOALS_TOPIC, Produced.with(String(), playerGoalsSerde));
}
 
Example #12
Source File: AggregatingCount.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply COUNT method
      .count()
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

  return builder.build();
}
 
Example #13
Source File: AggregatingSum.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply SUM aggregation
      .reduce(Integer::sum)
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Integer()));

  return builder.build();
}
 
Example #14
Source File: SchemaKStream.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
public SchemaKStream into(
    final String kafkaTopicName,
    final Serde<GenericRow> topicValueSerDe,
    final Set<Integer> rowkeyIndexes
) {

  kstream
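      // drop the columns that belong to the row key before writing the value to the sink topic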
      .mapValues(row -> {
        if (row == null) {
          return null;
        }
        List<Object> columns = new ArrayList<>();
        for (int i = 0; i < row.getColumns().size(); i++) {
          if (!rowkeyIndexes.contains(i)) {
            columns.add(row.getColumns().get(i));
          }
        }
        return new GenericRow(columns);
      }).to(kafkaTopicName, Produced.with(Serdes.String(), topicValueSerDe));
  return this;
}
 
Example #15
Source File: SerializationTutorial.java    From kafka-tutorials with Apache License 2.0
protected Topology buildTopology(Properties envProps,
                                 final SpecificAvroSerde<Movie> movieSpecificAvroSerde,
                                 final KafkaProtobufSerde<MovieProtos.Movie> movieProtoSerde) {
  
  final String inputAvroTopicName = envProps.getProperty("input.avro.movies.topic.name");
  final String outProtoTopicName = envProps.getProperty("output.proto.movies.topic.name");

  final StreamsBuilder builder = new StreamsBuilder();

  // topic contains values in avro format
  final KStream<Long, Movie> avroMovieStream =
      builder.stream(inputAvroTopicName, Consumed.with(Long(), movieSpecificAvroSerde));

  //convert and write movie data in protobuf format
  avroMovieStream
      .map((key, avroMovie) ->
               new KeyValue<>(key, MovieProtos.Movie.newBuilder()
                   .setMovieId(avroMovie.getMovieId())
                   .setTitle(avroMovie.getTitle())
                   .setReleaseYear(avroMovie.getReleaseYear())
                   .build()))
      .to(outProtoTopicName, Produced.with(Long(), movieProtoSerde));

  return builder.build();
}
 
Example #16
Source File: WordCount.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count();

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
 
Example #17
Source File: WordCount.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count(Materialized.as("count"));

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
 
Example #18
Source File: NameJoinGlobalKTable.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> inputStream =
            builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long()));

    final GlobalKTable<Long, String> joinTable = builder.globalTable(NAME_INPUT);

    inputStream
            .join(joinTable,
                    (id, valueId) -> valueId,
                    (id, name) -> name)
            .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String()));

    return builder.build();
}
 
Example #19
Source File: StreamDemo.java    From javatech with Creative Commons Attribution Share Alike 4.0 International
public static void main(String[] args) {
	// 1. Specify the stream configuration
	Properties config = new Properties();
	config.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application");
	config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, HOST);
	config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
	config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

	// Set up the stream builder
	StreamsBuilder builder = new StreamsBuilder();
	KStream<String, String> textLines = builder.stream("TextLinesTopic");
	KTable<String, Long> wordCounts = textLines
		.flatMapValues(textLine -> Arrays.asList(textLine.toLowerCase().split("\\W+")))
		.groupBy((key, word) -> word)
		.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("counts-store"));
	wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));

	// Initialize the Kafka Streams instance from the builder and the configuration
	KafkaStreams streams = new KafkaStreams(builder.build(), config);
	streams.start();
}
 
Example #20
Source File: TopologyContext.java    From simplesource with Apache License 2.0
public TopologyContext(AggregateSpec<K, C, E, A> aggregateSpec) {
    this.aggregateSpec = aggregateSpec;
    this.commandResponseRetentionInSeconds = aggregateSpec.generation().stateStoreSpec().retentionInSeconds();
    serdes = aggregateSpec.serialization().serdes();

    commandRequestConsumed = Consumed.with(serdes().aggregateKey(), serdes().commandRequest());
    commandResponseConsumed = Consumed.with(serdes().aggregateKey(), serdes().commandResponse());
    eventsConsumedProduced = Produced.with(serdes().aggregateKey(), serdes().valueWithSequence());
    aggregatedUpdateProduced = Produced.with(serdes().aggregateKey(), serdes().aggregateUpdate());
    commandResponseProduced = Produced.with(serdes().aggregateKey(), serdes().commandResponse());
    serializedCommandResponse = Serialized.with(serdes().commandId(), serdes().commandResponse());
    aggregator = aggregateSpec.generation().aggregator();
    initialValue = aggregateSpec.generation().initialValue();
}
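
Note the pattern in this example: the Produced (and Consumed) instances are created once from the aggregate's serde configuration and then reused wherever the topology writes events, aggregate updates, or command responses.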
 
Example #21
Source File: KafkaStreamsPipeline.java    From quarkus with Apache License 2.0
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<Category> categorySerde = new ObjectMapperSerde<>(Category.class);
    ObjectMapperSerde<Customer> customerSerde = new ObjectMapperSerde<>(Customer.class);
    ObjectMapperSerde<EnrichedCustomer> enrichedCustomerSerde = new ObjectMapperSerde<>(EnrichedCustomer.class);

    KTable<Integer, Category> categories = builder.table(
            "streams-test-categories",
            Consumed.with(Serdes.Integer(), categorySerde));

    KStream<Integer, EnrichedCustomer> customers = builder
            .stream("streams-test-customers", Consumed.with(Serdes.Integer(), customerSerde))
            .selectKey((id, customer) -> customer.category)
            .join(
                    categories,
                    (customer, category) -> {
                        return new EnrichedCustomer(customer.id, customer.name, category);
                    },
                    Joined.with(Serdes.Integer(), customerSerde, categorySerde));

    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore("countstore");
    customers.groupByKey()
            .count(Materialized.<Integer, Long> as(storeSupplier));

    customers.selectKey((categoryId, customer) -> customer.id)
            .to("streams-test-customers-processed", Produced.with(Serdes.Integer(), enrichedCustomerSerde));

    return builder.build();
}
 
Example #22
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filter("filter-1", (key, value) -> true))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the filter predicate returns true, so the record is not dropped

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
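
In this and the following tracing tests, the KAFKA_STREAMS_FILTERED_TAG assertion records whether the traced filter step dropped the record: "true" means the record was filtered out, "false" means it was forwarded downstream.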
 
Example #23
Source File: StatisticsBuilder.java    From football-events with MIT License
private void buildMatchStatistics(KStream<String, GoalScored> goalStream) {
    KStream<String, MatchStarted> matchStartedStream = builder
            .stream(MATCH_STARTED_TOPIC, with(String(), matchStartedSerde));

    KStream<String, MatchFinished> matchFinishedStream = builder
            .stream(MATCH_FINISHED_TOPIC, with(String(), matchFinishedSerde));

    KStream<String, MatchScore> scoreStream = matchStartedStream
            .leftJoin(goalStream, (match, goal) -> new MatchScore(match).goal(goal),
                JoinWindows.of(maxMatchDuration), with(String(), matchStartedSerde, goalScoredSerde)
    );

    KTable<String, MatchScore> scoreTable = scoreStream
            .groupByKey()
            .reduce(MatchScore::aggregate, materialized(MATCH_SCORES_STORE, matchScoreSerde));
    scoreTable.toStream().to(MATCH_SCORES_TOPIC, Produced.with(String(), matchScoreSerde));

    KStream<String, MatchScore> finalScoreStream = matchFinishedStream
            .leftJoin(scoreTable, (matchFinished, matchScore) -> matchScore,
                with(String(), matchFinishedSerde, matchScoreSerde)
    );

    // new key: clubId
    KStream<String, TeamRanking> rankingStream = finalScoreStream
            .flatMap((clubId, matchScore) -> {
                Collection<KeyValue<String, TeamRanking>> result = new ArrayList<>(2);
                result.add(pair(matchScore.getHomeClubId(), matchScore.homeRanking()));
                result.add(pair(matchScore.getAwayClubId(), matchScore.awayRanking()));
                return result;
            });

    KTable<String, TeamRanking> rankingTable = rankingStream
            .groupByKey(Serialized.with(String(), rankingSerde))
            .reduce(TeamRanking::aggregate, materialized(TEAM_RANKING_STORE, rankingSerde));

    // publish changes to a view topic
    rankingTable.toStream().to(TEAM_RANKING_TOPIC, Produced.with(String(), rankingSerde));
}
 
Example #24
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_filtered_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transformValues(kafkaStreamsTracing.markAsFiltered("filter-2", (key, value) -> false))
    .filterNot((k, v) -> Objects.isNull(v))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");

  // the markAsFiltered predicate returns false, so the value is marked as filtered and dropped by the subsequent filterNot

  streams.close();
  streams.cleanUp();
}
 
Example #25
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filter("filter-2", (key, value) -> false))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");

  // the filter predicate returns false, so the record is dropped

  streams.close();
  streams.cleanUp();
}
 
Example #26
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_not_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filterNot("filterNot-1", (key, value) -> true))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");

  // the filterNot predicate returns true, so the record is dropped

  streams.close();
  streams.cleanUp();
}
 
Example #27
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_filter_not_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transform(kafkaStreamsTracing.filterNot("filterNot-2", (key, value) -> false))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the filterNot predicate returns false, so the record is not dropped

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
 
Example #28
Source File: ITKafkaStreamsTracing.java    From brave with Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_filtered_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transformValues(kafkaStreamsTracing.markAsFiltered("filter-1", (key, value) -> true))
    .filterNot((k, v) -> Objects.isNull(v))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the markAsFiltered predicate returns true, so the record is not marked as filtered and is not dropped

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
 
Example #29
Source File: KStreamBinder.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@SuppressWarnings("unchecked")
private void to(boolean isNativeEncoding, String name,
				KStream<Object, Object> outboundBindTarget, Serde<Object> keySerde,
				Serde<Object> valueSerde, KafkaStreamsProducerProperties properties) {
	final Produced<Object, Object> produced = Produced.with(keySerde, valueSerde);
	StreamPartitioner streamPartitioner = null;
	if (!StringUtils.isEmpty(properties.getStreamPartitionerBeanName())) {
		streamPartitioner = getApplicationContext().getBean(properties.getStreamPartitionerBeanName(),
				StreamPartitioner.class);
	}
	if (streamPartitioner != null) {
		produced.withStreamPartitioner(streamPartitioner);
	}
	if (!isNativeEncoding) {
		LOG.info("Native encoding is disabled for " + name
				+ ". Outbound message conversion done by Spring Cloud Stream.");
		outboundBindTarget.filter((k, v) -> v == null)
				.to(name, produced);
		this.kafkaStreamsMessageConversionDelegate
				.serializeOnOutbound(outboundBindTarget)
				.to(name, produced);
	}
	else {
		LOG.info("Native encoding is enabled for " + name
				+ ". Outbound serialization done at the broker.");
		outboundBindTarget.to(name, produced);
	}
}
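
The binder above only attaches a partitioner when one is configured in the producer properties. For reference, a minimal sketch of building a Produced with a custom StreamPartitioner (the class name and serde choices here are illustrative assumptions, not part of the binder):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.processor.StreamPartitioner;

public class PartitionerSketch {
    static Produced<String, Long> producedWithPartitioner() {
        // Route each record to a partition derived from the key's hash (illustrative only).
        final StreamPartitioner<String, Long> byKeyHash =
                (topic, key, value, numPartitions) -> Math.floorMod(key.hashCode(), numPartitions);
        return Produced.with(Serdes.String(), Serdes.Long())
                .withStreamPartitioner(byKeyHash);
    }
}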