Java Code Examples for org.apache.kafka.streams.kstream.KStream

The following examples show how to use org.apache.kafka.streams.kstream.KStream. They are extracted from open source projects; where known, the source project, file, and license are noted above each example.
Example 1
Source Project: fluent-kafka-streams-tests   Source File: UserClicksPerMinute.java    License: MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.inputTopic);

    final KTable<Windowed<Integer>, Long> counts = clickEvents
            .groupByKey()
            .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))
            .count();

    counts.toStream()
            // unwrap the Windowed<Integer> key; keep the window start time in the output value
            .map((key, value) -> KeyValue.pair(
                    key.key(),
                    new ClickOutput(key.key(), value, key.window().start())))
            .to(this.outputTopic, Produced.with(Serdes.Integer(), new JsonSerde<>(ClickOutput.class)));

    return builder.build();
}
 
Example 2
@Override
public void start() throws Exception {
    Predicate<String, EventEnvelope> inventoryItemCreated = (k, v) -> k.equals(InventoryItemCreated.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemRenamed =  (k, v) -> k.equals(InventoryItemRenamed.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemDeactivated = (k, v) -> k.equals(InventoryItemDeactivated.class.getSimpleName());

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, EventEnvelope>[] filteredStreams = builder
            .stream(INVENTORY_ITEM_TOPIC, Consumed.with(Serdes.String(), initializeEnvelopeSerde()))
            .selectKey((k, v) -> v.eventType)
            .branch(inventoryItemCreated, inventoryItemRenamed, inventoryItemDeactivated);

    filteredStreams[0].process(InventoryItemCreatedHandler::new);
    filteredStreams[1].process(InventoryItemRenamedHandler::new);
    filteredStreams[2].process(InventoryItemDeactivatedHandler::new);

    kafkaStreams = new KafkaStreams(builder.build(), getProperties());
    kafkaStreams.cleanUp(); // -- only because we are using in-memory
    kafkaStreams.start();
}
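Note: KStream#branch is deprecated since Kafka Streams 2.8 in favor of split(). Below is a hedged sketch of the same three-way routing using split()/Branched; the branch names are illustrative, not from the original project.

// Sketch only: equivalent routing via split()/Branched (Kafka Streams 2.8+).
Map<String, KStream<String, EventEnvelope>> branches = builder
        .stream(INVENTORY_ITEM_TOPIC, Consumed.with(Serdes.String(), initializeEnvelopeSerde()))
        .selectKey((k, v) -> v.eventType)
        .split(Named.as("item-"))                    // prefix for the map keys below
        .branch(inventoryItemCreated, Branched.as("created"))
        .branch(inventoryItemRenamed, Branched.as("renamed"))
        .branch(inventoryItemDeactivated, Branched.as("deactivated"))
        .noDefaultBranch();                          // Map keyed by "item-created", "item-renamed", ...
branches.get("item-created").process(InventoryItemCreatedHandler::new);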
 
Example 3
Source Project: fluent-kafka-streams-tests   Source File: WordCount.java    License: MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count();

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
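A topology like this can be driven without a broker using TopologyTestDriver (Kafka Streams 2.4+). A minimal sketch, assuming the input and output topics are named "input" and "output"; the serdes mirror the ones above.

// Minimal test-driver sketch; "input"/"output" are assumed topic names.
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-test");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
try (TopologyTestDriver driver = new TopologyTestDriver(getTopology(), props)) {
    final TestInputTopic<String, String> in = driver.createInputTopic(
            "input", Serdes.String().serializer(), Serdes.String().serializer());
    final TestOutputTopic<String, Long> out = driver.createOutputTopic(
            "output", Serdes.String().deserializer(), Serdes.Long().deserializer());
    in.pipeInput("hello world hello");
    System.out.println(out.readKeyValuesToMap()); // {hello=2, world=1}
}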
 
Example 4
@Override
public KStream createInput(String name) {
	BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
	ConsumerProperties consumerProperties = bindingProperties.getConsumer();
	if (consumerProperties == null) {
		consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
		consumerProperties.setUseNativeDecoding(true);
	}
	else {
		if (!encodingDecodingBindAdviceHandler.isDecodingSettingProvided()) {
			consumerProperties.setUseNativeDecoding(true);
		}
	}
	// Always set multiplex to true in the kafka streams binder
	consumerProperties.setMultiplex(true);
	return createProxyForKStream(name);
}
 
Example 5
Source Project: kafka-tutorials   Source File: RunningAverage.java    License: Apache License 2.0
protected static KTable<Long, Double> getRatingAverageTable(KStream<Long, Rating> ratings,
                                                            String avgRatingsTopicName,
                                                            SpecificAvroSerde<CountAndSum> countAndSumSerde) {

  // Grouping Ratings
  KGroupedStream<Long, Double> ratingsById = ratings
      .map((key, rating) -> new KeyValue<>(rating.getMovieId(), rating.getRating()))
      .groupByKey(with(Long(), Double()));

  final KTable<Long, CountAndSum> ratingCountAndSum =
      ratingsById.aggregate(() -> new CountAndSum(0L, 0.0),
                            (key, value, aggregate) -> {
                              aggregate.setCount(aggregate.getCount() + 1);
                              aggregate.setSum(aggregate.getSum() + value);
                              return aggregate;
                            },
                            Materialized.with(Long(), countAndSumSerde));

  final KTable<Long, Double> ratingAverage =
      ratingCountAndSum.mapValues(value -> value.getSum() / value.getCount(),
                                  Materialized.as("average-ratings"));

  // persist the result to the output topic
  ratingAverage.toStream().to(avgRatingsTopicName);
  return ratingAverage;
}
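The unqualified with(...), Long() and Double() calls above rely on static imports, presumably:

import static org.apache.kafka.common.serialization.Serdes.Double;
import static org.apache.kafka.common.serialization.Serdes.Long;
import static org.apache.kafka.streams.kstream.Grouped.with;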
 
Example 6
private KStream<?, ?> getkStream(String inboundName,
								KafkaStreamsStateStoreProperties storeSpec,
								BindingProperties bindingProperties,
								KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties, StreamsBuilder streamsBuilder,
								Serde<?> keySerde, Serde<?> valueSerde,
								Topology.AutoOffsetReset autoOffsetReset, boolean firstBuild) {
	if (storeSpec != null) {
		StoreBuilder storeBuilder = buildStateStore(storeSpec);
		streamsBuilder.addStateStore(storeBuilder);
		if (LOG.isInfoEnabled()) {
			LOG.info("state store " + storeBuilder.name() + " added to topology");
		}
	}
	return getKStream(inboundName, bindingProperties, kafkaStreamsConsumerProperties, streamsBuilder,
			keySerde, valueSerde, autoOffsetReset, firstBuild);
}
 
Example 7
@Bean
public Function<KStream<Object, Sensor>, KStream<String, Long>> process() {

    Map<String, Object> configs = new HashMap<>();
    configs.put("valueClass", Sensor.class);
    configs.put("contentType", "application/*+avro");
    customSerde.configure(configs, false);

    return input -> input
            .map((key, value) -> {

                String newKey = "v1";
                if (value.getId().toString().endsWith("v2")) {
                    newKey = "v2";
                }
                return new KeyValue<>(newKey, value);
            })
            .groupByKey(Grouped.with(Serdes.String(), customSerde))
            .count(Materialized.as(STORE_NAME))
            .toStream();
}
 
Example 8
Source Project: spring_io_2019   Source File: ScsApplication.java    License: Apache License 2.0
@StreamListener
@SendTo(Bindings.RATED_MOVIES)
KStream<Long, RatedMovie> rateMoviesFor(@Input(Bindings.AVG_TABLE) KTable<Long, Double> ratings,
                                        @Input(Bindings.MOVIES) KTable<Long, Movie> movies) {

  ValueJoiner<Movie, Double, RatedMovie> joiner = (movie, rating) ->
      new RatedMovie(movie.getMovieId(), movie.getReleaseYear(), movie.getTitle(), rating);

  // join movies with their average ratings, materialized as the RATED_MOVIES_STORE
  return movies
      .join(ratings, joiner, Materialized
          .<Long, RatedMovie, KeyValueStore<Bytes, byte[]>>as(Bindings.RATED_MOVIES_STORE)
          .withKeySerde(Serdes.Long())
          .withValueSerde(new JsonSerde<>(RatedMovie.class)))
      .toStream();
}
 
Example 9
@Test
public void test() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic("decode-words");
	template.sendDefault("foobar");
	StopWatch stopWatch = new StopWatch();
	stopWatch.start();
	System.out.println("Starting: ");
	ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer,
			"decode-counts");
	stopWatch.stop();
	System.out.println("Total time: " + stopWatch.getTotalTimeSeconds());
	assertThat(cr.value()).isEqualTo("Count for foobar : 1");

	verify(conversionDelegate).serializeOnOutbound(any(KStream.class));
	verify(conversionDelegate).deserializeOnInbound(any(Class.class),
			any(KStream.class));
}
 
Example 10
Source Project: micronaut-kafka   Source File: WordCountStream.java    License: Apache License 2.0
@Singleton
@Named(MY_STREAM)
KStream<String, String> myStream(
        @Named(MY_STREAM) ConfiguredStreamBuilder builder) {

    // set default serdes
    Properties props = builder.getConfiguration();
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KStream<String, String> source = builder.stream(NAMED_WORD_COUNT_INPUT);
    KTable<String, Long> counts = source
            .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split(" ")))
            .groupBy((key, value) -> value)
            .count();

    // need to override value serde to Long type
    counts.toStream().to(NAMED_WORD_COUNT_OUTPUT, Produced.with(Serdes.String(), Serdes.Long()));
    return source;
}
 
Example 11
@Override
@SuppressWarnings("unchecked")
public KStream createOutput(final String name) {

	BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
	ProducerProperties producerProperties = bindingProperties.getProducer();
	if (producerProperties == null) {
		producerProperties = this.bindingServiceProperties.getProducerProperties(name);
		producerProperties.setUseNativeEncoding(true);
	}
	else {
		if (!encodingDecodingBindAdviceHandler.isEncodingSettingProvided()) {
			producerProperties.setUseNativeEncoding(true);
		}
	}
	return createProxyForKStream(name);
}
 
Example 12
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
	if (methodInvocation.getMethod().getDeclaringClass().equals(KStream.class)) {
		Assert.notNull(this.delegate,
				"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
						+ " but no delegate has been set.");
		return methodInvocation.getMethod().invoke(this.delegate,
				methodInvocation.getArguments());
	}
	else if (methodInvocation.getMethod().getDeclaringClass()
			.equals(KStreamWrapper.class)) {
		return methodInvocation.getMethod().invoke(this,
				methodInvocation.getArguments());
	}
	else {
		throw new IllegalStateException(
				"Only KStream method invocations are permitted");
	}
}
 
Example 13
@Bean
public Consumer<KStream<String, DomainEvent>> aggregate() {

	ObjectMapper mapper = new ObjectMapper();
	Serde<DomainEvent> domainEventSerde = new JsonSerde<>( DomainEvent.class, mapper );

	return input -> input
			.groupBy(
					(s, domainEvent) -> domainEvent.boardUuid,
					Grouped.with(null, domainEventSerde)) // null key serde -> fall back to the configured default
			.aggregate(
					String::new,
					(s, domainEvent, board) -> board.concat(domainEvent.eventType),
					Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("test-events-snapshots")
							.withKeySerde(Serdes.String())
							.withValueSerde(Serdes.String())
			);
}
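The aggregation materializes a queryable store named "test-events-snapshots". Against a running KafkaStreams instance (call it kafkaStreams; it is not part of the bean above) it could be read roughly as below, using the older two-argument store() API; newer releases wrap the arguments in StoreQueryParameters. boardUuid is a placeholder.

// Sketch: interactive query of the snapshot store built above.
ReadOnlyKeyValueStore<String, String> store =
        kafkaStreams.store("test-events-snapshots", QueryableStoreTypes.keyValueStore());
String eventTypes = store.get(boardUuid); // concatenated event types for one board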
 
Example 14
public static void main(String[] args) {

		Properties config = new Properties();
		config.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-starter-app");
		config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
		config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
		config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
		config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

		StreamsBuilder builder = new StreamsBuilder();

		KStream<String, String> kStream = builder.stream("streams-file-input");
		// do stuff
		kStream.to("streams-wordcount-output");

		KafkaStreams streams = new KafkaStreams(builder.build(), config);
		streams.cleanUp(); // only do this in dev - not in prod
		streams.start();

		// print the threads' runtime metadata (not the topology itself)
		System.out.println(streams.localThreadsMetadata().toString());

		// shutdown hook to correctly close the streams application
		Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

	}
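To print the actual topology wiring (sub-topologies, sources, processors, sinks) rather than thread metadata, Topology#describe() is the tool; a minimal sketch against the builder above:

// Sketch: describe the topology itself.
Topology topology = builder.build();
System.out.println(topology.describe());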
 
Example 15
private QueuedSchemaKStream(
    final Schema schema,
    final KStream kstream,
    final Field keyField,
    final List<SchemaKStream> sourceSchemaKStreams,
    final Type type,
    final FunctionRegistry functionRegistry,
    final Optional<Integer> limit,
    final OutputNode outputNode,
    final SchemaRegistryClient schemaRegistryClient
) {
  super(
      schema,
      kstream,
      keyField,
      sourceSchemaKStreams,
      type,
      functionRegistry,
      schemaRegistryClient
  );
  setOutputNode(outputNode);
  kstream.foreach(new QueuedSchemaKStream.QueuePopulator(rowQueue, limit));
}
 
Example 16
Source Project: kafka-graphs   Source File: StreamUtils.java    License: Apache License 2.0
public static <K, V> KStream<K, V> streamFromCollection(
    StreamsBuilder builder,
    Properties props,
    String topic,
    int numPartitions,
    short replicationFactor,
    Serde<K> keySerde,
    Serde<V> valueSerde,
    Collection<KeyValue<K, V>> values) {

    ClientUtils.createTopic(topic, numPartitions, replicationFactor, props);
    try (Producer<K, V> producer = new KafkaProducer<>(props, keySerde.serializer(), valueSerde.serializer())) {
        for (KeyValue<K, V> value : values) {
            ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, value.key, value.value);
            producer.send(producerRecord);
        }
        producer.flush();
    }
    return builder.stream(topic, Consumed.with(keySerde, valueSerde));
}
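A hypothetical call, seeding a single-partition topic with two records and getting a KStream over them (topic name and contents are invented for illustration):

// Hypothetical usage of streamFromCollection as defined above.
KStream<String, Long> seeded = StreamUtils.streamFromCollection(
        builder, props, "seed-topic", 1, (short) 1,
        Serdes.String(), Serdes.Long(),
        Arrays.asList(KeyValue.pair("a", 1L), KeyValue.pair("b", 2L)));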
 
Example 17
@Override public Topology get() {
  StreamsBuilder builder = new StreamsBuilder();
  if (aggregationEnabled) {
    // Aggregate Spans to Traces
    KStream<String, List<Span>> tracesStream =
        builder.stream(spansTopic, Consumed.with(Serdes.String(), spansSerde))
            .groupByKey()
            // how long to wait for another span
            .windowedBy(SessionWindows.with(traceTimeout).grace(Duration.ZERO))
            .aggregate(ArrayList::new, aggregateSpans(), joinAggregates(),
                Materialized
                    .<String, List<Span>>as(
                        Stores.persistentSessionStore(TRACE_AGGREGATION_STORE,
                            Duration.ofDays(1)))
                    .withKeySerde(Serdes.String())
                    .withValueSerde(spansSerde)
                    .withLoggingDisabled()
                    .withCachingEnabled())
            // hold results until the window closes (newer records advance stream time past the window end)
            .suppress(untilWindowCloses(unbounded()))
            .toStream()
            .selectKey((windowed, spans) -> windowed.key());
    // Downstream to traces topic
    tracesStream.to(traceTopic, Produced.with(Serdes.String(), spansSerde));
    // Map to dependency links
    tracesStream.flatMapValues(spansToDependencyLinks())
        .selectKey((key, value) -> linkKey(value))
        .to(dependencyTopic, Produced.with(Serdes.String(), dependencyLinkSerde));
  }
  return builder.build();
}
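The unqualified untilWindowCloses(unbounded()) call above relies on static imports, presumably:

import static org.apache.kafka.streams.kstream.Suppressed.untilWindowCloses;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.unbounded;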
 
Example 18
@Test
@Ignore
public void test() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic("foos");
	template.sendDefault(1, 7, "hello");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	Consumer<String, String> consumer1 = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
			"error.foos.foobar-group");

	ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
			"error.foos.foobar-group");
	assertThat(cr.value()).isEqualTo("hello");
	assertThat(cr.partition()).isEqualTo(0);

	// Ensuring that the deserialization was indeed done by the binder
	verify(conversionDelegate).deserializeOnInbound(any(Class.class),
			any(KStream.class));
}
 
Example 19
@Bean
public Function<KStream<Object, String>, KStream<?, WordCount>> process() {

	return input -> input
			.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.map((key, value) -> new KeyValue<>(value, value))
			.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
			.windowedBy(TimeWindows.of(5000)) // 5-second tumbling windows; this long overload is in milliseconds
			.count(Materialized.as("WordCounts-1"))
			.toStream()
			.map((key, value) -> new KeyValue<>(null,
					new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
}
 
Example 20
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, Person> persons = builder.stream(this.inputTopic);

    final KTable<String, Long> counts = persons
            .groupBy((name, person) -> person.getCity())
            .count();

    counts.toStream()
            .map((cityName, count) -> KeyValue.pair(cityName, new City(cityName, Math.toIntExact(count))))
            .to(this.outputTopic);

    return builder.build();
}
 
Example 21
Source Project: fluent-kafka-streams-tests   Source File: ErrorEventsPerMinute.java    License: MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();

    // Click Events
    final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.clickInputTopic,
            Consumed.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class)));

    final KTable<Windowed<Integer>, Long> counts = clickEvents
            .selectKey(((key, value) -> value.getStatus()))
            .filter(((key, value) -> key >= 400))
            .groupByKey(Grouped.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class)))
            .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))  // tumbling windows of one minute
            .count();

    // Status codes
    final KTable<Integer, StatusCode> statusCodes = builder.table(this.statusInputTopic,
            Consumed.with(Serdes.Integer(), new JsonSerde<>(StatusCode.class)));

    // Join
    final KStream<Integer, ErrorOutput> errors = counts.toStream()
            .map((key, value) -> KeyValue.pair(
                    key.key(),
                    new ErrorOutput(key.key(), value, key.window().start(), null /*empty definition*/)))
            .join(statusCodes,
                    (countRecord, code) -> new ErrorOutput(
                            countRecord.getStatusCode(), countRecord.getCount(), countRecord.getTime(), code.getDefinition()),
                    Joined.valueSerde(new JsonSerde<>(ErrorOutput.class)));
    errors.to(this.errorOutputTopic);

    // Send an alert if a given error code occurs more than five times within a minute
    errors.filter((key, errorOutput) -> errorOutput.getCount() > 5L).to(this.alertTopic);

    return builder.build();
}
 
Example 22
@StreamListener( "input" )
public void process( KStream<Object, byte[]> input ) {
    log.debug( "process : enter" );

    input
            .flatMap( (key, value) -> {

                try {

                    DomainEvent domainEvent = mapper.readValue( value, DomainEvent.class );
                    log.debug( "process : domainEvent=" + domainEvent );

                    return Collections.singletonList(
                            new KeyValue<>( domainEvent.getBoardUuid().toString(), domainEvent ) );

                } catch( IOException e ) {
                    log.error( "process : error converting json to DomainEvent", e );
                }

                // drop unparsable records; map() returning null here would fail downstream
                return Collections.<KeyValue<String, DomainEvent>>emptyList();
            })
            // Serialized is deprecated in newer releases in favor of Grouped
            .groupBy( (s, domainEvent) -> s, Serialized.with( Serdes.String(), domainEventSerde ) )
            .aggregate(
                    Board::new,
                    (key, domainEvent, board) -> board.handleEvent( domainEvent ),
                    Materialized.<String, Board, KeyValueStore<Bytes, byte[]>>as( BOARD_EVENTS_SNAPSHOTS )
                        .withKeySerde( Serdes.String() )
                        .withValueSerde( boardSerde )
            );

    log.debug( "process : exit" );
}
 
Example 23
Source Project: kafka-tutorials   Source File: TransformStream.java    License: Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();
    final String inputTopic = envProps.getProperty("input.topic.name");

    KStream<String, RawMovie> rawMovies = builder.stream(inputTopic);
    KStream<Long, Movie> movies = rawMovies.map((key, rawMovie) ->
            new KeyValue<Long, Movie>(rawMovie.getId(), convertRawMovie(rawMovie)));

    movies.to("movies", Produced.with(Serdes.Long(), movieAvroSerde(envProps)));

    return builder.build();
}
 
Example 24
Source Project: kafka-tutorials   Source File: RunningAverage.java    License: Apache License 2.0
private Topology buildTopology(StreamsBuilder bldr,
                               Properties envProps) {

  final String ratingTopicName = envProps.getProperty("input.ratings.topic.name");
  final String avgRatingsTopicName = envProps.getProperty("output.rating-averages.topic.name");

  KStream<Long, Rating> ratingStream = bldr.stream(ratingTopicName,
                                                   Consumed.with(Serdes.Long(), getRatingSerde(envProps)));

  getRatingAverageTable(ratingStream, avgRatingsTopicName, getCountAndSumSerde(envProps));

  // finish the topology
  return bldr.build();
}
 
Example 25
Source Project: kafka-tutorials   Source File: MergeStreams.java    License: Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();

    final String rockTopic = envProps.getProperty("input.rock.topic.name");
    final String classicalTopic = envProps.getProperty("input.classical.topic.name");
    final String allGenresTopic = envProps.getProperty("output.topic.name");

    KStream<String, SongEvent> rockSongs = builder.stream(rockTopic);
    KStream<String, SongEvent> classicalSongs = builder.stream(classicalTopic);
    KStream<String, SongEvent> allSongs = rockSongs.merge(classicalSongs); // interleaves the two streams; no ordering guarantee between them

    allSongs.to(allGenresTopic);
    return builder.build();
}
 
Example 26
@SuppressWarnings("unchecked")
private void to(boolean isNativeEncoding, String name,
				KStream<Object, Object> outboundBindTarget, Serde<Object> keySerde,
				Serde<Object> valueSerde, KafkaStreamsProducerProperties properties) {
	final Produced<Object, Object> produced = Produced.with(keySerde, valueSerde);
	StreamPartitioner streamPartitioner = null;
	if (!StringUtils.isEmpty(properties.getStreamPartitionerBeanName())) {
		streamPartitioner = getApplicationContext().getBean(properties.getStreamPartitionerBeanName(),
				StreamPartitioner.class);
	}
	if (streamPartitioner != null) {
		// Produced#withStreamPartitioner mutates this instance and returns it, so the result can be ignored
		produced.withStreamPartitioner(streamPartitioner);
	}
	if (!isNativeEncoding) {
		LOG.info("Native encoding is disabled for " + name
				+ ". Outbound message conversion done by Spring Cloud Stream.");
		outboundBindTarget.filter((k, v) -> v == null)
				.to(name, produced); // pass tombstone (null-value) records through without conversion
		this.kafkaStreamsMessageConversionDelegate
				.serializeOnOutbound(outboundBindTarget)
				.to(name, produced);
	}
	else {
		LOG.info("Native encoding is enabled for " + name
				+ ". Outbound serialization done at the broker.");
		outboundBindTarget.to(name, produced);
	}
}
 
Example 27
Source Project: kafka-graphs   Source File: EdgeStream.java    License: Apache License 2.0
/**
 * Builds the neighborhood state by creating adjacency lists.
 *
 * @param directed if true, only the out-neighbors will be stored;
 *                 otherwise both directions are considered
 * @return a stream of Tuple3, where the first 2 fields identify the edge processed
 * and the third field is the adjacency list that was updated by processing this edge.
 */
@Override
public KStream<Edge<K>, Set<K>> buildNeighborhood(boolean directed) {

    KStream<Edge<K>, EV> result = edges();
    if (!directed) {
        result = undirected().edges();
    }
    return result.map(new BuildNeighborhoods<K, EV>());
}
 
Example 28
@StreamListener("input")
@SendTo("output")
public KStream<Integer, Long> process(KStream<Object, Product> input) {
	return input.filter((key, product) -> product.getId() == 123)
			.map((key, value) -> new KeyValue<>(value, value))
			// Serialized is deprecated in newer releases in favor of Grouped
			.groupByKey(Serialized.with(new JsonSerde<>(Product.class),
					new JsonSerde<>(Product.class)))
			.windowedBy(TimeWindows.of(5000)) // 5-second windows; the long overload is in milliseconds
			.count(Materialized.as("id-count-store-x")).toStream()
			.map((key, value) -> new KeyValue<>(key.key().id, value));
}
 
Example 29
Source Project: kafka-graphs   Source File: EdgeStream.java    License: Apache License 2.0
/**
 * Get the degree stream
 *
 * @return a stream of vertices, with the degree as the vertex value
 */
@Override
public KStream<K, Long> degrees() {
    return aggregate(
        new DegreeTypeSeparator<K, EV>(true, true),
        new DegreeMapFunction<K>()
    );
}
 
Example 30
Source Project: micronaut-kafka   Source File: OptimizationStream.java    License: Apache License 2.0
@Singleton
@Named(STREAM_OPTIMIZATION_OFF)
KStream<String, String> optimizationOff(
        @Named(STREAM_OPTIMIZATION_OFF) ConfiguredStreamBuilder builder) {
    // set default serdes
    Properties props = builder.getConfiguration();
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KTable<String, String> table = builder
            .table(OPTIMIZATION_OFF_INPUT, Materialized.as(OPTIMIZATION_OFF_STORE));

    return table.toStream();
}
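Despite the STREAM_OPTIMIZATION_OFF name, the toggle itself is not shown here. In plain Kafka Streams the switch is the topology.optimization property; a hedged sketch with the Kafka 2.x constant names:

// Sketch: explicitly disabling topology optimization (the likely intent of STREAM_OPTIMIZATION_OFF).
props.put(StreamsConfig.TOPOLOGY_OPTIMIZATION, StreamsConfig.NO_OPTIMIZATION);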