Java Code Examples for org.apache.kafka.streams.KeyValue

The following examples show how to use org.apache.kafka.streams.KeyValue. The examples are extracted from open-source projects; where known, the originating project, source file, and license are noted above each example.
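For orientation before the examples: KeyValue is a simple immutable pair class with public key and value fields and a static KeyValue.pair(key, value) factory. A minimal, self-contained sketch (the class name and sample values here are illustrative, not taken from the projects below):

import org.apache.kafka.streams.KeyValue;

public class KeyValueBasics {
    public static void main(String[] args) {
        // The constructor and the static factory are equivalent ways to build a pair.
        KeyValue<String, Long> viaConstructor = new KeyValue<>("page-views", 42L);
        KeyValue<String, Long> viaFactory = KeyValue.pair("page-views", 42L);

        // key and value are public final fields, read directly rather than through getters.
        System.out.println(viaConstructor.key + " -> " + viaConstructor.value);

        // KeyValue defines value-based equals/hashCode.
        System.out.println(viaConstructor.equals(viaFactory)); // prints: true
    }
}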
Example 1
Source Project: fluent-kafka-streams-tests   Source File: UserClicksPerMinute.java    License: MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.inputTopic);

    final KTable<Windowed<Integer>, Long> counts = clickEvents
            .groupByKey()
            .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))
            .count();

    counts.toStream()
            .map((key, value) -> KeyValue.pair(
                    key.key(),
                    new ClickOutput(key.key(), value, key.window().start())))
            .to(this.outputTopic, Produced.with(Serdes.Integer(), new JsonSerde<>(ClickOutput.class)));

    return builder.build();
}
 
Example 2
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public void filter(FilterReq request, StreamObserver<io.apicurio.registry.streams.distore.proto.KeyValue> responseObserver) {
    boolean ok = false;
    try (
        Stream stream = keyValueStore(request.getStoreName()).filter(request.getFilter(), request.getOver())
    ) {
        drainToKeyValue(request.getStoreName(), stream, responseObserver);
        ok = true;
    } catch (Throwable e) {
        responseObserver.onError(e);
    }
    if (ok) {
        responseObserver.onCompleted();
    }
}
 
Example 3
@Override
public void all(VoidReq request, StreamObserver<io.apicurio.registry.streams.distore.proto.KeyValue> responseObserver) {
    boolean ok = false;
    try (
        KeyValueIterator<?, ?> iter =
            keyValueStore(request.getStoreName()).all()
    ) {
        drainToKeyValue(request.getStoreName(), iter, responseObserver);
        ok = true;
    } catch (Throwable e) {
        responseObserver.onError(e);
    }
    if (ok) {
        responseObserver.onCompleted();
    }
}
 
Example 4
@Override
public KeyValueIterator<K, V> range(K from, K to) {
    ByteString fromBytes = ByteString.copyFrom(keyValueSerde.serializeKey(from));
    ByteString toBytes = ByteString.copyFrom(keyValueSerde.serializeKey(to));
    StreamObserverSpliterator<io.apicurio.registry.streams.distore.proto.KeyValue> observer = new StreamObserverSpliterator<>();
    stub.range(
        KeyFromKeyToReq
            .newBuilder()
            .setKeyFrom(fromBytes)
            .setKeyTo(toBytes)
            .setStoreName(storeName)
            .build(),
        observer
    );
    return keyValueIterator(observer.stream());
}
 
Example 5
/**
 * Test based on
 * Kafka_Streams_TensorFlow_Image_Recognition_Example_IntegrationTest
 *
 */
@Test
public void testList() {
    // Flight data (two flights) --> we want to predict whether each flight will be delayed or not
    List<String> inputValues = Arrays.asList(
            "1987,10,14,3,741,730,912,849,PS,1451,NA,91,79,NA,23,11,SAN,SFO,447,NA,NA,0,NA,0,NA,NA,NA,NA,NA,YES,YES",
            "1999,10,14,3,741,730,912,849,PS,1451,NA,91,79,NA,23,11,SAN,SFO,447,NA,NA,0,NA,0,NA,NA,NA,NA,NA,YES,YES");
    List<KeyValue<String, String>> records = inputValues.stream()
            .map(v -> new KeyValue<String, String>(null, v)).collect(Collectors.toList());

    testDriver.pipeInput(recordFactory.create(Kafka_Streams_MachineLearning_H2O_GBM_Example.INPUT_TOPIC,
            records, 1L, 100L));
    // One prediction is read per getOutput() call, in input order.
    assertThat(getOutput()).isEqualTo("Prediction: Is Airline delayed? => YES");
    assertThat(getOutput()).isEqualTo("Prediction: Is Airline delayed? => NO");
}
 
Example 6
Source Project: kafka-tutorials   Source File: AggregatingCount.java    License: Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply COUNT method
      .count()
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

  return builder.build();
}
 
Example 7
Source Project: kafka-graphs   Source File: SpannerTest.java    License: Apache License 2.0
static List<KeyValue<Edge<Long>, Void>> getEdges() {
    List<KeyValue<Edge<Long>, Void>> edges = new ArrayList<>();
    edges.add(new KeyValue<>(new Edge<>(1L, 4L), null));
    edges.add(new KeyValue<>(new Edge<>(4L, 7L), null));
    edges.add(new KeyValue<>(new Edge<>(7L, 8L), null));
    edges.add(new KeyValue<>(new Edge<>(4L, 8L), null));
    edges.add(new KeyValue<>(new Edge<>(4L, 5L), null));
    edges.add(new KeyValue<>(new Edge<>(5L, 6L), null));
    edges.add(new KeyValue<>(new Edge<>(2L, 3L), null));
    edges.add(new KeyValue<>(new Edge<>(3L, 4L), null));
    edges.add(new KeyValue<>(new Edge<>(3L, 6L), null));
    edges.add(new KeyValue<>(new Edge<>(8L, 9L), null));
    edges.add(new KeyValue<>(new Edge<>(6L, 8L), null));
    edges.add(new KeyValue<>(new Edge<>(5L, 9L), null));
    return edges;
}
 
Example 8
Source Project: kiqr   Source File: SessionWindowQueryVerticle.java    License: Apache License 2.0
@Override
public void start() throws Exception {

    execute(Config.SESSION_QUERY_ADDRESS_PREFIX, (abstractQuery, keySerde, valueSerde) -> {

        KeyBasedQuery query = (KeyBasedQuery) abstractQuery;
        ReadOnlySessionStore<Object, Object> store = streams.store(query.getStoreName(), QueryableStoreTypes.sessionStore());
        try (KeyValueIterator<Windowed<Object>, Object> result = store.fetch(deserializeObject(keySerde, query.getKey()))) {

            if (result.hasNext()) {
                List<Window> results = new ArrayList<>();
                while (result.hasNext()) {
                    KeyValue<Windowed<Object>, Object> windowedEntry = result.next();
                    results.add(new Window(windowedEntry.key.window().start(), windowedEntry.key.window().end(), base64Encode(valueSerde, windowedEntry.value)));
                }
                return new SessionQueryResponse(results);
            } else {
                return new SessionQueryResponse(Collections.emptyList());
            }
        }
    });
}
 
Example 9
@Test
public void shouldSelectAllFromUsers() throws Exception {
  final QueuedQueryMetadata queryMetadata = executeQuery(
      "SELECT * from %s;", userTable);

  BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();

  Set<String> actualUsers = new HashSet<>();
  Set<String> expectedUsers = Utils.mkSet("USER_0", "USER_1", "USER_2", "USER_3", "USER_4");
  while (actualUsers.size() < expectedUsers.size()) {
    KeyValue<String, GenericRow> nextRow = rowQueue.poll(); // non-blocking; returns null until a row arrives
    if (nextRow != null) {
      List<Object> columns = nextRow.value.getColumns();
      assertEquals(6, columns.size());
      actualUsers.add((String) columns.get(1));
    }
  }
  assertEquals(expectedUsers, actualUsers);
}
 
Example 10
@StreamListener
@SendTo("output")
public KStream<?, WordCount> process(
		@Input("input") KStream<Object, String> input) {

	// Debug branch: prints each incoming record; the mapped stream is not consumed further.
	input.map((k, v) -> {
		System.out.println(k);
		System.out.println(v);
		return new KeyValue<>(k, v);
	});
	return input
			.flatMapValues(
					value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.map((key, value) -> new KeyValue<>(value, value))
			.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
			.count(Materialized.as("WordCounts")).toStream()
			.map((key, value) -> new KeyValue<>(null, new WordCount(key, value)));
}
 
Example 11
@StreamListener
@SendTo("output")
public KStream<String, Long> process(
		@Input("input") KStream<String, Long> userClicksStream,
		@Input("input-x") KTable<String, String> userRegionsTable) {

	return userClicksStream
			.leftJoin(userRegionsTable,
					(clicks, region) -> new RegionWithClicks(
							region == null ? "UNKNOWN" : region, clicks),
					Joined.with(Serdes.String(), Serdes.Long(), null))
			.map((user, regionWithClicks) -> new KeyValue<>(
					regionWithClicks.getRegion(), regionWithClicks.getClicks()))
			.groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
			.reduce(Long::sum)
			.toStream();
}
 
Example 12
@Override
public void punctuate(long timestamp) {
    // try-with-resources ensures the store iterator is closed even if forwarding fails
    try (KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all()) {
        while (performanceIterator.hasNext()) {
            KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
            String key = keyValue.key;
            StockPerformance stockPerformance = keyValue.value;

            if (stockPerformance != null) {
                if (stockPerformance.priceDifferential() >= differentialThreshold ||
                        stockPerformance.volumeDifferential() >= differentialThreshold) {
                    context.forward(key, stockPerformance);
                }
            }
        }
    }
}
 
Example 13
public void cogroup(long timestamp) {
    KeyValueIterator<String, Tuple<List<ClickEvent>, List<StockTransaction>>> iterator = tupleStore.all();

    while (iterator.hasNext()) {
        KeyValue<String, Tuple<List<ClickEvent>, List<StockTransaction>>> cogrouping = iterator.next();

        if (cogrouping.value != null && (!cogrouping.value._1.isEmpty() || !cogrouping.value._2.isEmpty())) {
            List<ClickEvent> clickEvents = new ArrayList<>(cogrouping.value._1);
            List<StockTransaction> stockTransactions = new ArrayList<>(cogrouping.value._2);

            context().forward(cogrouping.key, Tuple.of(clickEvents, stockTransactions));
            cogrouping.value._1.clear();
            cogrouping.value._2.clear();
            tupleStore.put(cogrouping.key, cogrouping.value);
        }
    }
    iterator.close();
}
 
Example 14
@Override
public void punctuate(long timestamp) {
    KeyValueIterator<String, Tuple<List<ClickEvent>, List<StockTransaction>>> iterator = tupleStore.all();

    while (iterator.hasNext()) {
        KeyValue<String, Tuple<List<ClickEvent>, List<StockTransaction>>> cogrouped = iterator.next();
        // if either list contains values forward results
        if (cogrouped.value != null && (!cogrouped.value._1.isEmpty() || !cogrouped.value._2.isEmpty())) {
            List<ClickEvent> clickEvents = new ArrayList<>(cogrouped.value._1);
            List<StockTransaction> stockTransactions = new ArrayList<>(cogrouped.value._2);

            context.forward(cogrouped.key, Tuple.of(clickEvents, stockTransactions));
            // empty out the current cogrouped results
            cogrouped.value._1.clear();
            cogrouped.value._2.clear();
            tupleStore.put(cogrouped.key, cogrouped.value);
        }
    }
    iterator.close();
}
 
Example 15
/**
 * Test Word count of sentence list.
 */
@Test
public void shouldCountWords() {
    final List<String> inputLines = Arrays.asList(
            "Kafka Streams Examples",
            "Spring Cloud Stream Sample",
            "Using Kafka Streams Test Utils"
    );
    final List<KeyValue<String, String>> inputRecords = inputLines.stream().map(v -> new KeyValue<String, String>(null, v)).collect(Collectors.toList());

    final Map<String, Long> expectedWordCounts = new HashMap<>();
    expectedWordCounts.put("spring", 1L);
    expectedWordCounts.put("cloud", 1L);
    expectedWordCounts.put("examples", 1L);
    expectedWordCounts.put("sample", 1L);
    expectedWordCounts.put("streams", 2L);
    expectedWordCounts.put("stream", 1L);
    expectedWordCounts.put("test", 1L);
    expectedWordCounts.put("utils", 1L);
    expectedWordCounts.put("kafka", 2L);
    expectedWordCounts.put("using", 1L);

    testDriver.pipeInput(recordFactory.create(INPUT_TOPIC, inputRecords, 1L, 1000L)); // all records fall within the same 30s time window
    final Map<String, Long> actualWordCounts = getOutputList();
    assertThat(actualWordCounts).containsAllEntriesOf(expectedWordCounts).hasSameSizeAs(expectedWordCounts);
}
 
Example 16
@Override
@SuppressWarnings("deprecation")
public KeyValue<String, List<KeyValue<String, StockPerformance>>> punctuate(long timestamp) {
    List<KeyValue<String, StockPerformance>> stockPerformanceList = new ArrayList<>();
    // Close the store iterator when done to avoid leaking resources.
    try (KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all()) {
        while (performanceIterator.hasNext()) {
            KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
            StockPerformance stockPerformance = keyValue.value;

            if (stockPerformance != null) {
                if (stockPerformance.priceDifferential() >= differentialThreshold ||
                        stockPerformance.volumeDifferential() >= differentialThreshold) {
                    stockPerformanceList.add(keyValue);
                }
            }
        }
    }
    return stockPerformanceList.isEmpty() ? null : KeyValue.pair(null, stockPerformanceList);
}
 
Example 17
@StreamListener("input")
@SendTo({ "output1", "output2", "output3" })
@SuppressWarnings("unchecked")
public KStream<?, WordCount>[] process(KStream<Object, String> input) {

	Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
	Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
	Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

	return input
			.flatMapValues(
					value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.groupBy((key, value) -> value).windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
			.count(Materialized.as("WordCounts-multi")).toStream()
			.map((key, value) -> new KeyValue<>(null,
					new WordCount(key.key(), value,
							new Date(key.window().start()),
							new Date(key.window().end()))))
			.branch(isEnglish, isFrench, isSpanish);
}
 
Example 18
Source Project: kafka-streams-example   Source File: MetricsResource.java    License: Apache License 2.0
/**
 * Query local state store to extract metrics
 *
 * @return local Metrics
 */
private Metrics getLocalMetrics() {
    HostInfo thisInstance = GlobalAppState.getInstance().getHostPortInfo();
    KafkaStreams ks = GlobalAppState.getInstance().getKafkaStreams();

    String source = thisInstance.host() + ":" + thisInstance.port();
    Metrics localMetrics = new Metrics();

    ReadOnlyKeyValueStore<String, Double> averageStore = ks
            .store(storeName,
                    QueryableStoreTypes.<String, Double>keyValueStore());

    LOGGER.log(Level.INFO, "Entries in store {0}", averageStore.approximateNumEntries());
    // Iterate over every entry in the local store, closing the iterator when done.
    try (KeyValueIterator<String, Double> storeIterator = averageStore.all()) {
        while (storeIterator.hasNext()) {
            KeyValue<String, Double> kv = storeIterator.next();
            localMetrics.add(source, kv.key, String.valueOf(kv.value));
        }
    }
    LOGGER.log(Level.INFO, "Local store state {0}", localMetrics);
    return localMetrics;
}
 
Example 19
Source Project: kafka-graphs   Source File: StreamUtils.java    License: Apache License 2.0
public static <K, V> KTable<K, V> tableFromCollection(
    StreamsBuilder builder,
    Properties props,
    String topic,
    int numPartitions,
    short replicationFactor,
    Serde<K> keySerde,
    Serde<V> valueSerde,
    Collection<KeyValue<K, V>> values) {

    ClientUtils.createTopic(topic, numPartitions, replicationFactor, props);
    try (Producer<K, V> producer = new KafkaProducer<>(props, keySerde.serializer(), valueSerde.serializer())) {
        for (KeyValue<K, V> value : values) {
            ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, value.key, value.value);
            producer.send(producerRecord);
        }
        producer.flush();
    }
    return builder.table(topic, Consumed.with(keySerde, valueSerde), Materialized.with(keySerde, valueSerde));
}
 
Example 20
@Bean
public Function<KStream<Object, String>,KStream<?, WordCount>> process() {

	return input -> input
			.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.map((key, value) -> new KeyValue<>(value, value))
			.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
			.windowedBy(TimeWindows.of(5000))
			.count(Materialized.as("WordCounts-1"))
			.toStream()
			.map((key, value) -> new KeyValue<>(null,
					new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
}
 
Example 21
Source Project: kafka-graphs   Source File: GraphOperationsITCase.java    License: Apache License 2.0
@Test
public void testFilterEdges() throws Exception {
    Properties producerConfig = ClientUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class,
        LongSerializer.class, new Properties()
    );
    StreamsBuilder builder = new StreamsBuilder();

    KTable<Long, Long> vertices =
        StreamUtils.tableFromCollection(builder, producerConfig, Serdes.Long(), Serdes.Long(),
            TestGraphUtils.getLongLongVertices());

    KTable<Edge<Long>, Long> edges =
        StreamUtils.tableFromCollection(builder, producerConfig, new KryoSerde<>(), Serdes.Long(),
            TestGraphUtils.getLongLongEdges());

    KGraph<Long, Long, Long> graph = new KGraph<>(
        vertices, edges, GraphSerialized.with(Serdes.Long(), Serdes.Long(), Serdes.Long()));

    KTable<Edge<Long>, Long> data = graph.filterOnEdges((k, e) -> e > 34).edges();

    startStreams(builder, Serdes.Long(), Serdes.Long());

    Thread.sleep(5000);

    List<KeyValue<Edge<Long>, Long>> result = StreamUtils.listFromTable(streams, data);

    expectedResult = "3,5,35\n" +
        "4,5,45\n" +
        "5,1,51\n";

    compareResultAsTuples(result, expectedResult);
}
 
Example 22
Source Project: simplesource   Source File: AggregateTestHelper.java    License: Apache License 2.0
private void publishExpectingError(
    final K key,
    final Sequence readSequence,
    final C command,
    final Consumer<NonEmptyList<CommandError>> failureValidator
) {
    final CommandId commandId = publish(key, readSequence, command);

    final KeyValue<K, CommandResponse<K>>  updateResponse = testAPI.readCommandResponseTopic()
            .orElseGet(() -> fail("Didn't find command response"));
    assertEquals(commandId, updateResponse.value.commandId());
    assertEquals(readSequence, updateResponse.value.readSequence());
    updateResponse.value.sequenceResult().fold(
            reasons -> {
                failureValidator.accept(reasons);
                return null;
            },
            aggregateUpdate -> fail("Expected update failure for command " + command + " but got update " + aggregateUpdate));

    assertEquals(Optional.empty(), testAPI.readEventTopic());
    assertEquals(Optional.empty(), testAPI.readAggregateTopic());

    final Result<CommandError, Sequence> queryByCommandId = testAPI
        .queryCommandResult(commandId, Duration.ofSeconds(30))
        .unsafePerform(AggregateTestHelper::commandError);
    queryByCommandId.fold(
        reasons -> {
            failureValidator.accept(reasons);
            return null;
        },
        aggregateUpdate -> fail("Expected update failure for command " + command + " but got update " + aggregateUpdate));
}
 
Example 23
Source Project: simplesource   Source File: ResultDistributor.java    License: Apache License 2.0
static <K, V> void distribute(DistributorContext<K, V> ctx, final KStream<?, V> resultStream, final KStream<K, String> topicNameStream) {
    DistributorSerdes<K, V> serdes = ctx.serdes();
    long retentionMillis = ctx.responseWindowSpec().retentionInSeconds() * 1000L;

    KStream<String, V> joined = resultStream.selectKey((k, v) -> ctx.idMapper.apply(v))
            .join(topicNameStream,
                    Tuple2::of,
                    JoinWindows.of(retentionMillis).until(retentionMillis * 2 + 1),
                    Joined.with(serdes.uuid(), serdes.value(), Serdes.String()))
            .map((uuid, tuple) -> KeyValue.pair(String.format("%s:%s", tuple.v2(), ctx.keyToUuid.apply(uuid).toString()), tuple.v1()));

    // Strip the 37-character ":<uuid>" suffix (a colon plus a 36-character UUID) to recover the target topic name.
    joined.to((key, value, context) -> key.substring(0, key.length() - 37), Produced.with(Serdes.String(), serdes.value()));
}
 
Example 24
Source Project: kafka-graphs   Source File: EdgeStream.java    License: Apache License 2.0
@Override
public Iterable<KeyValue<Short, VV>> apply(Short key, VV vv) {
    if (!vv.equals(previousValue)) {
        previousValue = vv;
        return Collections.singletonList(new KeyValue<>(GLOBAL_KEY, vv));
    }
    return Collections.emptyList();
}
 
Example 25
public static <K, V> Stream<KeyValue<K, V>> toStream(KeyValueIterator<K, V> kvIterator) {
    if (kvIterator instanceof StreamToKeyValueIteratorAdapter) {
        return ((StreamToKeyValueIteratorAdapter<K, V>) kvIterator).stream;
    }
    return StreamSupport
        .stream(
            Spliterators.spliteratorUnknownSize(
                kvIterator,
                Spliterator.ORDERED | Spliterator.IMMUTABLE | Spliterator.NONNULL
            ),
            false
        )
        .onClose(kvIterator::close);
}
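One way to call this helper (a hedged sketch; the store variable and its types are assumed, not part of the example above): because the returned Stream registers kvIterator::close via onClose, wrapping it in try-with-resources guarantees the underlying store iterator is released.

// 'store' is assumed to be a ReadOnlyKeyValueStore<String, Long> obtained elsewhere.
try (Stream<KeyValue<String, Long>> entries = toStream(store.all())) {
    entries.filter(kv -> kv.value > 10L)
           .forEach(kv -> System.out.println(kv.key + " -> " + kv.value));
}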
 
Example 26
@Override
public KeyValueIterator<K, V> all() {
    StreamObserverSpliterator<io.apicurio.registry.streams.distore.proto.KeyValue> observer = new StreamObserverSpliterator<>();
    stub.all(
        VoidReq.newBuilder()
               .setStoreName(storeName)
               .build(),
        observer
    );
    return keyValueIterator(observer.stream());
}
 
Example 27
@StreamListener
@SendTo("output")
public KStream<?, KafkaStreamsBinderWordCountIntegrationTests.WordCount> process(
		@Input("input") KStream<Object, String> input) {

	return input
			.flatMapValues(
					value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.map((key, value) -> new KeyValue<>(value, value))
			.groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
			.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts"))
			.toStream()
			.map((key, value) -> new KeyValue<>(null, null));
}
 
Example 28
@Test
public void testSumOfOutNeighborsNoValue() throws Exception {
    Properties producerConfig = ClientUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class,
        LongSerializer.class, new Properties()
    );
    StreamsBuilder builder = new StreamsBuilder();

    KTable<Long, Long> vertices =
        StreamUtils.tableFromCollection(builder, producerConfig, Serdes.Long(), Serdes.Long(),
            TestGraphUtils.getLongLongVertices());

    KTable<Edge<Long>, Long> edges =
        StreamUtils.tableFromCollection(builder, producerConfig, new KryoSerde<>(), Serdes.Long(),
            TestGraphUtils.getLongLongEdges());

    KGraph<Long, Long, Long> graph = new KGraph<>(
        vertices, edges, GraphSerialized.with(Serdes.Long(), Serdes.Long(), Serdes.Long()));

    KTable<Long, Long> verticesWithSumOfOutNeighborValues =
        graph.reduceOnNeighbors((v1, v2) -> v1 + v2, EdgeDirection.OUT);

    startStreams(builder, Serdes.Long(), Serdes.Long());

    Thread.sleep(5000);

    List<KeyValue<Long, Long>> result = StreamUtils.listFromTable(streams, verticesWithSumOfOutNeighborValues);

    expectedResult = "1,5\n" +
        "2,3\n" +
        "3,9\n" +
        "4,5\n" +
        "5,1\n";

    compareResultAsTuples(result, expectedResult);
}
 
Example 29
Source Project: kafka-graphs   Source File: ReduceOnEdgesMethodsITCase.java    License: Apache License 2.0
@Test
public void testLowestWeightOutNeighborNoValue() throws Exception {
    Properties producerConfig = ClientUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class,
        LongSerializer.class, new Properties()
    );
    StreamsBuilder builder = new StreamsBuilder();

    KTable<Long, Long> vertices =
        StreamUtils.tableFromCollection(builder, producerConfig, Serdes.Long(), Serdes.Long(),
            TestGraphUtils.getLongLongVertices());

    KTable<Edge<Long>, Long> edges =
        StreamUtils.tableFromCollection(builder, producerConfig, new KryoSerde<>(), Serdes.Long(),
            TestGraphUtils.getLongLongEdges());

    KGraph<Long, Long, Long> graph = new KGraph<>(
        vertices, edges, GraphSerialized.with(Serdes.Long(), Serdes.Long(), Serdes.Long()));

    KTable<Long, Long> verticesWithLowestOutNeighbor =
        graph.reduceOnEdges(new SelectMinWeightNeighborNoValue(), EdgeDirection.OUT);

    startStreams(builder, Serdes.Long(), Serdes.Long());

    Thread.sleep(5000);

    List<KeyValue<Long, Long>> result = StreamUtils.listFromTable(streams, verticesWithLowestOutNeighbor);

    expectedResult = "1,12\n" +
        "2,23\n" +
        "3,34\n" +
        "4,45\n" +
        "5,51\n";

    TestUtils.compareResultAsTuples(result, expectedResult);
}
 
Example 30
Source Project: kafka-graphs   Source File: JoinWithVerticesITCase.java    License: Apache License 2.0
@Test
public void testJoinWithVertexSet() throws Exception {
    Properties producerConfig = ClientUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class,
        LongSerializer.class, new Properties()
    );
    StreamsBuilder builder = new StreamsBuilder();

    KTable<Long, Long> vertices =
        StreamUtils.tableFromCollection(builder, producerConfig, Serdes.Long(), Serdes.Long(),
            TestGraphUtils.getLongLongVertices());

    KTable<Edge<Long>, Long> edges =
        StreamUtils.tableFromCollection(builder, producerConfig, new KryoSerde<>(), Serdes.Long(),
            TestGraphUtils.getLongLongEdges());

    KGraph<Long, Long, Long> graph = new KGraph<>(
        vertices, edges, GraphSerialized.with(Serdes.Long(), Serdes.Long(), Serdes.Long()));

    KGraph<Long, Long, Long> res = graph.joinWithVertices(graph.vertices()
        .mapValues(v -> v), new AddValuesMapper());

    KTable<Long, Long> data = res.vertices();

    startStreams(builder, Serdes.Long(), Serdes.Long());

    Thread.sleep(5000);

    List<KeyValue<Long, Long>> result = StreamUtils.listFromTable(streams, data);

    expectedResult = "1,2\n" +
        "2,4\n" +
        "3,6\n" +
        "4,8\n" +
        "5,10\n";

    TestUtils.compareResultAsTuples(result, expectedResult);
}