org.apache.kafka.streams.kstream.KeyValueMapper Java Examples

The following examples show how to use org.apache.kafka.streams.kstream.KeyValueMapper. Each example notes the project and source file it is drawn from.
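
Before the examples, a quick orientation: KeyValueMapper<K, V, VR> is a single-method interface whose apply(key, value) derives a value of type VR from a record, and it backs DSL operations such as KStream#selectKey, KStream#map, and KStream#flatMap. A minimal sketch, assuming String-keyed String records and placeholder topic names:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Produced;

public class KeyValueMapperSketch {
    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> orders =
            builder.stream("orders", Consumed.with(Serdes.String(), Serdes.String()));

        // Re-key each record by the first comma-separated field of its value.
        KeyValueMapper<String, String, String> firstField =
            (key, value) -> value.split(",", 2)[0];

        orders.selectKey(firstField)
              .to("orders-by-field", Produced.with(Serdes.String(), Serdes.String()));
        // Topology wiring only; building and starting KafkaStreams is omitted.
    }
}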
Example #1
Source File: EdgeStream.java    From kafka-graphs with Apache License 2.0
/**
 * Returns a global aggregate on the previously split vertex stream
 *
 * @param edgeMapper     the mapper that converts the edge stream to a vertex stream
 * @param vertexMapper   the mapper that aggregates vertex values
 * @param collectUpdates boolean specifying whether the aggregate should only be collected when there is an update
 * @param <VV>           the return value type
 * @return a stream of the aggregated values
 */
@Override
public <VV> KStream<Short, VV> globalAggregate(
    KeyValueMapper<Edge<K>, EV, Iterable<KeyValue<K, VV>>> edgeMapper,
    KeyValueMapper<K, VV, Iterable<KeyValue<Short, VV>>> vertexMapper, boolean collectUpdates
) {

    KStream<Short, VV> result = edges.flatMap(edgeMapper).flatMap(vertexMapper);

    if (collectUpdates) {
        result = result.flatMap(new GlobalAggregateMapper<VV>());
    }

    return result;
}
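
A hypothetical call site for the method above: the first mapper emits a count of 1 per source vertex, and the second re-keys every per-vertex count onto a single Short key so one aggregate sees all updates. The edgeStream variable and the Edge#source() accessor are assumptions about the kafka-graphs API, not taken from this file:

// Hypothetical: assumes edgeStream is an EdgeStream<Long, Double> and Edge#source() exists.
KeyValueMapper<Edge<Long>, Double, Iterable<KeyValue<Long, Long>>> degreeMapper =
    (edge, weight) -> Collections.singletonList(KeyValue.pair(edge.source(), 1L));

// Collapse all per-vertex counts onto one key so the aggregate is global.
KeyValueMapper<Long, Long, Iterable<KeyValue<Short, Long>>> globalMapper =
    (vertexId, count) -> Collections.singletonList(KeyValue.pair((short) 0, count));

KStream<Short, Long> globalDegrees =
    edgeStream.globalAggregate(degreeMapper, globalMapper, true);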
 
Example #2
Source File: TwitterStreamsAnalyzer.java    From kafka-streams with Apache License 2.0
public void run()  {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    JsonSerializer<Tweet> tweetJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<Tweet> tweetJsonDeserializer = new JsonDeserializer<>(Tweet.class);
    Serde<Tweet> tweetSerde = Serdes.serdeFrom(tweetJsonSerializer, tweetJsonDeserializer);

    KStreamBuilder kStreamBuilder = new KStreamBuilder();

    Classifier classifier = new Classifier();
    classifier.train(new File("src/main/resources/kafkaStreamsTwitterTrainingData_clean.csv"));

    KeyValueMapper<String, Tweet, String> languageToKey = (k, v) ->
        StringUtils.isNotBlank(v.getText()) ? classifier.classify(v.getText()) : "unknown";

    Predicate<String, Tweet> isEnglish = (k, v) -> k.equals("english");
    Predicate<String, Tweet> isFrench =  (k, v) -> k.equals("french");
    Predicate<String, Tweet> isSpanish = (k, v) -> k.equals("spanish");

    KStream<String, Tweet> tweetKStream = kStreamBuilder.stream(Serdes.String(), tweetSerde, "twitterData");

    KStream<String, Tweet>[] filteredStreams = tweetKStream.selectKey(languageToKey).branch(isEnglish, isFrench, isSpanish);

    filteredStreams[0].to(Serdes.String(), tweetSerde, "english");
    filteredStreams[1].to(Serdes.String(), tweetSerde, "french");
    filteredStreams[2].to(Serdes.String(), tweetSerde, "spanish");

    kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
    System.out.println("Starting twitter analysis streams");
    kafkaStreams.start();
    System.out.println("Started");

}
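
This example is written against the pre-1.0 KStreamBuilder API. A sketch of the same wiring on the current StreamsBuilder API, reusing the mapper and predicates defined above (serde placement moves into Consumed and Produced):

StreamsBuilder builder = new StreamsBuilder();
KStream<String, Tweet> tweetStream =
    builder.stream("twitterData", Consumed.with(Serdes.String(), tweetSerde));

// Re-key by detected language, then split into one stream per language.
KStream<String, Tweet>[] byLanguage =
    tweetStream.selectKey(languageToKey).branch(isEnglish, isFrench, isSpanish);

byLanguage[0].to("english", Produced.with(Serdes.String(), tweetSerde));
byLanguage[1].to("french", Produced.with(Serdes.String(), tweetSerde));
byLanguage[2].to("spanish", Produced.with(Serdes.String(), tweetSerde));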
 
Example #3
Source File: KafkaStreamsITest.java    From java-specialagent with Apache License 2.0
public static void main(final String[] args) throws Exception {
  final EmbeddedKafkaRule embeddedKafkaRule = TestUtil.retry(new Callable<EmbeddedKafkaRule>() {
    @Override
    public EmbeddedKafkaRule call() {
      final EmbeddedKafkaRule rule = new EmbeddedKafkaRule(1, true, 1, "stream-test");
      try {
        rule.before();
        return rule;
      }
      catch (final Exception e) {
        rule.after();
        throw e;
      }
    }
  }, 10);

  final Map<String,Object> senderProps = KafkaTestUtils.producerProps(embeddedKafkaRule.getEmbeddedKafka());
  try (final Producer<Integer,String> producer = new KafkaProducer<>(senderProps)) {
    final CountDownLatch latch = TestUtil.initExpectedSpanLatch(4);

    final Properties config = new Properties();
    config.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-app");
    config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, senderProps.get("bootstrap.servers"));
    config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    final ProducerRecord<Integer,String> record = new ProducerRecord<>("stream-test", 1, "test");
    producer.send(record);

    final Serde<String> stringSerde = Serdes.String();
    final Serde<Integer> intSerde = Serdes.Integer();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer,String> kStream = builder.stream("stream-test");

    kStream.map(new KeyValueMapper<Integer,String,KeyValue<Integer,String>>() {
      @Override
      public KeyValue<Integer,String> apply(final Integer key, final String value) {
        TestUtil.checkActiveSpan();
        return new KeyValue<>(key, value + "map");
      }
    }).to("stream-out", Produced.with(intSerde, stringSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), config);
    streams.start();

    TestUtil.checkSpan(true, latch, new ComponentSpanCount("java-kafka", 3), new ComponentSpanCount("kafka-streams", 1));
    streams.close();
  }
  catch (final Throwable t) {
    t.printStackTrace(System.err);
    embeddedKafkaRule.after();
    System.exit(1);
  }
  finally {
    embeddedKafkaRule.after();
    System.exit(0);
  }
}
 
Example #4
Source File: KafkaStreamsJoinsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        StreamsBuilder builder = new StreamsBuilder();


        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<String> stringSerde = Serdes.String();

        KeyValueMapper<String, Purchase, KeyValue<String,Purchase>> custIdCCMasking = (k, v) -> {
            Purchase masked = Purchase.builder(v).maskCreditCard().build();
            return new KeyValue<>(masked.getCustomerId(), masked);
        };


        Predicate<String, Purchase> coffeePurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> electronicPurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int COFFEE_PURCHASE = 0;
        int ELECTRONICS_PURCHASE = 1;

        KStream<String, Purchase> transactionStream = builder.stream( "transactions", Consumed.with(Serdes.String(), purchaseSerde)).map(custIdCCMasking);

        KStream<String, Purchase>[] branchesStream = transactionStream.selectKey((k,v)-> v.getCustomerId()).branch(coffeePurchase, electronicPurchase);

        KStream<String, Purchase> coffeeStream = branchesStream[COFFEE_PURCHASE];
        KStream<String, Purchase> electronicsStream = branchesStream[ELECTRONICS_PURCHASE];

        ValueJoiner<Purchase, Purchase, CorrelatedPurchase> purchaseJoiner = new PurchaseJoiner();
        JoinWindows twentyMinuteWindow =  JoinWindows.of(60 * 1000 * 20);

        KStream<String, CorrelatedPurchase> joinedKStream = coffeeStream.join(electronicsStream,
                                                                              purchaseJoiner,
                                                                              twentyMinuteWindow,
                                                                              Joined.with(stringSerde,
                                                                                          purchaseSerde,
                                                                                          purchaseSerde));

        joinedKStream.print(Printed.<String, CorrelatedPurchase>toSysOut().withLabel("joined KStream"));

        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        LOG.info("Starting Join Examples");
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Join Examples now");
        kafkaStreams.close();
        MockDataProducer.shutdown();


    }
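
On Kafka clients 2.1 and newer, the millisecond arithmetic for the join window can be replaced with the Duration overload, which reads more directly:

// Equivalent window using java.time.Duration (Kafka 2.1+).
JoinWindows twentyMinuteWindow = JoinWindows.of(Duration.ofMinutes(20));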
 
Example #5
Source File: EdgeStream.java    From kafka-graphs with Apache License 2.0
/**
 * Apply a function to the attribute of each edge in the graph stream.
 *
 * @param mapper the map function to apply.
 * @return a new graph stream.
 */
@Override
public <NV> EdgeStream<K, NV> mapEdges(
    final KeyValueMapper<Edge<K>, EV, KeyValue<Edge<K>, NV>> mapper, Serde<NV> newValueSerde
) {
    KStream<Edge<K>, NV> mappedEdges = edges.map(mapper);
    return new EdgeStream<K, NV>(
        mappedEdges,
        GraphSerialized.with(serialized.keySerde(), serialized.vertexValueSerde(), newValueSerde)
    );
}
 
Example #6
Source File: FindDistinctEvents.java    From kafka-tutorials with Apache License 2.0
/**
 * @param maintainDurationPerEventInMs how long to "remember" a known ID (an IP
 *                                     address in this example); while an ID is
 *                                     remembered, incoming duplicates are dropped,
 *                                     de-duplicating the input.
 * @param idExtractor                  extracts a unique identifier from a record by which we de-duplicate input
 *                                     records; if it returns null, the record will not be considered for
 *                                     de-duping but forwarded as-is.
 */
DeduplicationTransformer(final long maintainDurationPerEventInMs, final KeyValueMapper<K, V, E> idExtractor) {
    if (maintainDurationPerEventInMs < 1) {
        throw new IllegalArgumentException("maintain duration per event must be >= 1");
    }
    leftDurationMs = maintainDurationPerEventInMs / 2;
    rightDurationMs = maintainDurationPerEventInMs - leftDurationMs;
    this.idExtractor = idExtractor;
}
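
A hypothetical call site for this constructor; the "clicks" topics, the "distinct-ids" window store, and the Click#getIp() accessor are illustrative names, not from the source:

KStream<String, Click> clicks = builder.stream("clicks");
KStream<String, Click> distinct = clicks.transform(
    () -> new DeduplicationTransformer<>(
        Duration.ofMinutes(2).toMillis(),   // how long to "remember" an id
        (key, value) -> value.getIp()),     // idExtractor: de-duplicate by IP
    "distinct-ids");                        // window store backing the transformer
distinct.to("distinct-clicks");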
 
Example #7
Source File: EdgeStream.java    From kafka-graphs with Apache License 2.0
/**
 * The aggregate function splits the edge stream into a vertex stream and applies
 * a mapper to the resulting vertices.
 *
 * @param edgeMapper   the mapper that converts the edge stream to a vertex stream
 * @param vertexMapper the mapper that aggregates vertex values
 * @param <VV>         the vertex value used
 * @return a stream of vertices with the aggregated vertex value
 */
@Override
public <VV> KStream<K, VV> aggregate(
    KeyValueMapper<Edge<K>, EV, Iterable<KeyValue<K, VV>>> edgeMapper,
    KeyValueMapper<K, VV, KeyValue<K, VV>> vertexMapper
) {
    return edges.flatMap(edgeMapper)
        .map(vertexMapper);
}
 
Example #8
Source File: KafkaStreamsTracing.java    From brave with Apache License 2.0
/**
 * Create a map transformer, similar to {@link KStream#map(KeyValueMapper)}, where its mapper
 * action will be recorded in a new span with the indicated name.
 *
 * <p>Simple example using Kafka Streams DSL:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * builder.stream(inputTopic)
 *        .transform(kafkaStreamsTracing.map("myMap", (k, v) -> ...))
 *        .to(outputTopic);
 * }</pre>
 */
public <K, V, KR, VR> TransformerSupplier<K, V, KeyValue<KR, VR>> map(String spanName,
  KeyValueMapper<K, V, KeyValue<KR, VR>> mapper) {
  return new TracingTransformerSupplier<>(this, spanName, () ->
    new AbstractTracingTransformer<K, V, KeyValue<KR, VR>>() {
      @Override public KeyValue<KR, VR> transform(K key, V value) {
        return mapper.apply(key, value);
      }
    });
}
 
Example #9
Source File: KafkaStreamsTracing.java    From brave with Apache License 2.0
/**
 * Create a flatMap transformer, similar to {@link KStream#flatMap(KeyValueMapper)}, where its
 * mapper action will be recorded in a new span with the indicated name.
 *
 * <p>Simple example using Kafka Streams DSL:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * builder.stream(inputTopic)
 *        .flatTransform(kafkaStreamsTracing.flatMap("myflatMap", (k, v) -> ...))
 *        .to(outputTopic);
 * }</pre>
 */
public <K, V, KR, VR> TransformerSupplier<K, V, Iterable<KeyValue<KR, VR>>> flatMap(
  String spanName,
  KeyValueMapper<K, V, Iterable<KeyValue<KR, VR>>> mapper) {
  return new TracingTransformerSupplier<>(this, spanName, () ->
    new AbstractTracingTransformer<K, V, Iterable<KeyValue<KR, VR>>>() {
      @Override public Iterable<KeyValue<KR, VR>> transform(K key, V value) {
        return mapper.apply(key, value);
      }
    });
}
 
Example #10
Source File: ZMartKafkaStreamsAdvancedReqsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();


        // previous requirements
        KStream<String,Purchase> purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print( Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));



           // selecting a key for storage and filtering out low dollar purchases


        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.print(Printed.<Long, Purchase>toSysOut().withLabel("purchases"));
        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));



         // branching stream for separating out purchases in new departments to their own topics

        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to( "coffee", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[coffee].print(Printed.<String, Purchase>toSysOut().withLabel( "coffee"));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[electronics].print(Printed.<String, Purchase>toSysOut().withLabel("electronics"));




         // security requirement: record transactions for a certain employee
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) ->
                SecurityDBService.saveRecord(purchase.getPurchaseDate(), purchase.getEmployeeId(), purchase.getItemPurchased());

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);


        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("ZMart Advanced Requirements Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example #11
Source File: GlobalKTableExample.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<String> stringSerde = Serdes.String();
        Serde<StockTransaction> transactionSerde = StreamsSerdes.StockTransactionSerde();
        Serde<TransactionSummary> transactionSummarySerde = StreamsSerdes.TransactionSummarySerde();


        StreamsBuilder builder = new StreamsBuilder();
        long twentySeconds = 1000 * 20;

        KeyValueMapper<Windowed<TransactionSummary>, Long, KeyValue<String, TransactionSummary>> transactionMapper = (window, count) -> {
            TransactionSummary transactionSummary = window.key();
            String newKey = transactionSummary.getIndustry();
            transactionSummary.setSummaryCount(count);
            return KeyValue.pair(newKey, transactionSummary);
        };

        KStream<String, TransactionSummary> countStream =
                builder.stream( STOCK_TRANSACTIONS_TOPIC, Consumed.with(stringSerde, transactionSerde).withOffsetResetPolicy(LATEST))
                        .groupBy((noKey, transaction) -> TransactionSummary.from(transaction), Serialized.with(transactionSummarySerde, transactionSerde))
                        .windowedBy(SessionWindows.with(twentySeconds)).count()
                        .toStream().map(transactionMapper);

        GlobalKTable<String, String> publicCompanies = builder.globalTable(COMPANIES.topicName());
        GlobalKTable<String, String> clients = builder.globalTable(CLIENTS.topicName());


        countStream.leftJoin(publicCompanies, (key, txn) -> txn.getStockTicker(), TransactionSummary::withCompanyName)
                .leftJoin(clients, (key, txn) -> txn.getCustomerId(), TransactionSummary::withCustomerName)
                .print(Printed.<String, TransactionSummary>toSysOut().withLabel("Resolved Transaction Summaries"));


        
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);


        kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
            LOG.error("had exception ", e);
        });

        CustomDateGenerator dateGenerator = CustomDateGenerator.withTimestampsIncreasingBy(Duration.ofMillis(750));

        DataGenerator.setTimestampGenerator(dateGenerator::get);

        MockDataProducer.produceStockTransactions(2, 5, 3, true);

        LOG.info("Starting GlobalKTable Example");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the GlobalKTable Example Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example #12
Source File: ZMartKafkaStreamsAdvancedReqsMetricsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder streamsBuilder = new StreamsBuilder();


        /**
         * Previous requirements
         */
        KStream<String,Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));


        /**
         *  Selecting a key for storage and filtering out low dollar purchases
         */

        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));


        /**
         * Branching stream for separating out purchases in new departments to their own topics
         */
        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to("coffee", Produced.with(stringSerde, purchaseSerde));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));



        /**
         * Security requirement: record transactions for a certain employee
         */
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) -> { }; // no-op here; Example #10 shows the persisting version

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);

        Topology topology = streamsBuilder.build();


        KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);

        KafkaStreams.StateListener stateListener = (newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                LOG.info("Application has gone from REBALANCING to RUNNING ");
                LOG.info("Topology Layout {}", streamsBuilder.build().describe());
            }

            if (newState == KafkaStreams.State.REBALANCING) {
                LOG.info("Application is entering REBALANCING phase");
            }
        };

        kafkaStreams.setStateListener(stateListener);
        LOG.info("ZMart Advanced Requirements Metrics Application Started");
        kafkaStreams.cleanUp();
        CountDownLatch stopSignal = new CountDownLatch(1);

        Runtime.getRuntime().addShutdownHook(new Thread(()-> {
            LOG.info("Shutting down the Kafka Streams Application now");
            kafkaStreams.close();
            MockDataProducer.shutdown();
            stopSignal.countDown();
        }));



        MockDataProducer.producePurchaseData(DataGenerator.DEFAULT_NUM_PURCHASES, 250, DataGenerator.NUMBER_UNIQUE_CUSTOMERS);
        kafkaStreams.start();

        stopSignal.await();
        LOG.info("All done now, good-bye");
    }
 
Example #13
Source File: KGraphStream.java    From kafka-graphs with Apache License 2.0
/**
 * Apply a function to the attribute of each edge in the graph stream.
 *
 * @param <NV> the new vertex value type
 * @param mapper the map function to apply.
 * @param newValueSerde the new value serde
 * @return a new graph stream.
 */
<NV> KGraphStream<K, VV, NV> mapEdges(
    final KeyValueMapper<Edge<K>, EV, KeyValue<Edge<K>, NV>> mapper, Serde<NV> newValueSerde);
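
A hypothetical call site for this signature, doubling each edge weight while keeping the edge itself as the key; the Long/Double type parameters and the edgeStream variable are illustrative assumptions:

KGraphStream<Long, Void, Double> scaled = edgeStream.mapEdges(
    (edge, weight) -> KeyValue.pair(edge, weight * 2.0),  // same edge, doubled weight
    Serdes.Double());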
 
Example #14
Source File: KGraphStream.java    From kafka-graphs with Apache License 2.0
/**
 * The aggregate function splits the edge stream into a vertex stream and applies
 * a mapper to the resulting vertices.
 *
 * @param edgeMapper   the mapper that converts the edge stream to a vertex stream
 * @param vertexMapper the mapper that aggregates vertex values
 * @param <VV>         the vertex value used
 * @return a stream of vertices with the aggregated vertex value
 */
<VV> KStream<K, VV> aggregate(
    KeyValueMapper<Edge<K>, EV, Iterable<KeyValue<K, VV>>> edgeMapper,
    KeyValueMapper<K, VV, KeyValue<K, VV>> vertexMapper);
 
Example #15
Source File: KGraphStream.java    From kafka-graphs with Apache License 2.0
/**
 * Returns a global aggregate on the previously split vertex stream
 *
 * @param edgeMapper     the mapper that converts the edge stream to a vertex stream
 * @param vertexMapper   the mapper that aggregates vertex values
 * @param collectUpdates boolean specifying whether the aggregate should only be collected when there is an update
 * @param <VV>           the return value type
 * @return a stream of the aggregated values
 */
<VV> KStream<Short, VV> globalAggregate(
    KeyValueMapper<Edge<K>, EV, Iterable<KeyValue<K, VV>>> edgeMapper,
    KeyValueMapper<K, VV, Iterable<KeyValue<Short, VV>>> vertexMapper, boolean collectUpdates);