org.apache.kafka.streams.kstream.Predicate Java Examples

The following examples show how to use org.apache.kafka.streams.kstream.Predicate, drawn from a range of open-source projects. The source file, project, and license are noted above each example.
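As a quick orientation before the examples: Predicate<K, V> is a functional interface with a single test(key, value) method, consumed by operators such as KStream#filter, KStream#filterNot, and KStream#branch. Here is a minimal sketch; the topic names and String-typed records are illustrative assumptions, not taken from any example below.

import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Predicate;

public class PredicateSketch {
    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        // Hypothetical input topic; String key/value serdes assumed via the Streams config.
        KStream<String, String> words = builder.stream("words");

        // A Predicate sees both the record key and value and returns a boolean.
        Predicate<String, String> nonEmpty = (key, value) -> value != null && !value.isEmpty();

        words.filter(nonEmpty).to("non-empty-words");
        // builder.build() would then be handed to new KafkaStreams(topology, props) and started.
    }
}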
Example #1
Source File: SqlPredicate.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Predicate<String, GenericRow> getStringKeyPredicate() {
  final ExpressionMetadata expressionEvaluator = createExpressionMetadata();

  return (key, row) -> {
    try {
      Kudf[] kudfs = expressionEvaluator.getUdfs();
      Object[] values = new Object[columnIndexes.length];
      for (int i = 0; i < values.length; i++) {
        if (columnIndexes[i] < 0) {
          values[i] = kudfs[i];
        } else {
          values[i] = genericRowValueTypeEnforcer.enforceFieldType(columnIndexes[i], row
              .getColumns().get(columnIndexes[i]));
        }
      }
      // 'ee' is the compiled expression evaluator, a field of the enclosing SqlPredicate class (not shown in this excerpt).
      return (Boolean) ee.evaluate(values);
    } catch (Exception e) {
      log.error(e.getMessage(), e);
    }
    log.error("Invalid format: " + key + " : " + row);
    return false;
  };
}
 
Example #2
Source File: SqlPredicate.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Predicate getWindowedKeyPredicate() {
  final ExpressionMetadata expressionEvaluator = createExpressionMetadata();
  return (Predicate<Windowed<String>, GenericRow>) (key, row) -> {
    try {
      Kudf[] kudfs = expressionEvaluator.getUdfs();
      Object[] values = new Object[columnIndexes.length];
      for (int i = 0; i < values.length; i++) {
        if (columnIndexes[i] < 0) {
          values[i] = kudfs[i];
        } else {
          values[i] = genericRowValueTypeEnforcer
              .enforceFieldType(
                  columnIndexes[i],
                  row.getColumns().get(columnIndexes[i]
                  )
              );
        }
      }
      // 'ee' is the compiled expression evaluator, a field of the enclosing SqlPredicate class (not shown in this excerpt).
      return (Boolean) ee.evaluate(values);
    } catch (Exception e) {
      log.error(e.getMessage(), e);
    }
    log.error("Invalid format: " + key + " : " + row);
    return false;
  };
}
 
Example #3
Source File: KGraph.java    From kafka-graphs with Apache License 2.0
public KGraph<K, VV, EV> subgraph(Predicate<K, VV> vertexFilter, Predicate<Edge<K>, EV> edgeFilter) {
    KTable<K, VV> filteredVertices = vertices.filter(vertexFilter);

    KTable<Edge<K>, EV> remainingEdges = edgesBySource()
        .join(filteredVertices, (e, v) -> e, Joined.with(keySerde(), new KryoSerde<>(), vertexValueSerde()))
        .map((k, edge) -> new KeyValue<>(edge.target(), edge))
        .join(filteredVertices, (e, v) -> e, Joined.with(keySerde(), new KryoSerde<>(), vertexValueSerde()))
        .map((k, edge) -> new KeyValue<>(new Edge<>(edge.source(), edge.target()), edge.value()))
        .groupByKey(Grouped.with(new KryoSerde<>(), edgeValueSerde()))
        .reduce((v1, v2) -> v2, Materialized.with(new KryoSerde<>(), edgeValueSerde()));

    KTable<Edge<K>, EV> filteredEdges = remainingEdges
        .filter(edgeFilter, Materialized.<Edge<K>, EV, KeyValueStore<Bytes, byte[]>>as(generateStoreName()).withKeySerde(new KryoSerde<>()).withValueSerde(edgeValueSerde()));

    return new KGraph<>(filteredVertices, filteredEdges, serialized);
}
 
Example #4
Source File: KafkaStreamsBranchingSample.java    From spring-cloud-stream-samples with Apache License 2.0
@Bean
@SuppressWarnings("unchecked")
public Function<KStream<Object, String>, KStream<?, WordCount>[]> process() {

	Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
	Predicate<Object, WordCount> isFrench =  (k, v) -> v.word.equals("french");
	Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

	return input -> input
			.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.groupBy((key, value) -> value)
			.windowedBy(TimeWindows.of(Duration.ofSeconds(6)))
			.count(Materialized.as("WordCounts-1"))
			.toStream()
			.map((key, value) -> new KeyValue<>(null,
					new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))))
			.branch(isEnglish, isFrench, isSpanish);
}
 
Example #5
Source File: KafkaStreamsBinderWordCountBranchesFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Bean
@SuppressWarnings("unchecked")
public Function<KStream<Object, String>, KStream<?, WordCount>[]> process() {

	Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
	Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
	Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

	return input -> input
			.flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.groupBy((key, value) -> value)
			.windowedBy(TimeWindows.of(5000))
			.count(Materialized.as("WordCounts-branch"))
			.toStream()
			.map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value,
					new Date(key.window().start()), new Date(key.window().end()))))
			.branch(isEnglish, isFrench, isSpanish);
}
 
Example #6
Source File: WordCountMultipleBranchesIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener("input")
@SendTo({ "output1", "output2", "output3" })
@SuppressWarnings("unchecked")
public KStream<?, WordCount>[] process(KStream<Object, String> input) {

	Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
	Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
	Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

	return input
			.flatMapValues(
					value -> Arrays.asList(value.toLowerCase().split("\\W+")))
			.groupBy((key, value) -> value).windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
			.count(Materialized.as("WordCounts-multi")).toStream()
			.map((key, value) -> new KeyValue<>(null,
					new WordCount(key.key(), value,
							new Date(key.window().start()),
							new Date(key.window().end()))))
			.branch(isEnglish, isFrench, isSpanish);
}
 
Example #7
Source File: GeoLocationStreams.java    From Microservices-Deployment-Cookbook with MIT License
@PostConstruct
public void init() {
	Map<String, Object> props = new HashMap<>();
	props.put(StreamsConfig.APPLICATION_ID_CONFIG, "geolocation-application");
	props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.99.100:9092");
	props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, GeoLocationSerdes.class.getName());

	StreamsConfig config = new StreamsConfig(props);
	KStreamBuilder builder = new KStreamBuilder();

	builder.stream("geolocationStreams").filter(new Predicate<Object, Object>() {
		@Override
		public boolean test(Object key, Object value) {
			GeoLocation geolocation = (GeoLocation) value;
			System.out.println("Stream received => " + value);
			return geolocation.getLatitude() >= -90 
					&& geolocation.getLatitude() < 90 
					&& geolocation.getLongitude() >= -180 
					&& geolocation.getLongitude() < 180;
		}
	}).to("geolocations");

	KafkaStreams streams = new KafkaStreams(builder, config);
	streams.start();
}
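Since Predicate is a single-method interface, the anonymous class above can equivalently be written as a lambda. A sketch of the same filter in lambda form, assuming the same GeoLocation type and topics:

	builder.stream("geolocationStreams").filter((key, value) -> {
		GeoLocation geolocation = (GeoLocation) value;
		System.out.println("Stream received => " + value);
		// Accept only coordinates inside the valid latitude/longitude ranges.
		return geolocation.getLatitude() >= -90 && geolocation.getLatitude() < 90
				&& geolocation.getLongitude() >= -180 && geolocation.getLongitude() < 180;
	}).to("geolocations");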
 
Example #8
Source File: KafkaDenormalizer.java    From cqrs-eventsourcing-kafka with Apache License 2.0
@Override
public void start() throws Exception {
    Predicate<String, EventEnvelope> inventoryItemCreated = (k, v) -> k.equals(InventoryItemCreated.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemRenamed =  (k, v) -> k.equals(InventoryItemRenamed.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemDeactivated = (k, v) -> k.equals(InventoryItemDeactivated.class.getSimpleName());

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, EventEnvelope>[] filteredStreams = builder
            .stream(INVENTORY_ITEM_TOPIC, Consumed.with(Serdes.String(), initializeEnvelopeSerde()))
            .selectKey((k, v) -> v.eventType)
            .branch(inventoryItemCreated, inventoryItemRenamed, inventoryItemDeactivated);

    filteredStreams[0].process(InventoryItemCreatedHandler::new);
    filteredStreams[1].process(InventoryItemRenamedHandler::new);
    filteredStreams[2].process(InventoryItemDeactivatedHandler::new);

    kafkaStreams = new KafkaStreams(builder.build(), getProperties());
    kafkaStreams.cleanUp(); // -- only because we are using in-memory
    kafkaStreams.start();
}
 
Example #9
Source File: SqlPredicate.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
Predicate getPredicate() {
  if (isWindowedKey) {
    return getWindowedKeyPredicate();
  } else {
    return getStringKeyPredicate();
  }
}
 
Example #10
Source File: TracingFilterValueTransformerWithKeySupplier.java    From brave with Apache License 2.0
public TracingFilterValueTransformerWithKeySupplier(KafkaStreamsTracing kafkaStreamsTracing,
  String spanName, Predicate<K, V> delegatePredicate, boolean filterNot) {
  this.kafkaStreamsTracing = kafkaStreamsTracing;
  this.spanName = spanName;
  this.delegatePredicate = delegatePredicate;
  this.filterNot = filterNot;
}
 
Example #11
Source File: TracingFilter.java    From brave with Apache License 2.0
TracingFilter(KafkaStreamsTracing kafkaStreamsTracing, String spanName,
  Predicate<K, V> delegatePredicate, boolean filterNot) {
  this.kafkaStreamsTracing = kafkaStreamsTracing;
  this.tracer = kafkaStreamsTracing.tracer;
  this.spanName = spanName;
  this.delegatePredicate = delegatePredicate;
  this.filterNot = filterNot;
}
 
Example #12
Source File: TracingFilterTransformerSupplier.java    From brave with Apache License 2.0
public TracingFilterTransformerSupplier(KafkaStreamsTracing kafkaStreamsTracing,
  String spanName, Predicate<K, V> delegatePredicate, boolean filterNot) {
  this.kafkaStreamsTracing = kafkaStreamsTracing;
  this.spanName = spanName;
  this.delegatePredicate = delegatePredicate;
  this.filterNot = filterNot;
}
 
Example #13
Source File: TwitterStreamsAnalyzer.java    From kafka-streams with Apache License 2.0
public void run()  {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    JsonSerializer<Tweet> tweetJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<Tweet> tweetJsonDeserializer = new JsonDeserializer<>(Tweet.class);
    Serde<Tweet> tweetSerde = Serdes.serdeFrom(tweetJsonSerializer, tweetJsonDeserializer);

    KStreamBuilder kStreamBuilder = new KStreamBuilder();

    Classifier classifier = new Classifier();
    classifier.train(new File("src/main/resources/kafkaStreamsTwitterTrainingData_clean.csv"));

    KeyValueMapper<String, Tweet, String> languageToKey = (k, v) ->
        StringUtils.isNotBlank(v.getText()) ? classifier.classify(v.getText()) : "unknown";

    Predicate<String, Tweet> isEnglish = (k, v) -> k.equals("english");
    Predicate<String, Tweet> isFrench =  (k, v) -> k.equals("french");
    Predicate<String, Tweet> isSpanish = (k, v) -> k.equals("spanish");

    KStream<String, Tweet> tweetKStream = kStreamBuilder.stream(Serdes.String(), tweetSerde, "twitterData");

    KStream<String, Tweet>[] filteredStreams = tweetKStream.selectKey(languageToKey).branch(isEnglish, isFrench, isSpanish);

    filteredStreams[0].to(Serdes.String(), tweetSerde, "english");
    filteredStreams[1].to(Serdes.String(), tweetSerde, "french");
    filteredStreams[2].to(Serdes.String(), tweetSerde, "spanish");

    kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
    System.out.println("Starting twitter analysis streams");
    kafkaStreams.start();
    System.out.println("Started");

}
 
Example #14
Source File: EdgeStream.java    From kafka-graphs with Apache License 2.0
/**
 * Apply a filter to each vertex in the graph stream.
 * Since this is an edge-only stream, the vertex filter can only access the key of vertices.
 *
 * @param filter the filter function to apply.
 * @return the filtered graph stream.
 */
@Override
public EdgeStream<K, EV> filterVertices(Predicate<K, Void> filter) {
    KStream<Edge<K>, EV> remainingEdges = edges
        .filter(new ApplyVertexFilterToEdges<K, EV>(filter));

    return new EdgeStream<>(remainingEdges, serialized);
}
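A hedged usage sketch for filterVertices: since the stream carries edges only, the vertex predicate receives just the key and a Void value. The stream variable and key/value types here are illustrative assumptions.

    // Keep only edges whose endpoint vertex ids are non-negative (illustrative predicate).
    Predicate<Long, Void> nonNegativeId = (id, nothing) -> id >= 0;
    EdgeStream<Long, Double> filtered = edgeStream.filterVertices(nonNegativeId);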
 
Example #15
Source File: KGraph.java    From kafka-graphs with Apache License 2.0
public KGraph<K, VV, EV> filterOnVertices(Predicate<K, VV> vertexFilter) {
    KTable<K, VV> filteredVertices = vertices.filter(vertexFilter);

    KTable<Edge<K>, EV> remainingEdges = edgesBySource()
        .join(filteredVertices, (e, v) -> e, Joined.with(keySerde(), new KryoSerde<>(), vertexValueSerde()))
        .map((k, edge) -> new KeyValue<>(edge.target(), edge))
        .join(filteredVertices, (e, v) -> e, Joined.with(keySerde(), new KryoSerde<>(), vertexValueSerde()))
        .map((k, edge) -> new KeyValue<>(new Edge<>(edge.source(), edge.target()), edge.value()))
        .groupByKey(Grouped.with(new KryoSerde<>(), edgeValueSerde()))
        .reduce((v1, v2) -> v2, Materialized.<Edge<K>, EV, KeyValueStore<Bytes, byte[]>>as(generateStoreName()).withKeySerde(new KryoSerde<>()).withValueSerde(edgeValueSerde()));

    return new KGraph<>(filteredVertices, remainingEdges, serialized);
}
 
Example #16
Source File: TracingFilterValueTransformerWithKey.java    From brave with Apache License 2.0
TracingFilterValueTransformerWithKey(KafkaStreamsTracing tracing, String spanName,
  Predicate<K, V> delegatePredicate, boolean filterNot) {
  super(tracing, spanName, delegatePredicate, filterNot);
}
 
Example #17
Source File: TracingFilterTransformer.java    From brave with Apache License 2.0
TracingFilterTransformer(KafkaStreamsTracing tracing, String spanName,
  Predicate<K, V> delegatePredicate, boolean filterNot) {
  super(tracing, spanName, delegatePredicate, filterNot);
}
 
Example #18
Source File: OrderService.java    From qcon-microservices with Apache License 2.0
FilteredResponse(AsyncResponse asyncResponse, Predicate<K, V> predicate) {
    this.asyncResponse = asyncResponse;
    this.predicate = predicate;
}
 
Example #19
Source File: KafkaStreamsJoinsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        StreamsBuilder builder = new StreamsBuilder();


        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<String> stringSerde = Serdes.String();

        KeyValueMapper<String, Purchase, KeyValue<String,Purchase>> custIdCCMasking = (k, v) -> {
            Purchase masked = Purchase.builder(v).maskCreditCard().build();
            return new KeyValue<>(masked.getCustomerId(), masked);
        };


        Predicate<String, Purchase> coffeePurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> electronicPurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int COFFEE_PURCHASE = 0;
        int ELECTRONICS_PURCHASE = 1;

        KStream<String, Purchase> transactionStream = builder.stream( "transactions", Consumed.with(Serdes.String(), purchaseSerde)).map(custIdCCMasking);

        KStream<String, Purchase>[] branchesStream = transactionStream.selectKey((k,v)-> v.getCustomerId()).branch(coffeePurchase, electronicPurchase);

        KStream<String, Purchase> coffeeStream = branchesStream[COFFEE_PURCHASE];
        KStream<String, Purchase> electronicsStream = branchesStream[ELECTRONICS_PURCHASE];

        ValueJoiner<Purchase, Purchase, CorrelatedPurchase> purchaseJoiner = new PurchaseJoiner();
        JoinWindows twentyMinuteWindow =  JoinWindows.of(60 * 1000 * 20);

        KStream<String, CorrelatedPurchase> joinedKStream = coffeeStream.join(electronicsStream,
                                                                              purchaseJoiner,
                                                                              twentyMinuteWindow,
                                                                              Joined.with(stringSerde,
                                                                                          purchaseSerde,
                                                                                          purchaseSerde));

        joinedKStream.print(Printed.<String, CorrelatedPurchase>toSysOut().withLabel("joined KStream"));

        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        LOG.info("Starting Join Examples");
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Join Examples now");
        kafkaStreams.close();
        MockDataProducer.shutdown();


    }
 
Example #20
Source File: KGraph.java    From kafka-graphs with Apache License 2.0
public KGraph<K, VV, EV> filterOnEdges(Predicate<Edge<K>, EV> edgeFilter) {
    KTable<Edge<K>, EV> filteredEdges = edges
        .filter(edgeFilter, Materialized.<Edge<K>, EV, KeyValueStore<Bytes, byte[]>>as(generateStoreName()).withKeySerde(new KryoSerde<>()).withValueSerde(edgeValueSerde()));

    return new KGraph<>(vertices, filteredEdges, serialized);
}
 
Example #21
Source File: EdgeStream.java    From kafka-graphs with Apache License 2.0
public ApplyVertexFilterToEdges(Predicate<K, Void> vertexFilter) {
    this.vertexFilter = vertexFilter;
}
 
Example #22
Source File: KGraphStream.java    From kafka-graphs with Apache License 2.0
/**
 * Apply a filter to each edge in the graph stream.
 *
 * @param filter the filter function to apply.
 * @return the filtered graph stream.
 */
KGraphStream<K, VV, EV> filterEdges(Predicate<Edge<K>, EV> filter);
 
Example #23
Source File: KGraphStream.java    From kafka-graphs with Apache License 2.0
/**
 * Apply a filter to each vertex in the graph stream.
 * Since this is an edge-only stream, the vertex filter can only access the key of vertices.
 *
 * @param filter the filter function to apply.
 * @return the filtered graph stream.
 */
KGraphStream<K, VV, EV> filterVertices(Predicate<K, Void> filter);
 
Example #24
Source File: ZMartKafkaStreamsAdvancedReqsMetricsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder streamsBuilder = new StreamsBuilder();


        /**
         * Previous requirements
         */
        KStream<String,Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));


        /**
         *  Selecting a key for storage and filtering out low dollar purchases
         */

        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));


        /**
         * Branching stream for separating out purchases in new departments to their own topics
         */
        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to("coffee", Produced.with(stringSerde, purchaseSerde));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));



        /**
         * Security requirements: record transactions for a certain employee
         */
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) -> { };

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);

        Topology topology = streamsBuilder.build();


        KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);

        KafkaStreams.StateListener stateListener = (newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                LOG.info("Application has gone from REBALANCING to RUNNING ");
                LOG.info("Topology Layout {}", streamsBuilder.build().describe());
            }

            if (newState == KafkaStreams.State.REBALANCING) {
                LOG.info("Application is entering REBALANCING phase");
            }
        };

        kafkaStreams.setStateListener(stateListener);
        LOG.info("ZMart Advanced Requirements Metrics Application Started");
        kafkaStreams.cleanUp();
        CountDownLatch stopSignal = new CountDownLatch(1);

        Runtime.getRuntime().addShutdownHook(new Thread(()-> {
            LOG.info("Shutting down the Kafka Streams Application now");
            kafkaStreams.close();
            MockDataProducer.shutdown();
            stopSignal.countDown();
        }));



        MockDataProducer.producePurchaseData(DataGenerator.DEFAULT_NUM_PURCHASES, 250, DataGenerator.NUMBER_UNIQUE_CUSTOMERS);
        kafkaStreams.start();

        stopSignal.await();
        LOG.info("All done now, good-bye");
    }
 
Example #25
Source File: KafkaStreamsTracing.java    From brave with Apache License 2.0
/**
 * Create a filter transformer.
 *
 * WARNING: this filter implementation uses the Streams transform API, meaning that
 * re-partitioning can occur if a key-modifying operation such as grouping or joining is
 * applied after this filter.
 *
 * In that case, consider using {@link #markAsFiltered(String, Predicate)}, which uses the
 * {@link ValueTransformerWithKey} API instead.
 *
 * <p>Simple example using Kafka Streams DSL:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * builder.stream(inputTopic)
 *       .transform(kafkaStreamsTracing.filter("myFilter", (k, v) -> ...))
 *       .to(outputTopic);
 * }</pre>
 */
public <K, V> TransformerSupplier<K, V, KeyValue<K, V>> filter(String spanName,
  Predicate<K, V> predicate) {
  return new TracingFilterTransformerSupplier<>(this, spanName, predicate, false);
}
 
Example #26
Source File: KafkaStreamsTracing.java    From brave with Apache License 2.0
/**
 * Create a filterNot transformer.
 *
 * WARNING: this filter implementation uses the Streams transform API, meaning that
 * re-partitioning can occur if a key-modifying operation such as grouping or joining is
 * applied after this filter. In that case, consider using {@link #markAsNotFiltered(String,
 * Predicate)}, which uses the {@link ValueTransformerWithKey} API instead.
 *
 * <p>Simple example using Kafka Streams DSL:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * builder.stream(inputTopic)
 *       .transform(kafkaStreamsTracing.filterNot("myFilter", (k, v) -> ...))
 *       .to(outputTopic);
 * }</pre>
 */
public <K, V> TransformerSupplier<K, V, KeyValue<K, V>> filterNot(String spanName,
  Predicate<K, V> predicate) {
  return new TracingFilterTransformerSupplier<>(this, spanName, predicate, true);
}
 
Example #27
Source File: KafkaStreamsTracing.java    From brave with Apache License 2.0
/**
 * Create a markAsFiltered valueTransformer.
 *
 * Instead of filtering and dropping values downstream as {@code filter} does, {@code
 * markAsFiltered} creates a span and marks it as filtered or not. If filtered, the returned value
 * will be {@code null}, so an additional non-null value filter is required to complete the
 * filtering.
 *
 * This operation is offered because the DSL lacks a processor that can continue processing
 * conditionally without risk of accidental re-partitioning.
 *
 * <p>Simple example using Kafka Streams DSL:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * builder.stream(inputTopic)
 *       .transformValues(kafkaStreamsTracing.markAsFiltered("myFilter", (k, v) -> ...))
 *       .filterNot((k, v) -> Objects.isNull(v))
 *       .to(outputTopic);
 * }</pre>
 */
public <K, V> ValueTransformerWithKeySupplier<K, V, V> markAsFiltered(String spanName,
  Predicate<K, V> predicate) {
  return new TracingFilterValueTransformerWithKeySupplier<>(this, spanName, predicate, false);
}
 
Example #28
Source File: KafkaStreamsTracing.java    From brave with Apache License 2.0 2 votes vote down vote up
/**
 * Create a markAsNotFiltered valueTransformer.
 *
 * Instead of filtering and dropping values downstream as {@code filterNot} does, {@code
 * markAsNotFiltered} creates a span and marks it as filtered or not. If filtered, the returned
 * value will be {@code null}, so an additional non-null value filter is required to complete the
 * filtering.
 *
 * This operation is offered because the DSL lacks a processor that can continue processing
 * conditionally without risk of accidental re-partitioning.
 *
 * <p>Simple example using Kafka Streams DSL:
 * <pre>{@code
 * StreamsBuilder builder = new StreamsBuilder();
 * builder.stream(inputTopic)
 *       .transformValues(kafkaStreamsTracing.markAsNotFiltered("myFilter", (k, v) -> ...))
 *       .filterNot((k, v) -> Objects.isNull(v))
 *       .to(outputTopic);
 * }</pre>
 */
public <K, V> ValueTransformerWithKeySupplier<K, V, V> markAsNotFiltered(String spanName,
  Predicate<K, V> predicate) {
  return new TracingFilterValueTransformerWithKeySupplier<>(this, spanName, predicate, true);
}
 
Example #29
Source File: ZMartKafkaStreamsAdvancedReqsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();


        // previous requirements
        KStream<String,Purchase> purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print( Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));



           // selecting a key for storage and filtering out low dollar purchases


        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.print(Printed.<Long, Purchase>toSysOut().withLabel("purchases"));
        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));



         // branching stream for separating out purchases in new departments to their own topics

        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to( "coffee", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[coffee].print(Printed.<String, Purchase>toSysOut().withLabel( "coffee"));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[electronics].print(Printed.<String, Purchase>toSysOut().withLabel("electronics"));




         // security requirements: record transactions for a certain employee
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) ->
                SecurityDBService.saveRecord(purchase.getPurchaseDate(), purchase.getEmployeeId(), purchase.getItemPurchased());

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);


        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("ZMart Advanced Requirements Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example #30
Source File: EdgeStream.java    From kafka-graphs with Apache License 2.0
/**
 * Apply a filter to each edge in the graph stream.
 *
 * @param filter the filter function to apply.
 * @return the filtered graph stream.
 */
@Override
public EdgeStream<K, EV> filterEdges(Predicate<Edge<K>, EV> filter) {
    KStream<Edge<K>, EV> remainingEdges = edges.filter(filter);
    return new EdgeStream<>(remainingEdges, serialized);
}