Java Code Examples for org.apache.kafka.streams.kstream.KStream#to()

The following examples show how to use org.apache.kafka.streams.kstream.KStream#to(). Each example is taken from an open-source project; the link above each example points to the original project and source file.
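
A quick orientation before the examples: KStream#to() is a terminal operation; it writes every record of the stream to a Kafka topic and returns void. The two most common overloads are to(topic), which uses the default serdes from the application's configuration, and to(topic, Produced), which overrides the key/value serdes (and optionally the stream partitioner) for that one sink. A minimal sketch, with placeholder topic names:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class ToOverloads {
    public static Topology build() {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> stream = builder.stream("input-topic");

        // 1) rely on the default key/value serdes from the Streams configuration
        stream.to("output-topic");

        // 2) override the serdes (and optionally the partitioner) for this sink only
        stream.to("output-topic-explicit", Produced.with(Serdes.String(), Serdes.String()));

        return builder.build();
    }
}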
Example 1
Source File: StreamsStarterApp.java    From kafka-streams-machine-learning-examples with Apache License 2.0
public static void main(String[] args) {

		Properties config = new Properties();
		config.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-starter-app");
		config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
		config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
		config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
		config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

		StreamsBuilder builder = new StreamsBuilder();

		KStream<String, String> kStream = builder.stream("streams-file-input");
		// do stuff
		kStream.to("streams-wordcount-output");

		KafkaStreams streams = new KafkaStreams(builder.build(), config);
		streams.cleanUp(); // only do this in dev - not in prod
		streams.start();

		// print metadata about the local stream threads
		System.out.println(streams.localThreadsMetadata().toString());

		// shutdown hook to correctly close the streams application
		Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

	}
 
Example 2
Source File: StatisticsBuilder.java    From football-events with MIT License
private void buildPlayerStatistics(KStream<String, GoalScored> goalStream) {
    KTable<String, PlayerStartedCareer> playerTable = builder
            .table(PLAYER_STARTED_TOPIC, with(String(), playerSerde));

    KTable<String, PlayerGoals> playerGoalsTable = goalStream
            .selectKey((matchId, goal) -> goal.getScorerId())
            .leftJoin(playerTable, (goal, player) -> new PlayerGoals(player).goal(goal),
                with(String(), goalScoredSerde, playerSerde))
            .groupByKey(Serialized.with(String(), playerGoalsSerde))
            .reduce(PlayerGoals::aggregate, materialized(PLAYER_GOALS_STORE, playerGoalsSerde));

    KTable<String, PlayerCards> playerCardsTable = builder
            .stream(CARD_RECEIVED_TOPIC, with(String(), cardReceivedSerde))
            .selectKey((matchId, card) -> card.getReceiverId())
            .leftJoin(playerTable, (card, player) -> new PlayerCards(player).card(card),
                with(String(), cardReceivedSerde, playerSerde))
            .groupByKey(Serialized.with(String(), playerCardsSerde))
            .reduce(PlayerCards::aggregate, materialized(PLAYER_CARDS_STORE, playerCardsSerde));

    // publish changes to a view topic
    playerCardsTable.toStream().to(PLAYER_CARDS_TOPIC, Produced.with(String(), playerCardsSerde));

    KStream<String, PlayerGoals> playerGoalsStream = playerGoalsTable.toStream();
    playerGoalsStream.to(PLAYER_GOALS_TOPIC, Produced.with(String(), playerGoalsSerde));
}
 
Example 3
Source File: IPFraudKafkaStreamApp.java    From Building-Data-Streaming-Applications-with-Apache-Kafka with MIT License
public static void main(String[] args) throws Exception {
    Properties kafkaStreamProperties = new Properties();
    kafkaStreamProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "IP-Fraud-Detection");
    kafkaStreamProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    kafkaStreamProperties.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    kafkaStreamProperties.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    kafkaStreamProperties.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    Serde<String> stringSerde = Serdes.String();

    KStreamBuilder fraudDetectionTopology = new KStreamBuilder();

    KStream<String, String> ipRecords = fraudDetectionTopology.stream(stringSerde, stringSerde, propertyReader.getPropertyValue("topic"));

    KStream<String, String> fraudIpRecords = ipRecords
            .filter((k, v) -> isFraud(v));

    fraudIpRecords.to(propertyReader.getPropertyValue("output_topic"));

    KafkaStreams streamManager = new KafkaStreams(fraudDetectionTopology, kafkaStreamProperties);
    streamManager.start();

    Runtime.getRuntime().addShutdownHook(new Thread(streamManager::close));
}
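
Note that Example 3 uses the pre-1.0 API: KStreamBuilder was replaced by StreamsBuilder, KEY_SERDE_CLASS_CONFIG/VALUE_SERDE_CLASS_CONFIG by the DEFAULT_* variants, and ZOOKEEPER_CONNECT_CONFIG was dropped entirely (Streams applications no longer talk to ZooKeeper). A rough sketch of the same topology on the current API; the literal topic names stand in for the property-file values and are assumptions:

Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "IP-Fraud-Detection");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // no ZooKeeper setting needed
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

StreamsBuilder builder = new StreamsBuilder();
builder.stream("ip-records", Consumed.with(Serdes.String(), Serdes.String()))
       .filter((k, v) -> isFraud(v))
       .to("fraud-ip-records", Produced.with(Serdes.String(), Serdes.String()));

KafkaStreams streams = new KafkaStreams(builder.build(), props);
streams.start();
Runtime.getRuntime().addShutdownHook(new Thread(streams::close));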
 
Example 4
Source File: PlayerCommandConnector.java    From football-events with MIT License
public void build(StreamsBuilder builder) {
    KStream<byte[], JsonNode> playerSourceStream = builder.stream(
            CONNECT_PLAYERS_TOPIC, Consumed.with(Serdes.ByteArray(), new JsonNodeSerde()))
            .filter((id, json) -> creationOrSnapshot(json));

    playerSourceStream.foreach(this::debug);

    KStream<String, PlayerStartedCareer> playerReadyStream = playerSourceStream
            .map((id, json) -> {
                PlayerStartedCareer event = createEvent(json);
                return KeyValue.pair(event.getAggId(), event);
            });

    playerReadyStream.to(PLAYER_STARTED_CAREER_TOPIC, Produced.with(
            Serdes.String(), new JsonPojoSerde<>(PlayerStartedCareer.class)));
}
 
Example 5
Source File: MetricImporter.java    From SkaETL with Apache License 2.0
private KafkaStreams feedMergeTopic(String id, String mergeTopic, String destId) {

        StreamsBuilder builder = new StreamsBuilder();
        Properties properties = createProperties(kafkaConfiguration.getBootstrapServers());
        String inputTopic = id + TOPIC_TREAT_PROCESS;
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, inputTopic + "merger-stream-" + destId);

        KStream<String, JsonNode> stream = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
        stream.to(mergeTopic, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));

        final KafkaStreams streams = new KafkaStreams(builder.build(), properties);
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
        return streams;

    }
 
Example 6
Source File: ErrorEventsPerMinute.java    From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();

    // Click Events
    final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.clickInputTopic,
            Consumed.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class)));

    final KTable<Windowed<Integer>, Long> counts = clickEvents
            .selectKey((key, value) -> value.getStatus())
            .filter((key, value) -> key >= 400)
            .groupByKey(Grouped.with(Serdes.Integer(), new JsonSerde<>(ClickEvent.class)))
            .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))  // 1-minute tumbling windows
            .count();

    // Status codes
    final KTable<Integer, StatusCode> statusCodes = builder.table(this.statusInputTopic,
            Consumed.with(Serdes.Integer(), new JsonSerde<>(StatusCode.class)));

    // Join
    final KStream<Integer, ErrorOutput> errors = counts.toStream()
            .map((key, value) -> KeyValue.pair(
                    key.key(),
                    new ErrorOutput(key.key(), value, key.window().start(), null /*empty definition*/)))
            .join(statusCodes,
                    (countRecord, code) -> new ErrorOutput(
                            countRecord.getStatusCode(), countRecord.getCount(), countRecord.getTime(), code.getDefinition()),
                    Joined.valueSerde(new JsonSerde<>(ErrorOutput.class)));
    errors.to(this.errorOutputTopic);

    // Send alert if more than 5x a certain error code per minute
    errors.filter((key, errorOutput) -> errorOutput.getCount() > 5L).to(this.alertTopic);

    return builder.build();
}
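
The map step above deliberately unwraps the Windowed<Integer> keys into plain integers before the join and the final to() calls. If a topology instead needs to write windowed results with the window kept in the key, a windowed serde can be passed to Produced. A minimal sketch; the topic name is hypothetical, and newer Kafka versions also offer an overload of timeWindowedSerdeFrom that takes the window size:

// write the windowed counts directly, keeping the window boundaries in the key
counts.toStream().to("error-counts-per-window",
        Produced.with(WindowedSerdes.timeWindowedSerdeFrom(Integer.class), Serdes.Long()));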
 
Example 7
Source File: KStreamBinder.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@SuppressWarnings("unchecked")
private void to(boolean isNativeEncoding, String name,
				KStream<Object, Object> outboundBindTarget, Serde<Object> keySerde,
				Serde<Object> valueSerde, KafkaStreamsProducerProperties properties) {
	final Produced<Object, Object> produced = Produced.with(keySerde, valueSerde);
	StreamPartitioner streamPartitioner = null;
	if (!StringUtils.isEmpty(properties.getStreamPartitionerBeanName())) {
		streamPartitioner = getApplicationContext().getBean(properties.getStreamPartitionerBeanName(),
				StreamPartitioner.class);
	}
	if (streamPartitioner != null) {
		produced.withStreamPartitioner(streamPartitioner);
	}
	if (!isNativeEncoding) {
		LOG.info("Native encoding is disabled for " + name
				+ ". Outbound message conversion done by Spring Cloud Stream.");
		// Tombstone records (null values) bypass conversion and go straight to the output topic.
		outboundBindTarget.filter((k, v) -> v == null)
				.to(name, produced);
		// All other records are serialized by the conversion delegate first.
		this.kafkaStreamsMessageConversionDelegate
				.serializeOnOutbound(outboundBindTarget)
				.to(name, produced);
	}
	else {
		LOG.info("Native encoding is enabled for " + name
				+ ". Outbound serialization done at the broker.");
		outboundBindTarget.to(name, produced);
	}
}
 
Example 8
Source File: WordCountProcessorApplicationTests.java    From spring-cloud-stream-samples with Apache License 2.0
/**
 * Set up the stream topology:
 * add the KStream based on the @StreamListener annotation,
 * and the to(topic) call based on the @SendTo annotation.
 */
@Before
public void setup() {
    final StreamsBuilder builder = new StreamsBuilder();
    KStream<Bytes, String> input = builder.stream(INPUT_TOPIC, Consumed.with(nullSerde, stringSerde));
    KafkaStreamsWordCountApplication.WordCountProcessorApplication app = new KafkaStreamsWordCountApplication.WordCountProcessorApplication();
    final Function<KStream<Bytes, String>, KStream<Bytes, KafkaStreamsWordCountApplication.WordCount>> process = app.process();

    final KStream<Bytes, KafkaStreamsWordCountApplication.WordCount> output = process.apply(input);

    output.to(OUTPUT_TOPIC, Produced.with(nullSerde, countSerde));

    testDriver = new TopologyTestDriver(builder.build(), getStreamsConfiguration());
}
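
With the driver set up, the topology can be exercised without a broker. A sketch using the TestInputTopic/TestOutputTopic API introduced in Kafka 2.4 (older versions used ConsumerRecordFactory and readOutput instead); the input text is arbitrary:

TestInputTopic<Bytes, String> inputTopic = testDriver.createInputTopic(
        INPUT_TOPIC, nullSerde.serializer(), stringSerde.serializer());
TestOutputTopic<Bytes, KafkaStreamsWordCountApplication.WordCount> outputTopic =
        testDriver.createOutputTopic(OUTPUT_TOPIC, nullSerde.deserializer(), countSerde.deserializer());

inputTopic.pipeInput(null, "hello kafka streams hello");

// drain and inspect the emitted WordCount records
while (!outputTopic.isEmpty()) {
    System.out.println(outputTopic.readKeyValue());
}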
 
Example 9
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void feedStream(String consumerId, ProcessReferential processReferential, String topicMerge) {
    String topicSource = consumerId + TOPIC_PARSED_PROCESS;
    log.info("creating {} Process Merge for topicsource {}", consumerId, topicSource);
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, JsonNode> streamToMerge = builder.stream(topicSource, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    streamToMerge.to(topicMerge, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(processReferential.getIdProcess() + "_" + consumerId + "-_merge-topic", kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    runningMergeProcess.get(processReferential).add(streams);
    streams.start();
}
 
Example 10
Source File: TransformStream.java    From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();
    final String inputTopic = envProps.getProperty("input.topic.name");

    KStream<String, RawMovie> rawMovies = builder.stream(inputTopic);
    KStream<Long, Movie> movies = rawMovies.map((key, rawMovie) ->
            new KeyValue<Long, Movie>(rawMovie.getId(), convertRawMovie(rawMovie)));

    movies.to("movies", Produced.with(Serdes.Long(), movieAvroSerde(envProps)));

    return builder.build();
}
 
Example 11
Source File: KafkaStreamsLiveTest.java    From tutorials with MIT License
@Test
@Ignore("it needs to have kafka broker running on local")
public void shouldTestKafkaStreams() throws InterruptedException {
    // given
    String inputTopic = "inputTopic";

    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    // when
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> textLines = builder.stream(inputTopic);
    Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);

    KTable<String, Long> wordCounts = textLines.flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))).groupBy((key, word) -> word).count();

    wordCounts.toStream().foreach((word, count) -> System.out.println("word: " + word + " -> " + count));

    String outputTopic = "outputTopic";
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    wordCounts.toStream().to(outputTopic, Produced.with(stringSerde, longSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();

    // then
    Thread.sleep(30000);
    streams.close();
}
 
Example 12
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void toKafkaTopic(KStream<String, JsonNode> result, ParameterOutput parameterOutput) {
    result.to(parameterOutput.getTopicOut(), Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
}
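
Besides a fixed topic name, to() also accepts a TopicNameExtractor (Kafka 2.0+) so the sink topic can be chosen per record. A sketch building on the example above; the "output-" prefix and the "type" field are assumptions, and dynamically chosen topics must already exist, since Kafka Streams does not create sink topics:

private void toDynamicKafkaTopic(KStream<String, JsonNode> result) {
    // route each record to a topic derived from its payload
    result.to((key, value, recordContext) -> "output-" + value.get("type").asText(),
            Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
}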
 
Example 13
Source File: EventSourcedPublisher.java    From simplesource with Apache License 2.0
static <K> void publishCommandResponses(TopologyContext<K, ?, ?, ?> ctx, final KStream<K, CommandResponse<K>> responseStream) {
    responseStream.to(ctx.topicName(AggregateResources.TopicEntity.COMMAND_RESPONSE), ctx.commandResponseProduced());
}
 
Example 14
Source File: WordCountExample.java    From kafka-streams-wordcount with Apache License 2.0
public static void main(String[] args) throws Exception{

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
        // Note: To re-run the demo, you need to use the offset reset tool:
        // https://cwiki.apache.org/confluence/display/KAFKA/Kafka+Streams+Application+Reset+Tool
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // work-around for an issue around timing of creating internal topics
        // Fixed in Kafka 0.10.2.0
        // don't use in large production apps - this increases network load
        // props.put(CommonClientConfigs.METADATA_MAX_AGE_CONFIG, 500);

        KStreamBuilder builder = new KStreamBuilder();

        KStream<String, String> source = builder.stream("wordcount-input");


        final Pattern pattern = Pattern.compile("\\W+");
        KStream<String, String> counts = source.flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
                .map((key, value) -> new KeyValue<>(value, value))
                .filter((key, value) -> (!value.equals("the")))
                .groupByKey()
                .count("CountStore")
                .mapValues(value -> Long.toString(value))
                .toStream();
        counts.to("wordcount-output");

        KafkaStreams streams = new KafkaStreams(builder, props);

        // This is for reset to work. Don't use in production - it causes the app to re-load the state from Kafka on every start
        streams.cleanUp();

        streams.start();

        // usually the stream application would be running forever,
        // in this example we just let it run for some time and stop since the input data is finite.
        Thread.sleep(5000L);

        streams.close();

    }
 
Example 15
Source File: Kafka_Streams_TensorFlow_Image_Recognition_Example.java    From kafka-streams-machine-learning-examples with Apache License 2.0
static Topology getStreamTopology() throws IOException {
	// Create TensorFlow object

	String modelDir = "src/main/resources/generatedModels/CNN_inception5h";

	Path pathGraph = Paths.get(modelDir, "tensorflow_inception_graph.pb");
	byte[] graphDef = Files.readAllBytes(pathGraph);

	Path pathModel = Paths.get(modelDir, "imagenet_comp_graph_label_strings.txt");
	List<String> labels = Files.readAllLines(pathModel, Charset.forName("UTF-8"));

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic "ImageInputTopic", where
	// message values represent lines of text
	final KStream<String, String> imageInputLines = builder.stream(imageInputTopic);

	//imageInputLines.print(Printed.toSysOut());

	// Stream Processor (in this case inside mapValues to add custom logic, i.e. apply the
	// analytic model)
	// Transform message: Add prediction information
	KStream<String, Object> transformedMessage =
	imageInputLines.mapValues(value ->  {

		String imageClassification = "unknown";
		String imageProbability = "unknown";

		String imageFile = value;

		Path pathImage = Paths.get(imageFile);
		byte[] imageBytes;
		try {
			imageBytes = Files.readAllBytes(pathImage);

			try (Tensor image = constructAndExecuteGraphToNormalizeImage(imageBytes)) {
				float[] labelProbabilities = executeInceptionGraph(graphDef, image);
				int bestLabelIdx = maxIndex(labelProbabilities);

				imageClassification = labels.get(bestLabelIdx);

				imageProbability = Float.toString(labelProbabilities[bestLabelIdx] * 100f);

				System.out.println(String.format("BEST MATCH: %s (%.2f%% likely)", imageClassification,
						labelProbabilities[bestLabelIdx] * 100f));
			}

		} catch (IOException e) {
			e.printStackTrace();
		}
		return "Prediction: What is the content of this picture? => " + imageClassification
				+ ", probability = " + imageProbability;
	});

	// Send prediction information to Output Topic
	transformedMessage.to(imageOutputTopic);

	return builder.build();
}
 
Example 16
Source File: PregelComputation.java    From kafka-graphs with Apache License 2.0
public void prepare(StreamsBuilder builder, Properties streamsConfig) {
    Properties producerConfig = ClientUtils.producerConfig(
        bootstrapServers, serialized.keySerde().serializer().getClass(), KryoSerializer.class,
        streamsConfig != null ? streamsConfig : new Properties()
    );
    producerConfig.setProperty(ProducerConfig.CLIENT_ID_CONFIG, applicationId + "-producer");
    this.producer = new KafkaProducer<>(producerConfig);

    final StoreBuilder<KeyValueStore<Integer, Map<K, Map<K, List<Message>>>>> workSetStoreBuilder =
        Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(localworkSetStoreName),
            Serdes.Integer(), new KryoSerde<>()
        );
    builder.addStateStore(workSetStoreBuilder);

    final StoreBuilder<KeyValueStore<K, Tuple4<Integer, VV, Integer, VV>>> solutionSetStoreBuilder =
        Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(localSolutionSetStoreName),
            serialized.keySerde(), new KryoSerde<>()
        );
    builder.addStateStore(solutionSetStoreBuilder);

    this.vertices = builder
        .table(
            verticesTopic,
            Materialized.<K, VV, KeyValueStore<Bytes, byte[]>>as(verticesStoreName)
                .withKeySerde(serialized.keySerde()).withValueSerde(serialized.vertexValueSerde())
        );

    this.edgesGroupedBySource = builder
        .table(
            edgesGroupedBySourceTopic,
            Materialized.<K, Map<K, EV>, KeyValueStore<Bytes, byte[]>>as(edgesStoreName)
                .withKeySerde(serialized.keySerde()).withValueSerde(new KryoSerde<>())
        );

    this.solutionSet = builder
        .table(solutionSetTopic, Consumed.<K, Tuple4<Integer, VV, Integer, VV>>with(serialized.keySerde(), new KryoSerde<>()))
        .mapValues(v -> v._4, Materialized.as(solutionSetStore));

    // Initialize solution set
    this.vertices
        .toStream()
        .mapValues(v -> new Tuple4<>(-1, v, 0, v))
        .to(solutionSetTopic, Produced.with(serialized.keySerde(), new KryoSerde<>()));

    // Initialize workset
    this.vertices
        .toStream()
        .peek((k, v) -> {
            try {
                int partition = PregelComputation.vertexToPartition(k, serialized.keySerde().serializer(), numPartitions);
                ZKUtils.addChild(curator, applicationId, new PregelState(State.CREATED, 0, Stage.SEND), childPath(partition));
            } catch (Exception e) {
                throw toRuntimeException(e);
            }

        })
        .mapValues((k, v) -> new Tuple3<>(0, k, initialMessage.map(Collections::singletonList).orElse(Collections.emptyList())))
        .peek((k, v) -> log.trace("workset 0 before topic: (" + k + ", " + v + ")"))
        .<K, Tuple3<Integer, K, List<Message>>>to(workSetTopic, Produced.with(serialized.keySerde(), new KryoSerde<>()));

    this.workSet = builder
        .stream(workSetTopic, Consumed.with(serialized.keySerde(), new KryoSerde<Tuple3<Integer, K, List<Message>>>()))
        .peek((k, v) -> log.trace("workset 1 after topic: (" + k + ", " + v + ")"));

    KStream<K, Tuple2<Integer, Map<K, List<Message>>>> syncedWorkSet = workSet
        .transform(BarrierSync::new, localworkSetStoreName)
        .peek((k, v) -> log.trace("workset 2 after join: (" + k + ", " + v + ")"));

    KStream<K, Tuple3<Integer, Tuple4<Integer, VV, Integer, VV>, Map<K, List<Message>>>> superstepComputation =
        syncedWorkSet
            .transformValues(VertexComputeUdf::new, localSolutionSetStoreName, vertices.queryableStoreName(),
                edgesGroupedBySource.queryableStoreName());

    // Compute the solution set delta
    KStream<K, Tuple4<Integer, VV, Integer, VV>> solutionSetDelta = superstepComputation
        .flatMapValues(v -> v._2 != null ? Collections.singletonList(v._2) : Collections.emptyList())
        .peek((k, v) -> log.trace("solution set: (" + k + ", " + v + ")"));

    solutionSetDelta
        .to(solutionSetTopic, Produced.with(serialized.keySerde(), new KryoSerde<>()));

    // Compute the inbox of each vertex for the next step (new workset)
    KStream<K, Tuple2<Integer, Map<K, List<Message>>>> newworkSet = superstepComputation
        .mapValues(v -> new Tuple2<>(v._1, v._3))
        .peek((k, v) -> log.trace("workset new: (" + k + ", " + v + ")"));

    newworkSet.process(() -> new SendMessages(producer));
}
 
Example 17
Source File: PurchaseKafkaStreamsDriver.java    From kafka-streams with Apache License 2.0
public static void main(String[] args) {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        JsonDeserializer<Purchase> purchaseJsonDeserializer = new JsonDeserializer<>(Purchase.class);
        JsonSerializer<Purchase> purchaseJsonSerializer = new JsonSerializer<>();

        JsonSerializer<RewardAccumulator> rewardAccumulatorJsonSerializer = new JsonSerializer<>();
        JsonDeserializer<RewardAccumulator> rewardAccumulatorJsonDeserializer = new JsonDeserializer<>(RewardAccumulator.class);

        Serde<RewardAccumulator> rewardAccumulatorSerde = Serdes.serdeFrom(rewardAccumulatorJsonSerializer, rewardAccumulatorJsonDeserializer);

        JsonSerializer<PurchasePattern> purchasePatternJsonSerializer = new JsonSerializer<>();
        JsonDeserializer<PurchasePattern> purchasePatternJsonDeserializer = new JsonDeserializer<>(PurchasePattern.class);

        Serde<PurchasePattern> purchasePatternSerde = Serdes.serdeFrom(purchasePatternJsonSerializer, purchasePatternJsonDeserializer);

        Serde<Purchase> purchaseSerde = Serdes.serdeFrom(purchaseJsonSerializer, purchaseJsonDeserializer);

        Serde<String> stringSerde = Serdes.String();

        KStreamBuilder kStreamBuilder = new KStreamBuilder();


        KStream<String, Purchase> purchaseKStream = kStreamBuilder.stream(stringSerde, purchaseSerde, "src-topic")
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build()).to(stringSerde, purchasePatternSerde, "patterns");

        purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build()).to(stringSerde, rewardAccumulatorSerde, "rewards");

        purchaseKStream.to(stringSerde, purchaseSerde, "purchases");

        System.out.println("Starting PurchaseStreams Example");
        KafkaStreams kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
        kafkaStreams.start();
        System.out.println("Now started PurchaseStreams Example");

    }
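
Example 17 is written against the pre-1.0 API, where to() took the serdes and the topic name directly. That overload was deprecated in Kafka 1.0 in favor of Produced and later removed; on the current API each of the sink calls above would read, for instance:

// post-1.0 equivalent of to(stringSerde, purchaseSerde, "purchases")
purchaseKStream.to("purchases", Produced.with(stringSerde, purchaseSerde));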
 
Example 18
Source File: ZMartKafkaStreamsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder streamsBuilder = new StreamsBuilder();

        KStream<String, Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));

        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

        purchaseKStream.print(Printed.<String, Purchase>toSysOut().withLabel("purchases"));
        purchaseKStream.to("purchases", Produced.with(stringSerde, purchaseSerde));

        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();

        KafkaStreams kafkaStreams = new KafkaStreams(streamsBuilder.build(), streamsConfig);
        LOG.info("ZMart First Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 19
Source File: ZMartKafkaStreamsAdvancedReqsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();


        // previous requirements
        KStream<String, Purchase> purchaseKStream = builder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));

        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

        // selecting a key for storage and filtering out low dollar purchases
        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.print(Printed.<Long, Purchase>toSysOut().withLabel("purchases"));
        filteredKStream.to("purchases", Produced.with(Serdes.Long(), purchaseSerde));

        // branching stream for separating out purchases in new departments to their own topics
        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to("coffee", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[coffee].print(Printed.<String, Purchase>toSysOut().withLabel("coffee"));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[electronics].print(Printed.<String, Purchase>toSysOut().withLabel("electronics"));

        // security requirements to record transactions for certain employees
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) ->
                SecurityDBService.saveRecord(purchase.getPurchaseDate(), purchase.getEmployeeId(), purchase.getItemPurchased());

        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);

        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        LOG.info("ZMart Advanced Requirements Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 20
Source File: ZMartKafkaStreamsAddStateApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, Purchase> purchaseKStream = builder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));



     // adding State to processor
    String rewardsStateStoreName = "rewardsPointsStore";
    RewardsStreamPartitioner streamPartitioner = new RewardsStreamPartitioner();

    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(rewardsStateStoreName);
    StoreBuilder<KeyValueStore<String, Integer>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), Serdes.Integer());

    builder.addStateStore(storeBuilder);

    KStream<String, Purchase> transByCustomerStream = purchaseKStream.through("customer_transactions", Produced.with(stringSerde, purchaseSerde, streamPartitioner));


    KStream<String, RewardAccumulator> statefulRewardAccumulator = transByCustomerStream.transformValues(() -> new PurchaseRewardTransformer(rewardsStateStoreName),
            rewardsStateStoreName);

    statefulRewardAccumulator.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
    statefulRewardAccumulator.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));



    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    
    LOG.info("Starting Adding State Example");
    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    LOG.info("ZMart Adding State Application Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Add State Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}