Java Code Examples for org.apache.kafka.streams.kstream.KStream#mapValues()

The following examples show how to use org.apache.kafka.streams.kstream.KStream#mapValues(). Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
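Before the project examples, here is a minimal, self-contained sketch of the two mapValues() overloads. It is illustrative only: the topic names and class name are hypothetical, and it assumes Kafka Streams 2.0 or later. mapValues() transforms each record's value while leaving the key untouched, so unlike map() it never flags the stream for repartitioning.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class MapValuesSketch {

    public static Topology build() {
        StreamsBuilder builder = new StreamsBuilder();

        // Hypothetical input topic with String keys and values
        KStream<String, String> input =
                builder.stream("input-topic", Consumed.with(Serdes.String(), Serdes.String()));

        // ValueMapper overload: only the value is visible to the lambda
        KStream<String, Integer> lengths = input.mapValues(value -> value.length());

        // ValueMapperWithKey overload: the key is readable but must not be modified
        KStream<String, String> tagged = input.mapValues((key, value) -> key + ":" + value);

        lengths.to("lengths-topic", Produced.with(Serdes.String(), Serdes.Integer()));
        tagged.to("tagged-topic", Produced.with(Serdes.String(), Serdes.String()));

        return builder.build();
    }
}

The returned Topology would then be passed to a KafkaStreams instance and started, as most of the examples below do.
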
Example 1
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
public void routeResult(KStream<String, Referential> result, List<ProcessOutput> processOutputs, Class<? extends AbstractElasticsearchProcessor> toElasticsearchProcessorClass) {
    KStream<String, JsonNode> resultAsJsonNode = result.mapValues(value -> JSONUtils.getInstance().toJsonNode(value));
    for (ProcessOutput processOutput : processOutputs) {
        switch (processOutput.getTypeOutput()) {
            case KAFKA:
                toKafkaTopic(resultAsJsonNode, processOutput.getParameterOutput());
                break;
            case ELASTICSEARCH:
                toElasticsearch(resultAsJsonNode, processOutput.getParameterOutput(), toElasticsearchProcessorClass);
                break;
            case SYSTEM_OUT:
                toSystemOut(resultAsJsonNode);
                break;
            case EMAIL:
                toEmail(resultAsJsonNode, processOutput.getParameterOutput());
                break;
            case SLACK:
                toSlack(resultAsJsonNode, processOutput.getParameterOutput());
                break;
            case SNMP:
                toSnmp(resultAsJsonNode, processOutput.getParameterOutput());
                break;
        }
    }
}
 
Example 2
Source File: EventSourcedStreams.java    From simplesource with Apache License 2.0
static <K, E, A> KStream<K, AggregateUpdateResult<A>> getAggregateUpdateResults(
        TopologyContext<K, ?, E, A> ctx,
        final KStream<K, CommandEvents<E, A>> eventResultStream) {
    return eventResultStream
            .mapValues((serializedKey, result) -> {
                final Result<CommandError, AggregateUpdate<A>> aggregateUpdateResult = result.eventValue().map(events -> {
                    final BiFunction<AggregateUpdate<A>, ValueWithSequence<E>, AggregateUpdate<A>> reducer =
                            (aggregateUpdate, eventWithSequence) -> new AggregateUpdate<>(
                                    ctx.aggregator().applyEvent(aggregateUpdate.aggregate(), eventWithSequence.value()),
                                    eventWithSequence.sequence()
                            );
                    return events.fold(
                            eventWithSequence -> new AggregateUpdate<>(
                                    ctx.aggregator().applyEvent(result.aggregate(), eventWithSequence.value()),
                                    eventWithSequence.sequence()
                            ),
                            reducer
                    );
                });
                return new AggregateUpdateResult<>(
                        result.commandId(),
                        result.readSequence(),
                        aggregateUpdateResult);
            });
}
 
Example 3
Source File: AbstractKafkaStreamsBinderProcessor.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
private KStream<?, ?> getkStream(BindingProperties bindingProperties, KStream<?, ?> stream, boolean nativeDecoding) {
	if (!nativeDecoding) {
		stream = stream.mapValues((value) -> {
			Object returnValue;
			String contentType = bindingProperties.getContentType();
			if (value != null && !StringUtils.isEmpty(contentType)) {
				returnValue = MessageBuilder.withPayload(value)
						.setHeader(MessageHeaders.CONTENT_TYPE, contentType).build();
			}
			else {
				returnValue = value;
			}
			return returnValue;
		});
	}
	return stream;
}
 
Example 4
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
private void createStreamValidAndTransformAndFilter(String inputTopic, String outputTopic) {
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, JsonNode> streamInput = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    String applicationId = getProcessConsumer().getIdProcess() + ProcessConstants.VALIDATE_PROCESS;
    Counter counter = Metrics.counter("skaetl_nb_transformation_validation_count", Lists.newArrayList(Tag.of("processConsumerName", getProcessConsumer().getName())));
    KStream<String, ValidateData> streamValidation = streamInput.mapValues((value) -> {
        ObjectNode resultTransformer = getGenericTransformator().apply(value, getProcessConsumer());
        ValidateData item = getGenericValidator().process(resultTransformer, getProcessConsumer());
        counter.increment();
        return item;
    }).filter((key, value) -> {
        //Validation
        if (!value.success) {
            //produce to errorTopic
            esErrorRetryWriter.sendToErrorTopic(applicationId, value);
            return false;
        }
        //FILTER
        return processFilter(value);
    });

    KStream<String, JsonNode> streamOfJsonNode = streamValidation.mapValues(value -> value.getJsonValue());
    streamOfJsonNode.to(outputTopic, Produced.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(applicationId, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example 5
Source File: MegabusRefResolver.java    From emodb with Apache License 2.0
@Override
protected Topology topology() {
    StreamsBuilder streamsBuilder = new StreamsBuilder();

    // merge the ref stream with the ref-retry stream. They must be merged into a single stream for ordering purposes
    final KStream<String, List<MegabusRef>> refStream = streamsBuilder.stream(_megabusRefTopic.getName(), Consumed.with(Serdes.String(), new JsonPOJOSerde<>(new TypeReference<List<MegabusRef>>() {})))
            .merge(streamsBuilder.stream(_retryRefTopic.getName(), Consumed.with(Serdes.String(), new JsonPOJOSerde<>(new TypeReference<List<MegabusRef>>() {}))));

    // resolve refs into documents
    KStream<String, ResolutionResult> resolutionResults = refStream.mapValues(value -> {
        try {
            return resolveRefs(value);
        } catch (Throwable t) {
            _errorProcessingMeter.mark();
            throw t;
        }
    });

    resolutionResults
            // extract the resolved documents
            .flatMap((key, value) -> value.getKeyedResolvedDocs())
            // convert deleted documents to null
            .mapValues(doc -> Optional.ofNullable(doc).map(Intrinsic::isDeleted).orElse(true) ? null : doc)
            // send to megabus
            .to(_megabusResolvedTopic.getName(), Produced.with(Serdes.String(), new JsonPOJOSerde<>(new TypeReference<Map<String, Object>>() {})));

    resolutionResults
            // filter out all resolution results without missing refs
            .filterNot((key, result) -> result.getMissingRefs().isEmpty())
            // add timestamp for missing refs
            .mapValues(result -> new MissingRefCollection(result.getMissingRefs(), Date.from(_clock.instant())))
            // send to missing topic
            .to(_missingRefTopic.getName(), Produced.with(Serdes.String(), new JsonPOJOSerde<>(MissingRefCollection.class)));
    return streamsBuilder.build();
}
 
Example 6
Source File: ZMartTopology.java    From kafka-streams-in-action with Apache License 2.0
public static Topology build() {
    
    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    KStream<String,Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


    KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());


    rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));

    purchaseKStream.to("purchases", Produced.with(Serdes.String(),purchaseSerde));

    return streamsBuilder.build();
}
 
Example 7
Source File: EventSourcedStreams.java    From simplesource with Apache License 2.0 4 votes vote down vote up
static <K, A> KStream<K, CommandResponse<K>> getCommandResponses(final KStream<K, AggregateUpdateResult<A>> aggregateUpdateStream) {
    return aggregateUpdateStream
            .mapValues((key, update) ->
                    CommandResponse.of(update.commandId(), key, update.readSequence(), update.updatedAggregateResult().map(AggregateUpdate::sequence))
            );
}
 
Example 8
Source File: Kafka_Streams_TensorFlow_Keras_Example_IntegrationTest.java    From kafka-streams-machine-learning-examples with Apache License 2.0
@Test
public void shouldPredictValues() throws Exception {

	// ########################################################
	// Step 1: Load Keras Model using DeepLearning4J API
	// ########################################################
	String simpleMlp = new ClassPathResource("generatedModels/Keras/simple_mlp.h5").getFile().getPath();
	System.out.println(simpleMlp);

	MultiLayerNetwork model = KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp);

	// Create test data which is sent from Kafka Producer into Input Topic
	List<String> inputValues = Arrays.asList("256,100");

	// ####################################################################
	// Step 2: Configure and start the Kafka Streams processor topology.
	// ####################################################################

	Properties streamsConfiguration = new Properties();
	streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test");
	streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());

	// Configure Kafka Streams Application
	// Specify default (de)serializers for record keys and for record
	// values.
	streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic, where
	// message values represent lines of text (for the sake of this example, we
	// ignore whatever may be stored in the message keys).
	final KStream<String, String> inputEvents = builder.stream(inputTopic);

	// ###############################################################
	// THIS IS WHERE WE DO REAL TIME MODEL INFERENCE FOR EACH EVENT
	// ###############################################################
	inputEvents.foreach((key, value) -> {

		// Transform input values (list of Strings) to expected DL4J parameters (two
		// Integer values):
		String[] valuesAsArray = value.split(",");
		INDArray input = Nd4j.create(Integer.parseInt(valuesAsArray[0]), Integer.parseInt(valuesAsArray[1]));

		// Apply the analytic model ('output' and 'prediction' are fields
		// declared elsewhere in the test class):
		output = model.output(input);
		prediction = output.toString();

	});

	// Transform message: Add prediction result
	KStream<String, Object> transformedMessage = inputEvents.mapValues(value -> "Prediction => " + prediction);

	// Send prediction result to Output Topic
	transformedMessage.to(outputTopic);

	// Start Kafka Streams Application to process new incoming messages from
	// Input Topic
	final KafkaStreams streams = new TestKafkaStreams(builder.build(), streamsConfiguration);
	streams.cleanUp();
	streams.start();
	System.out.println("Prediction Microservice is running...");
	System.out.println("Input to Kafka Topic " + inputTopic + "; Output to Kafka Topic " + outputTopic);

	// ########################################################
	// Step 3: Produce some input data to the input topic.
	// ########################################################

	Properties producerConfig = new Properties();
	producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
	producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
	producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig, new MockTime());

	// ########################################################
	// Step 4: Verify the application's output data.
	// ########################################################

	Properties consumerConfig = new Properties();
	consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test-standard-consumer");
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	List<KeyValue<String, String>> response = IntegrationTestUtils
			.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, 1);
	streams.close();

	System.out.println("VALUE: " + response.get(0).value);

	assertThat(response).isNotNull();
	assertThat(response.get(0).value).doesNotMatch("Value => unknown");
	assertThat(response.get(0).value).contains("0.1000,    0.1000,    0.1000");
}
 
Example 9
Source File: Kafka_Streams_TensorFlow_Image_Recognition_Example.java    From kafka-streams-machine-learning-examples with Apache License 2.0
static Topology getStreamTopology() throws IOException {
	// Create TensorFlow object

	String modelDir = "src/main/resources/generatedModels/CNN_inception5h";

	Path pathGraph = Paths.get(modelDir, "tensorflow_inception_graph.pb");
	byte[] graphDef = Files.readAllBytes(pathGraph);

	Path pathLabels = Paths.get(modelDir, "imagenet_comp_graph_label_strings.txt");
	List<String> labels = Files.readAllLines(pathLabels, Charset.forName("UTF-8"));

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic "ImageInputTopic", where
	// message values represent lines of text
	final KStream<String, String> imageInputLines = builder.stream(imageInputTopic);

	//imageInputLines.print(Printed.toSysOut());

	// Stream Processor (in this case inside mapValues to add custom logic, i.e. apply the
	// analytic model)
	// Transform message: Add prediction information
	KStream<String, Object> transformedMessage = imageInputLines.mapValues(value -> {

		String imageClassification = "unknown";
		String imageProbability = "unknown";

		String imageFile = value;

		Path pathImage = Paths.get(imageFile);
		byte[] imageBytes;
		try {
			imageBytes = Files.readAllBytes(pathImage);

			try (Tensor image = constructAndExecuteGraphToNormalizeImage(imageBytes)) {
				float[] labelProbabilities = executeInceptionGraph(graphDef, image);
				int bestLabelIdx = maxIndex(labelProbabilities);

				imageClassification = labels.get(bestLabelIdx);

				imageProbability = Float.toString(labelProbabilities[bestLabelIdx] * 100f);

				System.out.println(String.format("BEST MATCH: %s (%.2f%% likely)", imageClassification,
						labelProbabilities[bestLabelIdx] * 100f));
			}

		} catch (IOException e) {
			e.printStackTrace();
		}
		return "Prediction: What is the content of this picture? => " + imageClassification
				+ ", probability = " + imageProbability;
	});

	// Send prediction information to Output Topic
	transformedMessage.to(imageOutputTopic);

	return builder.build();
}
 
Example 10
Source File: Kafka_Streams_MachineLearning_H2O_Application.java    From kafka-streams-machine-learning-examples with Apache License 2.0
static Topology getStreamTopology(String modelClassName) throws InstantiationException, IllegalAccessException, ClassNotFoundException {
	// Create H2O object (see gbm_pojo_test.java)
	hex.genmodel.GenModel rawModel;
	rawModel = (hex.genmodel.GenModel) Class.forName(modelClassName).newInstance();
	EasyPredictModelWrapper model = new EasyPredictModelWrapper(rawModel);

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic "AirlineInputTopic", where
	// message values
	// represent lines of text (for the sake of this example, we ignore
	// whatever may be stored
	// in the message keys).
	final KStream<String, String> airlineInputLines = builder.stream(INPUT_TOPIC);

	// Stream Processor (in this case 'mapValues' to add custom logic, i.e. apply
	// the analytic model)
	KStream<String, String> transformedMessage = airlineInputLines.mapValues(value -> {

		// Year,Month,DayofMonth,DayOfWeek,DepTime,CRSDepTime,ArrTime,CRSArrTime,UniqueCarrier,FlightNum,TailNum,ActualElapsedTime,CRSElapsedTime,AirTime,ArrDelay,DepDelay,Origin,Dest,Distance,TaxiIn,TaxiOut,Cancelled,CancellationCode,Diverted,CarrierDelay,WeatherDelay,NASDelay,SecurityDelay,LateAircraftDelay,IsArrDelayed,IsDepDelayed
		// value:
		// YES, probably delayed:
		// 1987,10,14,3,741,730,912,849,PS,1451,NA,91,79,NA,23,11,SAN,SFO,447,NA,NA,0,NA,0,NA,NA,NA,NA,NA,YES,YES
		// NO, probably not delayed:
		// 1999,10,14,3,741,730,912,849,PS,1451,NA,91,79,NA,23,11,SAN,SFO,447,NA,NA,0,NA,0,NA,NA,NA,NA,NA,YES,YES

		if (value != null && !value.equals("")) {
			System.out.println("#####################");
			System.out.println("Flight Input:" + value);

			String[] valuesArray = value.split(",");

			RowData row = new RowData();
			row.put("Year", valuesArray[0]);
			row.put("Month", valuesArray[1]);
			row.put("DayofMonth", valuesArray[2]);
			row.put("DayOfWeek", valuesArray[3]);
			row.put("CRSDepTime", valuesArray[5]);
			row.put("UniqueCarrier", valuesArray[8]);
			row.put("Origin", valuesArray[16]);
			row.put("Dest", valuesArray[17]);
			BinomialModelPrediction p = null;
			try {
				p = model.predictBinomial(row);
			} catch (PredictException e) {
				e.printStackTrace();
			}
			if (p == null) {
				// Prediction failed, so emit no result for this record
				return null;
			}

			System.out.println("Label (aka prediction) is flight departure delayed: " + p.label);
			System.out.print("Class probabilities: ");
			for (int i = 0; i < p.classProbabilities.length; i++) {
				if (i > 0) {
					System.out.print(",");
				}
				System.out.print(p.classProbabilities[i]);
			}
			System.out.println("");
			System.out.println("#####################");
			return "Prediction: Is Airline delayed? => " + p.label;
		}
		//No prediction
		return null;
	});

	// Send prediction information to Output Topic
	transformedMessage.to(OUTPUT_TOPIC);
	return builder.build();
}
 
Example 11
Source File: StreamingApp.java    From Apache-Kafka-1-Cookbook with MIT License
public static void main(String[] args) throws Exception { 

   Properties props = new Properties(); 
   props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streaming_app_id");// 1 
   props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); //2 

   StreamsConfig config = new StreamsConfig(props); // 3 
   StreamsBuilder builder = new StreamsBuilder(); //4 

   KStream<String, String> simpleFirstStream = builder.stream("src-topic"); //5 

   KStream<String, String> upperCasedStream = simpleFirstStream.mapValues(String::toUpperCase); //6 

   upperCasedStream.to("out-topic"); //7 

   // Build the topology only after the processing steps are wired up;
   // calling builder.build() before defining the streams would yield an empty topology.
   Topology topology = builder.build(); 

   KafkaStreams streams = new KafkaStreams(topology, config); 
   

   System.out.println("Streaming App Started"); 
   streams.start(); 
   Thread.sleep(30000);  //8 
   System.out.println("Shutting down the Streaming App"); 
   streams.close(); 
 }
 
Example 12
Source File: KafkaStreamsYellingApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        //Used only to produce data for this application, not typical usage
        MockDataProducer.produceRandomTextData();

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "yelling_app_id");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        StreamsConfig streamsConfig = new StreamsConfig(props);

        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();

        KStream<String, String> simpleFirstStream = builder.stream("src-topic", Consumed.with(stringSerde, stringSerde));


        KStream<String, String> upperCasedStream = simpleFirstStream.mapValues(String::toUpperCase);

        upperCasedStream.to( "out-topic", Produced.with(stringSerde, stringSerde));
        upperCasedStream.print(Printed.<String, String>toSysOut().withLabel("Yelling App"));


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("Hello World Yelling App Started");
        kafkaStreams.start();
        Thread.sleep(35000);
        LOG.info("Shutting down the Yelling APP now");
        kafkaStreams.close();
        MockDataProducer.shutdown();

    }
 
Example 13
Source File: ZMartKafkaStreamsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder streamsBuilder = new StreamsBuilder();

        KStream<String,Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());
        
        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));

        
        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));



        purchaseKStream.print(Printed.<String, Purchase>toSysOut().withLabel("purchases"));
        purchaseKStream.to("purchases", Produced.with(stringSerde,purchaseSerde));


        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();

        KafkaStreams kafkaStreams = new KafkaStreams(streamsBuilder.build(),streamsConfig);
        LOG.info("ZMart First Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 14
Source File: ZMartKafkaStreamsAdvancedReqsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();


        // previous requirements
        KStream<String,Purchase> purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print( Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));



           // selecting a key for storage and filtering out low dollar purchases


        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.print(Printed.<Long, Purchase>toSysOut().withLabel("purchases"));
        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));



         // branching stream for separating out purchases in new departments to their own topics

        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to( "coffee", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[coffee].print(Printed.<String, Purchase>toSysOut().withLabel( "coffee"));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));
        kstreamByDept[electronics].print(Printed.<String, Purchase>toSysOut().withLabel("electronics"));




         // security Requirements to record transactions for certain employee
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) ->
                SecurityDBService.saveRecord(purchase.getPurchaseDate(), purchase.getEmployeeId(), purchase.getItemPurchased());

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);


        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();
        
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("ZMart Advanced Requirements Kafka Streams Application Started");
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example 15
Source File: ZMartKafkaStreamsAdvancedReqsMetricsApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde<String> stringSerde = Serdes.String();

        StreamsBuilder streamsBuilder = new StreamsBuilder();


        /**
         * Previous requirements
         */
        KStream<String,Purchase> purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde));


        KStream<String, RewardAccumulator> rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

        rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde));


        /**
         *  Selecting a key for storage and filtering out low dollar purchases
         */

        KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

        KStream<Long, Purchase> filteredKStream = purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

        filteredKStream.to("purchases", Produced.with(Serdes.Long(),purchaseSerde));


        /**
         * Branching stream for separating out purchases in new departments to their own topics
         */
        Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
        Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

        int coffee = 0;
        int electronics = 1;

        KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

        kstreamByDept[coffee].to("coffee", Produced.with(stringSerde, purchaseSerde));

        kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));



        /**
         * Security Requirements to record transactions for certain employee
         */
        ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) -> { }; // no-op action

        
        purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);

        Topology topology = streamsBuilder.build();


        KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);

        KafkaStreams.StateListener stateListener = (newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                LOG.info("Application has gone from REBALANCING to RUNNING ");
                LOG.info("Topology Layout {}", streamsBuilder.build().describe());
            }

            if (newState == KafkaStreams.State.REBALANCING) {
                LOG.info("Application is entering REBALANCING phase");
            }
        };

        kafkaStreams.setStateListener(stateListener);
        LOG.info("ZMart Advanced Requirements Metrics Application Started");
        kafkaStreams.cleanUp();
        CountDownLatch stopSignal = new CountDownLatch(1);

        Runtime.getRuntime().addShutdownHook(new Thread(()-> {
            LOG.info("Shutting down the Kafka Streams Application now");
            kafkaStreams.close();
            MockDataProducer.shutdown();
            stopSignal.countDown();
        }));



        MockDataProducer.producePurchaseData(DataGenerator.DEFAULT_NUM_PURCHASES, 250, DataGenerator.NUMBER_UNIQUE_CUSTOMERS);
        kafkaStreams.start();

        stopSignal.await();
        LOG.info("All done now, good-bye");
    }
 
Example 16
Source File: ZMartKafkaStreamsAddStateApp.java    From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String,Purchase> purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));



     // adding State to processor
    String rewardsStateStoreName = "rewardsPointsStore";
    RewardsStreamPartitioner streamPartitioner = new RewardsStreamPartitioner();

    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(rewardsStateStoreName);
    StoreBuilder<KeyValueStore<String, Integer>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), Serdes.Integer());

    builder.addStateStore(storeBuilder);

    KStream<String, Purchase> transByCustomerStream = purchaseKStream.through( "customer_transactions", Produced.with(stringSerde, purchaseSerde, streamPartitioner));


    KStream<String, RewardAccumulator> statefulRewardAccumulator = transByCustomerStream.transformValues(() ->  new PurchaseRewardTransformer(rewardsStateStoreName),
            rewardsStateStoreName);

    statefulRewardAccumulator.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
    statefulRewardAccumulator.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));



    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    
    LOG.info("Starting Adding State Example");
    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
    LOG.info("ZMart Adding State Application Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Add State Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}