Java Code Examples for org.apache.kafka.streams.kstream.KStream#foreach()

The following examples show how to use org.apache.kafka.streams.kstream.KStream#foreach(). They are taken from open-source projects; each example notes its source file, project, and license.
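For orientation before the project examples: foreach() is a terminal operation that applies a side-effecting action to every record and returns void. The minimal sketch below is illustrative only; the topic name "words", the application id, and the broker address are assumptions, not taken from any of the projects that follow.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;

public class ForeachSketch {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "foreach-sketch");   // illustrative id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed local broker
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> words = builder.stream("words"); // "words" is a hypothetical topic

        // Apply a side effect to every record; foreach() returns void,
        // so no further processors can be chained onto this branch.
        words.foreach((key, value) -> System.out.println(key + " => " + value));

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}
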
Example 1
Source File: QueuedSchemaKStream.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private QueuedSchemaKStream(
    final Schema schema,
    final KStream kstream,
    final Field keyField,
    final List<SchemaKStream> sourceSchemaKStreams,
    final Type type,
    final FunctionRegistry functionRegistry,
    final Optional<Integer> limit,
    final OutputNode outputNode,
    final SchemaRegistryClient schemaRegistryClient
) {
  super(
      schema,
      kstream,
      keyField,
      sourceSchemaKStreams,
      type,
      functionRegistry,
      schemaRegistryClient
  );
  setOutputNode(outputNode);
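  // Route every record that flows through this stream into rowQueue, honoring the optional row limit.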
  kstream.foreach(new QueuedSchemaKStream.QueuePopulator(rowQueue, limit));
}
 
Example 2
Source File: HelloStreams.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Application entry point
 *
 * @param args topicName (Name of the Kafka topic to read)
 */

public static void main(String[] args) {

    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "HelloStreams");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, String> kStream = builder.stream(topicName);
    kStream.foreach((k, v) -> System.out.println("Key = " + k + " Value = " + v));
    //kStream.peek((k, v) -> System.out.println("Key = " + k + " Value = " + v));
    Topology topology = builder.build();

    KafkaStreams streams = new KafkaStreams(topology, props);

    logger.info("Starting the stream");
    streams.start();

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        logger.info("Stopping Stream");
        streams.close();
    }));
}
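Note that foreach() terminates this branch of the topology: the println above consumes each record without forwarding it. The commented-out peek() variant would print the same output while still passing records downstream for further processing.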
 
Example 3
Source File: PlayerCommandConnector.java    From football-events with MIT License
public void build(StreamsBuilder builder) {
    KStream<byte[], JsonNode> playerSourceStream = builder.stream(
            CONNECT_PLAYERS_TOPIC, Consumed.with(Serdes.ByteArray(), new JsonNodeSerde()))
            .filter((id, json) -> creationOrSnapshot(json));

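    // Log each filtered connect record via the debug() helper (side effect only).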
    playerSourceStream.foreach(this::debug);

    KStream<String, PlayerStartedCareer> playerReadyStream = playerSourceStream
            .map((id, json) -> {
                PlayerStartedCareer event = createEvent(json);
                return KeyValue.pair(event.getAggId(), event);
            });

    playerReadyStream.to(PLAYER_STARTED_CAREER_TOPIC, Produced.with(
            Serdes.String(), new JsonPojoSerde<>(PlayerStartedCareer.class)));
}
 
Example 4
Source File: KafkaStreamsStateStoreIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void process(@Input("input1") KStream<Object, Product> input, @Input("input2") KStream<Object, Product> input2) {

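	// The processor below attaches the "mystate" window store declared above and flags each incoming record as processed.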
	input.process(() -> new Processor<Object, Product>() {

		@Override
		public void init(ProcessorContext processorContext) {
			state = (WindowStore) processorContext.getStateStore("mystate");
		}

		@Override
		public void process(Object s, Product product) {
			processed = true;
		}

		@Override
		public void close() {
			if (state != null) {
				state.close();
			}
		}
	}, "mystate");

	// Simple use of input2; we are not using it for anything other than triggering some test behavior.
	input2.foreach((key, value) -> { });
}
 
Example 5
Source File: KafkaStreamsLiveTest.java    From tutorials with MIT License
@Test
@Ignore("requires a Kafka broker running locally")
public void shouldTestKafkaStreams() throws InterruptedException {
    // given
    String inputTopic = "inputTopic";

    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    // when
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> textLines = builder.stream(inputTopic);
    Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);

    KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count();

    KStream<String, Long> wordCountsStream = wordCounts.toStream();
    wordCountsStream.foreach((word, count) -> System.out.println("word: " + word + " -> " + count));

    String outputTopic = "outputTopic";
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    wordCountsStream.to(outputTopic, Produced.with(stringSerde, longSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();

    // then
    Thread.sleep(30000);
    streams.close();
}
 
Example 6
Source File: Kafka_Streams_TensorFlow_Keras_Example_IntegrationTest.java    From kafka-streams-machine-learning-examples with Apache License 2.0
@Test
public void shouldPredictValues() throws Exception {

	// ########################################################
	// Step 1: Load Keras Model using DeepLearning4J API
	// ########################################################
	String simpleMlp = new ClassPathResource("generatedModels/Keras/simple_mlp.h5").getFile().getPath();
	System.out.println(simpleMlp.toString());

	MultiLayerNetwork model = KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp);

	// Create test data which is sent from Kafka Producer into Input Topic
	List<String> inputValues = Arrays.asList("256,100");

	// ####################################################################
	// Step 2: Configure and start the Kafka Streams processor topology.
	// ####################################################################

	Properties streamsConfiguration = new Properties();
	streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test");
	streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());

	// Configure Kafka Streams Application
	// Specify default (de)serializers for record keys and for record
	// values.
	streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic, where
	// message values represent lines of text (for the sake of this example, we
	// ignore whatever may be stored in the message keys).
	final KStream<String, String> inputEvents = builder.stream(inputTopic);

	// ###############################################################
	// THIS IS WHERE WE DO REAL TIME MODEL INFERENCE FOR EACH EVENT
	// ###############################################################
	inputEvents.foreach((key, value) -> {

		// Transform input values (list of Strings) to expected DL4J parameters (two
		// Integer values):
		String[] valuesAsArray = value.split(",");
		INDArray input = Nd4j.create(Integer.parseInt(valuesAsArray[0]), Integer.parseInt(valuesAsArray[1]));

		// Apply the analytic model:
		output = model.output(input);
		prediction = output.toString();

	});

	// Transform message: Add prediction result
	KStream<String, Object> transformedMessage = inputEvents.mapValues(value -> "Prediction => " + prediction);

	// Send prediction result to Output Topic
	transformedMessage.to(outputTopic);

	// Start Kafka Streams Application to process new incoming messages from
	// Input Topic
	final KafkaStreams streams = new TestKafkaStreams(builder.build(), streamsConfiguration);
	streams.cleanUp();
	streams.start();
	System.out.println("Prediction Microservice is running...");
	System.out.println("Input to Kafka Topic " + inputTopic + "; Output to Kafka Topic " + outputTopic);

	// ########################################################
	// Step 3: Produce some input data to the input topic.
	// ########################################################

	Properties producerConfig = new Properties();
	producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
	producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
	producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig, new MockTime());

	// ########################################################
	// Step 4: Verify the application's output data.
	// ########################################################

	Properties consumerConfig = new Properties();
	consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test-standard-consumer");
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	List<KeyValue<String, String>> response = IntegrationTestUtils
			.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, 1);
	streams.close();

	System.out.println("VALUE: " + response.get(0).value);

	assertThat(response).isNotNull();
	assertThat(response.get(0).value).doesNotMatch("Value => unknown");
	assertThat(response.get(0).value).contains("0.1000,    0.1000,    0.1000");
}