org.apache.kafka.streams.integration.utils.IntegrationTestUtils Java Examples

The following examples show how to use org.apache.kafka.streams.integration.utils.IntegrationTestUtils. Each example is taken from the open-source project named in the Source File line above it.
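Most of the examples below follow the same pattern: produce test records synchronously, then block until a minimum number of records has been read back from an output topic. The following is a minimal sketch of that round trip in isolation, assuming a JUnit 4 test and an EmbeddedKafkaCluster; note that IntegrationTestUtils, EmbeddedKafkaCluster, and TestUtils live in Kafka's test sources (test jars), and the class and topic names here are illustrative only.

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.test.TestUtils;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;

public class IntegrationTestUtilsRoundTripTest {

    // Single-broker embedded cluster, started before the test class runs.
    @ClassRule
    public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(1);

    @Test
    public void shouldRoundTripValues() throws Exception {
        String topic = "round-trip-topic"; // illustrative topic name
        CLUSTER.createTopic(topic);

        Properties producerConfig = TestUtils.producerConfig(
                CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
        Properties consumerConfig = TestUtils.consumerConfig(
                CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);

        List<String> values = Arrays.asList("a", "b", "c");

        // Produce the records and wait for the send acknowledgements.
        IntegrationTestUtils.produceValuesSynchronously(topic, values, producerConfig, new MockTime());

        // Poll the topic until at least values.size() records have been received.
        List<String> received = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(
                consumerConfig, topic, values.size());

        Assert.assertEquals(values, received);
    }
}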
Example #1
Source File: KafkaStreamsYellingIntegrationTest.java    From kafka-streams-in-action with Apache License 2.0
@Before
public void setUp() {
    Properties properties = StreamsTestUtils.getStreamsConfig("integrationTest",
            EMBEDDED_KAFKA.bootstrapServers(),
            STRING_SERDE_CLASSNAME,
            STRING_SERDE_CLASSNAME,
            new Properties());
    properties.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    
    streamsConfig = new StreamsConfig(properties);

    producerConfig = TestUtils.producerConfig(EMBEDDED_KAFKA.bootstrapServers(),
            StringSerializer.class,
            StringSerializer.class);

    consumerConfig = TestUtils.consumerConfig(EMBEDDED_KAFKA.bootstrapServers(),
            StringDeserializer.class,
            StringDeserializer.class);
}
 
Example #2
Source File: KafkaStorageIT.java    From zipkin-storage-kafka with Apache License 2.0
@Test void should_aggregate() throws Exception {
  // Given: a set of incoming spans
  Span parent = Span.newBuilder().traceId("a").id("a").name("op_a").kind(Span.Kind.CLIENT)
    .localEndpoint(Endpoint.newBuilder().serviceName("svc_a").build())
    .timestamp(MILLISECONDS.toMicros(System.currentTimeMillis())).duration(10)
    .build();
  Span child = Span.newBuilder().traceId("a").id("b").name("op_b").kind(Span.Kind.SERVER)
    .localEndpoint(Endpoint.newBuilder().serviceName("svc_b").build())
    .timestamp(MILLISECONDS.toMicros(System.currentTimeMillis())).duration(2)
    .build();
  final SpanConsumer spanConsumer = storage.spanConsumer();
  // When: are consumed by storage
  spanConsumer.accept(Arrays.asList(parent, child)).execute();
  storage.getProducer().flush();
  // Then: they are partitioned
  IntegrationTestUtils.waitUntilMinRecordsReceived(
    consumerConfig, storageBuilder.spanPartitioning.spansTopic, 1, 10000);
  // Given: some time for stream processes to kick in
  Thread.sleep(traceTimeout.toMillis() * 2);
  // Given: another span to move 'event time' forward
  Span another = Span.newBuilder().traceId("c").id("d").name("op_a").kind(Span.Kind.SERVER)
    .localEndpoint(Endpoint.newBuilder().serviceName("svc_b").build())
    .timestamp(MILLISECONDS.toMicros(System.currentTimeMillis())).duration(2)
    .build();
  // When: published
  spanConsumer.accept(Collections.singletonList(another)).execute();
  storage.getProducer().flush();
  // Then: a trace is published
  IntegrationTestUtils.waitUntilMinRecordsReceived(
    consumerConfig, storageBuilder.spanAggregation.spansTopic, 1, 10000);
  IntegrationTestUtils.waitUntilMinRecordsReceived(
    consumerConfig, storageBuilder.spanAggregation.traceTopic, 1, 30000);
  // Then: and a dependency link created
  IntegrationTestUtils.waitUntilMinRecordsReceived(
    consumerConfig, storageBuilder.spanAggregation.dependencyTopic, 1, 10000);
}
 
Example #3
Source File: AppEmbeddedTest.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
@AfterAll
static void tearDown() throws IOException {
    streams.close();
    try {
        // Clean up the topics and local state created by the test.
        IntegrationTestUtils.cleanStateAfterTest(kafkaCluster, streams);
    } catch (Exception e) {
        // If the test-utility cleanup fails, remove the local state store directory directly.
        FileUtils.deleteDirectory(new File(AppConfigs.stateStoreLocationUT));
    }

}
 
Example #4
Source File: AbstractIntegrationTest.java    From kafka-graphs with Apache License 2.0
@After
public void cleanup() throws Exception {
    if (streams != null) {
        streams.close();
    }
    if (streamsConfiguration != null) {
        IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
    }
}
 
Example #5
Source File: KafkaStorageIT.java    From zipkin-storage-kafka with Apache License 2.0
@Test void should_returnTraces_whenQuery() throws Exception {
  // Given: a trace prepared to be published
  Span parent = Span.newBuilder().traceId("a").id("a").name("op_a").kind(Span.Kind.CLIENT)
    .localEndpoint(Endpoint.newBuilder().serviceName("svc_a").build())
    .remoteEndpoint(Endpoint.newBuilder().serviceName("svc_b").build())
    .timestamp(MILLISECONDS.toMicros(TODAY)).duration(10)
    .build();
  Span child = Span.newBuilder().traceId("a").id("b").name("op_b").kind(Span.Kind.SERVER)
    .localEndpoint(Endpoint.newBuilder().serviceName("svc_b").build())
    .timestamp(MILLISECONDS.toMicros(TODAY)).duration(2)
    .build();
  Span other = Span.newBuilder().traceId("c").id("c").name("op_c").kind(Span.Kind.SERVER)
    .localEndpoint(Endpoint.newBuilder().serviceName("svc_c").build())
    .timestamp(MILLISECONDS.toMicros(TODAY) + 10).duration(8)
    .build();
  List<Span> spans = Arrays.asList(parent, child);
  // When: and stores running
  ServiceAndSpanNames serviceAndSpanNames = storage.serviceAndSpanNames();
  // When: been published
  tracesProducer.send(new ProducerRecord<>(storageBuilder.traceStorage.spansTopic, parent.traceId(), spans));
  tracesProducer.send(new ProducerRecord<>(storageBuilder.traceStorage.spansTopic, other.traceId(),
    Collections.singletonList(other)));
  tracesProducer.flush();
  // Then: stored
  IntegrationTestUtils.waitUntilMinRecordsReceived(
    consumerConfig, storageBuilder.traceStorage.spansTopic, 2, 10000);
  // Then: services names are searchable
  await().atMost(100, TimeUnit.SECONDS).until(() -> {
    List<List<Span>> traces = storage.spanStore().getTraces(QueryRequest.newBuilder()
      .endTs(TODAY + 1)
      .lookback(Duration.ofSeconds(30).toMillis())
      .serviceName("svc_a")
      .limit(10)
      .build())
      .execute();
    return (1 == traces.size()) &&
      (traces.get(0).size() == 2); // Trace is found and has two spans
  });
  List<List<Span>> filteredTraces =
    storage.spanStore().getTraces(QueryRequest.newBuilder()
      .endTs(TODAY + 1)
      .lookback(Duration.ofMinutes(1).toMillis())
      .limit(1)
      .build())
      .execute();
  assertThat(filteredTraces).hasSize(1);
  assertThat(filteredTraces.get(0)).hasSize(1); // last trace is returned first
  List<String> services = serviceAndSpanNames.getServiceNames().execute();
  assertThat(services).hasSize(3);
  List<String> spanNames = serviceAndSpanNames.getSpanNames("svc_a").execute();
  assertThat(spanNames).hasSize(1); // Service names have one span name
  List<String> remoteServices = serviceAndSpanNames.getRemoteServiceNames("svc_a").execute();
  assertThat(remoteServices).hasSize(1); // And one remote service name
  List<List<Span>> manyTraces =
    storage.traces().getTraces(Arrays.asList(parent.traceId(), other.traceId())).execute();
  assertThat(manyTraces).hasSize(2);
}
 
Example #6
Source File: KafkaStreamsYellingIntegrationTest.java    From kafka-streams-in-action with Apache License 2.0
@Test
public void shouldYellFromMultipleTopics() throws Exception {

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    streamsBuilder.<String, String>stream(Pattern.compile("yell.*"))
            .mapValues(String::toUpperCase)
            .to(OUT_TOPIC);

    kafkaStreams = new KafkaStreams(streamsBuilder.build(), streamsConfig);
    kafkaStreams.start();

    List<String> valuesToSendList = Arrays.asList("this", "should", "yell", "at", "you");
    List<String> expectedValuesList = valuesToSendList.stream()
                                                      .map(String::toUpperCase)
                                                      .collect(Collectors.toList());

    IntegrationTestUtils.produceValuesSynchronously(YELL_A_TOPIC,
                                                    valuesToSendList,
                                                    producerConfig,
                                                    mockTime);
    int expectedNumberOfRecords = 5;
    List<String> actualValues = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfig,
                                                                                       OUT_TOPIC,
                                                                                       expectedNumberOfRecords);

    assertThat(actualValues, equalTo(expectedValuesList));

    EMBEDDED_KAFKA.createTopic(YELL_B_TOPIC);

    valuesToSendList = Arrays.asList("yell", "at", "you", "too");
    IntegrationTestUtils.produceValuesSynchronously(YELL_B_TOPIC,
                                                    valuesToSendList,
                                                    producerConfig,
                                                    mockTime);

    expectedValuesList = valuesToSendList.stream().map(String::toUpperCase).collect(Collectors.toList());

    expectedNumberOfRecords = 4;
    actualValues = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfig,
                                                                          OUT_TOPIC,
                                                                          expectedNumberOfRecords);

    assertThat(actualValues, equalTo(expectedValuesList));

}
 
Example #7
Source File: AppEmbeddedTest.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
@Test
@DisplayName("End to End flow testing with embedded cluster")
void testImpressionFlow() throws ExecutionException, InterruptedException {
    //Setup data for Impressions
    List<KeyValue<String, AdImpression>> impList = new ArrayList<>();
    impList.add(KeyValue.pair("100001", new AdImpression()
        .withImpressionID("100001").withCampaigner("ABC Ltd")));
    impList.add(KeyValue.pair("100002", new AdImpression()
        .withImpressionID("100002").withCampaigner("ABC Ltd")));

    //Setup a producer for Impressions
    Properties impProperties = TestUtils.producerConfig(
        kafkaCluster.bootstrapServers(),
        AppSerdes.String().serializer().getClass(),
        AppSerdes.AdImpression().serializer().getClass());

    IntegrationTestUtils.produceKeyValuesSynchronously(
        AppConfigs.impressionTopic,
        impList,
        impProperties,
        Time.SYSTEM);

    //Setup data for Clicks
    List<KeyValue<String, AdClick>> clkList = new ArrayList<>();
    clkList.add(KeyValue.pair("100001", new AdClick()
        .withImpressionID("100001").withCampaigner("ABC Ltd")));

    //Setup a producer for Clicks
    Properties clkProperties = TestUtils.producerConfig(
        kafkaCluster.bootstrapServers(),
        AppSerdes.String().serializer().getClass(),
        AppSerdes.AdClick().serializer().getClass());

    IntegrationTestUtils.produceKeyValuesSynchronously(
        AppConfigs.clicksTopic,
        clkList,
        clkProperties,
        Time.SYSTEM);

    Properties serdeProps = new Properties();
    serdeProps.put("specific.class.name", CampaignPerformance.class);

    Properties cmpProperties = TestUtils.consumerConfig(
        kafkaCluster.bootstrapServers(),
        CONSUMER_GROUP,
        AppSerdes.String().deserializer().getClass(),
        AppSerdes.CampaignPerformance().deserializer().getClass(),
        serdeProps
    );

    List<KeyValue<String, CampaignPerformance>> outputList =
        IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
            cmpProperties, AppConfigs.outputTopic, 1
        );

    outputList.forEach((record) -> {
            logger.info(record.value);
            assertAll(() -> assertEquals("ABC Ltd", record.value.getCampaigner()),
                () -> assertEquals("2", record.value.getAdImpressions().toString()),
                () -> assertEquals("1", record.value.getAdClicks().toString())
            );
        }
    );

}
 
Example #8
Source File: Kafka_Streams_TensorFlow_Keras_Example_IntegrationTest.java    From kafka-streams-machine-learning-examples with Apache License 2.0
@Test
public void shouldPredictValues() throws Exception {

	// ########################################################
	// Step 1: Load Keras Model using DeepLearning4J API
	// ########################################################
	String simpleMlp = new ClassPathResource("generatedModels/Keras/simple_mlp.h5").getFile().getPath();
	System.out.println(simpleMlp);

	MultiLayerNetwork model = KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp);

	// Create test data which is sent from Kafka Producer into Input Topic
	List<String> inputValues = Arrays.asList("256,100");

	// ####################################################################
	// Step 2: Configure and start the Kafka Streams processor topology.
	// ####################################################################

	Properties streamsConfiguration = new Properties();
	streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test");
	streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());

	// Configure Kafka Streams Application
	// Specify default (de)serializers for record keys and for record
	// values.
	streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

	// In the subsequent lines we define the processing topology of the
	// Streams application.
	final StreamsBuilder builder = new StreamsBuilder();

	// Construct a `KStream` from the input topic, where
	// message values represent lines of text (for the sake of this example, we
	// ignore whatever may be stored in the message keys).
	final KStream<String, String> inputEvents = builder.stream(inputTopic);

	// ###############################################################
	// THIS IS WHERE WE DO REAL TIME MODEL INFERENCE FOR EACH EVENT
	// ###############################################################
	inputEvents.foreach((key, value) -> {

		// Transform input values (list of Strings) to expected DL4J parameters (two
		// Integer values):
		String[] valuesAsArray = value.split(",");
		INDArray input = Nd4j.create(Integer.parseInt(valuesAsArray[0]), Integer.parseInt(valuesAsArray[1]));

		// Apply the analytic model:
		output = model.output(input);
		prediction = output.toString();

	});

	// Transform message: Add prediction result
	KStream<String, Object> transformedMessage = inputEvents.mapValues(value -> "Prediction => " + prediction);

	// Send prediction result to Output Topic
	transformedMessage.to(outputTopic);

	// Start Kafka Streams Application to process new incoming messages from
	// Input Topic
	final KafkaStreams streams = new TestKafkaStreams(builder.build(), streamsConfiguration);
	streams.cleanUp();
	streams.start();
	System.out.println("Prediction Microservice is running...");
	System.out.println("Input to Kafka Topic " + inputTopic + "; Output to Kafka Topic " + outputTopic);

	// ########################################################
	// Step 3: Produce some input data to the input topic.
	// ########################################################

	Properties producerConfig = new Properties();
	producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
	producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
	producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
	IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig, new MockTime());

	// ########################################################
	// Step 4: Verify the application's output data.
	// ########################################################

	Properties consumerConfig = new Properties();
	consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
	consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG,
			"kafka-streams-tensorflow-keras-integration-test-standard-consumer");
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	List<KeyValue<String, String>> response = IntegrationTestUtils
			.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, 1);
	streams.close();

	System.out.println("VALUE: " + response.get(0).value);

	assertThat(response).isNotNull();
	assertThat(response.get(0).value).doesNotMatch("Value => unknown");
	assertThat(response.get(0).value).contains("0.1000,    0.1000,    0.1000");
}