com.salesforce.kafka.test.KafkaTestUtils Java Examples

The following examples show how to use com.salesforce.kafka.test.KafkaTestUtils. Each example is drawn from an open source project; the source file, project, and license are noted above the code.
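Most of the examples below obtain a KafkaTestUtils instance from a shared test resource rather than constructing one directly. For reference, here is a minimal sketch of that setup, assuming the JUnit 4 rule shipped with Salesforce's kafka-junit library (a JUnit 5 variant uses @RegisterExtension instead; the base-class name here is hypothetical):

import com.salesforce.kafka.test.KafkaTestUtils;
import com.salesforce.kafka.test.junit4.SharedKafkaTestResource;
import org.junit.ClassRule;

public abstract class BaseKafkaTest {
    // Boots an embedded Kafka cluster once, shared by every test in the class.
    @ClassRule
    public static final SharedKafkaTestResource sharedKafkaTestResource = new SharedKafkaTestResource();

    // The "simple accessor" pattern used throughout the examples below.
    protected KafkaTestUtils getKafkaTestUtils() {
        return sharedKafkaTestResource.getKafkaTestUtils();
    }
}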
Example #1
Source File: Kafka011Test.java    From Alink with Apache License 2.0
@Test
public void testKafka011Sink() throws Exception {
    Row[] rows = new Row[]{
        Row.of(1L, 1L, 1.0),
        Row.of(2L, 2L, 1.0),
        Row.of(2L, 3L, 1.0),
    };

    final String topicName = "topic_1";
    KafkaTestUtils kafkaTestUtils = SHARED_KAFKA_TEST_RESOURCE.getKafkaTestUtils();
    kafkaTestUtils.createTopic(topicName, 1, (short) 1);

    StreamOperator data = new MemSourceStreamOp(rows, new String[]{"f1", "f2", "f3"});

    StreamOperator sink = new Kafka011SinkStreamOp()
        .setBootstrapServers(SHARED_KAFKA_TEST_RESOURCE.getKafkaConnectString())
        .setDataFormat("csv")
        .setTopic(topicName);

    data.link(sink);
    StreamOperator.execute();

    // All three rows should have been written to the topic.
    int s = kafkaTestUtils.consumeAllRecordsFromTopic(topicName).size();
    Assert.assertEquals(3, s);
}
 
Example #2
Source File: KafkaTest.java    From Alink with Apache License 2.0
@Test
public void testKafkaSink() throws Exception {
    Row[] rows = new Row[]{
        Row.of(1L, 1L, 1.0),
        Row.of(2L, 2L, 1.0),
        Row.of(2L, 3L, 1.0),
    };

    final String topicName = "topic_1";
    KafkaTestUtils kafkaTestUtils = SHARED_KAFKA_TEST_RESOURCE.getKafkaTestUtils();
    kafkaTestUtils.createTopic(topicName, 1, (short) 1);

    StreamOperator data = new MemSourceStreamOp(rows, new String[]{"f1", "f2", "f3"});

    StreamOperator sink = new KafkaSinkStreamOp()
        .setBootstrapServers(SHARED_KAFKA_TEST_RESOURCE.getKafkaConnectString())
        .setDataFormat("csv")
        .setTopic(topicName);

    data.link(sink);
    StreamOperator.execute();

    // All three rows should have been written to the topic.
    int s = kafkaTestUtils.consumeAllRecordsFromTopic(topicName).size();
    Assert.assertEquals(3, s);
}
 
Example #3
Source File: Kafka011Test.java    From Alink with Apache License 2.0
@Test
public void testKafka011Source() throws Exception {
    final String topicName = "topic_2";
    KafkaTestUtils kafkaTestUtils = SHARED_KAFKA_TEST_RESOURCE.getKafkaTestUtils();
    kafkaTestUtils.createTopic(topicName, 1, (short) 1);
    kafkaTestUtils.produceRecords(4, topicName, 0);

    StreamOperator data = new Kafka011SourceStreamOp()
        .setBootstrapServers(SHARED_KAFKA_TEST_RESOURCE.getKafkaConnectString())
        .setGroupId("g")
        .setStartupMode("earliest")
        .setTopic(topicName);

    // The source operator exposes a fixed five-column schema.
    Assert.assertEquals(5, data.getColNames().length);
}
 
Example #4
Source File: KafkaTest.java    From Alink with Apache License 2.0
@Test
public void testKafkaSource() throws Exception {
    final String topicName = "topic_2";
    KafkaTestUtils kafkaTestUtils = SHARED_KAFKA_TEST_RESOURCE.getKafkaTestUtils();
    kafkaTestUtils.createTopic(topicName, 1, (short) 1);
    kafkaTestUtils.produceRecords(4, topicName, 0);

    StreamOperator data = new KafkaSourceStreamOp()
        .setBootstrapServers(SHARED_KAFKA_TEST_RESOURCE.getKafkaConnectString())
        .setGroupId("g")
        .setStartupMode("earliest")
        .setTopic(topicName);

    // The source operator exposes a fixed five-column schema.
    Assert.assertEquals(5, data.getColNames().length);
}
 
Example #5
Source File: DevCluster.java    From kafka-webview with MIT License
/**
 * Fire up a new thread running an endless consumer that reads from the given topics.
 * @param topicNames Topics to consume from.
 * @param utils KafkaTestUtils instance.
 */
private static void runEndlessConsumer(final Collection<String> topicNames, final KafkaTestUtils utils) {
    final Thread consumerThread = new Thread(() -> {
        // Start a consumer
        final Properties properties = new Properties();
        properties.put("max.poll.records", 37);
        properties.put("group.id", "MyConsumerId");

        try (final KafkaConsumer<String, String> consumer
                 = utils.getKafkaConsumer(StringDeserializer.class, StringDeserializer.class, properties)) {

            consumer.subscribe(topicNames);
            do {
                final ConsumerRecords<String, String> records = consumer.poll(1000);
                consumer.commitSync();

                logger.info("Consumed {} records", records.count());

                if (records.isEmpty()) {
                    consumer.seekToBeginning(consumer.assignment());
                    consumer.commitSync();
                }
                Thread.sleep(1000);
            } while (true);

        } catch (final InterruptedException e) {
            return;
        }
    });

    logger.info("Starting endless consumer for topic {}", topicNames);
    consumerThread.setName("Endless consumer for topic " + topicNames);
    consumerThread.start();
}
 
Example #6
Source File: ConsumerTest.java    From storm-dynamic-spout with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
private KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #7
Source File: KafkaTestCase.java    From jstarcraft-core with Apache License 2.0
@Test
public void test() throws Exception {
    KafkaTestUtils utility = new KafkaTestUtils(server);

    // Create a topic
    final String topicName = "KafkaTestCase";
    utility.createTopic(topicName, 1, (short) 1);
    final int partitionId = 0;
    // Define our message
    final String messageKey = "key";
    final String messageValue = "value";

    // Create producer
    try (final KafkaProducer<String, String> kafkaProducer = utility.getKafkaProducer(StringSerializer.class, StringSerializer.class)) {
        // Define the record we want to produce
        final ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topicName, partitionId, messageKey, messageValue);
        // Produce it & wait for it to complete.
        final Future<RecordMetadata> future = kafkaProducer.send(producerRecord);
        kafkaProducer.flush();
        while (!future.isDone()) {
            Thread.sleep(500L);
        }
    }

    // Create consumer
    try (final KafkaConsumer<String, String> kafkaConsumer = utility.getKafkaConsumer(StringDeserializer.class, StringDeserializer.class)) {
        final List<TopicPartition> topicPartitionList = new ArrayList<>();
        for (final PartitionInfo partitionInfo : kafkaConsumer.partitionsFor(topicName)) {
            topicPartitionList.add(new TopicPartition(partitionInfo.topic(), partitionInfo.partition()));
        }
        kafkaConsumer.assign(topicPartitionList);
        kafkaConsumer.seekToBeginning(topicPartitionList);
        // Pull records from kafka, keep polling until we get nothing back
        ConsumerRecords<String, String> consumerRecords;
        do {
            consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(2));
            for (ConsumerRecord<String, String> record : consumerRecords) {
                // Validate
                Assert.assertEquals(messageKey, record.key());
                Assert.assertEquals(messageValue, record.value());
            }
        } while (!consumerRecords.isEmpty());
    }
}
 
Example #8
Source File: KafkaConsumerSpoutTest.java    From storm-dynamic-spout with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
private KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #9
Source File: SidelineSpoutTest.java    From storm-dynamic-spout with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
private KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #10
Source File: DevCluster.java    From kafka-webview with MIT License
/**
 * Main entry point
 * @param args command line args.
 */
public static void main(final String[] args) throws Exception {
    // Parse command line arguments
    final CommandLine cmd = parseArguments(args);

    // Right now we accept one parameter, the number of nodes in the cluster.
    final int clusterSize = Integer.parseInt(cmd.getOptionValue("size"));
    logger.info("Starting up kafka cluster with {} brokers", clusterSize);

    // Default to plaintext listener.
    BrokerListener listener = new PlainListener();

    final URL trustStore = DevCluster.class.getClassLoader().getResource("kafka.truststore.jks");
    final URL keyStore = DevCluster.class.getClassLoader().getResource("kafka.keystore.jks");

    final Properties properties = new Properties();
    if (cmd.hasOption("sasl") && cmd.hasOption("ssl")) {
        listener = new SaslSslListener()
            // SSL Options
            .withClientAuthRequired()
            .withTrustStoreLocation(trustStore.getFile())
            .withTrustStorePassword("password")
            .withKeyStoreLocation(keyStore.getFile())
            .withKeyStorePassword("password")
            .withKeyPassword("password")
            // SASL Options.
            .withUsername("kafkaclient")
            .withPassword("client-secret");
    } else if (cmd.hasOption("sasl")) {
        listener = new SaslPlainListener()
            .withUsername("kafkaclient")
            .withPassword("client-secret");
    } else if (cmd.hasOption("ssl")) {
        listener = new SslListener()
            .withClientAuthRequired()
            .withTrustStoreLocation(trustStore.getFile())
            .withTrustStorePassword("password")
            .withKeyStoreLocation(keyStore.getFile())
            .withKeyStorePassword("password")
            .withKeyPassword("password");
    }

    // Create a test cluster
    final KafkaTestCluster kafkaTestCluster = new KafkaTestCluster(
        clusterSize,
        properties,
        Collections.singletonList(listener)
    );

    // Start the cluster.
    kafkaTestCluster.start();

    // Create topics
    String[] topicNames = null;
    if (cmd.hasOption("topic")) {
        topicNames = cmd.getOptionValues("topic");

        for (final String topicName : topicNames) {
            final KafkaTestUtils utils = new KafkaTestUtils(kafkaTestCluster);
            utils.createTopic(topicName, clusterSize, (short) clusterSize);

            // Publish some data into that topic
            for (int partition = 0; partition < clusterSize; partition++) {
                utils.produceRecords(1000, topicName, partition);
            }
        }
    }

    // Log topic names created.
    if (topicNames != null) {
        logger.info("Created topics: {}", String.join(", ", topicNames));
    }

    // Log how to connect to cluster brokers.
    kafkaTestCluster
        .getKafkaBrokers()
        .stream()
        .forEach((broker) -> {
            logger.info("Started broker with Id {} at {}", broker.getBrokerId(), broker.getConnectString());
        });

    if (topicNames != null && topicNames.length > 0) {
        final KafkaTestUtils testUtils = new KafkaTestUtils(kafkaTestCluster);
        if (cmd.hasOption("consumer")) {
            runEndlessConsumer(Arrays.asList(topicNames), testUtils);
        }

        if (cmd.hasOption("producer")) {
            runEndlessProducer(Arrays.asList(topicNames), testUtils);
        }
    }

    // Log cluster connect string.
    logger.info("Cluster started at: {}", kafkaTestCluster.getKafkaConnectString());

    // Wait forever.
    Thread.currentThread().join();
}
 
Example #11
Source File: KafkaSaslClusterTests.java    From kafka-webview with MIT License
@Override
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #12
Source File: KafkaSslClusterTests.java    From kafka-webview with MIT License
@Override
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #13
Source File: KafkaSaslSslClusterTests.java    From kafka-webview with MIT License
@Override
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #14
Source File: SharedKafkaTestResourceWithSaslSslTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #15
Source File: SharedKafkaTestResourceTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #16
Source File: SharedKafkaTestResourceWithSslTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #17
Source File: SharedKafkaTestResourceWithSaslPlainTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
protected KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #18
Source File: KafkaTestUtilsTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
private KafkaTestUtils getKafkaTestUtils() {
    return sharedKafkaTestResource.getKafkaTestUtils();
}
 
Example #19
Source File: KafkaTestUtilsTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple example of how to produce records into a topic.
 */
@Test
public void testProducerAndConsumer() {
    final int numberOfRecords = 12;

    // Create our utility class
    final KafkaTestUtils kafkaTestUtils = getKafkaTestUtils();

    // Get a producer
    try (final KafkaProducer<String, String> producer
        = kafkaTestUtils.getKafkaProducer(StringSerializer.class, StringSerializer.class)) {
        
        // Produce 12 records
        for (int recordCount = 0; recordCount < numberOfRecords; recordCount++) {
            // Create a record.
            final ProducerRecord<String, String> record = new ProducerRecord<>(
                topicName,
                "My Key " + recordCount,
                "My Value " + recordCount
            );
            producer.send(record);
        }
        // Ensure messages are flushed.
        producer.flush();
    }

    // Consume records back out
    final List<ConsumerRecord<String, String>> consumerRecords
        = kafkaTestUtils.consumeAllRecordsFromTopic(topicName, StringDeserializer.class, StringDeserializer.class);

    assertNotNull("Should have non-null result.", consumerRecords);
    assertEquals("Should have 10 records.", numberOfRecords, consumerRecords.size());

    // Log the records we found.
    for (final ConsumerRecord<String, String> consumerRecord : consumerRecords) {
        logger.info(
            "Found Key: {} on Partition: {} with Value: {}",
            consumerRecord.key(),
            consumerRecord.partition(),
            consumerRecord.value()
        );
    }
}
 
Example #20
Source File: KStreamBuilderSmokeTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Integration test validates that streams can be used against KafkaTestServer.
 */
@Test
void testStreamConsumer() throws Exception {
    // Define topic to test with.
    final String inputTopic = "stream-input-topic" + System.currentTimeMillis();
    final String outputTopic = "stream-output-topic" + System.currentTimeMillis();

    // Define how many records
    final int numberOfRecords = 25;
    final int partitionId = 0;

    // Tracks how many records the Stream consumer has processed.
    final AtomicInteger recordCounter = new AtomicInteger(0);

    // Create our test server instance.
    try (final KafkaTestServer kafkaTestServer = new KafkaTestServer()) {
        // Start it and create our topic.
        kafkaTestServer.start();

        // Create test utils instance.
        final KafkaTestUtils kafkaTestUtils = new KafkaTestUtils(kafkaTestServer);

        // Create topics
        kafkaTestUtils.createTopic(inputTopic, 1, (short) 1);
        kafkaTestUtils.createTopic(outputTopic, 1, (short) 1);

        // Produce random data into input topic
        kafkaTestUtils.produceRecords(numberOfRecords, inputTopic, partitionId);

        // Define stream consumer properties.
        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "testStreamProcessor");
        config.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaTestServer.getKafkaConnectString());
        config.put("group.id", "test-stream-group");
        config.put("auto.offset.reset", "earliest");

        // Serialization definition.
        final Serde<String> stringSerde = Serdes.String();

        // Build the stream
        final KStreamBuilder kStreamBuilder = new KStreamBuilder();
        kStreamBuilder
            // Read from input topic.
            .stream(stringSerde, stringSerde, inputTopic)

            // For each record processed, increment our counter
            .map((key, word) -> {
                recordCounter.incrementAndGet();
                return new KeyValue<>(word, word);
            })

            // Write to output topic.
            .to(stringSerde, stringSerde, outputTopic);

        // Create stream
        final KafkaStreams kafkaStreams = new KafkaStreams(kStreamBuilder, new StreamsConfig(config));
        try {
            // Start the stream consumer
            kafkaStreams.start();

            // Since stream processing is async, we need to wait for the Stream processor to start, consume messages
            // from the input topic, and process them. We'll wait up to 10 seconds for it to do its thing.
            for (int timeoutCounter = 0; timeoutCounter <= 10; timeoutCounter++) {
                // If we've processed all of our records
                if (recordCounter.get() >= numberOfRecords) {
                    // Break out of sleep loop.
                    break;
                }
                // Otherwise, we need to wait longer, sleep 1 second.
                Thread.sleep(1000L);
            }
        } finally {
            // Close the stream consumer.
            kafkaStreams.close();
        }

        // Validation.
        Assertions.assertEquals(numberOfRecords, recordCounter.get(), "Should have 25 records processed");

        // Consume records from output topic.
        final List<ConsumerRecord<String, String>> outputRecords =
            kafkaTestUtils.consumeAllRecordsFromTopic(outputTopic, StringDeserializer.class, StringDeserializer.class);

        // Validate we got the correct number of records.
        Assertions.assertEquals(numberOfRecords, outputRecords.size());
    }
}
 
Example #21
Source File: StreamsBuilderSmokeTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Integration test validates that streams can be used against KafkaTestServer.
 */
@Test
void testStreamConsumer() throws Exception {
    // Define topic to test with.
    final String inputTopic = "stream-input-topic" + System.currentTimeMillis();
    final String outputTopic = "stream-output-topic" + System.currentTimeMillis();

    // Define how many records
    final int numberOfRecords = 25;
    final int partitionId = 0;

    // Tracks how many records the Stream consumer has processed.
    final AtomicInteger recordCounter = new AtomicInteger(0);

    // Create our test server instance.
    try (final KafkaTestServer kafkaTestServer = new KafkaTestServer()) {
        // Start it and create our topic.
        kafkaTestServer.start();

        // Create test utils instance.
        final KafkaTestUtils kafkaTestUtils = new KafkaTestUtils(kafkaTestServer);

        // Create topics
        kafkaTestUtils.createTopic(inputTopic, 1, (short) 1);
        kafkaTestUtils.createTopic(outputTopic, 1, (short) 1);

        // Produce random data into input topic
        kafkaTestUtils.produceRecords(numberOfRecords, inputTopic, partitionId);

        // Define stream consumer properties.
        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "testStreamProcessor");
        config.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaTestServer.getKafkaConnectString());
        config.put("group.id", "test-stream-group");
        config.put("auto.offset.reset", "earliest");

        // Build the stream
        final StreamsBuilder streamsBuilder = new StreamsBuilder();
        streamsBuilder
            // Read from input topic.
            .stream(inputTopic)

            // For each record processed, increment our counter
            .map((key, word) -> {
                recordCounter.incrementAndGet();
                return new KeyValue<>(word, word);
            })

            // Write to output topic.
            .to(outputTopic);

        // Create stream
        final KafkaStreams kafkaStreams = new KafkaStreams(streamsBuilder.build(), new StreamsConfig(config));
        try {
            // Start the stream consumer
            kafkaStreams.start();

            // Since stream processing is async, we need to wait for the Stream processor to start, consume messages
            // from the input topic, and process them. We'll wait up to 10 seconds for it to do its thing.
            for (int timeoutCounter = 0; timeoutCounter <= 10; timeoutCounter++) {
                // If we've processed all of our records
                if (recordCounter.get() >= numberOfRecords) {
                    // Break out of sleep loop.
                    break;
                }
                // Otherwise, we need to wait longer, sleep 1 second.
                Thread.sleep(1000L);
            }
        } finally {
            // Close the stream consumer.
            kafkaStreams.close();
        }

        // Validation.
        Assertions.assertEquals(numberOfRecords, recordCounter.get(), "Should have 25 records processed");

        // Consume records from output topic.
        final List<ConsumerRecord<String, String>> outputRecords =
            kafkaTestUtils.consumeAllRecordsFromTopic(outputTopic, StringDeserializer.class, StringDeserializer.class);

        // Validate we got the correct number of records.
        Assertions.assertEquals(numberOfRecords, outputRecords.size());
    }
}
 
Example #22
Source File: KafkaTestUtilsTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple example of how to produce records into a topic.
 */
@Test
void testProducerAndConsumer() {
    final int numberOfRecords = 12;

    // Create our utility class
    final KafkaTestUtils kafkaTestUtils = getKafkaTestUtils();

    // Get a producer
    try (final KafkaProducer<String, String> producer
        = kafkaTestUtils.getKafkaProducer(StringSerializer.class, StringSerializer.class)) {

        // Produce 12 records
        for (int recordCount = 0; recordCount < numberOfRecords; recordCount++) {
            // Create a record.
            final ProducerRecord<String, String> record = new ProducerRecord<>(
                topicName,
                "My Key " + recordCount,
                "My Value " + recordCount
            );
            producer.send(record);
        }
        // Ensure messages are flushed.
        producer.flush();
    }

    // Consume records back out
    final List<ConsumerRecord<String, String>> consumerRecords
        = kafkaTestUtils.consumeAllRecordsFromTopic(topicName, StringDeserializer.class, StringDeserializer.class);

    Assertions.assertNotNull(consumerRecords, "Should have non-null result.");
    Assertions.assertEquals(numberOfRecords, consumerRecords.size(), "Should have 12 records.");

    // Log the records we found.
    for (final ConsumerRecord<String, String> consumerRecord : consumerRecords) {
        logger.info(
            "Found Key: {} on Partition: {} with Value: {}",
            consumerRecord.key(),
            consumerRecord.partition(),
            consumerRecord.value()
        );
    }
}
 
Example #23
Source File: AbstractSharedKafkaTestResourceTest.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Simple accessor.
 */
protected abstract KafkaTestUtils getKafkaTestUtils();
 
Example #24
Source File: AbstractKafkaClusterTests.java    From kafka-webview with MIT License
/**
 * Properly configured KafkaTestUtils instance.
 */
abstract protected KafkaTestUtils getKafkaTestUtils();
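Concrete test classes supply this accessor by delegating to whichever shared resource they declare; the SASL and SSL variants above differ only in how that resource is configured. A minimal sketch of a plaintext implementation, assuming the same JUnit 4 SharedKafkaTestResource rule shown at the top of this page (the subclass name is hypothetical):

public class PlainKafkaClusterTests extends AbstractKafkaClusterTests {
    // Single-broker plaintext cluster, shared across the tests in this class.
    @ClassRule
    public static final SharedKafkaTestResource sharedKafkaTestResource = new SharedKafkaTestResource();

    @Override
    protected KafkaTestUtils getKafkaTestUtils() {
        return sharedKafkaTestResource.getKafkaTestUtils();
    }
}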