Java Code Examples for org.springframework.kafka.test.utils.KafkaTestUtils

The following examples show how to use org.springframework.kafka.test.utils.KafkaTestUtils. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: java-specialagent   Source File: KafkaClientITest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Starts a background thread that consumes {@code MESSAGE_COUNT} records from
 * {@code TOPIC_NAME} on the given embedded broker, committing offsets after
 * each batch, then counts down {@code latch} when done.
 */
private static void createConsumer(final EmbeddedKafkaRule embeddedKafkaRule, final CountDownLatch latch) {
  Executors.newSingleThreadExecutor().execute(new Runnable() {
    @Override
    public void run() {
      final Map<String,Object> consumerProps = KafkaTestUtils.consumerProps("sampleRawConsumer", "false", embeddedKafkaRule.getEmbeddedKafka());
      consumerProps.put("auto.offset.reset", "earliest");
      try (final Consumer<Long,String> consumer = new KafkaConsumer<>(consumerProps)) {
        consumer.subscribe(Collections.singletonList(TOPIC_NAME));
        for (int i = 0; i < MESSAGE_COUNT;) {
          // poll(long) is deprecated since Kafka 2.0; use the Duration overload.
          final int count = consumer.poll(java.time.Duration.ofMillis(100)).count();
          if (count > 0) {
            // One commitSync() covers every record returned by the poll; the
            // original committed once per record, which was redundant.
            consumer.commitSync();
            i += count;
          }
        }
      }

      latch.countDown();
    }
  });
}
 
Example 2
private void receiveAndValidateBoard( ConfigurableApplicationContext context ) throws Exception {

        // Publish a serialized BoardInitialized event to the receiver topic.
        Map<String, Object> producerConfig = KafkaTestUtils.producerProps( embeddedKafka );
        DefaultKafkaProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>( producerConfig );
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>( producerFactory, true );
        kafkaTemplate.setDefaultTopic( RECEIVER_TOPIC );

        ObjectMapper objectMapper = context.getBean( ObjectMapper.class );
        BoardClient client = context.getBean( BoardClient.class );

        UUID uuid = UUID.randomUUID();
        BoardInitialized sourceEvent = createTestBoardInitializedEvent( uuid );
        kafkaTemplate.sendDefault( objectMapper.writeValueAsString( sourceEvent ) );

        // Give the listener a moment to consume and apply the event.
        Thread.sleep( 1000 );

        // The projection should now expose a freshly initialized board.
        Board result = client.find( uuid );
        assertThat( result, is( notNullValue() ) );
        assertThat( result.getBoardUuid(), is( equalTo( uuid ) ) );
        assertThat( result.getName(), is( equalTo( "New Board" ) ) );
        assertThat( result.getStories().isEmpty(), is( equalTo( true ) ) );

    }
 
Example 3
private void receiveAndValidateBoard( ConfigurableApplicationContext context ) throws Exception {

        // Send a serialized BoardInitialized event into the receiver topic.
        Map<String, Object> producerConfig = KafkaTestUtils.producerProps(kafkaEmbedded);
        DefaultKafkaProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>( producerConfig );
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>( producerFactory, true );
        kafkaTemplate.setDefaultTopic( RECEIVER_TOPIC );

        ObjectMapper objectMapper = context.getBean( ObjectMapper.class );
        BoardClient client = context.getBean( BoardClient.class );

        UUID uuid = UUID.randomUUID();
        BoardInitialized sourceEvent = createTestBoardInitializedEvent( uuid );
        kafkaTemplate.sendDefault( objectMapper.writeValueAsString( sourceEvent ) );

        // Allow the listener time to process before querying the projection.
        Thread.sleep( 1000 );

        Board result = client.find( uuid );
        assertThat( result, is( notNullValue() ) );
        assertThat( result.getBoardUuid(), is( equalTo( uuid ) ) );
        assertThat( result.getName(), is( equalTo( "New Board" ) ) );
        assertThat( result.getStories().isEmpty(), is( equalTo( true ) ) );
        // No pending, uncommitted changes should remain on the aggregate.
        assertThat( result.changes(), hasSize( 0 ) );

    }
 
Example 4
Source Project: extension-kafka   Source File: KafkaPublisherTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testPublishMessagesWithTransactionalModeNoUnitOfWorkShouldBePublishedAndReadSuccessfully() {
    // Kafka transactional producers do not work reliably on Windows.
    assumeFalse(
            "Transactional producers not supported on Windows",
            System.getProperty("os.name").contains("Windows")
    );

    String topic = "testPublishMessagesWithTransactionalModeNoUnitOfWorkShouldBePublishedAndReadSuccessfully";
    testProducerFactory = transactionalProducerFactory(kafkaBroker, "foo", ByteArraySerializer.class);
    testConsumer = buildConsumer(topic);
    testSubject = buildPublisher(topic);
    List<GenericDomainEventMessage<String>> messages = domainMessages("62457", 5);

    // Publishing outside any unit of work must still reach the broker.
    eventBus.publish(messages);

    // The monitor saw every message succeed and all of them are readable from Kafka.
    assertThat(monitor.successCount()).isEqualTo(messages.size());
    assertThat(monitor.failureCount()).isZero();
    assertThat(monitor.ignoreCount()).isZero();
    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isEqualTo(messages.size());
}
 
Example 5
Source Project: extension-kafka   Source File: KafkaPublisherTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testPublishMessagesWithAckModeUnitOfWorkShouldBePublishedAndReadSuccessfully() {
    String topic = "testPublishMessagesWithAckModeUnitOfWorkShouldBePublishedAndReadSuccessfully";
    testProducerFactory = ackProducerFactory(kafkaBroker, ByteArraySerializer.class);
    testConsumer = buildConsumer(topic);
    testSubject = buildPublisher(topic);
    GenericDomainEventMessage<String> message = domainMessage("1234");

    // Publish inside an explicit unit of work and commit it.
    UnitOfWork<?> unitOfWork = DefaultUnitOfWork.startAndGet(message);
    eventBus.publish(message);
    unitOfWork.commit();

    // Exactly one message was received, succeeded, and landed on the topic.
    assertThat(singletonList(message)).isEqualTo(monitor.getReceived());
    assertThat(monitor.successCount()).isOne();
    assertThat(monitor.failureCount()).isZero();
    assertThat(monitor.ignoreCount()).isZero();
    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isOne();
}
 
Example 6
Source Project: extension-kafka   Source File: KafkaPublisherTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testPublishMessagesWithTransactionalModeUnitOfWorkShouldBePublishedAndReadSuccessfully() {
    // Kafka transactional producers do not work reliably on Windows.
    assumeFalse(
            "Transactional producers not supported on Windows",
            System.getProperty("os.name").contains("Windows")
    );

    String topic = "testPublishMessagesWithTransactionalModeUnitOfWorkShouldBePublishedAndReadSuccessfully";
    testProducerFactory = transactionalProducerFactory(kafkaBroker, "foo", ByteArraySerializer.class);
    testConsumer = buildConsumer(topic);
    testSubject = buildPublisher(topic);

    GenericDomainEventMessage<String> message = domainMessage("121");

    // Publish within a committed unit of work; exactly one record must land.
    UnitOfWork<?> unitOfWork = DefaultUnitOfWork.startAndGet(message);
    eventBus.publish(message);
    unitOfWork.commit();

    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isOne();
}
 
Example 7
Source Project: java-kafka-client   Source File: TracingKafkaTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void with_interceptors() throws Exception {
  // Register the tracing interceptor on a plain KafkaProducer.
  Map<String, Object> producerConfig = KafkaTestUtils
      .producerProps(embeddedKafka.getEmbeddedKafka());
  producerConfig.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
      TracingProducerInterceptor.class.getName());
  KafkaProducer<Integer, String> producer = new KafkaProducer<>(producerConfig);

  producer.send(new ProducerRecord<>("messages", 1, "test"));

  final CountDownLatch latch = new CountDownLatch(1);
  createConsumer(latch, 1, true, null);

  producer.close();

  // One producer span plus one consumer span are expected.
  List<MockSpan> finishedSpans = mockTracer.finishedSpans();
  assertEquals(2, finishedSpans.size());
  checkSpans(finishedSpans);
  assertNull(mockTracer.activeSpan());
}
 
Example 8
Source Project: java-kafka-client   Source File: TracingKafkaTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that producing a record with a {@code null} key is handled by the
 * tracing producer/consumer pair. The original built a {@code consumerProps}
 * map that was never used (createConsumer takes no props argument); that dead
 * code has been removed.
 */
@Test
public void nullKey() throws Exception {
  Producer<Integer, String> producer = createTracingProducer();

  // No key supplied — the two-arg ProducerRecord constructor leaves it null.
  ProducerRecord<Integer, String> record = new ProducerRecord<>("messages", "test");
  producer.send(record);

  final CountDownLatch latch = new CountDownLatch(1);
  createConsumer(latch, null, false, null);

  producer.close();
}
 
Example 9
Source Project: java-kafka-client   Source File: TracingKafkaTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a tracing consumer subscribed to "messages", optionally applying the
 * given span decorators.
 */
private Consumer<Integer, String> createConsumerWithDecorators(
    Collection<SpanDecorator> spanDecorators) {
  // Plain Kafka consumer reading from the earliest offset.
  Map<String, Object> config = KafkaTestUtils
      .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
  config.put("auto.offset.reset", "earliest");
  KafkaConsumer<Integer, String> delegate = new KafkaConsumer<>(config);

  // Wrap it in a tracing consumer; decorators are applied only when provided.
  TracingKafkaConsumerBuilder builder = new TracingKafkaConsumerBuilder(delegate, mockTracer);
  if (spanDecorators != null) {
    builder = builder.withDecorators(spanDecorators);
  }
  TracingKafkaConsumer tracingConsumer = builder.build();
  tracingConsumer.subscribe(Collections.singletonList("messages"));

  return tracingConsumer;
}
 
Example 10
@BeforeEach
void setUp() {

    // Producer side: JSON serialization against the embedded broker.
    Map<String, Object> producerProps = new HashMap<>();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, broker.getBrokersAsString());
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    setEventGenerator(new KafkaTemplateInventoryUpdateEventGenerator(producerProps, INPUT_TOPIC));

    // Consumer side: JSON deserialization with explicit default types, since
    // type-info headers are disabled below.
    Map<String, Object> consumerConfig = KafkaTestUtils.consumerProps(GROUP_NAME, "true", broker);
    consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    consumerConfig.put(ConsumerConfig.CLIENT_ID_CONFIG, "test");
    consumerConfig.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
    consumerConfig.put(JsonDeserializer.TRUSTED_PACKAGES, KafkaStreamsInventoryCountTests.class.getPackage().getName());
    consumerConfig.put(JsonDeserializer.KEY_DEFAULT_TYPE, ProductKey.class);
    consumerConfig.put(JsonDeserializer.VALUE_DEFAULT_TYPE, InventoryCountEvent.class);
    consumerConfig.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, "false");
    cf = new DefaultKafkaConsumerFactory<>(consumerConfig);

    consumer = cf.createConsumer(GROUP_NAME);
    consumer.subscribe(Collections.singleton(OUTPUT_TOPIC));
}
 
Example 11
/**
 * Points the Kafka Streams binder at the embedded broker, silences the web
 * server/JMX, and subscribes a shared consumer to the output topic.
 */
@BeforeClass
public static void setUp() {
	System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
			embeddedKafka.getBrokersAsString());

	System.setProperty("server.port", "0");
	System.setProperty("spring.jmx.enabled", "false");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	// The original passed the same topic name twice to consumeFromEmbeddedTopics;
	// a single-topic subscription is what was intended.
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "DeserializationErrorHandlerByKafkaTests-out");
}
 
Example 12
/**
 * Sends "foobar" into topic {@code in} and asserts a count-of-one record for
 * it appears on topic {@code out}.
 */
private void receiveAndValidate(String in, String out) {
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(producerConfig);
	try {
		KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory, true);
		kafkaTemplate.setDefaultTopic(in);
		kafkaTemplate.sendDefault("foobar");
		// The stream should emit exactly one count record on the output topic.
		ConsumerRecord<String, String> received = KafkaTestUtils.getSingleRecord(consumer, out);
		assertThat(received.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
	}
	finally {
		producerFactory.destroy();
	}
}
 
Example 13
/**
 * Sends a tombstone (null payload) record and asserts nothing is emitted,
 * i.e. the binder drops it gracefully instead of failing.
 */
private void sendTombStoneRecordsAndVerifyGracefulHandling() throws Exception {
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(producerConfig);
	try {
		KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory, true);
		kafkaTemplate.setDefaultTopic("words-1");
		// A null payload is a tombstone record.
		kafkaTemplate.sendDefault(null);
		ConsumerRecords<String, String> received = consumer.poll(Duration.ofMillis(5000));
		// An empty poll result means the tombstone never produced output.
		assertThat(received.isEmpty()).isTrue();
	}
	finally {
		producerFactory.destroy();
	}
}
 
Example 14
/**
 * Sends one word to each of the two input topics and verifies both counts
 * appear on the "counts" output topic.
 */
private void receiveAndValidate()
		throws Exception {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	try {
		KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
		template.setDefaultTopic("words1");
		template.sendDefault("foobar1");
		template.setDefaultTopic("words2");
		template.sendDefault("foobar2");
		// Sleep a bit so that both the messages are processed before reading
		// from the output topic; otherwise the assertions may fail arbitrarily.
		Thread.sleep(5000);
		ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer);
		List<String> wordCounts = new ArrayList<>(2);
		received.records("counts")
				.forEach((consumerRecord) -> wordCounts.add(consumerRecord.value()));
		// Dropped the stray System.out.println debug line; AssertJ's collection
		// assertion also yields a clearer failure message than contains()+isTrue().
		assertThat(wordCounts).contains("{\"word\":\"foobar1\",\"count\":1}",
				"{\"word\":\"foobar2\",\"count\":1}");
	}
	finally {
		// Sibling helpers destroy the producer factory; the original leaked it.
		pf.destroy();
	}
}
 
Example 15
@Test
public void testKafkaStreamsWordCountProcessor() throws InterruptedException {
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(producerConfig);
	try {
		// One word per language on the shared input topic.
		KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory, true);
		kafkaTemplate.setDefaultTopic("words");
		kafkaTemplate.sendDefault("english");
		kafkaTemplate.sendDefault("french");
		kafkaTemplate.sendDefault("spanish");
		// Give the processor time to route each word to its per-language topic.
		Thread.sleep(2000);
		ConsumerRecord<String, String> received =
				KafkaTestUtils.getSingleRecord(consumer, "english-counts", 5000);
		assertThat(received.value().contains("english")).isTrue();
		received = KafkaTestUtils.getSingleRecord(consumer, "french-counts", 5000);
		assertThat(received.value().contains("french")).isTrue();
		received = KafkaTestUtils.getSingleRecord(consumer, "spanish-counts", 5000);
		assertThat(received.value().contains("spanish")).isTrue();
	}
	finally {
		producerFactory.destroy();
	}
}
 
Example 16
private void receiveAndValidate(ConfigurableApplicationContext context)
		throws Exception {
	// NOTE(review): the context parameter is unused here but kept for
	// signature compatibility with callers.
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(producerConfig);
	KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory, true);
	kafkaTemplate.setDefaultTopic("words");

	// One occurrence of "english" -> count 1 on "counts".
	kafkaTemplate.sendDefault("english");
	ConsumerRecord<String, String> received = KafkaTestUtils.getSingleRecord(consumer, "counts");
	assertThat(received.value().contains("\"word\":\"english\",\"count\":1")).isTrue();

	// Two occurrences of "french" -> count 2 on "foo".
	kafkaTemplate.sendDefault("french");
	kafkaTemplate.sendDefault("french");
	received = KafkaTestUtils.getSingleRecord(consumer, "foo");
	assertThat(received.value().contains("\"word\":\"french\",\"count\":2")).isTrue();

	// Three occurrences of "spanish" -> count 3 on "bar".
	kafkaTemplate.sendDefault("spanish");
	kafkaTemplate.sendDefault("spanish");
	kafkaTemplate.sendDefault("spanish");
	received = KafkaTestUtils.getSingleRecord(consumer, "bar");
	assertThat(received.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
}
 
Example 17
/**
 * Sends the String "foo" into the input topic and verifies the application
 * emits a Person named "foo" on the output topic. The consumer and producer
 * factory are now released (the original leaked both).
 */
@Test
public void testSendReceive() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka());
	senderProps.put("value.serializer", StringSerializer.class);
	DefaultKafkaProducerFactory<byte[], String> pf = new DefaultKafkaProducerFactory<>(senderProps);
	try {
		KafkaTemplate<byte[], String> template = new KafkaTemplate<>(pf, true);
		template.setDefaultTopic(INPUT_TOPIC);
		template.sendDefault("foo");

		Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(GROUP_NAME, "false", embeddedKafka.getEmbeddedKafka());
		consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
		consumerProps.put("value.deserializer", MyJsonDeserializer.class);
		DefaultKafkaConsumerFactory<byte[], Person> cf = new DefaultKafkaConsumerFactory<>(consumerProps);

		Consumer<byte[], Person> consumer = cf.createConsumer();
		try {
			consumer.subscribe(Collections.singleton(OUTPUT_TOPIC));
			ConsumerRecords<byte[], Person> records = consumer.poll(Duration.ofSeconds(10));
			consumer.commitSync();

			assertThat(records.count()).isEqualTo(1);
			assertThat(new String(records.iterator().next().value().getName())).isEqualTo("foo");
		}
		finally {
			consumer.close();
		}
	}
	finally {
		pf.destroy();
	}
}
 
Example 18
@Test
public void test() throws Exception {
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(producerConfig);
	KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory, true);
	kafkaTemplate.setDefaultTopic("decode-words-1");
	kafkaTemplate.sendDefault("foobar");

	ConsumerRecord<String, String> received =
			KafkaTestUtils.getSingleRecord(consumer, "decode-counts-1");
	assertThat(received.value().equals("Count for foobar : 1")).isTrue();

	// Native decoding is in play, so the conversion delegate must never run.
	verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
	verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
			any(KStream.class));
}
 
Example 19
@Test
public void test() {
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(producerConfig);
	KafkaTemplate<Integer, String> kafkaTemplate = new KafkaTemplate<>(producerFactory, true);
	kafkaTemplate.setDefaultTopic("decode-words");
	kafkaTemplate.sendDefault("foobar");

	// Time how long the single count record takes to arrive.
	StopWatch watch = new StopWatch();
	watch.start();
	System.out.println("Starting: ");
	ConsumerRecord<String, String> received =
			KafkaTestUtils.getSingleRecord(consumer, "decode-counts");
	watch.stop();
	System.out.println("Total time: " + watch.getTotalTimeSeconds());
	assertThat(received.value().equals("Count for foobar : 1")).isTrue();

	// Here conversion must go through the delegate on both sides.
	verify(conversionDelegate).serializeOnOutbound(any(KStream.class));
	verify(conversionDelegate).deserializeOnInbound(any(Class.class),
			any(KStream.class));
}
 
Example 20
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
	// NOTE(review): context is unused but retained for caller compatibility.
	Map<String, Object> config = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> factory = new DefaultKafkaProducerFactory<>(config);
	KafkaTemplate<Integer, String> producerTemplate = new KafkaTemplate<>(factory, true);
	producerTemplate.setDefaultTopic("words");

	// "english" once -> count 1 on "counts".
	producerTemplate.sendDefault("english");
	ConsumerRecord<String, String> received = KafkaTestUtils.getSingleRecord(consumer, "counts");
	assertThat(received.value().contains("\"word\":\"english\",\"count\":1")).isTrue();

	// "french" twice -> count 2 on "foo".
	producerTemplate.sendDefault("french");
	producerTemplate.sendDefault("french");
	received = KafkaTestUtils.getSingleRecord(consumer, "foo");
	assertThat(received.value().contains("\"word\":\"french\",\"count\":2")).isTrue();

	// "spanish" three times -> count 3 on "bar".
	producerTemplate.sendDefault("spanish");
	producerTemplate.sendDefault("spanish");
	producerTemplate.sendDefault("spanish");
	received = KafkaTestUtils.getSingleRecord(consumer, "bar");
	assertThat(received.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
}
 
Example 21
Source Project: spring-kafka   Source File: SpringKafkaReceiverTest.java    License: MIT License 6 votes vote down vote up
@Before
public void setUp() throws Exception {
  // Producer configuration pointed at the shared embedded broker.
  Map<String, Object> producerConfig =
      KafkaTestUtils.senderProps(AllSpringKafkaTests.embeddedKafka.getBrokersAsString());

  ProducerFactory<String, String> producerFactory =
      new DefaultKafkaProducerFactory<String, String>(producerConfig);

  // Template used by the tests to publish to the receiver topic.
  template = new KafkaTemplate<>(producerFactory);
  template.setDefaultTopic(AllSpringKafkaTests.RECEIVER_TOPIC);

  // Block until every listener container has its partitions assigned, so no
  // message is sent before the receiver is ready to consume it.
  for (MessageListenerContainer container : kafkaListenerEndpointRegistry
      .getListenerContainers()) {
    ContainerTestUtils.waitForAssignment(container,
        AllSpringKafkaTests.embeddedKafka.getPartitionsPerTopic());
  }
}
 
Example 22
@Test
public void testStreamToTable() {
	SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	// String keys with Long click counts come off the output topic.
	Map<String, Object> config = KafkaTestUtils.consumerProps("group-1", "false", embeddedKafka);
	config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
	Consumer<String, Long> consumer =
			new DefaultKafkaConsumerFactory<String, Long>(config).createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

	runTest(app, consumer);
}
 
Example 23
@Test
public void testStreamToTableBiFunction() {
	SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	// String keys with Long click counts come off the output topic.
	Map<String, Object> config = KafkaTestUtils.consumerProps("group-2", "false", embeddedKafka);
	config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
	Consumer<String, Long> consumer =
			new DefaultKafkaConsumerFactory<String, Long>(config).createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

	runTest(app, consumer);
}
 
Example 24
Source Project: java-specialagent   Source File: KafkaTest.java    License: Apache License 2.0 5 votes vote down vote up
private static void createConsumer(final CountDownLatch latch, final Integer key, final MockTracer tracer) throws Exception {
  final Map<String,Object> consumerProps = KafkaTestUtils.consumerProps("sampleRawConsumer", "false", embeddedKafkaRule.getEmbeddedKafka());
  consumerProps.put("auto.offset.reset", "earliest");

  Executors.newSingleThreadExecutor().execute(new Runnable() {
    @Override
    public void run() {
      try (final KafkaConsumer<Integer,String> consumer = new KafkaConsumer<>(consumerProps)) {
        consumer.subscribe(Collections.singletonList("messages"));
        while (latch.getCount() > 0) {
          final ConsumerRecords<Integer,String> records = consumer.poll(100);
          for (final ConsumerRecord<Integer,String> record : records) {
            final SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer);
            assertNotNull(spanContext);
            assertEquals("test", record.value());
            if (key != null)
              assertEquals(key, record.key());

            consumer.commitSync();
            latch.countDown();
          }
        }
      }
    }
  });

  assertTrue(latch.await(30, TimeUnit.SECONDS));
}
 
Example 25
@BeforeClass
public static void setUp() {
	// Subscribe a shared consumer to the "counts" output topic from the start.
	Map<String, Object> config = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
	config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumer = new DefaultKafkaConsumerFactory<String, String>(config).createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
	// Point the Kafka Streams binder at the embedded broker.
	System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
}
 
Example 26
/**
 * Sends "test" through the input topic and verifies the uppercased "TEST"
 * appears on the output topic. The consumer is now closed in a finally block
 * (the original leaked it).
 */
@Test
void testMessagesOverKafka() {
	this.template.send(TEST_TOPIC_IN, "test".getBytes());

	Consumer<byte[], String> consumer = this.consumerFactory.createConsumer();
	try {
		embeddedKafkaBroker.consumeFromAnEmbeddedTopic(consumer, TEST_TOPIC_OUT);

		ConsumerRecords<byte[], String> replies = KafkaTestUtils.getRecords(consumer);
		assertThat(replies.count()).isEqualTo(1);

		Iterator<ConsumerRecord<byte[], String>> iterator = replies.iterator();
		assertThat(iterator.next().value()).isEqualTo("TEST");
	}
	finally {
		consumer.close();
	}
}
 
Example 27
Source Project: extension-kafka   Source File: KafkaPublisherTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testPublishMessageWithTransactionalModeUnitOfWorkRollbackShouldNeverBePublished() {
    String expectedException = "Some exception";

    String testTopic = "testPublishMessageWithTransactionalModeUnitOfWorkRollbackShouldNeverBePublished";
    testProducerFactory = transactionalProducerFactory(kafkaBroker, "foo", ByteArraySerializer.class);
    testConsumer = buildConsumer(testTopic);
    testSubject = buildPublisher(testTopic);

    GenericDomainEventMessage<String> testMessage = domainMessage("123456");
    UnitOfWork<?> uow = DefaultUnitOfWork.startAndGet(testMessage);
    uow.onPrepareCommit(u -> {
        throw new RuntimeException(expectedException);
    });

    eventBus.publish(testMessage);

    //noinspection CatchMayIgnoreException
    try {
        uow.commit();
        fail("Expected a RuntimeException to be thrown");
    } catch (Exception e) {
        assertThat(e.getMessage()).isEqualTo(expectedException);
    }

    assertTrue("Didn't expect any consumer records", KafkaTestUtils.getRecords(testConsumer, 100).isEmpty());
}
 
Example 28
Source Project: extension-kafka   Source File: KafkaPublisherTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testSendMessageWithKafkaTransactionRollback() {
    String expectedException = "Some exception";

    String testTopic = "testSendMessageWithKafkaTransactionRollback";
    testProducerFactory = producerFactoryWithFencedExceptionOnAbort();
    testSubject = buildPublisher(testTopic);

    GenericDomainEventMessage<String> testMessage = domainMessage("76123");
    UnitOfWork<?> uow = DefaultUnitOfWork.startAndGet(testMessage);
    uow.onPrepareCommit(u -> {
        throw new RuntimeException(expectedException);
    });

    eventBus.publish(testMessage);

    //noinspection CatchMayIgnoreException
    try {
        uow.commit();
        fail("Expected a RuntimeException to be thrown");
    } catch (Exception e) {
        assertThat(e.getMessage()).isEqualTo(expectedException);
    }

    testConsumer = buildConsumer(testTopic);
    assertTrue("Didn't expect any consumer records", KafkaTestUtils.getRecords(testConsumer, 100).isEmpty());
}
 
Example 29
Source Project: extension-kafka   Source File: KafkaPublisherTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Publishes the given message in a unit of work that is expected to fail, then
 * verifies no record ever reached the given topic.
 */
private void publishWithException(String topic, GenericDomainEventMessage<String> message) {
    try {
        UnitOfWork<?> unitOfWork = DefaultUnitOfWork.startAndGet(message);
        eventBus.publish(message);
        unitOfWork.commit();
    } finally {
        // Verification runs whether or not commit threw.
        testConsumer = buildConsumer(topic);
        assertTrue("Didn't expect any consumer records", KafkaTestUtils.getRecords(testConsumer, 100).isEmpty());
    }
}
 
Example 30
@Test
public void testCreatedConsumerValidConfigCanCommunicateToKafka() {
    String topic = "testCreatedConsumer_ValidConfig_CanCommunicateToKafka";

    // Put one record on the topic up front.
    Producer<String, String> producer = producerFactory.createProducer();
    producer.send(new ProducerRecord<>(topic, 0, null, null, "foo"));
    producer.flush();

    // A consumer built from the minimal config must be able to read it back.
    ConsumerFactory<?, ?> factory = new DefaultConsumerFactory<>(minimal(kafkaBroker));
    testConsumer = factory.createConsumer(DEFAULT_GROUP_ID);
    testConsumer.subscribe(Collections.singleton(topic));

    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isOne();
}