org.springframework.kafka.test.utils.KafkaTestUtils Java Examples
The following examples show how to use org.springframework.kafka.test.utils.KafkaTestUtils. Each example is taken from an open source project; the project, source file, and license are noted above the code, so you can follow them back to the original source.
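Most of the examples below follow the same round trip: build producer properties with KafkaTestUtils.producerProps, send a record through a KafkaTemplate, build consumer properties with KafkaTestUtils.consumerProps, and assert on what arrives. As an orientation sketch only (the topic names, the "test-group" id, and the embeddedKafka broker are placeholder assumptions, not taken from any single example below):

    // Minimal sketch of the common pattern, assuming an EmbeddedKafkaBroker
    // named embeddedKafka and placeholder topics "in" and "out".
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("in");
    template.sendDefault("hello");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("test-group", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    Consumer<String, String> consumer =
            new DefaultKafkaConsumerFactory<String, String>(consumerProps).createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "out");

    // Blocks until one record arrives on "out" or the default timeout expires.
    ConsumerRecord<String, String> record = KafkaTestUtils.getSingleRecord(consumer, "out");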
Example #1
Source File: DeserializtionErrorHandlerByBinderTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
    System.setProperty("server.port", "0");
    System.setProperty("spring.jmx.enabled", "false");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(
            "kafka-streams-dlq-tests", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
Example #2
Source File: KafkaStreamsBinderMultipleInputTopicsTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private void receiveAndValidate() throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("words1");
    template.sendDefault("foobar1");
    template.setDefaultTopic("words2");
    template.sendDefault("foobar2");

    // Sleep a bit so that both messages are processed before reading from the
    // output topic; otherwise the assertions might fail arbitrarily.
    Thread.sleep(5000);

    ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer);
    List<String> wordCounts = new ArrayList<>(2);
    received.records("counts")
            .forEach((consumerRecord) -> wordCounts.add(consumerRecord.value()));
    System.out.println(wordCounts);
    assertThat(wordCounts.contains("{\"word\":\"foobar1\",\"count\":1}")).isTrue();
    assertThat(wordCounts.contains("{\"word\":\"foobar2\",\"count\":1}")).isTrue();
}
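Where the fixed Thread.sleep above proves flaky, KafkaTestUtils also exposes an overload of getRecords that polls with an explicit timeout; a minimal sketch (the 10-second value is an arbitrary illustration, and newer spring-kafka versions add Duration-based overloads):

    // Wait up to 10 seconds for records instead of sleeping a fixed interval.
    ConsumerRecords<String, String> received = KafkaTestUtils.getRecords(consumer, 10_000);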
Example #3
Source File: KafkaStreamsBinderWordCountIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private void sendTombStoneRecordsAndVerifyGracefulHandling() throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    try {
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
        template.setDefaultTopic("words-1");
        template.sendDefault(null);

        ConsumerRecords<String, String> received = consumer.poll(Duration.ofMillis(5000));
        // By asserting that the received records are empty, we ensure that the
        // tombstone record was handled gracefully by the binder.
        assertThat(received.isEmpty()).isTrue();
    }
    finally {
        pf.destroy();
    }
}
Example #4
Source File: KafkaBoardClientEmbeddedKafkaTests.java From event-store-demo with GNU General Public License v3.0
private void receiveAndValidateBoard(ConfigurableApplicationContext context) throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<String, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic(RECEIVER_TOPIC);

    ObjectMapper mapper = context.getBean(ObjectMapper.class);
    BoardClient boardClient = context.getBean(BoardClient.class);

    UUID boardUuid = UUID.randomUUID();
    BoardInitialized boardInitialized = createTestBoardInitializedEvent(boardUuid);
    String event = mapper.writeValueAsString(boardInitialized);
    template.sendDefault(event);
    Thread.sleep(1000);

    Board board = boardClient.find(boardUuid);
    assertThat(board, is(notNullValue()));
    assertThat(board.getBoardUuid(), is(equalTo(boardUuid)));
    assertThat(board.getName(), is(equalTo("New Board")));
    assertThat(board.getStories().isEmpty(), is(equalTo(true)));
}
Example #5
Source File: KafkaStreamsBinderWordCountIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private void receiveAndValidate(String in, String out) {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    try {
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
        template.setDefaultTopic(in);
        template.sendDefault("foobar");
        ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, out);
        assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
    }
    finally {
        pf.destroy();
    }
}
Example #6
Source File: KafkaBoardClientEmbeddedKafkaTests.java From event-store-demo with GNU General Public License v3.0
private void receiveAndValidateBoard(ConfigurableApplicationContext context) throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(kafkaEmbedded);
    DefaultKafkaProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<String, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic(RECEIVER_TOPIC);

    ObjectMapper mapper = context.getBean(ObjectMapper.class);
    BoardClient boardClient = context.getBean(BoardClient.class);

    UUID boardUuid = UUID.randomUUID();
    BoardInitialized boardInitialized = createTestBoardInitializedEvent(boardUuid);
    String event = mapper.writeValueAsString(boardInitialized);
    template.sendDefault(event);
    Thread.sleep(1000);

    Board board = boardClient.find(boardUuid);
    assertThat(board, is(notNullValue()));
    assertThat(board.getBoardUuid(), is(equalTo(boardUuid)));
    assertThat(board.getName(), is(equalTo("New Board")));
    assertThat(board.getStories().isEmpty(), is(equalTo(true)));
    assertThat(board.changes(), hasSize(0));
}
Example #7
Source File: KafkaClientITest.java From java-specialagent with Apache License 2.0
private static void createConsumer(final EmbeddedKafkaRule embeddedKafkaRule, final CountDownLatch latch) {
    Executors.newSingleThreadExecutor().execute(new Runnable() {
        @Override
        public void run() {
            final Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(
                    "sampleRawConsumer", "false", embeddedKafkaRule.getEmbeddedKafka());
            consumerProps.put("auto.offset.reset", "earliest");
            try (final Consumer<Long, String> consumer = new KafkaConsumer<>(consumerProps)) {
                consumer.subscribe(Collections.singletonList(TOPIC_NAME));
                for (int i = 0; i < MESSAGE_COUNT;) {
                    final int count = consumer.poll(100).count();
                    for (int j = 0; j < count; ++j, ++i) {
                        consumer.commitSync();
                    }
                }
            }
            latch.countDown();
        }
    });
}
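Note that consumer.poll(100) above uses the long-millisecond overload, which kafka-clients 2.0+ deprecates in favor of a Duration argument; a drop-in sketch under that assumption (requires java.time.Duration):

    // Duration-based poll, preferred on kafka-clients 2.0 and later.
    final int count = consumer.poll(Duration.ofMillis(100)).count();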
Example #8
Source File: StreamToTableJoinFunctionTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testStreamToTable() {
    SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
    app.setWebApplicationType(WebApplicationType.NONE);

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
    DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer<String, Long> consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

    runTest(app, consumer);
}
Example #9
Source File: StreamToTableJoinFunctionTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testStreamToTableBiFunction() {
    SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
    app.setWebApplicationType(WebApplicationType.NONE);

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
    DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer<String, Long> consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

    runTest(app, consumer);
}
Example #10
Source File: DeserializationErrorHandlerByKafkaTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
    System.setProperty("server.port", "0");
    System.setProperty("spring.jmx.enabled", "false");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromEmbeddedTopics(consumer,
            "DeserializationErrorHandlerByKafkaTests-out",
            "DeserializationErrorHandlerByKafkaTests-out");
}
Example #11
Source File: SpringKafkaReceiverTest.java From spring-kafka with MIT License
@Before
public void setUp() throws Exception {
    // set up the Kafka producer properties
    Map<String, Object> senderProperties =
            KafkaTestUtils.senderProps(AllSpringKafkaTests.embeddedKafka.getBrokersAsString());

    // create a Kafka producer factory
    ProducerFactory<String, String> producerFactory =
            new DefaultKafkaProducerFactory<String, String>(senderProperties);

    // create a Kafka template
    template = new KafkaTemplate<>(producerFactory);
    // set the default topic to send to
    template.setDefaultTopic(AllSpringKafkaTests.RECEIVER_TOPIC);

    // wait until the partitions are assigned
    for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
            .getListenerContainers()) {
        ContainerTestUtils.waitForAssignment(messageListenerContainer,
                AllSpringKafkaTests.embeddedKafka.getPartitionsPerTopic());
    }
}
Example #12
Source File: KafkaStreamsBinderWordCountBranchesFunctionTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("words");

    template.sendDefault("english");
    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
    assertThat(cr.value().contains("\"word\":\"english\",\"count\":1")).isTrue();

    template.sendDefault("french");
    template.sendDefault("french");
    cr = KafkaTestUtils.getSingleRecord(consumer, "foo");
    assertThat(cr.value().contains("\"word\":\"french\",\"count\":2")).isTrue();

    template.sendDefault("spanish");
    template.sendDefault("spanish");
    template.sendDefault("spanish");
    cr = KafkaTestUtils.getSingleRecord(consumer, "bar");
    assertThat(cr.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
}
Example #13
Source File: KafkaPublisherTest.java From extension-kafka with Apache License 2.0
@Test
public void testPublishMessagesWithAckModeUnitOfWorkShouldBePublishedAndReadSuccessfully() {
    String testTopic = "testPublishMessagesWithAckModeUnitOfWorkShouldBePublishedAndReadSuccessfully";
    testProducerFactory = ackProducerFactory(kafkaBroker, ByteArraySerializer.class);
    testConsumer = buildConsumer(testTopic);
    testSubject = buildPublisher(testTopic);
    GenericDomainEventMessage<String> testMessage = domainMessage("1234");

    UnitOfWork<?> uow = DefaultUnitOfWork.startAndGet(testMessage);
    eventBus.publish(testMessage);
    uow.commit();

    assertThat(singletonList(testMessage)).isEqualTo(monitor.getReceived());
    assertThat(monitor.successCount()).isOne();
    assertThat(monitor.failureCount()).isZero();
    assertThat(monitor.ignoreCount()).isZero();
    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isOne();
}
Example #14
Source File: KafkaStreamsNativeEncodingDecodingTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void test() {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("decode-words");
    template.sendDefault("foobar");

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    System.out.println("Starting: ");
    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "decode-counts");
    stopWatch.stop();
    System.out.println("Total time: " + stopWatch.getTotalTimeSeconds());

    assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
    verify(conversionDelegate).serializeOnOutbound(any(KStream.class));
    verify(conversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
}
Example #15
Source File: KafkaStreamsNativeEncodingDecodingTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void test() throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("decode-words-1");
    template.sendDefault("foobar");

    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "decode-counts-1");
    assertThat(cr.value().equals("Count for foobar : 1")).isTrue();
    verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
    verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
}
Example #16
Source File: TracingKafkaTest.java From java-kafka-client with Apache License 2.0
@Test
public void with_interceptors() throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka());
    senderProps.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
            TracingProducerInterceptor.class.getName());
    KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps);
    producer.send(new ProducerRecord<>("messages", 1, "test"));

    final CountDownLatch latch = new CountDownLatch(1);
    createConsumer(latch, 1, true, null);
    producer.close();

    List<MockSpan> mockSpans = mockTracer.finishedSpans();
    assertEquals(2, mockSpans.size());
    checkSpans(mockSpans);
    assertNull(mockTracer.activeSpan());
}
Example #17
Source File: TracingKafkaTest.java From java-kafka-client with Apache License 2.0
@Test
public void nullKey() throws Exception {
    Producer<Integer, String> producer = createTracingProducer();
    ProducerRecord<Integer, String> record = new ProducerRecord<>("messages", "test");
    producer.send(record);

    final Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(
            "sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
    consumerProps.put("auto.offset.reset", "earliest");

    final CountDownLatch latch = new CountDownLatch(1);
    createConsumer(latch, null, false, null);
    producer.close();
}
Example #18
Source File: KafkaNativeSerializationApplicationTests.java From spring-cloud-stream-samples with Apache License 2.0
@Test
public void testSendReceive() {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka());
    senderProps.put("value.serializer", StringSerializer.class);
    DefaultKafkaProducerFactory<byte[], String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<byte[], String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic(INPUT_TOPIC);
    template.sendDefault("foo");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(GROUP_NAME, "false",
            embeddedKafka.getEmbeddedKafka());
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProps.put("value.deserializer", MyJsonDeserializer.class);
    DefaultKafkaConsumerFactory<byte[], Person> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer<byte[], Person> consumer = cf.createConsumer();
    consumer.subscribe(Collections.singleton(OUTPUT_TOPIC));

    ConsumerRecords<byte[], Person> records = consumer.poll(Duration.ofSeconds(10));
    consumer.commitSync();
    assertThat(records.count()).isEqualTo(1);
    assertThat(new String(records.iterator().next().value().getName())).isEqualTo("foo");
}
Example #19
Source File: WordCountMultipleBranchesIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("words");

    template.sendDefault("english");
    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
    assertThat(cr.value().contains("\"word\":\"english\",\"count\":1")).isTrue();

    template.sendDefault("french");
    template.sendDefault("french");
    cr = KafkaTestUtils.getSingleRecord(consumer, "foo");
    assertThat(cr.value().contains("\"word\":\"french\",\"count\":2")).isTrue();

    template.sendDefault("spanish");
    template.sendDefault("spanish");
    template.sendDefault("spanish");
    cr = KafkaTestUtils.getSingleRecord(consumer, "bar");
    assertThat(cr.value().contains("\"word\":\"spanish\",\"count\":3")).isTrue();
}
Example #20
Source File: TracingKafkaTest.java From java-kafka-client with Apache License 2.0
private Consumer<Integer, String> createConsumerWithDecorators(Collection<SpanDecorator> spanDecorators) {
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(
            "sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
    consumerProps.put("auto.offset.reset", "earliest");
    KafkaConsumer<Integer, String> kafkaConsumer = new KafkaConsumer<>(consumerProps);

    TracingKafkaConsumerBuilder tracingKafkaConsumerBuilder =
            new TracingKafkaConsumerBuilder(kafkaConsumer, mockTracer);
    if (spanDecorators != null) {
        tracingKafkaConsumerBuilder = tracingKafkaConsumerBuilder.withDecorators(spanDecorators);
    }
    TracingKafkaConsumer tracingKafkaConsumer = tracingKafkaConsumerBuilder.build();
    tracingKafkaConsumer.subscribe(Collections.singletonList("messages"));
    return tracingKafkaConsumer;
}
Example #21
Source File: KafkaPublisherTest.java From extension-kafka with Apache License 2.0
@Test
public void testPublishMessagesWithTransactionalModeUnitOfWorkShouldBePublishedAndReadSuccessfully() {
    assumeFalse("Transactional producers not supported on Windows",
            System.getProperty("os.name").contains("Windows"));

    String testTopic = "testPublishMessagesWithTransactionalModeUnitOfWorkShouldBePublishedAndReadSuccessfully";
    testProducerFactory = transactionalProducerFactory(kafkaBroker, "foo", ByteArraySerializer.class);
    testConsumer = buildConsumer(testTopic);
    testSubject = buildPublisher(testTopic);
    GenericDomainEventMessage<String> testMessage = domainMessage("121");

    UnitOfWork<?> uow = DefaultUnitOfWork.startAndGet(testMessage);
    eventBus.publish(testMessage);
    uow.commit();

    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isOne();
}
Example #22
Source File: KafkaPublisherTest.java From extension-kafka with Apache License 2.0
@Test
public void testPublishMessagesWithTransactionalModeNoUnitOfWorkShouldBePublishedAndReadSuccessfully() {
    assumeFalse("Transactional producers not supported on Windows",
            System.getProperty("os.name").contains("Windows"));

    String testTopic = "testPublishMessagesWithTransactionalModeNoUnitOfWorkShouldBePublishedAndReadSuccessfully";
    testProducerFactory = transactionalProducerFactory(kafkaBroker, "foo", ByteArraySerializer.class);
    testConsumer = buildConsumer(testTopic);
    testSubject = buildPublisher(testTopic);
    List<GenericDomainEventMessage<String>> testMessages = domainMessages("62457", 5);

    eventBus.publish(testMessages);

    assertThat(monitor.successCount()).isEqualTo(testMessages.size());
    assertThat(monitor.failureCount()).isZero();
    assertThat(monitor.ignoreCount()).isZero();
    assertThat(KafkaTestUtils.getRecords(testConsumer).count()).isEqualTo(testMessages.size());
}
Example #23
Source File: KafkaStreamsBranchingSampleTests.java From spring-cloud-stream-samples with Apache License 2.0
@Test
public void testKafkaStreamsWordCountProcessor() throws InterruptedException {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    try {
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
        template.setDefaultTopic("words");
        template.sendDefault("english");
        template.sendDefault("french");
        template.sendDefault("spanish");
        Thread.sleep(2000);

        ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "english-counts", 5000);
        assertThat(cr.value().contains("english")).isTrue();

        cr = KafkaTestUtils.getSingleRecord(consumer, "french-counts", 5000);
        assertThat(cr.value().contains("french")).isTrue();

        cr = KafkaTestUtils.getSingleRecord(consumer, "spanish-counts", 5000);
        assertThat(cr.value().contains("spanish")).isTrue();
    }
    finally {
        pf.destroy();
    }
}
Example #24
Source File: SpringBootKafkaStreamsInventoryCountTests.java From spring-cloud-stream-samples with Apache License 2.0
@BeforeEach
void setUp() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, broker.getBrokersAsString());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    AbstractInventoryUpdateEventGenerator eventGenerator =
            new KafkaTemplateInventoryUpdateEventGenerator(props, INPUT_TOPIC);
    setEventGenerator(eventGenerator);

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(GROUP_NAME, "true", broker);
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "test");
    consumerProps.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
    consumerProps.put(JsonDeserializer.TRUSTED_PACKAGES,
            KafkaStreamsInventoryCountTests.class.getPackage().getName());
    consumerProps.put(JsonDeserializer.KEY_DEFAULT_TYPE, ProductKey.class);
    consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE, InventoryCountEvent.class);
    consumerProps.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, "false");

    cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer(GROUP_NAME);
    consumer.subscribe(Collections.singleton(OUTPUT_TOPIC));
}
Example #25
Source File: DeserializationErrorHandlerByKafkaTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void test() {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("word1");
    template.sendDefault("foobar");
    template.setDefaultTopic("word2");
    template.sendDefault("foobar");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer<String, String> consumer1 = cf.createConsumer();
    embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx", "error.word2.groupx");

    ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1, "error.word1.groupx");
    assertThat(cr1.value()).isEqualTo("foobar");
    ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1, "error.word2.groupx");
    assertThat(cr2.value()).isEqualTo("foobar");

    // Ensuring that the deserialization was indeed done by Kafka natively
    verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
    verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
Example #26
Source File: OutboundValueNullSkippedConversionTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testOutboundNullValueIsHandledGracefully() throws Exception {
    SpringApplication app = new SpringApplication(OutboundNullApplication.class);
    app.setWebApplicationType(WebApplicationType.NONE);

    try (ConfigurableApplicationContext context = app.run("--server.port=0",
            "--spring.jmx.enabled=false",
            "--spring.cloud.stream.bindings.input.destination=words",
            "--spring.cloud.stream.bindings.output.destination=counts",
            "--spring.cloud.stream.bindings.output.producer.useNativeEncoding=false",
            "--spring.cloud.stream.kafka.streams.default.consumer.application-id=testOutboundNullValueIsHandledGracefully",
            "--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=1000",
            "--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde"
                    + "=org.apache.kafka.common.serialization.Serdes$StringSerde",
            "--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde"
                    + "=org.apache.kafka.common.serialization.Serdes$StringSerde",
            "--spring.cloud.stream.kafka.streams.timeWindow.length=5000",
            "--spring.cloud.stream.kafka.streams.timeWindow.advanceBy=0",
            "--spring.cloud.stream.kafka.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
        Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
        DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
        try {
            KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
            template.setDefaultTopic("words");
            template.sendDefault("foobar");
            ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
            assertThat(cr.value() == null).isTrue();
        }
        finally {
            pf.destroy();
        }
    }
}
Example #27
Source File: KafkaStreamsBinderMultipleInputTopicsTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
}
Example #28
Source File: DeserializationErrorHandlerByKafkaTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@Ignore
public void test() {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("abc-DeserializationErrorHandlerByKafkaTests-In");
    template.sendDefault(1, null, "foobar");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer<String, String> consumer1 = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
            "error.abc-DeserializationErrorHandlerByKafkaTests-In.group");

    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
            "error.abc-DeserializationErrorHandlerByKafkaTests-In.group");
    assertThat(cr.value()).isEqualTo("foobar");
    assertThat(cr.partition()).isEqualTo(0); // custom partition function

    // Ensuring that the deserialization was indeed done by Kafka natively
    verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
    verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
Example #29
Source File: KafkaStreamsDlqExampleTests.java From spring-cloud-stream-samples with Apache License 2.0
@BeforeClass
public static void setUp() {
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "words-count-dlq");

    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
}
Example #30
Source File: KafkaStreamsNativeEncodingDecodingTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
    System.setProperty("server.port", "0");
    System.setProperty("spring.jmx.enabled", "false");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromEmbeddedTopics(consumer, "decode-counts", "decode-counts-1");
}