Java Code Examples for org.springframework.kafka.test.utils.KafkaTestUtils#consumerProps()
The following examples show how to use org.springframework.kafka.test.utils.KafkaTestUtils#consumerProps().
You can go to the original project or source file by following the links above each example.
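Before the examples, here is a minimal, self-contained sketch of the pattern they all share: consumerProps(group, autoCommit, broker) builds a property map pointed at the embedded broker, the map is usually adjusted (typically auto.offset.reset=earliest) and handed to a DefaultKafkaConsumerFactory, and the resulting consumer is subscribed to an embedded topic. This is only an illustrative sketch: it assumes a spring-kafka-test version where the broker type is EmbeddedKafkaBroker (several examples below instead pass a KafkaEmbedded rule), and the class name, group id "sketch-group", and readFromEmbeddedTopic method are invented for illustration, not taken from any project listed here.

import java.util.Map;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.utils.KafkaTestUtils;

public class ConsumerPropsSketch {

    // Assumed to be supplied by @EmbeddedKafka / an EmbeddedKafkaRule elsewhere in the test.
    private final EmbeddedKafkaBroker embeddedKafka;

    public ConsumerPropsSketch(EmbeddedKafkaBroker embeddedKafka) {
        this.embeddedKafka = embeddedKafka;
    }

    public ConsumerRecords<Integer, String> readFromEmbeddedTopic(String topic) {
        // consumerProps(group, autoCommit, broker) pre-fills bootstrap.servers, group.id,
        // enable.auto.commit and default Integer/String deserializers for the embedded broker.
        Map<String, Object> consumerProps =
                KafkaTestUtils.consumerProps("sketch-group", "false", embeddedKafka);
        // Most examples below also rewind to the earliest offset so the consumer sees
        // records produced before it subscribed.
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        DefaultKafkaConsumerFactory<Integer, String> cf =
                new DefaultKafkaConsumerFactory<>(consumerProps);
        Consumer<Integer, String> consumer = cf.createConsumer();
        embeddedKafka.consumeFromAnEmbeddedTopic(consumer, topic);
        return KafkaTestUtils.getRecords(consumer);
    }
}

The examples that follow vary only in the group id, the deserializers, and which embedded topic(s) the consumer is attached to.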
Example 1
Source File: DeserializtionErrorHandlerByBinderTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 11 votes |
@BeforeClass
public static void setUp() throws Exception {
    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
    System.setProperty("server.port", "0");
    System.setProperty("spring.jmx.enabled", "false");
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("kafka-streams-dlq-tests",
            "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(
            consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
Example 2
Source File: DeserializtionErrorHandlerByBinderTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 7 votes |
@Test
@Ignore
public void test() {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
            senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("foos");
    template.sendDefault(1, 7, "hello");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar", "false",
            embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
            consumerProps);
    Consumer<String, String> consumer1 = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.foos.foobar-group");

    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
            "error.foos.foobar-group");
    assertThat(cr.value()).isEqualTo("hello");
    assertThat(cr.partition()).isEqualTo(0);

    // Ensuring that the deserialization was indeed done by the binder
    verify(conversionDelegate).deserializeOnInbound(any(Class.class), any(KStream.class));
}
Example 3
Source File: StreamToTableJoinFunctionTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 6 votes |
@Test
public void testStreamToTableBiFunction() {
    SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
    app.setWebApplicationType(WebApplicationType.NONE);

    Consumer<String, Long> consumer;
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2", "false",
            embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
    DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

    runTest(app, consumer);
}
Example 4
Source File: TracingKafkaTest.java From java-kafka-client with Apache License 2.0 | 6 votes |
@Test
public void nullKey() throws Exception {
    Producer<Integer, String> producer = createTracingProducer();
    ProducerRecord<Integer, String> record = new ProducerRecord<>("messages", "test");
    producer.send(record);

    final Map<String, Object> consumerProps = KafkaTestUtils
            .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
    consumerProps.put("auto.offset.reset", "earliest");

    final CountDownLatch latch = new CountDownLatch(1);
    createConsumer(latch, null, false, null);

    producer.close();
}
Example 5
Source File: SpringKafkaSenderTest.java From spring-kafka with MIT License | 6 votes |
@Before public void setUp() throws Exception { // set up the Kafka consumer properties Map<String, Object> consumerProperties = KafkaTestUtils.consumerProps("sender_group", "false", AllSpringKafkaTests.embeddedKafka); // create a Kafka consumer factory DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<String, String>(consumerProperties); // set the topic that needs to be consumed ContainerProperties containerProperties = new ContainerProperties(AllSpringKafkaTests.SENDER_TOPIC); // create a Kafka MessageListenerContainer container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties); // create a thread safe queue to store the received message records = new LinkedBlockingQueue<>(); // setup a Kafka message listener container.setupMessageListener(new MessageListener<String, String>() { @Override public void onMessage(ConsumerRecord<String, String> record) { LOGGER.debug("test-listener received message='{}'", record.toString()); records.add(record); } }); // start the container and underlying message listener container.start(); // wait until the container has the required number of assigned partitions ContainerTestUtils.waitForAssignment(container, AllSpringKafkaTests.embeddedKafka.getPartitionsPerTopic()); }
Example 6
Source File: KafkaStreamsNativeEncodingDecodingTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass
public static void setUp() {
    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
    System.setProperty("server.port", "0");
    System.setProperty("spring.jmx.enabled", "false");
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
            embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
            consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromEmbeddedTopics(consumer, "decode-counts", "decode-counts-1");
}
Example 7
Source File: KafkaStreamsDlqExampleTests.java From spring-cloud-stream-samples with Apache License 2.0 | 5 votes |
@BeforeClass
public static void setUp() {
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
            embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    consumer = cf.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "words-count-dlq");

    System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
            embeddedKafka.getBrokersAsString());
}
Example 8
Source File: KafkaStreamsBinderMultipleInputTopicsTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() throws Exception { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>( consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts"); }
Example 9
Source File: WordCountMultipleBranchesIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() throws Exception { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>( consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar"); }
Example 10
Source File: KafkastreamsBinderPojoInputStringOutputIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() throws Exception { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>( consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id"); }
Example 11
Source File: OutboundValueNullSkippedConversionTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>( consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts"); }
Example 12
Source File: KafkaStreamsWordCountApplicationTests.java From spring-cloud-stream-samples with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts"); System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString()); }
Example 13
Source File: KafkaStreamsBinderWordCountBranchesFunctionTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() throws Exception { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar"); }
Example 14
Source File: KafkaStreamsInteractiveQueryIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@BeforeClass public static void setUp() throws Exception { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id", "false", embeddedKafka); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>( consumerProps); consumer = cf.createConsumer(); embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id"); }
Example 15
Source File: DeserializationErrorHandlerByKafkaTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
@Test
public void test() {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
            senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("word1");
    template.sendDefault("foobar");
    template.setDefaultTopic("word2");
    template.sendDefault("foobar");

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobarx", "false",
            embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
            consumerProps);
    Consumer<String, String> consumer1 = cf.createConsumer();
    embeddedKafka.consumeFromEmbeddedTopics(consumer1, "error.word1.groupx", "error.word2.groupx");

    ConsumerRecord<String, String> cr1 = KafkaTestUtils.getSingleRecord(consumer1,
            "error.word1.groupx");
    assertThat(cr1.value()).isEqualTo("foobar");
    ConsumerRecord<String, String> cr2 = KafkaTestUtils.getSingleRecord(consumer1,
            "error.word2.groupx");
    assertThat(cr2.value()).isEqualTo("foobar");

    // Ensuring that the deserialization was indeed done by Kafka natively
    verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class), any(KStream.class));
    verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
Example 16
Source File: KafkaStreamsBinderHealthIndicatorTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 4 votes |
private void receive(ConfigurableApplicationContext context,
        List<ProducerRecord<Integer, String>> records, Status expected, String... topics)
        throws Exception {
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id0", "false",
            embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
            consumerProps);
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
            senderProps);
    try (Consumer<String, String> consumer = cf.createConsumer()) {
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
        CountDownLatch latch = new CountDownLatch(records.size());
        for (ProducerRecord<Integer, String> record : records) {
            ListenableFuture<SendResult<Integer, String>> future = template.send(record);
            future.addCallback(new ListenableFutureCallback<SendResult<Integer, String>>() {
                @Override
                public void onFailure(Throwable ex) {
                    Assert.fail();
                }

                @Override
                public void onSuccess(SendResult<Integer, String> result) {
                    latch.countDown();
                }
            });
        }
        latch.await(5, TimeUnit.SECONDS);
        embeddedKafka.consumeFromEmbeddedTopics(consumer, topics);
        KafkaTestUtils.getRecords(consumer, 1000);
        TimeUnit.SECONDS.sleep(2);
        checkHealth(context, expected);
    }
    finally {
        pf.destroy();
    }
}
Example 17
Source File: KafkaBinderTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 4 votes |
@SuppressWarnings({ "rawtypes", "unchecked" }) @Test public void testPolledConsumerWithDlq() throws Exception { KafkaTestBinder binder = getBinder(); PollableSource<MessageHandler> inboundBindTarget = new DefaultPollableMessageSource( this.messageConverter); ExtendedConsumerProperties<KafkaConsumerProperties> properties = createConsumerProperties(); properties.getExtension().setPollTimeout(1); properties.setMaxAttempts(2); properties.setBackOffInitialInterval(0); properties.getExtension().setEnableDlq(true); Map<String, Object> producerProps = KafkaTestUtils .producerProps(embeddedKafka.getEmbeddedKafka()); Binding<PollableSource<MessageHandler>> binding = binder.bindPollableConsumer( "pollableDlq", "group-pcWithDlq", inboundBindTarget, properties); KafkaTemplate template = new KafkaTemplate( new DefaultKafkaProducerFactory<>(producerProps)); template.send("pollableDlq", "testPollableDLQ"); try { int n = 0; while (n++ < 100) { inboundBindTarget.poll(m -> { throw new RuntimeException("test DLQ"); }); Thread.sleep(100); } } catch (MessageHandlingException e) { assertThat(e.getCause().getMessage()).isEqualTo("test DLQ"); } Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("dlq", "false", embeddedKafka.getEmbeddedKafka()); consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); ConsumerFactory cf = new DefaultKafkaConsumerFactory<>(consumerProps); Consumer consumer = cf.createConsumer(); embeddedKafka.getEmbeddedKafka().consumeFromAnEmbeddedTopic(consumer, "error.pollableDlq.group-pcWithDlq"); ConsumerRecord deadLetter = KafkaTestUtils.getSingleRecord(consumer, "error.pollableDlq.group-pcWithDlq"); assertThat(deadLetter).isNotNull(); assertThat(deadLetter.value()).isEqualTo("testPollableDLQ"); binding.unbind(); consumer.close(); }
Example 18
Source File: SpringKafkaSenderTest.java From spring-kafka with MIT License | 4 votes |
@Before public void setUp() throws Exception { // set up the Kafka consumer properties Map<String, Object> consumerProperties = KafkaTestUtils.consumerProps("sender", "false", embeddedKafka.getEmbeddedKafka()); // create a Kafka consumer factory DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<String, String>( consumerProperties); // set the topic that needs to be consumed ContainerProperties containerProperties = new ContainerProperties(SENDER_TOPIC); // create a Kafka MessageListenerContainer container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties); // create a thread safe queue to store the received message records = new LinkedBlockingQueue<>(); // setup a Kafka message listener container .setupMessageListener(new MessageListener<String, String>() { @Override public void onMessage( ConsumerRecord<String, String> record) { LOGGER.debug("test-listener received message='{}'", record.toString()); records.add(record); } }); // start the container and underlying message listener container.start(); // wait until the container has the required number of assigned partitions ContainerTestUtils.waitForAssignment(container, embeddedKafka.getEmbeddedKafka().getPartitionsPerTopic()); }
Example 19
Source File: SpringKafkaITest.java From java-specialagent with Apache License 2.0 | 4 votes |
@Bean
public ConsumerFactory<String, String> consumerFactory() {
    final Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("sampleRawConsumer",
            "false", kafkaEmbedded);
    consumerProps.put("auto.offset.reset", "earliest");
    return new DefaultKafkaConsumerFactory<>(consumerProps);
}
Example 20
Source File: KafkaBoardClientEmbeddedKafkaTests.java From event-store-demo with GNU General Public License v3.0 | 3 votes |
@BeforeClass public static void setUp() throws Exception { Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("command-board-events-group", "false", kafkaEmbedded); consumerProps.put( ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest" ); DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>( consumerProps ); consumer = cf.createConsumer(); kafkaEmbedded.consumeFromAnEmbeddedTopic( consumer, RECEIVER_TOPIC ); }