org.springframework.kafka.core.DefaultKafkaConsumerFactory Java Examples

The following examples show how to use org.springframework.kafka.core.DefaultKafkaConsumerFactory. Each example is taken from an open source project; the source file, project, and license are noted above the code.
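Before the examples, here is a minimal, self-contained sketch of the pattern most of them follow: build a configuration map keyed by ConsumerConfig constants, construct a DefaultKafkaConsumerFactory (optionally passing explicit key and value deserializers), and create a Consumer from it. This sketch is not taken from any of the projects listed below; the broker address, group id, and topic name are placeholders.

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

public class ConsumerFactorySketch {

    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        // Placeholder broker address and group id; adjust for your environment.
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Deserializers can be supplied as config entries or, as here, as constructor arguments.
        DefaultKafkaConsumerFactory<String, String> factory = new DefaultKafkaConsumerFactory<>(
                props, new StringDeserializer(), new StringDeserializer());

        // Create a plain Kafka Consumer from the factory and read from a placeholder topic.
        try (Consumer<String, String> consumer = factory.createConsumer()) {
            consumer.subscribe(Collections.singleton("example-topic"));
            consumer.poll(Duration.ofSeconds(1)).forEach(record ->
                    System.out.println(record.key() + " -> " + record.value()));
        }
    }
}

The same factory can instead be handed to a KafkaMessageListenerContainer or a ConcurrentKafkaListenerContainerFactory, which is what several of the configuration examples below do.
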
Example #1
Source File: DeserializtionErrorHandlerByBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
			embeddedKafka.getBrokersAsString());
	System.setProperty("server.port", "0");
	System.setProperty("spring.jmx.enabled", "false");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("kafka-streams-dlq-tests", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
 
Example #2
Source File: DeserializtionErrorHandlerByBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@Ignore
public void test() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic("foos");
	template.sendDefault(1, 7, "hello");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	Consumer<String, String> consumer1 = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
			"error.foos.foobar-group");

	ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
			"error.foos.foobar-group");
	assertThat(cr.value()).isEqualTo("hello");
	assertThat(cr.partition()).isEqualTo(0);

	// Ensuring that the deserialization was indeed done by the binder
	verify(conversionDelegate).deserializeOnInbound(any(Class.class),
			any(KStream.class));
}
 
Example #3
Source File: KafkaConfig.java    From Mastering-Distributed-Tracing with MIT License
private ConsumerFactory<String, Message> consumerFactory() throws Exception {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId());
    props.put(ConsumerConfig.GROUP_ID_CONFIG, app.name);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1000);
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");

    return new TracingConsumerFactory<>( //
            new DefaultKafkaConsumerFactory<String, Message>( //
                    props, //
                    new StringDeserializer(), //
                    new JsonDeserializer<>(Message.class)));
}
 
Example #4
Source File: KafkaConfigurer.java    From bird-java with MIT License
@Bean
@ConditionalOnProperty(value = EventbusConstant.Kafka.LISTENER_PACKAGES)
public KafkaMessageListenerContainer kafkaListenerContainer(EventDispatcher eventDispatcher) {

    KafkaEventArgListener listener = new KafkaEventArgListener(eventDispatcher);
    ContainerProperties containerProperties = new ContainerProperties(eventDispatcher.getAllTopics());
    containerProperties.setMessageListener(listener);
    containerProperties.setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL_IMMEDIATE);

    HashMap<String,Object> properties = new HashMap<>(8);
    properties.put("bootstrap.servers", kafkaProperties.getHost());

    KafkaListenerProperties listenerProperties = kafkaProperties.getListener();
    properties.put("group.id", listenerProperties.getGroupId());
    properties.put("auto.offset.reset", "earliest");
    properties.put("enable.auto.commit", false);
    properties.put("auto.commit.interval.ms", 1000);
    properties.put("session.timeout.ms", 15000);
    properties.put("key.deserializer", StringDeserializer.class);
    properties.put("value.deserializer", EventArgDeserializer.class);
    DefaultKafkaConsumerFactory<String,EventArg> consumerFactory = new DefaultKafkaConsumerFactory<>(properties);

    return new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
}
 
Example #5
Source File: DeserializationErrorHandlerByKafkaTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
	System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
			embeddedKafka.getBrokersAsString());

	System.setProperty("server.port", "0");
	System.setProperty("spring.jmx.enabled", "false");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("fooc", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "DeserializationErrorHandlerByKafkaTests-out", "DeserializationErrorHandlerByKafkaTests-out");
}
 
Example #6
Source File: KafkaNativeSerializationApplicationTests.java    From spring-cloud-stream-samples with Apache License 2.0
@Test
public void testSendReceive() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka());
	senderProps.put("value.serializer", StringSerializer.class);
	DefaultKafkaProducerFactory<byte[], String> pf = new DefaultKafkaProducerFactory<>(senderProps);
	KafkaTemplate<byte[], String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic(INPUT_TOPIC);
	template.sendDefault("foo");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(GROUP_NAME, "false", embeddedKafka.getEmbeddedKafka());
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put("value.deserializer", MyJsonDeserializer.class);
	DefaultKafkaConsumerFactory<byte[], Person> cf = new DefaultKafkaConsumerFactory<>(consumerProps);

	Consumer<byte[], Person> consumer = cf.createConsumer();
	consumer.subscribe(Collections.singleton(OUTPUT_TOPIC));
	ConsumerRecords<byte[], Person> records = consumer.poll(Duration.ofSeconds(10));
	consumer.commitSync();

	assertThat(records.count()).isEqualTo(1);
	assertThat(new String(records.iterator().next().value().getName())).isEqualTo("foo");
}
 
Example #7
Source File: KafkaBinderMetrics.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
private synchronized ConsumerFactory<?, ?> createConsumerFactory() {
	if (this.defaultConsumerFactory == null) {
		Map<String, Object> props = new HashMap<>();
		props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
				ByteArrayDeserializer.class);
		props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
				ByteArrayDeserializer.class);
		Map<String, Object> mergedConfig = this.binderConfigurationProperties
				.mergedConsumerConfiguration();
		if (!ObjectUtils.isEmpty(mergedConfig)) {
			props.putAll(mergedConfig);
		}
		if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
			props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
					this.binderConfigurationProperties
							.getKafkaConnectionString());
		}
		this.defaultConsumerFactory = new DefaultKafkaConsumerFactory<>(
				props);
	}
	return this.defaultConsumerFactory;
}
 
Example #8
Source File: KafkaBinderAutoConfigurationPropertiesTest.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testKafkaHealthIndicatorProperties() {
	assertThat(this.kafkaBinderHealthIndicator).isNotNull();
	Field consumerFactoryField = ReflectionUtils.findField(
			KafkaBinderHealthIndicator.class, "consumerFactory",
			ConsumerFactory.class);
	ReflectionUtils.makeAccessible(consumerFactoryField);
	DefaultKafkaConsumerFactory consumerFactory = (DefaultKafkaConsumerFactory) ReflectionUtils
			.getField(consumerFactoryField, this.kafkaBinderHealthIndicator);
	Field configField = ReflectionUtils.findField(DefaultKafkaConsumerFactory.class,
			"configs", Map.class);
	ReflectionUtils.makeAccessible(configField);
	Map<String, Object> configs = (Map<String, Object>) ReflectionUtils
			.getField(configField, consumerFactory);
	assertThat(configs.containsKey("bootstrap.servers")).isTrue();
	List<String> bootstrapServers = new ArrayList<>();
	bootstrapServers.add("10.98.09.199:9092");
	bootstrapServers.add("10.98.09.196:9092");
	assertThat(((List<String>) configs.get("bootstrap.servers"))
			.containsAll(bootstrapServers)).isTrue();
}
 
Example #9
Source File: SpringKafkaSenderTest.java    From spring-kafka with MIT License
@Before
public void setUp() throws Exception {
  // set up the Kafka consumer properties
  Map<String, Object> consumerProperties =
      KafkaTestUtils.consumerProps("sender_group", "false", AllSpringKafkaTests.embeddedKafka);

  // create a Kafka consumer factory
  DefaultKafkaConsumerFactory<String, String> consumerFactory =
      new DefaultKafkaConsumerFactory<String, String>(consumerProperties);

  // set the topic that needs to be consumed
  ContainerProperties containerProperties =
      new ContainerProperties(AllSpringKafkaTests.SENDER_TOPIC);

  // create a Kafka MessageListenerContainer
  container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);

  // create a thread safe queue to store the received message
  records = new LinkedBlockingQueue<>();

  // setup a Kafka message listener
  container.setupMessageListener(new MessageListener<String, String>() {
    @Override
    public void onMessage(ConsumerRecord<String, String> record) {
      LOGGER.debug("test-listener received message='{}'", record.toString());
      records.add(record);
    }
  });

  // start the container and underlying message listener
  container.start();
  // wait until the container has the required number of assigned partitions
  ContainerTestUtils.waitForAssignment(container,
      AllSpringKafkaTests.embeddedKafka.getPartitionsPerTopic());
}
 
Example #10
Source File: UmcReceiveAutoConfiguration.java    From super-cloudops with Apache License 2.0
@Bean(BEAN_KAFKA_BATCH_FACTORY)
@EnableKafkaCollectReceiver
@SuppressWarnings({ "unchecked", "rawtypes" })
public KafkaListenerContainerFactory<?> batchFactory(ReceiverProperties conf) {
	// Create consumer factory.
	Properties properties = conf.getKafka().getProperties();
	Map<String, Object> config = (Map) properties;
	ConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(config);

	// Create concurrent consumer container factory.
	ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
	factory.setConsumerFactory(cf);
	factory.setConcurrency(conf.getKafka().getConcurrency());
	factory.setBatchListener(true);

	// Spring kafka container properties.
	ContainerProperties containerProps = factory.getContainerProperties();
	containerProps.setPollTimeout(conf.getKafka().getPollTimeout());
	// Bulk consumption change buffer queue size.
	containerProps.setQueueDepth(conf.getKafka().getQueueDepth());
	containerProps.setAckMode(AckMode.MANUAL_IMMEDIATE);
	return factory;
}
 
Example #11
Source File: StreamToTableJoinFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testStreamToTable() {
	SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	Consumer<String, Long> consumer;
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
	DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

	runTest(app, consumer);
}
 
Example #12
Source File: KafkaConsumerConfig.java    From SpringAll with MIT License
@Bean
public ConsumerFactory<String, Message> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
            bootstrapServers);
    props.put(
            ConsumerConfig.GROUP_ID_CONFIG,
            consumerGroupId);
    props.put(
            ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
            autoOffsetReset);
    // props.put(
    //         ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
    //         StringDeserializer.class);
    // props.put(
    //         ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
    //         StringDeserializer.class);
    return new DefaultKafkaConsumerFactory<>(
            props,
            new StringDeserializer(),
            new JsonDeserializer<>(Message.class));
}
 
Example #13
Source File: KafkaBinderConfiguration.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Bean
@ConditionalOnMissingBean(name = "binderClientFactoryCustomizer")
public ClientFactoryCustomizer binderClientFactoryCustomizer(MeterRegistry meterRegistry) {

	return new ClientFactoryCustomizer() {

		@Override
		public void configure(ProducerFactory<?, ?> pf) {
			if (pf instanceof DefaultKafkaProducerFactory) {
				((DefaultKafkaProducerFactory<?, ?>) pf)
						.addListener(new MicrometerProducerListener<>(meterRegistry));
			}
		}

		@Override
		public void configure(ConsumerFactory<?, ?> cf) {
			if (cf instanceof DefaultKafkaConsumerFactory) {
				((DefaultKafkaConsumerFactory<?, ?>) cf)
						.addListener(new MicrometerConsumerListener<>(meterRegistry));
			}
		}

	};

}
 
Example #14
Source File: StreamToTableJoinFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testStreamToTableBiFunction() {
	SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	Consumer<String, Long> consumer;
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
	DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

	runTest(app, consumer);
}
 
Example #15
Source File: SpringBootKafkaStreamsInventoryCountTests.java    From spring-cloud-stream-samples with Apache License 2.0
@BeforeEach
void setUp() {

    Map<String, Object> props = new HashMap<>();

    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, broker.getBrokersAsString());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    AbstractInventoryUpdateEventGenerator eventGenerator = new
            KafkaTemplateInventoryUpdateEventGenerator(props, INPUT_TOPIC);
    setEventGenerator(eventGenerator);

    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(GROUP_NAME, "true", broker);
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "test");
    consumerProps.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
    consumerProps.put(JsonDeserializer.TRUSTED_PACKAGES, KafkaStreamsInventoryCountTests.class.getPackage().getName());
    consumerProps.put(JsonDeserializer.KEY_DEFAULT_TYPE, ProductKey.class);
    consumerProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE, InventoryCountEvent.class);
    consumerProps.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, "false");
    cf = new DefaultKafkaConsumerFactory<>(consumerProps);

    consumer = cf.createConsumer(GROUP_NAME);
    consumer.subscribe(Collections.singleton(OUTPUT_TOPIC));
}
 
Example #16
Source File: DeserializationErrorHandlerByKafkaTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void test() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic("xyz-DeserializationErrorHandlerByKafkaTests-In");
	template.sendDefault(1, null, "foobar");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	Consumer<String, String> consumer1 = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.xyz-DeserializationErrorHandlerByKafkaTests-In.group");

	ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
			"error.xyz-DeserializationErrorHandlerByKafkaTests-In.group");
	assertThat(cr.value()).isEqualTo("foobar");
	assertThat(cr.partition()).isEqualTo(0); // custom partition function

	// Ensuring that the deserialization was indeed done by Kafka natively
	verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
			any(KStream.class));
	verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
 
Example #17
Source File: DeserializationErrorHandlerByKafkaTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@Ignore
public void test() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic("abc-DeserializationErrorHandlerByKafkaTests-In");
	template.sendDefault(1, null, "foobar");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	Consumer<String, String> consumer1 = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer1, "error.abc-DeserializationErrorHandlerByKafkaTests-In.group");

	ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
			"error.abc-DeserializationErrorHandlerByKafkaTests-In.group");
	assertThat(cr.value()).isEqualTo("foobar");
	assertThat(cr.partition()).isEqualTo(0); // custom partition function

	// Ensuring that the deserialization was indeed done by Kafka natively
	verify(conversionDelegate, never()).deserializeOnInbound(any(Class.class),
			any(KStream.class));
	verify(conversionDelegate, never()).serializeOnOutbound(any(KStream.class));
}
 
Example #18
Source File: DeserializtionErrorHandlerByBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void test() {
	Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
	DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(
			senderProps);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
	template.setDefaultTopic("goos");
	template.sendDefault(1, 7, "hello");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("foobar",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	Consumer<String, String> consumer1 = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer1,
			"error.goos.foobar-group");

	ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1,
			"error.goos.foobar-group");
	assertThat(cr.value()).isEqualTo("hello");
	assertThat(cr.partition()).isEqualTo(0);

	// Ensuring that the deserialization was indeed done by the binder
	verify(conversionDelegate).deserializeOnInbound(any(Class.class),
			any(KStream.class));
}
 
Example #19
Source File: KafkaStreamsDlqExampleTests.java    From spring-cloud-stream-samples with Apache License 2.0
@BeforeClass
public static void setUp() {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "words-count-dlq");
	System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafka.getBrokersAsString());
}
 
Example #20
Source File: MultipleFunctionsInSameAppTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("purchase-groups", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "coffee", "electronics");
}
 
Example #21
Source File: KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put("value.deserializer", LongDeserializer.class);
	DefaultKafkaConsumerFactory<Integer, Long> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
 
Example #22
Source File: KafkaStreamsBinderDestinationIsPatternTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "true",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "out");
}
 
Example #23
Source File: WordCountMultipleBranchesIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar");
}
 
Example #24
Source File: KafkaRepository.java    From kafka-service-broker with Apache License 2.0
private KafkaMessageListenerContainer<Integer, String> createContainer(
        ContainerProperties containerProps) {
    Map<String, Object> props = consumerProps();
    DefaultKafkaConsumerFactory<Integer, String> cf =
            new DefaultKafkaConsumerFactory<Integer, String>(props);
    KafkaMessageListenerContainer<Integer, String> container =
            new KafkaMessageListenerContainer<>(cf, containerProps);
    return container;
}
 
Example #25
Source File: SynapseKafkaAutoConfiguration.java    From synapse with Apache License 2.0
@Bean
@ConditionalOnMissingBean(name="kafkaConsumerFactory")
public ConsumerFactory<String, String> kafkaConsumerFactory(KafkaProperties kafkaProperties) {
    return new DefaultKafkaConsumerFactory<>(
            kafkaProperties.buildConsumerProperties(),
            new StringDeserializer(),
            new StringDeserializer());
}
 
Example #26
Source File: KafkaStreamsBinderWordCountFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "counts-1", "counts-2");
}
 
Example #27
Source File: KafkaStreamsInteractiveQueryIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
 
Example #28
Source File: KafkaStreamsNativeEncodingDecodingTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() {
	System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers",
			embeddedKafka.getBrokersAsString());
	System.setProperty("server.port", "0");
	System.setProperty("spring.jmx.enabled", "false");

	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "decode-counts", "decode-counts-1");
}
 
Example #29
Source File: KafkaStreamsBinderWordCountBranchesFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupx", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromEmbeddedTopics(consumer, "counts", "foo", "bar");
}
 
Example #30
Source File: KafkaStreamsBinderMultipleInputTopicsTest.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
			embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts");
}