Java Code Examples for org.springframework.cloud.stream.binder.ExtendedConsumerProperties#setInstanceIndex()

The following examples show how to use org.springframework.cloud.stream.binder.ExtendedConsumerProperties#setInstanceIndex(). The examples are extracted from open source projects; you can go to the original project or source file by following the links above each example.
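Before the project examples, here is a minimal, self-contained sketch of the usual pattern: setInstanceIndex() is paired with setInstanceCount() so that a given consumer instance only binds the partitions assigned to its index. The sketch assumes the Kafka binder's KafkaConsumerProperties as the extension type; the class name InstanceIndexExample is hypothetical.

import org.springframework.cloud.stream.binder.ExtendedConsumerProperties;
import org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties;

public class InstanceIndexExample {

	public static void main(String[] args) {
		// Describe a consumer that is instance 2 of a 3-instance group, so it
		// only receives the partitions that map to index 2 of the destination.
		ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties =
				new ExtendedConsumerProperties<>(new KafkaConsumerProperties());
		consumerProperties.setInstanceCount(3);
		consumerProperties.setInstanceIndex(2);
		consumerProperties.setPartitioned(true);

		System.out.println("instanceIndex=" + consumerProperties.getInstanceIndex()
				+ ", instanceCount=" + consumerProperties.getInstanceCount());
	}
}

In the binder tests below, the same two properties are set on the consumer properties before calling bindConsumer(...), which is what ties the instance index to concrete partitions of the destination.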
Example 1
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testAutoAddPartitionsDisabledSucceedsIfTopicUnderPartitionedAndAutoRebalanceEnabled()
		throws Throwable {
	KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();

	String testTopicName = "existing" + System.currentTimeMillis();
	invokeCreateTopic(testTopicName, 1, 1);
	configurationProperties.setAutoAddPartitions(false);
	Binder binder = getBinder(configurationProperties);
	GenericApplicationContext context = new GenericApplicationContext();
	context.refresh();

	ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();

	DirectChannel input = createBindableChannel("input",
			createConsumerBindingProperties(consumerProperties));

	// this consumer must consume from partition 2
	consumerProperties.setInstanceCount(3);
	consumerProperties.setInstanceIndex(2);
	Binding binding = binder.bindConsumer(testTopicName, "test", input,
			consumerProperties);
	binding.unbind();
	assertThat(invokePartitionSize(testTopicName)).isEqualTo(1);
}
 
Example 2
Source File: KinesisBinderTests.java    From spring-cloud-stream-binder-aws-kinesis with Apache License 2.0
@Override
protected ExtendedConsumerProperties<KinesisConsumerProperties> createConsumerProperties() {
	ExtendedConsumerProperties<KinesisConsumerProperties> kinesisConsumerProperties = new ExtendedConsumerProperties<>(
			new KinesisConsumerProperties());
	// set the default values that would normally be propagated by Spring Cloud Stream
	kinesisConsumerProperties.setInstanceCount(1);
	kinesisConsumerProperties.setInstanceIndex(0);
	kinesisConsumerProperties.getExtension().setShardIteratorType(ShardIteratorType.TRIM_HORIZON.name());
	return kinesisConsumerProperties;
}
 
Example 3
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Override
protected ExtendedConsumerProperties<KafkaConsumerProperties> createConsumerProperties() {
	final ExtendedConsumerProperties<KafkaConsumerProperties> kafkaConsumerProperties = new ExtendedConsumerProperties<>(
			new KafkaConsumerProperties());
	// set the default values that would normally be propagated by Spring Cloud Stream
	kafkaConsumerProperties.setInstanceCount(1);
	kafkaConsumerProperties.setInstanceIndex(0);
	return kafkaConsumerProperties;
}
 
Example 4
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testAutoAddPartitionsDisabledFailsIfTopicUnderPartitionedAndAutoRebalanceDisabled()
		throws Throwable {
	KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();

	String testTopicName = "existing" + System.currentTimeMillis();
	invokeCreateTopic(testTopicName, 1, 1);
	configurationProperties.setAutoAddPartitions(false);
	Binder binder = getBinder(configurationProperties);
	GenericApplicationContext context = new GenericApplicationContext();
	context.refresh();

	ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
	DirectChannel output = createBindableChannel("output",
			createConsumerBindingProperties(consumerProperties));
	// this consumer must consume from partition 2
	consumerProperties.setInstanceCount(3);
	consumerProperties.setInstanceIndex(2);
	consumerProperties.getExtension().setAutoRebalanceEnabled(false);
	expectedProvisioningException.expect(ProvisioningException.class);
	expectedProvisioningException.expectMessage(
			"The number of expected partitions was: 3, but 1 has been found instead");
	Binding binding = binder.bindConsumer(testTopicName, "test", output,
			consumerProperties);
	if (binding != null) {
		binding.unbind();
	}
}
 
Example 5
Source File: KinesisBinderTests.java    From spring-cloud-stream-binder-aws-kinesis with Apache License 2.0
@Test
@Override
@SuppressWarnings("unchecked")
public void testAnonymousGroup() throws Exception {
	KinesisTestBinder binder = getBinder();
	ExtendedProducerProperties<KinesisProducerProperties> producerProperties = createProducerProperties();
	DirectChannel output = createBindableChannel("output",
			createProducerBindingProperties(producerProperties));

	Binding<MessageChannel> producerBinding = binder.bindProducer(
			String.format("defaultGroup%s0", getDestinationNameDelimiter()), output,
			producerProperties);

	ExtendedConsumerProperties<KinesisConsumerProperties> consumerProperties = createConsumerProperties();
	consumerProperties.setConcurrency(2);
	consumerProperties.setInstanceCount(3);
	consumerProperties.setInstanceIndex(0);

	QueueChannel input1 = new QueueChannel();
	Binding<MessageChannel> binding1 = binder.bindConsumer(
			String.format("defaultGroup%s0", getDestinationNameDelimiter()), null,
			input1, consumerProperties);

	consumerProperties.setInstanceIndex(1);

	QueueChannel input2 = new QueueChannel();
	Binding<MessageChannel> binding2 = binder.bindConsumer(
			String.format("defaultGroup%s0", getDestinationNameDelimiter()), null,
			input2, consumerProperties);

	String testPayload1 = "foo-" + UUID.randomUUID().toString();
	output.send(MessageBuilder.withPayload(testPayload1)
			.setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN)
			.build());

	Message<byte[]> receivedMessage1 = (Message<byte[]>) receive(input1);
	assertThat(receivedMessage1).isNotNull();
	assertThat(new String(receivedMessage1.getPayload())).isEqualTo(testPayload1);

	Message<byte[]> receivedMessage2 = (Message<byte[]>) receive(input2);
	assertThat(receivedMessage2).isNotNull();
	assertThat(new String(receivedMessage2.getPayload())).isEqualTo(testPayload1);

	binding2.unbind();

	String testPayload2 = "foo-" + UUID.randomUUID().toString();
	output.send(MessageBuilder.withPayload(testPayload2)
			.setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN)
			.build());

	binding2 = binder.bindConsumer(
			String.format("defaultGroup%s0", getDestinationNameDelimiter()), null,
			input2, consumerProperties);
	String testPayload3 = "foo-" + UUID.randomUUID().toString();
	output.send(MessageBuilder.withPayload(testPayload3)
			.setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN)
			.build());

	receivedMessage1 = (Message<byte[]>) receive(input1);
	assertThat(receivedMessage1).isNotNull();
	assertThat(new String(receivedMessage1.getPayload())).isEqualTo(testPayload2);
	receivedMessage1 = (Message<byte[]>) receive(input1);
	assertThat(receivedMessage1).isNotNull();
	assertThat(new String(receivedMessage1.getPayload())).isNotNull();

	receivedMessage2 = (Message<byte[]>) receive(input2);
	assertThat(receivedMessage2).isNotNull();
	assertThat(new String(receivedMessage2.getPayload())).isEqualTo(testPayload1);

	receivedMessage2 = (Message<byte[]>) receive(input2);
	assertThat(receivedMessage2).isNotNull();
	assertThat(new String(receivedMessage2.getPayload())).isEqualTo(testPayload2);

	receivedMessage2 = (Message<byte[]>) receive(input2);
	assertThat(receivedMessage2).isNotNull();
	assertThat(new String(receivedMessage2.getPayload())).isEqualTo(testPayload3);

	producerBinding.unbind();
	binding1.unbind();
	binding2.unbind();
}
 
Example 6
Source File: KinesisBinderTests.java    From spring-cloud-stream-binder-aws-kinesis with Apache License 2.0
@Test
@Override
public void testPartitionedModuleJava() throws Exception {
	KinesisTestBinder binder = getBinder();

	ExtendedConsumerProperties<KinesisConsumerProperties> consumerProperties = createConsumerProperties();
	consumerProperties.setConcurrency(2);
	consumerProperties.setInstanceCount(3);
	consumerProperties.setInstanceIndex(0);
	consumerProperties.setPartitioned(true);

	final List<Message<?>> results = new ArrayList<>();
	final CountDownLatch receiveLatch = new CountDownLatch(3);

	MessageHandler receivingHandler = (message) -> {
		results.add(message);
		receiveLatch.countDown();
	};

	DirectChannel input0 = createBindableChannelInternal("test.input0J", new BindingProperties(), true);
	input0.subscribe(receivingHandler);

	Binding<MessageChannel> input0Binding = binder.bindConsumer("partJ.0",
			"testPartitionedModuleJava", input0, consumerProperties);

	consumerProperties.setInstanceIndex(1);

	DirectChannel input1 = createBindableChannelInternal("test.input1J", new BindingProperties(), true);
	input1.subscribe(receivingHandler);

	Binding<MessageChannel> input1Binding = binder.bindConsumer("partJ.0",
			"testPartitionedModuleJava", input1, consumerProperties);

	consumerProperties.setInstanceIndex(2);

	DirectChannel input2 = createBindableChannelInternal("test.input2J", new BindingProperties(), true);
	input2.subscribe(receivingHandler);

	Binding<MessageChannel> input2Binding = binder.bindConsumer("partJ.0",
			"testPartitionedModuleJava", input2, consumerProperties);

	ExtendedProducerProperties<KinesisProducerProperties> producerProperties = createProducerProperties();

	producerProperties.setPartitionKeyExtractorName("partitionSupport");
	producerProperties.setPartitionSelectorName("partitionSupport");
	producerProperties.setPartitionCount(3);

	DirectChannel output = createBindableChannelInternal("test.output",
			createProducerBindingProperties(producerProperties), false);

	Binding<MessageChannel> outputBinding = binder.bindProducer("partJ.0", output,
			producerProperties);
	if (usesExplicitRouting()) {
		Object endpoint = extractEndpoint(outputBinding);
		assertThat(getEndpointRouting(endpoint))
				.contains(getExpectedRoutingBaseDestination("partJ.0",
						"testPartitionedModuleJava") + "-' + headers['"
						+ BinderHeaders.PARTITION_HEADER + "']");
	}

	output.send(new GenericMessage<>(2));
	output.send(new GenericMessage<>(1));
	output.send(new GenericMessage<>(0));

	assertThat(receiveLatch.await(20, TimeUnit.SECONDS)).isTrue();

	assertThat(results).extracting("payload").containsExactlyInAnyOrder(
			"0".getBytes(), "1".getBytes(), "2".getBytes());

	input0Binding.unbind();
	input1Binding.unbind();
	input2Binding.unbind();
	outputBinding.unbind();
}
 
Example 7
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testPartitionedModuleJava() throws Exception {
	Binder binder = getBinder();

	KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();

	ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
	consumerProperties.setConcurrency(2);
	consumerProperties.setInstanceCount(4);
	consumerProperties.setInstanceIndex(0);
	consumerProperties.setPartitioned(true);
	consumerProperties.getExtension().setAutoRebalanceEnabled(false);
	QueueChannel input0 = new QueueChannel();
	input0.setBeanName("test.input0J");
	Binding<MessageChannel> input0Binding = binder.bindConsumer("partJ.0", "test",
			input0, consumerProperties);
	consumerProperties.setInstanceIndex(1);
	QueueChannel input1 = new QueueChannel();
	input1.setBeanName("test.input1J");
	Binding<MessageChannel> input1Binding = binder.bindConsumer("partJ.0", "test",
			input1, consumerProperties);
	consumerProperties.setInstanceIndex(2);
	QueueChannel input2 = new QueueChannel();
	input2.setBeanName("test.input2J");
	Binding<MessageChannel> input2Binding = binder.bindConsumer("partJ.0", "test",
			input2, consumerProperties);
	consumerProperties.setInstanceIndex(3);
	QueueChannel input3 = new QueueChannel();
	input3.setBeanName("test.input3J");
	Binding<MessageChannel> input3Binding = binder.bindConsumer("partJ.0", "test",
			input3, consumerProperties);

	ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();

	this.applicationContext.registerBean("pkExtractor",
			PartitionTestSupport.class, () -> new PartitionTestSupport());
	this.applicationContext.registerBean("pkSelector",
			PartitionTestSupport.class, () -> new PartitionTestSupport());
	producerProperties.setPartitionKeyExtractorName("pkExtractor");
	producerProperties.setPartitionSelectorName("pkSelector");
	producerProperties.setPartitionCount(3); // overridden to 8 on the actual topic
	DirectChannel output = createBindableChannel("output",
			createProducerBindingProperties(producerProperties));
	output.setBeanName("test.output");
	Binding<MessageChannel> outputBinding = binder.bindProducer("partJ.0", output,
			producerProperties);
	if (usesExplicitRouting()) {
		Object endpoint = extractEndpoint(outputBinding);
		assertThat(getEndpointRouting(endpoint))
				.contains(getExpectedRoutingBaseDestination("partJ.0", "test")
						+ "-' + headers['partition']");
	}

	output.send(new GenericMessage<>(2));
	output.send(new GenericMessage<>(1));
	output.send(new GenericMessage<>(0));
	output.send(new GenericMessage<>(3));

	Message<?> receive0 = receive(input0);
	assertThat(receive0).isNotNull();
	Message<?> receive1 = receive(input1);
	assertThat(receive1).isNotNull();
	Message<?> receive2 = receive(input2);
	assertThat(receive2).isNotNull();
	Message<?> receive3 = receive(input3);
	assertThat(receive3).isNotNull();
	ObjectMapper om = new ObjectMapper();

	assertThat(om.readValue((byte[]) receive0.getPayload(), Integer.class))
			.isEqualTo(0);
	assertThat(om.readValue((byte[]) receive1.getPayload(), Integer.class))
			.isEqualTo(1);
	assertThat(om.readValue((byte[]) receive2.getPayload(), Integer.class))
			.isEqualTo(2);
	assertThat(om.readValue((byte[]) receive3.getPayload(), Integer.class))
			.isEqualTo(3);

	input0Binding.unbind();
	input1Binding.unbind();
	input2Binding.unbind();
	input3Binding.unbind();
	outputBinding.unbind();
}
 
Example 8
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testPartitionedModuleJavaWithRawMode() throws Exception {
	Binder binder = getBinder();
	ExtendedProducerProperties<KafkaProducerProperties> properties = createProducerProperties();
	properties.setHeaderMode(HeaderMode.none);
	this.applicationContext.registerBean("pkExtractor",
			RawKafkaPartitionTestSupport.class, () -> new RawKafkaPartitionTestSupport());
	this.applicationContext.registerBean("pkSelector",
			RawKafkaPartitionTestSupport.class, () -> new RawKafkaPartitionTestSupport());
	properties.setPartitionKeyExtractorName("pkExtractor");
	properties.setPartitionSelectorName("pkSelector");
	properties.setPartitionCount(6);

	DirectChannel output = createBindableChannel("output",
			createProducerBindingProperties(properties));
	output.setBeanName("test.output");
	Binding<MessageChannel> outputBinding = binder.bindProducer("partJ.raw.0", output,
			properties);

	ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
	consumerProperties.setConcurrency(2);
	consumerProperties.setInstanceCount(3);
	consumerProperties.setInstanceIndex(0);
	consumerProperties.setPartitioned(true);
	consumerProperties.setHeaderMode(HeaderMode.none);
	consumerProperties.getExtension().setAutoRebalanceEnabled(false);
	QueueChannel input0 = new QueueChannel();
	input0.setBeanName("test.input0J");
	Binding<MessageChannel> input0Binding = binder.bindConsumer("partJ.raw.0", "test",
			input0, consumerProperties);
	consumerProperties.setInstanceIndex(1);
	QueueChannel input1 = new QueueChannel();
	input1.setBeanName("test.input1J");
	Binding<MessageChannel> input1Binding = binder.bindConsumer("partJ.raw.0", "test",
			input1, consumerProperties);
	consumerProperties.setInstanceIndex(2);
	QueueChannel input2 = new QueueChannel();
	input2.setBeanName("test.input2J");
	Binding<MessageChannel> input2Binding = binder.bindConsumer("partJ.raw.0", "test",
			input2, consumerProperties);

	output.send(new GenericMessage<>(new byte[] { (byte) 0 }));
	output.send(new GenericMessage<>(new byte[] { (byte) 1 }));
	output.send(new GenericMessage<>(new byte[] { (byte) 2 }));

	Message<?> receive0 = receive(input0);
	assertThat(receive0).isNotNull();
	Message<?> receive1 = receive(input1);
	assertThat(receive1).isNotNull();
	Message<?> receive2 = receive(input2);
	assertThat(receive2).isNotNull();

	assertThat(Arrays.asList(((byte[]) receive0.getPayload())[0],
			((byte[]) receive1.getPayload())[0], ((byte[]) receive2.getPayload())[0]))
					.containsExactlyInAnyOrder((byte) 0, (byte) 1, (byte) 2);

	input0Binding.unbind();
	input1Binding.unbind();
	input2Binding.unbind();
	outputBinding.unbind();
}
 
Example 9
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testPartitionedModuleSpELWithRawMode() throws Exception {
	Binder binder = getBinder();
	ExtendedProducerProperties<KafkaProducerProperties> properties = createProducerProperties();
	properties.setPartitionKeyExpression(
			spelExpressionParser.parseExpression("payload[0]"));
	properties.setPartitionSelectorExpression(
			spelExpressionParser.parseExpression("hashCode()"));
	properties.setPartitionCount(6);
	properties.setHeaderMode(HeaderMode.none);

	DirectChannel output = createBindableChannel("output",
			createProducerBindingProperties(properties));
	output.setBeanName("test.output");
	Binding<MessageChannel> outputBinding = binder.bindProducer("part.raw.0", output,
			properties);
	try {
		Object endpoint = extractEndpoint(outputBinding);
		assertThat(getEndpointRouting(endpoint))
				.contains(getExpectedRoutingBaseDestination("part.raw.0", "test")
						+ "-' + headers['partition']");
	}
	catch (UnsupportedOperationException ignored) {
	}

	ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
	consumerProperties.setConcurrency(2);
	consumerProperties.setInstanceIndex(0);
	consumerProperties.setInstanceCount(3);
	consumerProperties.setPartitioned(true);
	consumerProperties.setHeaderMode(HeaderMode.none);
	consumerProperties.getExtension().setAutoRebalanceEnabled(false);
	QueueChannel input0 = new QueueChannel();
	input0.setBeanName("test.input0S");
	Binding<MessageChannel> input0Binding = binder.bindConsumer("part.raw.0", "test",
			input0, consumerProperties);
	consumerProperties.setInstanceIndex(1);
	QueueChannel input1 = new QueueChannel();
	input1.setBeanName("test.input1S");
	Binding<MessageChannel> input1Binding = binder.bindConsumer("part.raw.0", "test",
			input1, consumerProperties);
	consumerProperties.setInstanceIndex(2);
	QueueChannel input2 = new QueueChannel();
	input2.setBeanName("test.input2S");
	Binding<MessageChannel> input2Binding = binder.bindConsumer("part.raw.0", "test",
			input2, consumerProperties);

	Message<byte[]> message2 = org.springframework.integration.support.MessageBuilder
			.withPayload(new byte[] { 2 })
			.setHeader(IntegrationMessageHeaderAccessor.CORRELATION_ID,
					"kafkaBinderTestCommonsDelegate")
			.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_NUMBER, 42)
			.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_SIZE, 43).build();
	output.send(message2);
	output.send(new GenericMessage<>(new byte[] { 1 }));
	output.send(new GenericMessage<>(new byte[] { 0 }));
	Message<?> receive0 = receive(input0);
	assertThat(receive0).isNotNull();
	Message<?> receive1 = receive(input1);
	assertThat(receive1).isNotNull();
	Message<?> receive2 = receive(input2);
	assertThat(receive2).isNotNull();
	assertThat(Arrays.asList(((byte[]) receive0.getPayload())[0],
			((byte[]) receive1.getPayload())[0], ((byte[]) receive2.getPayload())[0]))
					.containsExactlyInAnyOrder((byte) 0, (byte) 1, (byte) 2);
	input0Binding.unbind();
	input1Binding.unbind();
	input2Binding.unbind();
	outputBinding.unbind();
}
 
Example 10
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testAutoAddPartitionsDisabledSucceedsIfTopicPartitionedCorrectly()
		throws Throwable {
	Binding<?> binding = null;
	try {
		KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();

		String testTopicName = "existing" + System.currentTimeMillis();
		invokeCreateTopic(testTopicName, 6, 1);
		configurationProperties.setAutoAddPartitions(false);
		Binder binder = getBinder(configurationProperties);
		ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();

		DirectChannel input = createBindableChannel("input",
				createConsumerBindingProperties(consumerProperties));

		// this consumer must consume from partition 2
		consumerProperties.setInstanceCount(3);
		consumerProperties.setInstanceIndex(2);
		consumerProperties.getExtension().setAutoRebalanceEnabled(false);

		binding = binder.bindConsumer(testTopicName, "test-x", input,
				consumerProperties);

		ContainerProperties containerProps = TestUtils.getPropertyValue(
				binding,
				"lifecycle.messageListenerContainer.containerProperties",
				ContainerProperties.class);
		TopicPartitionOffset[] listenedPartitions = containerProps.getTopicPartitionsToAssign();
		assertThat(listenedPartitions).hasSize(2);
		assertThat(listenedPartitions).contains(
				new TopicPartitionOffset(testTopicName, 2),
				new TopicPartitionOffset(testTopicName, 5));
		int partitions = invokePartitionSize(testTopicName);
		assertThat(partitions).isEqualTo(6);
	}
	finally {
		if (binding != null) {
			binding.unbind();
		}
	}
}
 
Example 11
Source File: RabbitBinderTests.java    From spring-cloud-stream-binder-rabbit with Apache License 2.0
@Test
public void testConsumerProperties() throws Exception {
	RabbitTestBinder binder = getBinder();
	ExtendedConsumerProperties<RabbitConsumerProperties> properties = createConsumerProperties();
	properties.getExtension().setRequeueRejected(true);
	properties.getExtension().setTransacted(true);
	properties.getExtension().setExclusive(true);
	properties.getExtension().setMissingQueuesFatal(true);
	properties.getExtension().setFailedDeclarationRetryInterval(1500L);
	properties.getExtension().setQueueDeclarationRetries(23);
	Binding<MessageChannel> consumerBinding = binder.bindConsumer("props.0", null,
			createBindableChannel("input", new BindingProperties()), properties);
	Lifecycle endpoint = extractEndpoint(consumerBinding);
	SimpleMessageListenerContainer container = TestUtils.getPropertyValue(endpoint,
			"messageListenerContainer", SimpleMessageListenerContainer.class);
	assertThat(container.getAcknowledgeMode()).isEqualTo(AcknowledgeMode.AUTO);
	assertThat(container.getQueueNames()[0])
			.startsWith(properties.getExtension().getPrefix());
	assertThat(TestUtils.getPropertyValue(container, "transactional", Boolean.class))
			.isTrue();
	assertThat(TestUtils.getPropertyValue(container, "exclusive", Boolean.class))
			.isTrue();
	assertThat(TestUtils.getPropertyValue(container, "concurrentConsumers"))
			.isEqualTo(1);
	assertThat(TestUtils.getPropertyValue(container, "maxConcurrentConsumers"))
			.isNull();
	assertThat(TestUtils.getPropertyValue(container, "defaultRequeueRejected",
			Boolean.class)).isTrue();
	assertThat(TestUtils.getPropertyValue(container, "prefetchCount")).isEqualTo(1);
	assertThat(TestUtils.getPropertyValue(container, "batchSize")).isEqualTo(1);
	assertThat(TestUtils.getPropertyValue(container, "missingQueuesFatal",
			Boolean.class)).isTrue();
	assertThat(
			TestUtils.getPropertyValue(container, "failedDeclarationRetryInterval"))
					.isEqualTo(1500L);
	assertThat(TestUtils.getPropertyValue(container, "declarationRetries"))
			.isEqualTo(23);
	RetryTemplate retry = TestUtils.getPropertyValue(endpoint, "retryTemplate",
			RetryTemplate.class);
	assertThat(TestUtils.getPropertyValue(retry, "retryPolicy.maxAttempts"))
			.isEqualTo(3);
	assertThat(TestUtils.getPropertyValue(retry, "backOffPolicy.initialInterval"))
			.isEqualTo(1000L);
	assertThat(TestUtils.getPropertyValue(retry, "backOffPolicy.maxInterval"))
			.isEqualTo(10000L);
	assertThat(TestUtils.getPropertyValue(retry, "backOffPolicy.multiplier"))
			.isEqualTo(2.0);
	consumerBinding.unbind();
	assertThat(endpoint.isRunning()).isFalse();

	properties = createConsumerProperties();
	properties.getExtension().setAcknowledgeMode(AcknowledgeMode.NONE);
	properties.setBackOffInitialInterval(2000);
	properties.setBackOffMaxInterval(20000);
	properties.setBackOffMultiplier(5.0);
	properties.setConcurrency(2);
	properties.setMaxAttempts(23);
	properties.getExtension().setMaxConcurrency(3);
	properties.getExtension().setPrefix("foo.");
	properties.getExtension().setPrefetch(20);
	properties.getExtension().setHeaderPatterns(new String[] { "foo" });
	properties.getExtension().setTxSize(10);
	QuorumConfig quorum = properties.getExtension().getQuorum();
	quorum.setEnabled(true);
	quorum.setDeliveryLimit(10);
	quorum.setInitialGroupSize(1);
	properties.setInstanceIndex(0);
	consumerBinding = binder.bindConsumer("props.0", "test",
			createBindableChannel("input", new BindingProperties()), properties);

	endpoint = extractEndpoint(consumerBinding);
	container = verifyContainer(endpoint);

	assertThat(container.getQueueNames()[0]).isEqualTo("foo.props.0.test");

	consumerBinding.unbind();
	assertThat(endpoint.isRunning()).isFalse();
}