org.apache.kafka.clients.admin.DescribeTopicsResult Java Examples

The following examples show how to use org.apache.kafka.clients.admin.DescribeTopicsResult. Each example is taken from an open source project; the source file, project, and license are listed above it.
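Before the individual examples, here is a minimal sketch of the two ways a DescribeTopicsResult is typically read: per-topic KafkaFutures via values(), and a single future covering every requested topic via all(). The bootstrap address localhost:9092 and the topic name my-topic are placeholders and not taken from any of the projects below.

import java.util.Collections;
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.DescribeTopicsResult;
import org.apache.kafka.clients.admin.TopicDescription;

public class DescribeTopicsSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker address

        try (AdminClient adminClient = AdminClient.create(props)) {
            DescribeTopicsResult result = adminClient.describeTopics(Collections.singleton("my-topic"));

            // Per-topic access: values() maps each topic name to its own KafkaFuture<TopicDescription>.
            TopicDescription single = result.values().get("my-topic").get();
            System.out.println("Partition count: " + single.partitions().size());

            // Aggregate access: all() yields one future that fails if any requested topic could not be described.
            Map<String, TopicDescription> byName = result.all().get();
            System.out.println("Described topics: " + byName.keySet());
        }
    }
}

Most of the examples below follow one of these two patterns, adding timeouts either through get(timeout, unit) on the returned future or through DescribeTopicsOptions.timeoutMs(...).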
Example #1
Source File: AdminClientWrapper.java    From hdinsight-kafka-java-get-started with MIT License
public static void describeTopics(String brokers, String topicName) throws IOException {
    // Set properties used to configure admin client
    Properties properties = getProperties(brokers);

    try (final AdminClient adminClient = KafkaAdminClient.create(properties)) {
        // Make async call to describe the topic.
        final DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singleton(topicName));

        TopicDescription description = describeTopicsResult.values().get(topicName).get();
        System.out.print(description.toString());
    } catch (Exception e) {
        System.out.print("Describe denied\n");
        System.out.print(e.getMessage());
        //throw new RuntimeException(e.getMessage(), e);
    }
}
 
Example #2
Source File: TestKafkaSystemAdminJava.java    From samza with Apache License 2.0
@Test
public void testClearStream() {
  StreamSpec spec = new StreamSpec("testId", "testStreamClear", "testSystem", 8);

  KafkaSystemAdmin admin = systemAdmin();
  String topicName = spec.getPhysicalName();

  assertTrue("createStream should return true if the stream does not exist and then is created.", admin.createStream(spec));
  // clear the stream that was just created
  assertTrue(admin.clearStream(spec));

  // validate that topic was removed
  DescribeTopicsResult dtr = admin.adminClient.describeTopics(ImmutableSet.of(topicName));
  try {
    TopicDescription td = dtr.all().get().get(topicName);
    Assert.fail("topic " + topicName + " should've been removed. td=" + td);
  } catch (Exception e) {
    if (!(e.getCause() instanceof org.apache.kafka.common.errors.UnknownTopicOrPartitionException)) {
      Assert.fail("topic " + topicName + " should've been removed. Expected UnknownTopicOrPartitionException.");
    }
  }
}
 
Example #3
Source File: ConsumeService.java    From kafka-monitor with Apache License 2.0
@Override
public synchronized void start() {
  if (_running.compareAndSet(false, true)) {
    _consumeThread.start();
    LOG.info("{}/ConsumeService started.", _name);

    Sensor topicPartitionCount = metrics.sensor("topic-partitions");
    DescribeTopicsResult describeTopicsResult = _adminClient.describeTopics(Collections.singleton(_topic));
    Map<String, KafkaFuture<TopicDescription>> topicResultValues = describeTopicsResult.values();
    KafkaFuture<TopicDescription> topicDescriptionKafkaFuture = topicResultValues.get(_topic);
    TopicDescription topicDescription = null;
    try {
      topicDescription = topicDescriptionKafkaFuture.get();
    } catch (InterruptedException | ExecutionException e) {
      LOG.error("Exception occurred while getting the topicDescriptionKafkaFuture for topic: {}", _topic, e);
    }
    @SuppressWarnings("ConstantConditions")
    double partitionCount = topicDescription.partitions().size();
    topicPartitionCount.add(
        new MetricName("topic-partitions-count", METRIC_GROUP_NAME, "The total number of partitions for the topic.", tags), new Total(partitionCount));
  }
}
 
Example #4
Source File: KafkaAdmin.java    From feeyo-redisproxy with BSD 3-Clause "New" or "Revised" License
public Map<String, TopicDescription> getTopicAndDescriptions() throws Exception {

    try {
        // List the topics in the cluster
        ListTopicsOptions lto = new ListTopicsOptions();
        lto.timeoutMs(10 * 1000);
        ListTopicsResult ltr = adminClient.listTopics(lto);

        // Describe every listed topic
        DescribeTopicsOptions dto = new DescribeTopicsOptions();
        dto.timeoutMs(15 * 1000);
        DescribeTopicsResult dtr = adminClient.describeTopics(ltr.names().get(), dto);
        return dtr.all().get();

    } catch (Exception e) {
        throw e;
    }
}
 
Example #5
Source File: SamplingUtilsTest.java    From cruise-control with BSD 2-Clause "Simplified" License
@Test
public void testMaybeIncreasePartitionCount() throws InterruptedException, ExecutionException, TimeoutException {
  AdminClient adminClient = EasyMock.createMock(AdminClient.class);
  NewTopic topicToAddPartitions = SamplingUtils.wrapTopic(MOCK_TOPIC, MOCK_DESIRED_PARTITION_COUNT,
                                                          MOCK_REPLICATION_FACTOR, MOCK_DESIRED_RETENTION_MS);
  DescribeTopicsResult describeTopicsResult = EasyMock.createMock(DescribeTopicsResult.class);
  KafkaFuture<TopicDescription> topicDescriptionFuture = EasyMock.createMock(KafkaFuture.class);
  TopicDescription topicDescription = EasyMock.createMock(TopicDescription.class);
  Map<String, KafkaFuture<TopicDescription>> describeTopicsValues = Collections.singletonMap(MOCK_TOPIC, topicDescriptionFuture);
  Map<String, KafkaFuture<Void>> createPartitionsValues = Collections.singletonMap(MOCK_TOPIC, EasyMock.createMock(KafkaFuture.class));
  CreatePartitionsResult createPartitionsResult = EasyMock.createMock(CreatePartitionsResult.class);

  EasyMock.expect(adminClient.describeTopics(Collections.singletonList(MOCK_TOPIC))).andReturn(describeTopicsResult);
  EasyMock.expect(describeTopicsResult.values()).andReturn(describeTopicsValues);
  EasyMock.expect(topicDescriptionFuture.get(CLIENT_REQUEST_TIMEOUT_MS, TimeUnit.MILLISECONDS)).andReturn(topicDescription);
  EasyMock.expect(topicDescription.partitions()).andReturn(MOCK_PARTITIONS);
  EasyMock.expect(adminClient.createPartitions(Collections.singletonMap(MOCK_TOPIC, EasyMock.anyObject())))
          .andReturn(createPartitionsResult);
  EasyMock.expect(createPartitionsResult.values()).andReturn(createPartitionsValues);

  EasyMock.replay(adminClient, describeTopicsResult, topicDescriptionFuture, topicDescription, createPartitionsResult);
  boolean increasePartitionCount = SamplingUtils.maybeIncreasePartitionCount(adminClient, topicToAddPartitions);

  EasyMock.verify(adminClient, describeTopicsResult, topicDescriptionFuture, topicDescription, createPartitionsResult);
  assertTrue(increasePartitionCount);
}
 
Example #6
Source File: TopicAdmin.java    From kafka-message-tool with MIT License
private static boolean topicExistsCheckWithClusterQuery(String topicName,
                                                        org.apache.kafka.clients.admin.AdminClient
                                                            kafkaClientsAdminClient) throws Exception {

    try {
        final DescribeTopicsResult result = kafkaClientsAdminClient.describeTopics(singleton(topicName));
        result.all().get(ApplicationConstants.FUTURE_GET_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        return true;
    } catch (ExecutionException e) {
        if (Throwables.getRootCause(e) instanceof UnknownTopicOrPartitionException) {
            return false;
        } else {
            throw e;
        }
    }
}
 
Example #7
Source File: KafkaTopicProvisioner.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Override
public ProducerDestination provisionProducerDestination(final String name,
		ExtendedProducerProperties<KafkaProducerProperties> properties) {

	if (logger.isInfoEnabled()) {
		logger.info("Using kafka topic for outbound: " + name);
	}
	KafkaTopicUtils.validateTopicName(name);
	try (AdminClient adminClient = AdminClient.create(this.adminClientProperties)) {
		createTopic(adminClient, name, properties.getPartitionCount(), false,
				properties.getExtension().getTopic());
		int partitions = 0;
		Map<String, TopicDescription> topicDescriptions = new HashMap<>();
		if (this.configurationProperties.isAutoCreateTopics()) {
			this.metadataRetryOperations.execute(context -> {
				try {
					if (logger.isDebugEnabled()) {
						logger.debug("Attempting to retrieve the description for the topic: " + name);
					}
					DescribeTopicsResult describeTopicsResult = adminClient
							.describeTopics(Collections.singletonList(name));
					KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult
							.all();
					topicDescriptions.putAll(all.get(this.operationTimeout, TimeUnit.SECONDS));
				}
				catch (Exception ex) {
					throw new ProvisioningException("Problems encountered with partitions finding", ex);
				}
				return null;
			});
		}
		TopicDescription topicDescription = topicDescriptions.get(name);
		if (topicDescription != null) {
			partitions = topicDescription.partitions().size();
		}
		return new KafkaProducerDestination(name, partitions);
	}
}
 
Example #8
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@SuppressWarnings("unchecked")
private static DescribeTopicsResult describeTopicReturningUnknownPartitionException() {
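  // Build a nice mock whose all() future fails with UnknownTopicOrPartitionException, i.e. the topic does not exist.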
  DescribeTopicsResult describeTopicsResult = niceMock(DescribeTopicsResult.class);
  expect(describeTopicsResult.all())
      .andReturn(failedFuture(new UnknownTopicOrPartitionException("Topic doesn't exist")));
  replay(describeTopicsResult);
  return describeTopicsResult;
}
 
Example #9
Source File: KafkaImplTest.java    From strimzi-kafka-operator with Apache License 2.0
private void mockDescribeTopics(Admin admin, Map<String, Either<TopicDescription, Exception>> result) {
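    // values() gets one future per topic (completed or failed per the Either); all() fails with the first error, or completes with every description if there is none.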
    DescribeTopicsResult describeTopicsResult = mock(DescribeTopicsResult.class);
    when(describeTopicsResult.values()).thenReturn(result.entrySet().stream().collect(toMap(
        entry1 -> entry1.getKey(),
        entry1 -> {
            KafkaFutureImpl<TopicDescription> kafkaFuture1 = new KafkaFutureImpl<>();
            if (entry1.getValue().isLeft()) {
                kafkaFuture1.complete(entry1.getValue().left());
            } else {
                kafkaFuture1.completeExceptionally(entry1.getValue().right());
            }
            return kafkaFuture1;
        })));
    Optional<Either<TopicDescription, Exception>> first = result.values().stream().filter(either -> !either.isLeft()).findFirst();
    if (first.isPresent()) {
        KafkaFutureImpl<Map<String, TopicDescription>> kafkaFuture = new KafkaFutureImpl<>();
        kafkaFuture.completeExceptionally(first.get().right());
        when(describeTopicsResult.all()).thenReturn(kafkaFuture);
    } else {
        when(describeTopicsResult.all()).thenReturn(KafkaFuture.completedFuture(
            result.entrySet().stream().collect(toMap(
                entry -> entry.getKey(),
                entry -> entry.getValue().left()))
        ));
    }
    when(admin.describeTopics(result.keySet())).thenReturn(describeTopicsResult);
}
 
Example #10
Source File: KafkaAvailabilityTest.java    From strimzi-kafka-operator with Apache License 2.0
void mockDescribeTopics(Admin mockAc) {
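    // Each call builds a fresh DescribeTopicsResult mock: all() fails if any requested topic has a registered error, otherwise it completes with descriptions built from the TSB fixtures; values() is deliberately left unimplemented.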
    when(mockAc.describeTopics(any())).thenAnswer(invocation -> {
        DescribeTopicsResult dtr = mock(DescribeTopicsResult.class);
        Collection<String> topicNames = invocation.getArgument(0);
        Throwable throwable = null;
        for (String topicName : topicNames) {
            throwable = describeTopicsResult.get(topicName);
            if (throwable != null) {
                break;
            }
        }
        if (throwable != null) {
            when(dtr.all()).thenReturn(failedFuture(throwable));
        } else {
            Map<String, TopicDescription> tds = topics.entrySet().stream().collect(Collectors.toMap(
                e -> e.getKey(),
                e -> {
                    TSB tsb = e.getValue();
                    return new TopicDescription(tsb.name, tsb.internal,
                            tsb.partitions.entrySet().stream().map(e1 -> {
                                TSB.PSB psb = e1.getValue();
                                return new TopicPartitionInfo(psb.id,
                                        psb.leader != null ? node(psb.leader) : Node.noNode(),
                                        Arrays.stream(psb.replicaOn).boxed().map(broker -> node(broker)).collect(Collectors.toList()),
                                        Arrays.stream(psb.isr).boxed().map(broker -> node(broker)).collect(Collectors.toList()));
                            }).collect(Collectors.toList()));
                }
            ));
            when(dtr.all()).thenReturn(KafkaFuture.completedFuture(tds));
            when(dtr.values()).thenThrow(notImplemented());
        }
        return dtr;
    });
}
 
Example #11
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private DescribeTopicsResult getDescribeTopicsResult() {
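  // Build a mock whose all() future completes with a single-partition description of topicName1.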
  TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo(0, node, Collections
      .singletonList(node), Collections.singletonList(node));
  TopicDescription topicDescription = new TopicDescription(
      topicName1, false, Collections.singletonList(topicPartitionInfo));
  DescribeTopicsResult describeTopicsResult = mock(DescribeTopicsResult.class);
  expect(describeTopicsResult.all()).andReturn(
      KafkaFuture.completedFuture(Collections.singletonMap(topicName1, topicDescription)));
  replay(describeTopicsResult);
  return describeTopicsResult;
}
 
Example #12
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
private int invokePartitionSize(String topic) throws Throwable {

    // Describe the topic and return how many partitions it has.
    DescribeTopicsResult describeTopicsResult = adminClient
            .describeTopics(Collections.singletonList(topic));
    KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult.all();
    Map<String, TopicDescription> stringTopicDescriptionMap = all
            .get(DEFAULT_OPERATION_TIMEOUT, TimeUnit.SECONDS);
    TopicDescription topicDescription = stringTopicDescriptionMap.get(topic);
    return topicDescription.partitions().size();
}
 
Example #13
Source File: TopicServiceImplTest.java    From kafka-helmsman with MIT License
@Test
public void testListExisting() {
  Cluster cluster = createCluster(1);
  TopicPartitionInfo tp = new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.emptyList());
  ConfigEntry configEntry = new ConfigEntry("k", "v");
  KafkaFuture<Config> kfc = KafkaFuture.completedFuture(new Config(Collections.singletonList(configEntry)));
  Set<String> topicNames = new HashSet<>(Arrays.asList("a", "b", "_c"));
  Map<String, TopicDescription> tds = new HashMap<String, TopicDescription>() {
    {
      put("a", new TopicDescription("a", false, Collections.singletonList(tp)));
      put("b", new TopicDescription("b", false, Collections.singletonList(tp)));
      put("c", new TopicDescription("_c", false, Collections.singletonList(tp)));
    }
  };
  Map<ConfigResource, KafkaFuture<Config>> configs = new HashMap<ConfigResource, KafkaFuture<Config>>() {
    {
      put(new ConfigResource(TOPIC, "a"), kfc);
      put(new ConfigResource(TOPIC, "b"), kfc);
      put(new ConfigResource(TOPIC, "_c"), kfc);
    }
  };

  TopicService service = new TopicServiceImpl(adminClient, true);
  ListTopicsResult listTopicsResult = mock(ListTopicsResult.class);
  DescribeTopicsResult describeTopicsResult = mock(DescribeTopicsResult.class);
  DescribeConfigsResult describeConfigsResult = mock(DescribeConfigsResult.class);

  when(describeTopicsResult.all()).thenReturn(KafkaFuture.completedFuture(tds));
  when(listTopicsResult.names()).thenReturn(KafkaFuture.completedFuture(topicNames));
  when(describeConfigsResult.values()).thenReturn(configs);
  when(adminClient.listTopics(any(ListTopicsOptions.class))).thenReturn(listTopicsResult);
  when(adminClient.describeTopics(topicNames)).thenReturn(describeTopicsResult);
  when(adminClient.describeConfigs(any(Collection.class))).thenReturn(describeConfigsResult);

  Map<String, ConfiguredTopic> actual = service.listExisting(true);
  Assert.assertEquals(2, actual.size());
  Assert.assertEquals(new HashSet<>(Arrays.asList("a", "b")), actual.keySet());
}
 
Example #14
Source File: KafkaAdmin.java    From feeyo-redisproxy with BSD 3-Clause "New" or "Revised" License
/**
 * Get the description of the specified topic
 */
public TopicDescription getDescriptionByTopicName(String topic) throws Exception {
	
	List<String> topics = new ArrayList<String>();
	topics.add(topic);

	DescribeTopicsOptions dto = new DescribeTopicsOptions();
	dto.timeoutMs(5 * 1000);
	DescribeTopicsResult dtr = adminClient.describeTopics(topics, dto);
	return dtr.all().get().get(topic);

}
 
Example #15
Source File: TopicAdmin.java    From kafka-message-tool with MIT License
public Set<ClusterTopicInfo> describeTopics() throws InterruptedException, ExecutionException, TimeoutException {

    Set<ClusterTopicInfo> result = new HashSet<>();
    final ListTopicsResult listTopicsResult = kafkaClientsAdminClient.listTopics(new ListTopicsOptions().listInternal(false));
    final Collection<TopicListing> listings = listTopicsResult.listings().get(ApplicationConstants.FUTURE_GET_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    Logger.debug(String.format("describeTopics.listings %s", listings));

    final Set<String> topicNames = listTopicsResult.names().get(ApplicationConstants.FUTURE_GET_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    final DescribeTopicsResult describeTopicsResult = kafkaClientsAdminClient.describeTopics(topicNames);
    final Map<String, TopicDescription> stringTopicDescriptionMap = describeTopicsResult.all().get(ApplicationConstants.FUTURE_GET_TIMEOUT_MS,
                                                                                                   TimeUnit.MILLISECONDS);

    for (Map.Entry<String, TopicDescription> entry : stringTopicDescriptionMap.entrySet()) {
        final TopicDescription topicDescription = entry.getValue();
        final ClusterTopicInfo clusterTopicInfo = new ClusterTopicInfo(topicDescription.name(),
                                                                       topicDescription.partitions(),
                                                                       getConfigEntriesForTopic(topicDescription.name()));
        result.add(clusterTopicInfo);
    }
    return result;
}
 
Example #16
Source File: TopicEnsure.java    From common-docker with Apache License 2.0
public boolean topicExists(TopicSpec spec, Integer timeOut) throws Exception {
  try {
    DescribeTopicsResult topicDescribeResult = adminClient.describeTopics(
        Collections.singletonList(spec.name()), new DescribeTopicsOptions().timeoutMs(timeOut)
    );
    topicDescribeResult.all().get().get(spec.name());
  } catch (ExecutionException e) {
    if (e.getCause() instanceof UnknownTopicOrPartitionException) {
      return false;
    } else {
      throw e;
    }
  }
  return true;
}
 
Example #17
Source File: TopicEnsure.java    From common-docker with Apache License 2.0
public boolean validateTopic(TopicSpec spec, int timeOut) throws Exception {
  // Describe topic.
  DescribeTopicsResult topicDescribeResult = adminClient.describeTopics(
      Collections.singletonList(spec.name()), new DescribeTopicsOptions().timeoutMs(timeOut)
  );
  TopicDescription topic = topicDescribeResult.all().get().get(spec.name());

  // Get topic config.
  ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, spec.name());
  DescribeConfigsResult configResult = adminClient.describeConfigs(
      Collections.singletonList(configResource)
  );
  Map<ConfigResource, Config> resultMap = configResult.all().get();
  Config config = resultMap.get(configResource);

  // Create actual TopicSpec.
  Map<String, String> actualConfig = new HashMap<>();
  for (Map.Entry<String, String> entry : spec.config().entrySet()) {
    ConfigEntry actualConfigEntry = config.get(entry.getKey());
    if (actualConfigEntry != null) {
      actualConfig.put(entry.getKey(), actualConfigEntry.value());
    }
  }

  TopicSpec actualSpec = new TopicSpec(
      topic.name(), topic.partitions().size(),
      topic.partitions().get(0).replicas().size(), actualConfig
  );

  boolean isTopicValid = actualSpec.equals(spec);
  if (!isTopicValid) {
    System.err.printf(
        "Invalid topic [ %s ] ! Expected %s but got %s\n", spec.name(), spec, actualSpec
    );
  }

  return isTopicValid;
}
 
Example #18
Source File: KafkaTestUtils.java    From kafka-junit with BSD 3-Clause "New" or "Revised" License
/**
 * Describes a topic in Kafka.
 * @param topicName the topic to describe.
 * @return Description of the topic.
 */
public TopicDescription describeTopic(final String topicName) {
    // Create admin client
    try (final AdminClient adminClient = getAdminClient()) {
        // Make async call to describe the topic.
        final DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singleton(topicName));

        return describeTopicsResult.values().get(topicName).get();
    } catch (final InterruptedException | ExecutionException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
 
Example #19
Source File: KafkaAdminClientImpl.java    From vertx-kafka-client with Apache License 2.0
@Override
public Future<Map<String, TopicDescription>> describeTopics(List<String> topicNames) {
  ContextInternal ctx = (ContextInternal) vertx.getOrCreateContext();
  Promise<Map<String, TopicDescription>> promise = ctx.promise();

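  // Describe the topics asynchronously and map each Kafka TopicDescription onto the Vert.x admin model.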
  DescribeTopicsResult describeTopicsResult = this.adminClient.describeTopics(topicNames);
  describeTopicsResult.all().whenComplete((t, ex) -> {
    if (ex == null) {

      Map<String, TopicDescription> topics = new HashMap<>();

      for (Map.Entry<String, org.apache.kafka.clients.admin.TopicDescription> topicDescriptionEntry : t.entrySet()) {

        List<TopicPartitionInfo> partitions = new ArrayList<>();

        for (org.apache.kafka.common.TopicPartitionInfo kafkaPartitionInfo : topicDescriptionEntry.getValue().partitions()) {

          TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo();
          topicPartitionInfo.setIsr(
            kafkaPartitionInfo.isr().stream().map(Helper::from).collect(Collectors.toList()))
            .setLeader(Helper.from(kafkaPartitionInfo.leader()))
            .setPartition(kafkaPartitionInfo.partition())
            .setReplicas(
              kafkaPartitionInfo.replicas().stream().map(Helper::from).collect(Collectors.toList()));

          partitions.add(topicPartitionInfo);
        }

        TopicDescription topicDescription = new TopicDescription();

        topicDescription.setInternal(topicDescriptionEntry.getValue().isInternal())
          .setName(topicDescriptionEntry.getKey())
          .setPartitions(partitions);

        topics.put(topicDescriptionEntry.getKey(), topicDescription);
      }

      promise.complete(topics);
    } else {
      promise.fail(ex);
    }
  });
  return promise.future();
}
 
Example #20
Source File: KafkaTopicProvisioner.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
private ConsumerDestination doProvisionConsumerDestination(final String name,
		final String group,
		ExtendedConsumerProperties<KafkaConsumerProperties> properties) {

	if (properties.getExtension().isDestinationIsPattern()) {
		Assert.isTrue(!properties.getExtension().isEnableDlq(),
				"enableDLQ is not allowed when listening to topic patterns");
		if (logger.isDebugEnabled()) {
			logger.debug("Listening to a topic pattern - " + name
					+ " - no provisioning performed");
		}
		return new KafkaConsumerDestination(name);
	}
	KafkaTopicUtils.validateTopicName(name);
	boolean anonymous = !StringUtils.hasText(group);
	Assert.isTrue(!anonymous || !properties.getExtension().isEnableDlq(),
			"DLQ support is not available for anonymous subscriptions");
	if (properties.getInstanceCount() == 0) {
		throw new IllegalArgumentException("Instance count cannot be zero");
	}
	int partitionCount = properties.getInstanceCount() * properties.getConcurrency();
	ConsumerDestination consumerDestination = new KafkaConsumerDestination(name);
	try (AdminClient adminClient = createAdminClient()) {
		createTopic(adminClient, name, partitionCount,
				properties.getExtension().isAutoRebalanceEnabled(),
				properties.getExtension().getTopic());
		if (this.configurationProperties.isAutoCreateTopics()) {
			DescribeTopicsResult describeTopicsResult = adminClient
					.describeTopics(Collections.singletonList(name));
			KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult
					.all();
			try {
				Map<String, TopicDescription> topicDescriptions = all
						.get(this.operationTimeout, TimeUnit.SECONDS);
				TopicDescription topicDescription = topicDescriptions.get(name);
				int partitions = topicDescription.partitions().size();
				consumerDestination = createDlqIfNeedBe(adminClient, name, group,
						properties, anonymous, partitions);
				if (consumerDestination == null) {
					consumerDestination = new KafkaConsumerDestination(name,
							partitions);
				}
			}
			catch (Exception ex) {
				throw new ProvisioningException("provisioning exception", ex);
			}
		}
	}
	return consumerDestination;
}
 
Example #21
Source File: NewConsumerTest.java    From kafka-monitor with Apache License 2.0
@SuppressWarnings("unchecked")
@Test
public void testConsumerGroupCoordinatorHashing() throws ExecutionException, InterruptedException {
  Properties consumerProperties = new Properties();

  AdminClient adminClient = Mockito.mock(AdminClient.class);

  /*
   * Mock the behavior of AdminClient only.
   */
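  // Each when(...) below stubs one more link of the describeTopics(...).values().get(...).get().partitions().size() chain, so the full chain resolves to NUM_OFFSETS_TOPIC_PARTITIONS.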
  Mockito.when(adminClient.describeTopics(Collections.singleton(Topic.GROUP_METADATA_TOPIC_NAME)))
      .thenReturn(Mockito.mock(DescribeTopicsResult.class));
  Mockito.when(adminClient.describeTopics(Collections.singleton(Topic.GROUP_METADATA_TOPIC_NAME)).values())
      .thenReturn(Mockito.mock(Map.class));
  Mockito.when(adminClient.describeTopics(Collections.singleton(Topic.GROUP_METADATA_TOPIC_NAME))
      .values()
      .get(Topic.GROUP_METADATA_TOPIC_NAME)).thenReturn(Mockito.mock(KafkaFutureImpl.class));

  Mockito.when(adminClient.describeTopics(Collections.singleton(Topic.GROUP_METADATA_TOPIC_NAME))
      .values()
      .get(Topic.GROUP_METADATA_TOPIC_NAME)
      .get()).thenReturn(Mockito.mock(TopicDescription.class));

  Mockito.when(adminClient.describeTopics(Collections.singleton(Topic.GROUP_METADATA_TOPIC_NAME))
      .values()
      .get(Topic.GROUP_METADATA_TOPIC_NAME)
      .get()
      .partitions()).thenReturn(Mockito.mock(List.class));

  Mockito.when(adminClient.describeTopics(Collections.singleton(Topic.GROUP_METADATA_TOPIC_NAME))
      .values()
      .get(Topic.GROUP_METADATA_TOPIC_NAME)
      .get()
      .partitions()
      .size()).thenReturn(NUM_OFFSETS_TOPIC_PARTITIONS);

  consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG,
      NewConsumer.configureGroupId(TARGET_CONSUMER_GROUP_ID, adminClient));
  System.out.println("Consumer properties after configuration: " + consumerProperties);
  Assert.assertNotNull(consumerProperties.get(ConsumerConfig.GROUP_ID_CONFIG));

  // Testing I: run partitionsFor() on the result to make sure they are the same
  int hashedResult =
      ConsumerGroupCoordinatorUtils.partitionFor(consumerProperties.get(ConsumerConfig.GROUP_ID_CONFIG).toString(),
          NUM_OFFSETS_TOPIC_PARTITIONS);
  int hashedResult2 =
      ConsumerGroupCoordinatorUtils.partitionFor(TARGET_CONSUMER_GROUP_ID, NUM_OFFSETS_TOPIC_PARTITIONS);

  Assert.assertEquals(hashedResult, hashedResult2);
  System.out.println("Modulo result as an absolute value: " + hashedResult);
  System.out.println("Modulo result as an absolute value: " + hashedResult2);

  // Testing II: Also test that the groupIds are different.
  Assert.assertNotEquals(TARGET_CONSUMER_GROUP_ID, consumerProperties.get(ConsumerConfig.GROUP_ID_CONFIG));

}
 
Example #22
Source File: MultiClusterTopicManagementServiceTest.java    From kafka-monitor with Apache License 2.0
@Test
protected void MultiClusterTopicManagementServiceTopicCreationTest() throws Exception {

  Mockito.doCallRealMethod().when(_topicManagementHelper).maybeCreateTopic();

  Mockito.when(_topicManagementHelper._adminClient.describeCluster())
      .thenReturn(Mockito.mock(DescribeClusterResult.class));
  Mockito.when(_topicManagementHelper._adminClient.describeCluster().nodes())
      .thenReturn(Mockito.mock(KafkaFuture.class));
  Mockito.when(_topicManagementHelper._adminClient.describeCluster().nodes().get()).thenReturn(nodeSet);

  Mockito.when(_topicManagementHelper._adminClient.createTopics(Mockito.anyCollection()))
      .thenReturn(_createTopicsResult);
  Mockito.when(_topicManagementHelper._adminClient.createTopics(Mockito.anyCollection()).values())
      .thenReturn(_kafkaFutureMap);
  Mockito.when(
      _topicManagementHelper._adminClient.createTopics(Mockito.anyCollection()).values().get(SERVICE_TEST_TOPIC))
      .thenReturn(_kafkaFuture);

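  // When the mocked topic factory is asked to create the topic, this Answer stubs describeTopics(...) so the assertions at the end can read the topic name back.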
  Answer<Object> createKafkaTopicFutureAnswer = new Answer<Object>() {
    /**
     * @param invocation the invocation on the mocked TopicManagementHelper.
     * @return NULL value.
     * @throws Throwable the throwable to be thrown when Exception occurs.
     */
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {

      Mockito.when(_topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC)))
          .thenReturn(Mockito.mock(DescribeTopicsResult.class));
      Mockito.when(
          _topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC)).values())
          .thenReturn(Mockito.mock(Map.class));
      Mockito.when(_topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC))
          .values()
          .get(SERVICE_TEST_TOPIC)).thenReturn(Mockito.mock(KafkaFuture.class));
      Mockito.when(_topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC))
          .values()
          .get(SERVICE_TEST_TOPIC)
          .get()).thenReturn(Mockito.mock(TopicDescription.class));
      Mockito.when(_topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC))
          .values()
          .get(SERVICE_TEST_TOPIC)
          .get()
          .name()).thenReturn(SERVICE_TEST_TOPIC);
      return null;
    }
  };

  Mockito.when(_topicManagementHelper._topicFactory.createTopicIfNotExist(Mockito.anyString(), Mockito.anyShort(),
      Mockito.anyDouble(), Mockito.any(), Mockito.any())).thenAnswer(createKafkaTopicFutureAnswer);

  _topicManagementHelper.maybeCreateTopic();

  Assert.assertNotNull(_topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC))
      .values()
      .get(SERVICE_TEST_TOPIC)
      .get());
  Assert.assertEquals(_topicManagementHelper._adminClient.describeTopics(Collections.singleton(SERVICE_TEST_TOPIC))
      .values()
      .get(SERVICE_TEST_TOPIC)
      .get()
      .name(), SERVICE_TEST_TOPIC);
}