org.apache.kafka.clients.admin.DescribeConfigsResult Java Examples

The following examples show how to use org.apache.kafka.clients.admin.DescribeConfigsResult. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
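Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all build on: create a ConfigResource, pass it to AdminClient#describeConfigs, and then read the result either per resource through values() (a Map of ConfigResource to KafkaFuture<Config>) or in one call through all() (a single KafkaFuture over the whole Map). The bootstrap address, topic name, class name, and timeout below are illustrative assumptions, not values taken from the projects that follow.

import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.clients.admin.DescribeConfigsResult;
import org.apache.kafka.common.config.ConfigResource;

public class DescribeConfigsQuickStart {

  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // Assumed broker address; replace with your cluster's bootstrap servers.
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

    try (AdminClient adminClient = AdminClient.create(props)) {
      // Hypothetical topic name used only for illustration.
      ConfigResource topicResource = new ConfigResource(ConfigResource.Type.TOPIC, "example-topic");
      DescribeConfigsResult result = adminClient.describeConfigs(Collections.singleton(topicResource));

      // Option 1: values() returns one KafkaFuture<Config> per requested resource.
      Config config = result.values().get(topicResource).get(30, TimeUnit.SECONDS);
      for (ConfigEntry entry : config.entries()) {
        System.out.printf("%s = %s%n", entry.name(), entry.value());
      }

      // Option 2: all() returns a single future that completes with every resource's Config.
      Map<ConfigResource, Config> allConfigs = result.all().get(30, TimeUnit.SECONDS);
      System.out.println("Described resources: " + allConfigs.keySet());
    }
  }
}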
Example #1
Source File: CruiseControlMetricsReporter.java    From cruise-control with BSD 2-Clause "Simplified" License
protected void maybeUpdateTopicConfig() {
  try {
    // Retrieve topic config to check and update.
    ConfigResource topicResource = new ConfigResource(ConfigResource.Type.TOPIC, _cruiseControlMetricsTopic);
    DescribeConfigsResult describeConfigsResult = _adminClient.describeConfigs(Collections.singleton(topicResource));
    Config topicConfig = describeConfigsResult.values().get(topicResource).get(CLIENT_REQUEST_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    Set<AlterConfigOp> alterConfigOps = new HashSet<>(2);
    Map<String, String> configsToSet = new HashMap<>(2);
    configsToSet.put(LogConfig.RetentionMsProp(), _metricsTopic.configs().get(LogConfig.RetentionMsProp()));
    configsToSet.put(LogConfig.CleanupPolicyProp(), _metricsTopic.configs().get(LogConfig.CleanupPolicyProp()));
    maybeUpdateConfig(alterConfigOps, configsToSet, topicConfig);
    if (!alterConfigOps.isEmpty()) {
      AlterConfigsResult alterConfigsResult = _adminClient.incrementalAlterConfigs(Collections.singletonMap(topicResource, alterConfigOps));
      alterConfigsResult.values().get(topicResource).get(CLIENT_REQUEST_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    }
  } catch (InterruptedException | ExecutionException | TimeoutException e) {
    LOG.warn("Unable to update config of Cruise Cruise Control metrics topic {}", _cruiseControlMetricsTopic, e);
  }
}
 
Example #2
Source File: TopicServiceImplTest.java    From kafka-helmsman with MIT License
@Test
public void testListExisting() {
  Cluster cluster = createCluster(1);
  TopicPartitionInfo tp = new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.emptyList());
  ConfigEntry configEntry = new ConfigEntry("k", "v");
  KafkaFuture<Config> kfc = KafkaFuture.completedFuture(new Config(Collections.singletonList(configEntry)));
  Set<String> topicNames = new HashSet<>(Arrays.asList("a", "b", "_c"));
  Map<String, TopicDescription> tds = new HashMap<String, TopicDescription>() {
    {
      put("a", new TopicDescription("a", false, Collections.singletonList(tp)));
      put("b", new TopicDescription("b", false, Collections.singletonList(tp)));
      put("c", new TopicDescription("_c", false, Collections.singletonList(tp)));
    }
  };
  Map<ConfigResource, KafkaFuture<Config>> configs = new HashMap<ConfigResource, KafkaFuture<Config>>() {
    {
      put(new ConfigResource(TOPIC, "a"), kfc);
      put(new ConfigResource(TOPIC, "b"), kfc);
      put(new ConfigResource(TOPIC, "_c"), kfc);
    }
  };

  TopicService service = new TopicServiceImpl(adminClient, true);
  ListTopicsResult listTopicsResult = mock(ListTopicsResult.class);
  DescribeTopicsResult describeTopicsResult = mock(DescribeTopicsResult.class);
  DescribeConfigsResult describeConfigsResult = mock(DescribeConfigsResult.class);

  when(describeTopicsResult.all()).thenReturn(KafkaFuture.completedFuture(tds));
  when(listTopicsResult.names()).thenReturn(KafkaFuture.completedFuture(topicNames));
  when(describeConfigsResult.values()).thenReturn(configs);
  when(adminClient.listTopics(any(ListTopicsOptions.class))).thenReturn(listTopicsResult);
  when(adminClient.describeTopics(topicNames)).thenReturn(describeTopicsResult);
  when(adminClient.describeConfigs(any(Collection.class))).thenReturn(describeConfigsResult);

  Map<String, ConfiguredTopic> actual = service.listExisting(true);
  Assert.assertEquals(2, actual.size());
  Assert.assertEquals(new HashSet<>(Arrays.asList("a", "b")), actual.keySet());
}
 
Example #3
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private DescribeConfigsResult describeBrokerResult() {
  DescribeConfigsResult describeConfigsResult = mock(DescribeConfigsResult.class);
  ConfigEntry configEntryDeleteEnable = new ConfigEntry("delete.topic.enable", "true");
  List<ConfigEntry> configEntries = new ArrayList<>();
  configEntries.add(configEntryDeleteEnable);
  Map<ConfigResource, Config> config = ImmutableMap.of(
      new ConfigResource(ConfigResource.Type.BROKER, node.idString()), new Config(configEntries));
  expect(describeConfigsResult.all()).andReturn(KafkaFuture.completedFuture(config));
  replay(describeConfigsResult);
  return describeConfigsResult;
}
 
Example #4
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private static DescribeConfigsResult topicConfigResponse(final String topicName,
                                                         final ConfigEntry... entries) {

  final Map<ConfigResource, Config> config = ImmutableMap.of(
      new ConfigResource(ConfigResource.Type.TOPIC, topicName),
      new Config(Arrays.asList(entries)));

  final DescribeConfigsResult response = mock(DescribeConfigsResult.class);
  expect(response.all()).andReturn(KafkaFuture.completedFuture(config));
  replay(response);
  return response;
}
 
Example #5
Source File: TopicAdmin.java    From kafka-message-tool with MIT License
public Set<ConfigEntry> getConfigEntriesForTopic(String topicName) {
    final ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    final DescribeConfigsResult topicConfigEntries = kafkaClientsAdminClient.describeConfigs(Collections.singleton(configResource));
    try {
        final Config config = topicConfigEntries.all().get(ApplicationConstants.FUTURE_GET_TIMEOUT_MS, TimeUnit.MILLISECONDS).get(configResource);
        final Collection<ConfigEntry> entries = config.entries();
        Logger.debug(String.format("Config entries for topic '%s' : %n%s", topicName, AppUtils.configEntriesToPrettyString(entries)));
        return new HashSet<>(entries);
    } catch (Exception e) {
        Logger.error(String.format("Could not retrieve config resource for topic '%s'", topicName), e);
    }
    return Collections.emptySet();
}
 
Example #6
Source File: DefaultKafkaClusterProxy.java    From kafka-message-tool with MIT License
private void describeNodeConfig(int controllerNodeId, Node node) throws InterruptedException, ExecutionException {
    if (!doesNodeSupportDescribeConfigApi(node)) {
        Logger.warn(String.format("Node '%s' does not support describeConfig api. Cannot show cluster properties", node));
        return;
    }

    DescribeConfigsResult configs = kafkaClientsAdminClient.describeConfigs(
        singleton(new ConfigResource(ConfigResource.Type.BROKER, String.valueOf(node.id()))));
    final Map<ConfigResource, Config> configResourceConfigMap = configs.all().get();
    configResourceConfigMap.forEach((configResource, config) ->
                                        clusterSummary.addNodeInfo(new ClusterNodeInfo(node.id() == controllerNodeId,
                                                                                       node.idString(),
                                                                                       new HashSet<>(config.entries()))));
}
 
Example #7
Source File: SamplingUtilsTest.java    From cruise-control with BSD 2-Clause "Simplified" License
@Test
public void testMaybeUpdateTopicConfig() throws InterruptedException, ExecutionException, TimeoutException {
  AdminClient adminClient = EasyMock.createMock(AdminClient.class);
  DescribeConfigsResult describeConfigsResult = EasyMock.createMock(DescribeConfigsResult.class);
  KafkaFuture<Config> describedConfigsFuture = EasyMock.createMock(KafkaFuture.class);
  Config topicConfig = EasyMock.createMock(Config.class);
  AlterConfigsResult alterConfigsResult = EasyMock.createMock(AlterConfigsResult.class);
  Set<AlterConfigOp> alterConfigOps = Collections.singleton(new AlterConfigOp(
      new ConfigEntry(RetentionMsProp(), Long.toString(MOCK_DESIRED_RETENTION_MS)), AlterConfigOp.OpType.SET));
  Map<ConfigResource, KafkaFuture<Config>> describeConfigsValues = Collections.singletonMap(MOCK_TOPIC_RESOURCE,
                                                                                            describedConfigsFuture);
  Map<ConfigResource, KafkaFuture<Void>> alterConfigsValues = Collections.singletonMap(MOCK_TOPIC_RESOURCE,
                                                                                       EasyMock.createMock(KafkaFuture.class));

  NewTopic topicToUpdateConfigs = SamplingUtils.wrapTopic(MOCK_TOPIC, MOCK_PARTITION_COUNT, MOCK_REPLICATION_FACTOR, MOCK_DESIRED_RETENTION_MS);
  EasyMock.expect(adminClient.describeConfigs(EasyMock.eq(Collections.singleton(MOCK_TOPIC_RESOURCE)))).andReturn(describeConfigsResult);
  EasyMock.expect(describeConfigsResult.values()).andReturn(describeConfigsValues);
  EasyMock.expect(describedConfigsFuture.get(CLIENT_REQUEST_TIMEOUT_MS, TimeUnit.MILLISECONDS)).andReturn(topicConfig);
  EasyMock.expect(topicConfig.get(EasyMock.eq(CleanupPolicyProp()))).andReturn(new ConfigEntry(CleanupPolicyProp(),
                                                                                               DEFAULT_CLEANUP_POLICY));
  EasyMock.expect(topicConfig.get(EasyMock.eq(RetentionMsProp()))).andReturn(new ConfigEntry(RetentionMsProp(),
                                                                                             MOCK_CURRENT_RETENTION_MS));
  EasyMock.expect(adminClient.incrementalAlterConfigs(EasyMock.eq(Collections.singletonMap(MOCK_TOPIC_RESOURCE,
                                                                                           alterConfigOps))))
          .andReturn(alterConfigsResult);
  EasyMock.expect(alterConfigsResult.values()).andReturn(alterConfigsValues);
  EasyMock.replay(adminClient, describeConfigsResult, describedConfigsFuture, topicConfig, alterConfigsResult);

  boolean updateTopicConfig = SamplingUtils.maybeUpdateTopicConfig(adminClient, topicToUpdateConfigs);
  EasyMock.verify(adminClient, describeConfigsResult, describedConfigsFuture, topicConfig, alterConfigsResult);
  assertTrue(updateTopicConfig);
}
 
Example #8
Source File: TopicEnsure.java    From common-docker with Apache License 2.0
public boolean validateTopic(TopicSpec spec, int timeOut) throws Exception {
  // Describe topic.
  DescribeTopicsResult topicDescribeResult = adminClient.describeTopics(
      Collections.singletonList(spec.name()), new DescribeTopicsOptions().timeoutMs(timeOut)
  );
  TopicDescription topic = topicDescribeResult.all().get().get(spec.name());

  // Get topic config.
  ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, spec.name());
  DescribeConfigsResult configResult = adminClient.describeConfigs(
      Collections.singletonList(configResource)
  );
  Map<ConfigResource, Config> resultMap = configResult.all().get();
  Config config = resultMap.get(configResource);

  // Create actual TopicSpec.
  Map<String, String> actualConfig = new HashMap<>();
  for (Map.Entry<String, String> entry : spec.config().entrySet()) {
    ConfigEntry actualConfigEntry = config.get(entry.getKey());
    if (actualConfigEntry != null) {
      actualConfig.put(entry.getKey(), actualConfigEntry.value());
    }
  }

  TopicSpec actualSpec = new TopicSpec(
      topic.name(), topic.partitions().size(),
      topic.partitions().get(0).replicas().size(), actualConfig
  );

  boolean isTopicValid = actualSpec.equals(spec);
  if (!isTopicValid) {
    System.err.printf(
        "Invalid topic [ %s ] ! Expected %s but got %s\n", spec.name(), spec, actualSpec
    );
  }

  return isTopicValid;
}
 
Example #9
Source File: KafkaAvailabilityTest.java    From strimzi-kafka-operator with Apache License 2.0
void mockDescribeConfigs(Admin mockAc) {
    when(mockAc.describeConfigs(any())).thenAnswer(invocation -> {
        Collection<ConfigResource> argument = invocation.getArgument(0);
        DescribeConfigsResult dcr = mock(DescribeConfigsResult.class);
        Throwable throwable = null;
        for (ConfigResource configResource : argument) {
            throwable = describeConfigsResult.get(configResource);
            if (throwable != null) {
                break;
            }
        }
        // values() is stubbed to always throw, so callers of this mock must go through all().
        when(dcr.values()).thenThrow(notImplemented());
        if (throwable != null) {
            when(dcr.all()).thenReturn(failedFuture(throwable));
        } else {
            Map<ConfigResource, Config> result = new HashMap<>();
            for (ConfigResource cr : argument) {
                List<ConfigEntry> entries = new ArrayList<>();
                for (Map.Entry<String, String> e : topics.get(cr.name()).configs.entrySet()) {
                    ConfigEntry ce = new ConfigEntry(e.getKey(), e.getValue());
                    entries.add(ce);
                }
                result.put(cr, new Config(entries));
            }
            when(dcr.all()).thenReturn(KafkaFuture.completedFuture(result));
        }
        return dcr;
    });
}
 
Example #10
Source File: KafkaImplTest.java    From strimzi-kafka-operator with Apache License 2.0
private void mockDescribeConfigs(Admin admin, Map<ConfigResource, Either<Config, Exception>> result) {
    DescribeConfigsResult describeConfigsResult = mock(DescribeConfigsResult.class);
    when(describeConfigsResult.values()).thenReturn(result.entrySet().stream().collect(toMap(
        entry -> entry.getKey(),
        entry -> {
            KafkaFutureImpl<Config> kafkaFuture = new KafkaFutureImpl<>();
            if (entry.getValue().isLeft()) {
                kafkaFuture.complete(entry.getValue().left());
            } else {
                kafkaFuture.completeExceptionally(entry.getValue().right());
            }
            return kafkaFuture;
        })));
    when(admin.describeConfigs(result.keySet())).thenReturn(describeConfigsResult);
}
 
Example #11
Source File: KafkaHealthIndicator.java    From micronaut-kafka with Apache License 2.0
@Override
public Flowable<HealthResult> getResult() {
    DescribeClusterResult result = adminClient.describeCluster(
            new DescribeClusterOptions().timeoutMs(
                    (int) defaultConfiguration.getHealthTimeout().toMillis()
            )
    );

    Flowable<String> clusterId = Flowable.fromFuture(result.clusterId());
    Flowable<Collection<Node>> nodes = Flowable.fromFuture(result.nodes());
    Flowable<Node> controller = Flowable.fromFuture(result.controller());

    return controller.switchMap(node -> {
        String brokerId = node.idString();
        ConfigResource configResource = new ConfigResource(ConfigResource.Type.BROKER, brokerId);
        DescribeConfigsResult configResult = adminClient.describeConfigs(Collections.singletonList(configResource));
        Flowable<Map<ConfigResource, Config>> configs = Flowable.fromFuture(configResult.all());
        return configs.switchMap(resources -> {
            Config config = resources.get(configResource);
            ConfigEntry ce = config.get(REPLICATION_PROPERTY);
            int replicationFactor = Integer.parseInt(ce.value());
            return nodes.switchMap(nodeList -> clusterId.map(clusterIdString -> {
                int nodeCount = nodeList.size();
                HealthResult.Builder builder;
                if (nodeCount >= replicationFactor) {
                    builder = HealthResult.builder(ID, HealthStatus.UP);
                } else {
                    builder = HealthResult.builder(ID, HealthStatus.DOWN);
                }
                return builder
                        .details(CollectionUtils.mapOf(
                                "brokerId", brokerId,
                                "clusterId", clusterIdString,
                                "nodes", nodeCount
                        )).build();
            }));
        });
    }).onErrorReturn(throwable ->
            HealthResult.builder(ID, HealthStatus.DOWN)
                    .exception(throwable).build()
    );
}
 
Example #12
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private static DescribeConfigsResult topicConfigResponse(final Exception cause) {
  final DescribeConfigsResult response = mock(DescribeConfigsResult.class);
  expect(response.all()).andReturn(failedFuture(cause));
  replay(response);
  return response;
}