org.apache.kafka.clients.admin.ConfigEntry Java Examples

The following examples show how to use org.apache.kafka.clients.admin.ConfigEntry. Each example is taken from an open-source project; the source file, project, and license are noted above it.
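Before the project examples, here is a minimal, self-contained sketch of reading a topic's ConfigEntry values with the AdminClient. The bootstrap address ("localhost:9092") and topic name ("example-topic") are placeholder assumptions, not taken from any of the projects below.

import java.util.Collections;
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.common.config.ConfigResource;

public class ConfigEntryQuickStart {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address

        try (AdminClient adminClient = AdminClient.create(props)) {
            // Describe the configuration of a single topic (the topic name is an assumption).
            ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, "example-topic");
            Map<ConfigResource, Config> configs =
                adminClient.describeConfigs(Collections.singleton(resource)).all().get();

            // A Config is a collection of ConfigEntry objects, each carrying a name,
            // a value and a source (default, static broker config, dynamic topic config, ...).
            for (ConfigEntry entry : configs.get(resource).entries()) {
                System.out.printf("%s = %s (source: %s)%n", entry.name(), entry.value(), entry.source());
            }

            // ConfigEntry instances can also be constructed directly, e.g.
            // new ConfigEntry("retention.ms", "86400000"), when building a Config or an
            // AlterConfigOp for the alter-config APIs shown in the examples below.
        }
    }
}

Most of the examples below follow this same pattern: build a ConfigResource for a topic, read or construct ConfigEntry values, and pass them to describeConfigs, alterConfigs, or incrementalAlterConfigs.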
Example #1
Source File: RetentionByTopicFunctionTest.java    From data-highway with Apache License 2.0
@Test
public void typical() throws Exception {
  String topic = "topic";
  Collection<String> topics = singleton(topic);
  ConfigResource configResource = new ConfigResource(TOPIC, topic);
  Config config = new Config(singleton(new ConfigEntry("retention.ms", "1")));
  KafkaFuture<Map<ConfigResource, Config>> kafkaFuture = completedFuture(singletonMap(configResource, config));

  doReturn(describeConfigsResult).when(adminClient).describeConfigs(any());
  doReturn(kafkaFuture).when(describeConfigsResult).all();

  Map<String, Duration> result = underTest.apply(topics);

  assertThat(result.size(), is(1));
  Duration retention = result.get(topic);
  assertThat(retention, is(Duration.ofMillis(1)));
}
 
Example #2
Source File: Utils.java    From strimzi-kafka-operator with Apache License 2.0
public static TopicMetadata getTopicMetadata(Topic kubeTopic) {
    List<Node> nodes = new ArrayList<>();
    for (int nodeId = 0; nodeId < kubeTopic.getNumReplicas(); nodeId++) {
        nodes.add(new Node(nodeId, "localhost", 9092 + nodeId));
    }
    List<TopicPartitionInfo> partitions = new ArrayList<>();
    for (int partitionId = 0; partitionId < kubeTopic.getNumPartitions(); partitionId++) {
        partitions.add(new TopicPartitionInfo(partitionId, nodes.get(0), nodes, nodes));
    }
    List<ConfigEntry> configs = new ArrayList<>();
    for (Map.Entry<String, String> entry: kubeTopic.getConfig().entrySet()) {
        configs.add(new ConfigEntry(entry.getKey(), entry.getValue()));
    }

    return new TopicMetadata(new TopicDescription(kubeTopic.getTopicName().toString(), false,
            partitions), new Config(configs));
}
 
Example #3
Source File: ClusterStateSummary.java    From kafka-message-tool with MIT License
public String getTopicPropertyByName(String topicName, String propertyName) {
    final Optional<ClusterTopicInfo> found = topicsInfo.stream().filter(e -> e.getTopicName().equals(topicName)).findFirst();
    if (!found.isPresent()) {
        throw new RuntimeException(String.format("Topic with name '%s' not found", topicName));
    }
    final ClusterTopicInfo clusterTopicInfo = found.get();
    final Optional<ConfigEntry> propertyFound = clusterTopicInfo
        .getConfigEntries()
        .stream()
        .filter(e -> e.name().equalsIgnoreCase(propertyName)).findFirst();
    if (!propertyFound.isPresent()) {
        throw new RuntimeException(String.format("Could not find property '%s' for topic '%s' ", propertyName, topicName));
    }
    return propertyFound.get().value();

}
 
Example #4
Source File: BrokerConfigView.java    From kafka-message-tool with MIT License
private void showTopicConfigPropertiesWindow(KafkaClusterProxy kafkaClusterProxy,
                                             String topicName) {

    final Set<ConfigEntry> topicProperties = kafkaClusterProxy.getTopicProperties(topicName);
    try {
        ConfigEntriesView entriesView = new ConfigEntriesView("Topic properties", topicProperties, topicPropertiesViewPreferences);
        final TopicPropertiesWindow topicPropertiesWindow = TopicPropertiesWindow.get(topicName,
                                                                                      entriesView,
                                                                                      kafkaClusterProxy.getTopicOffsetsInfo());
        topicPropertiesWindow.show();


    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example #5
Source File: KafkaAdminClient.java    From common-kafka with Apache License 2.0
/**
 * Updates the given topic's config with the {@link Properties} provided. This is not additive but a full
 * replacement.
 *
 * @param topic
 *      the topic to update config for
 * @param properties
 *      the properties to assign to the topic
 * @throws IllegalArgumentException
 *      if topic is null, empty or blank, or properties is {@code null}
 * @throws AdminOperationException
 *      if there is an issue updating the topic config
 */
public void updateTopicConfig(String topic, Properties properties) {
    if (StringUtils.isBlank(topic))
        throw new IllegalArgumentException("topic cannot be null, empty or blank");
    if (properties == null)
        throw new IllegalArgumentException("properties cannot be null");

    LOG.debug("Updating topic config for topic [{}] with config [{}]", topic, properties);

    try {
        List<ConfigEntry> configEntries = new ArrayList<>();
        for (String property : properties.stringPropertyNames()) {
            configEntries.add(new ConfigEntry(property, properties.getProperty(property)));
        }

        getNewAdminClient()
            .alterConfigs(
                Collections.singletonMap(
                    new ConfigResource(ConfigResource.Type.TOPIC, topic),
                    new Config(configEntries)))
            .all()
            .get(operationTimeout, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        throw new AdminOperationException("Unable to update configuration for topic: " + topic, e);
    }
}
 
Example #6
Source File: KafkaMetricsServiceImpl.java    From kafka-eagle with Apache License 2.0
private String deleteTopicConfig(String clusterAlias, AdminClient adminClient, String topic, ConfigEntry configEntry) {
	try {
		String describeTopicConfigs = describeTopicConfig(clusterAlias, topic);
		JSONObject object = JSON.parseObject(describeTopicConfigs).getJSONObject("config");
		object.remove(configEntry.name());
		List<ConfigEntry> configEntrys = new ArrayList<>();
		for (String key : KConstants.Topic.getTopicConfigKeys()) {
			if (object.containsKey(key)) {
				configEntrys.add(new ConfigEntry(key, object.getString(key)));
			}
		}
		Map<ConfigResource, Config> configs = new HashMap<>();
		ConfigResource configRes = new ConfigResource(Type.TOPIC, topic);
		Config config = new Config(configEntrys);
		configs.put(configRes, config);
		adminClient.alterConfigs(configs);
		return KConstants.Topic.SUCCESS;
	} catch (Exception e) {
		e.printStackTrace();
		LOG.error("Delete topic[" + topic + "] config has error, msg is " + e.getMessage());
		return e.getMessage();
	}
}
 
Example #7
Source File: TopicServiceImpl.java    From kafka-helmsman with MIT License
/**
 * Transform a TopicDescription instance to ConfiguredTopic instance.
 *
 * @param td  an instance of TopicDescription
 * @param ktc a topic config future
 * @return an instance of ConfiguredTopic
 */
static ConfiguredTopic configuredTopic(TopicDescription td, KafkaFuture<Config> ktc) {
  int partitions = td.partitions().size();
  short replication = (short) td.partitions().iterator().next().replicas().size();
  try {
    Config tc = ktc.get();
    Map<String, String> configMap = tc
        .entries()
        .stream()
        .filter(TopicServiceImpl::isNonDefault)
        .collect(toMap(ConfigEntry::name, ConfigEntry::value));
    return new ConfiguredTopic(td.name(), partitions, replication, configMap);
  } catch (InterruptedException | ExecutionException e) {
    // TODO: FA-10109: Improve exception handling
    throw new RuntimeException(e);
  }
}
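
The isNonDefault predicate referenced in the filter above is not included in this excerpt. A plausible sketch based on ConfigEntry.ConfigSource is shown below; it is an assumption for illustration, not the project's actual implementation.

// Hypothetical sketch of the isNonDefault predicate referenced above;
// the real kafka-helmsman implementation may differ.
static boolean isNonDefault(ConfigEntry entry) {
  // Treat only dynamic, per-topic overrides as non-default configuration.
  return entry.source() == ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG;
}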
 
Example #8
Source File: TopicSerialization.java    From strimzi-kafka-operator with Apache License 2.0
/**
 * Create a Topic to reflect the given TopicMetadata.
 */
public static Topic fromTopicMetadata(TopicMetadata meta) {
    if (meta == null) {
        return null;
    }
    Topic.Builder builder = new Topic.Builder()
            .withTopicName(meta.getDescription().name())
            .withNumPartitions(meta.getDescription().partitions().size())
            .withNumReplicas((short) meta.getDescription().partitions().get(0).replicas().size())
            .withMetadata(null);
    for (ConfigEntry entry: meta.getConfig().entries()) {
        if (entry.source() != ConfigEntry.ConfigSource.DEFAULT_CONFIG
            && entry.source() != ConfigEntry.ConfigSource.STATIC_BROKER_CONFIG) {
            builder.withConfigEntry(entry.name(), entry.value());
        }
    }
    return builder.build();
}
 
Example #9
Source File: TopicOperatorBaseIT.java    From strimzi-kafka-operator with Apache License 2.0
protected String alterTopicConfigInKafka(String topicName, String key, Function<String, String> mutator) throws InterruptedException, ExecutionException {
    // Get the topic config
    ConfigResource configResource = topicConfigResource(topicName);
    org.apache.kafka.clients.admin.Config config = getTopicConfig(configResource);

    Map<String, ConfigEntry> m = new HashMap<>();
    for (ConfigEntry entry: config.entries()) {
        if (entry.name().equals(key)
            || entry.source() != ConfigEntry.ConfigSource.DEFAULT_CONFIG
                && entry.source() != ConfigEntry.ConfigSource.STATIC_BROKER_CONFIG) {
            m.put(entry.name(), entry);
        }
    }
    final String changedValue = mutator.apply(m.get(key).value());
    m.put(key, new ConfigEntry(key, changedValue));
    LOGGER.info("Changing topic config {} to {}", key, changedValue);

    // Update the topic config
    AlterConfigsResult cgf = adminClient.alterConfigs(singletonMap(configResource,
            new org.apache.kafka.clients.admin.Config(m.values())));
    cgf.all().get();
    return changedValue;
}
 
Example #10
Source File: SamplingUtilsTest.java    From cruise-control with BSD 2-Clause "Simplified" License
@Test
public void testMaybeUpdateTopicConfig() throws InterruptedException, ExecutionException, TimeoutException {
  AdminClient adminClient = EasyMock.createMock(AdminClient.class);
  DescribeConfigsResult describeConfigsResult = EasyMock.createMock(DescribeConfigsResult.class);
  KafkaFuture<Config> describedConfigsFuture = EasyMock.createMock(KafkaFuture.class);
  Config topicConfig = EasyMock.createMock(Config.class);
  AlterConfigsResult alterConfigsResult = EasyMock.createMock(AlterConfigsResult.class);
  Set<AlterConfigOp> alterConfigOps = Collections.singleton(new AlterConfigOp(
      new ConfigEntry(RetentionMsProp(), Long.toString(MOCK_DESIRED_RETENTION_MS)), AlterConfigOp.OpType.SET));
  Map<ConfigResource, KafkaFuture<Config>> describeConfigsValues = Collections.singletonMap(MOCK_TOPIC_RESOURCE,
                                                                                            describedConfigsFuture);
  Map<ConfigResource, KafkaFuture<Void>> alterConfigsValues = Collections.singletonMap(MOCK_TOPIC_RESOURCE,
                                                                                       EasyMock.createMock(KafkaFuture.class));

  NewTopic topicToUpdateConfigs = SamplingUtils.wrapTopic(MOCK_TOPIC, MOCK_PARTITION_COUNT, MOCK_REPLICATION_FACTOR, MOCK_DESIRED_RETENTION_MS);
  EasyMock.expect(adminClient.describeConfigs(EasyMock.eq(Collections.singleton(MOCK_TOPIC_RESOURCE)))).andReturn(describeConfigsResult);
  EasyMock.expect(describeConfigsResult.values()).andReturn(describeConfigsValues);
  EasyMock.expect(describedConfigsFuture.get(CLIENT_REQUEST_TIMEOUT_MS, TimeUnit.MILLISECONDS)).andReturn(topicConfig);
  EasyMock.expect(topicConfig.get(EasyMock.eq(CleanupPolicyProp()))).andReturn(new ConfigEntry(CleanupPolicyProp(),
                                                                                               DEFAULT_CLEANUP_POLICY));
  EasyMock.expect(topicConfig.get(EasyMock.eq(RetentionMsProp()))).andReturn(new ConfigEntry(RetentionMsProp(),
                                                                                             MOCK_CURRENT_RETENTION_MS));
  EasyMock.expect(adminClient.incrementalAlterConfigs(EasyMock.eq(Collections.singletonMap(MOCK_TOPIC_RESOURCE,
                                                                                           alterConfigOps))))
          .andReturn(alterConfigsResult);
  EasyMock.expect(alterConfigsResult.values()).andReturn(alterConfigsValues);
  EasyMock.replay(adminClient, describeConfigsResult, describedConfigsFuture, topicConfig, alterConfigsResult);


  boolean updateTopicConfig = SamplingUtils.maybeUpdateTopicConfig(adminClient, topicToUpdateConfigs);
  EasyMock.verify(adminClient, describeConfigsResult, describedConfigsFuture, topicConfig, alterConfigsResult);
  assertTrue(updateTopicConfig);
}
 
Example #11
Source File: TopicAdmin.java    From kafka-message-tool with MIT License
public Set<ConfigEntry> getConfigEntriesForTopic(String topicName) {
    final ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    final DescribeConfigsResult topicConfiEntries = kafkaClientsAdminClient.describeConfigs(Collections.singleton(configResource));
    try {
        final Config config = topicConfiEntries.all().get(ApplicationConstants.FUTURE_GET_TIMEOUT_MS, TimeUnit.MILLISECONDS).get(configResource);
        final Collection<ConfigEntry> entries = config.entries();
        Logger.debug(String.format("Config entries for topic '%s' : %n%s", topicName, AppUtils.configEntriesToPrettyString(entries)));
        return new HashSet<>(entries);
    } catch (Exception e) {
        Logger.error(String.format("Could not retrieve config resource for topic '%s'", topicName), e);
    }
    return Collections.emptySet();
}
 
Example #12
Source File: ClusterStateSummary.java    From kafka-message-tool with MIT License
public Set<ConfigEntry> getTopicProperties(String topicName) {
    // Just take the first matching ClusterTopicInfo; the config entries
    // should be the same on the rest of the nodes anyway.
    for (ClusterTopicInfo clusterTopicInfo : topicsInfo) {
        if (clusterTopicInfo.getTopicName().equals(topicName)) {
            return clusterTopicInfo.getConfigEntries();
        }
    }
    return Collections.emptySet();
}
 
Example #13
Source File: DefaultKafkaClusterProxy.java    From kafka-message-tool with MIT License
@Override
public void updateTopic(TopicAlterableProperties topicDetails) {
    Map<ConfigResource, Config> configs = new HashMap<>();

    final ArrayList<ConfigEntry> configEntries = new ArrayList<>();

    configEntries.add(new ConfigEntry(TopicConfig.RETENTION_MS_CONFIG,
                                      String.valueOf(topicDetails.getRetentionMilliseconds())));

    final Config config = new Config(configEntries);
    configs.put(new ConfigResource(ConfigResource.Type.TOPIC, topicDetails.getTopicName()), config);
    kafkaClientsAdminClient.alterConfigs(configs);
}
 
Example #14
Source File: KafkaAdminClient.java    From common-kafka with Apache License 2.0
/**
 * Returns the {@link Properties} associated to the topic
 *
 * @param topic
 *      a Kafka topic
 * @return the {@link Properties} associated to the topic
 * @throws IllegalArgumentException
 *      if topic is null, empty or blank
 * @throws AdminOperationException
 *      if there is an issue reading the topic config
 */
public Properties getTopicConfig(String topic) {
    if (StringUtils.isBlank(topic))
        throw new IllegalArgumentException("topic cannot be null, empty or blank");

    LOG.debug("Fetching topic config for topic [{}]", topic);

    try {
        ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topic);
        Map<ConfigResource, Config> configs = getNewAdminClient()
            .describeConfigs(Collections.singleton(resource))
            .all()
            .get(operationTimeout, TimeUnit.MILLISECONDS);
        Config config = configs.get(resource);
        if (config == null) {
            throw new AdminOperationException("Unable to get topic config: " + topic);
        }

        Properties properties = new Properties();
        config.entries().stream()
            // We are only interested in any overrides that are set
            .filter(configEntry -> configEntry.source() == ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG)
            .forEach(configEntry -> properties.setProperty(configEntry.name(), configEntry.value()));
        return properties;
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        throw new AdminOperationException("Unable to retrieve configuration for topic: " + topic, e);
    }
}
 
Example #15
Source File: TopicSerializationTest.java    From strimzi-kafka-operator with Apache License 2.0
@Test
public void testFromTopicMetadata() {
    List<ConfigEntry> entries = new ArrayList<>();
    entries.add(new ConfigEntry("foo", "bar"));
    Config topicConfig = new Config(entries);
    TopicMetadata meta = Utils.getTopicMetadata("test-topic", topicConfig);
    Topic topic = TopicSerialization.fromTopicMetadata(meta);
    assertThat(topic.getTopicName(), is(new TopicName("test-topic")));
    // Resource name is null because Kafka doesn't know about the Kubernetes resource
    assertThat(topic.getResourceName(), is(nullValue()));
    assertThat(topic.getConfig(), is(singletonMap("foo", "bar")));
    assertThat(topic.getNumPartitions(), is(2));
    assertThat(topic.getNumReplicas(), is((short) 3));
}
 
Example #16
Source File: SamplingUtils.java    From cruise-control with BSD 2-Clause "Simplified" License
/**
 * Add config altering operations to the given configs to alter for configs that differ between current and desired.
 *
 * @param configsToAlter A set of config altering operations to be populated.
 * @param desiredConfig Desired config value by name.
 * @param currentConfig Current config.
 */
private static void maybeUpdateConfig(Set<AlterConfigOp> configsToAlter, Map<String, String> desiredConfig, Config currentConfig) {
  for (Map.Entry<String, String> entry : desiredConfig.entrySet()) {
    String configName = entry.getKey();
    String targetConfigValue = entry.getValue();
    ConfigEntry currentConfigEntry = currentConfig.get(configName);
    if (currentConfigEntry == null || !currentConfigEntry.value().equals(targetConfigValue)) {
      configsToAlter.add(new AlterConfigOp(new ConfigEntry(configName, targetConfigValue), AlterConfigOp.OpType.SET));
    }
  }
}
 
Example #17
Source File: TopicSerialization.java    From strimzi-kafka-operator with Apache License 2.0
/**
 * Return a singleton map from the topic {@link ConfigResource} for the given topic,
 * to the {@link Config} of the given topic.
 * @return a singleton map from the topic's {@link ConfigResource} to its {@link Config}
 */
public static Map<ConfigResource, Config> toTopicConfig(Topic topic) {
    List<ConfigEntry> entries = new ArrayList<>(topic.getConfig().size());

    for (Map.Entry<String, String> entry : topic.getConfig().entrySet()) {
        ConfigEntry configEntry = new ConfigEntry(entry.getKey(), entry.getValue());
        entries.add(configEntry);
    }

    return Collections.singletonMap(
            new ConfigResource(ConfigResource.Type.TOPIC, topic.getTopicName().toString()),
            new Config(entries));
}
 
Example #18
Source File: CruiseControlMetricsUtils.java    From cruise-control with BSD 2-Clause "Simplified" License
/**
 * Create a config-altering operation if a config's current value does not equal the target value.
 * @param configsToAlter Set of config altering operations to be applied.
 * @param configsToSet Configs to set.
 * @param currentConfig The current config.
 */
public static void maybeUpdateConfig(Set<AlterConfigOp> configsToAlter,
                                     Map<String, String> configsToSet,
                                     Config currentConfig) {
  for (Map.Entry<String, String> entry : configsToSet.entrySet()) {
    String configName = entry.getKey();
    String targetConfigValue = entry.getValue();
    if (currentConfig.get(configName) == null || !currentConfig.get(configName).value().equals(targetConfigValue)) {
      configsToAlter.add(new AlterConfigOp(new ConfigEntry(configName, targetConfigValue), AlterConfigOp.OpType.SET));
    }
  }
}
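
The helper above only collects AlterConfigOp instances; applying them is a separate step. Below is a minimal sketch of that last step, assuming an Admin client in scope and an illustrative retention.ms target; it is not part of the cruise-control source.

// Sketch: collect the operations with maybeUpdateConfig above, then apply them
// with incrementalAlterConfigs. The topic name and target value are assumptions.
static void applyDesiredConfig(Admin adminClient, String topic) throws Exception {
  ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topic);
  Config currentConfig = adminClient.describeConfigs(Collections.singleton(resource))
      .all().get().get(resource);

  Set<AlterConfigOp> configsToAlter = new HashSet<>();
  maybeUpdateConfig(configsToAlter, Collections.singletonMap("retention.ms", "86400000"), currentConfig);

  if (!configsToAlter.isEmpty()) {
    Map<ConfigResource, Collection<AlterConfigOp>> request = new HashMap<>();
    request.put(resource, configsToAlter);
    adminClient.incrementalAlterConfigs(request).all().get();
  }
}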
 
Example #19
Source File: KafkaMetricsServiceImpl.java    From kafka-eagle with Apache License 2.0
/** Alter topic config. */
public String changeTopicConfig(String clusterAlias, String topic, String type, ConfigEntry configEntry) {
	JSONObject object = new JSONObject();
	Properties prop = new Properties();
	prop.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, parseBrokerServer(clusterAlias));
	if (SystemConfigUtils.getBooleanProperty(clusterAlias + ".kafka.eagle.sasl.enable")) {
		kafkaService.sasl(prop, clusterAlias);
	}
	if (SystemConfigUtils.getBooleanProperty(clusterAlias + ".kafka.eagle.ssl.enable")) {
		kafkaService.ssl(prop, clusterAlias);
	}
	try {
		switch (type) {
		case Topic.ADD:
			AdminClient adminClientAdd = AdminClient.create(prop);
			object.put("type", type);
			object.put("value", addTopicConfig(clusterAlias, adminClientAdd, topic, configEntry));
			adminClientAdd.close();
			break;
		case Topic.DELETE:
			AdminClient adminClientDelete = AdminClient.create(prop);
			object.put("type", type);
			object.put("value", deleteTopicConfig(clusterAlias, adminClientDelete, topic, configEntry));
			adminClientDelete.close();
			break;
		case Topic.DESCRIBE:
			object.put("type", type);
			object.put("value", describeTopicConfig(clusterAlias, topic));
			break;
		default:
			break;
		}

	} catch (Exception e) {
		e.printStackTrace();
		LOG.error("Type[" + type + "] topic config has error, msg is " + e.getMessage());
	}
	return object.toJSONString();
}
 
Example #20
Source File: KafkaMetricsServiceImpl.java    From kafka-eagle with Apache License 2.0
private String addTopicConfig(String clusterAlias, AdminClient adminClient, String topic, ConfigEntry configEntry) {
	try {
		String describeTopicConfigs = describeTopicConfig(clusterAlias, topic);
		JSONObject object = JSON.parseObject(describeTopicConfigs).getJSONObject("config");
		if (object.containsKey(configEntry.name())) {
			object.remove(configEntry.name());
		}
		List<ConfigEntry> configEntrys = new ArrayList<>();
		for (String key : KConstants.Topic.getTopicConfigKeys()) {
			if (object.containsKey(key)) {
				configEntrys.add(new ConfigEntry(key, object.getString(key)));
			}
		}
		configEntrys.add(configEntry);
		Map<ConfigResource, Config> configs = new HashMap<>();
		ConfigResource configRes = new ConfigResource(Type.TOPIC, topic);
		Config config = new Config(configEntrys);
		configs.put(configRes, config);
		AlterConfigsResult alterConfig = adminClient.alterConfigs(configs);
		alterConfig.all().get();
		return KConstants.Topic.SUCCESS;
	} catch (Exception e) {
		e.printStackTrace();
		LOG.error("Add topic[" + topic + "] config has error, msg is " + e.getMessage());
		return e.getMessage();
	}
}
 
Example #21
Source File: TopicEnsure.java    From common-docker with Apache License 2.0
public boolean validateTopic(TopicSpec spec, int timeOut) throws Exception {
  // Describe topic.
  DescribeTopicsResult topicDescribeResult = adminClient.describeTopics(
      Collections.singletonList(spec.name()), new DescribeTopicsOptions().timeoutMs(timeOut)
  );
  TopicDescription topic = topicDescribeResult.all().get().get(spec.name());

  // Get topic config.
  ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, spec.name());
  DescribeConfigsResult configResult = adminClient.describeConfigs(
      Collections.singletonList(configResource)
  );
  Map<ConfigResource, Config> resultMap = configResult.all().get();
  Config config = resultMap.get(configResource);

  // Create actual TopicSpec.
  Map<String, String> actualConfig = new HashMap<>();
  for (Map.Entry<String, String> entry : spec.config().entrySet()) {
    ConfigEntry actualConfigEntry = config.get(entry.getKey());
    if (actualConfigEntry != null) {
      actualConfig.put(entry.getKey(), actualConfigEntry.value());
    }
  }

  TopicSpec actualSpec = new TopicSpec(
      topic.name(), topic.partitions().size(),
      topic.partitions().get(0).replicas().size(), actualConfig
  );

  boolean isTopicValid = actualSpec.equals(spec);
  if (!isTopicValid) {
    System.err.printf(
        "Invalid topic [ %s ] ! Expected %s but got %s\n", spec.name(), spec, actualSpec
    );
  }

  return isTopicValid;
}
 
Example #22
Source File: KafkaOperations.java    From kafka-webview with MIT License
/**
 * Modify configuration values for a specific topic.
 * @param topic The topic to modify.
 * @param configItems Map of Key to Value to modify.
 * @return the updated {@link TopicConfig} for the topic
 */
public TopicConfig alterTopicConfig(final String topic, final Map<String, String> configItems) {
    try {
        // Define the resource we want to modify, the topic.
        final ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, topic);

        final List<ConfigEntry> configEntries = new ArrayList<>();
        for (final Map.Entry<String, String> entry : configItems.entrySet()) {
            configEntries.add(
                new ConfigEntry(entry.getKey(), entry.getValue())
            );
        }

        // Define the configuration set
        final Config config = new Config(configEntries);

        // Apply the config changes to the topic
        final AlterConfigsResult result = adminClient.alterConfigs(Collections.singletonMap(configResource, config));

        // Wait for the async request to process.
        result.all().get();

        // Return the updated topic details
        return getTopicConfig(topic);
    } catch (final ExecutionException e) {
        throw handleExecutionException(e);
    } catch (final InterruptedException exception) {
        // TODO Handle this
        throw new RuntimeException(exception.getMessage(), exception);
    }
}
 
Example #23
Source File: KafkaAvailabilityTest.java    From strimzi-kafka-operator with Apache License 2.0
void mockDescribeConfigs(Admin mockAc) {
    when(mockAc.describeConfigs(any())).thenAnswer(invocation -> {
        Collection<ConfigResource> argument = invocation.getArgument(0);
        DescribeConfigsResult dcr = mock(DescribeConfigsResult.class);
        Throwable throwable = null;
        for (ConfigResource configResource : argument) {
            throwable = describeConfigsResult.get(configResource);
            if (throwable != null) {
                break;
            }
        }
        when(dcr.values()).thenThrow(notImplemented());
        if (throwable != null) {
            when(dcr.all()).thenReturn(failedFuture(throwable));
        } else {
            Map<ConfigResource, Config> result = new HashMap<>();
            for (ConfigResource cr : argument) {
                List<ConfigEntry> entries = new ArrayList<>();
                for (Map.Entry<String, String> e : topics.get(cr.name()).configs.entrySet()) {
                    ConfigEntry ce = new ConfigEntry(e.getKey(), e.getValue());
                    entries.add(ce);
                }
                result.put(cr, new Config(entries));
            }
            when(dcr.all()).thenReturn(KafkaFuture.completedFuture(result));
        }
        return dcr;
    });
}
 
Example #24
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License
private void updateTopicConfigPostAK23(Topic topic, String fullTopicName)
    throws ExecutionException, InterruptedException {

  Config currentConfigs = getActualTopicConfig(fullTopicName);

  Map<ConfigResource, Collection<AlterConfigOp>> configs = new HashMap<>();
  ArrayList<AlterConfigOp> listOfValues = new ArrayList<>();

  topic
      .rawConfig()
      .forEach(
          (configKey, configValue) -> {
            listOfValues.add(
                new AlterConfigOp(new ConfigEntry(configKey, configValue), OpType.SET));
          });
  Set<String> newEntryKeys = topic.rawConfig().keySet();

  currentConfigs
      .entries()
      .forEach(
          entry -> {
            if (!newEntryKeys.contains(entry.name())) {
              listOfValues.add(new AlterConfigOp(entry, OpType.DELETE));
            }
          });

  configs.put(new ConfigResource(Type.TOPIC, fullTopicName), listOfValues);

  adminClient.incrementalAlterConfigs(configs).all().get();
}
 
Example #25
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private ConfigEntry defaultConfigEntry(final String key, final String value) {
  final ConfigEntry config = mock(ConfigEntry.class);
  expect(config.name()).andReturn(key);
  expect(config.value()).andReturn(value);
  expect(config.source()).andReturn(ConfigEntry.ConfigSource.DEFAULT_CONFIG);
  replay(config);
  return config;
}
 
Example #26
Source File: TopicServiceImplTest.java    From kafka-helmsman with MIT License
@Test
public void testListExisting() {
  Cluster cluster = createCluster(1);
  TopicPartitionInfo tp = new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.emptyList());
  ConfigEntry configEntry = new ConfigEntry("k", "v");
  KafkaFuture<Config> kfc = KafkaFuture.completedFuture(new Config(Collections.singletonList(configEntry)));
  Set<String> topicNames = new HashSet<>(Arrays.asList("a", "b", "_c"));
  Map<String, TopicDescription> tds = new HashMap<String, TopicDescription>() {
    {
      put("a", new TopicDescription("a", false, Collections.singletonList(tp)));
      put("b", new TopicDescription("b", false, Collections.singletonList(tp)));
      put("c", new TopicDescription("_c", false, Collections.singletonList(tp)));
    }
  };
  Map<ConfigResource, KafkaFuture<Config>> configs = new HashMap<ConfigResource, KafkaFuture<Config>>() {
    {
      put(new ConfigResource(TOPIC, "a"), kfc);
      put(new ConfigResource(TOPIC, "b"), kfc);
      put(new ConfigResource(TOPIC, "_c"), kfc);
    }
  };

  TopicService service = new TopicServiceImpl(adminClient, true);
  ListTopicsResult listTopicsResult = mock(ListTopicsResult.class);
  DescribeTopicsResult describeTopicsResult = mock(DescribeTopicsResult.class);
  DescribeConfigsResult describeConfigsResult = mock(DescribeConfigsResult.class);

  when(describeTopicsResult.all()).thenReturn(KafkaFuture.completedFuture(tds));
  when(listTopicsResult.names()).thenReturn(KafkaFuture.completedFuture(topicNames));
  when(describeConfigsResult.values()).thenReturn(configs);
  when(adminClient.listTopics(any(ListTopicsOptions.class))).thenReturn(listTopicsResult);
  when(adminClient.describeTopics(topicNames)).thenReturn(describeTopicsResult);
  when(adminClient.describeConfigs(any(Collection.class))).thenReturn(describeConfigsResult);

  Map<String, ConfiguredTopic> actual = service.listExisting(true);
  Assert.assertEquals(2, actual.size());
  Assert.assertEquals(new HashSet<>(Arrays.asList("a", "b")), actual.keySet());
}
 
Example #27
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private ConfigEntry overriddenConfigEntry(final String key, final String value) {
  final ConfigEntry config = mock(ConfigEntry.class);
  expect(config.name()).andReturn(key);
  expect(config.value()).andReturn(value);
  expect(config.source()).andReturn(ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG);
  replay(config);
  return config;
}
 
Example #28
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private static DescribeConfigsResult topicConfigResponse(final String topicName,
                                                         final ConfigEntry... entries) {

  final Map<ConfigResource, Config> config = ImmutableMap.of(
      new ConfigResource(ConfigResource.Type.TOPIC, topicName),
      new Config(Arrays.asList(entries)));

  final DescribeConfigsResult response = mock(DescribeConfigsResult.class);
  expect(response.all()).andReturn(KafkaFuture.completedFuture(config));
  replay(response);
  return response;
}
 
Example #29
Source File: KafkaTopicClientImplTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private static Map<ConfigResource, Config> withResourceConfig(final ConfigResource resource,
                                                              final ConfigEntry... entries) {
  final Set<ConfigEntry> expected = Arrays.stream(entries)
      .collect(Collectors.toSet());

  class ConfigMatcher implements IArgumentMatcher {
    @SuppressWarnings("unchecked")
    @Override
    public boolean matches(final Object argument) {
      final Map<ConfigResource, Config> request = (Map<ConfigResource, Config>)argument;
      if (request.size() != 1) {
        return false;
      }

      final Config config = request.get(resource);
      if (config == null) {
        return false;
      }

      final Set<ConfigEntry> actual = new HashSet<>(config.entries());
      return actual.equals(expected);
    }

    @Override
    public void appendTo(final StringBuffer buffer) {
      buffer.append(resource).append("->")
          .append("Config{").append(expected).append("}");
    }
  }
  EasyMock.reportMatcher(new ConfigMatcher());
  return null;
}
 
Example #30
Source File: AppUtils.java    From kafka-message-tool with MIT License
public static String configEntriesToPrettyString(Collection<ConfigEntry> entries) {
    StringBuilder b = new StringBuilder();
    entries.forEach(entry -> {
        b.append(String.format("%s\n", entry));
    });
    return b.toString();
}