
Example 6 with ConfigEntry

Use of org.apache.kafka.clients.admin.ConfigEntry in project strimzi by strimzi.

The class TopicSerialization, method toTopicConfig.

/**
 * Return a singleton map from the topic {@link ConfigResource} for the given topic,
 * to the {@link Config} of the given topic.
 */
public static Map<ConfigResource, Config> toTopicConfig(Topic topic) {
    Set<ConfigEntry> configEntries = new HashSet<>();
    for (Map.Entry<String, String> entry : topic.getConfig().entrySet()) {
        configEntries.add(new ConfigEntry(entry.getKey(), entry.getValue()));
    }
    Config config = new Config(configEntries);
    return Collections.singletonMap(new ConfigResource(ConfigResource.Type.TOPIC, topic.getTopicName().toString()), config);
}
Also used : ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) Config(org.apache.kafka.clients.admin.Config) LogConfig(kafka.log.LogConfig) HashMap(java.util.HashMap) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) Map(java.util.Map) ConfigResource(org.apache.kafka.common.config.ConfigResource) HashSet(java.util.HashSet)
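
A minimal sketch of how the returned map could be handed to the Kafka AdminClient; the topic and adminClient instances are assumed to be in scope, and the blocking get() is only for illustration.

// Hypothetical caller: 'topic' (strimzi Topic) and 'adminClient' (Kafka AdminClient) are assumed to exist.
Map<ConfigResource, Config> alterRequest = TopicSerialization.toTopicConfig(topic);
// alterConfigs applies the supplied Config per ConfigResource; all() completes once every
// resource has been updated, or fails with the first error.
adminClient.alterConfigs(alterRequest).all().get();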

Example 7 with ConfigEntry

Use of org.apache.kafka.clients.admin.ConfigEntry in project ksql by confluentinc.

The class KafkaTopicClientImplTest, method withResourceConfig.

/*
   * Config has broken hashCode & equals method:
   * https://issues.apache.org/jira/browse/KAFKA-6727
   */
private static Map<ConfigResource, Config> withResourceConfig(final ConfigResource resource, final ConfigEntry... entries) {
    final Set<ConfigEntry> expected = Arrays.stream(entries).collect(Collectors.toSet());
    class ConfigMatcher implements IArgumentMatcher {

        @SuppressWarnings("unchecked")
        @Override
        public boolean matches(final Object argument) {
            final Map<ConfigResource, Config> request = (Map<ConfigResource, Config>) argument;
            if (request.size() != 1) {
                return false;
            }
            final Config config = request.get(resource);
            if (config == null) {
                return false;
            }
            final Set<ConfigEntry> actual = new HashSet<>(config.entries());
            return actual.equals(expected);
        }

        @Override
        public void appendTo(final StringBuffer buffer) {
            buffer.append(resource).append("->").append("Config{").append(expected).append("}");
        }
    }
    EasyMock.reportMatcher(new ConfigMatcher());
    return null;
}
Also used : ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) TopicConfig(org.apache.kafka.common.config.TopicConfig) Config(org.apache.kafka.clients.admin.Config) IArgumentMatcher(org.easymock.IArgumentMatcher) EasyMock.anyObject(org.easymock.EasyMock.anyObject) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) ConfigResource(org.apache.kafka.common.config.ConfigResource) HashSet(java.util.HashSet)
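
The helper follows the usual EasyMock custom-matcher pattern: it registers the matcher via reportMatcher and returns null, so it only makes sense inside an expectation. A minimal sketch, assuming EasyMock static imports and a mocked AdminClient plus a canned AlterConfigsResult (all hypothetical here):

// Hypothetical expectation: withResourceConfig registers the ConfigMatcher and returns null;
// EasyMock substitutes the matcher for the alterConfigs argument while recording.
expect(adminClient.alterConfigs(
    withResourceConfig(resource, new ConfigEntry("retention.ms", "1000"))))
    .andReturn(alterConfigsResult);
replay(adminClient);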

Example 8 with ConfigEntry

Use of org.apache.kafka.clients.admin.ConfigEntry in project ksql by confluentinc.

The class KafkaTopicClientImplTest, method describeBrokerResult.

private DescribeConfigsResult describeBrokerResult() {
    DescribeConfigsResult describeConfigsResult = mock(DescribeConfigsResult.class);
    ConfigEntry configEntryDeleteEnable = new ConfigEntry("delete.topic.enable", "true");
    List<ConfigEntry> configEntries = new ArrayList<>();
    configEntries.add(configEntryDeleteEnable);
    Map<ConfigResource, Config> config = ImmutableMap.of(new ConfigResource(ConfigResource.Type.BROKER, node.idString()), new Config(configEntries));
    expect(describeConfigsResult.all()).andReturn(KafkaFuture.completedFuture(config));
    replay(describeConfigsResult);
    return describeConfigsResult;
}
Also used : ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) TopicConfig(org.apache.kafka.common.config.TopicConfig) Config(org.apache.kafka.clients.admin.Config) ArrayList(java.util.ArrayList) DescribeConfigsResult(org.apache.kafka.clients.admin.DescribeConfigsResult) ConfigResource(org.apache.kafka.common.config.ConfigResource)
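
A sketch of how the stubbed result might be wired onto a mocked AdminClient; the adminClient mock is an assumption, not part of the excerpt:

// Hypothetical wiring: any describeConfigs call now yields the canned broker config
// containing delete.topic.enable=true built by describeBrokerResult().
expect(adminClient.describeConfigs(anyObject())).andReturn(describeBrokerResult());
replay(adminClient);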

Example 9 with ConfigEntry

Use of org.apache.kafka.clients.admin.ConfigEntry in project ksql by confluentinc.

The class KafkaTopicClientImpl, method addTopicConfig.

@Override
public boolean addTopicConfig(final String topicName, final Map<String, ?> overrides) {
    final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    try {
        final Map<String, String> existingConfig = topicConfig(topicName, false);
        final boolean changed = overrides.entrySet().stream()
                .anyMatch(e -> !Objects.equals(existingConfig.get(e.getKey()), e.getValue()));
        if (!changed) {
            return false;
        }
        existingConfig.putAll(toStringConfigs(overrides));
        final Set<ConfigEntry> entries = existingConfig.entrySet().stream()
                .map(e -> new ConfigEntry(e.getKey(), e.getValue()))
                .collect(Collectors.toSet());
        final Map<ConfigResource, Config> request = Collections.singletonMap(resource, new Config(entries));
        executeWithRetries(() -> adminClient.alterConfigs(request).all());
        return true;
    } catch (final Exception e) {
        throw new KafkaResponseGetFailedException("Failed to set config for Kafka Topic " + topicName, e);
    }
}
Also used : Config(org.apache.kafka.clients.admin.Config) KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException) LoggerFactory(org.slf4j.LoggerFactory) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) RetriableException(org.apache.kafka.common.errors.RetriableException) Supplier(java.util.function.Supplier) AdminClient(org.apache.kafka.clients.admin.AdminClient) ConfigResource(org.apache.kafka.common.config.ConfigResource) Lists(com.google.common.collect.Lists) Map(java.util.Map) DeleteTopicsResult(org.apache.kafka.clients.admin.DeleteTopicsResult) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) TopicConfig(org.apache.kafka.common.config.TopicConfig) Logger(org.slf4j.Logger) Collection(java.util.Collection) NewTopic(org.apache.kafka.clients.admin.NewTopic) Set(java.util.Set) KafkaFuture(org.apache.kafka.common.KafkaFuture) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException) List(java.util.List) Node(org.apache.kafka.common.Node) DescribeClusterResult(org.apache.kafka.clients.admin.DescribeClusterResult) Collections(java.util.Collections)
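
A hedged usage sketch; the client instance and topic name are assumptions. addTopicConfig returns false when the topic already carries the requested override, and true after it issues an alterConfigs call:

// Hypothetical call site: 'client' is a KafkaTopicClientImpl backed by a live AdminClient.
final boolean changed = client.addTopicConfig(
    "orders",
    ImmutableMap.of(TopicConfig.RETENTION_MS_CONFIG, "86400000"));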

Example 10 with ConfigEntry

Use of org.apache.kafka.clients.admin.ConfigEntry in project ksql by confluentinc.

The class KafkaTopicClientImpl, method topicConfig.

private Map<String, String> topicConfig(final String topicName, final boolean includeDefaults) {
    final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    final List<ConfigResource> request = Collections.singletonList(resource);
    try {
        final Config config = executeWithRetries(() -> adminClient.describeConfigs(request).all()).get(resource);
        return config.entries().stream()
                .filter(e -> includeDefaults
                        || e.source().equals(ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG))
                .collect(Collectors.toMap(ConfigEntry::name, ConfigEntry::value));
    } catch (final Exception e) {
        throw new KafkaResponseGetFailedException("Failed to get config for Kafka Topic " + topicName, e);
    }
}
Also used : Config(org.apache.kafka.clients.admin.Config) KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException) LoggerFactory(org.slf4j.LoggerFactory) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) RetriableException(org.apache.kafka.common.errors.RetriableException) Supplier(java.util.function.Supplier) AdminClient(org.apache.kafka.clients.admin.AdminClient) ConfigResource(org.apache.kafka.common.config.ConfigResource) Lists(com.google.common.collect.Lists) Map(java.util.Map) DeleteTopicsResult(org.apache.kafka.clients.admin.DeleteTopicsResult) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) TopicConfig(org.apache.kafka.common.config.TopicConfig) Logger(org.slf4j.Logger) Collection(java.util.Collection) NewTopic(org.apache.kafka.clients.admin.NewTopic) Set(java.util.Set) KafkaFuture(org.apache.kafka.common.KafkaFuture) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException) List(java.util.List) Node(org.apache.kafka.common.Node) DescribeClusterResult(org.apache.kafka.clients.admin.DescribeClusterResult) Collections(java.util.Collections)
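
The filter on ConfigEntry.source() is what separates per-topic dynamic overrides from broker and static defaults. A small illustration of the same idea written as a plain loop over an already-fetched Config (variable names are assumptions):

// Keep only entries that were set as dynamic topic overrides; defaults are skipped,
// mirroring the includeDefaults == false branch above.
final Map<String, String> overridesOnly = new HashMap<>();
for (final ConfigEntry entry : config.entries()) {
    if (entry.source() == ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG) {
        overridesOnly.put(entry.name(), entry.value());
    }
}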

Aggregations

ConfigEntry (org.apache.kafka.clients.admin.ConfigEntry): 11
Config (org.apache.kafka.clients.admin.Config): 7
ConfigResource (org.apache.kafka.common.config.ConfigResource): 7
Map (java.util.Map): 5
ArrayList (java.util.ArrayList): 4
TopicDescription (org.apache.kafka.clients.admin.TopicDescription): 4
Node (org.apache.kafka.common.Node): 4
TopicConfig (org.apache.kafka.common.config.TopicConfig): 4
HashMap (java.util.HashMap): 3
NewTopic (org.apache.kafka.clients.admin.NewTopic): 3
Lists (com.google.common.collect.Lists): 2
KafkaResponseGetFailedException (io.confluent.ksql.exception.KafkaResponseGetFailedException): 2
KafkaTopicException (io.confluent.ksql.exception.KafkaTopicException): 2
ConfigMap (io.fabric8.kubernetes.api.model.ConfigMap): 2
Collection (java.util.Collection): 2
Collections (java.util.Collections): 2
HashSet (java.util.HashSet): 2
List (java.util.List): 2
Objects (java.util.Objects): 2
Set (java.util.Set): 2