Search in sources :

Example 31 with ConfigResource

use of org.apache.kafka.common.config.ConfigResource in project ksql by confluentinc.

the class KafkaTopicClientImplTest method withResourceConfig.

/*
 * Config has broken hashCode & equals method:
 * https://issues.apache.org/jira/browse/KAFKA-6727
 * Because of that we cannot rely on EasyMock's default equality matching and
 * must compare the ConfigEntry sets ourselves via a custom argument matcher.
 */
private static Map<ConfigResource, Config> withResourceConfig(final ConfigResource resource, final ConfigEntry... entries) {
    // Expected entries, held as a set: ordering of entries is irrelevant.
    final Set<ConfigEntry> expectedEntries = new HashSet<>(Arrays.asList(entries));
    EasyMock.reportMatcher(new IArgumentMatcher() {

        @SuppressWarnings("unchecked")
        @Override
        public boolean matches(final Object argument) {
            final Map<ConfigResource, Config> requested = (Map<ConfigResource, Config>) argument;
            // Must be exactly one mapping, keyed by the expected resource.
            if (requested.size() != 1) {
                return false;
            }
            final Config actualConfig = requested.get(resource);
            return actualConfig != null
                && expectedEntries.equals(new HashSet<>(actualConfig.entries()));
        }

        @Override
        public void appendTo(final StringBuffer buffer) {
            buffer.append(resource).append("->").append("Config{").append(expectedEntries).append("}");
        }
    });
    // Return value is ignored by EasyMock; the matcher registration above is what counts.
    return null;
}
Also used : ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) TopicConfig(org.apache.kafka.common.config.TopicConfig) Config(org.apache.kafka.clients.admin.Config) IArgumentMatcher(org.easymock.IArgumentMatcher) EasyMock.anyObject(org.easymock.EasyMock.anyObject) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) ConfigResource(org.apache.kafka.common.config.ConfigResource) HashSet(java.util.HashSet)

Example 32 with ConfigResource

use of org.apache.kafka.common.config.ConfigResource in project ksql by confluentinc.

the class KafkaTopicClientImplTest method describeBrokerResult.

/**
 * Builds a mocked {@link DescribeConfigsResult} whose broker-level config
 * reports {@code delete.topic.enable=true} for this test's node.
 */
private DescribeConfigsResult describeBrokerResult() {
    final DescribeConfigsResult result = mock(DescribeConfigsResult.class);
    // Single broker config entry: topic deletion enabled.
    final List<ConfigEntry> brokerEntries = new ArrayList<>();
    brokerEntries.add(new ConfigEntry("delete.topic.enable", "true"));
    final ConfigResource brokerResource = new ConfigResource(ConfigResource.Type.BROKER, node.idString());
    final Map<ConfigResource, Config> brokerConfigs =
        ImmutableMap.of(brokerResource, new Config(brokerEntries));
    expect(result.all()).andReturn(KafkaFuture.completedFuture(brokerConfigs));
    replay(result);
    return result;
}
Also used : ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) TopicConfig(org.apache.kafka.common.config.TopicConfig) Config(org.apache.kafka.clients.admin.Config) ArrayList(java.util.ArrayList) DescribeConfigsResult(org.apache.kafka.clients.admin.DescribeConfigsResult) ConfigResource(org.apache.kafka.common.config.ConfigResource)

Example 33 with ConfigResource

use of org.apache.kafka.common.config.ConfigResource in project ksql by confluentinc.

the class KafkaTopicClientImpl method addTopicConfig.

/**
 * Applies the supplied config overrides to the topic, but only when they
 * differ from the topic's current dynamically-set configuration.
 *
 * @param topicName the topic whose config is to be changed.
 * @param overrides config keys/values to apply on top of the existing config.
 * @return {@code true} if an alter-configs request was issued,
 *         {@code false} if every override was already in effect.
 * @throws KafkaResponseGetFailedException if reading or writing the config fails.
 */
@Override
public boolean addTopicConfig(final String topicName, final Map<String, ?> overrides) {
    final ConfigResource topicResource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    try {
        final Map<String, String> currentConfig = topicConfig(topicName, false);
        // Skip the alter round-trip when nothing would change.
        // NOTE(review): overrides are compared before stringification, so a
        // non-String override equal to its current value still counts as
        // "changed" — confirm this is intended.
        boolean anyDifferent = false;
        for (final Map.Entry<String, ?> override : overrides.entrySet()) {
            if (!Objects.equals(currentConfig.get(override.getKey()), override.getValue())) {
                anyDifferent = true;
                break;
            }
        }
        if (!anyDifferent) {
            return false;
        }
        // alterConfigs replaces the full config, so merge overrides into the
        // existing entries rather than sending the overrides alone.
        currentConfig.putAll(toStringConfigs(overrides));
        final Set<ConfigEntry> mergedEntries = currentConfig.entrySet().stream()
            .map(e -> new ConfigEntry(e.getKey(), e.getValue()))
            .collect(Collectors.toSet());
        final Map<ConfigResource, Config> alterRequest =
            Collections.singletonMap(topicResource, new Config(mergedEntries));
        executeWithRetries(() -> adminClient.alterConfigs(alterRequest).all());
        return true;
    } catch (final Exception e) {
        throw new KafkaResponseGetFailedException("Failed to set config for Kafka Topic " + topicName, e);
    }
}
Also used : Config(org.apache.kafka.clients.admin.Config) KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException) LoggerFactory(org.slf4j.LoggerFactory) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) RetriableException(org.apache.kafka.common.errors.RetriableException) Supplier(java.util.function.Supplier) AdminClient(org.apache.kafka.clients.admin.AdminClient) ConfigResource(org.apache.kafka.common.config.ConfigResource) Lists(com.google.common.collect.Lists) Map(java.util.Map) DeleteTopicsResult(org.apache.kafka.clients.admin.DeleteTopicsResult) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) TopicConfig(org.apache.kafka.common.config.TopicConfig) Logger(org.slf4j.Logger) Collection(java.util.Collection) NewTopic(org.apache.kafka.clients.admin.NewTopic) Set(java.util.Set) KafkaFuture(org.apache.kafka.common.KafkaFuture) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException) List(java.util.List) Node(org.apache.kafka.common.Node) DescribeClusterResult(org.apache.kafka.clients.admin.DescribeClusterResult) Collections(java.util.Collections) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) Config(org.apache.kafka.clients.admin.Config) TopicConfig(org.apache.kafka.common.config.TopicConfig) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) ConfigResource(org.apache.kafka.common.config.ConfigResource) KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException) RetriableException(org.apache.kafka.common.errors.RetriableException) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) ExecutionException(java.util.concurrent.ExecutionException) 
TopicExistsException(org.apache.kafka.common.errors.TopicExistsException)

Example 34 with ConfigResource

use of org.apache.kafka.common.config.ConfigResource in project ksql by confluentinc.

the class KafkaTopicClientImpl method topicConfig.

/**
 * Fetches the configuration of the given topic as a name-to-value map.
 *
 * @param topicName       the topic to describe.
 * @param includeDefaults if {@code false}, only entries whose source is
 *                        {@code DYNAMIC_TOPIC_CONFIG} are returned, i.e.
 *                        broker defaults are filtered out.
 * @throws KafkaResponseGetFailedException if the describe call fails.
 */
private Map<String, String> topicConfig(final String topicName, final boolean includeDefaults) {
    final ConfigResource topicResource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    try {
        // describeConfigs returns a map keyed by resource; pull out our topic's Config.
        final Config topicConfigs = executeWithRetries(
            () -> adminClient.describeConfigs(Collections.singletonList(topicResource)).all())
            .get(topicResource);
        return topicConfigs.entries().stream()
            .filter(entry -> includeDefaults
                || entry.source().equals(ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG))
            .collect(Collectors.toMap(ConfigEntry::name, ConfigEntry::value));
    } catch (final Exception e) {
        throw new KafkaResponseGetFailedException("Failed to get config for Kafka Topic " + topicName, e);
    }
}
Also used : Config(org.apache.kafka.clients.admin.Config) KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException) LoggerFactory(org.slf4j.LoggerFactory) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) RetriableException(org.apache.kafka.common.errors.RetriableException) Supplier(java.util.function.Supplier) AdminClient(org.apache.kafka.clients.admin.AdminClient) ConfigResource(org.apache.kafka.common.config.ConfigResource) Lists(com.google.common.collect.Lists) Map(java.util.Map) DeleteTopicsResult(org.apache.kafka.clients.admin.DeleteTopicsResult) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) TopicConfig(org.apache.kafka.common.config.TopicConfig) Logger(org.slf4j.Logger) Collection(java.util.Collection) NewTopic(org.apache.kafka.clients.admin.NewTopic) Set(java.util.Set) KafkaFuture(org.apache.kafka.common.KafkaFuture) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException) List(java.util.List) Node(org.apache.kafka.common.Node) DescribeClusterResult(org.apache.kafka.clients.admin.DescribeClusterResult) Collections(java.util.Collections) Config(org.apache.kafka.clients.admin.Config) TopicConfig(org.apache.kafka.common.config.TopicConfig) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) ConfigResource(org.apache.kafka.common.config.ConfigResource) KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException) RetriableException(org.apache.kafka.common.errors.RetriableException) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) ExecutionException(java.util.concurrent.ExecutionException) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException)

Example 35 with ConfigResource

use of org.apache.kafka.common.config.ConfigResource in project apache-kafka-on-k8s by banzaicloud.

the class KafkaAdminClientTest method testHandleTimeout.

/**
 * Test handling timeouts.
 */
// The test is flaky. Should be re-enabled when this JIRA is fixed: https://issues.apache.org/jira/browse/KAFKA-5792
@Ignore
@Test
public void testHandleTimeout() throws Exception {
    // Single-node mock cluster driven by MockTime so timeouts can be
    // triggered deterministically via time.sleep(...) instead of real waits.
    HashMap<Integer, Node> nodes = new HashMap<>();
    MockTime time = new MockTime();
    nodes.put(0, new Node(0, "localhost", 8121));
    Cluster cluster = new Cluster("mockClusterId", nodes.values(), Collections.<PartitionInfo>emptySet(), Collections.<String>emptySet(), Collections.<String>emptySet(), nodes.get(0));
    // Reconnect backoffs forced to 1ms so the client retries promptly under MockTime.
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(time, cluster, AdminClientConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, "1", AdminClientConfig.RECONNECT_BACKOFF_MS_CONFIG, "1")) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet());
        env.kafkaClient().setNode(nodes.get(0));
        // Sanity check: the env and admin client must share this MockTime
        // instance, otherwise the sleeps below won't advance the client clock.
        assertEquals(time, env.time());
        assertEquals(env.time(), ((KafkaAdminClient) env.adminClient()).time());
        // Make a request with an extremely short timeout.
        // Then wait for it to fail by not supplying any response.
        log.info("Starting AdminClient#listTopics...");
        final ListTopicsResult result = env.adminClient().listTopics(new ListTopicsOptions().timeoutMs(1000));
        TestUtils.waitForCondition(new TestCondition() {

            @Override
            public boolean conditionMet() {
                return env.kafkaClient().hasInFlightRequests();
            }
        }, "Timed out waiting for inFlightRequests");
        // Advance well past the 1000ms request timeout; no response was
        // queued, so the listTopics future must complete exceptionally.
        time.sleep(5000);
        TestUtils.waitForCondition(new TestCondition() {

            @Override
            public boolean conditionMet() {
                return result.listings().isDone();
            }
        }, "Timed out waiting for listTopics to complete");
        assertFutureError(result.listings(), TimeoutException.class);
        log.info("Verified the error result of AdminClient#listTopics");
        // The next request should succeed.
        time.sleep(5000);
        // Queue a canned describeConfigs response for topic "foo" so the
        // follow-up request can complete normally after the earlier timeout.
        env.kafkaClient().prepareResponse(new DescribeConfigsResponse(0, Collections.singletonMap(new org.apache.kafka.common.requests.Resource(TOPIC, "foo"), new DescribeConfigsResponse.Config(ApiError.NONE, Collections.<DescribeConfigsResponse.ConfigEntry>emptySet()))));
        DescribeConfigsResult result2 = env.adminClient().describeConfigs(Collections.singleton(new ConfigResource(ConfigResource.Type.TOPIC, "foo")));
        time.sleep(5000);
        // get() throws if the future failed — succeeding here proves recovery.
        result2.values().get(new ConfigResource(ConfigResource.Type.TOPIC, "foo")).get();
    }
}
Also used : HashMap(java.util.HashMap) Node(org.apache.kafka.common.Node) Cluster(org.apache.kafka.common.Cluster) ConfigResource(org.apache.kafka.common.config.ConfigResource) TestCondition(org.apache.kafka.test.TestCondition) MockTime(org.apache.kafka.common.utils.MockTime) DescribeConfigsResponse(org.apache.kafka.common.requests.DescribeConfigsResponse) Ignore(org.junit.Ignore) Test(org.junit.Test)

Aggregations

ConfigResource (org.apache.kafka.common.config.ConfigResource)64 HashMap (java.util.HashMap)32 Config (org.apache.kafka.clients.admin.Config)23 Map (java.util.Map)22 KafkaFuture (org.apache.kafka.common.KafkaFuture)20 KafkaFutureImpl (org.apache.kafka.common.internals.KafkaFutureImpl)20 TopicConfig (org.apache.kafka.common.config.TopicConfig)18 ArrayList (java.util.ArrayList)17 ConfigEntry (org.apache.kafka.clients.admin.ConfigEntry)16 Test (org.junit.Test)15 Collection (java.util.Collection)14 TopicDescription (org.apache.kafka.clients.admin.TopicDescription)13 ConsumerConfig (org.apache.kafka.clients.consumer.ConsumerConfig)13 Node (org.apache.kafka.common.Node)13 AdminClient (org.apache.kafka.clients.admin.AdminClient)12 ProducerConfig (org.apache.kafka.clients.producer.ProducerConfig)12 Collections (java.util.Collections)11 Collectors (java.util.stream.Collectors)11 TopicMetadataAndConfig (org.apache.kafka.clients.admin.CreateTopicsResult.TopicMetadataAndConfig)11 StreamsConfig (org.apache.kafka.streams.StreamsConfig)11