Search in sources:

Example 46 with ConfigResource

Example use of org.apache.kafka.common.config.ConfigResource in the apache/kafka project.

From the class KafkaAdminClientTest, method testDescribeConfigsUnrequested.

@Test
public void testDescribeConfigsUnrequested() throws Exception {
    // One resource the client asks about, plus one the broker returns unsolicited.
    ConfigResource requested = new ConfigResource(ConfigResource.Type.TOPIC, "topic");
    ConfigResource extra = new ConfigResource(ConfigResource.Type.TOPIC, "unrequested");
    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        // Canned broker response that carries BOTH resources, even though only
        // `requested` will be part of the describeConfigs() call below.
        DescribeConfigsResponseData.DescribeConfigsResult requestedEntry =
            new DescribeConfigsResponseData.DescribeConfigsResult()
                .setResourceName(requested.name())
                .setResourceType(requested.type().id())
                .setErrorCode(Errors.NONE.code())
                .setConfigs(emptyList());
        DescribeConfigsResponseData.DescribeConfigsResult extraEntry =
            new DescribeConfigsResponseData.DescribeConfigsResult()
                .setResourceName(extra.name())
                .setResourceType(extra.type().id())
                .setErrorCode(Errors.NONE.code())
                .setConfigs(emptyList());
        DescribeConfigsResponseData responseData =
            new DescribeConfigsResponseData().setResults(asList(requestedEntry, extraEntry));
        env.kafkaClient().prepareResponse(new DescribeConfigsResponse(responseData));
        Map<ConfigResource, KafkaFuture<Config>> result =
            env.adminClient().describeConfigs(asList(requested)).values();
        // Only the requested resource may surface; the unsolicited one is dropped.
        assertEquals(new HashSet<>(asList(requested)), result.keySet());
        assertNotNull(result.get(requested).get());
        assertNull(result.get(extra));
    }
}
Also used : DescribeConfigsResponseData(org.apache.kafka.common.message.DescribeConfigsResponseData) KafkaFuture(org.apache.kafka.common.KafkaFuture) ConfigResource(org.apache.kafka.common.config.ConfigResource) DescribeConfigsResponse(org.apache.kafka.common.requests.DescribeConfigsResponse) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)

Example 47 with ConfigResource

Example use of org.apache.kafka.common.config.ConfigResource in the apache/kafka project.

From the class KafkaAdminClientTest, method testDescribeBrokerAndLogConfigs.

@Test
public void testDescribeBrokerAndLogConfigs() throws Exception {
    // A broker resource and its broker-logger counterpart, both for node 0.
    ConfigResource broker = new ConfigResource(ConfigResource.Type.BROKER, "0");
    ConfigResource brokerLogger = new ConfigResource(ConfigResource.Type.BROKER_LOGGER, "0");
    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        // Canned response (pinned to node 0) covering both resources with no error.
        DescribeConfigsResponseData.DescribeConfigsResult brokerEntry =
            new DescribeConfigsResponseData.DescribeConfigsResult()
                .setResourceName(broker.name())
                .setResourceType(broker.type().id())
                .setErrorCode(Errors.NONE.code())
                .setConfigs(emptyList());
        DescribeConfigsResponseData.DescribeConfigsResult loggerEntry =
            new DescribeConfigsResponseData.DescribeConfigsResult()
                .setResourceName(brokerLogger.name())
                .setResourceType(brokerLogger.type().id())
                .setErrorCode(Errors.NONE.code())
                .setConfigs(emptyList());
        DescribeConfigsResponseData responseData =
            new DescribeConfigsResponseData().setResults(asList(brokerEntry, loggerEntry));
        env.kafkaClient().prepareResponseFrom(
            new DescribeConfigsResponse(responseData), env.cluster().nodeById(0));
        Map<ConfigResource, KafkaFuture<Config>> result =
            env.adminClient().describeConfigs(asList(broker, brokerLogger)).values();
        // Both resources must come back, and both futures must complete cleanly.
        assertEquals(new HashSet<>(asList(broker, brokerLogger)), result.keySet());
        result.get(broker).get();
        result.get(brokerLogger).get();
    }
}
Also used : DescribeConfigsResponseData(org.apache.kafka.common.message.DescribeConfigsResponseData) KafkaFuture(org.apache.kafka.common.KafkaFuture) ConfigResource(org.apache.kafka.common.config.ConfigResource) DescribeConfigsResponse(org.apache.kafka.common.requests.DescribeConfigsResponse) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)

Example 48 with ConfigResource

Example use of org.apache.kafka.common.config.ConfigResource in the apache/kafka project.

From the class MockAdminClient, method incrementalAlterConfigs.

@Override
public synchronized AlterConfigsResult incrementalAlterConfigs(Map<ConfigResource, Collection<AlterConfigOp>> configs, AlterConfigsOptions options) {
    // One future per resource; each is resolved immediately since the mock
    // handler runs synchronously.
    final Map<ConfigResource, KafkaFuture<Void>> outcome = new HashMap<>();
    configs.forEach((resource, ops) -> {
        KafkaFutureImpl<Void> future = new KafkaFutureImpl<>();
        outcome.put(resource, future);
        Throwable failure = handleIncrementalResourceAlteration(resource, ops);
        if (failure != null) {
            future.completeExceptionally(failure);
        } else {
            future.complete(null);
        }
    });
    return new AlterConfigsResult(outcome);
}
Also used : KafkaFuture(org.apache.kafka.common.KafkaFuture) HashMap(java.util.HashMap) TopicCollection(org.apache.kafka.common.TopicCollection) TopicNameCollection(org.apache.kafka.common.TopicCollection.TopicNameCollection) TopicIdCollection(org.apache.kafka.common.TopicCollection.TopicIdCollection) Collection(java.util.Collection) KafkaFutureImpl(org.apache.kafka.common.internals.KafkaFutureImpl) HashMap(java.util.HashMap) Map(java.util.Map) ConfigResource(org.apache.kafka.common.config.ConfigResource)

Example 49 with ConfigResource

Example use of org.apache.kafka.common.config.ConfigResource in the apache/kafka project.

From the class MockAdminClient, method describeConfigs.

@Override
public synchronized DescribeConfigsResult describeConfigs(Collection<ConfigResource> resources, DescribeConfigsOptions options) {
    final Map<ConfigResource, KafkaFuture<Config>> futures = new HashMap<>();
    // Simulated timeout: every requested resource fails with TimeoutException
    // and one pending timeout is consumed.
    if (timeoutNextRequests > 0) {
        --timeoutNextRequests;
        for (ConfigResource resource : resources) {
            KafkaFutureImpl<Config> timedOut = new KafkaFutureImpl<>();
            timedOut.completeExceptionally(new TimeoutException());
            futures.put(resource, timedOut);
        }
        return new DescribeConfigsResult(futures);
    }
    // Normal path: resolve each resource from the mock's in-memory state,
    // surfacing any lookup failure through the corresponding future.
    for (ConfigResource resource : resources) {
        KafkaFutureImpl<Config> outcome = new KafkaFutureImpl<>();
        futures.put(resource, outcome);
        try {
            outcome.complete(getResourceDescription(resource));
        } catch (Throwable t) {
            outcome.completeExceptionally(t);
        }
    }
    return new DescribeConfigsResult(futures);
}
Also used : KafkaFuture(org.apache.kafka.common.KafkaFuture) HashMap(java.util.HashMap) KafkaFutureImpl(org.apache.kafka.common.internals.KafkaFutureImpl) ConfigResource(org.apache.kafka.common.config.ConfigResource) TimeoutException(org.apache.kafka.common.errors.TimeoutException)

Example 50 with ConfigResource

Example use of org.apache.kafka.common.config.ConfigResource in the apache/kafka project.

From the class InternalTopicManager, method validate.

/**
 * Validates the internal topics passed.
 *
 * The validation of the internal topics verifies if the topics:
 * - are missing on the brokers
 * - have the expected number of partitions
 * - have configured a clean-up policy that avoids data loss
 *
 * Retries until every topic has been checked or {@code retryTimeoutMs}
 * (captured as {@code deadline}) elapses, in which case a timeout is raised
 * by {@code maybeThrowTimeoutException}.
 *
 * @param topicConfigs internal topics to validate
 *
 * @return validation results that contains
 *         - the set of missing internal topics on the brokers
 *         - descriptions of misconfigurations per topic
 */
public ValidationResult validate(final Map<String, InternalTopicConfig> topicConfigs) {
    log.info("Starting to validate internal topics {}.", topicConfigs.keySet());
    final long now = time.milliseconds();
    final long deadline = now + retryTimeoutMs;
    final ValidationResult validationResult = new ValidationResult();
    // Two independent work lists: one for topic descriptions (partition counts)
    // and one for topic configs (cleanup policy). Each is drained separately.
    final Set<String> topicDescriptionsStillToValidate = new HashSet<>(topicConfigs.keySet());
    final Set<String> topicConfigsStillToValidate = new HashSet<>(topicConfigs.keySet());
    while (!topicDescriptionsStillToValidate.isEmpty() || !topicConfigsStillToValidate.isEmpty()) {
        // Fire a fresh describe request only for the topics still outstanding.
        Map<String, KafkaFuture<TopicDescription>> descriptionsForTopic = Collections.emptyMap();
        if (!topicDescriptionsStillToValidate.isEmpty()) {
            final DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(topicDescriptionsStillToValidate);
            descriptionsForTopic = describeTopicsResult.topicNameValues();
        }
        Map<String, KafkaFuture<Config>> configsForTopic = Collections.emptyMap();
        if (!topicConfigsStillToValidate.isEmpty()) {
            final DescribeConfigsResult describeConfigsResult = adminClient.describeConfigs(topicConfigsStillToValidate.stream().map(topic -> new ConfigResource(Type.TOPIC, topic)).collect(Collectors.toSet()));
            // Re-key from ConfigResource back to the plain topic name.
            configsForTopic = describeConfigsResult.values().entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().name(), Map.Entry::getValue));
        }
        // Inner loop polls the outstanding futures. NOTE(review): termination
        // relies on doValidateTopic removing completed entries from
        // descriptionsForTopic/configsForTopic (and from the StillToValidate
        // sets) — confirm against doValidateTopic's implementation.
        while (!descriptionsForTopic.isEmpty() || !configsForTopic.isEmpty()) {
            if (!descriptionsForTopic.isEmpty()) {
                doValidateTopic(validationResult, descriptionsForTopic, topicConfigs, topicDescriptionsStillToValidate, (streamsSide, brokerSide) -> validatePartitionCount(validationResult, streamsSide, brokerSide));
            }
            if (!configsForTopic.isEmpty()) {
                doValidateTopic(validationResult, configsForTopic, topicConfigs, topicConfigsStillToValidate, (streamsSide, brokerSide) -> validateCleanupPolicy(validationResult, streamsSide, brokerSide));
            }
            maybeThrowTimeoutException(Arrays.asList(topicDescriptionsStillToValidate, topicConfigsStillToValidate), deadline, String.format("Could not validate internal topics within %d milliseconds. " + "This can happen if the Kafka cluster is temporarily not available.", retryTimeoutMs));
            // Back off briefly before polling the still-incomplete futures again.
            if (!descriptionsForTopic.isEmpty() || !configsForTopic.isEmpty()) {
                Utils.sleep(100);
            }
        }
        // Sleep between retry rounds when some topics still need re-requesting.
        maybeSleep(Arrays.asList(topicDescriptionsStillToValidate, topicConfigsStillToValidate), deadline, "validated");
    }
    log.info("Completed validation of internal topics {}.", topicConfigs.keySet());
    return validationResult;
}
Also used : ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) KafkaFuture(org.apache.kafka.common.KafkaFuture) DescribeTopicsResult(org.apache.kafka.clients.admin.DescribeTopicsResult) DescribeConfigsResult(org.apache.kafka.clients.admin.DescribeConfigsResult) ConfigResource(org.apache.kafka.common.config.ConfigResource) HashSet(java.util.HashSet)

Aggregations

ConfigResource (org.apache.kafka.common.config.ConfigResource)64 HashMap (java.util.HashMap)32 Config (org.apache.kafka.clients.admin.Config)23 Map (java.util.Map)22 KafkaFuture (org.apache.kafka.common.KafkaFuture)20 KafkaFutureImpl (org.apache.kafka.common.internals.KafkaFutureImpl)20 TopicConfig (org.apache.kafka.common.config.TopicConfig)18 ArrayList (java.util.ArrayList)17 ConfigEntry (org.apache.kafka.clients.admin.ConfigEntry)16 Test (org.junit.Test)15 Collection (java.util.Collection)14 TopicDescription (org.apache.kafka.clients.admin.TopicDescription)13 ConsumerConfig (org.apache.kafka.clients.consumer.ConsumerConfig)13 Node (org.apache.kafka.common.Node)13 AdminClient (org.apache.kafka.clients.admin.AdminClient)12 ProducerConfig (org.apache.kafka.clients.producer.ProducerConfig)12 Collections (java.util.Collections)11 Collectors (java.util.stream.Collectors)11 TopicMetadataAndConfig (org.apache.kafka.clients.admin.CreateTopicsResult.TopicMetadataAndConfig)11 StreamsConfig (org.apache.kafka.streams.StreamsConfig)11