
Example 1 with KafkaFuture

use of org.apache.kafka.common.KafkaFuture in project ksql by confluentinc.

the class KafkaTopicClientImpl method deleteTopics.

@Override
public void deleteTopics(final List<String> topicsToDelete) {
    if (!isDeleteTopicEnabled) {
        log.info("Cannot delete topics since 'delete.topic.enable' is false. ");
        return;
    }
    final DeleteTopicsResult deleteTopicsResult = adminClient.deleteTopics(topicsToDelete);
    final Map<String, KafkaFuture<Void>> results = deleteTopicsResult.values();
    List<String> failList = Lists.newArrayList();
    for (final Map.Entry<String, KafkaFuture<Void>> entry : results.entrySet()) {
        try {
            entry.getValue().get(30, TimeUnit.SECONDS);
        } catch (Exception e) {
            failList.add(entry.getKey());
        }
    }
    if (!failList.isEmpty()) {
        throw new KsqlException("Failed to clean up topics: " + failList.stream().collect(Collectors.joining(",")));
    }
}
Also used: KafkaFuture(org.apache.kafka.common.KafkaFuture), DeleteTopicsResult(org.apache.kafka.clients.admin.DeleteTopicsResult), Map(java.util.Map), KafkaTopicException(io.confluent.ksql.exception.KafkaTopicException), RetriableException(org.apache.kafka.common.errors.RetriableException), KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException), ExecutionException(java.util.concurrent.ExecutionException), TopicExistsException(org.apache.kafka.common.errors.TopicExistsException)
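
As a related pattern, DeleteTopicsResult also exposes an aggregate future via all(). A minimal sketch of waiting on that combined result instead of the per-topic futures; the AdminClient handle, logger, and topic list here are assumptions, not taken from the KSQL class:

// Sketch only: 'adminClient', 'log' and the topic list are illustrative assumptions.
void deleteTopicsOrLog(final AdminClient adminClient, final List<String> topicsToDelete) throws Exception {
    final DeleteTopicsResult result = adminClient.deleteTopics(topicsToDelete);
    try {
        // all() returns one KafkaFuture<Void> that completes only when every deletion
        // succeeds; if any topic fails, the combined future fails without identifying it.
        result.all().get(30, TimeUnit.SECONDS);
    } catch (final ExecutionException e) {
        // The underlying error (e.g. UnknownTopicOrPartitionException) is the cause.
        log.warn("Topic deletion failed: {}", e.getCause().getMessage());
    }
}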

Example 2 with KafkaFuture

use of org.apache.kafka.common.KafkaFuture in project apache-kafka-on-k8s by banzaicloud.

the class TopicAdmin method createTopics.

/**
 * Attempt to create the topics described by the given definitions, returning all of the names of those topics that
 * were created by this request. Any existing topics with the same name are unchanged, and the names of such topics
 * are excluded from the result.
 * <p>
 * If multiple topic definitions have the same topic name, the last one with that name will be used.
 * <p>
 * Apache Kafka added support for creating topics in 0.10.1.0, so this method works as expected with that and later versions.
 * With brokers older than 0.10.1.0, this method is unable to create topics and always returns an empty set.
 *
 * @param topics the specifications of the topics
 * @return the names of the topics that were created by this operation; never null but possibly empty
 * @throws ConnectException            if an error occurs, the operation takes too long, or the thread is interrupted while
 *                                     attempting to perform this operation
 */
public Set<String> createTopics(NewTopic... topics) {
    Map<String, NewTopic> topicsByName = new HashMap<>();
    if (topics != null) {
        for (NewTopic topic : topics) {
            if (topic != null)
                topicsByName.put(topic.name(), topic);
        }
    }
    if (topicsByName.isEmpty())
        return Collections.emptySet();
    String bootstrapServers = bootstrapServers();
    String topicNameList = Utils.join(topicsByName.keySet(), "', '");
    // Attempt to create any missing topics
    CreateTopicsOptions args = new CreateTopicsOptions().validateOnly(false);
    Map<String, KafkaFuture<Void>> newResults = admin.createTopics(topicsByName.values(), args).values();
    // Iterate over each future so that we can handle individual failures like when some topics already exist
    Set<String> newlyCreatedTopicNames = new HashSet<>();
    for (Map.Entry<String, KafkaFuture<Void>> entry : newResults.entrySet()) {
        String topic = entry.getKey();
        try {
            entry.getValue().get();
            log.info("Created topic {} on brokers at {}", topicsByName.get(topic), bootstrapServers);
            newlyCreatedTopicNames.add(topic);
        } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof TopicExistsException) {
                log.debug("Found existing topic '{}' on the brokers at {}", topic, bootstrapServers);
                continue;
            }
            if (cause instanceof UnsupportedVersionException) {
                log.debug("Unable to create topic(s) '{}' since the brokers at {} do not support the CreateTopics API.", " Falling back to assume topic(s) exist or will be auto-created by the broker.", topicNameList, bootstrapServers);
                return Collections.emptySet();
            }
            if (cause instanceof ClusterAuthorizationException) {
                log.debug("Not authorized to create topic(s) '{}'." + " Falling back to assume topic(s) exist or will be auto-created by the broker.", topicNameList, bootstrapServers);
                return Collections.emptySet();
            }
            if (cause instanceof TimeoutException) {
                // Timed out waiting for the operation to complete
                throw new ConnectException("Timed out while checking for or creating topic(s) '" + topicNameList + "'." + " This could indicate a connectivity issue, unavailable topic partitions, or if" + " this is your first use of the topic it may have taken too long to create.", cause);
            }
            throw new ConnectException("Error while attempting to create/find topic(s) '" + topicNameList + "'", e);
        } catch (InterruptedException e) {
            Thread.interrupted();
            throw new ConnectException("Interrupted while attempting to create/find topic(s) '" + topicNameList + "'", e);
        }
    }
    return newlyCreatedTopicNames;
}
Also used: KafkaFuture(org.apache.kafka.common.KafkaFuture), HashMap(java.util.HashMap), CreateTopicsOptions(org.apache.kafka.clients.admin.CreateTopicsOptions), TopicExistsException(org.apache.kafka.common.errors.TopicExistsException), NewTopic(org.apache.kafka.clients.admin.NewTopic), ExecutionException(java.util.concurrent.ExecutionException), Map(java.util.Map), ClusterAuthorizationException(org.apache.kafka.common.errors.ClusterAuthorizationException), HashSet(java.util.HashSet), UnsupportedVersionException(org.apache.kafka.common.errors.UnsupportedVersionException), TimeoutException(org.apache.kafka.common.errors.TimeoutException), ConnectException(org.apache.kafka.connect.errors.ConnectException)
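
A caller-side sketch of the same create-if-missing pattern against a plain AdminClient; the client handle, topic name, partition count, and replication factor below are assumptions rather than part of TopicAdmin:

// Sketch only: 'admin' and the topic settings are illustrative assumptions.
boolean createTopicIfMissing(final AdminClient admin, final String name) throws Exception {
    final NewTopic topic = new NewTopic(name, 1, (short) 1);
    final KafkaFuture<Void> future = admin.createTopics(Collections.singleton(topic)).values().get(name);
    try {
        future.get();
        return true;                 // the topic was newly created
    } catch (final ExecutionException e) {
        if (e.getCause() instanceof TopicExistsException) {
            return false;            // it already existed, which is treated as success
        }
        throw e;
    }
}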

Example 3 with KafkaFuture

use of org.apache.kafka.common.KafkaFuture in project apache-kafka-on-k8s by banzaicloud.

the class MockAdminClient method describeConfigs.

@Override
public DescribeConfigsResult describeConfigs(Collection<ConfigResource> resources, DescribeConfigsOptions options) {
    Map<ConfigResource, KafkaFuture<Config>> configDescriptions = new HashMap<>();
    for (ConfigResource resource : resources) {
        if (resource.type() == ConfigResource.Type.TOPIC) {
            Map<String, String> configs = allTopics.get(resource.name()).configs;
            List<ConfigEntry> configEntries = new ArrayList<>();
            for (Map.Entry<String, String> entry : configs.entrySet()) {
                configEntries.add(new ConfigEntry(entry.getKey(), entry.getValue()));
            }
            KafkaFutureImpl<Config> future = new KafkaFutureImpl<>();
            future.complete(new Config(configEntries));
            configDescriptions.put(resource, future);
        } else {
            throw new UnsupportedOperationException("Not implemented yet");
        }
    }
    return new DescribeConfigsResult(configDescriptions);
}
Also used: KafkaFuture(org.apache.kafka.common.KafkaFuture), HashMap(java.util.HashMap), ArrayList(java.util.ArrayList), KafkaFutureImpl(org.apache.kafka.common.internals.KafkaFutureImpl), ConfigResource(org.apache.kafka.common.config.ConfigResource), Map(java.util.Map)
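
For context, a caller consumes such a DescribeConfigsResult through its per-resource futures. A short sketch, where the AdminClient, topic name, and the "cleanup.policy" lookup are assumptions:

// Sketch only: 'admin', the topic name and the config key are illustrative.
String cleanupPolicy(final AdminClient admin, final String topicName) throws Exception {
    final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
    final Map<ConfigResource, KafkaFuture<Config>> futures =
            admin.describeConfigs(Collections.singleton(resource)).values();
    // The future completes with the Config, or fails (ExecutionException) if the topic
    // is unknown or the caller is not authorized to describe it.
    final Config config = futures.get(resource).get();
    final ConfigEntry entry = config.get("cleanup.policy");
    return entry == null ? null : entry.value();
}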

Example 4 with KafkaFuture

use of org.apache.kafka.common.KafkaFuture in project apache-kafka-on-k8s by banzaicloud.

the class MockAdminClient method deleteTopics.

@Override
public DeleteTopicsResult deleteTopics(Collection<String> topicsToDelete, DeleteTopicsOptions options) {
    Map<String, KafkaFuture<Void>> deleteTopicsResult = new HashMap<>();
    if (timeoutNextRequests > 0) {
        for (final String topicName : topicsToDelete) {
            KafkaFutureImpl<Void> future = new KafkaFutureImpl<>();
            future.completeExceptionally(new TimeoutException());
            deleteTopicsResult.put(topicName, future);
        }
        --timeoutNextRequests;
        return new DeleteTopicsResult(deleteTopicsResult);
    }
    for (final String topicName : topicsToDelete) {
        KafkaFutureImpl<Void> future = new KafkaFutureImpl<>();
        if (allTopics.remove(topicName) == null) {
            future.completeExceptionally(new UnknownTopicOrPartitionException(String.format("Topic %s does not exist.", topicName)));
        } else {
            future.complete(null);
        }
        deleteTopicsResult.put(topicName, future);
    }
    return new DeleteTopicsResult(deleteTopicsResult);
}
Also used: KafkaFuture(org.apache.kafka.common.KafkaFuture), HashMap(java.util.HashMap), UnknownTopicOrPartitionException(org.apache.kafka.common.errors.UnknownTopicOrPartitionException), KafkaFutureImpl(org.apache.kafka.common.internals.KafkaFutureImpl), TimeoutException(org.apache.kafka.common.errors.TimeoutException)
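
The mock works because KafkaFutureImpl lets the test decide how each future resolves. A small illustrative sketch (not taken from the mock's tests) of how an exceptionally completed future surfaces to callers:

// Sketch only: the method name and the topic message are illustrative.
void showFailedFuture() throws InterruptedException {
    final KafkaFutureImpl<Void> failure = new KafkaFutureImpl<>();
    failure.completeExceptionally(new UnknownTopicOrPartitionException("Topic foo does not exist."));
    try {
        failure.get();
    } catch (final ExecutionException e) {
        // The stored exception comes back as the cause, so callers branch on
        // e.getCause(), exactly as the createTopics/deleteTopics examples above do.
        assert e.getCause() instanceof UnknownTopicOrPartitionException;
    }
}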

Example 5 with KafkaFuture

use of org.apache.kafka.common.KafkaFuture in project apache-kafka-on-k8s by banzaicloud.

the class TaskManagerTest method shouldSendPurgeData.

@Test
public void shouldSendPurgeData() {
    final KafkaFutureImpl<DeletedRecords> futureDeletedRecords = new KafkaFutureImpl<>();
    final Map<TopicPartition, RecordsToDelete> recordsToDelete = Collections.singletonMap(t1p1, RecordsToDelete.beforeOffset(5L));
    final DeleteRecordsResult deleteRecordsResult = new DeleteRecordsResult(Collections.singletonMap(t1p1, (KafkaFuture<DeletedRecords>) futureDeletedRecords));
    futureDeletedRecords.complete(null);
    EasyMock.expect(active.recordsToDelete()).andReturn(Collections.singletonMap(t1p1, 5L)).times(2);
    EasyMock.expect(adminClient.deleteRecords(recordsToDelete)).andReturn(deleteRecordsResult).times(2);
    replay();
    taskManager.maybePurgeCommitedRecords();
    taskManager.maybePurgeCommitedRecords();
    verify(active, adminClient);
}
Also used: KafkaFuture(org.apache.kafka.common.KafkaFuture), TopicPartition(org.apache.kafka.common.TopicPartition), DeletedRecords(org.apache.kafka.clients.admin.DeletedRecords), RecordsToDelete(org.apache.kafka.clients.admin.RecordsToDelete), KafkaFutureImpl(org.apache.kafka.common.internals.KafkaFutureImpl), DeleteRecordsResult(org.apache.kafka.clients.admin.DeleteRecordsResult), Test(org.junit.Test)
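
Outside the mocked test, the per-partition futures returned by deleteRecords() carry the new low watermarks. A hedged sketch of reading them, where the AdminClient handle, partition, and offset are assumptions:

// Sketch only: 'admin', the partition and the offset are illustrative assumptions.
void purgeBeforeOffset(final AdminClient admin, final TopicPartition partition, final long offset) throws Exception {
    final DeleteRecordsResult result =
            admin.deleteRecords(Collections.singletonMap(partition, RecordsToDelete.beforeOffset(offset)));
    for (final Map.Entry<TopicPartition, KafkaFuture<DeletedRecords>> entry : result.lowWatermarks().entrySet()) {
        // Each future completes with the partition's new low watermark, or fails with an
        // ExecutionException if the records could not be deleted.
        final DeletedRecords deleted = entry.getValue().get();
        System.out.println(entry.getKey() + " low watermark is now " + deleted.lowWatermark());
    }
}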

Aggregations

KafkaFuture (org.apache.kafka.common.KafkaFuture): 70
HashMap (java.util.HashMap): 51
Map (java.util.Map): 37
KafkaFutureImpl (org.apache.kafka.common.internals.KafkaFutureImpl): 31
ExecutionException (java.util.concurrent.ExecutionException): 22
TimeoutException (org.apache.kafka.common.errors.TimeoutException): 21
ArrayList (java.util.ArrayList): 15
UnknownTopicOrPartitionException (org.apache.kafka.common.errors.UnknownTopicOrPartitionException): 15
Test (org.junit.jupiter.api.Test): 15
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 14
TopicPartition (org.apache.kafka.common.TopicPartition): 13
ConfigResource (org.apache.kafka.common.config.ConfigResource): 12
HashSet (java.util.HashSet): 11
TopicExistsException (org.apache.kafka.common.errors.TopicExistsException): 10
AbstractResponse (org.apache.kafka.common.requests.AbstractResponse): 8
UnsupportedVersionException (org.apache.kafka.common.errors.UnsupportedVersionException): 7
ChannelBuilder (org.apache.kafka.common.network.ChannelBuilder): 7
DescribeTopicsResult (org.apache.kafka.clients.admin.DescribeTopicsResult): 6
Node (org.apache.kafka.common.Node): 6
TopicPartitionReplica (org.apache.kafka.common.TopicPartitionReplica): 6