Example 91 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

Class K8sImpl, method updateResource.

@Override
public Future<KafkaTopic> updateResource(KafkaTopic topicResource) {
    Promise<KafkaTopic> handler = Promise.promise();
    // The Fabric8 client call blocks, so run it off the Vert.x event loop
    vertx.executeBlocking(future -> {
        try {
            KafkaTopic kafkaTopic = operation()
                    .inNamespace(namespace)
                    .withName(topicResource.getMetadata().getName())
                    .patch(topicResource);
            LOGGER.debug("KafkaTopic {} updated with version {}->{}",
                    kafkaTopic != null && kafkaTopic.getMetadata() != null ? kafkaTopic.getMetadata().getName() : null,
                    topicResource.getMetadata() != null ? topicResource.getMetadata().getResourceVersion() : null,
                    kafkaTopic != null && kafkaTopic.getMetadata() != null ? kafkaTopic.getMetadata().getResourceVersion() : null);
            future.complete(kafkaTopic);
        } catch (Exception e) {
            future.fail(e);
        }
    }, handler);
    return handler.future();
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) KubernetesClientException(io.fabric8.kubernetes.client.KubernetesClientException)
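
For context, a minimal caller sketch of the method above. It assumes an already-constructed K8sImpl instance (the names k8s and existing are illustrative, not taken from the source) and derives the updated resource with the Strimzi KafkaTopicBuilder copy constructor:

KafkaTopic updated = new KafkaTopicBuilder(existing)    // start from the current resource
        .editSpec()
            .withPartitions(12)                         // illustrative spec change
        .endSpec()
        .build();
k8s.updateResource(updated).onComplete(ar -> {
    if (ar.succeeded()) {
        // ar.result() is the KafkaTopic returned by the API server after the patch
    } else {
        // ar.cause() carries the failure, e.g. a KubernetesClientException
    }
});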

Example 92 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

Class K8sTopicWatcher, method eventReceived.

@Override
public void eventReceived(Action action, KafkaTopic kafkaTopic) {
    ObjectMeta metadata = kafkaTopic.getMetadata();
    Map<String, String> labels = metadata.getLabels();
    if (kafkaTopic.getSpec() != null) {
        LogContext logContext = LogContext.kubeWatch(action, kafkaTopic).withKubeTopic(kafkaTopic);
        String name = metadata.getName();
        String kind = kafkaTopic.getKind();
        if (!initReconcileFuture.isComplete()) {
            LOGGER.debugCr(logContext.toReconciliation(), "Ignoring initial event for {} {} during initial reconcile", kind, name);
            return;
        }
        if (action.equals(Action.ERROR)) {
            LOGGER.errorCr(logContext.toReconciliation(), "Watch received action=ERROR for {} {} {}", kind, name, kafkaTopic);
        } else {
            PauseAnnotationChanges pauseAnnotationChanges = pausedAnnotationChanged(kafkaTopic);
            if (action.equals(Action.DELETED) || shouldReconcile(kafkaTopic, metadata, pauseAnnotationChanges.isChanged())) {
                if (pauseAnnotationChanges.isResourcePausedByAnno()) {
                    topicOperator.pausedTopicCounter.getAndIncrement();
                } else if (pauseAnnotationChanges.isResourceUnpausedByAnno()) {
                    topicOperator.pausedTopicCounter.getAndDecrement();
                }
                LOGGER.infoCr(logContext.toReconciliation(), "event {} on resource {} generation={}, labels={}", action, name, metadata.getGeneration(), labels);
                Handler<AsyncResult<Void>> resultHandler = ar -> {
                    if (ar.succeeded()) {
                        LOGGER.infoCr(logContext.toReconciliation(), "Success processing event {} on resource {} with labels {}", action, name, labels);
                    } else {
                        String message;
                        if (ar.cause() instanceof InvalidTopicException) {
                            message = kind + " " + name + " has an invalid spec section: " + ar.cause().getMessage();
                            LOGGER.errorCr(logContext.toReconciliation(), message);
                        } else {
                            message = "Failure processing " + kind + " watch event " + action + " on resource " + name + " with labels " + labels + ": " + ar.cause().getMessage();
                            LOGGER.errorCr(logContext.toReconciliation(), message, ar.cause());
                        }
                        topicOperator.enqueue(logContext, topicOperator.new Event(logContext, kafkaTopic, message, TopicOperator.EventType.WARNING, errorResult -> {
                        }));
                    }
                };
                topicOperator.onResourceEvent(logContext, kafkaTopic, action).onComplete(resultHandler);
            } else {
                LOGGER.debugCr(logContext.toReconciliation(), "Ignoring {} to {} {} because metadata.generation==status.observedGeneration", action, kind, name);
            }
        }
    }
}
Also used : Objects(java.util.Objects) WatcherException(io.fabric8.kubernetes.client.WatcherException) ReconciliationLogger(io.strimzi.operator.common.ReconciliationLogger) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) ObjectMeta(io.fabric8.kubernetes.api.model.ObjectMeta) Map(java.util.Map) Watcher(io.fabric8.kubernetes.client.Watcher) Annotations(io.strimzi.operator.common.Annotations) AsyncResult(io.vertx.core.AsyncResult) Handler(io.vertx.core.Handler) Future(io.vertx.core.Future)
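
The pausedAnnotationChanged branch above reacts to the strimzi.io/pause-reconciliation annotation. A minimal sketch of pausing a topic so this watcher sees the change (the name existing is illustrative, not from the source):

KafkaTopic paused = new KafkaTopicBuilder(existing)
        .editMetadata()
            // documented Strimzi annotation that pauses reconciliation of this resource
            .addToAnnotations("strimzi.io/pause-reconciliation", "true")
        .endMetadata()
        .build();
// applying this change arrives here as eventReceived(Action.MODIFIED, paused)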

Example 93 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

Class TopicSerialization, method toTopicResource.

/**
 * Create a resource to reflect the given Topic.
 */
public static KafkaTopic toTopicResource(Topic topic, Labels labels) {
    ResourceName resourceName = topic.getOrAsKubeName();
    ObjectMeta om = topic.getMetadata();
    Map<String, String> lbls = new HashMap<>(labels.labels());
    if (om != null) {
        om.setName(resourceName.toString());
        if (topic.getMetadata().getLabels() != null)
            lbls.putAll(topic.getMetadata().getLabels());
        om.setLabels(lbls);
        om.setOwnerReferences(topic.getMetadata().getOwnerReferences());
        om.setAnnotations(topic.getMetadata().getAnnotations());
    } else {
        om = new ObjectMetaBuilder().withName(resourceName.toString()).withLabels(lbls).build();
    }
    KafkaTopic kt = new KafkaTopicBuilder().withMetadata(om).withNewSpec().withTopicName(topic.getTopicName().toString()).withPartitions(topic.getNumPartitions()).withReplicas((int) topic.getNumReplicas()).withConfig(new LinkedHashMap<>(topic.getConfig())).endSpec().build();
    // the builder initializes annotations to an empty map; carry over the original annotations (which may be null)
    if (topic.getMetadata() != null)
        kt.getMetadata().setAnnotations(topic.getMetadata().getAnnotations());
    return kt;
}
Also used : ObjectMeta(io.fabric8.kubernetes.api.model.ObjectMeta) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) KafkaTopicBuilder(io.strimzi.api.kafka.model.KafkaTopicBuilder) ObjectMetaBuilder(io.fabric8.kubernetes.api.model.ObjectMetaBuilder)
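
For comparison, a KafkaTopic of the same shape as the one toTopicResource produces can be built directly with the Strimzi builder; a sketch with illustrative names and values:

KafkaTopic kt = new KafkaTopicBuilder()
        .withNewMetadata()
            .withName("my-topic")
            .withLabels(Map.of("strimzi.io/cluster", "my-cluster"))
        .endMetadata()
        .withNewSpec()
            .withTopicName("my-topic")
            .withPartitions(3)
            .withReplicas(3)
            .withConfig(Map.of("retention.ms", "7200000"))  // topic config as a String -> Object map
        .endSpec()
        .build();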

Example 94 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

Class TopicST, method testMoreReplicasThanAvailableBrokers.

@ParallelTest
void testMoreReplicasThanAvailableBrokers(ExtensionContext extensionContext) {
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    int topicReplicationFactor = 5;
    int topicPartitions = 5;
    KafkaTopic kafkaTopic = KafkaTopicTemplates.topic(TOPIC_CLUSTER_NAME, topicName, topicPartitions, topicReplicationFactor, 1, namespace).build();
    resourceManager.createResource(extensionContext, false, kafkaTopic);
    assertThat("Topic exists in Kafka CR (Kubernetes)", hasTopicInCRK8s(kafkaTopic, topicName));
    assertThat("Topic doesn't exists in Kafka itself", !hasTopicInKafka(topicName, TOPIC_CLUSTER_NAME));
    String errorMessage = "org.apache.kafka.common.errors.InvalidReplicationFactorException: Replication factor: 5 larger than available brokers: 3";
    KafkaTopicUtils.waitForKafkaTopicNotReady(namespace, topicName);
    KafkaTopicStatus kafkaTopicStatus = KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get().getStatus();
    assertThat(kafkaTopicStatus.getConditions().get(0).getMessage(), containsString(errorMessage));
    assertThat(kafkaTopicStatus.getConditions().get(0).getReason(), containsString("CompletionException"));
    LOGGER.info("Delete topic {}", topicName);
    cmdKubeClient(namespace).deleteByName("kafkatopic", topicName);
    KafkaTopicUtils.waitForKafkaTopicDeletion(namespace, topicName);
    topicReplicationFactor = 3;
    final String newTopicName = "topic-example-new";
    kafkaTopic = KafkaTopicTemplates.topic(TOPIC_CLUSTER_NAME, newTopicName, topicPartitions, topicReplicationFactor).editMetadata().withNamespace(namespace).endMetadata().build();
    resourceManager.createResource(extensionContext, kafkaTopic);
    assertThat("Topic exists in Kafka itself", hasTopicInKafka(newTopicName, TOPIC_CLUSTER_NAME));
    assertThat("Topic exists in Kafka CR (Kubernetes)", hasTopicInCRK8s(kafkaTopic, newTopicName));
}
Also used : KafkaTopicStatus(io.strimzi.api.kafka.model.status.KafkaTopicStatus) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) Matchers.containsString(org.hamcrest.Matchers.containsString) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)
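
KafkaTopicTemplates.topic(...) is a system-test helper; the over-replicated resource it builds for this test has, in essence, the following shape. This sketch assumes the helper's fifth argument (1) maps to min.insync.replicas, which the snippet above does not confirm:

KafkaTopic overReplicated = new KafkaTopicBuilder()
        .withNewMetadata()
            .withName(topicName)
            .withNamespace(namespace)
            .addToLabels("strimzi.io/cluster", TOPIC_CLUSTER_NAME)  // ties the topic to the Kafka cluster
        .endMetadata()
        .withNewSpec()
            .withPartitions(5)
            .withReplicas(5)                                   // only 3 brokers available, so the operator reports NotReady
            .withConfig(Map.of("min.insync.replicas", "1"))    // assumed mapping of the template's fifth argument
        .endSpec()
        .build();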

Example 95 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

Class TopicST, method testCreateTopicViaKafka.

@ParallelTest
void testCreateTopicViaKafka(ExtensionContext extensionContext) {
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    int topicPartitions = 3;
    LOGGER.debug("Creating topic {} with {} replicas and {} partitions", topicName, 3, topicPartitions);
    KafkaCmdClient.createTopicUsingPodCli(namespace, TOPIC_CLUSTER_NAME, 0, topicName, 3, topicPartitions);
    KafkaTopic kafkaTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get();
    verifyTopicViaKafkaTopicCRK8s(kafkaTopic, topicName, topicPartitions, TOPIC_CLUSTER_NAME);
    topicPartitions = 5;
    LOGGER.info("Editing topic via Kafka, settings to partitions {}", topicPartitions);
    KafkaCmdClient.updateTopicPartitionsCountUsingPodCli(namespace, TOPIC_CLUSTER_NAME, 0, topicName, topicPartitions);
    LOGGER.debug("Topic {} updated from {} to {} partitions", topicName, 3, topicPartitions);
    KafkaTopicUtils.waitForKafkaTopicPartitionChange(namespace, topicName, topicPartitions);
    verifyTopicViaKafka(namespace, topicName, topicPartitions, TOPIC_CLUSTER_NAME);
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) Matchers.containsString(org.hamcrest.Matchers.containsString) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)
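
Once the topic operator has materialized the CLI-created topic as a KafkaTopic resource, its spec can be read back the same way the test fetches the CR above; a minimal sketch:

KafkaTopic created = KafkaTopicResource.kafkaTopicClient()
        .inNamespace(namespace)
        .withName(topicName)
        .get();
int partitions = created.getSpec().getPartitions();  // expected to follow the Kafka-side update (5 here)
int replicas = created.getSpec().getReplicas();      // 3, as set when the topic was created via the pod CLI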

Aggregations

KafkaTopic (io.strimzi.api.kafka.model.KafkaTopic): 187
Test (org.junit.jupiter.api.Test): 92
KafkaTopicBuilder (io.strimzi.api.kafka.model.KafkaTopicBuilder): 80
Checkpoint (io.vertx.junit5.Checkpoint): 46
ObjectMetaBuilder (io.fabric8.kubernetes.api.model.ObjectMetaBuilder): 38
HashMap (java.util.HashMap): 32
ObjectMeta (io.fabric8.kubernetes.api.model.ObjectMeta): 30
CountDownLatch (java.util.concurrent.CountDownLatch): 28
NewTopic (org.apache.kafka.clients.admin.NewTopic): 28
List (java.util.List): 26
Map (java.util.Map): 26
MeterRegistry (io.micrometer.core.instrument.MeterRegistry): 22
KafkaTopicStatus (io.strimzi.api.kafka.model.status.KafkaTopicStatus): 22
AsyncResult (io.vertx.core.AsyncResult): 22
MaxAttemptsExceededException (io.strimzi.operator.common.MaxAttemptsExceededException): 20
Vertx (io.vertx.core.Vertx): 20
Matchers.containsString (org.hamcrest.Matchers.containsString): 20
Watcher (io.fabric8.kubernetes.client.Watcher): 18
KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients): 18
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder): 18