Example 26 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

The class TopicOperatorTest defines the method testReconcile_noResource_withKafka_withPrivate.

/**
 * Test reconciliation when a resource has been deleted while the operator
 * wasn't running
 */
@Test
public void testReconcile_noResource_withKafka_withPrivate(VertxTestContext context) throws InterruptedException {
    Topic kubeTopic = null;
    Topic kafkaTopic = new Topic.Builder(topicName.toString(), 10, (short) 2, map("cleanup.policy", "bar")).build();
    Topic privateTopic = kafkaTopic;
    CountDownLatch topicCreatedInKafkaAndStored = new CountDownLatch(2);
    mockKafka.createTopic(Reconciliation.DUMMY_RECONCILIATION, kafkaTopic).onComplete(ar -> topicCreatedInKafkaAndStored.countDown());
    mockKafka.setDeleteTopicResponse(topicName, null);
    mockTopicStore.setCreateTopicResponse(topicName, null);
    mockTopicStore.create(kafkaTopic).onComplete(ar -> topicCreatedInKafkaAndStored.countDown());
    mockTopicStore.setDeleteTopicResponse(topicName, null);
    topicCreatedInKafkaAndStored.await();
    LogContext logContext = LogContext.periodic(topicName.toString(), topicOperator.getNamespace(), topicName.toString());
    Checkpoint async = context.checkpoint();
    topicOperator.reconcile(reconciliation(logContext), logContext, null, kubeTopic, kafkaTopic, privateTopic).onComplete(reconcileResult -> {
        assertSucceeded(context, reconcileResult);
        mockTopicStore.assertNotExists(context, topicName);
        mockK8s.assertNotExists(context, topicName.asKubeName());
        mockKafka.assertNotExists(context, topicName);
        mockK8s.assertNoEvents(context);
        async.flag();
    });
}
Also used: Checkpoint(io.vertx.junit5.Checkpoint) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) CountDownLatch(java.util.concurrent.CountDownLatch) Test(org.junit.jupiter.api.Test)
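
The scenario above (no KafkaTopic resource, but the topic exists in Kafka and in the private topic store) means the resource was deleted while the operator was down, so reconciliation removes the topic everywhere. The following is a minimal, hypothetical sketch of that branch, not the operator's actual implementation; the type and method names (TopicSnapshot, deleteFromKafka, deleteFromStore) are made up for illustration.

import java.util.Map;
import java.util.function.Consumer;

// Hypothetical sketch only: illustrates the decision the test above asserts on.
class ReconcileDeletedResourceSketch {

    // Simplified stand-in for the operator's Topic model.
    record TopicSnapshot(String name, int partitions, Map<String, String> config) { }

    /**
     * kubeTopic == null while privateTopic != null implies the KafkaTopic resource was
     * deleted while the operator was not running, so the Kafka topic and the private
     * copy are deleted too, which is exactly what the assertions above check.
     */
    static void reconcile(TopicSnapshot kubeTopic, TopicSnapshot kafkaTopic, TopicSnapshot privateTopic,
                          Consumer<String> deleteFromKafka, Consumer<String> deleteFromStore) {
        if (kubeTopic == null && kafkaTopic != null && privateTopic != null) {
            deleteFromKafka.accept(kafkaTopic.name());
            deleteFromStore.accept(privateTopic.name());
        }
    }
}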

Example 27 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

The class TopicOperatorTest defines the method testReconcile_withResource_withKafka_withPrivate_3WayMerge.

/**
 * Test reconciliation when a resource has been changed both in Kafka and in k8s while the operator was down,
 * and a 3-way merge is needed.
 */
@Test
public void testReconcile_withResource_withKafka_withPrivate_3WayMerge(VertxTestContext context) throws InterruptedException {
    Topic kubeTopic = new Topic.Builder(topicName, resourceName, 10, (short) 2, map("cleanup.policy", "bar"), metadata).build();
    Topic kafkaTopic = new Topic.Builder(topicName, resourceName, 12, (short) 2, map("cleanup.policy", "baz"), metadata).build();
    Topic privateTopic = new Topic.Builder(topicName, resourceName, 10, (short) 2, map("cleanup.policy", "baz"), metadata).build();
    Topic resultTopic = new Topic.Builder(topicName, resourceName, 12, (short) 2, map("cleanup.policy", "bar"), metadata).build();
    CountDownLatch topicCreatedInKafkaAndK8sAndStored = new CountDownLatch(3);
    mockKafka.setCreateTopicResponse(topicName -> Future.succeededFuture());
    mockKafka.createTopic(Reconciliation.DUMMY_RECONCILIATION, kafkaTopic).onComplete(ar -> topicCreatedInKafkaAndK8sAndStored.countDown());
    mockKafka.setUpdateTopicResponse(topicName -> Future.succeededFuture());
    KafkaTopic resource = TopicSerialization.toTopicResource(kubeTopic, labels);
    LogContext logContext = LogContext.periodic(topicName.toString(), topicOperator.getNamespace(), topicName.toString());
    mockK8s.setCreateResponse(topicName.asKubeName(), null);
    mockK8s.createResource(resource).onComplete(ar -> topicCreatedInKafkaAndK8sAndStored.countDown());
    mockK8s.setModifyResponse(topicName.asKubeName(), null);
    mockTopicStore.setCreateTopicResponse(topicName, null);
    mockTopicStore.create(privateTopic).onComplete(ar -> topicCreatedInKafkaAndK8sAndStored.countDown());
    topicCreatedInKafkaAndK8sAndStored.await();
    CountDownLatch topicStoreReadSuccess = new CountDownLatch(3);
    topicOperator.reconcile(reconciliation(logContext), logContext, resource, kubeTopic, kafkaTopic, privateTopic).onComplete(reconcileResult -> {
        assertSucceeded(context, reconcileResult);
        mockK8s.assertNoEvents(context);
        mockTopicStore.read(topicName).onComplete(readResult -> {
            assertSucceeded(context, readResult);
            context.verify(() -> assertThat(readResult.result(), is(resultTopic)));
            topicStoreReadSuccess.countDown();
        });
        mockK8s.getFromName(topicName.asKubeName()).onComplete(readResult -> {
            assertSucceeded(context, readResult);
            context.verify(() -> assertThat(TopicSerialization.fromTopicResource(readResult.result()), is(resultTopic)));
            topicStoreReadSuccess.countDown();
        });
        context.verify(() -> assertThat(mockKafka.getTopicState(topicName), is(resultTopic)));
        topicStoreReadSuccess.countDown();
        try {
            topicStoreReadSuccess.await(60, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        context.completeNow();
    });
}
Also used: KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) CountDownLatch(java.util.concurrent.CountDownLatch) Test(org.junit.jupiter.api.Test)
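
The result asserted above follows the usual three-way merge rule: a value that diverged from the common base (the private topic) on only one side wins, which is why partitions end up at 12 (changed only in Kafka) and cleanup.policy ends up at "bar" (changed only in k8s). Below is a minimal sketch of that rule for a single value; it is illustrative only, and the tie-break when both sides changed is an assumption, not necessarily the operator's conflict policy.

import java.util.Objects;

// Illustrative three-way merge of one value; not the operator's actual merge code.
class ThreeWayMergeSketch {

    static <T> T merge(T base, T kube, T kafka) {
        if (Objects.equals(kube, base)) {
            return kafka;   // only the Kafka side diverged from the base
        }
        if (Objects.equals(kafka, base)) {
            return kube;    // only the k8s side diverged from the base
        }
        return kube;        // both diverged: assumed tie-break in favour of k8s
    }

    public static void main(String[] args) {
        // Matches the test data above: partitions 10/10/12 -> 12, cleanup.policy baz/bar/baz -> "bar".
        System.out.println(merge(10, 10, 12));          // 12
        System.out.println(merge("baz", "bar", "baz")); // bar
    }
}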

Example 28 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

The class TopicOperatorTest defines the method testOnTopicChanged.

/**
 * 0. ZK notifies of a change in topic config
 * 1. operator gets updated topic metadata
 * 2. operator updates k8s and topic store.
 */
@Test
public void testOnTopicChanged(VertxTestContext context) {
    Topic kubeTopic = new Topic.Builder(topicName.toString(), 10, (short) 2, map("cleanup.policy", "bar")).build();
    Topic kafkaTopic = new Topic.Builder(topicName.toString(), 10, (short) 2, map("cleanup.policy", "baz")).build();
    Topic privateTopic = kubeTopic;
    KafkaTopic resource = TopicSerialization.toTopicResource(kubeTopic, labels);
    mockKafka.setCreateTopicResponse(topicName.toString(), null).createTopic(Reconciliation.DUMMY_RECONCILIATION, kafkaTopic);
    mockKafka.setTopicMetadataResponse(topicName, Utils.getTopicMetadata(kafkaTopic), null);
    // mockKafka.setUpdateTopicResponse(topicName -> Future.succeededFuture());
    mockTopicStore.setCreateTopicResponse(topicName, null).create(privateTopic);
    mockTopicStore.setUpdateTopicResponse(topicName, null);
    mockK8s.setCreateResponse(resourceName, null).createResource(resource);
    mockK8s.setModifyResponse(resourceName, null);
    LogContext logContext = LogContext.zkWatch("///", topicName.toString(), topicOperator.getNamespace(), topicName.toString());
    Checkpoint async = context.checkpoint(3);
    topicOperator.onTopicConfigChanged(logContext, topicName).onComplete(ar -> {
        assertSucceeded(context, ar);
        context.verify(() -> assertThat(mockKafka.getTopicState(topicName).getConfig().get("cleanup.policy"), is("baz")));
        mockTopicStore.read(topicName).onComplete(ar2 -> {
            assertSucceeded(context, ar2);
            context.verify(() -> assertThat(ar2.result().getConfig().get("cleanup.policy"), is("baz")));
            async.flag();
        });
        mockK8s.getFromName(resourceName).onComplete(ar2 -> {
            assertSucceeded(context, ar2);
            context.verify(() -> assertThat(TopicSerialization.fromTopicResource(ar2.result()).getConfig().get("cleanup.policy"), is("baz")));
            async.flag();
        });
        context.verify(() -> {
            MeterRegistry registry = metrics.meterRegistry();
            assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations").tag("kind", "KafkaTopic").counter().count(), is(1.0));
            assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.successful").tag("kind", "KafkaTopic").counter().count(), is(1.0));
            assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.failed").tag("kind", "KafkaTopic").counter().count(), is(0.0));
            assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.duration").tag("kind", "KafkaTopic").timer().count(), is(1L));
            assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.duration").tag("kind", "KafkaTopic").timer().totalTime(TimeUnit.MILLISECONDS), greaterThan(0.0));
            assertThat(registry.get(TopicOperator.METRICS_PREFIX + "resource.state").tag("kind", "KafkaTopic").tag("name", topicName.toString()).tag("resource-namespace", "default-namespace").gauge().value(), is(1.0));
        });
        async.flag();
    });
}
Also used: Checkpoint(io.vertx.junit5.Checkpoint) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) MeterRegistry(io.micrometer.core.instrument.MeterRegistry) Test(org.junit.jupiter.api.Test)
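
A compact sketch of the flow the Javadoc lists (read the updated metadata from Kafka, then update both k8s and the topic store), composed with Vert.x futures. The helper method names are hypothetical stubs, not the operator's real API; only the Vert.x Future type is real.

import io.vertx.core.Future;

// Hypothetical sketch of the onTopicConfigChanged flow described above.
class TopicConfigChangedSketch {

    Future<Void> onTopicConfigChanged(String topicName) {
        return fetchConfigFromKafka(topicName)                               // 1. read the updated topic config
            .compose(config -> updateKafkaTopicResource(topicName, config)   // 2. patch the KafkaTopic in k8s
                .compose(ignored -> updateTopicStore(topicName, config)));   // 2. persist the private copy
    }

    Future<String> fetchConfigFromKafka(String topic) {
        return Future.succeededFuture("cleanup.policy=baz");
    }

    Future<Void> updateKafkaTopicResource(String topic, String config) {
        return Future.succeededFuture();
    }

    Future<Void> updateTopicStore(String topic, String config) {
        return Future.succeededFuture();
    }
}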

Example 29 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

The class TopicOperatorTest defines the helper method resourceAdded.

/**
 * Trigger {@link TopicOperator#onResourceEvent(LogContext, KafkaTopic, io.fabric8.kubernetes.client.Watcher.Action)}
 * and have Kafka and the TopicStore respond with the given exceptions.
 */
private TopicOperator resourceAdded(VertxTestContext context, CountDownLatch latch, Exception createException, Exception storeException) throws InterruptedException {
    mockKafka.setCreateTopicResponse(topicName.toString(), createException);
    mockKafka.setTopicExistsResult(t -> Future.succeededFuture(false));
    mockTopicStore.setCreateTopicResponse(topicName, storeException);
    KafkaTopic kafkaTopic = new KafkaTopicBuilder().withMetadata(metadata).withNewSpec().withReplicas(2).withPartitions(10).endSpec().build();
    mockKafka.setTopicMetadataResponses(topicName -> Future.succeededFuture(), topicName -> Future.succeededFuture(Utils.getTopicMetadata(TopicSerialization.fromTopicResource(kafkaTopic))));
    LogContext logContext = LogContext.kubeWatch(Watcher.Action.ADDED, kafkaTopic);
    Checkpoint async = context.checkpoint();
    mockK8s.setGetFromNameResponse(new ResourceName(kafkaTopic), Future.succeededFuture(kafkaTopic));
    topicOperator.onResourceEvent(logContext, kafkaTopic, ADDED).onComplete(ar -> {
        if (createException != null || storeException != null) {
            assertFailed(context, ar);
            Class<? extends Exception> expectedExceptionType;
            if (createException != null) {
                expectedExceptionType = createException.getClass();
            } else {
                expectedExceptionType = storeException.getClass();
            }
            if (!expectedExceptionType.equals(ar.cause().getClass())) {
                ar.cause().printStackTrace();
            }
            context.verify(() -> assertThat(ar.cause().getMessage(), ar.cause().getClass().getName(), is(expectedExceptionType.getName())));
            TopicName topicName = TopicSerialization.fromTopicResource(kafkaTopic).getTopicName();
            if (createException != null) {
                mockKafka.assertNotExists(context, topicName);
            } else {
                mockKafka.assertExists(context, topicName);
            }
            mockTopicStore.assertNotExists(context, topicName);
        // TODO mockK8s.assertContainsEvent(context, e -> "Error".equals(e.getKind()));
        } else {
            assertSucceeded(context, ar);
            Topic expectedTopic = TopicSerialization.fromTopicResource(kafkaTopic);
            mockKafka.assertContains(context, expectedTopic);
            mockTopicStore.assertContains(context, expectedTopic);
            mockK8s.assertNoEvents(context);
        }
        if (latch != null) {
            latch.countDown();
        }
        async.flag();
    });
    if (!context.awaitCompletion(60, TimeUnit.SECONDS)) {
        context.failNow(new Throwable("Test timeout"));
    }
    return topicOperator;
}
Also used: Checkpoint(io.vertx.junit5.Checkpoint) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) KafkaTopicBuilder(io.strimzi.api.kafka.model.KafkaTopicBuilder)
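
A caller of this helper might look like the sketch below, driving the ADDED event once on the happy path and once with a Kafka-side failure; these are illustrative examples, not verbatim tests from the class, and the exception message is a placeholder.

@Test
public void testOnKafkaTopicAdded(VertxTestContext context) throws InterruptedException {
    // Happy path: neither Kafka nor the topic store fails, so the topic is created everywhere.
    resourceAdded(context, null, null, null);
}

@Test
public void testOnKafkaTopicAdded_TopicExistsException(VertxTestContext context) throws InterruptedException {
    // Kafka rejects the creation; the helper asserts the failure propagates with the same exception type.
    resourceAdded(context, null, new TopicExistsException("already exists"), null);
}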

Example 30 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

The class TopicOperatorTest defines the method testReconcileMetricsDeletedTopic.

@Test
public void testReconcileMetricsDeletedTopic(VertxTestContext context) throws InterruptedException {
    mockKafka.setTopicsListResponse(Future.succeededFuture(emptySet()));
    mockKafka.setUpdateTopicResponse(topicName -> Future.succeededFuture());
    Future<?> reconcileFuture = topicOperator.reconcileAllTopics("periodic");
    CountDownLatch addLatch = new CountDownLatch(1);
    resourceAdded(context, addLatch, null, null);
    addLatch.await(30, TimeUnit.SECONDS);
    CountDownLatch deleteLatch = new CountDownLatch(1);
    resourceRemoved(context, deleteLatch, null, null);
    deleteLatch.await(30, TimeUnit.SECONDS);
    Checkpoint async = context.checkpoint();
    reconcileFuture.onComplete(context.succeeding(e -> {
        MeterRegistry registry = metrics.meterRegistry();
        assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations").tag("kind", "KafkaTopic").counter().count(), is(0.0));
        assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.successful").tag("kind", "KafkaTopic").counter().count(), is(0.0));
        assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.failed").tag("kind", "KafkaTopic").counter().count(), is(0.0));
        assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.duration").tag("kind", "KafkaTopic").timer().count(), is(0L));
        assertThat(registry.get(TopicOperator.METRICS_PREFIX + "reconciliations.duration").tag("kind", "KafkaTopic").timer().totalTime(TimeUnit.MILLISECONDS), is(0.0));
        async.flag();
    }));
}
Also used: CoreMatchers.is(org.hamcrest.CoreMatchers.is) BeforeEach(org.junit.jupiter.api.BeforeEach) Watcher(io.fabric8.kubernetes.client.Watcher) MaxAttemptsExceededException(io.strimzi.operator.common.MaxAttemptsExceededException) ClusterAuthorizationException(org.apache.kafka.common.errors.ClusterAuthorizationException) CoreMatchers.notNullValue(org.hamcrest.CoreMatchers.notNullValue) AfterAll(org.junit.jupiter.api.AfterAll) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) MicrometerMetricsOptions(io.vertx.micrometer.MicrometerMetricsOptions) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Collections.singleton(java.util.Collections.singleton) BeforeAll(org.junit.jupiter.api.BeforeAll) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) KubernetesClientException(io.fabric8.kubernetes.client.KubernetesClientException) KafkaTopicStatus(io.strimzi.api.kafka.model.status.KafkaTopicStatus) ADDED(io.fabric8.kubernetes.client.Watcher.Action.ADDED) DELETED(io.fabric8.kubernetes.client.Watcher.Action.DELETED) MetricsProvider(io.strimzi.operator.common.MetricsProvider) VertxOptions(io.vertx.core.VertxOptions) VertxPrometheusOptions(io.vertx.micrometer.VertxPrometheusOptions) VertxExtension(io.vertx.junit5.VertxExtension) Future(io.vertx.core.Future) Test(org.junit.jupiter.api.Test) Objects(java.util.Objects) CountDownLatch(java.util.concurrent.CountDownLatch) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException) List(java.util.List) ObjectMeta(io.fabric8.kubernetes.api.model.ObjectMeta) KafkaTopicBuilder(io.strimzi.api.kafka.model.KafkaTopicBuilder) TopicDeletionDisabledException(org.apache.kafka.common.errors.TopicDeletionDisabledException) Checkpoint(io.vertx.junit5.Checkpoint) UnknownTopicOrPartitionException(org.apache.kafka.common.errors.UnknownTopicOrPartitionException) Matchers.greaterThan(org.hamcrest.Matchers.greaterThan) VertxTestContext(io.vertx.junit5.VertxTestContext) HashMap(java.util.HashMap) MODIFIED(io.fabric8.kubernetes.client.Watcher.Action.MODIFIED) Collections.singletonMap(java.util.Collections.singletonMap) AsyncResult(io.vertx.core.AsyncResult) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) ObjectMetaBuilder(io.fabric8.kubernetes.api.model.ObjectMetaBuilder) Collections.emptySet(java.util.Collections.emptySet) MicrometerMetricsProvider(io.strimzi.operator.common.MicrometerMetricsProvider) Vertx(io.vertx.core.Vertx) TimeUnit(java.util.concurrent.TimeUnit) Reconciliation(io.strimzi.operator.common.Reconciliation) AfterEach(org.junit.jupiter.api.AfterEach) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) MeterRegistry(io.micrometer.core.instrument.MeterRegistry) Collections(java.util.Collections)
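
The metric assertions in these tests follow the standard Micrometer lookup pattern: search the registry by name and tag, then read the counter or timer. A self-contained sketch using a SimpleMeterRegistry is shown below; the metric names are placeholders for illustration, while the tests themselves build names from TopicOperator.METRICS_PREFIX.

import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Timer;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

// Standalone sketch of the Micrometer lookups used in the assertions above.
class MetricsLookupSketch {
    public static void main(String[] args) {
        MeterRegistry registry = new SimpleMeterRegistry();

        // Register and bump a counter and a timer, tagged the same way the tests query them.
        Counter reconciliations = Counter.builder("reconciliations").tag("kind", "KafkaTopic").register(registry);
        Timer duration = Timer.builder("reconciliations.duration").tag("kind", "KafkaTopic").register(registry);
        reconciliations.increment();
        duration.record(Duration.ofMillis(5));

        // The same name-and-tag lookup style as registry.get(...).tag(...).counter()/.timer() above.
        System.out.println(registry.get("reconciliations").tag("kind", "KafkaTopic").counter().count());        // 1.0
        System.out.println(registry.get("reconciliations.duration").tag("kind", "KafkaTopic").timer().count()); // 1
        System.out.println(registry.get("reconciliations.duration").tag("kind", "KafkaTopic").timer().totalTime(TimeUnit.MILLISECONDS)); // about 5.0
    }
}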

Aggregations

KafkaTopic (io.strimzi.api.kafka.model.KafkaTopic): 187 usages
Test (org.junit.jupiter.api.Test): 92 usages
KafkaTopicBuilder (io.strimzi.api.kafka.model.KafkaTopicBuilder): 80 usages
Checkpoint (io.vertx.junit5.Checkpoint): 46 usages
ObjectMetaBuilder (io.fabric8.kubernetes.api.model.ObjectMetaBuilder): 38 usages
HashMap (java.util.HashMap): 32 usages
ObjectMeta (io.fabric8.kubernetes.api.model.ObjectMeta): 30 usages
CountDownLatch (java.util.concurrent.CountDownLatch): 28 usages
NewTopic (org.apache.kafka.clients.admin.NewTopic): 28 usages
List (java.util.List): 26 usages
Map (java.util.Map): 26 usages
MeterRegistry (io.micrometer.core.instrument.MeterRegistry): 22 usages
KafkaTopicStatus (io.strimzi.api.kafka.model.status.KafkaTopicStatus): 22 usages
AsyncResult (io.vertx.core.AsyncResult): 22 usages
MaxAttemptsExceededException (io.strimzi.operator.common.MaxAttemptsExceededException): 20 usages
Vertx (io.vertx.core.Vertx): 20 usages
Matchers.containsString (org.hamcrest.Matchers.containsString): 20 usages
Watcher (io.fabric8.kubernetes.client.Watcher): 18 usages
KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients): 18 usages
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder): 18 usages