
Example 1 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

From the class CruiseControlUtils, method verifyThatKafkaCruiseControlMetricReporterTopicIsPresent:

public static void verifyThatKafkaCruiseControlMetricReporterTopicIsPresent(String namespaceName, long timeout) {
    final int numberOfPartitionsMetricTopic = 1;
    final int numberOfReplicasMetricTopic = 3;
    TestUtils.waitFor("Verify that Kafka contains the Cruise Control metrics topic with the expected configuration.", Constants.GLOBAL_POLL_INTERVAL, timeout, () -> {
        KafkaTopic metrics = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(CRUISE_CONTROL_METRICS_TOPIC).get();
        if (metrics == null || metrics.getSpec() == null) {
            // The topic may not exist yet; keep polling instead of failing with a NullPointerException
            return false;
        }
        boolean hasTopicCorrectPartitionsCount = metrics.getSpec().getPartitions() == numberOfPartitionsMetricTopic;
        boolean hasTopicCorrectReplicasCount = metrics.getSpec().getReplicas() == numberOfReplicasMetricTopic;
        return hasTopicCorrectPartitionsCount && hasTopicCorrectReplicasCount;
    });
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic)
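
A minimal sketch of a call site for this utility; the namespace and timeout values below are illustrative placeholders, not taken from the Strimzi test suite:

// Hypothetical invocation; "my-namespace" and the 5-minute timeout are placeholders
CruiseControlUtils.verifyThatKafkaCruiseControlMetricReporterTopicIsPresent("my-namespace", 300_000L);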

Example 2 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

From the class CruiseControlUtils, method verifyThatCruiseControlSamplesTopicsArePresent:

public static void verifyThatCruiseControlSamplesTopicsArePresent(String namespaceName, long timeout) {
    final int numberOfPartitionsSamplesTopic = 32;
    final int numberOfReplicasSamplesTopic = 2;
    TestUtils.waitFor("Verify that kafka contains cruise control topics with related configuration.", Constants.GLOBAL_POLL_INTERVAL, timeout, () -> {
        KafkaTopic modelTrainingSamples = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(CRUISE_CONTROL_MODEL_TRAINING_SAMPLES_TOPIC).get();
        KafkaTopic partitionsMetricsSamples = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(CRUISE_CONTROL_PARTITION_METRICS_SAMPLES_TOPIC).get();
        if (modelTrainingSamples != null && partitionsMetricsSamples != null) {
            boolean hasTopicCorrectPartitionsCount = modelTrainingSamples.getSpec().getPartitions() == numberOfPartitionsSamplesTopic && partitionsMetricsSamples.getSpec().getPartitions() == numberOfPartitionsSamplesTopic;
            boolean hasTopicCorrectReplicasCount = modelTrainingSamples.getSpec().getReplicas() == numberOfReplicasSamplesTopic && partitionsMetricsSamples.getSpec().getReplicas() == numberOfReplicasSamplesTopic;
            return hasTopicCorrectPartitionsCount && hasTopicCorrectReplicasCount;
        }
        LOGGER.debug("One of the samples {}, {} topics are not present", CRUISE_CONTROL_MODEL_TRAINING_SAMPLES_TOPIC, CRUISE_CONTROL_PARTITION_METRICS_SAMPLES_TOPIC);
        return false;
    });
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic)
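
Both utilities above poll a KafkaTopic custom resource and compare spec.partitions and spec.replicas against expected values. A minimal sketch of a shared helper they could delegate to; the method name is hypothetical and not part of CruiseControlUtils:

private static boolean topicHasExpectedSpec(String namespaceName, String topicName, int expectedPartitions, int expectedReplicas) {
    // Fetch the KafkaTopic CR; it may not exist yet while the Topic Operator is still creating it
    KafkaTopic topic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicName).get();
    if (topic == null || topic.getSpec() == null) {
        return false;
    }
    return topic.getSpec().getPartitions() == expectedPartitions
        && topic.getSpec().getReplicas() == expectedReplicas;
}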

Example 3 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

From the class MirrorMaker2IsolatedST, method testMirrorMaker2TlsAndScramSha512Auth:

/**
 * Tests mirroring of messages by MirrorMaker 2.0 over a TLS transport using SCRAM-SHA-512 authentication.
 */
@SuppressWarnings({ "checkstyle:MethodLength" })
@ParallelNamespaceTest
void testMirrorMaker2TlsAndScramSha512Auth(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String kafkaClusterSourceName = clusterName + "-source";
    String kafkaClusterTargetName = clusterName + "-target";
    String sourceTopicName = "availability-topic-source-" + mapWithTestTopics.get(extensionContext.getDisplayName());
    String targetTopicName = "availability-topic-target-" + mapWithTestTopics.get(extensionContext.getDisplayName());
    String topicSourceNameMirrored = kafkaClusterSourceName + "." + sourceTopicName;
    String topicSourceName = MIRRORMAKER2_TOPIC_NAME + "-" + rng.nextInt(Integer.MAX_VALUE);
    String topicTargetName = kafkaClusterSourceName + "." + topicSourceName;
    String kafkaUserSource = clusterName + "-my-user-source";
    String kafkaUserTarget = clusterName + "-my-user-target";
    // Deploy source kafka with tls listener and SCRAM-SHA authentication
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    // Deploy target kafka with tls listener and SCRAM-SHA authentication
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    // Deploy topic
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName, 3).build());
    // Create Kafka user for source cluster
    KafkaUser userSource = KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSource).build();
    resourceManager.createResource(extensionContext, userSource);
    // Create Kafka user for target cluster
    KafkaUser userTarget = KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTarget).build();
    resourceManager.createResource(extensionContext, userTarget);
    // Initialize PasswordSecretSource to set this as PasswordSecret in MirrorMaker2 spec
    PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
    passwordSecretSource.setSecretName(kafkaUserSource);
    passwordSecretSource.setPassword("password");
    // Initialize PasswordSecretSource to set this as PasswordSecret in MirrorMaker2 spec
    PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
    passwordSecretTarget.setSecretName(kafkaUserTarget);
    passwordSecretTarget.setPassword("password");
    // Initialize CertSecretSource with certificate and secret names for source
    CertSecretSource certSecretSource = new CertSecretSource();
    certSecretSource.setCertificate("ca.crt");
    certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
    // Initialize CertSecretSource with certificate and secret names for target
    CertSecretSource certSecretTarget = new CertSecretSource();
    certSecretTarget.setCertificate("ca.crt");
    certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
    // Deploy client
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName, userSource, userTarget).build());
    final String kafkaClientsPodName = kubeClient().listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName)
        .withTopicName(sourceTopicName)
        .withNamespaceName(namespaceName)
        .withClusterName(kafkaClusterSourceName)
        .withKafkaUsername(userSource.getMetadata().getName())
        .withMessageCount(messagesCount)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", sourceTopicName, kafkaClusterSourceName, messagesCount);
    // Check brokers availability
    internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", targetTopicName, kafkaClusterTargetName, userTarget.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(targetTopicName).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", targetTopicName, kafkaClusterTargetName, messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
    // Deploy Mirror Maker with TLS and ScramSha512
    KafkaMirrorMaker2ClusterSpec sourceClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterSourceName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(kafkaUserSource)
            .withPasswordSecret(passwordSecretSource)
        .endKafkaClientAuthenticationScramSha512()
        .withNewTls()
            .withTrustedCertificates(certSecretSource)
        .endTls()
        .build();
    KafkaMirrorMaker2ClusterSpec targetClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterTargetName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(kafkaUserTarget)
            .withPasswordSecret(passwordSecretTarget)
        .endKafkaClientAuthenticationScramSha512()
        .withNewTls()
            .withTrustedCertificates(certSecretTarget)
        .endTls()
        .addToConfig("config.storage.replication.factor", -1)
        .addToConfig("offset.storage.replication.factor", -1)
        .addToConfig("status.storage.replication.factor", -1)
        .build();
    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
        .editSpec()
            .withClusters(targetClusterWithScramSha512Auth, sourceClusterWithScramSha512Auth)
            .editFirstMirror()
                .withTopicsExcludePattern("availability.*")
            .endMirror()
        .endSpec()
        .build());
    LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", topicSourceName, kafkaClusterSourceName, userSource.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicSourceName).withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", topicSourceName, kafkaClusterSourceName, messagesCount);
    int sent = internalKafkaClient.sendMessagesTls();
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Changing to target - topic {}, cluster {}, user {}", topicTargetName, kafkaClusterTargetName, userTarget.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetName).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
    LOGGER.info("Now messages should be mirrored to target topic and cluster");
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Messages successfully mirrored");
    KafkaTopicUtils.waitForKafkaTopicCreation(namespaceName, topicTargetName);
    KafkaTopic mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicTargetName).get();
    assertThat(mirroredTopic.getSpec().getPartitions(), is(3));
    assertThat(mirroredTopic.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(kafkaClusterTargetName));
    mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicSourceNameMirrored).get();
    assertThat(mirroredTopic, nullValue());
}
Also used : KafkaListenerAuthenticationScramSha512(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) JobBuilder(io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaMirrorMaker2ClusterSpecBuilder(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpecBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) KafkaMirrorMaker2ClusterSpec(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpec) PasswordSecretSource(io.strimzi.api.kafka.model.PasswordSecretSource) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) Matchers.containsString(org.hamcrest.Matchers.containsString) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
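
The mirrored topic name the test asserts on (kafkaClusterSourceName + "." + topicSourceName) follows MirrorMaker 2's default replication policy, which prefixes a remote topic with the source cluster alias and a "." separator. A minimal sketch of that convention; the helper method itself is hypothetical and only restates the concatenation the test already performs:

// Default MirrorMaker 2 remote-topic naming: "<source-cluster-alias>.<topic>"
static String remoteTopicName(String sourceClusterAlias, String topicName) {
    return sourceClusterAlias + "." + topicName;   // e.g. "my-cluster-source.mirrormaker2-topic-example"
}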

Example 4 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

From the class MirrorMaker2IsolatedST, method testMirrorMaker2TlsAndTlsClientAuth:

/**
 * Tests mirroring of messages by MirrorMaker 2.0 over a TLS transport using mutual TLS authentication.
 */
@SuppressWarnings({ "checkstyle:MethodLength" })
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
void testMirrorMaker2TlsAndTlsClientAuth(ExtensionContext extensionContext) throws Exception {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String kafkaClusterSourceName = clusterName + "-source";
    String kafkaClusterTargetName = clusterName + "-target";
    String topicName = "availability-topic-source-" + mapWithTestTopics.get(extensionContext.getDisplayName());
    String topicSourceNameMirrored = kafkaClusterSourceName + "." + topicName;
    String topicSourceName = MIRRORMAKER2_TOPIC_NAME + "-" + rng.nextInt(Integer.MAX_VALUE);
    String topicTargetName = kafkaClusterSourceName + "." + topicSourceName;
    String kafkaUserSourceName = clusterName + "-my-user-source";
    String kafkaUserTargetName = clusterName + "-my-user-target";
    // Deploy source kafka with tls listener and mutual tls auth
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationTls())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    // Deploy target kafka with tls listener and mutual tls auth
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationTls())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    // Deploy topic
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName, 3).build());
    // Create Kafka user
    KafkaUser userSource = KafkaUserTemplates.tlsUser(kafkaClusterSourceName, kafkaUserSourceName).build();
    KafkaUser userTarget = KafkaUserTemplates.tlsUser(kafkaClusterTargetName, kafkaUserTargetName).build();
    resourceManager.createResource(extensionContext, userSource);
    resourceManager.createResource(extensionContext, userTarget);
    resourceManager.createResource(extensionContext, false, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName, userSource, userTarget).build());
    final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, kafkaClientsName).get(0).getMetadata().getName();
    String baseTopic = mapWithTestTopics.get(extensionContext.getDisplayName());
    String topicTestName1 = baseTopic + "-test-1";
    String topicTestName2 = baseTopic + "-test-2";
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName1).build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterTargetName, topicTestName2).build());
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName)
        .withTopicName(topicTestName1)
        .withNamespaceName(namespaceName)
        .withClusterName(kafkaClusterSourceName)
        .withKafkaUsername(userSource.getMetadata().getName())
        .withMessageCount(messagesCount)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    // Check brokers availability
    ClientUtils.waitUntilProducerAndConsumerSuccessfullySendAndReceiveMessages(extensionContext, internalKafkaClient);
    LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", topicTestName2, kafkaClusterTargetName, userTarget.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withClusterName(kafkaClusterTargetName).withTopicName(topicTestName2).withKafkaUsername(userTarget.getMetadata().getName()).build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", topicTestName2, kafkaClusterTargetName, messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
    // Initialize CertSecretSource with certificate and secret names for source
    CertSecretSource certSecretSource = new CertSecretSource();
    certSecretSource.setCertificate("ca.crt");
    certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
    // Initialize CertSecretSource with certificate and secret names for target
    CertSecretSource certSecretTarget = new CertSecretSource();
    certSecretTarget.setCertificate("ca.crt");
    certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
    // Deploy Mirror Maker 2.0 with tls listener and mutual tls auth
    KafkaMirrorMaker2ClusterSpec sourceClusterWithTlsAuth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterSourceName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
        .withNewKafkaClientAuthenticationTls()
            .withNewCertificateAndKey()
                .withSecretName(kafkaUserSourceName)
                .withCertificate("user.crt")
                .withKey("user.key")
            .endCertificateAndKey()
        .endKafkaClientAuthenticationTls()
        .withNewTls()
            .withTrustedCertificates(certSecretSource)
        .endTls()
        .build();
    KafkaMirrorMaker2ClusterSpec targetClusterWithTlsAuth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterTargetName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName))
        .withNewKafkaClientAuthenticationTls()
            .withNewCertificateAndKey()
                .withSecretName(kafkaUserTargetName)
                .withCertificate("user.crt")
                .withKey("user.key")
            .endCertificateAndKey()
        .endKafkaClientAuthenticationTls()
        .withNewTls()
            .withTrustedCertificates(certSecretTarget)
        .endTls()
        .addToConfig("config.storage.replication.factor", -1)
        .addToConfig("offset.storage.replication.factor", -1)
        .addToConfig("status.storage.replication.factor", -1)
        .build();
    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
        .editSpec()
            .withClusters(sourceClusterWithTlsAuth, targetClusterWithTlsAuth)
            .editFirstMirror()
                .withTopicsPattern(MIRRORMAKER2_TOPIC_NAME + ".*")
            .endMirror()
        .endSpec()
        .build());
    LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", topicSourceName, kafkaClusterSourceName, userSource.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicSourceName).withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME).build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", topicSourceName, kafkaClusterSourceName, messagesCount);
    int sent = internalKafkaClient.sendMessagesTls();
    LOGGER.info("Receiving messages from - topic {}, cluster {} and message count of {}", topicSourceName, kafkaClusterSourceName, messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Now setting topic to {}, cluster to {} and user to {} - the messages should be mirrored", topicTargetName, kafkaClusterTargetName, userTarget.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetName).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
    LOGGER.info("Consumer in target cluster and topic should receive {} messages", messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Messages successfully mirrored");
    KafkaTopic mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicTargetName).get();
    assertThat(mirroredTopic.getSpec().getPartitions(), is(3));
    assertThat(mirroredTopic.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(kafkaClusterTargetName));
    mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicSourceNameMirrored).get();
    assertThat(mirroredTopic, nullValue());
}
Also used : KafkaListenerAuthenticationTls(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) JobBuilder(io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaMirrorMaker2ClusterSpecBuilder(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpecBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) KafkaMirrorMaker2ClusterSpec(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpec) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) Matchers.containsString(org.hamcrest.Matchers.containsString) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
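
The CertSecretSource instances above are configured with setters; the same result can also be expressed with the generated fluent builder. A small sketch, assuming CertSecretSourceBuilder is available from io.strimzi.api.kafka.model alongside the other builder classes this test already imports:

// Equivalent construction with the fluent builder (sketch; builder availability assumed)
CertSecretSource certSecretSource = new CertSecretSourceBuilder()
    .withCertificate("ca.crt")
    .withSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName))
    .build();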

Example 5 with KafkaTopic

Use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi by strimzi.

From the class TopicST, method testCreateTopicAfterUnsupportedOperation:

@ParallelTest
void testCreateTopicAfterUnsupportedOperation(ExtensionContext extensionContext) {
    String topicName = "topic-with-replication-to-change";
    String newTopicName = "another-topic";
    KafkaTopic kafkaTopic = KafkaTopicTemplates.topic(TOPIC_CLUSTER_NAME, topicName, namespace).editSpec().withReplicas(3).withPartitions(3).endSpec().build();
    resourceManager.createResource(extensionContext, kafkaTopic);
    KafkaTopicResource.replaceTopicResourceInSpecificNamespace(topicName, t -> {
        t.getSpec().setReplicas(1);
        t.getSpec().setPartitions(1);
    }, namespace);
    KafkaTopicUtils.waitForKafkaTopicNotReady(namespace, topicName);
    String expectedMessage = "Number of partitions cannot be decreased";
    assertThat(KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get().getStatus().getConditions().get(0).getMessage(), is(expectedMessage));
    String topicCRDMessage = KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get().getStatus().getConditions().get(0).getMessage();
    assertThat(topicCRDMessage, containsString(expectedMessage));
    KafkaTopic newKafkaTopic = KafkaTopicTemplates.topic(TOPIC_CLUSTER_NAME, newTopicName, 1, 1).editMetadata().withNamespace(namespace).endMetadata().build();
    resourceManager.createResource(extensionContext, newKafkaTopic);
    assertThat("Topic exists in Kafka itself", hasTopicInKafka(topicName, TOPIC_CLUSTER_NAME));
    assertThat("Topic exists in Kafka CR (Kubernetes)", hasTopicInCRK8s(kafkaTopic, topicName));
    assertThat("Topic exists in Kafka itself", hasTopicInKafka(newTopicName, TOPIC_CLUSTER_NAME));
    assertThat("Topic exists in Kafka CR (Kubernetes)", hasTopicInCRK8s(newKafkaTopic, newTopicName));
    cmdKubeClient(namespace).deleteByName(KafkaTopic.RESOURCE_SINGULAR, topicName);
    KafkaTopicUtils.waitForKafkaTopicDeletion(namespace, topicName);
    cmdKubeClient(namespace).deleteByName(KafkaTopic.RESOURCE_SINGULAR, newTopicName);
    KafkaTopicUtils.waitForKafkaTopicDeletion(namespace, newTopicName);
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) Matchers.containsString(org.hamcrest.Matchers.containsString) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)
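
For reference, a minimal sketch of the KafkaTopic custom resource that the template call at the start of this test roughly corresponds to, written directly with KafkaTopicBuilder (listed in the aggregations below). The exact metadata applied by KafkaTopicTemplates is not shown in this snippet, so treat this as an approximation rather than the template's actual output:

KafkaTopic kafkaTopic = new KafkaTopicBuilder()
    .withNewMetadata()
        .withName("topic-with-replication-to-change")
        .withNamespace(namespace)
        // The Topic Operator matches a KafkaTopic to its Kafka cluster through this label
        .addToLabels(Labels.STRIMZI_CLUSTER_LABEL, TOPIC_CLUSTER_NAME)
    .endMetadata()
    .withNewSpec()
        .withPartitions(3)
        .withReplicas(3)
    .endSpec()
    .build();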

Aggregations

KafkaTopic (io.strimzi.api.kafka.model.KafkaTopic): 187
Test (org.junit.jupiter.api.Test): 92
KafkaTopicBuilder (io.strimzi.api.kafka.model.KafkaTopicBuilder): 80
Checkpoint (io.vertx.junit5.Checkpoint): 48
ObjectMetaBuilder (io.fabric8.kubernetes.api.model.ObjectMetaBuilder): 38
ObjectMeta (io.fabric8.kubernetes.api.model.ObjectMeta): 30
HashMap (java.util.HashMap): 28
CountDownLatch (java.util.concurrent.CountDownLatch): 28
NewTopic (org.apache.kafka.clients.admin.NewTopic): 28
List (java.util.List): 24
Map (java.util.Map): 24
MeterRegistry (io.micrometer.core.instrument.MeterRegistry): 22
KafkaTopicStatus (io.strimzi.api.kafka.model.status.KafkaTopicStatus): 22
AsyncResult (io.vertx.core.AsyncResult): 22
Matchers.containsString (org.hamcrest.Matchers.containsString): 22
MaxAttemptsExceededException (io.strimzi.operator.common.MaxAttemptsExceededException): 20
Vertx (io.vertx.core.Vertx): 20
Watcher (io.fabric8.kubernetes.client.Watcher): 18
Future (io.vertx.core.Future): 18
MetricsProvider (io.strimzi.operator.common.MetricsProvider): 16