
Example 46 with KafkaTopic

use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

the class MirrorMaker2IsolatedST method testMirrorMaker2.

@ParallelNamespaceTest
void testMirrorMaker2(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext, clusterOperator.getDeploymentNamespace());
    String kafkaClusterSourceName = testStorage.getClusterName() + "-source";
    String kafkaClusterTargetName = testStorage.getClusterName() + "-target";
    String sourceMirroredTopicName = kafkaClusterSourceName + "." + testStorage.getTopicName();
    Map<String, Object> expectedConfig = StUtils.loadProperties(
        "group.id=mirrormaker2-cluster\n"
            + "key.converter=org.apache.kafka.connect.converters.ByteArrayConverter\n"
            + "value.converter=org.apache.kafka.connect.converters.ByteArrayConverter\n"
            + "header.converter=org.apache.kafka.connect.converters.ByteArrayConverter\n"
            + "config.storage.topic=mirrormaker2-cluster-configs\n"
            + "status.storage.topic=mirrormaker2-cluster-status\n"
            + "offset.storage.topic=mirrormaker2-cluster-offsets\n"
            + "config.storage.replication.factor=-1\n"
            + "status.storage.replication.factor=-1\n"
            + "offset.storage.replication.factor=-1\n"
            + "config.providers=file\n"
            + "config.providers.file.class=org.apache.kafka.common.config.provider.FileConfigProvider\n");
    // Deploy source and target kafka
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1).build(), KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1).build());
    // Deploy source topic
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, testStorage.getTopicName(), 3).build());
    KafkaClients clients = new KafkaClientsBuilder()
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(kafkaClusterSourceName))
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withMessageCount(MESSAGE_COUNT)
        .build();
    // Check brokers availability
    LOGGER.info("Messages exchange - topic {}, cluster {} and message count of {}", testStorage.getTopicName(), kafkaClusterSourceName, MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, clients.producerStrimzi(), clients.consumerStrimzi());
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(testStorage.getClusterName(), kafkaClusterTargetName, kafkaClusterSourceName, 1, false)
        .editSpec()
            .editFirstMirror()
                .editSourceConnector()
                    .addToConfig("refresh.topics.interval.seconds", "1")
                .endSourceConnector()
            .endMirror()
        .endSpec()
        .build());
    String podName = PodUtils.getPodNameByPrefix(testStorage.getNamespaceName(), KafkaMirrorMaker2Resources.deploymentName(testStorage.getClusterName()));
    String kafkaPodJson = TestUtils.toJsonString(kubeClient().getPod(testStorage.getNamespaceName(), podName));
    assertThat(kafkaPodJson, hasJsonPath(StUtils.globalVariableJsonPathBuilder(0, "KAFKA_CONNECT_BOOTSTRAP_SERVERS"), hasItem(KafkaResources.plainBootstrapAddress(kafkaClusterTargetName))));
    assertThat(StUtils.getPropertiesFromJson(0, kafkaPodJson, "KAFKA_CONNECT_CONFIGURATION"), is(expectedConfig));
    testDockerImagesForKafkaMirrorMaker2(testStorage.getClusterName(), clusterOperator.getDeploymentNamespace(), testStorage.getNamespaceName());
    verifyLabelsOnPods(testStorage.getNamespaceName(), testStorage.getClusterName(), "mirrormaker2", KafkaMirrorMaker2.RESOURCE_KIND);
    verifyLabelsForService(testStorage.getNamespaceName(), testStorage.getClusterName(), "mirrormaker2-api", KafkaMirrorMaker2.RESOURCE_KIND);
    verifyLabelsForConfigMaps(testStorage.getNamespaceName(), kafkaClusterSourceName, null, kafkaClusterTargetName);
    verifyLabelsForServiceAccounts(testStorage.getNamespaceName(), kafkaClusterSourceName, null);
    LOGGER.info("Now setting topic to {} and cluster to {} - the messages should be mirrored", sourceMirroredTopicName, kafkaClusterTargetName);
    clients = new KafkaClientsBuilder(clients).withTopicName(sourceMirroredTopicName).withBootstrapAddress(KafkaResources.plainBootstrapAddress(kafkaClusterTargetName)).build();
    LOGGER.info("Consumer in target cluster and topic should receive {} messages", MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, clients.consumerStrimzi());
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    LOGGER.info("Mirrored successful");
    if (!Environment.isKRaftModeEnabled()) {
        KafkaTopic mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(testStorage.getNamespaceName()).withName(sourceMirroredTopicName).get();
        assertThat(mirroredTopic.getSpec().getPartitions(), is(3));
        assertThat(mirroredTopic.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(kafkaClusterTargetName));
        // Replace the source topic resource with new data and check that MM2 updates the target topic
        KafkaTopicResource.replaceTopicResourceInSpecificNamespace(testStorage.getTopicName(), kt -> kt.getSpec().setPartitions(8), testStorage.getNamespaceName());
        KafkaTopicUtils.waitForKafkaTopicPartitionChange(testStorage.getNamespaceName(), sourceMirroredTopicName, 8);
    }
}
Also used : KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) TestStorage(io.strimzi.systemtest.storage.TestStorage) Matchers.containsString(org.hamcrest.Matchers.containsString) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
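
The expected name of the mirrored topic in the test above follows MirrorMaker 2.0's default replication policy, which prefixes the original topic name with the source cluster alias and a separator ("<sourceClusterAlias>.<topic>"). As a side note, the same name can be derived programmatically instead of by string concatenation; the following is a minimal sketch, not taken from the test, and it assumes Kafka's connect-mirror-client artifact is on the classpath:

// Hypothetical sketch: deriving a remote (mirrored) topic name with Kafka's DefaultReplicationPolicy.
import org.apache.kafka.connect.mirror.DefaultReplicationPolicy;

public class RemoteTopicNameSketch {
    public static void main(String[] args) {
        DefaultReplicationPolicy policy = new DefaultReplicationPolicy();
        // "my-cluster-source" stands in for kafkaClusterSourceName from the test above
        String remoteTopic = policy.formatRemoteTopic("my-cluster-source", "my-topic");
        // Prints "my-cluster-source.my-topic", the same shape as sourceMirroredTopicName
        System.out.println(remoteTopic);
        // The policy can also recover the source cluster alias from a remote topic name
        System.out.println(policy.topicSource(remoteTopic));
    }
}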

Example 47 with KafkaTopic

use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

the class MirrorMaker2IsolatedST method testMirrorMaker2TlsAndScramSha512Auth.

/**
 * Test mirroring messages by MirrorMaker 2.0 over a TLS transport using SCRAM-SHA-512 authentication
 */
@ParallelNamespaceTest
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
void testMirrorMaker2TlsAndScramSha512Auth(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext, clusterOperator.getDeploymentNamespace());
    String kafkaClusterSourceName = testStorage.getClusterName() + "-source";
    String kafkaClusterTargetName = testStorage.getClusterName() + "-target";
    String sourceMirroredTopicName = kafkaClusterSourceName + "." + testStorage.getTopicName();
    String kafkaUserSourceName = testStorage.getUserName() + "-source";
    String kafkaUserTargetName = testStorage.getUserName() + "-target";
    resourceManager.createResource(extensionContext,
        KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
            .editSpec()
                .editKafka()
                    .withListeners(new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9093)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .withAuth(new KafkaListenerAuthenticationScramSha512())
                        .build())
                .endKafka()
            .endSpec()
            .build(),
        KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
            .editSpec()
                .editKafka()
                    .withListeners(new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9093)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .withAuth(new KafkaListenerAuthenticationScramSha512())
                        .build())
                .endKafka()
            .endSpec()
            .build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, testStorage.getTopicName(), 3).build(), KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSourceName).build(), KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTargetName).build());
    // Initialize PasswordSecretSource to set this as PasswordSecret in MirrorMaker2 spec
    PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
    passwordSecretSource.setSecretName(kafkaUserSourceName);
    passwordSecretSource.setPassword("password");
    // Initialize PasswordSecretSource to set this as PasswordSecret in MirrorMaker2 spec
    PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
    passwordSecretTarget.setSecretName(kafkaUserTargetName);
    passwordSecretTarget.setPassword("password");
    // Initialize CertSecretSource with certificate and secret names for source
    CertSecretSource certSecretSource = new CertSecretSource();
    certSecretSource.setCertificate("ca.crt");
    certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
    // Initialize CertSecretSource with certificate and secret names for target
    CertSecretSource certSecretTarget = new CertSecretSource();
    certSecretTarget.setCertificate("ca.crt");
    certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
    KafkaClients clients = new KafkaClientsBuilder()
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
        .withUserName(kafkaUserSourceName)
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withMessageCount(MESSAGE_COUNT)
        .build();
    // Check brokers availability
    LOGGER.info("Messages exchange - topic {}, cluster {} and message count of {}", testStorage.getTopicName(), kafkaClusterSourceName, MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, clients.producerScramShaTlsStrimzi(kafkaClusterSourceName), clients.consumerScramShaTlsStrimzi(kafkaClusterSourceName));
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(testStorage.getClusterName(), kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
        .editSpec()
            .editMatchingCluster(spec -> spec.getAlias().equals(kafkaClusterSourceName))
                .withNewKafkaClientAuthenticationScramSha512()
                    .withUsername(kafkaUserSourceName)
                    .withPasswordSecret(passwordSecretSource)
                .endKafkaClientAuthenticationScramSha512()
                .withNewTls()
                    .withTrustedCertificates(certSecretSource)
                .endTls()
            .endCluster()
            .editMatchingCluster(spec -> spec.getAlias().equals(kafkaClusterTargetName))
                .withNewKafkaClientAuthenticationScramSha512()
                    .withUsername(kafkaUserTargetName)
                    .withPasswordSecret(passwordSecretTarget)
                .endKafkaClientAuthenticationScramSha512()
                .withNewTls()
                    .withTrustedCertificates(certSecretTarget)
                .endTls()
            .endCluster()
        .endSpec()
        .build());
    LOGGER.info("Changing to mirrored topic - topic {}, cluster {}, user {}", sourceMirroredTopicName, kafkaClusterTargetName, kafkaClusterTargetName);
    clients = new KafkaClientsBuilder(clients).withTopicName(sourceMirroredTopicName).withBootstrapAddress(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName)).withUserName(kafkaUserTargetName).build();
    LOGGER.info("Now messages should be mirrored to target topic and cluster");
    resourceManager.createResource(extensionContext, clients.consumerScramShaTlsStrimzi(kafkaClusterTargetName));
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    LOGGER.info("Messages successfully mirrored");
    KafkaTopicUtils.waitForKafkaTopicCreation(testStorage.getNamespaceName(), sourceMirroredTopicName);
    KafkaTopic mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(testStorage.getNamespaceName()).withName(sourceMirroredTopicName).get();
    assertThat(mirroredTopic.getSpec().getPartitions(), is(3));
    assertThat(mirroredTopic.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(kafkaClusterTargetName));
}
Also used : KafkaListenerAuthenticationScramSha512(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) PasswordSecretSource(io.strimzi.api.kafka.model.PasswordSecretSource) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) TestStorage(io.strimzi.systemtest.storage.TestStorage) Matchers.containsString(org.hamcrest.Matchers.containsString) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)

Example 48 with KafkaTopic

use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

the class TopicST method testCreateTopicAfterUnsupportedOperation.

@ParallelTest
@Disabled("TopicOperator allows forbidden settings - https://github.com/strimzi/strimzi-kafka-operator/issues/6884")
void testCreateTopicAfterUnsupportedOperation(ExtensionContext extensionContext) {
    String topicName = "topic-with-replication-to-change";
    String newTopicName = "another-topic";
    KafkaTopic kafkaTopic = KafkaTopicTemplates.topic(TOPIC_CLUSTER_NAME, topicName, namespace).editSpec().withReplicas(3).withPartitions(3).endSpec().build();
    resourceManager.createResource(extensionContext, kafkaTopic);
    KafkaTopicResource.replaceTopicResourceInSpecificNamespace(topicName, t -> {
        t.getSpec().setReplicas(1);
        t.getSpec().setPartitions(1);
    }, namespace);
    KafkaTopicUtils.waitForKafkaTopicNotReady(namespace, topicName);
    String expectedMessage = "Number of partitions cannot be decreased";
    assertThat(KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get().getStatus().getConditions().get(0).getMessage(), is(expectedMessage));
    String topicCRDMessage = KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get().getStatus().getConditions().get(0).getMessage();
    assertThat(topicCRDMessage, containsString(expectedMessage));
    KafkaTopic newKafkaTopic = KafkaTopicTemplates.topic(TOPIC_CLUSTER_NAME, newTopicName, 1, 1).editMetadata().withNamespace(namespace).endMetadata().build();
    resourceManager.createResource(extensionContext, newKafkaTopic);
    assertThat("Topic exists in Kafka itself", hasTopicInKafka(topicName, TOPIC_CLUSTER_NAME));
    assertThat("Topic exists in Kafka CR (Kubernetes)", hasTopicInCRK8s(kafkaTopic, topicName));
    assertThat("Topic exists in Kafka itself", hasTopicInKafka(newTopicName, TOPIC_CLUSTER_NAME));
    assertThat("Topic exists in Kafka CR (Kubernetes)", hasTopicInCRK8s(newKafkaTopic, newTopicName));
    cmdKubeClient(namespace).deleteByName(KafkaTopic.RESOURCE_SINGULAR, topicName);
    KafkaTopicUtils.waitForKafkaTopicDeletion(namespace, topicName);
    cmdKubeClient(namespace).deleteByName(KafkaTopic.RESOURCE_SINGULAR, newTopicName);
    KafkaTopicUtils.waitForKafkaTopicDeletion(namespace, newTopicName);
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) Matchers.containsString(org.hamcrest.Matchers.containsString) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest) Disabled(org.junit.jupiter.api.Disabled)

Example 49 with KafkaTopic

use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

the class TopicST method testCreateTopicViaKafka.

@ParallelTest
void testCreateTopicViaKafka(ExtensionContext extensionContext) {
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    int topicPartitions = 3;
    LOGGER.debug("Creating topic {} with {} replicas and {} partitions", topicName, 3, topicPartitions);
    KafkaCmdClient.createTopicUsingPodCli(namespace, TOPIC_CLUSTER_NAME, 0, topicName, 3, topicPartitions);
    KafkaTopic kafkaTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get();
    verifyTopicViaKafkaTopicCRK8s(kafkaTopic, topicName, topicPartitions, TOPIC_CLUSTER_NAME);
    topicPartitions = 5;
    LOGGER.info("Editing topic via Kafka, settings to partitions {}", topicPartitions);
    KafkaCmdClient.updateTopicPartitionsCountUsingPodCli(namespace, TOPIC_CLUSTER_NAME, 0, topicName, topicPartitions);
    LOGGER.debug("Topic {} updated from {} to {} partitions", topicName, 3, topicPartitions);
    KafkaTopicUtils.waitForKafkaTopicPartitionChange(namespace, topicName, topicPartitions);
    verifyTopicViaKafka(namespace, topicName, topicPartitions, TOPIC_CLUSTER_NAME);
}
Also used : KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) Matchers.containsString(org.hamcrest.Matchers.containsString) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)

Example 50 with KafkaTopic

use of io.strimzi.api.kafka.model.KafkaTopic in project strimzi-kafka-operator by strimzi.

the class TopicST method testCreateTopicViaAdminClient.

@IsolatedTest("Using more tha one Kafka cluster in one namespace")
@Tag(NODEPORT_SUPPORTED)
void testCreateTopicViaAdminClient(ExtensionContext extensionContext) throws ExecutionException, InterruptedException, TimeoutException {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3, 3)
        .editMetadata()
            .withNamespace(namespace)
        .endMetadata()
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                    .withPort(9094)
                    .withType(KafkaListenerType.NODEPORT)
                    .withTls(false)
                    .build())
            .endKafka()
        .endSpec()
        .build());
    Properties properties = new Properties();
    properties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
        KafkaResource.kafkaClient().inNamespace(namespace).withName(clusterName).get()
            .getStatus().getListeners().stream()
            .filter(listener -> listener.getType().equals(Constants.EXTERNAL_LISTENER_DEFAULT_NAME))
            .findFirst()
            .orElseThrow(RuntimeException::new)
            .getBootstrapServers());
    try (AdminClient adminClient = AdminClient.create(properties)) {
        Set<String> topics = adminClient.listTopics().names().get(Constants.GLOBAL_CLIENTS_TIMEOUT, TimeUnit.MILLISECONDS);
        // new KafkaStreamsTopicStore has topology input topics
        int topicsSize = topics.size();
        LOGGER.info("Creating async topic {} via Admin client", topicName);
        CreateTopicsResult crt = adminClient.createTopics(singletonList(new NewTopic(topicName, 1, (short) 1)));
        crt.all().get();
        TestUtils.waitFor("Wait until Kafka cluster has " + (topicsSize + 1) + " KafkaTopic", Constants.GLOBAL_POLL_INTERVAL, Constants.GLOBAL_TIMEOUT, () -> {
            Set<String> updatedKafkaTopics = new HashSet<>();
            try {
                updatedKafkaTopics = adminClient.listTopics().names().get(Constants.GLOBAL_CLIENTS_TIMEOUT, TimeUnit.MILLISECONDS);
                LOGGER.info("Verify that in Kafka cluster contains {} topics", topicsSize + 1);
            } catch (InterruptedException | ExecutionException | TimeoutException e) {
                e.printStackTrace();
            }
            return updatedKafkaTopics.size() == topicsSize + 1 && updatedKafkaTopics.contains(topicName);
        });
        KafkaTopicUtils.waitForKafkaTopicCreation(namespace, topicName);
        KafkaTopicUtils.waitForKafkaTopicReady(namespace, topicName);
    }
    KafkaTopic kafkaTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespace).withName(topicName).get();
    LOGGER.info("Verify that corresponding {} KafkaTopic custom resources were created and topic is in Ready state", 1);
    assertThat(kafkaTopic.getStatus().getConditions().get(0).getType(), is(Ready.toString()));
    assertThat(kafkaTopic.getSpec().getPartitions(), is(1));
    assertThat(kafkaTopic.getSpec().getReplicas(), is(1));
}
Also used : Matchers.containsString(org.hamcrest.Matchers.containsString) Properties(java.util.Properties) CreateTopicsResult(org.apache.kafka.clients.admin.CreateTopicsResult) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) NewTopic(org.apache.kafka.clients.admin.NewTopic) ExecutionException(java.util.concurrent.ExecutionException) AdminClient(org.apache.kafka.clients.admin.AdminClient) HashSet(java.util.HashSet) TimeoutException(java.util.concurrent.TimeoutException) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest) Tag(org.junit.jupiter.api.Tag)
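
Example 50 creates the topic through the Kafka Admin API with the three-argument NewTopic constructor (name, partitions, replication factor). Topic-level configuration can be attached to the same request; the sketch below is a minimal, self-contained illustration of that Admin API usage with a placeholder bootstrap address and topic name, not code from the test above:

// Hypothetical sketch: creating a topic with explicit topic-level configs via the Kafka Admin API.
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class AdminClientTopicSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            NewTopic topic = new NewTopic("example-topic", 1, (short) 1)
                // topic-level configs are passed as plain strings
                .configs(Map.of("retention.ms", "7200000"));
            // createTopics is asynchronous; all().get() blocks until the request completes
            admin.createTopics(Collections.singletonList(topic)).all().get();
        }
    }
}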

Aggregations

KafkaTopic (io.strimzi.api.kafka.model.KafkaTopic)187 Test (org.junit.jupiter.api.Test)92 KafkaTopicBuilder (io.strimzi.api.kafka.model.KafkaTopicBuilder)80 Checkpoint (io.vertx.junit5.Checkpoint)46 ObjectMetaBuilder (io.fabric8.kubernetes.api.model.ObjectMetaBuilder)38 HashMap (java.util.HashMap)32 ObjectMeta (io.fabric8.kubernetes.api.model.ObjectMeta)30 CountDownLatch (java.util.concurrent.CountDownLatch)28 NewTopic (org.apache.kafka.clients.admin.NewTopic)28 List (java.util.List)26 Map (java.util.Map)26 MeterRegistry (io.micrometer.core.instrument.MeterRegistry)22 KafkaTopicStatus (io.strimzi.api.kafka.model.status.KafkaTopicStatus)22 AsyncResult (io.vertx.core.AsyncResult)22 MaxAttemptsExceededException (io.strimzi.operator.common.MaxAttemptsExceededException)20 Vertx (io.vertx.core.Vertx)20 Matchers.containsString (org.hamcrest.Matchers.containsString)20 Watcher (io.fabric8.kubernetes.client.Watcher)18 KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients)18 KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder)18
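
Of the classes aggregated above, KafkaTopicBuilder is the most frequent companion of KafkaTopic (80 co-occurrences). For orientation, here is a minimal sketch of how a KafkaTopic custom resource is typically assembled with that builder, in the same style as the KafkaTopicTemplates used in the examples; the topic, namespace, and cluster names are placeholders and the retention config is illustrative only:

// Hypothetical sketch: building a KafkaTopic custom resource with KafkaTopicBuilder.
import io.strimzi.api.kafka.model.KafkaTopic;
import io.strimzi.api.kafka.model.KafkaTopicBuilder;
import java.util.Collections;

public class KafkaTopicBuilderSketch {
    public static KafkaTopic exampleTopic() {
        return new KafkaTopicBuilder()
            .withNewMetadata()
                .withName("my-topic")
                .withNamespace("my-namespace")
                // the Topic Operator pairs the KafkaTopic with its Kafka cluster via this label
                .withLabels(Collections.singletonMap("strimzi.io/cluster", "my-cluster"))
            .endMetadata()
            .withNewSpec()
                .withPartitions(3)
                .withReplicas(3)
                .addToConfig("retention.ms", "7200000")
            .endSpec()
            .build();
    }
}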