
Example 36 with KRaftNotSupported

Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.

From the class KafkaST, method testKafkaJBODDeleteClaimsTrue.

@ParallelNamespaceTest
@KRaftNotSupported("JBOD is not supported by KRaft mode and is used in this test case.")
void testKafkaJBODDeleteClaimsTrue(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(namespace, extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final int kafkaReplicas = 2;
    final String diskSizeGi = "10";
    JbodStorage jbodStorage = new JbodStorageBuilder()
        .withVolumes(
            new PersistentClaimStorageBuilder().withDeleteClaim(true).withId(0).withSize(diskSizeGi + "Gi").build(),
            new PersistentClaimStorageBuilder().withDeleteClaim(true).withId(1).withSize(diskSizeGi + "Gi").build())
        .build();
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaJBOD(clusterName, kafkaReplicas, jbodStorage).build());
    // kafka cluster already deployed
    verifyVolumeNamesAndLabels(namespaceName, clusterName, kafkaReplicas, 2, diskSizeGi);
    final int volumesCount = kubeClient(namespaceName).listPersistentVolumeClaims(namespaceName, clusterName).size();
    LOGGER.info("Deleting cluster");
    cmdKubeClient(namespaceName).deleteByName("kafka", clusterName);
    LOGGER.info("Waiting for PVC deletion");
    PersistentVolumeClaimUtils.waitForJbodStorageDeletion(namespaceName, volumesCount, jbodStorage, clusterName);
}
Also used : JbodStorageBuilder(io.strimzi.api.kafka.model.storage.JbodStorageBuilder) PersistentClaimStorageBuilder(io.strimzi.api.kafka.model.storage.PersistentClaimStorageBuilder) Matchers.containsString(org.hamcrest.Matchers.containsString) Matchers.emptyOrNullString(org.hamcrest.Matchers.emptyOrNullString) TestUtils.fromYamlString(io.strimzi.test.TestUtils.fromYamlString) JbodStorage(io.strimzi.api.kafka.model.storage.JbodStorage) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
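
For context, a minimal sketch of how an annotation like KRaftNotSupported can be honoured at runtime. This is an illustrative JUnit 5 ExecutionCondition wired through a composed annotation and an assumed environment flag (STRIMZI_USE_KRAFT_IN_TESTS); it is not the actual Strimzi extension code.

// Hypothetical sketch, not the Strimzi implementation: skip annotated tests when KRaft mode is active.
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.junit.jupiter.api.extension.ConditionEvaluationResult;
import org.junit.jupiter.api.extension.ExecutionCondition;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.platform.commons.support.AnnotationSupport;

@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
@ExtendWith(KRaftSkipCondition.class)
@interface KRaftNotSupportedSketch {
    // Reason why the test cannot run against a KRaft-based cluster
    String value();
}

class KRaftSkipCondition implements ExecutionCondition {
    @Override
    public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) {
        // Assumed flag name; the real environment variable may differ
        boolean kraftEnabled = "true".equals(System.getenv("STRIMZI_USE_KRAFT_IN_TESTS"));
        return AnnotationSupport.findAnnotation(context.getElement(), KRaftNotSupportedSketch.class)
                .filter(annotation -> kraftEnabled)
                .map(annotation -> ConditionEvaluationResult.disabled("KRaft mode: " + annotation.value()))
                .orElseGet(() -> ConditionEvaluationResult.enabled("ZooKeeper mode or annotation absent"));
    }
}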

Example 37 with KRaftNotSupported

Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.

From the class KafkaST, method testAppDomainLabels.

@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
@KRaftNotSupported("TopicOperator is not supported by KRaft mode and is used in this test class")
void testAppDomainLabels(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext);
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 3, 1).build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(testStorage.getClusterName(), testStorage.getTopicName()).build());
    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withTopicName(testStorage.getTopicName())
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(testStorage.getClusterName()))
        .withNamespaceName(testStorage.getNamespaceName())
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .withMessageCount(MESSAGE_COUNT)
        .build();
    LOGGER.info("---> PODS <---");
    List<Pod> pods = kubeClient(testStorage.getNamespaceName()).listPods(testStorage.getNamespaceName(), testStorage.getClusterName()).stream().filter(pod -> pod.getMetadata().getName().startsWith(testStorage.getClusterName())).collect(Collectors.toList());
    for (Pod pod : pods) {
        LOGGER.info("Getting labels from {} pod", pod.getMetadata().getName());
        verifyAppLabels(pod.getMetadata().getLabels());
    }
    LOGGER.info("---> STATEFUL SETS <---");
    Map<String, String> kafkaLabels = StUtils.getLabelsOfStatefulSetOrStrimziPodSet(testStorage.getNamespaceName(), KafkaResources.kafkaStatefulSetName(testStorage.getClusterName()));
    LOGGER.info("Getting labels from stateful set of kafka resource");
    verifyAppLabels(kafkaLabels);
    Map<String, String> zooLabels = StUtils.getLabelsOfStatefulSetOrStrimziPodSet(testStorage.getNamespaceName(), KafkaResources.zookeeperStatefulSetName(testStorage.getClusterName()));
    LOGGER.info("Getting labels from stateful set of zookeeper resource");
    verifyAppLabels(zooLabels);
    LOGGER.info("---> SERVICES <---");
    List<Service> services = kubeClient(testStorage.getNamespaceName()).listServices(testStorage.getNamespaceName()).stream().filter(service -> service.getMetadata().getName().startsWith(testStorage.getClusterName())).collect(Collectors.toList());
    for (Service service : services) {
        LOGGER.info("Getting labels from {} service", service.getMetadata().getName());
        verifyAppLabels(service.getMetadata().getLabels());
    }
    LOGGER.info("---> SECRETS <---");
    List<Secret> secrets = kubeClient(testStorage.getNamespaceName()).listSecrets(testStorage.getNamespaceName()).stream().filter(secret -> secret.getMetadata().getName().startsWith(testStorage.getClusterName()) && secret.getType().equals("Opaque")).collect(Collectors.toList());
    for (Secret secret : secrets) {
        LOGGER.info("Getting labels from {} secret", secret.getMetadata().getName());
        verifyAppLabelsForSecretsAndConfigMaps(secret.getMetadata().getLabels());
    }
    LOGGER.info("---> CONFIG MAPS <---");
    List<ConfigMap> configMaps = kubeClient(testStorage.getNamespaceName()).listConfigMapsInSpecificNamespace(testStorage.getNamespaceName(), testStorage.getClusterName());
    for (ConfigMap configMap : configMaps) {
        LOGGER.info("Getting labels from {} config map", configMap.getMetadata().getName());
        verifyAppLabelsForSecretsAndConfigMaps(configMap.getMetadata().getLabels());
    }
    resourceManager.createResource(extensionContext, kafkaClients.producerStrimzi(), kafkaClients.consumerStrimzi());
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
}
Also used : KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) Quantity(io.fabric8.kubernetes.api.model.Quantity) CoreMatchers.is(org.hamcrest.CoreMatchers.is) KafkaClusterSpec(io.strimzi.api.kafka.model.KafkaClusterSpec) CoreMatchers(org.hamcrest.CoreMatchers) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) KafkaResource(io.strimzi.systemtest.resources.crd.KafkaResource) KubeClusterResource.cmdKubeClient(io.strimzi.test.k8s.KubeClusterResource.cmdKubeClient) Matchers.not(org.hamcrest.Matchers.not) ExecResult(io.strimzi.test.executor.ExecResult) KafkaTopicUtils(io.strimzi.systemtest.utils.kafkaUtils.KafkaTopicUtils) SecurityContextBuilder(io.fabric8.kubernetes.api.model.SecurityContextBuilder) KafkaResources.kafkaStatefulSetName(io.strimzi.api.kafka.model.KafkaResources.kafkaStatefulSetName) Matchers.hasItems(org.hamcrest.Matchers.hasItems) PersistentVolumeClaimUtils(io.strimzi.systemtest.utils.kubeUtils.objects.PersistentVolumeClaimUtils) EntityTopicOperatorSpec(io.strimzi.api.kafka.model.EntityTopicOperatorSpec) KafkaResources(io.strimzi.api.kafka.model.KafkaResources) Arrays.asList(java.util.Arrays.asList) SystemProperty(io.strimzi.api.kafka.model.SystemProperty) Duration(java.time.Duration) Map(java.util.Map) ConfigMapUtils(io.strimzi.systemtest.utils.kubeUtils.controllers.ConfigMapUtils) Tag(org.junit.jupiter.api.Tag) ServiceUtils(io.strimzi.systemtest.utils.kubeUtils.objects.ServiceUtils) StUtils(io.strimzi.systemtest.utils.StUtils) RollingUpdateUtils(io.strimzi.systemtest.utils.RollingUpdateUtils) JbodStorageBuilder(io.strimzi.api.kafka.model.storage.JbodStorageBuilder) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) Matchers.notNullValue(org.hamcrest.Matchers.notNullValue) StUtils.configMap2Properties(io.strimzi.systemtest.utils.StUtils.configMap2Properties) INTERNAL_CLIENTS_USED(io.strimzi.systemtest.Constants.INTERNAL_CLIENTS_USED) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) Instant(java.time.Instant) HasMetadata(io.fabric8.kubernetes.api.model.HasMetadata) Collectors(java.util.stream.Collectors) ClientUtils(io.strimzi.systemtest.utils.ClientUtils) List(java.util.List) Labels(io.strimzi.operator.common.model.Labels) Logger(org.apache.logging.log4j.Logger) KafkaTopicTemplates(io.strimzi.systemtest.templates.crd.KafkaTopicTemplates) PersistentVolumeClaim(io.fabric8.kubernetes.api.model.PersistentVolumeClaim) Secret(io.fabric8.kubernetes.api.model.Secret) Optional(java.util.Optional) KafkaTopicResource(io.strimzi.systemtest.resources.crd.KafkaTopicResource) Matchers.anyOf(org.hamcrest.Matchers.anyOf) CRUISE_CONTROL(io.strimzi.systemtest.Constants.CRUISE_CONTROL) Matchers.containsString(org.hamcrest.Matchers.containsString) AbstractST(io.strimzi.systemtest.AbstractST) Environment(io.strimzi.systemtest.Environment) ZookeeperClusterSpec(io.strimzi.api.kafka.model.ZookeeperClusterSpec) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) LOADBALANCER_SUPPORTED(io.strimzi.systemtest.Constants.LOADBALANCER_SUPPORTED) ParallelSuite(io.strimzi.systemtest.annotations.ParallelSuite) ResourceRequirementsBuilder(io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder) KafkaResources.zookeeperStatefulSetName(io.strimzi.api.kafka.model.KafkaResources.zookeeperStatefulSetName) HashMap(java.util.HashMap) ExtensionContext(org.junit.jupiter.api.extension.ExtensionContext) 
StUtils.stringToProperties(io.strimzi.systemtest.utils.StUtils.stringToProperties) TestStorage(io.strimzi.systemtest.storage.TestStorage) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) PersistentClaimStorageBuilder(io.strimzi.api.kafka.model.storage.PersistentClaimStorageBuilder) PodUtils(io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils) KafkaTopicList(io.strimzi.api.kafka.KafkaTopicList) Matchers.emptyOrNullString(org.hamcrest.Matchers.emptyOrNullString) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) TestUtils(io.strimzi.test.TestUtils) TestUtils.fromYamlString(io.strimzi.test.TestUtils.fromYamlString) Service(io.fabric8.kubernetes.api.model.Service) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) CoreMatchers.nullValue(org.hamcrest.CoreMatchers.nullValue) EntityOperatorSpec(io.strimzi.api.kafka.model.EntityOperatorSpec) KafkaTemplates(io.strimzi.systemtest.templates.crd.KafkaTemplates) JbodStorage(io.strimzi.api.kafka.model.storage.JbodStorage) KafkaUtils(io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils) Properties(java.util.Properties) Constants(io.strimzi.systemtest.Constants) Pod(io.fabric8.kubernetes.api.model.Pod) EntityUserOperatorSpec(io.strimzi.api.kafka.model.EntityUserOperatorSpec) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaCmdClient(io.strimzi.systemtest.cli.KafkaCmdClient) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) KubeClusterResource.kubeClient(io.strimzi.test.k8s.KubeClusterResource.kubeClient) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) DeploymentUtils(io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils) Matchers.hasItem(org.hamcrest.Matchers.hasItem) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) KafkaUserTemplates(io.strimzi.systemtest.templates.crd.KafkaUserTemplates) SystemPropertyBuilder(io.strimzi.api.kafka.model.SystemPropertyBuilder) Kafka(io.strimzi.api.kafka.model.Kafka) LogManager(org.apache.logging.log4j.LogManager) REGRESSION(io.strimzi.systemtest.Constants.REGRESSION) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) Pod(io.fabric8.kubernetes.api.model.Pod) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) Service(io.fabric8.kubernetes.api.model.Service) Matchers.containsString(org.hamcrest.Matchers.containsString) Matchers.emptyOrNullString(org.hamcrest.Matchers.emptyOrNullString) TestUtils.fromYamlString(io.strimzi.test.TestUtils.fromYamlString) Secret(io.fabric8.kubernetes.api.model.Secret) TestStorage(io.strimzi.systemtest.storage.TestStorage) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
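
The test above relies on verifyAppLabels and verifyAppLabelsForSecretsAndConfigMaps, which are not shown here. A hypothetical sketch of such checks, assuming the recommended app.kubernetes.io label set, could look like this:

// Assumed helper, not copied from KafkaST: assert the recommended app.kubernetes.io labels are present.
import java.util.Map;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasKey;

final class AppLabelAssertions {
    private AppLabelAssertions() { }

    static void verifyAppLabels(Map<String, String> labels) {
        assertThat("Label app.kubernetes.io/name is missing", labels, hasKey("app.kubernetes.io/name"));
        assertThat("Label app.kubernetes.io/instance is missing", labels, hasKey("app.kubernetes.io/instance"));
        assertThat("Label app.kubernetes.io/managed-by is missing", labels, hasKey("app.kubernetes.io/managed-by"));
    }

    static void verifyAppLabelsForSecretsAndConfigMaps(Map<String, String> labels) {
        // Secrets and ConfigMaps are assumed to carry at least the name and instance labels
        assertThat(labels, hasKey("app.kubernetes.io/name"));
        assertThat(labels, hasKey("app.kubernetes.io/instance"));
    }
}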

Example 38 with KRaftNotSupported

Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.

From the class DynamicConfST, method testUpdateToExternalListenerCausesRollingRestartUsingExternalClients.

@IsolatedTest("Using more tha one Kafka cluster in one namespace")
@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(ROLLING_UPDATE)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
void testUpdateToExternalListenerCausesRollingRestartUsingExternalClients(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    Map<String, Object> deepCopyOfShardKafkaConfig = kafkaConfig.entrySet().stream().collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
    LabelSelector kafkaSelector = KafkaResource.getLabelSelector(clusterName, KafkaResources.kafkaStatefulSetName(clusterName));
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, KAFKA_REPLICAS, 1)
        .editMetadata()
            .withNamespace(namespace)
        .endMetadata()
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                    .withPort(9094)
                    .withType(KafkaListenerType.NODEPORT)
                    .withTls(false)
                    .build())
                .withConfig(deepCopyOfShardKafkaConfig)
            .endKafka()
        .endSpec()
        .build());
    Map<String, String> kafkaPods = PodUtils.podSnapshot(namespace, kafkaSelector);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName, namespace).build());
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(namespace, clusterName, userName).build());
    ExternalKafkaClient externalKafkaClientTls = new ExternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(clusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withKafkaUsername(userName)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
        .build();
    ExternalKafkaClient externalKafkaClientPlain = new ExternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(clusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withSecurityProtocol(SecurityProtocol.PLAINTEXT)
        .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
        .build();
    externalKafkaClientPlain.verifyProducedAndConsumedMessages(externalKafkaClientPlain.sendMessagesPlain(), externalKafkaClientPlain.receiveMessagesPlain());
    assertThrows(Exception.class, () -> {
        externalKafkaClientTls.sendMessagesTls();
        externalKafkaClientTls.receiveMessagesTls();
        LOGGER.error("Producer & Consumer did not send and receive messages because external listener is set to plain communication");
    });
    LOGGER.info("Updating listeners of Kafka cluster");
    KafkaResource.replaceKafkaResourceInSpecificNamespace(clusterName, k -> {
        k.getSpec().getKafka().setListeners(Arrays.asList(
            new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .build(),
            new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                .withPort(9094)
                .withType(KafkaListenerType.NODEPORT)
                .withTls(true)
                .withNewKafkaListenerAuthenticationTlsAuth()
                .endKafkaListenerAuthenticationTlsAuth()
                .build()));
    }, namespace);
    // TODO: remove it ?
    kafkaPods = RollingUpdateUtils.waitTillComponentHasRolled(namespace, kafkaSelector, KAFKA_REPLICAS, kafkaPods);
    externalKafkaClientTls.verifyProducedAndConsumedMessages(externalKafkaClientTls.sendMessagesTls() + MESSAGE_COUNT, externalKafkaClientTls.receiveMessagesTls());
    assertThrows(Exception.class, () -> {
        externalKafkaClientPlain.sendMessagesPlain();
        externalKafkaClientPlain.receiveMessagesPlain();
        LOGGER.error("Producer & Consumer did not send and receive messages because external listener is set to tls communication");
    });
    LOGGER.info("Updating listeners of Kafka cluster");
    KafkaResource.replaceKafkaResourceInSpecificNamespace(clusterName, k -> {
        k.getSpec().getKafka().setListeners(Collections.singletonList(new GenericKafkaListenerBuilder().withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9094).withType(KafkaListenerType.NODEPORT).withTls(false).build()));
    }, namespace);
    RollingUpdateUtils.waitTillComponentHasRolled(namespace, kafkaSelector, KAFKA_REPLICAS, kafkaPods);
    assertThrows(Exception.class, () -> {
        externalKafkaClientTls.sendMessagesTls();
        externalKafkaClientTls.receiveMessagesTls();
        LOGGER.error("Producer & Consumer did not send and receive messages because external listener is set to plain communication");
    });
    externalKafkaClientPlain.verifyProducedAndConsumedMessages(externalKafkaClientPlain.sendMessagesPlain() + MESSAGE_COUNT, externalKafkaClientPlain.receiveMessagesPlain());
}
Also used : AbstractST(io.strimzi.systemtest.AbstractST) Environment(io.strimzi.systemtest.Environment) CoreMatchers.is(org.hamcrest.CoreMatchers.is) Assertions.assertThrows(org.junit.jupiter.api.Assertions.assertThrows) KafkaClusterSpec(io.strimzi.api.kafka.model.KafkaClusterSpec) BeforeEach(org.junit.jupiter.api.BeforeEach) Arrays(java.util.Arrays) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) ParallelSuite(io.strimzi.systemtest.annotations.ParallelSuite) KafkaResource(io.strimzi.systemtest.resources.crd.KafkaResource) ResourceManager.kubeClient(io.strimzi.systemtest.resources.ResourceManager.kubeClient) HashMap(java.util.HashMap) ExternalKafkaClient(io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient) ExtensionContext(org.junit.jupiter.api.extension.ExtensionContext) SecurityProtocol(org.apache.kafka.common.security.auth.SecurityProtocol) PodUtils(io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils) KafkaResources(io.strimzi.api.kafka.model.KafkaResources) DYNAMIC_CONFIGURATION(io.strimzi.systemtest.Constants.DYNAMIC_CONFIGURATION) Map(java.util.Map) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) Tag(org.junit.jupiter.api.Tag) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) NODEPORT_SUPPORTED(io.strimzi.systemtest.Constants.NODEPORT_SUPPORTED) StUtils(io.strimzi.systemtest.utils.StUtils) ROLLING_UPDATE(io.strimzi.systemtest.Constants.ROLLING_UPDATE) ResourceManager.cmdKubeClient(io.strimzi.systemtest.resources.ResourceManager.cmdKubeClient) KafkaTemplates(io.strimzi.systemtest.templates.crd.KafkaTemplates) RollingUpdateUtils(io.strimzi.systemtest.utils.RollingUpdateUtils) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) Constants(io.strimzi.systemtest.Constants) EXTERNAL_CLIENTS_USED(io.strimzi.systemtest.Constants.EXTERNAL_CLIENTS_USED) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) Collectors(java.util.stream.Collectors) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest) Logger(org.apache.logging.log4j.Logger) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) KafkaTopicTemplates(io.strimzi.systemtest.templates.crd.KafkaTopicTemplates) KafkaUserTemplates(io.strimzi.systemtest.templates.crd.KafkaUserTemplates) TestKafkaVersion(io.strimzi.systemtest.utils.TestKafkaVersion) LogManager(org.apache.logging.log4j.LogManager) Collections(java.util.Collections) REGRESSION(io.strimzi.systemtest.Constants.REGRESSION) ExternalKafkaClient(io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest) Tag(org.junit.jupiter.api.Tag)
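
Both rolling-restart checks in this test follow the snapshot-and-wait pattern (PodUtils.podSnapshot followed by RollingUpdateUtils.waitTillComponentHasRolled). A conceptual sketch of that pattern with the fabric8 client, using assumed helper names rather than the Strimzi utilities, is shown below:

// Conceptual sketch: a pod "snapshot" maps pod names to UIDs; a rolling update has finished
// once every selected pod has a UID not present in the snapshot and the replica count is back.
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import io.fabric8.kubernetes.api.model.LabelSelector;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.client.KubernetesClient;

final class RollingUpdateSketch {
    private RollingUpdateSketch() { }

    static Map<String, String> podSnapshot(KubernetesClient client, String namespace, LabelSelector selector) {
        return client.pods().inNamespace(namespace).withLabels(selector.getMatchLabels()).list().getItems().stream()
                .collect(Collectors.toMap(p -> p.getMetadata().getName(), p -> p.getMetadata().getUid()));
    }

    static boolean hasRolled(KubernetesClient client, String namespace, LabelSelector selector,
                             int expectedPods, Map<String, String> snapshot) {
        List<Pod> pods = client.pods().inNamespace(namespace).withLabels(selector.getMatchLabels()).list().getItems();
        return pods.size() == expectedPods && pods.stream()
                .noneMatch(p -> p.getMetadata().getUid().equals(snapshot.get(p.getMetadata().getName())));
    }
}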

Example 39 with KRaftNotSupported

Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.

From the class ListenersST, method testCustomCertLoadBalancerAndTlsRollingUpdate.

@ParallelNamespaceTest
@Tag(LOADBALANCER_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(INTERNAL_CLIENTS_USED)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
@SuppressWarnings({ "checkstyle:MethodLength" })
void testCustomCertLoadBalancerAndTlsRollingUpdate(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext);
    final String clusterCustomCertServer1 = testStorage.getClusterName() + "-" + customCertServer1;
    final String clusterCustomCertServer2 = testStorage.getClusterName() + "-" + customCertServer2;
    SecretUtils.createCustomSecret(clusterCustomCertServer1, testStorage.getClusterName(), testStorage.getNamespaceName(), STRIMZI_CERT_AND_KEY_1);
    SecretUtils.createCustomSecret(clusterCustomCertServer2, testStorage.getClusterName(), testStorage.getNamespaceName(), STRIMZI_CERT_AND_KEY_2);
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(testStorage.getClusterName(), 3)
        .editSpec()
            .editKafka()
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9113)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9114)
                        .withType(KafkaListenerType.LOADBALANCER)
                        .withTls(true)
                        .withNewConfiguration()
                            .withFinalizers(LB_FINALIZERS)
                        .endConfiguration()
                        .build())
            .endKafka()
        .endSpec()
        .build());
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(testStorage.getClusterName(), testStorage.getUserName()).build());
    String externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    String externalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), testStorage.getClusterName() + "-cluster-ca-cert", "ca.crt");
    String internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    LOGGER.info("Check if KafkaStatus certificates from external listeners are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    // External secret cert is same as internal in this case
    assertThat(externalSecretCerts, is(internalCerts));
    ExternalKafkaClient externalKafkaClient = new ExternalKafkaClient.Builder()
        .withTopicName(testStorage.getTopicName())
        .withNamespaceName(testStorage.getNamespaceName())
        .withClusterName(testStorage.getClusterName())
        .withKafkaUsername(testStorage.getUserName())
        .withMessageCount(MESSAGE_COUNT)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withCertificateAuthorityCertificateName(null)
        .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
        .build();
    externalKafkaClient.verifyProducedAndConsumedMessages(externalKafkaClient.sendMessagesTls(), externalKafkaClient.receiveMessagesTls());
    Map<String, String> kafkaSnapshot = PodUtils.podSnapshot(testStorage.getNamespaceName(), testStorage.getKafkaSelector());
    KafkaResource.replaceKafkaResourceInSpecificNamespace(testStorage.getClusterName(), kafka -> {
        kafka.getSpec().getKafka().setListeners(asList(
            new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9113)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withNewConfiguration()
                    .withNewBrokerCertChainAndKey()
                        .withSecretName(clusterCustomCertServer2)
                        .withKey("ca.key")
                        .withCertificate("ca.crt")
                    .endBrokerCertChainAndKey()
                .endConfiguration()
                .build(),
            new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                .withPort(9114)
                .withType(KafkaListenerType.LOADBALANCER)
                .withTls(true)
                .withNewConfiguration()
                    .withNewBrokerCertChainAndKey()
                        .withSecretName(clusterCustomCertServer1)
                        .withKey("ca.key")
                        .withCertificate("ca.crt")
                    .endBrokerCertChainAndKey()
                    .withFinalizers(LB_FINALIZERS)
                .endConfiguration()
                .build()));
    }, testStorage.getNamespaceName());
    kafkaSnapshot = RollingUpdateUtils.waitTillComponentHasRolled(testStorage.getNamespaceName(), testStorage.getKafkaSelector(), 3, kafkaSnapshot);
    KafkaUtils.waitForKafkaStatusUpdate(testStorage.getNamespaceName(), testStorage.getClusterName());
    externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    externalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), clusterCustomCertServer1, "ca.crt");
    internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    String internalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), clusterCustomCertServer2, "ca.crt");
    LOGGER.info("Check if KafkaStatus certificates are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    assertThat(internalSecretCerts, is(internalCerts));
    externalKafkaClient = externalKafkaClient.toBuilder().withCertificateAuthorityCertificateName(clusterCustomCertServer1).build();
    externalKafkaClient.verifyProducedAndConsumedMessages(externalKafkaClient.sendMessagesTls(), externalKafkaClient.receiveMessagesTls());
    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(testStorage.getClusterName()) + ":9113")
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(testStorage.getUserName())
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .withConsumerGroup("consumer-group-certs-6")
        .withCaCertSecretName(clusterCustomCertServer2)
        .build();
    resourceManager.createResource(extensionContext, kafkaClients.producerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getProducerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    kafkaClients = new KafkaClientsBuilder(kafkaClients).withMessageCount(3 * MESSAGE_COUNT).build();
    resourceManager.createResource(extensionContext, kafkaClients.consumerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT * 3);
    SecretUtils.createCustomSecret(clusterCustomCertServer1, testStorage.getClusterName(), testStorage.getNamespaceName(), STRIMZI_CERT_AND_KEY_2);
    SecretUtils.createCustomSecret(clusterCustomCertServer2, testStorage.getClusterName(), testStorage.getNamespaceName(), STRIMZI_CERT_AND_KEY_1);
    kafkaSnapshot = RollingUpdateUtils.waitTillComponentHasRolled(testStorage.getNamespaceName(), testStorage.getKafkaSelector(), 3, kafkaSnapshot);
    externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    externalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), clusterCustomCertServer1, "ca.crt");
    internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    internalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), clusterCustomCertServer2, "ca.crt");
    LOGGER.info("Check if KafkaStatus certificates are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    assertThat(internalSecretCerts, is(internalCerts));
    int sent = externalKafkaClient.sendMessagesTls() + MESSAGE_COUNT;
    externalKafkaClient.setMessageCount(2 * MESSAGE_COUNT);
    externalKafkaClient.verifyProducedAndConsumedMessages(sent, externalKafkaClient.receiveMessagesTls());
    kafkaClients = new KafkaClientsBuilder(kafkaClients).withConsumerGroup("consumer-group-certs-71").withMessageCount(MESSAGE_COUNT).build();
    resourceManager.createResource(extensionContext, kafkaClients.producerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getProducerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    kafkaClients = new KafkaClientsBuilder(kafkaClients).withMessageCount(MESSAGE_COUNT * 5).build();
    resourceManager.createResource(extensionContext, kafkaClients.consumerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT * 5);
    KafkaResource.replaceKafkaResourceInSpecificNamespace(testStorage.getClusterName(), kafka -> {
        kafka.getSpec().getKafka().setListeners(asList(
            new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9113)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withNewConfiguration()
                    .withNewBrokerCertChainAndKey()
                        .withSecretName(clusterCustomCertServer2)
                        .withKey("ca.key")
                        .withCertificate("ca.crt")
                    .endBrokerCertChainAndKey()
                .endConfiguration()
                .build(),
            new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                .withPort(9114)
                .withType(KafkaListenerType.LOADBALANCER)
                .withNewConfiguration()
                    .withFinalizers(LB_FINALIZERS)
                .endConfiguration()
                .withTls(true)
                .build()));
    }, testStorage.getNamespaceName());
    RollingUpdateUtils.waitTillComponentHasRolled(testStorage.getNamespaceName(), testStorage.getKafkaSelector(), 3, kafkaSnapshot);
    KafkaUtils.waitForKafkaStatusUpdate(testStorage.getNamespaceName(), testStorage.getClusterName());
    externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    externalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), testStorage.getClusterName() + "-cluster-ca-cert", "ca.crt");
    internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, testStorage.getNamespaceName(), testStorage.getClusterName());
    internalSecretCerts = getKafkaSecretCertificates(testStorage.getNamespaceName(), clusterCustomCertServer2, "ca.crt");
    LOGGER.info("Check if KafkaStatus certificates are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    assertThat(internalSecretCerts, is(internalCerts));
    externalKafkaClient = externalKafkaClient.toBuilder().withCertificateAuthorityCertificateName(null).withMessageCount(MESSAGE_COUNT).build();
    sent = externalKafkaClient.sendMessagesTls() + MESSAGE_COUNT;
    externalKafkaClient.setMessageCount(2 * MESSAGE_COUNT);
    externalKafkaClient.verifyProducedAndConsumedMessages(sent, externalKafkaClient.receiveMessagesTls());
    kafkaClients = new KafkaClientsBuilder(kafkaClients).withConsumerGroup("consumer-group-certs-83").withMessageCount(MESSAGE_COUNT * 6).build();
    resourceManager.createResource(extensionContext, kafkaClients.consumerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT * 6);
}
Also used : KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) ExternalKafkaClient(io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) TestStorage(io.strimzi.systemtest.storage.TestStorage) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
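
getKafkaSecretCertificates is used throughout this test to read the "ca.crt" entry from a Secret for comparison against the listener certificates reported in the Kafka status. A hypothetical helper with that behaviour, assuming base64-encoded Secret data and the fabric8 client, might look like this:

// Assumed helper, not the ListenersST code: read and decode a certificate entry from a Secret.
import java.nio.charset.StandardCharsets;
import java.util.Base64;

import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.client.KubernetesClient;

final class SecretCertSketch {
    private SecretCertSketch() { }

    static String certificateFromSecret(KubernetesClient client, String namespace, String secretName, String key) {
        Secret secret = client.secrets().inNamespace(namespace).withName(secretName).get();
        // Secret data values are stored base64-encoded
        String encoded = secret.getData().get(key);
        return new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8);
    }
}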

Example 40 with KRaftNotSupported

Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.

From the class ListenersST, method testMessagesTlsScramShaWithPredefinedPassword.

@ParallelNamespaceTest
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
void testMessagesTlsScramShaWithPredefinedPassword(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext);
    final String firstUnencodedPassword = "completely_secret_password";
    final String secondUnencodedPassword = "completely_different_secret_password";
    final String firstEncodedPassword = Base64.getEncoder().encodeToString(firstUnencodedPassword.getBytes(StandardCharsets.UTF_8));
    final String secondEncodedPassword = Base64.getEncoder().encodeToString(secondUnencodedPassword.getBytes(StandardCharsets.UTF_8));
    final String secretName = testStorage.getClusterName() + "-secret";
    Secret password = new SecretBuilder().withNewMetadata().withName(secretName).endMetadata().addToData("password", firstEncodedPassword).build();
    kubeClient().namespace(testStorage.getNamespaceName()).createSecret(password);
    assertThat("Password in secret is not correct", kubeClient().namespace(testStorage.getNamespaceName()).getSecret(secretName).getData().get("password"), is(firstEncodedPassword));
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(testStorage.getClusterName(), testStorage.getUserName())
        .editSpec()
            .withNewKafkaUserScramSha512ClientAuthentication()
                .withNewPassword()
                    .withNewValueFrom()
                        .withNewSecretKeyRef("password", secretName, false)
                    .endValueFrom()
                .endPassword()
            .endKafkaUserScramSha512ClientAuthentication()
        .endSpec()
        .build();
    resourceManager.createResource(extensionContext,
        KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 3)
            .editSpec()
                .editKafka()
                    .withListeners(new GenericKafkaListenerBuilder()
                        .withType(KafkaListenerType.INTERNAL)
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9096)
                        .withTls(true)
                        .withNewKafkaListenerAuthenticationScramSha512Auth()
                        .endKafkaListenerAuthenticationScramSha512Auth()
                        .build())
                .endKafka()
            .endSpec()
            .build(),
        kafkaUser,
        KafkaTopicTemplates.topic(testStorage.getClusterName(), testStorage.getUserName()).build());
    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(testStorage.getClusterName()) + ":9096")
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(testStorage.getUserName())
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .build();
    resourceManager.createResource(extensionContext, kafkaClients.producerScramShaTlsStrimzi(testStorage.getClusterName()), kafkaClients.consumerScramShaTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
    LOGGER.info("Changing password in secret: {}, we should be able to send/receive messages", secretName);
    password = new SecretBuilder(password).addToData("password", secondEncodedPassword).build();
    kubeClient().namespace(testStorage.getNamespaceName()).createSecret(password);
    SecretUtils.waitForUserPasswordChange(testStorage.getNamespaceName(), testStorage.getUserName(), secondEncodedPassword);
    LOGGER.info("Receiving messages with new password");
    kafkaClients = new KafkaClientsBuilder(kafkaClients).withConsumerGroup(ClientUtils.generateRandomConsumerGroup()).build();
    resourceManager.createResource(extensionContext, kafkaClients.producerScramShaTlsStrimzi(testStorage.getClusterName()), kafkaClients.consumerScramShaTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
}
Also used : Secret(io.fabric8.kubernetes.api.model.Secret) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) TestStorage(io.strimzi.systemtest.storage.TestStorage) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) KRaftNotSupported(io.strimzi.systemtest.annotations.KRaftNotSupported) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
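
SecretUtils.waitForUserPasswordChange is the step that gates the second send/receive round. A conceptual sketch of such a wait, assuming the User Operator stores the password under the "password" key of a Secret named after the KafkaUser, could be:

// Conceptual sketch, not the Strimzi SecretUtils code: poll until the user's Secret carries the new password.
import java.time.Duration;

import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.client.KubernetesClient;

final class UserPasswordSketch {
    private UserPasswordSketch() { }

    static void waitForUserPasswordChange(KubernetesClient client, String namespace,
                                          String userName, String expectedEncodedPassword) throws InterruptedException {
        long deadline = System.currentTimeMillis() + Duration.ofMinutes(5).toMillis();
        while (System.currentTimeMillis() < deadline) {
            Secret userSecret = client.secrets().inNamespace(namespace).withName(userName).get();
            if (userSecret != null && expectedEncodedPassword.equals(userSecret.getData().get("password"))) {
                // The User Operator has propagated the new password
                return;
            }
            Thread.sleep(1_000);
        }
        throw new AssertionError("Password for KafkaUser " + userName + " was not updated in time");
    }
}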

Aggregations

KRaftNotSupported (io.strimzi.systemtest.annotations.KRaftNotSupported) 162
ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest) 138
Tag (org.junit.jupiter.api.Tag) 100
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) 94
TestStorage (io.strimzi.systemtest.storage.TestStorage) 94
KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) 86
GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) 80
Matchers.containsString (org.hamcrest.Matchers.containsString) 74
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString) 44
LabelSelector (io.fabric8.kubernetes.api.model.LabelSelector) 36
ExternalKafkaClient (io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient) 32
Secret (io.fabric8.kubernetes.api.model.Secret) 30
HashMap (java.util.HashMap) 30
KafkaListenerAuthenticationTls (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls) 28
SecretBuilder (io.fabric8.kubernetes.api.model.SecretBuilder) 24
KafkaResources (io.strimzi.api.kafka.model.KafkaResources) 24
AbstractST (io.strimzi.systemtest.AbstractST) 24
Constants (io.strimzi.systemtest.Constants) 24
REGRESSION (io.strimzi.systemtest.Constants.REGRESSION) 24
KafkaResource (io.strimzi.systemtest.resources.crd.KafkaResource) 24