
Example 16 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From class MirrorMaker2IsolatedST, method testMirrorMaker2TlsAndTlsClientAuth.

/**
 * Test mirroring messages by MirrorMaker 2.0 over TLS transport using mutual TLS authentication.
 */
@SuppressWarnings({ "checkstyle:MethodLength" })
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
void testMirrorMaker2TlsAndTlsClientAuth(ExtensionContext extensionContext) throws Exception {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String kafkaClusterSourceName = clusterName + "-source";
    String kafkaClusterTargetName = clusterName + "-target";
    String topicName = "availability-topic-source-" + mapWithTestTopics.get(extensionContext.getDisplayName());
    String topicSourceNameMirrored = kafkaClusterSourceName + "." + topicName;
    String topicSourceName = MIRRORMAKER2_TOPIC_NAME + "-" + rng.nextInt(Integer.MAX_VALUE);
    String topicTargetName = kafkaClusterSourceName + "." + topicSourceName;
    String kafkaUserSourceName = clusterName + "-my-user-source";
    String kafkaUserTargetName = clusterName + "-my-user-target";
    // Deploy source Kafka with a TLS listener and mutual TLS authentication
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
        .editSpec().editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093)
                .withType(KafkaListenerType.INTERNAL).withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls()).build())
        .endKafka().endSpec().build());
    // Deploy target Kafka with a TLS listener and mutual TLS authentication
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
        .editSpec().editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093)
                .withType(KafkaListenerType.INTERNAL).withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls()).build())
        .endKafka().endSpec().build());
    // Deploy topic
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName, 3).build());
    // Create Kafka users for the source and target clusters
    KafkaUser userSource = KafkaUserTemplates.tlsUser(kafkaClusterSourceName, kafkaUserSourceName).build();
    KafkaUser userTarget = KafkaUserTemplates.tlsUser(kafkaClusterTargetName, kafkaUserTargetName).build();
    resourceManager.createResource(extensionContext, userSource);
    resourceManager.createResource(extensionContext, userTarget);
    resourceManager.createResource(extensionContext, false, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName, userSource, userTarget).build());
    final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, kafkaClientsName).get(0).getMetadata().getName();
    String baseTopic = mapWithTestTopics.get(extensionContext.getDisplayName());
    String topicTestName1 = baseTopic + "-test-1";
    String topicTestName2 = baseTopic + "-test-2";
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName1).build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterTargetName, topicTestName2).build());
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName).withTopicName(topicTestName1)
        .withNamespaceName(namespaceName).withClusterName(kafkaClusterSourceName)
        .withKafkaUsername(userSource.getMetadata().getName())
        .withMessageCount(messagesCount).withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    // Check brokers availability
    ClientUtils.waitUntilProducerAndConsumerSuccessfullySendAndReceiveMessages(extensionContext, internalKafkaClient);
    LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", topicTestName2, kafkaClusterTargetName, userTarget.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withClusterName(kafkaClusterTargetName).withTopicName(topicTestName2).withKafkaUsername(userTarget.getMetadata().getName()).build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", topicTestName2, kafkaClusterTargetName, messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
    // Initialize CertSecretSource with certificate and secret names for source
    CertSecretSource certSecretSource = new CertSecretSource();
    certSecretSource.setCertificate("ca.crt");
    certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
    // Initialize CertSecretSource with certificate and secret names for target
    CertSecretSource certSecretTarget = new CertSecretSource();
    certSecretTarget.setCertificate("ca.crt");
    certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
    // Deploy MirrorMaker 2.0 with a TLS listener and mutual TLS authentication
    KafkaMirrorMaker2ClusterSpec sourceClusterWithTlsAuth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterSourceName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
        .withNewKafkaClientAuthenticationTls()
            .withNewCertificateAndKey().withSecretName(kafkaUserSourceName).withCertificate("user.crt").withKey("user.key").endCertificateAndKey()
        .endKafkaClientAuthenticationTls()
        .withNewTls().withTrustedCertificates(certSecretSource).endTls()
        .build();
    KafkaMirrorMaker2ClusterSpec targetClusterWithTlsAuth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterTargetName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName))
        .withNewKafkaClientAuthenticationTls()
            .withNewCertificateAndKey().withSecretName(kafkaUserTargetName).withCertificate("user.crt").withKey("user.key").endCertificateAndKey()
        .endKafkaClientAuthenticationTls()
        .withNewTls().withTrustedCertificates(certSecretTarget).endTls()
        .addToConfig("config.storage.replication.factor", -1)
        .addToConfig("offset.storage.replication.factor", -1)
        .addToConfig("status.storage.replication.factor", -1)
        .build();
    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
        .editSpec()
            .withClusters(sourceClusterWithTlsAuth, targetClusterWithTlsAuth)
            .editFirstMirror()
                .withTopicsPattern(MIRRORMAKER2_TOPIC_NAME + ".*")
            .endMirror()
        .endSpec().build());
    LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", topicSourceName, kafkaClusterSourceName, userSource.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicSourceName).withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME).build();
    LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", topicSourceName, kafkaClusterSourceName, messagesCount);
    int sent = internalKafkaClient.sendMessagesTls();
    LOGGER.info("Receiving messages from - topic {}, cluster {} and message count of {}", topicSourceName, kafkaClusterSourceName, messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Now setting topic to {}, cluster to {} and user to {} - the messages should be mirrored", topicTargetName, kafkaClusterTargetName, userTarget.getMetadata().getName());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetName).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
    LOGGER.info("Consumer in target cluster and topic should receive {} messages", messagesCount);
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Messages successfully mirrored");
    KafkaTopic mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicTargetName).get();
    assertThat(mirroredTopic.getSpec().getPartitions(), is(3));
    assertThat(mirroredTopic.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(kafkaClusterTargetName));
    mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicSourceNameMirrored).get();
    assertThat(mirroredTopic, nullValue());
}
Also used: KafkaListenerAuthenticationTls(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) JobBuilder(io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaMirrorMaker2ClusterSpecBuilder(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpecBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) KafkaTopic(io.strimzi.api.kafka.model.KafkaTopic) KafkaMirrorMaker2ClusterSpec(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpec) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) Matchers.containsString(org.hamcrest.Matchers.containsString) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
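
If MirrorMaker 2.0 has not finished syncing when the consumer starts, the single receiveMessagesTls() call at the end of the test above can come up short. A hedged alternative, a sketch only and not part of the test, reusing methods that appear elsewhere in these examples (toBuilder, withConsumerGroupName, ClientUtils.generateRandomConsumerGroup, and the retrying consumesTlsMessagesUntilOperationIsSuccessful shown in the next example):

    // Sketch only: poll the mirrored topic with a fresh consumer group
    // until the expected number of records has been mirrored.
    internalKafkaClient = internalKafkaClient.toBuilder()
        .withTopicName(topicTargetName)
        .withClusterName(kafkaClusterTargetName)
        .withKafkaUsername(userTarget.getMetadata().getName())
        .withConsumerGroupName(ClientUtils.generateRandomConsumerGroup())
        .build();
    internalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(sent);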

Example 17 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From class MirrorMaker2IsolatedST, method testKMM2RollAfterSecretsCertsUpdateScramsha.

/**
 * Test mirroring messages by MirrorMaker 2.0 over TLS transport using SCRAM-SHA-512 authentication,
 * while the user SCRAM passwords and the cluster CA and client certificates are changed.
 */
@ParallelNamespaceTest
@SuppressWarnings({ "checkstyle:MethodLength" })
void testKMM2RollAfterSecretsCertsUpdateScramsha(ExtensionContext extensionContext) {
    TestStorage testStorage = new TestStorage(extensionContext);
    String kafkaClusterSourceName = testStorage.getClusterName() + "-source";
    String kafkaClusterTargetName = testStorage.getClusterName() + "-target";
    String topicSourceNameA = MIRRORMAKER2_TOPIC_NAME + "-a-" + rng.nextInt(Integer.MAX_VALUE);
    String topicSourceNameB = MIRRORMAKER2_TOPIC_NAME + "-b-" + rng.nextInt(Integer.MAX_VALUE);
    String topicTargetNameA = kafkaClusterSourceName + "." + topicSourceNameA;
    String topicTargetNameB = kafkaClusterSourceName + "." + topicSourceNameB;
    String kafkaUserSourceName = testStorage.getClusterName() + "-my-user-source";
    String kafkaUserTargetName = testStorage.getClusterName() + "-my-user-target";
    String kafkaTlsScramListenerName = "tlsscram";
    // Deploy source Kafka with TLS listeners using SCRAM-SHA-512 and mutual TLS authentication
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(kafkaClusterSourceName, 1, 1)
        .editSpec().editKafka()
            .withListeners(
                new GenericKafkaListenerBuilder()
                    .withName(kafkaTlsScramListenerName).withPort(9093)
                    .withType(KafkaListenerType.INTERNAL).withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512()).build(),
                new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9094)
                    .withType(KafkaListenerType.INTERNAL).withTls(true)
                    .withAuth(new KafkaListenerAuthenticationTls()).build())
        .endKafka().endSpec().build());
    // Deploy target Kafka with the same TLS listeners (SCRAM-SHA-512 and mutual TLS authentication)
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(kafkaClusterTargetName, 1, 1)
        .editSpec().editKafka()
            .withListeners(
                new GenericKafkaListenerBuilder()
                    .withName(kafkaTlsScramListenerName).withPort(9093)
                    .withType(KafkaListenerType.INTERNAL).withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512()).build(),
                new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9094)
                    .withType(KafkaListenerType.INTERNAL).withTls(true)
                    .withAuth(new KafkaListenerAuthenticationTls()).build())
        .endKafka().endSpec().build());
    // Deploy topic
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceNameA).build(), KafkaTopicTemplates.topic(kafkaClusterTargetName, topicSourceNameB).build());
    // Create Kafka users for the source and target clusters
    KafkaUser userSource = KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSourceName).build();
    KafkaUser userTarget = KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTargetName).build();
    resourceManager.createResource(extensionContext, userSource, userTarget);
    // Initialize PasswordSecretSource to set this as PasswordSecret in Source/Target MirrorMaker2 spec
    PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
    passwordSecretSource.setSecretName(kafkaUserSourceName);
    passwordSecretSource.setPassword("password");
    PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
    passwordSecretTarget.setSecretName(kafkaUserTargetName);
    passwordSecretTarget.setPassword("password");
    // Initialize CertSecretSource with certificate and secret names for source
    CertSecretSource certSecretSource = new CertSecretSource();
    certSecretSource.setCertificate("ca.crt");
    certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
    // Initialize CertSecretSource with certificate and secret names for target
    CertSecretSource certSecretTarget = new CertSecretSource();
    certSecretTarget.setCertificate("ca.crt");
    certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
    // Deploy client
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(testStorage.getNamespaceName(), true, testStorage.getKafkaClientsName(), userSource, userTarget).build());
    String kafkaClientsPodName = kubeClient().listPodsByPrefixInName(testStorage.getKafkaClientsName()).get(0).getMetadata().getName();
    KafkaMirrorMaker2ClusterSpec sourceClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterSourceName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(kafkaUserSourceName).withPasswordSecret(passwordSecretSource)
        .endKafkaClientAuthenticationScramSha512()
        .withNewTls().withTrustedCertificates(certSecretSource).endTls()
        .build();
    KafkaMirrorMaker2ClusterSpec targetClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaClusterTargetName)
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(kafkaUserTargetName).withPasswordSecret(passwordSecretTarget)
        .endKafkaClientAuthenticationScramSha512()
        .withNewTls().withTrustedCertificates(certSecretTarget).endTls()
        .addToConfig("config.storage.replication.factor", -1)
        .addToConfig("offset.storage.replication.factor", -1)
        .addToConfig("status.storage.replication.factor", -1)
        .build();
    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(testStorage.getClusterName(), kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
        .editSpec()
            .withClusters(targetClusterWithScramSha512Auth, sourceClusterWithScramSha512Auth)
            .editFirstMirror()
                .editSourceConnector().addToConfig("refresh.topics.interval.seconds", 1).endSourceConnector()
            .endMirror()
        .endSpec().build());
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName).withTopicName(topicSourceNameA)
        .withNamespaceName(testStorage.getNamespaceName()).withClusterName(kafkaClusterSourceName)
        .withKafkaUsername(kafkaUserSourceName).withMessageCount(messagesCount)
        .withListenerName(kafkaTlsScramListenerName)
        .build();
    int sent = internalKafkaClient.sendMessagesTls();
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetNameA).withClusterName(kafkaClusterTargetName).withKafkaUsername(kafkaUserTargetName).withConsumerGroupName(ClientUtils.generateRandomConsumerGroup()).build();
    LOGGER.info("Now messages should be mirrored to target topic and cluster");
    internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Messages successfully mirrored");
    String kmm2DeploymentName = KafkaMirrorMaker2Resources.deploymentName(testStorage.getClusterName());
    Map<String, String> mmSnapshot = DeploymentUtils.depSnapshot(testStorage.getNamespaceName(), kmm2DeploymentName);
    LOGGER.info("Changing KafkaUser sha-password on KMM2 Source and make sure it rolled");
    Secret passwordSource = new SecretBuilder().withNewMetadata().withName(kafkaUserSourceName).endMetadata().addToData("password", "c291cmNlLXBhc3N3b3Jk").build();
    kubeClient().patchSecret(testStorage.getNamespaceName(), kafkaUserSourceName, passwordSource);
    mmSnapshot = DeploymentUtils.waitTillDepHasRolled(testStorage.getNamespaceName(), kmm2DeploymentName, 1, mmSnapshot);
    LOGGER.info("Changing KafkaUser sha-password on KMM2 Target");
    Secret passwordTarget = new SecretBuilder().withNewMetadata().withName(kafkaUserTargetName).endMetadata().addToData("password", "dGFyZ2V0LXBhc3N3b3Jk").build();
    kubeClient().patchSecret(testStorage.getNamespaceName(), kafkaUserTargetName, passwordTarget);
    DeploymentUtils.waitTillDepHasRolled(testStorage.getNamespaceName(), kmm2DeploymentName, 1, mmSnapshot);
    LOGGER.info("Recreate kafkaClients pod with new passwords.");
    resourceManager.deleteResource(kubeClient().namespace(testStorage.getNamespaceName()).getDeployment(testStorage.getKafkaClientsName()));
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(testStorage.getNamespaceName(), true, testStorage.getKafkaClientsName(), userSource, userTarget).build());
    kafkaClientsPodName = kubeClient().listPodsByPrefixInName(testStorage.getKafkaClientsName()).get(0).getMetadata().getName();
    internalKafkaClient = internalKafkaClient.toBuilder().withUsingPodName(kafkaClientsPodName).withTopicName(topicSourceNameB).withClusterName(kafkaClusterSourceName).withKafkaUsername(kafkaUserSourceName).withListenerName(kafkaTlsScramListenerName).build();
    sent = internalKafkaClient.sendMessagesTls();
    internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetNameB).withClusterName(kafkaClusterTargetName).withKafkaUsername(kafkaUserTargetName).withConsumerGroupName(ClientUtils.generateRandomConsumerGroup()).build();
    LOGGER.info("Now messages should be mirrored to target topic and cluster");
    internalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(sent);
    LOGGER.info("Messages successfully mirrored");
}
Also used: GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) JobBuilder(io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) KafkaMirrorMaker2ClusterSpecBuilder(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpecBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) PasswordSecretSource(io.strimzi.api.kafka.model.PasswordSecretSource) Matchers.containsString(org.hamcrest.Matchers.containsString) KafkaListenerAuthenticationScramSha512(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512) Secret(io.fabric8.kubernetes.api.model.Secret) KafkaListenerAuthenticationTls(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls) KafkaMirrorMaker2ClusterSpec(io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpec) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) TestStorage(io.strimzi.systemtest.storage.TestStorage) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
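
The hard-coded strings patched into the password Secrets above are nothing more than Base64-encoded plaintext. A small sketch of how such values can be produced, using the same java.util.Base64 and StandardCharsets helpers that Example 19 below uses; the plaintext strings are assumptions obtained by decoding the values in the test:

    // "source-password" and "target-password" are what the patched values decode to.
    String sourceEncoded = Base64.getEncoder().encodeToString("source-password".getBytes(StandardCharsets.UTF_8));
    String targetEncoded = Base64.getEncoder().encodeToString("target-password".getBytes(StandardCharsets.UTF_8));
    // sourceEncoded == "c291cmNlLXBhc3N3b3Jk", targetEncoded == "dGFyZ2V0LXBhc3N3b3Jk"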

Example 18 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From class ListenersST, method testSendMessagesCustomListenerTlsScramSha.

/**
 * Test a custom listener configured with SCRAM-SHA-512 authentication and TLS.
 */
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
@Tag(INTERNAL_CLIENTS_USED)
void testSendMessagesCustomListenerTlsScramSha(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String kafkaUsername = mapWithTestUsers.get(extensionContext.getDisplayName());
    // Use a Kafka with plain listener disabled
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
        .editSpec().editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withType(KafkaListenerType.INTERNAL).withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9122).withTls(true)
                .withNewKafkaListenerAuthenticationCustomAuth()
                    .withSasl(true)
                    .withListenerConfig(Map.of(
                        "scram-sha-512.sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required;",
                        "sasl.enabled.mechanisms", "SCRAM-SHA-512"))
                .endKafkaListenerAuthenticationCustomAuth()
                .build())
        .endKafka().endSpec().build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, kafkaUsername).build();
    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, clusterName + "-" + Constants.KAFKA_CLIENTS, kafkaUser).build());
    final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName).withTopicName(topicName)
        .withNamespaceName(namespaceName).withClusterName(clusterName)
        .withKafkaUsername(kafkaUsername).withMessageCount(MESSAGE_COUNT)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    // Check brokers availability
    LOGGER.info("Checking produced and consumed messages to pod:{}", kafkaClientsPodName);
    internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
}
Also used: GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) ContainerEnvVarBuilder(io.strimzi.api.kafka.model.ContainerEnvVarBuilder) GenericKafkaListenerConfigurationBrokerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBrokerBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
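
For reference, the custom listener above behaves like a plain SASL_SSL + SCRAM-SHA-512 endpoint. A minimal sketch (not taken from InternalKafkaClient itself) of the java.util.Properties a standalone Kafka client would need to reach it; the bootstrap service name follows the usual Strimzi <cluster>-kafka-bootstrap convention, and the placeholder password is an assumption (in the test it comes from the KafkaUser secret):

    // Sketch only: equivalent plain Kafka client configuration for the listener on port 9122.
    // A truststore with the cluster CA certificate would also be needed for the TLS handshake.
    Properties props = new Properties();
    props.put("bootstrap.servers", clusterName + "-kafka-bootstrap:9122");
    props.put("security.protocol", "SASL_SSL");
    props.put("sasl.mechanism", "SCRAM-SHA-512");
    props.put("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required "
        + "username=\"" + kafkaUsername + "\" password=\"<password-from-user-secret>\";");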

Example 19 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From class ListenersST, method testMessagesTlsScramShaWithPredefinedPassword.

@ParallelNamespaceTest
void testMessagesTlsScramShaWithPredefinedPassword(ExtensionContext extensionContext) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    final String firstUnencodedPassword = "completely_secret_password";
    final String secondUnencodedPassword = "completely_different_secret_password";
    final String firstEncodedPassword = Base64.getEncoder().encodeToString(firstUnencodedPassword.getBytes(StandardCharsets.UTF_8));
    final String secondEncodedPassword = Base64.getEncoder().encodeToString(secondUnencodedPassword.getBytes(StandardCharsets.UTF_8));
    final String secretName = clusterName + "-secret";
    Secret password = new SecretBuilder().withNewMetadata().withName(secretName).endMetadata().addToData("password", firstEncodedPassword).build();
    kubeClient().namespace(namespaceName).createSecret(password);
    assertThat("Password in secret is not correct", kubeClient().namespace(namespaceName).getSecret(secretName).getData().get("password"), is(firstEncodedPassword));
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
        .editSpec()
            .withNewKafkaUserScramSha512ClientAuthentication()
                .withNewPassword()
                    .withNewValueFrom().withNewSecretKeyRef("password", secretName, false).endValueFrom()
                .endPassword()
            .endKafkaUserScramSha512ClientAuthentication()
        .endSpec().build();
    resourceManager.createResource(extensionContext,
        KafkaTemplates.kafkaEphemeral(clusterName, 3)
            .editSpec().editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withType(KafkaListenerType.INTERNAL).withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9096).withTls(true)
                    .withNewKafkaListenerAuthenticationScramSha512Auth().endKafkaListenerAuthenticationScramSha512Auth()
                    .build())
            .endKafka().endSpec().build(),
        kafkaUser,
        KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName, kafkaUser).build());
    String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(namespaceName, kafkaClientsName).get(0).getMetadata().getName();
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName).withTopicName(topicName)
        .withNamespaceName(namespaceName).withClusterName(clusterName)
        .withKafkaUsername(userName).withMessageCount(MESSAGE_COUNT)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    int sentMessages = internalKafkaClient.sendMessagesTls();
    internalKafkaClient.checkProducedAndConsumedMessages(sentMessages, internalKafkaClient.receiveMessagesTls());
    LOGGER.info("Changing password in secret: {}, we should be able to send/receive messages", secretName);
    password = new SecretBuilder(password).addToData("password", secondEncodedPassword).build();
    kubeClient().namespace(namespaceName).createSecret(password);
    SecretUtils.waitForUserPasswordChange(namespaceName, userName, secondEncodedPassword);
    LOGGER.info("We need to recreate Kafka Clients deployment, so the correct password from secret will be taken");
    resourceManager.deleteResource(kubeClient().getDeployment(namespaceName, kafkaClientsName));
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName, kafkaUser).build());
    LOGGER.info("Receiving messages with new password");
    kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(namespaceName, kafkaClientsName).get(0).getMetadata().getName();
    internalKafkaClient = internalKafkaClient.toBuilder().withUsingPodName(kafkaClientsPodName).withConsumerGroupName(ClientUtils.generateRandomConsumerGroup()).build();
    internalKafkaClient.checkProducedAndConsumedMessages(sentMessages, internalKafkaClient.receiveMessagesTls());
}
Also used: Secret(io.fabric8.kubernetes.api.model.Secret) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) ContainerEnvVarBuilder(io.strimzi.api.kafka.model.ContainerEnvVarBuilder) GenericKafkaListenerConfigurationBrokerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBrokerBuilder) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
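
As a sanity check on the predefined-password flow above, the stored Secret value can be decoded back and compared with the plaintext. A short sketch, not part of the test, using only calls already shown in this example plus java.util.Base64.getDecoder():

    // Sketch only: decode the value stored in the Secret and compare it with the plaintext password.
    String stored = kubeClient().namespace(namespaceName).getSecret(secretName).getData().get("password");
    String decoded = new String(Base64.getDecoder().decode(stored), StandardCharsets.UTF_8);
    assertThat(decoded, is(firstUnencodedPassword));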

Example 20 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From class ListenersST, method testCustomCertLoadBalancerAndTlsRollingUpdate.

@ParallelNamespaceTest
@Tag(LOADBALANCER_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(INTERNAL_CLIENTS_USED)
@SuppressWarnings({ "checkstyle:MethodLength" })
void testCustomCertLoadBalancerAndTlsRollingUpdate(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    final String clusterCustomCertServer1 = clusterName + "-" + customCertServer1;
    final String clusterCustomCertServer2 = clusterName + "-" + customCertServer2;
    final LabelSelector kafkaSelector = KafkaResource.getLabelSelector(clusterName, KafkaResources.kafkaStatefulSetName(clusterName));
    SecretUtils.createCustomSecret(clusterCustomCertServer1, clusterName, namespaceName, STRIMZI_CERT_AND_KEY_1);
    SecretUtils.createCustomSecret(clusterCustomCertServer2, clusterName, namespaceName, STRIMZI_CERT_AND_KEY_2);
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 3)
        .editSpec().editKafka()
            .withListeners(
                new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9113)
                    .withType(KafkaListenerType.INTERNAL).withTls(true)
                    .build(),
                new GenericKafkaListenerBuilder()
                    .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9114)
                    .withType(KafkaListenerType.LOADBALANCER).withTls(true)
                    .withNewConfiguration().withFinalizers(LB_FINALIZERS).endConfiguration()
                    .build())
        .endKafka().endSpec().build());
    KafkaUser aliceUser = KafkaUserTemplates.tlsUser(clusterName, userName).build();
    resourceManager.createResource(extensionContext, aliceUser);
    String externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    String externalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterName + "-cluster-ca-cert", "ca.crt");
    String internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    LOGGER.info("Check if KafkaStatus certificates from external listeners are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    // External secret cert is same as internal in this case
    assertThat(externalSecretCerts, is(internalCerts));
    ExternalKafkaClient externalKafkaClient = new ExternalKafkaClient.Builder()
        .withTopicName(topicName).withNamespaceName(namespaceName)
        .withClusterName(clusterName).withKafkaUsername(userName)
        .withMessageCount(MESSAGE_COUNT).withSecurityProtocol(SecurityProtocol.SSL)
        .withCertificateAuthorityCertificateName(null)
        .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
        .build();
    externalKafkaClient.verifyProducedAndConsumedMessages(externalKafkaClient.sendMessagesTls(), externalKafkaClient.receiveMessagesTls());
    Map<String, String> kafkaSnapshot = PodUtils.podSnapshot(namespaceName, kafkaSelector);
    KafkaResource.replaceKafkaResourceInSpecificNamespace(clusterName, kafka -> {
        kafka.getSpec().getKafka().setListeners(asList(
            new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9113)
                .withType(KafkaListenerType.INTERNAL).withTls(true)
                .withNewConfiguration()
                    .withNewBrokerCertChainAndKey()
                        .withSecretName(clusterCustomCertServer2).withKey("ca.key").withCertificate("ca.crt")
                    .endBrokerCertChainAndKey()
                .endConfiguration()
                .build(),
            new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9114)
                .withType(KafkaListenerType.LOADBALANCER).withTls(true)
                .withNewConfiguration()
                    .withNewBrokerCertChainAndKey()
                        .withSecretName(clusterCustomCertServer1).withKey("ca.key").withCertificate("ca.crt")
                    .endBrokerCertChainAndKey()
                    .withFinalizers(LB_FINALIZERS)
                .endConfiguration()
                .build()));
    }, namespaceName);
    kafkaSnapshot = RollingUpdateUtils.waitTillComponentHasRolled(namespaceName, kafkaSelector, 3, kafkaSnapshot);
    KafkaUtils.waitForKafkaStatusUpdate(namespaceName, clusterName);
    externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    externalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterCustomCertServer1, "ca.crt");
    internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    String internalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterCustomCertServer2, "ca.crt");
    LOGGER.info("Check if KafkaStatus certificates are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    assertThat(internalSecretCerts, is(internalCerts));
    externalKafkaClient = externalKafkaClient.toBuilder().withCertificateAuthorityCertificateName(clusterCustomCertServer1).build();
    externalKafkaClient.verifyProducedAndConsumedMessages(externalKafkaClient.sendMessagesTls(), externalKafkaClient.receiveMessagesTls());
    // Deploy client pod with custom certificates and collect messages from internal TLS listener
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(true, clusterName + "-" + Constants.KAFKA_CLIENTS, false, aliceUser).build());
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kubeClient(namespaceName).listPodsByPrefixInName(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName())
        .withTopicName(topicName).withNamespaceName(namespaceName)
        .withClusterName(clusterName).withKafkaUsername(userName)
        .withMessageCount(MESSAGE_COUNT).withConsumerGroupName("consumer-group-certs-81")
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    int sent = internalKafkaClient.sendMessagesTls();
    assertThat(sent, is(MESSAGE_COUNT));
    internalKafkaClient.setMessageCount(MESSAGE_COUNT * 3);
    int received = internalKafkaClient.receiveMessagesTls();
    assertThat(received, is(3 * MESSAGE_COUNT));
    SecretUtils.createCustomSecret(clusterCustomCertServer1, clusterName, namespaceName, STRIMZI_CERT_AND_KEY_2);
    SecretUtils.createCustomSecret(clusterCustomCertServer2, clusterName, namespaceName, STRIMZI_CERT_AND_KEY_1);
    kafkaSnapshot = RollingUpdateUtils.waitTillComponentHasRolled(namespaceName, kafkaSelector, 3, kafkaSnapshot);
    externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    externalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterCustomCertServer1, "ca.crt");
    internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    internalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterCustomCertServer2, "ca.crt");
    LOGGER.info("Check if KafkaStatus certificates are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    assertThat(internalSecretCerts, is(internalCerts));
    sent = externalKafkaClient.sendMessagesTls() + MESSAGE_COUNT;
    externalKafkaClient.setMessageCount(2 * MESSAGE_COUNT);
    externalKafkaClient.verifyProducedAndConsumedMessages(sent, externalKafkaClient.receiveMessagesTls());
    internalKafkaClient = internalKafkaClient.toBuilder().withConsumerGroupName("consumer-group-certs-71").withMessageCount(MESSAGE_COUNT).build();
    sent = internalKafkaClient.sendMessagesTls();
    assertThat(sent, is(MESSAGE_COUNT));
    internalKafkaClient.setMessageCount(MESSAGE_COUNT * 5);
    received = internalKafkaClient.receiveMessagesTls();
    assertThat(received, is(5 * MESSAGE_COUNT));
    KafkaResource.replaceKafkaResourceInSpecificNamespace(clusterName, kafka -> {
        kafka.getSpec().getKafka().setListeners(asList(
            new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9113)
                .withType(KafkaListenerType.INTERNAL).withTls(true)
                .withNewConfiguration()
                    .withNewBrokerCertChainAndKey()
                        .withSecretName(clusterCustomCertServer2).withKey("ca.key").withCertificate("ca.crt")
                    .endBrokerCertChainAndKey()
                .endConfiguration()
                .build(),
            new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9114)
                .withType(KafkaListenerType.LOADBALANCER)
                .withNewConfiguration().withFinalizers(LB_FINALIZERS).endConfiguration()
                .withTls(true)
                .build()));
    }, namespaceName);
    RollingUpdateUtils.waitTillComponentHasRolled(namespaceName, kafkaSelector, 3, kafkaSnapshot);
    KafkaUtils.waitForKafkaStatusUpdate(namespaceName, clusterName);
    externalCerts = getKafkaStatusCertificates(Constants.EXTERNAL_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    externalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterName + "-cluster-ca-cert", "ca.crt");
    internalCerts = getKafkaStatusCertificates(Constants.TLS_LISTENER_DEFAULT_NAME, namespaceName, clusterName);
    internalSecretCerts = getKafkaSecretCertificates(namespaceName, clusterCustomCertServer2, "ca.crt");
    LOGGER.info("Check if KafkaStatus certificates are the same as secret certificates");
    assertThat(externalSecretCerts, is(externalCerts));
    LOGGER.info("Check if KafkaStatus certificates from internal TLS listener are the same as secret certificates");
    assertThat(internalSecretCerts, is(internalCerts));
    externalKafkaClient = externalKafkaClient.toBuilder().withCertificateAuthorityCertificateName(null).withMessageCount(MESSAGE_COUNT).build();
    sent = externalKafkaClient.sendMessagesTls() + MESSAGE_COUNT;
    externalKafkaClient.setMessageCount(2 * MESSAGE_COUNT);
    externalKafkaClient.verifyProducedAndConsumedMessages(sent, externalKafkaClient.receiveMessagesTls());
    internalKafkaClient = internalKafkaClient.toBuilder().withConsumerGroupName("consumer-group-certs-83").withMessageCount(6 * MESSAGE_COUNT).build();
    received = internalKafkaClient.receiveMessagesTls();
    assertThat(received, is(6 * MESSAGE_COUNT));
}
Also used: ExternalKafkaClient(io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
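
The expected counts asserted in the test above follow from simple bookkeeping: every producer call writes MESSAGE_COUNT records to the same topic, and each internal read starts from the beginning of the topic with a fresh consumer group. A hedged summary of that arithmetic, assuming no other producers touch the topic:

    // ext send #1 + ext send #2 + int send #1 -> 3 * MESSAGE_COUNT read by consumer-group-certs-81
    // ... + ext send #3 + int send #2         -> 5 * MESSAGE_COUNT read by consumer-group-certs-71
    // ... + ext send #4                       -> 6 * MESSAGE_COUNT read by consumer-group-certs-83
    int afterFirstInternalRead = 3 * MESSAGE_COUNT;
    int afterSecondInternalRead = 5 * MESSAGE_COUNT;
    int afterThirdInternalRead = 6 * MESSAGE_COUNT;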

Aggregations

InternalKafkaClient (io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient): 160
ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest): 100
KafkaUser (io.strimzi.api.kafka.model.KafkaUser): 94
Tag (org.junit.jupiter.api.Tag): 94
GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder): 86
Matchers.containsString (org.hamcrest.Matchers.containsString): 60
SecretBuilder (io.fabric8.kubernetes.api.model.SecretBuilder): 46
LabelSelector (io.fabric8.kubernetes.api.model.LabelSelector): 42
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 42
ResourceRequirementsBuilder (io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder): 34
HashMap (java.util.HashMap): 28
Secret (io.fabric8.kubernetes.api.model.Secret): 26
ParallelTest (io.strimzi.systemtest.annotations.ParallelTest): 24
ExternalKafkaClient (io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient): 24
KafkaTopic (io.strimzi.api.kafka.model.KafkaTopic): 18
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder): 18
Quantity (io.fabric8.kubernetes.api.model.Quantity): 16
IsolatedTest (io.strimzi.systemtest.annotations.IsolatedTest): 16
ConfigMapBuilder (io.fabric8.kubernetes.api.model.ConfigMapBuilder): 14
ConfigMapKeySelectorBuilder (io.fabric8.kubernetes.api.model.ConfigMapKeySelectorBuilder): 14