
Example 36 with GenericKafkaListenerBuilder

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder in project strimzi by strimzi.

From the class ConnectIsolatedST, method testKafkaConnectWithPlainAndScramShaAuthentication.

@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testKafkaConnectWithPlainAndScramShaAuthentication(ExtensionContext extensionContext) {
    TestStorage storage = new TestStorage(extensionContext);
    // Kafka with a plain (non-TLS) internal listener on 9092, secured with SCRAM-SHA-512 authentication
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(storage.getClusterName(), 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                    .withPort(9092)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(false)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(storage.getClusterName(), storage.getUserName()).build();
    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(storage.getClusterName(), storage.getTopicName()).build());
    KafkaConnect connect = KafkaConnectTemplates.kafkaConnectWithFilePlugin(storage.getNamespaceName(), storage.getClusterName(), 1)
        .editSpec()
            .withBootstrapServers(KafkaResources.plainBootstrapAddress(storage.getClusterName()))
            .withNewKafkaClientAuthenticationScramSha512()
                .withUsername(storage.getUserName())
                .withPasswordSecret(new PasswordSecretSourceBuilder()
                    .withSecretName(storage.getUserName())
                    .withPassword("password")
                    .build())
            .endKafkaClientAuthenticationScramSha512()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withVersion(Environment.ST_KAFKA_VERSION)
            .withReplicas(1)
        .endSpec()
        .build();
    // Required to remove the TLS setting; the builder cannot unset it
    connect.getSpec().setTls(null);
    resourceManager.createResource(extensionContext, connect, ScraperTemplates.scraperPod(storage.getNamespaceName(), storage.getScraperName()).build());
    LOGGER.info("Deploy NetworkPolicies for KafkaConnect");
    NetworkPolicyResource.deployNetworkPolicyForResource(extensionContext, connect, KafkaConnectResources.deploymentName(storage.getClusterName()));
    final String kafkaConnectPodName = kubeClient(storage.getNamespaceName()).listPodsByPrefixInName(KafkaConnectResources.deploymentName(storage.getClusterName())).get(0).getMetadata().getName();
    final String kafkaConnectLogs = kubeClient(storage.getNamespaceName()).logs(kafkaConnectPodName);
    final String scraperPodName = kubeClient(storage.getNamespaceName()).listPodsByPrefixInName(storage.getScraperName()).get(0).getMetadata().getName();
    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(storage.getNamespaceName(), kafkaConnectPodName);
    LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
    assertThat(kafkaConnectLogs, not(containsString("ERROR")));
    LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", scraperPodName, storage.getTopicName());
    KafkaConnectorUtils.createFileSinkConnector(storage.getNamespaceName(), scraperPodName, storage.getTopicName(), Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(storage.getClusterName(), storage.getNamespaceName(), 8083));
    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withTopicName(storage.getTopicName())
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(storage.getUserName())
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(storage.getClusterName()))
        .withProducerName(storage.getProducerName())
        .withConsumerName(storage.getConsumerName())
        .withNamespaceName(storage.getNamespaceName())
        .build();
    resourceManager.createResource(extensionContext, kafkaClients.producerScramShaPlainStrimzi(), kafkaClients.consumerScramShaPlainStrimzi());
    ClientUtils.waitForClientsSuccess(storage.getProducerName(), storage.getConsumerName(), storage.getNamespaceName(), MESSAGE_COUNT);
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(storage.getNamespaceName(), kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
Also used: KafkaListenerAuthenticationScramSha512 (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512), KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder), KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients), GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder), TestStorage (io.strimzi.systemtest.storage.TestStorage), KafkaConnect (io.strimzi.api.kafka.model.KafkaConnect), Matchers.containsString (org.hamcrest.Matchers.containsString), KafkaUser (io.strimzi.api.kafka.model.KafkaUser), PasswordSecretSourceBuilder (io.strimzi.api.kafka.model.PasswordSecretSourceBuilder), KRaftNotSupported (io.strimzi.systemtest.annotations.KRaftNotSupported), ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest), Tag (org.junit.jupiter.api.Tag)

Example 37 with GenericKafkaListenerBuilder

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder in project strimzi by strimzi.

From the class ConnectIsolatedST, method testConnectTlsAuthWithWeirdUserName.

@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(CONNECTOR_OPERATOR)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
@ParallelNamespaceTest
void testConnectTlsAuthWithWeirdUserName(ExtensionContext extensionContext) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    // Create a user with an unusual name: it contains '.' and is at the 64-character maximum -> TLS
    final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasd";
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
        .editSpec()
            .editKafka()
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9093)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .withAuth(new KafkaListenerAuthenticationTls())
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9094)
                        .withType(KafkaListenerType.NODEPORT)
                        .withTls(true)
                        .withAuth(new KafkaListenerAuthenticationTls())
                        .build())
            .endKafka()
        .endSpec()
        .build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, weirdUserName).build());
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnectWithFilePlugin(namespaceName, clusterName, 1)
        .editMetadata()
            .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
        .endMetadata()
        .editSpec()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewTls()
                .withTrustedCertificates(new CertSecretSourceBuilder()
                    .withCertificate("ca.crt")
                    .withSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName))
                    .build())
            .endTls()
            .withNewKafkaClientAuthenticationTls()
                .withNewCertificateAndKey()
                    .withSecretName(weirdUserName)
                    .withCertificate("user.crt")
                    .withKey("user.key")
                .endCertificateAndKey()
            .endKafkaClientAuthenticationTls()
            .withBootstrapServers(KafkaResources.tlsBootstrapAddress(clusterName))
        .endSpec()
        .build());
    testConnectAuthorizationWithWeirdUserName(extensionContext, clusterName, weirdUserName, SecurityProtocol.SSL, topicName);
}
Also used: KafkaListenerAuthenticationTls (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls), CertSecretSourceBuilder (io.strimzi.api.kafka.model.CertSecretSourceBuilder), GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder), Matchers.containsString (org.hamcrest.Matchers.containsString), KRaftNotSupported (io.strimzi.systemtest.annotations.KRaftNotSupported), ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest), Tag (org.junit.jupiter.api.Tag)

Example 38 with GenericKafkaListenerBuilder

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder in project strimzi by strimzi.

From the class ConnectIsolatedST, method testKafkaConnectWithScramShaAuthenticationRolledAfterPasswordChanged.

@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
// Changing the password in the Secret should trigger a rolling update (RU) of the Connect pod
void testKafkaConnectWithScramShaAuthenticationRolledAfterPasswordChanged(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(clusterOperator.getDeploymentNamespace(), extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                    .withPort(9092)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(false)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    // "MTIzNDU2Nzg5" is the base64 encoding of "123456789"
    Secret passwordSecret = new SecretBuilder()
        .withNewMetadata()
            .withName("custom-pwd-secret")
        .endMetadata()
        .addToData("pwd", "MTIzNDU2Nzg5")
        .build();
    kubeClient(namespaceName).createSecret(passwordSecret);
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
        .editSpec()
            .withNewKafkaUserScramSha512ClientAuthentication()
                .withNewPassword()
                    .withNewValueFrom()
                        .withNewSecretKeyRef("pwd", "custom-pwd-secret", false)
                    .endValueFrom()
                .endPassword()
            .endKafkaUserScramSha512ClientAuthentication()
        .endSpec()
        .build();
    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, userName).build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(clusterName, 1)
        .withNewSpec()
            .withBootstrapServers(KafkaResources.plainBootstrapAddress(clusterName))
            .withNewKafkaClientAuthenticationScramSha512()
                .withUsername(userName)
                .withPasswordSecret(new PasswordSecretSourceBuilder()
                    .withSecretName(userName)
                    .withPassword("password")
                    .build())
            .endKafkaClientAuthenticationScramSha512()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withVersion(Environment.ST_KAFKA_VERSION)
            .withReplicas(1)
        .endSpec()
        .build());
    final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodName);
    Map<String, String> connectSnapshot = DeploymentUtils.depSnapshot(namespaceName, KafkaConnectResources.deploymentName(clusterName));
    // Base64 encoding of a different password ("necoJinehoNezSpravnyPassword"); switching the KafkaUser to it should roll the Connect deployment
    String newPassword = "bmVjb0ppbmVob05lelNwcmF2bnlQYXNzd29yZA==";
    Secret newPasswordSecret = new SecretBuilder()
        .withNewMetadata()
            .withName("new-custom-pwd-secret")
        .endMetadata()
        .addToData("pwd", newPassword)
        .build();
    kubeClient(namespaceName).createSecret(newPasswordSecret);
    kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
        .editSpec()
            .withNewKafkaUserScramSha512ClientAuthentication()
                .withNewPassword()
                    .withNewValueFrom()
                        .withNewSecretKeyRef("pwd", "new-custom-pwd-secret", false)
                    .endValueFrom()
                .endPassword()
            .endKafkaUserScramSha512ClientAuthentication()
        .endSpec()
        .build();
    resourceManager.createResource(extensionContext, kafkaUser);
    DeploymentUtils.waitTillDepHasRolled(namespaceName, KafkaConnectResources.deploymentName(clusterName), 1, connectSnapshot);
    final String kafkaConnectPodNameAfterRU = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodNameAfterRU);
}
Also used: KafkaListenerAuthenticationScramSha512 (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512), Secret (io.fabric8.kubernetes.api.model.Secret), SecretBuilder (io.fabric8.kubernetes.api.model.SecretBuilder), GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder), Matchers.containsString (org.hamcrest.Matchers.containsString), KafkaUser (io.strimzi.api.kafka.model.KafkaUser), PasswordSecretSourceBuilder (io.strimzi.api.kafka.model.PasswordSecretSourceBuilder), KRaftNotSupported (io.strimzi.systemtest.annotations.KRaftNotSupported), ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest), Tag (org.junit.jupiter.api.Tag)

Example 39 with GenericKafkaListenerBuilder

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder in project strimzi by strimzi.

From the class ConnectIsolatedST, method testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication.

@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication(ExtensionContext extensionContext) {
    TestStorage storage = new TestStorage(extensionContext);
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(storage.getClusterName(), 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(storage.getClusterName(), storage.getUserName()).build();
    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(storage.getClusterName(), storage.getTopicName()).build());
    KafkaConnect connect = KafkaConnectTemplates.kafkaConnectWithFilePlugin(storage.getNamespaceName(), storage.getClusterName(), 1)
        .editSpec()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewTls()
                .addNewTrustedCertificate()
                    .withSecretName(storage.getClusterName() + "-cluster-ca-cert")
                    .withCertificate("ca.crt")
                .endTrustedCertificate()
            .endTls()
            .withBootstrapServers(storage.getClusterName() + "-kafka-bootstrap:9093")
            .withNewKafkaClientAuthenticationScramSha512()
                .withUsername(storage.getUserName())
                .withNewPasswordSecret()
                    .withSecretName(storage.getUserName())
                    .withPassword("password")
                .endPasswordSecret()
            .endKafkaClientAuthenticationScramSha512()
        .endSpec()
        .build();
    resourceManager.createResource(extensionContext, connect, ScraperTemplates.scraperPod(storage.getNamespaceName(), storage.getScraperName()).build());
    LOGGER.info("Deploy NetworkPolicies for KafkaConnect");
    NetworkPolicyResource.deployNetworkPolicyForResource(extensionContext, connect, KafkaConnectResources.deploymentName(storage.getClusterName()));
    final String kafkaConnectPodName = kubeClient(storage.getNamespaceName()).listPodsByPrefixInName(KafkaConnectResources.deploymentName(storage.getClusterName())).get(0).getMetadata().getName();
    final String kafkaConnectLogs = kubeClient(storage.getNamespaceName()).logs(kafkaConnectPodName);
    final String scraperPodName = kubeClient(storage.getNamespaceName()).listPodsByPrefixInName(storage.getScraperName()).get(0).getMetadata().getName();
    LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
    assertThat(kafkaConnectLogs, not(containsString("ERROR")));
    LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", scraperPodName, storage.getTopicName());
    KafkaConnectorUtils.createFileSinkConnector(storage.getNamespaceName(), scraperPodName, storage.getTopicName(), Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(storage.getClusterName(), storage.getNamespaceName(), 8083));
    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withTopicName(storage.getTopicName())
        .withMessageCount(MESSAGE_COUNT)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(storage.getClusterName()))
        .withProducerName(storage.getProducerName())
        .withConsumerName(storage.getConsumerName())
        .withNamespaceName(storage.getNamespaceName())
        .withUserName(storage.getUserName())
        .build();
    resourceManager.createResource(extensionContext, kafkaClients.producerScramShaTlsStrimzi(storage.getClusterName()), kafkaClients.consumerScramShaTlsStrimzi(storage.getClusterName()));
    ClientUtils.waitForClientsSuccess(storage.getProducerName(), storage.getConsumerName(), storage.getNamespaceName(), MESSAGE_COUNT);
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(storage.getNamespaceName(), kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
Also used: KafkaListenerAuthenticationScramSha512 (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512), KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder), KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients), GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder), TestStorage (io.strimzi.systemtest.storage.TestStorage), KafkaConnect (io.strimzi.api.kafka.model.KafkaConnect), Matchers.containsString (org.hamcrest.Matchers.containsString), KafkaUser (io.strimzi.api.kafka.model.KafkaUser), KRaftNotSupported (io.strimzi.systemtest.annotations.KRaftNotSupported), ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest), Tag (org.junit.jupiter.api.Tag)

Example 40 with GenericKafkaListenerBuilder

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder in project strimzi by strimzi.

From the class KRaftUtilsTest, method testValidKafka.

@ParallelTest
public void testValidKafka() {
    KafkaSpec spec = new KafkaSpecBuilder()
        .withNewKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName("listener")
                .withPort(9092)
                .withTls(true)
                .withType(KafkaListenerType.INTERNAL)
                .withNewKafkaListenerAuthenticationTlsAuth()
                .endKafkaListenerAuthenticationTlsAuth()
                .build())
            .withNewEphemeralStorage()
            .endEphemeralStorage()
            .withNewKafkaAuthorizationOpa()
                .withUrl("http://opa:8080")
            .endKafkaAuthorizationOpa()
        .endKafka()
        .build();
    assertDoesNotThrow(() -> KRaftUtils.validateKafkaCrForKRaft(spec));
}
Also used: KafkaSpecBuilder (io.strimzi.api.kafka.model.KafkaSpecBuilder), GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder), KafkaSpec (io.strimzi.api.kafka.model.KafkaSpec), ParallelTest (io.strimzi.test.annotations.ParallelTest)

Aggregations

GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder): 404 usages
ParallelTest (io.strimzi.test.annotations.ParallelTest): 194 usages
KafkaBuilder (io.strimzi.api.kafka.model.KafkaBuilder): 152 usages
Kafka (io.strimzi.api.kafka.model.Kafka): 146 usages
GenericKafkaListener (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener): 119 usages
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 116 usages
Tag (org.junit.jupiter.api.Tag): 94 usages
ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest): 86 usages
GenericKafkaListenerConfigurationBrokerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBrokerBuilder): 84 usages
Reconciliation (io.strimzi.operator.common.Reconciliation): 80 usages
Matchers.containsString (org.hamcrest.Matchers.containsString): 78 usages
KRaftNotSupported (io.strimzi.systemtest.annotations.KRaftNotSupported): 74 usages
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder): 74 usages
KafkaListenerType (io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType): 73 usages
Labels (io.strimzi.operator.common.model.Labels): 72 usages
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 68 usages
ResourceOperatorSupplier (io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier): 62 usages
ArrayList (java.util.ArrayList): 62 usages
GenericKafkaListenerConfigurationBroker (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBroker): 60 usages
TestStorage (io.strimzi.systemtest.storage.TestStorage): 60 usages
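
The aggregation shows that GenericKafkaListenerBuilder is most often combined with KafkaSpecBuilder/KafkaBuilder and KafkaListenerType. Below is a minimal standalone sketch of that pattern, reusing only classes that already appear in the examples above; the class name ListenerSpecSketch and the listener names and ports are illustrative, and the exact builder steps may vary between Strimzi releases.

import io.strimzi.api.kafka.model.KafkaSpec;
import io.strimzi.api.kafka.model.KafkaSpecBuilder;
import io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder;
import io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType;

public class ListenerSpecSketch {

    public static KafkaSpec specWithPlainAndTlsListeners() {
        // Two internal listeners: one plain (9092) and one TLS (9093), mirroring the tests above
        return new KafkaSpecBuilder()
            .withNewKafka()
                .withReplicas(3)
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName("plain")
                        .withPort(9092)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(false)
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName("tls")
                        .withPort(9093)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .build())
                .withNewEphemeralStorage()
                .endEphemeralStorage()
            .endKafka()
            .build();
    }
}

The same GenericKafkaListener objects can also be plugged into an existing Kafka template, as Examples 36 through 39 do with KafkaTemplates.kafkaEphemeral(...).editSpec().editKafka().withListeners(...).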