Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.
Class ListenersST, method testSendMessagesCustomListenerTlsScramSha.
/**
* Test custom listener configured with SCRAM-SHA auth and TLS.
*/
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
@Tag(INTERNAL_CLIENTS_USED)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
void testSendMessagesCustomListenerTlsScramSha(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext);

    // Use a Kafka with plain listener disabled
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withType(KafkaListenerType.INTERNAL)
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9122)
                    .withTls(true)
                    .withNewKafkaListenerAuthenticationCustomAuth()
                        .withSasl(true)
                        .withListenerConfig(Map.of(
                            "scram-sha-512.sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required;",
                            "sasl.enabled.mechanisms", "SCRAM-SHA-512"))
                    .endKafkaListenerAuthenticationCustomAuth()
                    .build())
            .endKafka()
        .endSpec()
        .build());

    resourceManager.createResource(extensionContext,
        KafkaTopicTemplates.topic(testStorage.getClusterName(), testStorage.getTopicName()).build(),
        KafkaUserTemplates.scramShaUser(testStorage.getClusterName(), testStorage.getUserName()).build());

    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(testStorage.getClusterName()) + ":9122")
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(testStorage.getUserName())
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .build();

    resourceManager.createResource(extensionContext,
        kafkaClients.producerScramShaTlsStrimzi(testStorage.getClusterName()),
        kafkaClients.consumerScramShaTlsStrimzi(testStorage.getClusterName()));

    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
}
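The @KRaftNotSupported annotation above only marks the test; the actual skipping is done by the test framework's JUnit 5 extension, which disables the annotated test when the cluster runs Kafka in KRaft mode (where the User Operator is not available). A minimal sketch of how such an annotation can be wired to a JUnit 5 ExecutionCondition is shown below; the condition class name and the STRIMZI_FEATURE_GATES check are illustrative assumptions, not the actual Strimzi extension code.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.extension.ConditionEvaluationResult;
import org.junit.jupiter.api.extension.ExecutionCondition;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.platform.commons.support.AnnotationSupport;

@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
@ExtendWith(KRaftNotSupportedCondition.class)   // hypothetical condition class, for illustration only
@interface KRaftNotSupported {
    // human-readable reason why the test cannot run against a KRaft-based cluster
    String value() default "";
}

// Hypothetical condition: disables the annotated test when KRaft is enabled in the environment.
class KRaftNotSupportedCondition implements ExecutionCondition {
    @Override
    public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) {
        // assumed signal; the real extension may read a different property or feature gate
        boolean kraftEnabled = System.getenv().getOrDefault("STRIMZI_FEATURE_GATES", "").contains("UseKRaft");
        return AnnotationSupport.findAnnotation(context.getElement(), KRaftNotSupported.class)
            .filter(annotation -> kraftEnabled)
            .map(annotation -> ConditionEvaluationResult.disabled("Skipped in KRaft mode: " + annotation.value()))
            .orElseGet(() -> ConditionEvaluationResult.enabled("KRaft not enabled or annotation not present"));
    }
}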
Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.
Class ListenersST, method testCustomSoloCertificatesForLoadBalancer.
@ParallelNamespaceTest
@Tag(LOADBALANCER_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(INTERNAL_CLIENTS_USED)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
void testCustomSoloCertificatesForLoadBalancer(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext);
    final String clusterCustomCertServer1 = testStorage.getClusterName() + "-" + customCertServer1;

    SecretUtils.createCustomSecret(clusterCustomCertServer1, testStorage.getClusterName(), testStorage.getNamespaceName(), STRIMZI_CERT_AND_KEY_1);

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 3)
        .editSpec()
            .editKafka()
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9107)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .withNewConfiguration()
                            .withNewBrokerCertChainAndKey()
                                .withSecretName(clusterCustomCertServer1)
                                .withKey("ca.key")
                                .withCertificate("ca.crt")
                            .endBrokerCertChainAndKey()
                        .endConfiguration()
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9108)
                        .withType(KafkaListenerType.LOADBALANCER)
                        .withTls(true)
                        .withNewConfiguration()
                            .withNewBrokerCertChainAndKey()
                                .withSecretName(clusterCustomCertServer1)
                                .withKey("ca.key")
                                .withCertificate("ca.crt")
                            .endBrokerCertChainAndKey()
                            .withFinalizers(LB_FINALIZERS)
                        .endConfiguration()
                        .build())
            .endKafka()
        .endSpec()
        .build());

    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(testStorage.getClusterName(), testStorage.getUserName()).build());

    ExternalKafkaClient externalKafkaClient = new ExternalKafkaClient.Builder()
        .withTopicName(testStorage.getTopicName())
        .withNamespaceName(testStorage.getNamespaceName())
        .withClusterName(testStorage.getClusterName())
        .withKafkaUsername(testStorage.getUserName())
        .withMessageCount(MESSAGE_COUNT)
        .withCertificateAuthorityCertificateName(clusterCustomCertServer1)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
        .build();

    externalKafkaClient.verifyProducedAndConsumedMessages(
        externalKafkaClient.sendMessagesTls(),
        externalKafkaClient.receiveMessagesTls());

    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(testStorage.getClusterName()) + ":9107")
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(testStorage.getUserName())
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .withConsumerGroup("consumer-group-certs-3")
        .withCaCertSecretName(clusterCustomCertServer1)
        .build();

    resourceManager.createResource(extensionContext, kafkaClients.producerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getProducerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);

    kafkaClients = new KafkaClientsBuilder(kafkaClients).withMessageCount(2 * MESSAGE_COUNT).build();

    resourceManager.createResource(extensionContext, kafkaClients.consumerTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
}
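SecretUtils.createCustomSecret is a test-framework helper whose body is not shown here; what matters for this test is that the resulting Secret carries entries named "ca.crt" and "ca.key", since those are the keys referenced by withCertificate and withKey in the brokerCertChainAndKey configuration above. A rough sketch of such a Secret built with the fabric8 SecretBuilder, using placeholder PEM content instead of the real STRIMZI_CERT_AND_KEY_1 material, could look like this:

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.api.model.SecretBuilder;

// Placeholder PEM strings; the test loads real certificate and key material.
String certPem = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----";
String keyPem = "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----";

Secret customCertSecret = new SecretBuilder()
    .withNewMetadata()
        .withName(clusterCustomCertServer1)                 // same name the listeners reference
        .withNamespace(testStorage.getNamespaceName())
    .endMetadata()
    // data keys must match withCertificate("ca.crt") and withKey("ca.key") above
    .addToData("ca.crt", Base64.getEncoder().encodeToString(certPem.getBytes(StandardCharsets.UTF_8)))
    .addToData("ca.key", Base64.getEncoder().encodeToString(keyPem.getBytes(StandardCharsets.UTF_8)))
    .build();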
Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi by strimzi.
Class ListenersST, method testNodePortTls.
@ParallelNamespaceTest
@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
void testNodePortTls(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(clusterOperator.getDeploymentNamespace(), extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3, 1)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                    .withPort(9101)
                    .withType(KafkaListenerType.NODEPORT)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationTls())
                    .build())
                .withConfig(Collections.singletonMap("default.replication.factor", 3))
            .endKafka()
        .endSpec()
        .build());

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, userName).build());

    ExternalKafkaClient externalKafkaClient = new ExternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespaceName)
        .withClusterName(clusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withKafkaUsername(userName)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
        .build();

    externalKafkaClient.verifyProducedAndConsumedMessages(
        externalKafkaClient.sendMessagesTls(),
        externalKafkaClient.receiveMessagesTls());
}
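The ExternalKafkaClient builder above hides the plain Kafka client configuration it assembles from the node port address, the cluster CA and the KafkaUser certificate. For orientation, a hand-rolled producer with the same security settings would look roughly as follows; the bootstrap address and the keystore/truststore paths and passwords are placeholders, not values produced by the test.

import java.util.Properties;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.serialization.StringSerializer;

Properties props = new Properties();
props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "<node-address>:<node-port>");   // placeholder
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
// truststore holding the cluster CA, keystore holding the KafkaUser certificate (paths are placeholders)
props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/tmp/truststore.p12");
props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "<truststore-password>");
props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "/tmp/keystore.p12");
props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "<keystore-password>");

try (KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
    producer.send(new ProducerRecord<>("<topic-name>", "message"));
}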
Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi-kafka-operator by strimzi.
Class ConnectIsolatedST, method testSecretsWithKafkaConnectWithTlsAndTlsClientAuthentication.
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testSecretsWithKafkaConnectWithTlsAndTlsClientAuthentication(ExtensionContext extensionContext) {
    TestStorage storage = new TestStorage(extensionContext);

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(storage.getClusterName(), 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationTls())
                    .build())
            .endKafka()
        .endSpec()
        .build());

    KafkaUser kafkaUser = KafkaUserTemplates.tlsUser(storage.getClusterName(), storage.getUserName()).build();

    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(storage.getClusterName(), storage.getTopicName()).build());

    KafkaConnect connect = KafkaConnectTemplates.kafkaConnectWithFilePlugin(storage.getNamespaceName(), storage.getClusterName(), 1)
        .editSpec()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewTls()
                .addNewTrustedCertificate()
                    .withSecretName(storage.getClusterName() + "-cluster-ca-cert")
                    .withCertificate("ca.crt")
                .endTrustedCertificate()
            .endTls()
            .withBootstrapServers(storage.getClusterName() + "-kafka-bootstrap:9093")
            .withNewKafkaClientAuthenticationTls()
                .withNewCertificateAndKey()
                    .withSecretName(storage.getUserName())
                    .withCertificate("user.crt")
                    .withKey("user.key")
                .endCertificateAndKey()
            .endKafkaClientAuthenticationTls()
        .endSpec()
        .build();

    resourceManager.createResource(extensionContext, connect, ScraperTemplates.scraperPod(storage.getNamespaceName(), storage.getScraperName()).build());

    LOGGER.info("Deploy NetworkPolicies for KafkaConnect");
    NetworkPolicyResource.deployNetworkPolicyForResource(extensionContext, connect, KafkaConnectResources.deploymentName(storage.getClusterName()));

    final String kafkaConnectPodName = kubeClient(storage.getNamespaceName()).listPodsByPrefixInName(KafkaConnectResources.deploymentName(storage.getClusterName())).get(0).getMetadata().getName();
    final String kafkaConnectLogs = kubeClient(storage.getNamespaceName()).logs(kafkaConnectPodName);
    final String scraperPodName = kubeClient(storage.getNamespaceName()).listPodsByPrefixInName(storage.getScraperName()).get(0).getMetadata().getName();

    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(storage.getNamespaceName(), kafkaConnectPodName);

    LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
    assertThat(kafkaConnectLogs, not(containsString("ERROR")));

    LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", scraperPodName, storage.getTopicName());
    KafkaConnectorUtils.createFileSinkConnector(storage.getNamespaceName(), scraperPodName, storage.getTopicName(), Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(storage.getClusterName(), storage.getNamespaceName(), 8083));

    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withTopicName(storage.getTopicName())
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(storage.getUserName())
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(storage.getClusterName()))
        .withProducerName(storage.getProducerName())
        .withConsumerName(storage.getConsumerName())
        .withNamespaceName(storage.getNamespaceName())
        .build();

    resourceManager.createResource(extensionContext, kafkaClients.producerTlsStrimzi(storage.getClusterName()), kafkaClients.consumerTlsStrimzi(storage.getClusterName()));
    ClientUtils.waitForClientsSuccess(storage.getProducerName(), storage.getConsumerName(), storage.getNamespaceName(), MESSAGE_COUNT);

    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(storage.getNamespaceName(), kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
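KafkaConnectorUtils.createFileSinkConnector registers the connector through the Kafka Connect REST API (port 8083), reached from the scraper pod via the URL returned by KafkaConnectResources.url. The request follows the standard Connect REST shape shown below; the connector name is a placeholder and the file value stands in for Constants.DEFAULT_SINK_FILE_PATH.

POST <connect-rest-url>/connectors
{
  "name": "sink-file-connector",
  "config": {
    "connector.class": "org.apache.kafka.connect.file.FileStreamSinkConnector",
    "tasks.max": "1",
    "topics": "<topic-name>",
    "file": "<Constants.DEFAULT_SINK_FILE_PATH>"
  }
}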
Use of io.strimzi.systemtest.annotations.KRaftNotSupported in project strimzi-kafka-operator by strimzi.
Class ConnectIsolatedST, method testConnectTlsAuthWithWeirdUserName.
@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(CONNECTOR_OPERATOR)
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test class")
@ParallelNamespaceTest
void testConnectTlsAuthWithWeirdUserName(ExtensionContext extensionContext) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);

    // Create weird named user with . and maximum of 64 chars -> TLS
    final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasd";

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
        .editSpec()
            .editKafka()
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9093)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .withAuth(new KafkaListenerAuthenticationTls())
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9094)
                        .withType(KafkaListenerType.NODEPORT)
                        .withTls(true)
                        .withAuth(new KafkaListenerAuthenticationTls())
                        .build())
            .endKafka()
        .endSpec()
        .build());

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, weirdUserName).build());

    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnectWithFilePlugin(namespaceName, clusterName, 1)
        .editMetadata()
            .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
        .endMetadata()
        .editSpec()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewTls()
                .withTrustedCertificates(new CertSecretSourceBuilder()
                    .withCertificate("ca.crt")
                    .withSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName))
                    .build())
            .endTls()
            .withNewKafkaClientAuthenticationTls()
                .withNewCertificateAndKey()
                    .withSecretName(weirdUserName)
                    .withCertificate("user.crt")
                    .withKey("user.key")
                .endCertificateAndKey()
            .endKafkaClientAuthenticationTls()
            .withBootstrapServers(KafkaResources.tlsBootstrapAddress(clusterName))
        .endSpec()
        .build());

    testConnectAuthorizationWithWeirdUserName(extensionContext, clusterName, weirdUserName, SecurityProtocol.SSL, topicName);
}