use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls in project strimzi by strimzi.
the class ListenersST method testLoadBalancerTls.
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
@Tag(LOADBALANCER_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
void testLoadBalancerTls(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                .withPort(9103)
                .withType(KafkaListenerType.LOADBALANCER)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls())
                .withNewConfiguration()
                    .withFinalizers(LB_FINALIZERS)
                .endConfiguration()
                .build())
            .withConfig(Collections.singletonMap("default.replication.factor", 3))
        .endKafka()
    .endSpec()
    .build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, userName).build());
ServiceUtils.waitUntilAddressIsReachable(KafkaResource.kafkaClient().inNamespace(namespaceName).withName(clusterName).get()
    .getStatus().getListeners().get(0).getAddresses().get(0).getHost());
ExternalKafkaClient externalKafkaClient = new ExternalKafkaClient.Builder()
    .withTopicName(topicName)
    .withNamespaceName(namespaceName)
    .withClusterName(clusterName)
    .withMessageCount(MESSAGE_COUNT)
    .withKafkaUsername(userName)
    .withSecurityProtocol(SecurityProtocol.SSL)
    .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
    .build();
externalKafkaClient.verifyProducedAndConsumedMessages(externalKafkaClient.sendMessagesTls(), externalKafkaClient.receiveMessagesTls());
}
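Across all of the usages collected here the recurring pattern is the same: a KafkaListenerAuthenticationTls instance is passed to GenericKafkaListenerBuilder.withAuth(...) so that the listener encrypts traffic and requires clients to authenticate with TLS client certificates. Below is a minimal sketch of just that listener construction, lifted out of the test harness; the listener name and port are illustrative values, and the import paths assume the same Strimzi API version as the tests above.
import io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls;
import io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener;
import io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder;
import io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType;

// Internal TLS listener that requires mutual TLS (client certificate) authentication
GenericKafkaListener tlsListener = new GenericKafkaListenerBuilder()
    .withName("tls")                                 // illustrative listener name
    .withPort(9093)                                  // port used by the internal TLS listeners in the tests above
    .withType(KafkaListenerType.INTERNAL)
    .withTls(true)                                   // encrypt traffic on this listener
    .withAuth(new KafkaListenerAuthenticationTls())  // require TLS client certificate authentication
    .build();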
use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls in project strimzi by strimzi.
the class HttpBridgeKafkaExternalListenersST method testTlsAuthWithWeirdUsername.
@ParallelTest
void testTlsAuthWithWeirdUsername(ExtensionContext extensionContext) {
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
// Create a weirdly named user containing '.' and the maximum of 64 characters -> TLS
final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasd";
// Initialize CertSecretSource with certificate and secret names for consumer
CertSecretSource certSecret = new CertSecretSource();
certSecret.setCertificate("ca.crt");
certSecret.setSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName));
KafkaBridgeSpec bridgeSpec = new KafkaBridgeSpecBuilder()
    .withNewKafkaClientAuthenticationTls()
        .withNewCertificateAndKey()
            .withSecretName(weirdUserName)
            .withCertificate("user.crt")
            .withKey("user.key")
        .endCertificateAndKey()
    .endKafkaClientAuthenticationTls()
    .withNewTls()
        .withTrustedCertificates(certSecret)
    .endTls()
    .build();
testWeirdUsername(extensionContext, weirdUserName, new KafkaListenerAuthenticationTls(), bridgeSpec, SecurityProtocol.SSL);
}
use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls in project strimzi-kafka-operator by strimzi.
the class MirrorMakerIsolatedST method testMirrorMakerTlsAuthenticated.
/**
* Test mirroring messages by Mirror Maker over a TLS transport using mutual TLS authentication
*/
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
@SuppressWarnings({ "checkstyle:MethodLength" })
void testMirrorMakerTlsAuthenticated(ExtensionContext extensionContext) throws Exception {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
String topicSourceName = TOPIC_NAME + "-source" + "-" + rng.nextInt(Integer.MAX_VALUE);
String kafkaSourceUserName = clusterName + "-my-user-source";
String kafkaTargetUserName = clusterName + "-my-user-target";
// Deploy source kafka with tls listener and mutual tls auth
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls())
                .build())
        .endKafka()
    .endSpec()
    .build());
// Deploy target kafka with tls listener and mutual tls auth
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls())
                .build())
        .endKafka()
    .endSpec()
    .build());
// Deploy topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName).build());
// Create and wait for readiness of the Kafka users
KafkaUser userSource = KafkaUserTemplates.tlsUser(kafkaClusterSourceName, kafkaSourceUserName).build();
KafkaUser userTarget = KafkaUserTemplates.tlsUser(kafkaClusterTargetName, kafkaTargetUserName).build();
resourceManager.createResource(extensionContext, userSource);
resourceManager.createResource(extensionContext, userTarget);
// Initialize CertSecretSource with certificate and secret names for consumer
CertSecretSource certSecretSource = new CertSecretSource();
certSecretSource.setCertificate("ca.crt");
certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
// Initialize CertSecretSource with certificate and secret names for producer
CertSecretSource certSecretTarget = new CertSecretSource();
certSecretTarget.setCertificate("ca.crt");
certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, clusterName + "-" + Constants.KAFKA_CLIENTS, userSource, userTarget).build());
final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
String baseTopic = mapWithTestTopics.get(extensionContext.getDisplayName());
String topicTestName1 = baseTopic + "-test-1";
String topicTestName2 = baseTopic + "-test-2";
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName1).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName2).build());
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsPodName)
    .withTopicName(topicTestName1)
    .withNamespaceName(namespaceName)
    .withClusterName(kafkaClusterSourceName)
    .withKafkaUsername(userSource.getMetadata().getName())
    .withMessageCount(messagesCount)
    .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .build();
// Check brokers availability
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTestName2).withClusterName(kafkaClusterTargetName).withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME).withKafkaUsername(userTarget.getMetadata().getName()).build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
// Deploy Mirror Maker with tls listener and mutual tls auth
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterSourceName, kafkaClusterTargetName, ClientUtils.generateRandomConsumerGroup(), 1, true)
    .editSpec()
        .editConsumer()
            .withNewTls()
                .withTrustedCertificates(certSecretSource)
            .endTls()
            .withNewKafkaClientAuthenticationTls()
                .withNewCertificateAndKey()
                    .withSecretName(kafkaSourceUserName)
                    .withCertificate("user.crt")
                    .withKey("user.key")
                .endCertificateAndKey()
            .endKafkaClientAuthenticationTls()
        .endConsumer()
        .editProducer()
            .withNewTls()
                .withTrustedCertificates(certSecretTarget)
            .endTls()
            .withNewKafkaClientAuthenticationTls()
                .withNewCertificateAndKey()
                    .withSecretName(kafkaTargetUserName)
                    .withCertificate("user.crt")
                    .withKey("user.key")
                .endCertificateAndKey()
            .endKafkaClientAuthenticationTls()
        .endProducer()
    .endSpec()
    .build());
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicSourceName).withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder().withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
internalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(internalKafkaClient.getMessageCount());
}
use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls in project strimzi-kafka-operator by strimzi.
the class OpaIntegrationST method setup.
@BeforeAll
void setup(ExtensionContext extensionContext) throws Exception {
// Install OPA
cmdKubeClient().apply(FileUtils.updateNamespaceOfYamlFile(TestUtils.USER_PATH + "/../systemtest/src/test/resources/opa/opa.yaml", namespace));
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(CLUSTER_NAME, 3, 1)
    .editMetadata()
        .withNamespace(namespace)
    .endMetadata()
    .editSpec()
        .editKafka()
            .withNewKafkaAuthorizationOpa()
                .withUrl("http://opa:8181/v1/data/kafka/simple/authz/allow")
                .addToSuperUsers("CN=" + OPA_SUPERUSER)
            .endKafkaAuthorizationOpa()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls())
                .build())
        .endKafka()
    .endSpec()
    .build());
}
use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls in project strimzi-kafka-operator by strimzi.
the class SecurityST method testAclWithSuperUser.
@ParallelNamespaceTest
@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
void testAclWithSuperUser(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(namespace, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3, 1)
    .editSpec()
        .editKafka()
            .withNewKafkaAuthorizationSimple()
                .withSuperUsers("CN=" + userName)
            .endKafkaAuthorizationSimple()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                .withPort(9094)
                .withType(KafkaListenerType.NODEPORT)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationTls())
                .build())
        .endKafka()
    .endSpec()
    .build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, userName)
    .editSpec()
        .withNewKafkaUserAuthorizationSimple()
            .addNewAcl()
                .withNewAclRuleTopicResource()
                    .withName(topicName)
                .endAclRuleTopicResource()
                .withOperation(AclOperation.WRITE)
            .endAcl()
            .addNewAcl()
                .withNewAclRuleTopicResource()
                    .withName(topicName)
                .endAclRuleTopicResource()
                // DESCRIBE is needed so that the user can read the topic metadata
                .withOperation(AclOperation.DESCRIBE)
            .endAcl()
        .endKafkaUserAuthorizationSimple()
    .endSpec()
    .build());
LOGGER.info("Checking kafka super user:{} that is able to send messages to topic:{}", userName, topicName);
ExternalKafkaClient externalKafkaClient = new ExternalKafkaClient.Builder()
    .withTopicName(topicName)
    .withNamespaceName(namespaceName)
    .withClusterName(clusterName)
    .withKafkaUsername(userName)
    .withMessageCount(MESSAGE_COUNT)
    .withSecurityProtocol(SecurityProtocol.SSL)
    .withListenerName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
    .build();
assertThat(externalKafkaClient.sendMessagesTls(), is(MESSAGE_COUNT));
LOGGER.info("Checking kafka super user:{} that is able to read messages to topic:{} regardless that " + "we configured Acls with only write operation", userName, topicName);
assertThat(externalKafkaClient.receiveMessagesTls(), is(MESSAGE_COUNT));
String nonSuperuserName = userName + "-non-super-user";
resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, nonSuperuserName)
    .editSpec()
        .withNewKafkaUserAuthorizationSimple()
            .addNewAcl()
                .withNewAclRuleTopicResource()
                    .withName(topicName)
                .endAclRuleTopicResource()
                .withOperation(AclOperation.WRITE)
            .endAcl()
            .addNewAcl()
                .withNewAclRuleTopicResource()
                    .withName(topicName)
                .endAclRuleTopicResource()
                // DESCRIBE is needed so that the user can read the topic metadata
                .withOperation(AclOperation.DESCRIBE)
            .endAcl()
        .endKafkaUserAuthorizationSimple()
    .endSpec()
    .build());
LOGGER.info("Checking kafka super user:{} that is able to send messages to topic:{}", nonSuperuserName, topicName);
externalKafkaClient = externalKafkaClient.toBuilder().withKafkaUsername(nonSuperuserName).build();
assertThat(externalKafkaClient.sendMessagesTls(), is(MESSAGE_COUNT));
LOGGER.info("Checking kafka super user:{} that is not able to read messages to topic:{} because of defined" + " ACLs on only write operation", nonSuperuserName, topicName);
ExternalKafkaClient newExternalKafkaClient = externalKafkaClient.toBuilder().withConsumerGroupName(ClientUtils.generateRandomConsumerGroup()).build();
assertThrows(GroupAuthorizationException.class, newExternalKafkaClient::receiveMessagesTls);
}
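On the client side, the Bridge and Mirror Maker specs above pair the broker's KafkaListenerAuthenticationTls with a KafkaClientAuthenticationTls that points at the certificate and key stored in the KafkaUser secret, plus a CertSecretSource that trusts the cluster CA. A minimal sketch of those two pieces on their own follows; the cluster and user names are illustrative, and the standalone KafkaClientAuthenticationTlsBuilder and the import paths are assumed to match the Strimzi API version used in the examples above.
import io.strimzi.api.kafka.model.CertSecretSource;
import io.strimzi.api.kafka.model.KafkaResources;
import io.strimzi.api.kafka.model.authentication.KafkaClientAuthenticationTls;
import io.strimzi.api.kafka.model.authentication.KafkaClientAuthenticationTlsBuilder;

// Trust the cluster CA certificate so the client can verify the broker's TLS listener
CertSecretSource clusterCa = new CertSecretSource();
clusterCa.setCertificate("ca.crt");
clusterCa.setSecretName(KafkaResources.clusterCaCertificateSecretName("my-cluster")); // illustrative cluster name

// Present the KafkaUser's certificate and key to satisfy the listener's mutual TLS authentication
KafkaClientAuthenticationTls clientAuth = new KafkaClientAuthenticationTlsBuilder()
    .withNewCertificateAndKey()
        .withSecretName("my-user")    // secret created by the User Operator for a tls KafkaUser (illustrative name)
        .withCertificate("user.crt")
        .withKey("user.key")
    .endCertificateAndKey()
    .build();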