
Example 21 with KafkaOauthClientsBuilder

use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.

the class OauthPlainIsolatedST method testAccessTokenClaimCheck.

@ParallelTest
void testAccessTokenClaimCheck(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String audienceProducerName = OAUTH_CLIENT_AUDIENCE_PRODUCER + "-" + clusterName;
    String audienceConsumerName = OAUTH_CLIENT_AUDIENCE_CONSUMER + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    LOGGER.info("Using clients whose clientId does not contain 'hello-world' in the access token.");
    KafkaOauthClients oauthAudienceInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(audienceProducerName)
        .withConsumerName(audienceConsumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthProducerClientId(OAUTH_CLIENT_AUDIENCE_PRODUCER)
        .withOauthConsumerClientId(OAUTH_CLIENT_AUDIENCE_CONSUMER)
        .withOauthClientSecret(OAUTH_CLIENT_AUDIENCE_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.producerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(audienceProducerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.consumerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(audienceConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceProducerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceConsumerName);
    LOGGER.info("Using clients whose clientId contains 'hello-world' in the access token.");
    KafkaOauthClients oauthInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthInternalClientJob.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, oauthInternalClientJob.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
}
Also used : KafkaOauthClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder) KafkaOauthClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)
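
Examples 22 and 23 below repeat the same producer/consumer round trip before exercising MirrorMaker and Connect. For reference, here is a condensed sketch of that shared flow, assuming the surrounding test class supplies the constants and helpers used in the examples on this page (INFRA_NAMESPACE, MESSAGE_COUNT, OAUTH_CLIENT_NAME, OAUTH_CLIENT_SECRET, oauthClusterName, keycloakInstance, resourceManager, ClientUtils, JobUtils):

// Condensed sketch of the shared producer/consumer flow; all names come from the test suite above.
KafkaOauthClients oauthClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();

// Run the producer Job, wait until it has sent MESSAGE_COUNT messages, then clean it up.
resourceManager.createResource(extensionContext, oauthClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);

// Run the consumer Job and check that it receives the same number of messages.
resourceManager.createResource(extensionContext, oauthClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);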

Example 22 with KafkaOauthClientsBuilder

use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.

the class OauthTlsIsolatedST method testMirrorMaker.

@Description("As an OAuth MirrorMaker, I am able to replicate topic data using encrypted communication")
@IsolatedTest("Using more than one Kafka cluster in one namespace")
@Tag(MIRROR_MAKER)
@Tag(NODEPORT_SUPPORTED)
@SuppressWarnings({ "checkstyle:MethodLength" })
void testMirrorMaker(ExtensionContext extensionContext) {
    // NodePort needs cluster-wide rights to work properly, which is not possible with STRIMZI_RBAC_SCOPE=NAMESPACE
    assumeFalse(Environment.isNamespaceRbacScope());
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
    KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
    String targetKafkaCluster = oauthClusterName + "-target";
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(targetKafkaCluster, 1, 1)
        .editMetadata()
            .withNamespace(INFRA_NAMESPACE)
        .endMetadata()
        .editSpec()
            .editKafka()
                .withListeners(OauthAbstractST.BUILD_OAUTH_TLS_LISTENER.apply(keycloakInstance),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9094)
                        .withType(KafkaListenerType.NODEPORT)
                        .withTls(true)
                        .withNewKafkaListenerAuthenticationOAuth()
                            .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                            .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
                            .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
                            .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
                            .withUserNameClaim(keycloakInstance.getUserNameClaim())
                            .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                                .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                                .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                                .build())
                            .withDisableTlsHostnameVerification(true)
                        .endKafkaListenerAuthenticationOAuth()
                        .build())
            .endKafka()
        .endSpec()
        .build());
    resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(oauthClusterName, oauthClusterName, targetKafkaCluster,
        ClientUtils.generateRandomConsumerGroup(), 1, true)
        .editMetadata()
            .withNamespace(INFRA_NAMESPACE)
        .endMetadata()
        .editSpec()
            .withNewConsumer()
                .withNewTls()
                    .withTrustedCertificates(new CertSecretSourceBuilder()
                        .withCertificate("ca.crt")
                        .withSecretName(KafkaResources.clusterCaCertificateSecretName(oauthClusterName))
                        .build())
                .endTls()
                .withBootstrapServers(KafkaResources.tlsBootstrapAddress(oauthClusterName))
                .withGroupId(ClientUtils.generateRandomConsumerGroup())
                .addToConfig(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
                .withNewKafkaClientAuthenticationOAuth()
                    .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                    .withClientId("kafka-mirror-maker")
                    .withNewClientSecret()
                        .withSecretName(MIRROR_MAKER_OAUTH_SECRET)
                        .withKey(OAUTH_KEY)
                    .endClientSecret()
                    .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                        .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                        .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                        .build())
                    .withDisableTlsHostnameVerification(true)
                .endKafkaClientAuthenticationOAuth()
            .endConsumer()
            .withNewProducer()
                .withBootstrapServers(KafkaResources.tlsBootstrapAddress(targetKafkaCluster))
                .withNewTls()
                    .withTrustedCertificates(new CertSecretSourceBuilder()
                        .withCertificate("ca.crt")
                        .withSecretName(KafkaResources.clusterCaCertificateSecretName(targetKafkaCluster))
                        .build())
                .endTls()
                .withNewKafkaClientAuthenticationOAuth()
                    .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                    .withClientId("kafka-mirror-maker")
                    .withNewClientSecret()
                        .withSecretName(MIRROR_MAKER_OAUTH_SECRET)
                        .withKey(OAUTH_KEY)
                    .endClientSecret()
                    .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                        .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                        .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                        .build())
                    .withDisableTlsHostnameVerification(true)
                .endKafkaClientAuthenticationOAuth()
                .addToConfig(ProducerConfig.ACKS_CONFIG, "all")
            .endProducer()
        .endSpec()
        .build());
    String mirrorMakerPodName = kubeClient().listPodsByPrefixInName(INFRA_NAMESPACE, KafkaMirrorMakerResources.deploymentName(oauthClusterName)).get(0).getMetadata().getName();
    String kafkaMirrorMakerLogs = kubeClient().logsInSpecificNamespace(INFRA_NAMESPACE, mirrorMakerPodName);
    assertThat(kafkaMirrorMakerLogs, not(containsString("keytool error: java.io.FileNotFoundException: /opt/kafka/consumer-oauth-certs/**/* (No such file or directory)")));
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(INFRA_NAMESPACE, oauthClusterName, USER_NAME).build());
    KafkaUserUtils.waitForKafkaUserCreation(INFRA_NAMESPACE, USER_NAME);
    LOGGER.info("Creating a new client with a new consumer group, pointing at the {} cluster", targetKafkaCluster);
    KafkaOauthClients kafkaOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withClientUserName(USER_NAME)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(targetKafkaCluster))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, kafkaOauthClientJob.consumerStrimziOauthTls(targetKafkaCluster));
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
}
Also used : KafkaOauthClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KafkaOauthClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients) Description(io.vertx.core.cli.annotations.Description) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest) Tag(org.junit.jupiter.api.Tag)

Example 23 with KafkaOauthClientsBuilder

use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.

the class OauthTlsIsolatedST method testProducerConsumerConnect.

@Description("As an OAuth KafkaConnect, I am able to sink messages from a Kafka broker topic using encrypted communication.")
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testProducerConsumerConnect(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
    KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, oauthClusterName + "-" + Constants.KAFKA_CLIENTS).build());
    String defaultKafkaClientsPodName = ResourceManager.kubeClient().listPodsByPrefixInName(INFRA_NAMESPACE, oauthClusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, INFRA_NAMESPACE, oauthClusterName, 1)
        .editSpec()
            .withConfig(connectorConfig)
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewKafkaClientAuthenticationOAuth()
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .withClientId("kafka-connect")
                .withNewClientSecret()
                    .withSecretName("my-connect-oauth")
                    .withKey(OAUTH_KEY)
                .endClientSecret()
                .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                    .build())
                .withDisableTlsHostnameVerification(true)
            .endKafkaClientAuthenticationOAuth()
            .withNewTls()
                .addNewTrustedCertificate()
                    .withSecretName(oauthClusterName + "-cluster-ca-cert")
                    .withCertificate("ca.crt")
                .endTrustedCertificate()
            .endTls()
            .withBootstrapServers(oauthClusterName + "-kafka-bootstrap:9093")
        .endSpec()
        .build());
    String kafkaConnectPodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(INFRA_NAMESPACE, kafkaConnectPodName);
    KafkaConnectorUtils.createFileSinkConnector(INFRA_NAMESPACE, defaultKafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, INFRA_NAMESPACE, 8083));
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(INFRA_NAMESPACE, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "\"Hello-world - 99\"");
}
Also used : KafkaOauthClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) KafkaOauthClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients) Description(io.vertx.core.cli.annotations.Description) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest) Tag(org.junit.jupiter.api.Tag)

Example 24 with KafkaOauthClientsBuilder

use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.

the class OauthAuthorizationIsolatedST method testSuperUserWithOauthAuthorization.

@Description("As a superuser of team A and team B, I am able to break defined authorization rules")
@ParallelTest
@Order(6)
void testSuperUserWithOauthAuthorization(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + clusterName;
    String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + clusterName;
    String teamBProducerName = TEAM_B_PRODUCER_NAME + "-" + clusterName;
    String teamBConsumerName = TEAM_B_CONSUMER_NAME + "-" + clusterName;
    // write-only access means that Team A cannot create a new topic matching 'x-.*'
    String topicXName = TOPIC_X + mapWithTestTopics.get(extensionContext.getDisplayName());
    LabelSelector kafkaSelector = KafkaResource.getLabelSelector(oauthClusterName, KafkaResources.kafkaStatefulSetName(oauthClusterName));
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicXName, INFRA_NAMESPACE).build());
    LOGGER.info("Verifying that team B is not able to write to a topic starting with 'x-' because the Kafka cluster does not have super-users defined to break the authorization rules");
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(INFRA_NAMESPACE, oauthClusterName, userName).build());
    KafkaOauthClients teamBOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(teamBProducerName)
        .withConsumerName(teamBConsumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicXName)
        .withMessageCount(MESSAGE_COUNT)
        .withConsumerGroup("x-consumer_group_b-" + clusterName)
        .withOauthClientId(TEAM_B_CLIENT)
        .withOauthClientSecret(TEAM_B_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .withClientUserName(userName)
        .build();
    resourceManager.createResource(extensionContext, teamBOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    JobUtils.waitForJobFailure(teamBProducerName, INFRA_NAMESPACE, 30_000);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamBProducerName);
    LOGGER.info("Verifying that team A is not able to read from a topic starting with 'x-' because the Kafka cluster does not have super-users defined to break the authorization rules");
    KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(teamAProducerName)
        .withConsumerName(teamAConsumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicXName)
        .withMessageCount(MESSAGE_COUNT)
        .withConsumerGroup("x-consumer_group_b1-" + clusterName)
        .withOauthClientId(TEAM_A_CLIENT)
        .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .withClientUserName(userName)
        .build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.consumerStrimziOauthTls(oauthClusterName));
    JobUtils.waitForJobFailure(teamAConsumerName, INFRA_NAMESPACE, 30_000);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAConsumerName);
    Map<String, String> kafkaPods = PodUtils.podSnapshot(INFRA_NAMESPACE, kafkaSelector);
    KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
        List<String> superUsers = new ArrayList<>(2);
        superUsers.add("service-account-" + TEAM_A_CLIENT);
        superUsers.add("service-account-" + TEAM_B_CLIENT);
        ((KafkaAuthorizationKeycloak) kafka.getSpec().getKafka().getAuthorization()).setSuperUsers(superUsers);
    }, INFRA_NAMESPACE);
    RollingUpdateUtils.waitTillComponentHasRolled(INFRA_NAMESPACE, kafkaSelector, 1, kafkaPods);
    LOGGER.info("Verifying that team B is able to write to a topic starting with 'x-' and break the authorization rule");
    resourceManager.createResource(extensionContext, teamBOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamBProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    LOGGER.info("Verifying that team A is able to read from a topic starting with 'x-' and break the authorization rule");
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withConsumerGroup("x-consumer_group_b2-" + clusterName).withTopicName(topicXName).build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.consumerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
Also used : KafkaOauthClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder) ArrayList(java.util.ArrayList) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) KafkaAuthorizationKeycloak(io.strimzi.api.kafka.model.KafkaAuthorizationKeycloak) KafkaOauthClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients) Order(org.junit.jupiter.api.Order) TestMethodOrder(org.junit.jupiter.api.TestMethodOrder) Description(io.vertx.core.cli.annotations.Description) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)

Example 25 with KafkaOauthClientsBuilder

use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.

the class OauthAuthorizationIsolatedST method testSessionReAuthentication.

/**
 * 1) Try to send messages to a topic starting with `x-` with the producer from Dev Team A
 * 2) Change the OAuth listener configuration -> add maxSecondsWithoutReauthentication set to 30s
 * 3) Try to send messages with a delay of 1000ms (in the meantime, the permissions configuration will be changed)
 * 4) Get all configuration from Keycloak (realms, policies) and change the policy so that the Dev Team A producer should not be able to send messages to the topic
 *      starting with `x-` -> updating the policy through the Keycloak API
 * 5) Wait for the WaitException to appear -> as the producer doesn't have permission to send messages, the
 *      job will end up in an error state
 * 6) Try to send messages to the topic starting with `a-` -> we should still be able to send messages, because we didn't change those permissions
 * 7) Change the permissions back and check that the messages are correctly sent
 *
 * The re-authentication can be seen in the log of the team-a-producer pod.
 */
@IsolatedTest("Modification of shared Kafka cluster")
@Order(7)
@SuppressWarnings({ "checkstyle:MethodLength" })
void testSessionReAuthentication(ExtensionContext extensionContext) {
    String topicXName = TOPIC_X + "-example-topic";
    String topicAName = TOPIC_A + "-example-topic";
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + clusterName;
    String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + clusterName;
    LOGGER.info("Verifying that the team A producer is able to send messages to the {} topic -> a topic starting with 'x-'", topicXName);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicXName, INFRA_NAMESPACE).build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicAName, INFRA_NAMESPACE).build());
    KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(teamAProducerName)
        .withConsumerName(teamAConsumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicXName)
        .withMessageCount(MESSAGE_COUNT)
        .withConsumerGroup("a-consumer_group")
        .withClientUserName(TEAM_A_CLIENT)
        .withOauthClientId(TEAM_A_CLIENT)
        .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
    LOGGER.info("Adding maxSecondsWithoutReauthentication to the Kafka listener with OAuth authentication");
    KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
        kafka.getSpec().getKafka().setListeners(Arrays.asList(new GenericKafkaListenerBuilder()
            .withName("tls")
            .withPort(9093)
            .withType(KafkaListenerType.INTERNAL)
            .withTls(true)
            .withNewKafkaListenerAuthenticationOAuth()
                .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
                .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
                .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
                .withUserNameClaim(keycloakInstance.getUserNameClaim())
                .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                    .build())
                .withDisableTlsHostnameVerification(true)
                .withMaxSecondsWithoutReauthentication(30)
            .endKafkaListenerAuthenticationOAuth()
            .build()));
    }, INFRA_NAMESPACE);
    KafkaUtils.waitForKafkaReady(INFRA_NAMESPACE, oauthClusterName);
    String baseUri = "https://" + keycloakInstance.getHttpsUri();
    LOGGER.info("Setting the master realm token's lifespan to 3600s");
    // get an admin token for all operations on realms
    String userName = new String(Base64.getDecoder().decode(kubeClient().getSecret(INFRA_NAMESPACE, "credential-example-keycloak").getData().get("ADMIN_USERNAME").getBytes()));
    String password = new String(Base64.getDecoder().decode(kubeClient().getSecret(INFRA_NAMESPACE, "credential-example-keycloak").getData().get("ADMIN_PASSWORD").getBytes()));
    String token = KeycloakUtils.getToken(INFRA_NAMESPACE, baseUri, userName, password);
    // first, increase the token lifespan
    JsonObject masterRealm = KeycloakUtils.getKeycloakRealm(INFRA_NAMESPACE, baseUri, token, "master");
    masterRealm.put("accessTokenLifespan", "3600");
    KeycloakUtils.putConfigurationToRealm(INFRA_NAMESPACE, baseUri, token, masterRealm, "master");
    // now we need to get a token with the new lifespan
    token = KeycloakUtils.getToken(INFRA_NAMESPACE, baseUri, userName, password);
    LOGGER.info("Getting the kafka client from the {} realm to obtain the Dev Team A policy for the x- topics", TEST_REALM);
    // we need to get the clients of the kafka-authz realm to access the auth policies of the kafka client
    JsonArray kafkaAuthzRealm = KeycloakUtils.getKeycloakRealmClients(INFRA_NAMESPACE, baseUri, token, TEST_REALM);
    String kafkaClientId = "";
    for (Object client : kafkaAuthzRealm) {
        JsonObject clientObject = new JsonObject(client.toString());
        if (clientObject.getString("clientId").equals("kafka")) {
            kafkaClientId = clientObject.getString("id");
        }
    }
    JsonArray kafkaAuthzRealmPolicies = KeycloakUtils.getPoliciesFromRealmClient(INFRA_NAMESPACE, baseUri, token, TEST_REALM, kafkaClientId);
    JsonObject devAPolicy = new JsonObject();
    for (Object resource : kafkaAuthzRealmPolicies) {
        JsonObject resourceObject = new JsonObject(resource.toString());
        if (resourceObject.getValue("name").toString().contains("Dev Team A can write to topics that start with x- on any cluster")) {
            devAPolicy = resourceObject;
        }
    }
    JsonObject newDevAPolicy = devAPolicy;
    Map<String, String> config = new HashMap<>();
    config.put("resources", "[\"Topic:x-*\"]");
    config.put("scopes", "[\"Describe\"]");
    config.put("applyPolicies", "[\"Dev Team A\"]");
    newDevAPolicy.put("config", config);
    LOGGER.info("Changing the Dev Team A policy for topics starting with x- and checking that the job will not be successful");
    KeycloakUtils.updatePolicyOfRealmClient(INFRA_NAMESPACE, baseUri, token, newDevAPolicy, TEST_REALM, kafkaClientId);
    assertThrows(WaitException.class, () -> ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
    LOGGER.info("Sending messages to topic starting with a- -> the messages should be successfully sent");
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withTopicName(topicAName).build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
    LOGGER.info("Changing back to the original settings and checking whether the producer will be successful");
    config.put("scopes", "[\"Describe\",\"Write\"]");
    newDevAPolicy.put("config", config);
    KeycloakUtils.updatePolicyOfRealmClient(INFRA_NAMESPACE, baseUri, token, newDevAPolicy, TEST_REALM, kafkaClientId);
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withTopicName(topicXName).withDelayMs(1000).build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
    LOGGER.info("Changing the Kafka configuration back to its original form");
    KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
        kafka.getSpec().getKafka().setListeners(Arrays.asList(OauthAbstractST.BUILD_OAUTH_TLS_LISTENER.apply(keycloakInstance)));
    }, INFRA_NAMESPACE);
    KafkaUtils.waitForKafkaReady(INFRA_NAMESPACE, oauthClusterName);
}
Also used : JsonArray(io.vertx.core.json.JsonArray) KafkaOauthClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) HashMap(java.util.HashMap) JsonObject(io.vertx.core.json.JsonObject) JsonObject(io.vertx.core.json.JsonObject) KafkaOauthClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients) Order(org.junit.jupiter.api.Order) TestMethodOrder(org.junit.jupiter.api.TestMethodOrder) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest)

Aggregations

KafkaOauthClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients) 42
KafkaOauthClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder) 42
ParallelTest (io.strimzi.systemtest.annotations.ParallelTest) 32
Description (io.vertx.core.cli.annotations.Description) 28
Order (org.junit.jupiter.api.Order) 16
TestMethodOrder (org.junit.jupiter.api.TestMethodOrder) 16
Tag (org.junit.jupiter.api.Tag) 14
CertSecretSourceBuilder (io.strimzi.api.kafka.model.CertSecretSourceBuilder) 12
GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) 10
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString) 10
IsolatedTest (io.strimzi.systemtest.annotations.IsolatedTest) 8
BridgeClients (io.strimzi.systemtest.kafkaclients.internalClients.BridgeClients) 4
BridgeClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder) 4
WaitException (io.strimzi.test.WaitException) 4
LabelSelector (io.fabric8.kubernetes.api.model.LabelSelector) 2
CertSecretSource (io.strimzi.api.kafka.model.CertSecretSource) 2
InlineLogging (io.strimzi.api.kafka.model.InlineLogging) 2
KafkaAuthorizationKeycloak (io.strimzi.api.kafka.model.KafkaAuthorizationKeycloak) 2
KafkaMirrorMaker2ClusterSpec (io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpec) 2
KafkaMirrorMaker2ClusterSpecBuilder (io.strimzi.api.kafka.model.KafkaMirrorMaker2ClusterSpecBuilder) 2