Use of io.strimzi.api.kafka.model.CertSecretSourceBuilder in project strimzi by strimzi.
The class OauthAuthorizationIsolatedST, method testSessionReAuthentication.
/**
 * 1) Try to send messages to a topic starting with `x-` with the producer from Dev Team A
 * 2) Change the OAuth listener configuration -> add maxSecondsWithoutReauthentication set to 30s
 * 3) Try to send messages with a delay of 1000ms (in the meantime, the permissions configuration will be changed)
 * 4) Get the whole configuration from Keycloak (realms, policies) and change the policy so that the Dev Team A producer is no longer able to send messages to topics
 *    starting with `x-` -> the policy is updated through the Keycloak API
 * 5) Wait for the WaitException to appear -> as the producer doesn't have permission to send messages, the
 *    job will end up in an error state
 * 6) Try to send messages to the topic starting with `a-` -> we should still be able to send messages, because we didn't change the permissions for that topic
 * 7) Change the permissions back and check that the messages are sent correctly
 *
 * The re-authentication can be seen in the log of the team-a-producer Pod.
 */
@IsolatedTest("Modification of shared Kafka cluster")
@Order(7)
@SuppressWarnings({ "checkstyle:MethodLength" })
void testSessionReAuthentication(ExtensionContext extensionContext) {
String topicXName = TOPIC_X + "-example-topic";
String topicAName = TOPIC_A + "-example-topic";
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + clusterName;
String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + clusterName;
LOGGER.info("Verifying that team A producer is able to send messages to the {} topic -> the topic starting with 'x'", topicXName);
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicXName, clusterOperator.getDeploymentNamespace()).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicAName, clusterOperator.getDeploymentNamespace()).build());
KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(clusterOperator.getDeploymentNamespace())
    .withProducerName(teamAProducerName)
    .withConsumerName(teamAConsumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
    .withTopicName(topicXName)
    .withMessageCount(MESSAGE_COUNT)
    .withConsumerGroup("a-consumer_group")
    .withClientUserName(TEAM_A_CLIENT)
    .withOauthClientId(TEAM_A_CLIENT)
    .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(teamAProducerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
LOGGER.info("Adding the maxSecondsWithoutReauthentication to Kafka listener with OAuth authentication");
KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
    kafka.getSpec().getKafka().setListeners(Arrays.asList(new GenericKafkaListenerBuilder()
        .withName("tls")
        .withPort(9093)
        .withType(KafkaListenerType.INTERNAL)
        .withTls(true)
        .withNewKafkaListenerAuthenticationOAuth()
            .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
            .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
            .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
            .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
            .withUserNameClaim(keycloakInstance.getUserNameClaim())
            .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                .build())
            .withDisableTlsHostnameVerification(true)
            .withMaxSecondsWithoutReauthentication(30)
        .endKafkaListenerAuthenticationOAuth()
        .build()));
}, clusterOperator.getDeploymentNamespace());
KafkaUtils.waitForKafkaReady(clusterOperator.getDeploymentNamespace(), oauthClusterName);
String baseUri = "https://" + keycloakInstance.getHttpsUri();
LOGGER.info("Setting the master realm token's lifespan to 3600s");
// get the admin token for all operations on realms
String userName = new String(Base64.getDecoder().decode(
    kubeClient().getSecret(clusterOperator.getDeploymentNamespace(), "credential-example-keycloak").getData().get("ADMIN_USERNAME").getBytes()));
String password = new String(Base64.getDecoder().decode(
    kubeClient().getSecret(clusterOperator.getDeploymentNamespace(), "credential-example-keycloak").getData().get("ADMIN_PASSWORD").getBytes()));
String token = KeycloakUtils.getToken(clusterOperator.getDeploymentNamespace(), baseUri, userName, password);
// first, increase the token lifespan
JsonObject masterRealm = KeycloakUtils.getKeycloakRealm(clusterOperator.getDeploymentNamespace(), baseUri, token, "master");
masterRealm.put("accessTokenLifespan", "3600");
KeycloakUtils.putConfigurationToRealm(clusterOperator.getDeploymentNamespace(), baseUri, token, masterRealm, "master");
// now get a token with the new lifespan
token = KeycloakUtils.getToken(clusterOperator.getDeploymentNamespace(), baseUri, userName, password);
LOGGER.info("Getting the {} Kafka client to obtain the Dev Team A policy for the x- topics", TEST_REALM);
// we need to get the clients of the kafka-authz realm to access the auth policies of the kafka client
JsonArray kafkaAuthzRealm = KeycloakUtils.getKeycloakRealmClients(clusterOperator.getDeploymentNamespace(), baseUri, token, TEST_REALM);
String kafkaClientId = "";
for (Object client : kafkaAuthzRealm) {
JsonObject clientObject = new JsonObject(client.toString());
if (clientObject.getString("clientId").equals("kafka")) {
kafkaClientId = clientObject.getString("id");
}
}
JsonArray kafkaAuthzRealmPolicies = KeycloakUtils.getPoliciesFromRealmClient(clusterOperator.getDeploymentNamespace(), baseUri, token, TEST_REALM, kafkaClientId);
JsonObject devAPolicy = new JsonObject();
for (Object resource : kafkaAuthzRealmPolicies) {
JsonObject resourceObject = new JsonObject(resource.toString());
if (resourceObject.getValue("name").toString().contains("Dev Team A can write to topics that start with x- on any cluster")) {
devAPolicy = resourceObject;
}
}
JsonObject newDevAPolicy = devAPolicy;
Map<String, String> config = new HashMap<>();
config.put("resources", "[\"Topic:x-*\"]");
config.put("scopes", "[\"Describe\"]");
config.put("applyPolicies", "[\"Dev Team A\"]");
newDevAPolicy.put("config", config);
LOGGER.info("Changing the Dev Team A policy for topics starting with x- and checking that the job will not be successful");
KeycloakUtils.updatePolicyOfRealmClient(clusterOperator.getDeploymentNamespace(), baseUri, token, newDevAPolicy, TEST_REALM, kafkaClientId);
assertThrows(WaitException.class, () -> ClientUtils.waitForClientSuccess(teamAProducerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT));
JobUtils.deleteJobWithWait(clusterOperator.getDeploymentNamespace(), teamAProducerName);
LOGGER.info("Sending messages to topic starting with a- -> the messages should be successfully sent");
teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withTopicName(topicAName).build();
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(teamAProducerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
LOGGER.info("Changing back to the original settings and checking if the producer will be successful");
config.put("scopes", "[\"Describe\",\"Write\"]");
newDevAPolicy.put("config", config);
KeycloakUtils.updatePolicyOfRealmClient(clusterOperator.getDeploymentNamespace(), baseUri, token, newDevAPolicy, TEST_REALM, kafkaClientId);
teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withTopicName(topicXName).withDelayMs(1000).build();
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(teamAProducerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
LOGGER.info("Changing the configuration of Kafka back to its original form");
KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
kafka.getSpec().getKafka().setListeners(Arrays.asList(OauthAbstractST.BUILD_OAUTH_TLS_LISTENER.apply(keycloakInstance)));
}, clusterOperator.getDeploymentNamespace());
KafkaUtils.waitForKafkaReady(clusterOperator.getDeploymentNamespace(), oauthClusterName);
}
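The part of this test that actually exercises CertSecretSourceBuilder is the listener reconfiguration. Distilled into a minimal sketch (not standalone code: it reuses the keycloakInstance helper, the KeycloakInstance constants and the Strimzi api model builders from the test above), the relevant construction is:

// Trust anchor for the Keycloak server certificate, taken from an existing Secret
CertSecretSource keycloakCert = new CertSecretSourceBuilder()
    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
    .build();

// OAuth-protected TLS listener that forces clients to re-authenticate every 30 seconds
GenericKafkaListener reauthListener = new GenericKafkaListenerBuilder()
    .withName("tls")
    .withPort(9093)
    .withType(KafkaListenerType.INTERNAL)
    .withTls(true)
    .withNewKafkaListenerAuthenticationOAuth()
        .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
        .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
        .withUserNameClaim(keycloakInstance.getUserNameClaim())
        .withTlsTrustedCertificates(keycloakCert)
        .withDisableTlsHostnameVerification(true)
        .withMaxSecondsWithoutReauthentication(30)
    .endKafkaListenerAuthenticationOAuth()
    .build();

As the Javadoc notes, the re-authentication triggered by maxSecondsWithoutReauthentication is visible in the team-a-producer Pod log.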
Use of io.strimzi.api.kafka.model.CertSecretSourceBuilder in project strimzi by strimzi.
The class OauthAuthorizationIsolatedST, method testKeycloakAuthorizerToDelegateToSimpleAuthorizer.
@ParallelNamespaceTest
@Order(10)
void testKeycloakAuthorizerToDelegateToSimpleAuthorizer(ExtensionContext extensionContext) {
TestStorage testStorage = new TestStorage(extensionContext);
// we have to copy the Keycloak, team-a-client and team-b-client Secrets from the `infra-namespace` to the new namespace
resourceManager.createResource(extensionContext, kubeClient().getSecret(clusterOperator.getDeploymentNamespace(), KeycloakInstance.KEYCLOAK_SECRET_NAME));
resourceManager.createResource(extensionContext, kubeClient().getSecret(clusterOperator.getDeploymentNamespace(), TEAM_A_CLIENT_SECRET));
resourceManager.createResource(extensionContext, kubeClient().getSecret(clusterOperator.getDeploymentNamespace(), TEAM_B_CLIENT_SECRET));
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 1, 1)
    .editSpec()
        .editKafka()
            .withListeners(OauthAbstractST.BUILD_OAUTH_TLS_LISTENER.apply(keycloakInstance))
            .withNewKafkaAuthorizationKeycloak()
                .withClientId(KAFKA_CLIENT_ID)
                .withDisableTlsHostnameVerification(true)
                .withDelegateToKafkaAcls(true)
                .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                    .build())
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
            .endKafkaAuthorizationKeycloak()
        .endKafka()
    .endSpec()
    .build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(testStorage.getNamespaceName(), testStorage.getClusterName(), TEAM_A_CLIENT).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(testStorage.getNamespaceName(), testStorage.getClusterName(), TEAM_B_CLIENT).build());
final String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + testStorage.getClusterName();
final String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + testStorage.getClusterName();
final String topicName = TOPIC_A + "-" + testStorage.getTopicName();
final String consumerGroup = "a-consumer_group-" + testStorage.getConsumerName();
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(testStorage.getClusterName(), topicName, testStorage.getNamespaceName()).build());
KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(testStorage.getNamespaceName())
    .withProducerName(teamAProducerName)
    .withConsumerName(teamAConsumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(testStorage.getClusterName()))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withConsumerGroup(consumerGroup)
    .withOauthClientId(TEAM_A_CLIENT)
    .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(testStorage.getClusterName()));
ClientUtils.waitForClientSuccess(teamAProducerName, testStorage.getNamespaceName(), MESSAGE_COUNT);
resourceManager.createResource(extensionContext, teamAOauthClientJob.consumerStrimziOauthTls(testStorage.getClusterName()));
ClientUtils.waitForClientSuccess(teamAConsumerName, testStorage.getNamespaceName(), MESSAGE_COUNT);
}
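For readers interested mainly in the CertSecretSourceBuilder part, the authorization section configured above can also be written with the standalone builder (a minimal sketch assuming the same KAFKA_CLIENT_ID constant, KeycloakInstance constants and keycloakInstance helper as in the test above; KafkaAuthorizationKeycloakBuilder is the same builder used in the unit test at the bottom of this page):

// Keycloak authorization that falls back to Kafka ACLs (delegateToKafkaAcls)
// and trusts the Keycloak certificate via a CertSecretSource
KafkaAuthorization keycloakAuthz = new KafkaAuthorizationKeycloakBuilder()
    .withClientId(KAFKA_CLIENT_ID)
    .withDisableTlsHostnameVerification(true)
    .withDelegateToKafkaAcls(true)
    .withTlsTrustedCertificates(new CertSecretSourceBuilder()
        .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
        .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
        .build())
    .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();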
Use of io.strimzi.api.kafka.model.CertSecretSourceBuilder in project strimzi by strimzi.
The class OauthTlsIsolatedST, method testProducerConsumerBridge.
@Description("As an OAuth bridge, I am able to send messages to the Bridge endpoint using encrypted communication")
@ParallelTest
@Tag(BRIDGE)
void testProducerConsumerBridge(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, clusterOperator.getDeploymentNamespace()).build());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(clusterOperator.getDeploymentNamespace())
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
resourceManager.createResource(extensionContext, KafkaBridgeTemplates.kafkaBridge(oauthClusterName, KafkaResources.tlsBootstrapAddress(oauthClusterName), 1)
    .editMetadata()
        .withNamespace(clusterOperator.getDeploymentNamespace())
    .endMetadata()
    .editSpec()
        .withNewTls()
            .withTrustedCertificates(new CertSecretSourceBuilder()
                .withCertificate("ca.crt")
                .withSecretName(KafkaResources.clusterCaCertificateSecretName(oauthClusterName))
                .build())
        .endTls()
        .withNewKafkaClientAuthenticationOAuth()
            .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
            .withClientId("kafka-bridge")
            .withNewClientSecret()
                .withSecretName(BRIDGE_OAUTH_SECRET)
                .withKey(OAUTH_KEY)
            .endClientSecret()
            .addNewTlsTrustedCertificate()
                .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
            .endTlsTrustedCertificate()
            .withDisableTlsHostnameVerification(true)
        .endKafkaClientAuthenticationOAuth()
    .endSpec()
    .build());
producerName = "bridge-producer-" + clusterName;
BridgeClients kafkaBridgeClientJob = new BridgeClientsBuilder()
    .withProducerName(producerName)
    .withBootstrapAddress(KafkaBridgeResources.serviceName(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(10)
    .withPort(HTTP_BRIDGE_DEFAULT_PORT)
    .withDelayMs(1000)
    .withPollInterval(1000)
    .withNamespaceName(clusterOperator.getDeploymentNamespace())
    .build();
resourceManager.createResource(extensionContext, kafkaBridgeClientJob.producerStrimziBridge());
ClientUtils.waitForClientSuccess(producerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
}
Use of io.strimzi.api.kafka.model.CertSecretSourceBuilder in project strimzi by strimzi.
The class OauthTlsIsolatedST, method testIntrospectionEndpoint.
@ParallelTest
void testIntrospectionEndpoint(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, clusterOperator.getDeploymentNamespace()).build());
keycloakInstance.setIntrospectionEndpointUri("https://" + keycloakInstance.getHttpsUri() + "/auth/realms/internal/protocol/openid-connect/token/introspect");
String introspectionKafka = oauthClusterName + "-intro";
CertSecretSource cert = new CertSecretSourceBuilder()
    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
    .build();
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(introspectionKafka, 1)
    .editMetadata()
        .withNamespace(clusterOperator.getDeploymentNamespace())
    .endMetadata()
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName("tls")
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withNewKafkaListenerAuthenticationOAuth()
                    .withClientId(OAUTH_KAFKA_BROKER_NAME)
                    .withNewClientSecret()
                        .withSecretName(OAUTH_KAFKA_BROKER_SECRET)
                        .withKey(OAUTH_KEY)
                    .endClientSecret()
                    .withAccessTokenIsJwt(false)
                    .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                    .withIntrospectionEndpointUri(keycloakInstance.getIntrospectionEndpointUri())
                    .withTlsTrustedCertificates(cert)
                    .withDisableTlsHostnameVerification(true)
                .endKafkaListenerAuthenticationOAuth()
                .build())
        .endKafka()
    .endSpec()
    .build());
KafkaOauthClients oauthInternalClientIntrospectionJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(clusterOperator.getDeploymentNamespace())
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(introspectionKafka))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, oauthInternalClientIntrospectionJob.producerStrimziOauthTls(introspectionKafka));
ClientUtils.waitForClientSuccess(producerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
resourceManager.createResource(extensionContext, oauthInternalClientIntrospectionJob.consumerStrimziOauthTls(introspectionKafka));
ClientUtils.waitForClientSuccess(consumerName, clusterOperator.getDeploymentNamespace(), MESSAGE_COUNT);
}
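Compared with the JWKS-based listeners used elsewhere on this page, the listener above validates opaque (non-JWT) tokens against the Keycloak introspection endpoint, with the broker acting as a confidential client. A minimal sketch of that listener, reusing the cert variable, constants and keycloakInstance helper from the test above:

// Broker-side OAuth validation through token introspection instead of local JWT checks
new GenericKafkaListenerBuilder()
    .withName("tls").withPort(9093).withType(KafkaListenerType.INTERNAL).withTls(true)
    .withNewKafkaListenerAuthenticationOAuth()
        .withClientId(OAUTH_KAFKA_BROKER_NAME)                    // broker credentials for calling Keycloak
        .withNewClientSecret()
            .withSecretName(OAUTH_KAFKA_BROKER_SECRET)
            .withKey(OAUTH_KEY)
        .endClientSecret()
        .withAccessTokenIsJwt(false)                              // tokens are opaque, not locally verifiable
        .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
        .withIntrospectionEndpointUri(keycloakInstance.getIntrospectionEndpointUri())
        .withTlsTrustedCertificates(cert)                         // CertSecretSource built in the test above
        .withDisableTlsHostnameVerification(true)
    .endKafkaListenerAuthenticationOAuth()
    .build();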
Use of io.strimzi.api.kafka.model.CertSecretSourceBuilder in project strimzi by strimzi.
The class KafkaBrokerConfigurationBuilderTest, method testKeycloakAuthorization.
@ParallelTest
public void testKeycloakAuthorization() {
CertSecretSource cert = new CertSecretSourceBuilder().withSecretName("my-secret").withCertificate("my.crt").build();
KafkaAuthorization auth = new KafkaAuthorizationKeycloakBuilder()
    .withTokenEndpointUri("http://token-endpoint-uri")
    .withClientId("my-client-id")
    .withDelegateToKafkaAcls(false)
    .withGrantsRefreshPeriodSeconds(120)
    .withGrantsRefreshPoolSize(10)
    .withTlsTrustedCertificates(cert)
    .withDisableTlsHostnameVerification(true)
    .addToSuperUsers("giada", "CN=paccu")
    .withConnectTimeoutSeconds(30)
    .build();
String configuration = new KafkaBrokerConfigurationBuilder(Reconciliation.DUMMY_RECONCILIATION)
    .withAuthorization("my-cluster", auth, false)
    .build();
assertThat(configuration, isEquivalent(
    "authorizer.class.name=io.strimzi.kafka.oauth.server.authorizer.KeycloakRBACAuthorizer\n" +
    "strimzi.authorization.token.endpoint.uri=http://token-endpoint-uri\n" +
    "strimzi.authorization.client.id=my-client-id\n" +
    "strimzi.authorization.delegate.to.kafka.acl=false\n" +
    "strimzi.authorization.kafka.cluster.name=my-cluster\n" +
    "strimzi.authorization.ssl.truststore.location=/tmp/kafka/authz-keycloak.truststore.p12\n" +
    "strimzi.authorization.ssl.truststore.password=${CERTS_STORE_PASSWORD}\n" +
    "strimzi.authorization.ssl.truststore.type=PKCS12\n" +
    "strimzi.authorization.ssl.secure.random.implementation=SHA1PRNG\n" +
    "strimzi.authorization.ssl.endpoint.identification.algorithm=\n" +
    "strimzi.authorization.grants.refresh.period.seconds=120\n" +
    "strimzi.authorization.grants.refresh.pool.size=10\n" +
    "strimzi.authorization.connect.timeout.seconds=30\n" +
    "super.users=User:CN=my-cluster-kafka,O=io.strimzi;User:CN=my-cluster-entity-topic-operator,O=io.strimzi;User:CN=my-cluster-entity-user-operator,O=io.strimzi;User:CN=my-cluster-kafka-exporter,O=io.strimzi;User:CN=my-cluster-cruise-control,O=io.strimzi;User:CN=cluster-operator,O=io.strimzi;User:giada;User:CN=paccu"));
}