Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.
Class OauthAuthorizationIsolatedST, method testKeycloakAuthorizerToDelegateToSimpleAuthorizer.
@ParallelNamespaceTest
@Order(10)
void testKeycloakAuthorizerToDelegateToSimpleAuthorizer(ExtensionContext extensionContext) {
    TestStorage testStorage = new TestStorage(extensionContext);

    // we have to copy the Keycloak, team-a-client and team-b-client Secrets from the `infra-namespace` into the new namespace
    resourceManager.createResource(extensionContext, kubeClient().getSecret(INFRA_NAMESPACE, KeycloakInstance.KEYCLOAK_SECRET_NAME));
    resourceManager.createResource(extensionContext, kubeClient().getSecret(INFRA_NAMESPACE, TEAM_A_CLIENT_SECRET));
    resourceManager.createResource(extensionContext, kubeClient().getSecret(INFRA_NAMESPACE, TEAM_B_CLIENT_SECRET));

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 1, 1)
        .editSpec()
            .editKafka()
                .withListeners(OauthAbstractST.BUILD_OAUTH_TLS_LISTENER.apply(keycloakInstance))
                .withNewKafkaAuthorizationKeycloak()
                    .withClientId(KAFKA_CLIENT_ID)
                    .withDisableTlsHostnameVerification(true)
                    .withDelegateToKafkaAcls(true)
                    .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                        .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                        .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                        .build())
                    .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .endKafkaAuthorizationKeycloak()
            .endKafka()
        .endSpec()
        .build());

    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(testStorage.getNamespaceName(), testStorage.getClusterName(), TEAM_A_CLIENT).build());
    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(testStorage.getNamespaceName(), testStorage.getClusterName(), TEAM_B_CLIENT).build());

    final String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + testStorage.getClusterName();
    final String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + testStorage.getClusterName();
    final String topicName = TOPIC_A + "-" + testStorage.getTopicName();
    final String consumerGroup = "a-consumer_group-" + testStorage.getConsumerName();

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(testStorage.getClusterName(), topicName, testStorage.getNamespaceName()).build());

    KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(testStorage.getNamespaceName())
        .withProducerName(teamAProducerName)
        .withConsumerName(teamAConsumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(testStorage.getClusterName()))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withConsumerGroup(consumerGroup)
        .withOauthClientId(TEAM_A_CLIENT)
        .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();

    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(teamAProducerName, testStorage.getNamespaceName(), MESSAGE_COUNT);

    resourceManager.createResource(extensionContext, teamAOauthClientJob.consumerStrimziOauthTls(testStorage.getClusterName()));
    ClientUtils.waitForClientSuccess(teamAConsumerName, testStorage.getNamespaceName(), MESSAGE_COUNT);
}
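What makes this test distinctive is the Keycloak authorizer configured to delegate to the built-in ACL authorizer. Below is a minimal sketch of just that fragment of the Kafka custom resource, distilled from the listing above (the OAuth listener and the TLS trusted certificates set in the real test are omitted for brevity); all identifiers come from the test class above.

// Sketch: Keycloak authorization that delegates to Kafka ACLs when Keycloak grants no match
KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 1, 1)
    .editSpec()
        .editKafka()
            .withNewKafkaAuthorizationKeycloak()
                .withClientId(KAFKA_CLIENT_ID)                                      // the broker's own OAuth client
                .withDelegateToKafkaAcls(true)                                      // fall back to the simple (ACL) authorizer
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
            .endKafkaAuthorizationKeycloak()
        .endKafka()
    .endSpec()
    .build();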
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.
Class OauthAuthorizationIsolatedST, method testTeamAReadFromTopic.
@Description("As a member of team A, I should be able only read from consumer that starts with a_")
@ParallelTest
@Order(3)
void testTeamAReadFromTopic(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + clusterName;
    String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + clusterName;
    String topicAName = TOPIC_A + "-" + mapWithTestTopics.get(extensionContext.getDisplayName());
    String consumerGroup = "a-consumer_group-" + clusterName;

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicAName, INFRA_NAMESPACE).build());

    KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(teamAProducerName)
        .withConsumerName(teamAConsumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicAName)
        .withMessageCount(MESSAGE_COUNT)
        .withConsumerGroup(consumerGroup)
        .withOauthClientId(TEAM_A_CLIENT)
        .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();

    LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, topicAName);

    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);

    // the team A client shouldn't be able to consume messages with a wrong consumer group
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob)
        .withConsumerGroup("bad_consumer_group" + clusterName)
        .withTopicName(topicAName)
        .build();

    resourceManager.createResource(extensionContext, teamAOauthClientJob.consumerStrimziOauthTls(oauthClusterName));
    JobUtils.waitForJobFailure(teamAConsumerName, INFRA_NAMESPACE, 30_000);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);

    // the team A client should be able to consume messages with a correct consumer group
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob)
        .withConsumerGroup("a-correct_consumer_group" + clusterName)
        .withTopicName(topicAName)
        .build();

    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
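Note how the test derives a second client from the first one through the builder's copy constructor instead of repeating every setter. A minimal sketch of that pattern, reusing teamAOauthClientJob and the other names from the listing above; the local variable name deniedVariant is illustrative only.

// Sketch: derive a variant client that differs only in its consumer group
KafkaOauthClients deniedVariant = new KafkaOauthClientsBuilder(teamAOauthClientJob)   // copy the existing settings
    .withConsumerGroup("bad_consumer_group" + clusterName)                            // group not covered by team A's Keycloak policy
    .build();

resourceManager.createResource(extensionContext, deniedVariant.consumerStrimziOauthTls(oauthClusterName));
JobUtils.waitForJobFailure(teamAConsumerName, INFRA_NAMESPACE, 30_000);               // authorization is expected to reject it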
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.
Class OauthPlainIsolatedST, method testProducerConsumerConnect.
@Description("As an oauth KafkaConnect, I should be able to sink messages from kafka broker topic.")
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testProducerConsumerConnect(ExtensionContext extensionContext) {
    String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());

    KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(oauthClusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());

    resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);

    resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);

    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, kafkaClientsName).build());

    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, oauthClusterName, 1)
        .editMetadata()
            .withNamespace(INFRA_NAMESPACE)
        .endMetadata()
        .withNewSpec()
            .withReplicas(1)
            .withBootstrapServers(KafkaResources.plainBootstrapAddress(oauthClusterName))
            .withConfig(connectorConfig)
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewKafkaClientAuthenticationOAuth()
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .withClientId("kafka-connect")
                .withNewClientSecret()
                    .withSecretName(CONNECT_OAUTH_SECRET)
                    .withKey(OAUTH_KEY)
                .endClientSecret()
                .withConnectTimeoutSeconds(CONNECT_TIMEOUT_S)
                .withReadTimeoutSeconds(READ_TIMEOUT_S)
            .endKafkaClientAuthenticationOAuth()
            .withTls(null)
            .withNewInlineLogging()
                .addToLoggers("connect.root.logger.level", "DEBUG")
            .endInlineLogging()
        .endSpec()
        .build());

    final String kafkaConnectPodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();

    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(INFRA_NAMESPACE, kafkaConnectPodName);

    KafkaConnectorUtils.createFileSinkConnector(INFRA_NAMESPACE, kafkaConnectPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, "http://localhost:8083");
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(INFRA_NAMESPACE, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "\"Hello-world - 99\"");

    final String kafkaConnectLogs = KubeClusterResource.cmdKubeClient(INFRA_NAMESPACE).execInCurrentNamespace(Level.DEBUG, "logs", kafkaConnectPodName).out();
    verifyOauthConfiguration(kafkaConnectLogs);
}
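The Connect deployment authenticates to the OAuth-protected listener with client credentials, reading its client secret from a Kubernetes Secret. A minimal sketch of just that part of the spec, trimmed from the listing above (converters, logging and timeouts omitted); every call and constant here comes from that listing.

// Sketch: OAuth client-credentials authentication for KafkaConnect
KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, oauthClusterName, 1)
    .withNewSpec()
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(oauthClusterName))
        .withNewKafkaClientAuthenticationOAuth()
            .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())   // Keycloak token endpoint
            .withClientId("kafka-connect")                                       // OAuth client registered in Keycloak
            .withNewClientSecret()
                .withSecretName(CONNECT_OAUTH_SECRET)                            // Kubernetes Secret holding the client secret
                .withKey(OAUTH_KEY)
            .endClientSecret()
        .endKafkaClientAuthenticationOAuth()
    .endSpec()
    .build();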
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.
Class OauthPlainIsolatedST, method testProducerConsumerAudienceTokenChecks.
@ParallelTest
void testProducerConsumerAudienceTokenChecks(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String audienceProducerName = OAUTH_CLIENT_AUDIENCE_PRODUCER + "-" + clusterName;
    String audienceConsumerName = OAUTH_CLIENT_AUDIENCE_CONSUMER + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());

    LOGGER.info("Setting producer and consumer properties");

    KafkaOauthClients oauthInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + audienceListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();

    LOGGER.info("Using clients without an access token containing the audience claim");

    resourceManager.createResource(extensionContext, oauthInternalClientJob.producerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(producerName, INFRA_NAMESPACE, MESSAGE_COUNT));

    resourceManager.createResource(extensionContext, oauthInternalClientJob.consumerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT));

    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);

    LOGGER.info("Using clients with an access token containing the audience claim");

    KafkaOauthClients oauthAudienceInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(audienceProducerName)
        .withConsumerName(audienceConsumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();

    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(audienceProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);

    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(audienceConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);

    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceProducerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceConsumerName);
}
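The two client jobs are built almost identically; what changes is the Job names and, above all, the listener port they target, which is what switches the audience check on for the first job. A minimal sketch of the blocked path, reusing the fields from the listing above; blockedClients is an illustrative variable name.

// Sketch: the same OAuth client aimed at the audience-checking listener
KafkaOauthClients blockedClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    // audienceListenerPort here; the accepted job in the listing uses customClaimListenerPort instead
    .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + audienceListenerPort)
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();

// This client is not expected to succeed, so the test waits for its Job to time out rather than complete
resourceManager.createResource(extensionContext, blockedClients.producerStrimziOauthPlain());
assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(producerName, INFRA_NAMESPACE, MESSAGE_COUNT));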
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi-kafka-operator by strimzi.
Class OauthPlainIsolatedST, method testProducerConsumerMirrorMaker2.
@IsolatedTest("Using more tha one Kafka cluster in one namespace")
@Tag(MIRROR_MAKER2)
@Tag(CONNECT_COMPONENTS)
@Tag(NODEPORT_SUPPORTED)
void testProducerConsumerMirrorMaker2(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());

    KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(oauthClusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());

    resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);

    resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);

    String kafkaSourceClusterName = oauthClusterName;
    String kafkaTargetClusterName = clusterName + "-target";
    // Mirror Maker 2 prefixes mirrored topics with the source cluster alias, so in this case the mirrored topic will be: my-cluster.my-topic
    String kafkaTargetClusterTopicName = kafkaSourceClusterName + "." + topicName;

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaTargetClusterName, 1, 1)
        .editMetadata()
            .withNamespace(INFRA_NAMESPACE)
        .endMetadata()
        .editSpec()
            .editKafka()
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                        .withPort(9092)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(false)
                        .withNewKafkaListenerAuthenticationOAuth()
                            .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                            .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
                            .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
                            .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
                            .withUserNameClaim(keycloakInstance.getUserNameClaim())
                        .endKafkaListenerAuthenticationOAuth()
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9094)
                        .withType(KafkaListenerType.NODEPORT)
                        .withTls(false)
                        .withNewKafkaListenerAuthenticationOAuth()
                            .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                            .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
                            .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
                            .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
                            .withUserNameClaim(keycloakInstance.getUserNameClaim())
                        .endKafkaListenerAuthenticationOAuth()
                        .build())
            .endKafka()
        .endSpec()
        .build());

    // Deploy Mirror Maker 2 with OAuth
    KafkaMirrorMaker2ClusterSpec sourceClusterWithOauth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaSourceClusterName)
        .withConfig(connectorConfig)
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(kafkaSourceClusterName))
        .withNewKafkaClientAuthenticationOAuth()
            .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
            .withClientId("kafka-mirror-maker-2")
            .withNewClientSecret()
                .withSecretName(MIRROR_MAKER_2_OAUTH_SECRET)
                .withKey(OAUTH_KEY)
            .endClientSecret()
            .withConnectTimeoutSeconds(CONNECT_TIMEOUT_S)
            .withReadTimeoutSeconds(READ_TIMEOUT_S)
        .endKafkaClientAuthenticationOAuth()
        .build();

    KafkaMirrorMaker2ClusterSpec targetClusterWithOauth = new KafkaMirrorMaker2ClusterSpecBuilder()
        .withAlias(kafkaTargetClusterName)
        .withConfig(connectorConfig)
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(kafkaTargetClusterName))
        .withNewKafkaClientAuthenticationOAuth()
            .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
            .withClientId("kafka-mirror-maker-2")
            .withNewClientSecret()
                .withSecretName(MIRROR_MAKER_2_OAUTH_SECRET)
                .withKey(OAUTH_KEY)
            .endClientSecret()
            .withConnectTimeoutSeconds(CONNECT_TIMEOUT_S)
            .withReadTimeoutSeconds(READ_TIMEOUT_S)
        .endKafkaClientAuthenticationOAuth()
        .build();

    resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(oauthClusterName, kafkaTargetClusterName, kafkaSourceClusterName, 1, false)
        .editMetadata()
            .withNamespace(INFRA_NAMESPACE)
        .endMetadata()
        .editSpec()
            .withClusters(sourceClusterWithOauth, targetClusterWithOauth)
            .editFirstMirror()
                .withSourceCluster(kafkaSourceClusterName)
            .endMirror()
        .endSpec()
        .build());

    final String kafkaMirrorMaker2PodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, oauthClusterName, Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker2.RESOURCE_KIND).get(0).getMetadata().getName();
    final String kafkaMirrorMaker2Logs = KubeClusterResource.cmdKubeClient(INFRA_NAMESPACE).execInCurrentNamespace(Level.DEBUG, "logs", kafkaMirrorMaker2PodName).out();
    verifyOauthConfiguration(kafkaMirrorMaker2Logs);

    TestUtils.waitFor("Mirror Maker 2 to copy messages from " + kafkaSourceClusterName + " to " + kafkaTargetClusterName,
        Duration.ofSeconds(30).toMillis(), Constants.TIMEOUT_FOR_MIRROR_MAKER_COPY_MESSAGES_BETWEEN_BROKERS,
        () -> {
            LOGGER.info("Deleting the Job {}", consumerName);
            JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);

            LOGGER.info("Creating a new client with a new consumer group, pointing at the {} cluster", kafkaTargetClusterName);
            KafkaOauthClients kafkaOauthClientJob = new KafkaOauthClientsBuilder()
                .withNamespaceName(INFRA_NAMESPACE)
                .withProducerName(producerName)
                .withConsumerName(consumerName)
                .withBootstrapAddress(KafkaResources.plainBootstrapAddress(kafkaTargetClusterName))
                .withTopicName(kafkaTargetClusterTopicName)
                .withMessageCount(MESSAGE_COUNT)
                .withOauthClientId(OAUTH_CLIENT_NAME)
                .withOauthClientSecret(OAUTH_CLIENT_SECRET)
                .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .build();

            resourceManager.createResource(extensionContext, kafkaOauthClientJob.consumerStrimziOauthPlain());

            try {
                ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
                return true;
            } catch (WaitException e) {
                e.printStackTrace();
                return false;
            }
        });
}
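The closing TestUtils.waitFor block is the reusable piece here: it polls for the mirrored topic by repeatedly recreating a consumer Job against the target cluster and treating a WaitException as "not mirrored yet". A minimal sketch of that pattern, assuming kafkaOauthClientJob is the target-cluster client built as in the listing above and reusing that listing's names and timeout constants.

// Sketch: poll until a consumer can read MESSAGE_COUNT messages from the mirrored topic
TestUtils.waitFor("messages to appear on " + kafkaTargetClusterTopicName,
    Duration.ofSeconds(30).toMillis(), Constants.TIMEOUT_FOR_MIRROR_MAKER_COPY_MESSAGES_BETWEEN_BROKERS,
    () -> {
        JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);                        // drop the previous attempt
        resourceManager.createResource(extensionContext, kafkaOauthClientJob.consumerStrimziOauthPlain());
        try {
            ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
            return true;                                                                  // messages arrived on the target cluster
        } catch (WaitException e) {
            return false;                                                                 // not mirrored yet, poll again
        }
    });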