Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.
Class OauthPlainIsolatedST, method testProducerConsumerAudienceTokenChecks.
@ParallelTest
void testProducerConsumerAudienceTokenChecks(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String audienceProducerName = OAUTH_CLIENT_AUDIENCE_PRODUCER + "-" + clusterName;
String audienceConsumerName = OAUTH_CLIENT_AUDIENCE_CONSUMER + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
LOGGER.info("Setting producer and consumer properties");
KafkaOauthClients oauthInternalClientJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + audienceListenerPort)
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
LOGGER.info("Use clients without access token containing audience token");
resourceManager.createResource(extensionContext, oauthInternalClientJob.producerStrimziOauthPlain());
assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(producerName, INFRA_NAMESPACE, MESSAGE_COUNT));
resourceManager.createResource(extensionContext, oauthInternalClientJob.consumerStrimziOauthPlain());
assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT));
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
LOGGER.info("Use clients with Access token containing audience token");
KafkaOauthClients oauthAudienceInternalClientJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(audienceProducerName)
    .withConsumerName(audienceConsumerName)
    .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.producerStrimziOauthPlain());
ClientUtils.waitForClientSuccess(audienceProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.consumerStrimziOauthPlain());
ClientUtils.waitForClientSuccess(audienceConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceProducerName);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceConsumerName);
}
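Distilled from the test above, a minimal sketch of the KafkaOauthClientsBuilder pattern these tests share; the producer, consumer, and topic names below are illustrative placeholders, not values taken from the project.
// Sketch only: client and topic names are hypothetical; the builder calls mirror the test above.
KafkaOauthClients exampleClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName("example-oauth-producer")
    .withConsumerName("example-oauth-consumer")
    .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + audienceListenerPort)
    .withTopicName("example-topic")
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
// The built object is then used to create client Jobs, e.g. exampleClients.producerStrimziOauthPlain(),
// whose completion is awaited with ClientUtils.waitForClientSuccess(...) as in the tests on this page.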
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.
Class OauthPlainIsolatedST, method testSaslPlainProducerConsumer.
@ParallelTest
void testSaslPlainProducerConsumer(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String audienceProducerName = OAUTH_CLIENT_AUDIENCE_PRODUCER + "-" + clusterName;
String audienceConsumerName = OAUTH_CLIENT_AUDIENCE_CONSUMER + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
String plainAdditionalConfig = "sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username=%s password=%s;\n"
    + "sasl.mechanism=PLAIN";
KafkaOauthClients plainSaslOauthConsumerClientsJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withConsumerName(audienceConsumerName)
    .withBootstrapAddress(KafkaResources.plainBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .withAdditionalConfig(String.format(plainAdditionalConfig, OAUTH_CLIENT_AUDIENCE_CONSUMER, OAUTH_CLIENT_AUDIENCE_SECRET))
    .build();
KafkaOauthClients plainSaslOauthProducerClientsJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(audienceProducerName)
    .withBootstrapAddress(KafkaResources.plainBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .withAdditionalConfig(String.format(plainAdditionalConfig, OAUTH_CLIENT_AUDIENCE_PRODUCER, OAUTH_CLIENT_AUDIENCE_SECRET))
    .build();
resourceManager.createResource(extensionContext, plainSaslOauthProducerClientsJob.producerStrimziOauthPlain());
ClientUtils.waitForClientSuccess(audienceProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
resourceManager.createResource(extensionContext, plainSaslOauthConsumerClientsJob.consumerStrimziOauthPlain());
ClientUtils.waitForClientSuccess(audienceConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
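A short sketch of what the withAdditionalConfig(...) override above expands to; the username and password below are placeholders standing in for the audience client constants, not real credentials from the project.
// Sketch only: placeholder credentials; the real test passes the OAUTH_CLIENT_AUDIENCE_* constants.
String plainAdditionalConfig = "sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username=%s password=%s;\n"
    + "sasl.mechanism=PLAIN";
String consumerOverride = String.format(plainAdditionalConfig, "<audience-consumer-id>", "<audience-consumer-secret>");
// consumerOverride now holds two client properties that the Job appends verbatim:
//   sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username=<audience-consumer-id> password=<audience-consumer-secret>;
//   sasl.mechanism=PLAIN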
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.
Class OauthTlsIsolatedST, method testProducerConsumer.
@Description("As an oauth producer, I am able to produce messages to the kafka broker\n" + "As an oauth consumer, I am able to consume messages from the kafka broker using encrypted communication")
@ParallelTest
void testProducerConsumer(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
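Note the TLS-specific calls in this test: unlike producerStrimziOauthPlain() in the plain-listener tests above, the TLS variants take the cluster name. A minimal sketch comparing the two call shapes, with the trust-store detail stated only as an assumption:
// For comparison with the plain-listener tests earlier on this page.
// Plain listener: the Job needs only the OAuth settings already captured by the builder.
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthPlain());
// TLS listener: the cluster name is passed in, presumably so the Job can reference that
// cluster's CA certificate secret (assumption; not verified against the helper's implementation).
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));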
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.
Class OauthTlsIsolatedST, method testProducerConsumerConnect.
@Description("As an oauth KafkaConnect, I am able to sink messages from kafka broker topic using encrypted communication.")
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testProducerConsumerConnect(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, oauthClusterName + "-" + Constants.KAFKA_CLIENTS).build());
String defaultKafkaClientsPodName = ResourceManager.kubeClient().listPodsByPrefixInName(INFRA_NAMESPACE, oauthClusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, INFRA_NAMESPACE, oauthClusterName, 1)
    .editSpec()
        .withConfig(connectorConfig)
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withNewKafkaClientAuthenticationOAuth()
            .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
            .withClientId("kafka-connect")
            .withNewClientSecret()
                .withSecretName("my-connect-oauth")
                .withKey(OAUTH_KEY)
            .endClientSecret()
            .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                .build())
            .withDisableTlsHostnameVerification(true)
        .endKafkaClientAuthenticationOAuth()
        .withNewTls()
            .addNewTrustedCertificate()
                .withSecretName(oauthClusterName + "-cluster-ca-cert")
                .withCertificate("ca.crt")
            .endTrustedCertificate()
        .endTls()
        .withBootstrapServers(oauthClusterName + "-kafka-bootstrap:9093")
    .endSpec()
    .build());
String kafkaConnectPodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(INFRA_NAMESPACE, kafkaConnectPodName);
KafkaConnectorUtils.createFileSinkConnector(INFRA_NAMESPACE, defaultKafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, INFRA_NAMESPACE, 8083));
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(INFRA_NAMESPACE, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "\"Hello-world - 99\"");
}
Use of io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder in project strimzi by strimzi.
Class OauthTlsIsolatedST, method testMirrorMaker.
@Description("As an oauth mirror maker, I am able to replicate topic data using encrypted communication")
@IsolatedTest("Using more than one Kafka cluster in one namespace")
@Tag(MIRROR_MAKER)
@Tag(NODEPORT_SUPPORTED)
@SuppressWarnings({ "checkstyle:MethodLength" })
void testMirrorMaker(ExtensionContext extensionContext) {
// Nodeport needs cluster wide rights to work properly which is not possible with STRIMZI_RBAC_SCOPE=NAMESPACE
assumeFalse(Environment.isNamespaceRbacScope());
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
String targetKafkaCluster = oauthClusterName + "-target";
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(targetKafkaCluster, 1, 1)
    .editMetadata()
        .withNamespace(INFRA_NAMESPACE)
    .endMetadata()
    .editSpec()
        .editKafka()
            .withListeners(OauthAbstractST.BUILD_OAUTH_TLS_LISTENER.apply(keycloakInstance),
                new GenericKafkaListenerBuilder()
                    .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                    .withPort(9094)
                    .withType(KafkaListenerType.NODEPORT)
                    .withTls(true)
                    .withNewKafkaListenerAuthenticationOAuth()
                        .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                        .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
                        .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
                        .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
                        .withUserNameClaim(keycloakInstance.getUserNameClaim())
                        .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                            .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                            .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                            .build())
                        .withDisableTlsHostnameVerification(true)
                    .endKafkaListenerAuthenticationOAuth()
                    .build())
        .endKafka()
    .endSpec()
    .build());
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(oauthClusterName, oauthClusterName, targetKafkaCluster, ClientUtils.generateRandomConsumerGroup(), 1, true)
    .editMetadata()
        .withNamespace(INFRA_NAMESPACE)
    .endMetadata()
    .editSpec()
        .withNewConsumer()
            .withNewTls()
                .withTrustedCertificates(new CertSecretSourceBuilder()
                    .withCertificate("ca.crt")
                    .withSecretName(KafkaResources.clusterCaCertificateSecretName(oauthClusterName))
                    .build())
            .endTls()
            .withBootstrapServers(KafkaResources.tlsBootstrapAddress(oauthClusterName))
            .withGroupId(ClientUtils.generateRandomConsumerGroup())
            .addToConfig(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
            .withNewKafkaClientAuthenticationOAuth()
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .withClientId("kafka-mirror-maker")
                .withNewClientSecret()
                    .withSecretName(MIRROR_MAKER_OAUTH_SECRET)
                    .withKey(OAUTH_KEY)
                .endClientSecret()
                .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                    .build())
                .withDisableTlsHostnameVerification(true)
            .endKafkaClientAuthenticationOAuth()
        .endConsumer()
        .withNewProducer()
            .withBootstrapServers(KafkaResources.tlsBootstrapAddress(targetKafkaCluster))
            .withNewTls()
                .withTrustedCertificates(new CertSecretSourceBuilder()
                    .withCertificate("ca.crt")
                    .withSecretName(KafkaResources.clusterCaCertificateSecretName(targetKafkaCluster))
                    .build())
            .endTls()
            .withNewKafkaClientAuthenticationOAuth()
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .withClientId("kafka-mirror-maker")
                .withNewClientSecret()
                    .withSecretName(MIRROR_MAKER_OAUTH_SECRET)
                    .withKey(OAUTH_KEY)
                .endClientSecret()
                .withTlsTrustedCertificates(new CertSecretSourceBuilder()
                    .withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME)
                    .withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT)
                    .build())
                .withDisableTlsHostnameVerification(true)
            .endKafkaClientAuthenticationOAuth()
            .addToConfig(ProducerConfig.ACKS_CONFIG, "all")
        .endProducer()
    .endSpec()
    .build());
String mirrorMakerPodName = kubeClient().listPodsByPrefixInName(INFRA_NAMESPACE, KafkaMirrorMakerResources.deploymentName(oauthClusterName)).get(0).getMetadata().getName();
String kafkaMirrorMakerLogs = kubeClient().logsInSpecificNamespace(INFRA_NAMESPACE, mirrorMakerPodName);
assertThat(kafkaMirrorMakerLogs, not(containsString("keytool error: java.io.FileNotFoundException: /opt/kafka/consumer-oauth-certs/**/* (No such file or directory)")));
resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(INFRA_NAMESPACE, oauthClusterName, USER_NAME).build());
KafkaUserUtils.waitForKafkaUserCreation(INFRA_NAMESPACE, USER_NAME);
LOGGER.info("Creating new client with new consumer-group and also to point on {} cluster", targetKafkaCluster);
KafkaOauthClients kafkaOauthClientJob = new KafkaOauthClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withClientUserName(USER_NAME)
    .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(targetKafkaCluster))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withOauthClientId(OAUTH_CLIENT_NAME)
    .withOauthClientSecret(OAUTH_CLIENT_SECRET)
    .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
    .build();
resourceManager.createResource(extensionContext, kafkaOauthClientJob.consumerStrimziOauthTls(targetKafkaCluster));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
}