
Example 96 with ParallelTest

Use of io.strimzi.systemtest.annotations.ParallelTest in the project strimzi-kafka-operator by strimzi.

In the class OauthAuthorizationIsolatedST, the method testTeamAReadFromTopic:

@Description("As a member of team A, I should be able only read from consumer that starts with a_")
@ParallelTest
@Order(3)
void testTeamAReadFromTopic(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + clusterName;
    String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + clusterName;
    String topicAName = TOPIC_A + "-" + mapWithTestTopics.get(extensionContext.getDisplayName());
    String consumerGroup = "a-consumer_group-" + clusterName;
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicAName, INFRA_NAMESPACE).build());
    KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(teamAProducerName)
        .withConsumerName(teamAConsumerName)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName))
        .withTopicName(topicAName)
        .withMessageCount(MESSAGE_COUNT)
        .withConsumerGroup(consumerGroup)
        .withOauthClientId(TEAM_A_CLIENT)
        .withOauthClientSecret(TEAM_A_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, topicAName);
    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
    // team A client shouldn't be able to consume messages with wrong consumer group
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withConsumerGroup("bad_consumer_group" + clusterName).withTopicName(topicAName).build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.consumerStrimziOauthTls(oauthClusterName));
    JobUtils.waitForJobFailure(teamAConsumerName, INFRA_NAMESPACE, 30_000);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAConsumerName);
    // team A client should be able to consume messages with correct consumer group
    teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withConsumerGroup("a-correct_consumer_group" + clusterName).withTopicName(topicAName).build();
    resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
    ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
Also used: KafkaOauthClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder), KafkaOauthClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients), Order (org.junit.jupiter.api.Order), TestMethodOrder (org.junit.jupiter.api.TestMethodOrder), Description (io.vertx.core.cli.annotations.Description), ParallelTest (io.strimzi.systemtest.annotations.ParallelTest)

Example 97 with ParallelTest

Use of io.strimzi.systemtest.annotations.ParallelTest in the project strimzi-kafka-operator by strimzi.

In the class OauthPlainIsolatedST, the method testProducerConsumerConnect:

@Description("As an oauth KafkaConnect, I should be able to sink messages from kafka broker topic.")
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testProducerConsumerConnect(ExtensionContext extensionContext) {
    String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(oauthClusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
    resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, kafkaClientsName).build());
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, oauthClusterName, 1)
        .editMetadata()
            .withNamespace(INFRA_NAMESPACE)
        .endMetadata()
        .withNewSpec()
            .withReplicas(1)
            .withBootstrapServers(KafkaResources.plainBootstrapAddress(oauthClusterName))
            .withConfig(connectorConfig)
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewKafkaClientAuthenticationOAuth()
                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                .withClientId("kafka-connect")
                .withNewClientSecret()
                    .withSecretName(CONNECT_OAUTH_SECRET)
                    .withKey(OAUTH_KEY)
                .endClientSecret()
                .withConnectTimeoutSeconds(CONNECT_TIMEOUT_S)
                .withReadTimeoutSeconds(READ_TIMEOUT_S)
            .endKafkaClientAuthenticationOAuth()
            .withTls(null)
            .withNewInlineLogging()
                .addToLoggers("connect.root.logger.level", "DEBUG")
            .endInlineLogging()
        .endSpec()
        .build());
    final String kafkaConnectPodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(INFRA_NAMESPACE, kafkaConnectPodName);
    KafkaConnectorUtils.createFileSinkConnector(INFRA_NAMESPACE, kafkaConnectPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, "http://localhost:8083");
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(INFRA_NAMESPACE, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "\"Hello-world - 99\"");
    final String kafkaConnectLogs = KubeClusterResource.cmdKubeClient(INFRA_NAMESPACE).execInCurrentNamespace(Level.DEBUG, "logs", kafkaConnectPodName).out();
    verifyOauthConfiguration(kafkaConnectLogs);
}
Also used: KafkaOauthClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder), KafkaOauthClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients), Description (io.vertx.core.cli.annotations.Description), ParallelTest (io.strimzi.systemtest.annotations.ParallelTest), Tag (org.junit.jupiter.api.Tag)
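
For orientation on what example 97 exercises beyond OAuth: KafkaConnectorUtils.createFileSinkConnector targets the Kafka Connect REST API inside the pod (the "http://localhost:8083" argument above). A minimal, self-contained sketch of an equivalent request follows; the connector name, topic, and sink file path in it are placeholder assumptions, not values taken from the test.

// Hypothetical sketch, not Strimzi test code: register a FileStreamSink connector
// via the standard Kafka Connect REST API.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateFileSinkConnectorSketch {
    public static void main(String[] args) throws Exception {
        String connectRestUrl = "http://localhost:8083/connectors"; // Connect REST endpoint
        String topicName = "my-topic";                              // placeholder topic
        String sinkFilePath = "/tmp/test-file-sink.txt";            // placeholder sink file

        // Standard Connect payload: connector name plus its configuration.
        String json = "{"
            + "\"name\": \"sink-test\","
            + "\"config\": {"
            + "  \"connector.class\": \"org.apache.kafka.connect.file.FileStreamSinkConnector\","
            + "  \"topics\": \"" + topicName + "\","
            + "  \"file\": \"" + sinkFilePath + "\""
            + "}}";

        HttpRequest request = HttpRequest.newBuilder(URI.create(connectRestUrl))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(json))
            .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        // 201 Created means the connector was registered; Connect then sinks records
        // from the topic into the configured file, which is what the test polls for.
        System.out.println(response.statusCode() + ": " + response.body());
    }
}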

Example 98 with ParallelTest

Use of io.strimzi.systemtest.annotations.ParallelTest in the project strimzi-kafka-operator by strimzi.

In the class OauthPlainIsolatedST, the method testProducerConsumerAudienceTokenChecks:

@ParallelTest
void testProducerConsumerAudienceTokenChecks(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String audienceProducerName = OAUTH_CLIENT_AUDIENCE_PRODUCER + "-" + clusterName;
    String audienceConsumerName = OAUTH_CLIENT_AUDIENCE_CONSUMER + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    LOGGER.info("Setting producer and consumer properties");
    KafkaOauthClients oauthInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + audienceListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    LOGGER.info("Use clients without access token containing audience token");
    resourceManager.createResource(extensionContext, oauthInternalClientJob.producerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(producerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    resourceManager.createResource(extensionContext, oauthInternalClientJob.consumerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
    LOGGER.info("Use clients with Access token containing audience token");
    KafkaOauthClients oauthAudienceInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(audienceProducerName)
        .withConsumerName(audienceConsumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(audienceProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(audienceConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceProducerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceConsumerName);
}
Also used: KafkaOauthClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder), KafkaOauthClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients), ParallelTest (io.strimzi.systemtest.annotations.ParallelTest)
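
What the audience check in example 98 relies on: the access token is a JWT, and a broker listener enforcing audience checks looks for an "aud" claim in the token's payload. The sketch below is illustrative only and is not part of the Strimzi test code; it builds a dummy, unsigned token so the example runs without Keycloak.

// Hypothetical sketch: where the audience ("aud") claim lives inside a JWT access token.
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class InspectAudienceClaimSketch {
    public static void main(String[] args) {
        // Build a dummy, unsigned JWT so the example is self-contained.
        String payloadJson = "{\"aud\":\"kafka\",\"azp\":\"hello-world-producer\"}";
        String payload = Base64.getUrlEncoder().withoutPadding()
            .encodeToString(payloadJson.getBytes(StandardCharsets.UTF_8));
        String accessToken = "eyJhbGciOiJSUzI1NiJ9." + payload + ".signature";

        // A JWT is three base64url segments: header.payload.signature.
        // The claims, including "aud", sit in the middle segment.
        String decoded = new String(
            Base64.getUrlDecoder().decode(accessToken.split("\\.")[1]), StandardCharsets.UTF_8);
        System.out.println("Token claims: " + decoded);
        System.out.println(decoded.contains("\"aud\"")
            ? "audience claim present"
            : "no audience claim - a listener enforcing audience checks would reject this token");
    }
}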

Example 99 with ParallelTest

Use of io.strimzi.systemtest.annotations.ParallelTest in the project strimzi-kafka-operator by strimzi.

In the class OauthPlainIsolatedST, the method testAccessTokenClaimCheck:

@ParallelTest
void testAccessTokenClaimCheck(ExtensionContext extensionContext) {
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    String audienceProducerName = OAUTH_CLIENT_AUDIENCE_PRODUCER + "-" + clusterName;
    String audienceConsumerName = OAUTH_CLIENT_AUDIENCE_CONSUMER + "-" + clusterName;
    String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    LOGGER.info("Use clients with clientId not containing 'hello-world' in access token.");
    KafkaOauthClients oauthAudienceInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(audienceProducerName)
        .withConsumerName(audienceConsumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthProducerClientId(OAUTH_CLIENT_AUDIENCE_PRODUCER)
        .withOauthConsumerClientId(OAUTH_CLIENT_AUDIENCE_CONSUMER)
        .withOauthClientSecret(OAUTH_CLIENT_AUDIENCE_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.producerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(audienceProducerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    resourceManager.createResource(extensionContext, oauthAudienceInternalClientJob.consumerStrimziOauthPlain());
    assertDoesNotThrow(() -> ClientUtils.waitForClientTimeout(audienceConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT));
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceProducerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, audienceConsumerName);
    LOGGER.info("Use clients with clientId containing 'hello-world' in access token.");
    KafkaOauthClients oauthInternalClientJob = new KafkaOauthClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + customClaimListenerPort)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withOauthClientId(OAUTH_CLIENT_NAME)
        .withOauthClientSecret(OAUTH_CLIENT_SECRET)
        .withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
        .build();
    resourceManager.createResource(extensionContext, oauthInternalClientJob.producerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    resourceManager.createResource(extensionContext, oauthInternalClientJob.consumerStrimziOauthPlain());
    ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
}
Also used: KafkaOauthClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder), KafkaOauthClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients), ParallelTest (io.strimzi.systemtest.annotations.ParallelTest)

Example 100 with ParallelTest

Use of io.strimzi.systemtest.annotations.ParallelTest in the project strimzi-kafka-operator by strimzi.

In the class OauthPlainIsolatedST, the method testSaslPlainAuthenticationKafkaConnectIsAbleToConnectToKafkaOAuth:

@ParallelTest
void testSaslPlainAuthenticationKafkaConnectIsAbleToConnectToKafkaOAuth(ExtensionContext extensionContext) {
    TestStorage testStorage = new TestStorage(extensionContext);
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, testStorage.getKafkaClientsName()).build());
    resourceManager.createResource(extensionContext, false, KafkaConnectTemplates.kafkaConnect(extensionContext, testStorage.getClusterName(), oauthClusterName, 1)
        .withNewSpec()
            .withReplicas(1)
            .withBootstrapServers(KafkaResources.plainBootstrapAddress(oauthClusterName))
            .withConfig(connectorConfig)
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewKafkaClientAuthenticationPlain()
                .withUsername("kafka-connect")
                .withNewPasswordSecret()
                    .withSecretName(CONNECT_OAUTH_SECRET)
                    .withPassword("clientSecret")
                .endPasswordSecret()
            .endKafkaClientAuthenticationPlain()
            .withTls(null)
        .endSpec()
        .build());
    // verify that KafkaConnect is able to connect to Oauth Kafka configured as plain
    KafkaConnectUtils.waitForConnectReady(testStorage.getNamespaceName(), testStorage.getClusterName());
}
Also used: TestStorage (io.strimzi.systemtest.storage.TestStorage), ParallelTest (io.strimzi.systemtest.annotations.ParallelTest)
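
Example 100 leans on OAuth over SASL PLAIN: KafkaClientAuthenticationPlain makes the Connect workers send the OAuth client ID as the SASL username and the client secret as the password, and the OAuth-enabled listener exchanges those credentials for a token. A minimal sketch of the equivalent plain Kafka client configuration follows; the bootstrap address and the secret value are placeholders, not values from the test.

// Hypothetical sketch: a plain Kafka client authenticating with SASL PLAIN, using the
// OAuth client ID as username and the client secret as password.
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringSerializer;

public class SaslPlainOauthClientSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "my-cluster-kafka-bootstrap:9092"); // placeholder address
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        // Client ID as username, client secret as password; the broker's OAuth-enabled
        // listener is expected to validate these against the authorization server.
        props.put("sasl.jaas.config",
            "org.apache.kafka.common.security.plain.PlainLoginModule required "
            + "username=\"kafka-connect\" password=\"<client-secret>\";");
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // The producer authenticates over SASL PLAIN on first use; sending is omitted here.
        }
    }
}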

Aggregations

ParallelTest (io.strimzi.systemtest.annotations.ParallelTest): 192
Tag (org.junit.jupiter.api.Tag): 62
InternalKafkaClient (io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient): 38
Matchers.containsString (org.hamcrest.Matchers.containsString): 38
KafkaOauthClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClients): 32
KafkaOauthClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaOauthClientsBuilder): 32
OptionalDouble (java.util.OptionalDouble): 28
Pattern (java.util.regex.Pattern): 28
StringContains.containsString (org.hamcrest.core.StringContains.containsString): 28
BridgeClients (io.strimzi.systemtest.kafkaclients.internalClients.BridgeClients): 26
BridgeClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder): 26
Description (io.vertx.core.cli.annotations.Description): 24
KafkaUser (io.strimzi.api.kafka.model.KafkaUser): 16
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 16
KafkaResources (io.strimzi.api.kafka.model.KafkaResources): 14
AbstractST (io.strimzi.systemtest.AbstractST): 14
Constants (io.strimzi.systemtest.Constants): 14
INFRA_NAMESPACE (io.strimzi.systemtest.Constants.INFRA_NAMESPACE): 14
REGRESSION (io.strimzi.systemtest.Constants.REGRESSION): 14
IsolatedSuite (io.strimzi.systemtest.annotations.IsolatedSuite): 14
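
All of the examples above hinge on the same annotation. As orientation, here is a minimal sketch of how a composed JUnit 5 "parallel test" annotation can be defined; it is not the actual Strimzi source, which layers additional behaviour on top.

// Hypothetical sketch of a composed annotation: any method annotated with @ParallelTest
// is discovered as a JUnit 5 @Test and is eligible to run concurrently with other tests
// (provided junit.jupiter.execution.parallel.enabled=true is set for the test run).
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;

@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Execution(ExecutionMode.CONCURRENT) // run annotated tests concurrently
@Test                                // meta-annotation: picked up by the Jupiter engine
public @interface ParallelTest {
}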