Use of io.strimzi.systemtest.annotations.ParallelTest in project strimzi by strimzi.
Class OauthTlsIsolatedST, method testProducerConsumer.
@Description("As an oauth producer, I am able to produce messages to the kafka broker\n" + "As an oauth consumer, I am able to consumer messages from the kafka broker using encrypted communication")
@ParallelTest
void testProducerConsumer(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder().withNamespaceName(INFRA_NAMESPACE).withProducerName(producerName).withConsumerName(consumerName).withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName)).withTopicName(topicName).withMessageCount(MESSAGE_COUNT).withOauthClientId(OAUTH_CLIENT_NAME).withOauthClientSecret(OAUTH_CLIENT_SECRET).withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).build();
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
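For context, a @ParallelTest-style annotation is usually a JUnit 5 composed annotation: a method-level annotation that bundles @Test with concurrent-execution settings. The sketch below only illustrates that general shape, assuming JUnit 5's parallel-execution support is enabled; it is not necessarily Strimzi's actual definition.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;

// Hypothetical sketch: a composed annotation that marks a test method and lets JUnit 5 run it
// concurrently with other tests (requires junit.jupiter.execution.parallel.enabled=true).
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Execution(ExecutionMode.CONCURRENT)
@Test
public @interface ParallelTest {
}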
Use of io.strimzi.systemtest.annotations.ParallelTest in project strimzi by strimzi.
Class OauthTlsIsolatedST, method testProducerConsumerConnect.
@Description("As an oauth KafkaConnect, I am able to sink messages from kafka broker topic using encrypted communication.")
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testProducerConsumerConnect(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder().withNamespaceName(INFRA_NAMESPACE).withProducerName(producerName).withConsumerName(consumerName).withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName)).withTopicName(topicName).withMessageCount(MESSAGE_COUNT).withOauthClientId(OAUTH_CLIENT_NAME).withOauthClientSecret(OAUTH_CLIENT_SECRET).withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).build();
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, oauthClusterName + "-" + Constants.KAFKA_CLIENTS).build());
String defaultKafkaClientsPodName = ResourceManager.kubeClient().listPodsByPrefixInName(INFRA_NAMESPACE, oauthClusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, INFRA_NAMESPACE, oauthClusterName, 1).editSpec().withConfig(connectorConfig).addToConfig("key.converter.schemas.enable", false).addToConfig("value.converter.schemas.enable", false).addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter").addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter").withNewKafkaClientAuthenticationOAuth().withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).withClientId("kafka-connect").withNewClientSecret().withSecretName("my-connect-oauth").withKey(OAUTH_KEY).endClientSecret().withTlsTrustedCertificates(new CertSecretSourceBuilder().withSecretName(KeycloakInstance.KEYCLOAK_SECRET_NAME).withCertificate(KeycloakInstance.KEYCLOAK_SECRET_CERT).build()).withDisableTlsHostnameVerification(true).endKafkaClientAuthenticationOAuth().withNewTls().addNewTrustedCertificate().withSecretName(oauthClusterName + "-cluster-ca-cert").withCertificate("ca.crt").endTrustedCertificate().endTls().withBootstrapServers(oauthClusterName + "-kafka-bootstrap:9093").endSpec().build());
String kafkaConnectPodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(INFRA_NAMESPACE, kafkaConnectPodName);
KafkaConnectorUtils.createFileSinkConnector(INFRA_NAMESPACE, defaultKafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, INFRA_NAMESPACE, 8083));
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(INFRA_NAMESPACE, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "\"Hello-world - 99\"");
}
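For orientation, the createFileSinkConnector step boils down to a POST against the Kafka Connect REST API that registers a FileStreamSinkConnector. The sketch below shows the general shape of that request with java.net.http; the URL, connector name, topic and file path are placeholder assumptions, not the test's actual constants (the test resolves those via KafkaConnectResources.url(...) and Constants.DEFAULT_SINK_FILE_PATH).

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class FileSinkConnectorExample {
    public static void main(String[] args) throws Exception {
        // Placeholder Connect REST endpoint and connector config.
        String connectUrl = "http://my-connect-connect-api:8083/connectors";
        String body = """
            {
              "name": "sink-example",
              "config": {
                "connector.class": "org.apache.kafka.connect.file.FileStreamSinkConnector",
                "topics": "my-topic",
                "file": "/tmp/test-file-sink.txt"
              }
            }
            """;

        HttpRequest request = HttpRequest.newBuilder(URI.create(connectUrl))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();

        // 201 Created means the connector was registered; Connect then appends consumed records to the sink file.
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}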
Use of io.strimzi.systemtest.annotations.ParallelTest in project strimzi by strimzi.
Class CustomAuthorizerST, method testAclWithSuperUser.
@ParallelTest
@Tag(INTERNAL_CLIENTS_USED)
void testAclWithSuperUser(ExtensionContext extensionContext) {
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(CLUSTER_NAME, topicName, namespace).build());

    // The admin user is a super user, but its KafkaUser ACLs only grant WRITE and DESCRIBE on the topic.
    KafkaUser adminUser = KafkaUserTemplates.tlsUser(namespace, CLUSTER_NAME, ADMIN)
        .editSpec()
            .withNewKafkaUserAuthorizationSimple()
                .addNewAcl()
                    .withNewAclRuleTopicResource()
                        .withName(topicName)
                    .endAclRuleTopicResource()
                    .withOperation(AclOperation.WRITE)
                .endAcl()
                .addNewAcl()
                    .withNewAclRuleTopicResource()
                        .withName(topicName)
                    .endAclRuleTopicResource()
                    .withOperation(AclOperation.DESCRIBE)
                .endAcl()
            .endKafkaUserAuthorizationSimple()
        .endSpec()
        .build();

    resourceManager.createResource(extensionContext, adminUser);
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespace, true, kafkaClientsName, adminUser).build());

    String kafkaClientsPodName = kubeClient(namespace).listPodsByPrefixInName(namespace, kafkaClientsName).get(0).getMetadata().getName();

    LOGGER.info("Checking that kafka super user:{} is able to send messages to topic:{}", ADMIN, topicName);

    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(CLUSTER_NAME)
        .withKafkaUsername(ADMIN)
        .withMessageCount(MESSAGE_COUNT)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .withUsingPodName(kafkaClientsPodName)
        .build();

    assertThat(internalKafkaClient.sendMessagesTls(), is(MESSAGE_COUNT));

    LOGGER.info("Checking that kafka super user:{} is able to read messages from topic:{} even though the " +
        "configured ACLs only grant the write and describe operations", ADMIN, topicName);

    assertThat(internalKafkaClient.receiveMessagesTls(), is(MESSAGE_COUNT));
}
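The read succeeds because ADMIN is configured as a super user in the Kafka custom resource, so the broker skips ACL checks for that principal. A plain Kafka client makes the same point: the sketch below is a hypothetical consumer run as that user, and all connection details (bootstrap address, keystore/truststore paths and passwords, topic and group names) are placeholders rather than values from the test.

import java.time.Duration;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class SuperUserReadCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Placeholder connection and TLS settings; in the test these come from the
        // KafkaUser and cluster CA secrets mounted into the kafka-clients pod.
        props.put("bootstrap.servers", "my-cluster-kafka-bootstrap:9093");
        props.put("group.id", "super-user-check");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");
        props.put("security.protocol", "SSL");
        props.put("ssl.truststore.location", "/tmp/truststore.p12");   // placeholder
        props.put("ssl.truststore.password", "changeit");              // placeholder
        props.put("ssl.keystore.location", "/tmp/admin-user.p12");     // placeholder
        props.put("ssl.keystore.password", "changeit");                // placeholder

        // The ACLs above grant only WRITE and DESCRIBE, so a regular user would get an
        // authorization error on read; a super user consumes the topic anyway.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(List.of("my-topic"));                   // placeholder topic name
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(10));
            System.out.println("Received " + records.count() + " records");
        }
    }
}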
Use of io.strimzi.systemtest.annotations.ParallelTest in project strimzi by strimzi.
Class OauthAuthorizationIsolatedST, method testTeamBWriteToTopic.
@Description("As a member of team B, I should be able to write and read from topics that starts with b-")
@ParallelTest
@Order(4)
void testTeamBWriteToTopic(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
String consumerGroup = "x-" + clusterName;
String teamBProducerName = TEAM_B_PRODUCER_NAME + "-" + clusterName;
String teamBConsumerName = TEAM_B_CONSUMER_NAME + "-" + clusterName;
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients teamBOauthClientJob = new KafkaOauthClientsBuilder().withNamespaceName(INFRA_NAMESPACE).withProducerName(teamBProducerName).withConsumerName(teamBConsumerName).withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName)).withTopicName(topicName).withMessageCount(MESSAGE_COUNT).withConsumerGroup(consumerGroup).withOauthClientId(TEAM_B_CLIENT).withOauthClientSecret(TEAM_B_CLIENT_SECRET).withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).build();
LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, TOPIC_NAME);
// Producer will not produce messages because authorization topic will failed. Team A can write only to topic starting with 'x-'
resourceManager.createResource(extensionContext, teamBOauthClientJob.producerStrimziOauthTls(oauthClusterName));
JobUtils.waitForJobFailure(teamBProducerName, INFRA_NAMESPACE, 30_000);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamBProducerName);
LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, TOPIC_B);
teamBOauthClientJob = new KafkaOauthClientsBuilder(teamBOauthClientJob).withConsumerGroup("x-consumer_group_b-" + clusterName).withTopicName(TOPIC_B).build();
resourceManager.createResource(extensionContext, teamBOauthClientJob.producerStrimziOauthTls(oauthClusterName));
resourceManager.createResource(extensionContext, teamBOauthClientJob.consumerStrimziOauthTls(oauthClusterName));
ClientUtils.waitTillContinuousClientsFinish(teamBProducerName, teamBConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
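ClientUtils.waitForClientSuccess, JobUtils.waitForJobFailure and ClientUtils.waitTillContinuousClientsFinish all watch the Kubernetes Jobs that run the producer and consumer containers. The sketch below shows the rough idea of such a wait using a recent fabric8 Kubernetes client; it is a simplified illustration, not Strimzi's actual helper, and the method name, timeout handling and polling interval are made up.

import java.time.Duration;
import io.fabric8.kubernetes.api.model.batch.v1.Job;
import io.fabric8.kubernetes.client.KubernetesClient;

// Simplified illustration of waiting for a client Job to succeed; not the actual Strimzi helper.
public class JobWait {
    public static void waitForJobSuccess(KubernetesClient client, String namespace, String jobName, Duration timeout)
            throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeout.toMillis();
        while (System.currentTimeMillis() < deadline) {
            Job job = client.batch().v1().jobs().inNamespace(namespace).withName(jobName).get();
            if (job != null && job.getStatus() != null) {
                Integer succeeded = job.getStatus().getSucceeded();
                Integer failed = job.getStatus().getFailed();
                if (succeeded != null && succeeded > 0) {
                    return;                                        // the client Job finished successfully
                }
                if (failed != null && failed > 0) {
                    throw new IllegalStateException("Job " + jobName + " failed");
                }
            }
            Thread.sleep(1_000);                                   // poll once per second
        }
        throw new IllegalStateException("Timed out waiting for Job " + jobName);
    }
}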
Use of io.strimzi.systemtest.annotations.ParallelTest in project strimzi by strimzi.
Class OauthAuthorizationIsolatedST, method testTeamAWriteToTopic.
@Description("As a member of team A, I should be able to write to topics that starts with x- on any cluster and " + "and should also write and read to topics starting with 'a-'")
@ParallelTest
@Order(2)
void testTeamAWriteToTopic(ExtensionContext extensionContext) {
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String teamAProducerName = TEAM_A_PRODUCER_NAME + "-" + clusterName;
String teamAConsumerName = TEAM_A_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
String consumerGroup = "a-consumer_group-" + clusterName;
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
KafkaOauthClients teamAOauthClientJob = new KafkaOauthClientsBuilder().withNamespaceName(INFRA_NAMESPACE).withProducerName(teamAProducerName).withConsumerName(teamAConsumerName).withBootstrapAddress(KafkaResources.tlsBootstrapAddress(oauthClusterName)).withTopicName(topicName).withMessageCount(MESSAGE_COUNT).withConsumerGroup(consumerGroup).withOauthClientId(TEAM_A_CLIENT).withOauthClientSecret(TEAM_A_CLIENT_SECRET).withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).build();
LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, topicName);
LOGGER.info("Producer will not produce messages because authorization topic will failed. Team A can write only to topic starting with 'x-'");
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
JobUtils.waitForJobFailure(teamAProducerName, INFRA_NAMESPACE, 30_000);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
String topicXName = TOPIC_X + "-" + clusterName;
LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, topicXName);
teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withConsumerGroup(consumerGroup).withTopicName(topicXName).build();
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
JobUtils.waitForJobFailure(teamAProducerName, INFRA_NAMESPACE, 30_000);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
// Team A can not create topic starting with 'x-' only write to existing on
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicXName, INFRA_NAMESPACE).build());
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, teamAProducerName);
String topicAName = TOPIC_A + "-" + clusterName;
LOGGER.info("Sending {} messages to broker with topic name {}", MESSAGE_COUNT, topicAName);
teamAOauthClientJob = new KafkaOauthClientsBuilder(teamAOauthClientJob).withConsumerGroup(consumerGroup).withTopicName(topicAName).build();
resourceManager.createResource(extensionContext, teamAOauthClientJob.producerStrimziOauthTls(oauthClusterName));
ClientUtils.waitForClientSuccess(teamAProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
}
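All of these OAuth clients authenticate the same way: they exchange the configured client id and secret for an access token at the Keycloak token endpoint (the client_credentials grant) and then present that token to the broker over SASL/OAUTHBEARER on the TLS listener. The sketch below shows that token request with java.net.http; the endpoint URL, realm, client id and secret are placeholders rather than the test's constants, and a real run would also need to trust the Keycloak TLS certificate.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ClientCredentialsTokenExample {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint and credentials; the test takes these from
        // keycloakInstance.getOauthTokenEndpointUri(), TEAM_A_CLIENT and TEAM_A_CLIENT_SECRET.
        String tokenEndpoint = "https://keycloak:8443/realms/kafka-authz/protocol/openid-connect/token";
        String form = "grant_type=client_credentials&client_id=team-a-client&client_secret=changeit";

        HttpRequest request = HttpRequest.newBuilder(URI.create(tokenEndpoint))
            .header("Content-Type", "application/x-www-form-urlencoded")
            .POST(HttpRequest.BodyPublishers.ofString(form))
            .build();

        // The JSON response carries an access_token, which the Kafka client then presents
        // to the broker; note the default HttpClient will reject a self-signed Keycloak certificate.
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}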