use of io.strimzi.api.kafka.model.KafkaUser in project strimzi-kafka-operator by strimzi.
Class CustomAuthorizerST, method testAclWithSuperUser.
@ParallelTest
@Tag(INTERNAL_CLIENTS_USED)
void testAclWithSuperUser(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext, namespace);

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(CLUSTER_NAME, testStorage.getTopicName(), namespace).build());

    KafkaUser adminUser = KafkaUserTemplates.tlsUser(namespace, CLUSTER_NAME, ADMIN)
        .editSpec()
            .withNewKafkaUserAuthorizationSimple()
                .addNewAcl()
                    .withNewAclRuleTopicResource()
                        .withName(testStorage.getTopicName())
                    .endAclRuleTopicResource()
                    .withOperation(AclOperation.WRITE)
                .endAcl()
                .addNewAcl()
                    .withNewAclRuleTopicResource()
                        .withName(testStorage.getTopicName())
                    .endAclRuleTopicResource()
                    .withOperation(AclOperation.DESCRIBE)
                .endAcl()
            .endKafkaUserAuthorizationSimple()
        .endSpec()
        .build();

    resourceManager.createResource(extensionContext, adminUser);

    LOGGER.info("Checking that kafka super user:{} is able to send messages to topic:{}", ADMIN, testStorage.getTopicName());

    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .withNamespaceName(testStorage.getNamespaceName())
        .withMessageCount(MESSAGE_COUNT)
        .withBootstrapAddress(KafkaResources.tlsBootstrapAddress(CLUSTER_NAME))
        .withTopicName(testStorage.getTopicName())
        .withUserName(ADMIN)
        .build();

    resourceManager.createResource(extensionContext, kafkaClients.producerTlsStrimzi(CLUSTER_NAME));
    ClientUtils.waitForClientSuccess(testStorage.getProducerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);

    LOGGER.info("Checking that kafka super user:{} is able to read messages from topic:{} even though the configured ACLs only allow the write operation", ADMIN, testStorage.getTopicName());

    resourceManager.createResource(extensionContext, kafkaClients.consumerTlsStrimzi(CLUSTER_NAME));
    ClientUtils.waitForClientSuccess(testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
}
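The read step only succeeds because the broker treats ADMIN as a super user, so the write-only ACLs above are bypassed. The Kafka cluster for this suite is deployed elsewhere in CustomAuthorizerST; a minimal sketch of how a super user could be declared through custom authorization (the authorizer class name and the exact generated builder methods are assumptions, not copied from the test) might look like:

// Sketch only: authorizer class and builder method names are assumptions.
Kafka kafka = KafkaTemplates.kafkaEphemeral(CLUSTER_NAME, 1)
    .editSpec()
        .editKafka()
            .withNewKafkaAuthorizationCustom()
                // delegate authorization decisions to Kafka's own ACL authorizer (assumed class)
                .withAuthorizerClass("kafka.security.authorizer.AclAuthorizer")
                // TLS users are identified by the CN of their client certificate
                .withSuperUsers("CN=" + ADMIN)
            .endKafkaAuthorizationCustom()
        .endKafka()
    .endSpec()
    .build();
resourceManager.createResource(extensionContext, kafka);

TLS users appear to the authorizer under their certificate distinguished name, which is why the CN= prefix is needed in the superUsers list.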
use of io.strimzi.api.kafka.model.KafkaUser in project strimzi-kafka-operator by strimzi.
Class UserST, method testTlsUserWithQuotas.
@ParallelTest
void testTlsUserWithQuotas(ExtensionContext extensionContext) {
    KafkaUser user = KafkaUserTemplates.tlsUser(namespace, userClusterName, "encrypted-arnost").build();
    testUserWithQuotas(extensionContext, user);
}
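The shared helper testUserWithQuotas is not shown on this page; the quotas it exercises live under spec.quotas of the KafkaUser. A minimal sketch of attaching quotas to the user built above, with illustrative values rather than the ones the helper really uses:

// Illustrative quota values; the real helper supplies its own numbers.
KafkaUser userWithQuotas = new KafkaUserBuilder(user)
    .editSpec()
        .withNewQuotas()
            .withProducerByteRate(1000)        // bytes/s the user may produce
            .withConsumerByteRate(2000)        // bytes/s the user may fetch
            .withRequestPercentage(42)         // share of broker request-handler time
            .withControllerMutationRate(10.0)  // partition mutations per second
        .endQuotas()
    .endSpec()
    .build();
resourceManager.createResource(extensionContext, userWithQuotas);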
use of io.strimzi.api.kafka.model.KafkaUser in project strimzi-kafka-operator by strimzi.
Class UserST, method testUpdateUser.
@ParallelTest
@Tag(ACCEPTANCE)
void testUpdateUser(ExtensionContext extensionContext) {
    String userName = mapWithTestUsers.get(extensionContext.getDisplayName());

    resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(namespace, userClusterName, userName).build());

    String kafkaUserSecret = TestUtils.toJsonString(kubeClient(namespace).getSecret(userName));
    assertThat(kafkaUserSecret, hasJsonPath("$.data['ca.crt']", notNullValue()));
    assertThat(kafkaUserSecret, hasJsonPath("$.data['user.crt']", notNullValue()));
    assertThat(kafkaUserSecret, hasJsonPath("$.data['user.key']", notNullValue()));
    assertThat(kafkaUserSecret, hasJsonPath("$.metadata.name", equalTo(userName)));
    assertThat(kafkaUserSecret, hasJsonPath("$.metadata.namespace", equalTo(namespace)));

    KafkaUser kUser = KafkaUserResource.kafkaUserClient().inNamespace(namespace).withName(userName).get();
    String kafkaUserAsJson = TestUtils.toJsonString(kUser);

    assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.name", equalTo(userName)));
    assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.namespace", equalTo(namespace)));
    assertThat(kafkaUserAsJson, hasJsonPath("$.spec.authentication.type", equalTo(Constants.TLS_LISTENER_DEFAULT_NAME)));

    long observedGeneration = KafkaUserResource.kafkaUserClient().inNamespace(namespace).withName(userName).get().getStatus().getObservedGeneration();

    KafkaUserResource.replaceUserResourceInSpecificNamespace(userName, ku -> {
        ku.getMetadata().setResourceVersion(null);
        ku.getSpec().setAuthentication(new KafkaUserScramSha512ClientAuthentication());
    }, namespace);

    KafkaUserUtils.waitForKafkaUserIncreaseObserverGeneration(namespace, observedGeneration, userName);
    KafkaUserUtils.waitForKafkaUserCreation(namespace, userName);

    String anotherKafkaUserSecret = TestUtils.toJsonString(kubeClient(namespace).getSecret(namespace, userName));
    assertThat(anotherKafkaUserSecret, hasJsonPath("$.data.password", notNullValue()));

    kUser = Crds.kafkaUserOperation(kubeClient().getClient()).inNamespace(namespace).withName(userName).get();
    kafkaUserAsJson = TestUtils.toJsonString(kUser);

    assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.name", equalTo(userName)));
    assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.namespace", equalTo(namespace)));
    assertThat(kafkaUserAsJson, hasJsonPath("$.spec.authentication.type", equalTo("scram-sha-512")));

    Crds.kafkaUserOperation(kubeClient().getClient()).inNamespace(namespace).delete(kUser);
    KafkaUserUtils.waitForKafkaUserDeletion(userName);
}
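The switch from TLS to SCRAM-SHA-512 authentication above goes through the test helper KafkaUserResource.replaceUserResourceInSpecificNamespace. The same update can be expressed directly against the typed client the test already uses for reads; a sketch, assuming the usual generated KafkaUserBuilder methods:

// Sketch: perform the same authentication switch directly with the typed Fabric8 client.
Crds.kafkaUserOperation(kubeClient().getClient())
    .inNamespace(namespace)
    .withName(userName)
    .edit(u -> new KafkaUserBuilder(u)
        .editSpec()
            // replace the tls client authentication with SCRAM-SHA-512
            .withAuthentication(new KafkaUserScramSha512ClientAuthentication())
        .endSpec()
        .build());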
use of io.strimzi.api.kafka.model.KafkaUser in project strimzi-kafka-operator by strimzi.
Class UserST, method testScramUserWithQuotas.
@ParallelTest
void testScramUserWithQuotas(ExtensionContext extensionContext) {
    KafkaUser user = KafkaUserTemplates.scramShaUser(namespace, userClusterName, "scramed-arnost").build();
    testUserWithQuotas(extensionContext, user);
}
use of io.strimzi.api.kafka.model.KafkaUser in project strimzi-kafka-operator by strimzi.
Class ListenersST, method testMessagesTlsScramShaWithPredefinedPassword.
@ParallelNamespaceTest
@KRaftNotSupported("UserOperator is not supported by KRaft mode and is used in this test case")
void testMessagesTlsScramShaWithPredefinedPassword(ExtensionContext extensionContext) {
    final TestStorage testStorage = new TestStorage(extensionContext);

    final String firstUnencodedPassword = "completely_secret_password";
    final String secondUnencodedPassword = "completely_different_secret_password";
    final String firstEncodedPassword = Base64.getEncoder().encodeToString(firstUnencodedPassword.getBytes(StandardCharsets.UTF_8));
    final String secondEncodedPassword = Base64.getEncoder().encodeToString(secondUnencodedPassword.getBytes(StandardCharsets.UTF_8));
    final String secretName = testStorage.getClusterName() + "-secret";

    Secret password = new SecretBuilder()
        .withNewMetadata()
            .withName(secretName)
        .endMetadata()
        .addToData("password", firstEncodedPassword)
        .build();

    kubeClient().namespace(testStorage.getNamespaceName()).createSecret(password);
    assertThat("Password in secret is not correct", kubeClient().namespace(testStorage.getNamespaceName()).getSecret(secretName).getData().get("password"), is(firstEncodedPassword));

    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(testStorage.getClusterName(), testStorage.getUserName())
        .editSpec()
            .withNewKafkaUserScramSha512ClientAuthentication()
                .withNewPassword()
                    .withNewValueFrom()
                        .withNewSecretKeyRef("password", secretName, false)
                    .endValueFrom()
                .endPassword()
            .endKafkaUserScramSha512ClientAuthentication()
        .endSpec()
        .build();

    resourceManager.createResource(extensionContext,
        KafkaTemplates.kafkaEphemeral(testStorage.getClusterName(), 3)
            .editSpec()
                .editKafka()
                    .withListeners(new GenericKafkaListenerBuilder()
                        .withType(KafkaListenerType.INTERNAL)
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9096)
                        .withTls(true)
                        .withNewKafkaListenerAuthenticationScramSha512Auth()
                        .endKafkaListenerAuthenticationScramSha512Auth()
                        .build())
                .endKafka()
            .endSpec()
            .build(),
        kafkaUser,
        KafkaTopicTemplates.topic(testStorage.getClusterName(), testStorage.getTopicName()).build());

    KafkaClients kafkaClients = new KafkaClientsBuilder()
        .withNamespaceName(testStorage.getNamespaceName())
        .withTopicName(testStorage.getTopicName())
        .withBootstrapAddress(KafkaResources.bootstrapServiceName(testStorage.getClusterName()) + ":9096")
        .withMessageCount(MESSAGE_COUNT)
        .withUserName(testStorage.getUserName())
        .withProducerName(testStorage.getProducerName())
        .withConsumerName(testStorage.getConsumerName())
        .build();

    resourceManager.createResource(extensionContext, kafkaClients.producerScramShaTlsStrimzi(testStorage.getClusterName()), kafkaClients.consumerScramShaTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);

    LOGGER.info("Changing the password in secret: {}; we should still be able to send and receive messages", secretName);
    password = new SecretBuilder(password).addToData("password", secondEncodedPassword).build();
    kubeClient().namespace(testStorage.getNamespaceName()).createSecret(password);
    SecretUtils.waitForUserPasswordChange(testStorage.getNamespaceName(), testStorage.getUserName(), secondEncodedPassword);

    LOGGER.info("Sending and receiving messages with the new password");
    kafkaClients = new KafkaClientsBuilder(kafkaClients)
        .withConsumerGroup(ClientUtils.generateRandomConsumerGroup())
        .build();

    resourceManager.createResource(extensionContext, kafkaClients.producerScramShaTlsStrimzi(testStorage.getClusterName()), kafkaClients.consumerScramShaTlsStrimzi(testStorage.getClusterName()));
    ClientUtils.waitForClientsSuccess(testStorage.getProducerName(), testStorage.getConsumerName(), testStorage.getNamespaceName(), MESSAGE_COUNT);
}
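SecretUtils.waitForUserPasswordChange polls the Secret that the User Operator keeps for the KafkaUser (named after the user, with the SCRAM password under the "password" key, as asserted elsewhere in these tests). A short sketch of reading that Secret back with the test's own client helpers to confirm the predefined password was picked up:

// Sketch: read the user's Secret back and compare the stored password with the new one.
String storedEncodedPassword = kubeClient()
    .getSecret(testStorage.getNamespaceName(), testStorage.getUserName())
    .getData()
    .get("password");
String storedPassword = new String(Base64.getDecoder().decode(storedEncodedPassword), StandardCharsets.UTF_8);
assertThat(storedPassword, is(secondUnencodedPassword));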