
Example 1 with Kafka

Use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kafka-admin-api by bf2fc6cc711aee1a0c2a.

The class AdminClientFactory, method createAdminClient.

/**
 * Factory for the Kafka Admin Client, shared by all Kafka resource routes.
 * Builds the map of properties used to configure the client. When OAuth has
 * been enabled via the environment, the access token is read from the
 * authenticated user principal present in the context (populated when the
 * client presented a valid JWT) and placed into the SASL JAAS configuration.
 * When Basic authentication is enabled, credentials are taken from the HTTP
 * Authorization header instead.
 */
public AdminClient createAdminClient() {
    Map<String, Object> acConfig = config.getAcConfig();
    if (config.isOauthEnabled()) {
        if (token.isResolvable()) {
            final String accessToken = token.get().getRawToken();
            if (accessToken == null) {
                throw new NotAuthorizedException(Response.status(Status.UNAUTHORIZED));
            }
            acConfig.put(SaslConfigs.SASL_JAAS_CONFIG, String.format(SASL_OAUTH_CONFIG_TEMPLATE, accessToken));
        } else {
            log.warn("OAuth is enabled, but there is no JWT principal");
        }
    } else if (config.isBasicEnabled()) {
        extractCredentials(Optional.ofNullable(headers.get().getHeaderString(HttpHeaders.AUTHORIZATION)))
            .ifPresentOrElse(
                credentials -> acConfig.put(SaslConfigs.SASL_JAAS_CONFIG, credentials),
                () -> {
                    throw new NotAuthorizedException("Invalid or missing credentials", Response.status(Status.UNAUTHORIZED).build());
                });
    } else {
        log.debug("OAuth is disabled - no attempt to set access token in Admin Client config");
    }
    return AdminClient.create(acConfig);
}
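
For orientation, a caller could use the factory roughly as sketched below. The TopicNameService class, its injection point, and the topic-listing call are illustrative assumptions, not code from the project; only createAdminClient() comes from the snippet above.

import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import org.apache.kafka.clients.admin.AdminClient;

public class TopicNameService {

    // Hypothetical caller: the factory is request-scoped, so each request builds a
    // fresh AdminClient carrying that caller's OAuth token or Basic credentials.
    @Inject
    AdminClientFactory clientFactory;

    public Set<String> listTopicNames() throws Exception {
        try (AdminClient client = clientFactory.createAdminClient()) {
            // Standard Kafka Admin API call; bound the future with a timeout.
            return client.listTopics().names().get(30, TimeUnit.SECONDS);
        }
    }
}
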
Also used : Consumer(org.apache.kafka.clients.consumer.Consumer) Producer(org.apache.kafka.clients.producer.Producer) KafkaAdminConfigRetriever(org.bf2.admin.kafka.admin.KafkaAdminConfigRetriever) Logger(org.jboss.logging.Logger) Vertx(io.vertx.core.Vertx) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) AdminClient(org.apache.kafka.clients.admin.AdminClient) Inject(javax.inject.Inject) Objects(java.util.Objects) Base64(java.util.Base64) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) HttpHeaders(javax.ws.rs.core.HttpHeaders) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Response(javax.ws.rs.core.Response) RequestScoped(javax.enterprise.context.RequestScoped) JsonWebToken(org.eclipse.microprofile.jwt.JsonWebToken) Map(java.util.Map) NotAuthorizedException(javax.ws.rs.NotAuthorizedException) Optional(java.util.Optional) SaslConfigs(org.apache.kafka.common.config.SaslConfigs) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) Status(javax.ws.rs.core.Response.Status) Instance(javax.enterprise.inject.Instance) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer)

Example 2 with Kafka

Use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kafka-admin-api by bf2fc6cc711aee1a0c2a.

The class AccessControlListIT, method testGetAclsOrderByProperies.

@ParameterizedTest
@CsvSource({
    Types.AclBinding.PROP_PERMISSION + "," + SORT_ASC, Types.AclBinding.PROP_PERMISSION + "," + SORT_DESC,
    Types.AclBinding.PROP_RESOURCE_TYPE + "," + SORT_ASC, Types.AclBinding.PROP_RESOURCE_TYPE + "," + SORT_DESC,
    Types.AclBinding.PROP_PATTERN_TYPE + "," + SORT_ASC, Types.AclBinding.PROP_PATTERN_TYPE + "," + SORT_DESC,
    Types.AclBinding.PROP_OPERATION + "," + SORT_ASC, Types.AclBinding.PROP_OPERATION + "," + SORT_DESC,
    Types.AclBinding.PROP_PRINCIPAL + "," + SORT_ASC, Types.AclBinding.PROP_PRINCIPAL + "," + SORT_DESC,
    Types.AclBinding.PROP_RESOURCE_NAME + "," + SORT_ASC, Types.AclBinding.PROP_RESOURCE_NAME + "," + SORT_DESC
})
void testGetAclsOrderByProperies(String orderKey, String order) throws Exception {
    JsonObject allowedResourceOperations = Json.createReader(new StringReader(validResourceOperations)).readObject();
    List<JsonObject> newBindings = Stream.of(Json.createObjectBuilder().build())
        .flatMap(binding -> join(binding, Types.AclBinding.PROP_PERMISSION, AclPermissionType.ALLOW, AclPermissionType.DENY))
        .flatMap(binding -> join(binding, Types.AclBinding.PROP_RESOURCE_TYPE, ResourceType.TOPIC, ResourceType.GROUP, ResourceType.CLUSTER, ResourceType.TRANSACTIONAL_ID))
        .flatMap(binding -> join(binding, Types.AclBinding.PROP_PATTERN_TYPE, PatternType.LITERAL, PatternType.PREFIXED))
        .flatMap(binding -> join(binding, Types.AclBinding.PROP_OPERATION, AclOperation.READ, AclOperation.ALL, AclOperation.ALTER, AclOperation.DELETE, AclOperation.CREATE, AclOperation.ALTER_CONFIGS, AclOperation.DESCRIBE, AclOperation.DESCRIBE_CONFIGS, AclOperation.WRITE))
        .flatMap(binding -> join(binding, Types.AclBinding.PROP_PRINCIPAL, "User:{uuid}"))
        .flatMap(binding -> join(binding, Types.AclBinding.PROP_RESOURCE_NAME, "resource-{uuid}"))
        .filter(binding -> {
            String resourceType = binding.getString(Types.AclBinding.PROP_RESOURCE_TYPE).toLowerCase(Locale.US);
            String operation = binding.getString(Types.AclBinding.PROP_OPERATION).toLowerCase(Locale.US);
            return allowedResourceOperations.getJsonArray(resourceType).stream().filter(value -> value.getValueType() == ValueType.STRING).map(JsonString.class::cast).map(JsonString::getString).anyMatch(operation::equals);
        })
        .map(binding -> {
            if (ResourceType.CLUSTER.name().equals(binding.getString(Types.AclBinding.PROP_RESOURCE_TYPE))) {
                // Only value allowed is "kafka-cluster"
                binding = Json.createObjectBuilder(binding).add(Types.AclBinding.PROP_RESOURCE_NAME, "kafka-cluster").build();
            }
            return binding;
        })
        .distinct()
        .collect(Collectors.toList());
    List<String> sortKeys = new LinkedList<>(AccessControlOperations.SORT_KEYS.keySet());
    // Remove the primary sort key, handled as a special case
    sortKeys.remove(orderKey);
    List<JsonObject> expectedValues = newBindings.stream().map(JsonObject.class::cast).sorted((j1, j2) -> {
        int result;
        if ((result = j1.getString(orderKey).compareTo(j2.getString(orderKey))) != 0) {
            return SORT_DESC.equals(order) ? (result * -1) : result;
        }
        for (String key : sortKeys) {
            if ((result = j1.getString(key).compareTo(j2.getString(key))) != 0) {
                return result;
            }
        }
        return 0;
    }).collect(Collectors.toList());
    final int expectedTotal = newBindings.size();
    final int pageSize = expectedTotal + 1;
    final var queryParams = Map.of("page", "1", "size", String.valueOf(pageSize), "orderKey", orderKey, "order", order);
    Properties adminConfig = ClientsConfig.getAdminConfigOauth(config, tokenUtils.getToken(UserType.OWNER.getUsername()));
    /*
     * Due to the number of ACLs created for this case (> 200), using the
     * bulk API directly is necessary.
     */
    try (Admin admin = Admin.create(adminConfig)) {
        admin.createAcls(newBindings.stream().map(Types.AclBinding::fromJsonObject).map(Types.AclBinding::toKafkaBinding).collect(Collectors.toList()))
            .all()
            .whenComplete((result, error) -> {
                if (error != null) {
                    fail(error);
                } else {
                    var response = getAcls(UserType.OWNER, queryParams)
                        .body("total", equalTo(expectedTotal))
                        .body("size", equalTo(pageSize))
                        .body("page", equalTo(1))
                        .body("items", hasSize(expectedTotal));
                    JsonObject responseBody = Json.createReader(response.extract().asInputStream()).readObject();
                    List<JsonObject> responseValues = responseBody.getJsonArray("items").stream().map(JsonObject.class::cast).collect(Collectors.toList());
                    assertEquals(expectedValues, responseValues, "Unexpected response order");
                }
            })
            .toCompletionStage().toCompletableFuture().get(30, TimeUnit.SECONDS);
    }
}
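
The snippet relies on a join(...) helper that is not shown. Below is a minimal sketch of what such a helper could look like, assuming it expands one partial binding into a stream of bindings (one per candidate value) so that the chained flatMap calls build a cross product, and assuming "{uuid}" placeholders are filled with a generated UUID; the actual helper in AccessControlListIT may differ.

import java.util.UUID;
import java.util.stream.Stream;
import javax.json.Json;
import javax.json.JsonObject;

// Hypothetical sketch, not the project's implementation.
static Stream<JsonObject> join(JsonObject binding, String key, Object... values) {
    return Stream.of(values)
            .map(Object::toString)
            // Assumption: "{uuid}" placeholders are substituted with a generated UUID
            .map(value -> value.replace("{uuid}", UUID.randomUUID().toString()))
            // Copy the partial binding and add the new property, one object per value
            .map(value -> Json.createObjectBuilder(binding).add(key, value).build());
}
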
Also used : IntStream(java.util.stream.IntStream) Assertions.fail(org.junit.jupiter.api.Assertions.fail) BeforeEach(org.junit.jupiter.api.BeforeEach) CsvSource(org.junit.jupiter.params.provider.CsvSource) ValidatableResponse(io.restassured.response.ValidatableResponse) AclPermissionType(org.apache.kafka.common.acl.AclPermissionType) UserType(org.bf2.admin.kafka.systemtest.deployment.DeploymentManager.UserType) PatternType(org.apache.kafka.common.resource.PatternType) QuarkusTest(io.quarkus.test.junit.QuarkusTest) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) ClientsConfig(org.bf2.admin.kafka.systemtest.utils.ClientsConfig) Locale(java.util.Locale) Map(java.util.Map) Admin(org.apache.kafka.clients.admin.Admin) Json(javax.json.Json) Matchers.hasSize(org.hamcrest.Matchers.hasSize) Assertions.assertEquals(org.junit.jupiter.api.Assertions.assertEquals) LinkedList(java.util.LinkedList) TestOAuthProfile(org.bf2.admin.kafka.systemtest.TestOAuthProfile) Status(javax.ws.rs.core.Response.Status) JsonObject(javax.json.JsonObject) Properties(java.util.Properties) Matchers.notNullValue(org.hamcrest.Matchers.notNullValue) KafkaAdminConfigRetriever(org.bf2.admin.kafka.admin.KafkaAdminConfigRetriever) TestProfile(io.quarkus.test.junit.TestProfile) AclOperation(org.apache.kafka.common.acl.AclOperation) Config(org.eclipse.microprofile.config.Config) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) JsonString(javax.json.JsonString) Test(org.junit.jupiter.api.Test) TimeUnit(java.util.concurrent.TimeUnit) ValueType(javax.json.JsonValue.ValueType) TokenUtils(org.bf2.admin.kafka.systemtest.utils.TokenUtils) AfterEach(org.junit.jupiter.api.AfterEach) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) List(java.util.List) Stream(java.util.stream.Stream) ResourceType(org.apache.kafka.common.resource.ResourceType) StringReader(java.io.StringReader) AccessControlOperations(org.bf2.admin.kafka.admin.AccessControlOperations) Matchers.equalTo(org.hamcrest.Matchers.equalTo) Assertions.assertTrue(org.junit.jupiter.api.Assertions.assertTrue) Method(io.restassured.http.Method) RestAssured.given(io.restassured.RestAssured.given) Types(org.bf2.admin.kafka.admin.model.Types) JsonObjectBuilder(javax.json.JsonObjectBuilder) Types(org.bf2.admin.kafka.admin.model.Types) JsonObject(javax.json.JsonObject) JsonString(javax.json.JsonString) Properties(java.util.Properties) Admin(org.apache.kafka.clients.admin.Admin) LinkedList(java.util.LinkedList) StringReader(java.io.StringReader) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) List(java.util.List) JsonString(javax.json.JsonString) CsvSource(org.junit.jupiter.params.provider.CsvSource) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Example 3 with Kafka

Use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kafka-admin-api by bf2fc6cc711aee1a0c2a.

The class DeploymentManager, method deployStrimziKafka.

private KafkaContainer<?> deployStrimziKafka() {
    LOGGER.info("Deploying Strimzi Kafka container");
    class StrimziPlainKafkaContainer extends StrimziKafkaContainer implements KafkaContainer<StrimziKafkaContainer> {

        StrimziPlainKafkaContainer(String version) {
            super(version);
        }
    }
    String imageTag = System.getProperty("strimzi-kafka.tag");
    var container = new StrimziPlainKafkaContainer(imageTag)
        .withLabels(Collections.singletonMap("test-ident", Environment.TEST_CONTAINER_LABEL))
        .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.plain-kafka"), true))
        .withCreateContainerCmdModifier(cmd -> cmd.withName(name("plain-kafka")))
        .withNetwork(testNetwork);
    container.start();
    return (KafkaContainer<?>) container;
}
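
Once the container is running, a test could point a plain Admin client at it as sketched below. The adminFor helper is an illustrative assumption, relying on the getBootstrapServers() accessor the Strimzi test container provides.

import java.util.Properties;
import io.strimzi.StrimziKafkaContainer;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;

// Hypothetical helper (not from DeploymentManager): build an Admin client
// against the plain listener of the started Strimzi container.
static Admin adminFor(StrimziKafkaContainer kafka) {
    Properties props = new Properties();
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
    return Admin.create(props);
}
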
Also used : BindMode(org.testcontainers.containers.BindMode) CONFIG(org.bf2.admin.kafka.systemtest.Environment.CONFIG) Logger(org.jboss.logging.Logger) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) Network(org.testcontainers.containers.Network) StrimziKafkaContainer(io.strimzi.StrimziKafkaContainer) ArrayList(java.util.ArrayList) Slf4jLogConsumer(org.testcontainers.containers.output.Slf4jLogConsumer) Duration(java.time.Duration) Map(java.util.Map) GenericContainer(org.testcontainers.containers.GenericContainer) Startable(org.testcontainers.lifecycle.Startable) Wait(org.testcontainers.containers.wait.strategy.Wait) Properties(java.util.Properties) KafkaAdminConfigRetriever(org.bf2.admin.kafka.admin.KafkaAdminConfigRetriever) Environment(org.bf2.admin.kafka.systemtest.Environment) IOException(java.io.IOException) UUID(java.util.UUID) InputStreamReader(java.io.InputStreamReader) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) UncheckedIOException(java.io.UncheckedIOException) Base64(java.util.Base64) List(java.util.List) MountableFile(org.testcontainers.utility.MountableFile) BufferedReader(java.io.BufferedReader) Collections(java.util.Collections) InputStream(java.io.InputStream)

Example 4 with Kafka

Use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kas-fleetshard by bf2fc6cc711aee1a0c2a.

The class AbstractKafkaCluster, method getReadiness.

@Override
public OperandReadiness getReadiness(ManagedKafka managedKafka) {
    Kafka kafka = cachedKafka(managedKafka);
    if (kafka == null) {
        return new OperandReadiness(Status.False, Reason.Installing, String.format("Kafka %s does not exist", kafkaClusterName(managedKafka)));
    }
    Optional<Condition> notReady = kafkaCondition(kafka, c -> "NotReady".equals(c.getType()));
    if (notReady.filter(c -> "True".equals(c.getStatus())).isPresent()) {
        Condition c = notReady.get();
        return new OperandReadiness(Status.False, "Creating".equals(c.getReason()) ? Reason.Installing : Reason.Error, c.getMessage());
    }
    if (isStrimziUpdating(managedKafka)) {
        // the status here is actually unknown
        return new OperandReadiness(Status.True, Reason.StrimziUpdating, null);
    }
    if (isKafkaUpdating(managedKafka) || isKafkaUpgradeStabilityChecking(managedKafka)) {
        return new OperandReadiness(Status.True, Reason.KafkaUpdating, null);
    }
    if (isKafkaIbpUpdating(managedKafka)) {
        return new OperandReadiness(Status.True, Reason.KafkaIbpUpdating, null);
    }
    Optional<Condition> ready = kafkaCondition(kafka, c -> "Ready".equals(c.getType()));
    if (ready.filter(c -> "True".equals(c.getStatus())).isPresent()) {
        return new OperandReadiness(Status.True, null, null);
    }
    if (isReconciliationPaused(managedKafka)) {
        // strimzi may in the future report the status even when paused, but for now we don't know
        return new OperandReadiness(Status.Unknown, Reason.Paused, String.format("Kafka %s is paused for an unknown reason", kafkaClusterName(managedKafka)));
    }
    return new OperandReadiness(Status.False, Reason.Installing, String.format("Kafka %s is not providing status", kafkaClusterName(managedKafka)));
}
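
getReadiness leans on a kafkaCondition(...) helper that is not shown here. A minimal sketch of how such a lookup might work, assuming it scans the Kafka CR status conditions and tolerates a missing status, is below; AbstractKafkaCluster's real helper may differ.

import java.util.Collections;
import java.util.Optional;
import java.util.function.Predicate;
import io.strimzi.api.kafka.model.Kafka;
import io.strimzi.api.kafka.model.status.Condition;
import io.strimzi.api.kafka.model.status.KafkaStatus;

// Hypothetical sketch: return the first status condition matching the predicate.
static Optional<Condition> kafkaCondition(Kafka kafka, Predicate<Condition> predicate) {
    return Optional.ofNullable(kafka.getStatus())
            .map(KafkaStatus::getConditions)
            .orElse(Collections.emptyList())
            .stream()
            .filter(predicate)
            .findFirst();
}
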
Also used : Condition(io.strimzi.api.kafka.model.status.Condition) Quantity(io.fabric8.kubernetes.api.model.Quantity) Context(io.javaoperatorsdk.operator.api.Context) KafkaStatus(io.strimzi.api.kafka.model.status.KafkaStatus) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) Arrays(java.util.Arrays) GenericSecretSource(io.strimzi.api.kafka.model.GenericSecretSource) Status(org.bf2.operator.resources.v1alpha1.ManagedKafkaCondition.Status) Logger(org.jboss.logging.Logger) KafkaListenerAuthentication(io.strimzi.api.kafka.model.listener.KafkaListenerAuthentication) StrimziManager(org.bf2.operator.managers.StrimziManager) GenericKafkaListenerConfigurationBrokerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBrokerBuilder) GenericKafkaListenerConfigurationBootstrapBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBootstrapBuilder) Function(java.util.function.Function) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) CertAndKeySecretSourceBuilder(io.strimzi.api.kafka.model.CertAndKeySecretSourceBuilder) GenericKafkaListenerConfigurationBroker(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBroker) SecuritySecretManager(org.bf2.operator.managers.SecuritySecretManager) Map(java.util.Map) KafkaManager(org.bf2.operator.managers.KafkaManager) KafkaResourceClient(org.bf2.operator.clients.KafkaResourceClient) CertAndKeySecretSource(io.strimzi.api.kafka.model.CertAndKeySecretSource) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) Predicate(java.util.function.Predicate) Pod(io.fabric8.kubernetes.api.model.Pod) InformerManager(org.bf2.operator.managers.InformerManager) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) Reason(org.bf2.operator.resources.v1alpha1.ManagedKafkaCondition.Reason) OpenShiftClient(io.fabric8.openshift.client.OpenShiftClient) ManagedKafkaAuthenticationOAuth(org.bf2.operator.resources.v1alpha1.ManagedKafkaAuthenticationOAuth) Objects(java.util.Objects) List(java.util.List) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) GenericSecretSourceBuilder(io.strimzi.api.kafka.model.GenericSecretSourceBuilder) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) KubernetesClient(io.fabric8.kubernetes.client.KubernetesClient) KafkaListenerAuthenticationOAuthBuilder(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuthBuilder) Optional(java.util.Optional) NetworkPolicyPeerBuilder(io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPeerBuilder) Condition(io.strimzi.api.kafka.model.status.Condition) ConfigProperty(org.eclipse.microprofile.config.inject.ConfigProperty) Kafka(io.strimzi.api.kafka.model.Kafka) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka) Collections(java.util.Collections) GenericKafkaListenerConfigurationBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBuilder) Kafka(io.strimzi.api.kafka.model.Kafka) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka)

Example 5 with Kafka

Use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kas-fleetshard by bf2fc6cc711aee1a0c2a.

The class AbstractKafkaCluster, method isKafkaAnnotationUpdating.

private boolean isKafkaAnnotationUpdating(ManagedKafka managedKafka, String annotation, Function<Kafka, String> valueSupplier) {
    Kafka kafka = cachedKafka(managedKafka);
    if (kafka == null) {
        return false;
    }
    List<Pod> kafkaPods = kubernetesClient.pods()
        .inNamespace(kafka.getMetadata().getNamespace())
        .withLabel("strimzi.io/name", kafka.getMetadata().getName() + "-kafka")
        .list()
        .getItems();
    boolean isKafkaAnnotationUpdating = false;
    String expectedValue = valueSupplier.apply(kafka);
    for (Pod kafkaPod : kafkaPods) {
        String annotationValueOnPod = Optional.ofNullable(kafkaPod.getMetadata().getAnnotations()).map(annotations -> annotations.get(annotation)).orElse(null);
        if (annotationValueOnPod == null) {
            log.errorf("Kafka pod [%s] is missing annotation '%s'", kafkaPod.getMetadata().getName(), annotation);
            throw new RuntimeException();
        }
        log.tracef("Kafka pod [%s] annotation '%s' = %s [expected value %s]", kafkaPod.getMetadata().getName(), annotation, annotationValueOnPod, expectedValue);
        isKafkaAnnotationUpdating |= !annotationValueOnPod.equals(expectedValue);
        if (isKafkaAnnotationUpdating) {
            break;
        }
    }
    return isKafkaAnnotationUpdating;
}
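
As a hedged example of how this helper might be wired up (an assumption based on the annotations Strimzi places on broker pods, not confirmed project code): the cluster counts as updating while any broker pod still carries a different value than the Kafka resource declares.

// Hypothetical caller; the annotation name and value supplier are assumptions.
protected boolean isKafkaVersionUpdating(ManagedKafka managedKafka) {
    return isKafkaAnnotationUpdating(managedKafka,
            "strimzi.io/kafka-version",
            kafka -> kafka.getSpec().getKafka().getVersion());
}
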
Also used : Quantity(io.fabric8.kubernetes.api.model.Quantity) Context(io.javaoperatorsdk.operator.api.Context) KafkaStatus(io.strimzi.api.kafka.model.status.KafkaStatus) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) Arrays(java.util.Arrays) GenericSecretSource(io.strimzi.api.kafka.model.GenericSecretSource) Status(org.bf2.operator.resources.v1alpha1.ManagedKafkaCondition.Status) Logger(org.jboss.logging.Logger) KafkaListenerAuthentication(io.strimzi.api.kafka.model.listener.KafkaListenerAuthentication) StrimziManager(org.bf2.operator.managers.StrimziManager) GenericKafkaListenerConfigurationBrokerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBrokerBuilder) GenericKafkaListenerConfigurationBootstrapBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBootstrapBuilder) Function(java.util.function.Function) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) CertAndKeySecretSourceBuilder(io.strimzi.api.kafka.model.CertAndKeySecretSourceBuilder) GenericKafkaListenerConfigurationBroker(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBroker) SecuritySecretManager(org.bf2.operator.managers.SecuritySecretManager) Map(java.util.Map) KafkaManager(org.bf2.operator.managers.KafkaManager) KafkaResourceClient(org.bf2.operator.clients.KafkaResourceClient) CertAndKeySecretSource(io.strimzi.api.kafka.model.CertAndKeySecretSource) CertSecretSource(io.strimzi.api.kafka.model.CertSecretSource) Predicate(java.util.function.Predicate) Pod(io.fabric8.kubernetes.api.model.Pod) InformerManager(org.bf2.operator.managers.InformerManager) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) Reason(org.bf2.operator.resources.v1alpha1.ManagedKafkaCondition.Reason) OpenShiftClient(io.fabric8.openshift.client.OpenShiftClient) ManagedKafkaAuthenticationOAuth(org.bf2.operator.resources.v1alpha1.ManagedKafkaAuthenticationOAuth) Objects(java.util.Objects) List(java.util.List) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) GenericSecretSourceBuilder(io.strimzi.api.kafka.model.GenericSecretSourceBuilder) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) KubernetesClient(io.fabric8.kubernetes.client.KubernetesClient) KafkaListenerAuthenticationOAuthBuilder(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuthBuilder) Optional(java.util.Optional) NetworkPolicyPeerBuilder(io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyPeerBuilder) Condition(io.strimzi.api.kafka.model.status.Condition) ConfigProperty(org.eclipse.microprofile.config.inject.ConfigProperty) Kafka(io.strimzi.api.kafka.model.Kafka) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka) Collections(java.util.Collections) GenericKafkaListenerConfigurationBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBuilder) Pod(io.fabric8.kubernetes.api.model.Pod) Kafka(io.strimzi.api.kafka.model.Kafka) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka)

Aggregations

ManagedKafka (org.bf2.operator.resources.v1alpha1.ManagedKafka): 45
Kafka (io.strimzi.api.kafka.model.Kafka): 31
Test (org.junit.jupiter.api.Test): 24
List (java.util.List): 19
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 19
QuarkusTest (io.quarkus.test.junit.QuarkusTest): 18
Map (java.util.Map): 18
ArrayList (java.util.ArrayList): 17
Inject (javax.inject.Inject): 13
Quantity (io.fabric8.kubernetes.api.model.Quantity): 12
Optional (java.util.Optional): 11
Collections (java.util.Collections): 10
Collectors (java.util.stream.Collectors): 10
HashMap (java.util.HashMap): 9
Objects (java.util.Objects): 9
StrimziManager (org.bf2.operator.managers.StrimziManager): 9
Logger (org.jboss.logging.Logger): 9
KubernetesClient (io.fabric8.kubernetes.client.KubernetesClient): 8
IOException (java.io.IOException): 8
ManagedKafkaUtils.exampleManagedKafka (org.bf2.operator.utils.ManagedKafkaUtils.exampleManagedKafka): 8