
Example 16 with GenericKafkaListener

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener in the strimzi project by strimzi.

From the class ListenersValidatorTest, method testTlsCustomCertOnNonTlsListener.

@ParallelTest
public void testTlsCustomCertOnNonTlsListener() {
    GenericKafkaListener listener1 = new GenericKafkaListenerBuilder()
            .withName("plain").withPort(9092).withType(KafkaListenerType.INTERNAL).withTls(false)
            .withNewConfiguration()
                .withNewBrokerCertChainAndKey().withCertificate("cert").withKey("key").withSecretName("secretName").endBrokerCertChainAndKey()
            .endConfiguration()
            .build();
    List<GenericKafkaListener> listeners = asList(listener1);
    assertThat(ListenersValidator.validateAndGetErrorMessages(3, listeners), containsInAnyOrder("listener plain cannot configure custom TLS certificate with disabled TLS encryption"));
}
Also used : GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) ParallelTest(io.strimzi.test.annotations.ParallelTest)
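
For contrast, here is a minimal sketch, using the same builder and validator API shown above, of a listener that should pass this particular check because TLS is enabled. The test name, listener name, port, and secret values are purely illustrative, the assertion assumes Hamcrest's Matchers.empty() is statically imported, and the expected outcome is an assumption rather than something taken from the Strimzi test suite.

@ParallelTest
public void testTlsCustomCertOnTlsListener() {
    GenericKafkaListener listener = new GenericKafkaListenerBuilder()
            .withName("tls")                      // illustrative name
            .withPort(9093)                       // illustrative port
            .withType(KafkaListenerType.INTERNAL)
            .withTls(true)                        // TLS enabled, so a custom certificate is allowed
            .withNewConfiguration()
                .withNewBrokerCertChainAndKey().withCertificate("cert").withKey("key").withSecretName("secretName").endBrokerCertChainAndKey()
            .endConfiguration()
            .build();
    // Assumption: validation reports no error for this listener configuration.
    assertThat(ListenersValidator.validateAndGetErrorMessages(3, asList(listener)), is(empty()));
}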

Example 17 with GenericKafkaListener

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener in the strimzi project by strimzi.

From the class KafkaAssemblyOperatorTest, method createCluster.

private void createCluster(VertxTestContext context, Kafka kafka, List<Secret> secrets) {
    KafkaCluster kafkaCluster = KafkaCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);
    ZookeeperCluster zookeeperCluster = ZookeeperCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);
    EntityOperator entityOperator = EntityOperator.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);
    // create CM, Service, headless service, statefulset and so on
    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(openShift);
    ClusterOperatorConfig config = ResourceUtils.dummyClusterOperatorConfig(VERSIONS);
    var mockKafkaOps = supplier.kafkaOperator;
    ConfigMapOperator mockCmOps = supplier.configMapOperations;
    ServiceOperator mockServiceOps = supplier.serviceOperations;
    StatefulSetOperator mockStsOps = supplier.stsOperations;
    PvcOperator mockPvcOps = supplier.pvcOperations;
    PodOperator mockPodOps = supplier.podOperations;
    DeploymentOperator mockDepOps = supplier.deploymentOperations;
    SecretOperator mockSecretOps = supplier.secretOperations;
    NetworkPolicyOperator mockPolicyOps = supplier.networkPolicyOperator;
    PodDisruptionBudgetOperator mockPdbOps = supplier.podDisruptionBudgetOperator;
    RouteOperator mockRouteOps = supplier.routeOperations;
    IngressOperator mockIngressOps = supplier.ingressOperations;
    NodeOperator mockNodeOps = supplier.nodeOperator;
    CrdOperator<KubernetesClient, StrimziPodSet, StrimziPodSetList> mockPodSetOps = supplier.strimziPodSetOperator;
    // Create a CM
    String kafkaName = kafka.getMetadata().getName();
    String kafkaNamespace = kafka.getMetadata().getNamespace();
    when(mockKafkaOps.get(kafkaNamespace, kafkaName)).thenReturn(null);
    when(mockKafkaOps.getAsync(eq(kafkaNamespace), eq(kafkaName))).thenReturn(Future.succeededFuture(kafka));
    when(mockKafkaOps.updateStatusAsync(any(), any(Kafka.class))).thenReturn(Future.succeededFuture());
    when(mockPodSetOps.reconcile(any(), any(), any(), any())).thenReturn(Future.succeededFuture(ReconcileResult.created(new StrimziPodSet())));
    when(mockPodSetOps.getAsync(any(), any())).thenReturn(Future.succeededFuture(null));
    ArgumentCaptor<Service> serviceCaptor = ArgumentCaptor.forClass(Service.class);
    ArgumentCaptor<NetworkPolicy> policyCaptor = ArgumentCaptor.forClass(NetworkPolicy.class);
    ArgumentCaptor<PodDisruptionBudget> pdbCaptor = ArgumentCaptor.forClass(PodDisruptionBudget.class);
    ArgumentCaptor<io.fabric8.kubernetes.api.model.policy.v1beta1.PodDisruptionBudget> pdbV1Beta1Captor = ArgumentCaptor.forClass(io.fabric8.kubernetes.api.model.policy.v1beta1.PodDisruptionBudget.class);
    ArgumentCaptor<StatefulSet> ssCaptor = ArgumentCaptor.forClass(StatefulSet.class);
    when(mockStsOps.reconcile(any(), eq(kafkaNamespace), eq(ZookeeperCluster.zookeeperClusterName(kafkaName)), ssCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new StatefulSet())));
    when(mockStsOps.scaleDown(any(), anyString(), anyString(), anyInt())).thenReturn(Future.succeededFuture(null));
    when(mockStsOps.scaleUp(any(), anyString(), anyString(), anyInt())).thenReturn(Future.succeededFuture(42));
    AtomicReference<StatefulSet> ref = new AtomicReference<>();
    when(mockStsOps.reconcile(any(), eq(kafkaNamespace), eq(KafkaCluster.kafkaClusterName(kafkaName)), ssCaptor.capture())).thenAnswer(i -> {
        StatefulSet sts = new StatefulSetBuilder().withNewMetadata().withName(kafkaName + "kafka").withNamespace(kafkaNamespace).addToLabels(Labels.STRIMZI_CLUSTER_LABEL, kafkaName).endMetadata().withNewSpec().withReplicas(3).endSpec().build();
        ref.set(sts);
        return Future.succeededFuture(ReconcileResult.created(sts));
    });
    when(mockPolicyOps.reconcile(any(), anyString(), anyString(), policyCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new NetworkPolicy())));
    when(mockStsOps.getAsync(eq(kafkaNamespace), eq(ZookeeperCluster.zookeeperClusterName(kafkaName)))).thenReturn(Future.succeededFuture());
    when(mockStsOps.getAsync(eq(kafkaNamespace), eq(KafkaCluster.kafkaClusterName(kafkaName)))).thenAnswer(i -> Future.succeededFuture(ref.get()));
    when(mockPdbOps.reconcile(any(), anyString(), anyString(), pdbCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new PodDisruptionBudget())));
    // Service mocks
    Set<Service> createdServices = new HashSet<>();
    createdServices.add(kafkaCluster.generateService());
    createdServices.add(kafkaCluster.generateHeadlessService());
    createdServices.addAll(kafkaCluster.generateExternalBootstrapServices());
    int replicas = kafkaCluster.getReplicas();
    for (int i = 0; i < replicas; i++) {
        createdServices.addAll(kafkaCluster.generateExternalServices(i));
    }
    Map<String, Service> expectedServicesMap = createdServices.stream().collect(Collectors.toMap(s -> s.getMetadata().getName(), s -> s));
    when(mockServiceOps.get(eq(kafkaNamespace), anyString())).thenAnswer(i -> Future.succeededFuture(expectedServicesMap.get(i.<String>getArgument(1))));
    when(mockServiceOps.getAsync(eq(kafkaNamespace), anyString())).thenAnswer(i -> {
        Service svc = expectedServicesMap.get(i.<String>getArgument(1));
        if (svc != null && "NodePort".equals(svc.getSpec().getType())) {
            svc.getSpec().getPorts().get(0).setNodePort(32000);
        }
        return Future.succeededFuture(svc);
    });
    when(mockServiceOps.reconcile(any(), anyString(), anyString(), serviceCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new Service())));
    when(mockServiceOps.endpointReadiness(any(), anyString(), any(), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    when(mockServiceOps.listAsync(eq(kafkaNamespace), any(Labels.class))).thenReturn(Future.succeededFuture(emptyList()));
    // Ingress mocks
    when(mockIngressOps.listAsync(eq(kafkaNamespace), any(Labels.class))).thenReturn(Future.succeededFuture(emptyList()));
    // Route Mocks
    if (openShift) {
        Set<Route> expectedRoutes = new HashSet<>(kafkaCluster.generateExternalBootstrapRoutes());
        for (int i = 0; i < replicas; i++) {
            expectedRoutes.addAll(kafkaCluster.generateExternalRoutes(i));
        }
        Map<String, Route> expectedRoutesMap = expectedRoutes.stream().collect(Collectors.toMap(s -> s.getMetadata().getName(), s -> s));
        when(mockRouteOps.get(eq(kafkaNamespace), anyString())).thenAnswer(i -> Future.succeededFuture(expectedRoutesMap.get(i.<String>getArgument(1))));
        when(mockRouteOps.getAsync(eq(kafkaNamespace), anyString())).thenAnswer(i -> {
            Route rt = expectedRoutesMap.get(i.<String>getArgument(1));
            if (rt != null) {
                RouteStatus st = new RouteStatusBuilder().withIngress(new RouteIngressBuilder().withHost("host").build()).build();
                rt.setStatus(st);
            }
            return Future.succeededFuture(rt);
        });
        when(mockRouteOps.listAsync(eq(kafkaNamespace), any(Labels.class))).thenReturn(Future.succeededFuture(emptyList()));
    }
    // Mock pod readiness
    when(mockPodOps.readiness(any(), anyString(), anyString(), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    when(mockPodOps.listAsync(anyString(), any(Labels.class))).thenReturn(Future.succeededFuture(emptyList()));
    // Mock node ops
    when(mockNodeOps.listAsync(any(Labels.class))).thenReturn(Future.succeededFuture(emptyList()));
    Map<String, PersistentVolumeClaim> zkPvcs = createPvcs(kafkaNamespace, zookeeperCluster.getStorage(), zookeeperCluster.getReplicas(), (replica, storageId) -> AbstractModel.VOLUME_NAME + "-" + ZookeeperCluster.zookeeperPodName(kafkaName, replica));
    Map<String, PersistentVolumeClaim> kafkaPvcs = createPvcs(kafkaNamespace, kafkaCluster.getStorage(), kafkaCluster.getReplicas(), (replica, storageId) -> {
        String name = VolumeUtils.createVolumePrefix(storageId, false);
        return name + "-" + KafkaCluster.kafkaPodName(kafkaName, replica);
    });
    when(mockPvcOps.get(eq(kafkaNamespace), ArgumentMatchers.startsWith("data-"))).thenAnswer(invocation -> {
        String pvcName = invocation.getArgument(1);
        if (pvcName.contains(zookeeperCluster.getName())) {
            return zkPvcs.get(pvcName);
        } else if (pvcName.contains(kafkaCluster.getName())) {
            return kafkaPvcs.get(pvcName);
        }
        return null;
    });
    when(mockPvcOps.getAsync(eq(kafkaNamespace), ArgumentMatchers.startsWith("data-"))).thenAnswer(invocation -> {
        String pvcName = invocation.getArgument(1);
        if (pvcName.contains(zookeeperCluster.getName())) {
            return Future.succeededFuture(zkPvcs.get(pvcName));
        } else if (pvcName.contains(kafkaCluster.getName())) {
            return Future.succeededFuture(kafkaPvcs.get(pvcName));
        }
        return Future.succeededFuture(null);
    });
    when(mockPvcOps.listAsync(eq(kafkaNamespace), ArgumentMatchers.any(Labels.class))).thenAnswer(invocation -> Future.succeededFuture(Collections.EMPTY_LIST));
    Set<String> expectedPvcs = new HashSet<>(zkPvcs.keySet());
    expectedPvcs.addAll(kafkaPvcs.keySet());
    ArgumentCaptor<PersistentVolumeClaim> pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class);
    when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture());
    Set<String> expectedSecrets = set(KafkaCluster.clientsCaKeySecretName(kafkaName), KafkaCluster.clientsCaCertSecretName(kafkaName), KafkaCluster.clusterCaCertSecretName(kafkaName), KafkaCluster.clusterCaKeySecretName(kafkaName), KafkaCluster.brokersSecretName(kafkaName), ZookeeperCluster.nodesSecretName(kafkaName), ClusterOperator.secretName(kafkaName));
    if (metrics) {
        expectedSecrets.add(KafkaExporter.secretName(kafkaName));
    }
    expectedSecrets.addAll(secrets.stream().map(s -> s.getMetadata().getName()).collect(Collectors.toSet()));
    if (eoConfig != null) {
        // these Secrets are expected only when the Entity Operator is deployed by the Cluster Operator
        expectedSecrets.add(EntityTopicOperator.secretName(kafkaName));
        expectedSecrets.add(EntityUserOperator.secretName(kafkaName));
    }
    when(mockDepOps.reconcile(any(), anyString(), anyString(), any())).thenAnswer(invocation -> {
        String name = invocation.getArgument(2);
        Deployment desired = invocation.getArgument(3);
        if (desired != null) {
            if (name.contains("operator")) {
                if (entityOperator != null) {
                    context.verify(() -> assertThat(desired.getMetadata().getName(), is(EntityOperator.entityOperatorName(kafkaName))));
                }
            } else if (name.contains("exporter")) {
                context.verify(() -> assertThat(metrics, is(true)));
            }
        }
        return Future.succeededFuture(desired != null ? ReconcileResult.created(desired) : ReconcileResult.deleted());
    });
    when(mockDepOps.getAsync(anyString(), anyString())).thenReturn(Future.succeededFuture());
    when(mockDepOps.waitForObserved(any(), anyString(), anyString(), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    when(mockDepOps.readiness(any(), anyString(), anyString(), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    Map<String, Secret> secretsMap = secrets.stream().collect(Collectors.toMap(s -> s.getMetadata().getName(), s -> s));
    when(mockSecretOps.list(anyString(), any())).thenAnswer(i -> new ArrayList<>(secretsMap.values()));
    when(mockSecretOps.getAsync(anyString(), any())).thenAnswer(i -> Future.succeededFuture(secretsMap.get(i.<String>getArgument(1))));
    when(mockSecretOps.getAsync(kafkaNamespace, KafkaResources.clusterCaCertificateSecretName(kafkaName))).thenAnswer(i -> Future.succeededFuture(secretsMap.get(i.<String>getArgument(1))));
    when(mockSecretOps.getAsync(kafkaNamespace, ClusterOperator.secretName(kafkaName))).thenAnswer(i -> Future.succeededFuture(secretsMap.get(i.<String>getArgument(1))));
    when(mockSecretOps.reconcile(any(), anyString(), anyString(), any())).thenAnswer(invocation -> {
        Secret desired = invocation.getArgument(3);
        if (desired != null) {
            secretsMap.put(desired.getMetadata().getName(), desired);
        }
        return Future.succeededFuture(ReconcileResult.created(new Secret()));
    });
    ArgumentCaptor<ConfigMap> metricsCaptor = ArgumentCaptor.forClass(ConfigMap.class);
    ArgumentCaptor<String> metricsNameCaptor = ArgumentCaptor.forClass(String.class);
    when(mockCmOps.reconcile(any(), anyString(), metricsNameCaptor.capture(), metricsCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new ConfigMap())));
    ArgumentCaptor<ConfigMap> logCaptor = ArgumentCaptor.forClass(ConfigMap.class);
    ArgumentCaptor<String> logNameCaptor = ArgumentCaptor.forClass(String.class);
    when(mockCmOps.reconcile(any(), anyString(), logNameCaptor.capture(), logCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new ConfigMap())));
    ConfigMap metricsCm = kafkaCluster.generateAncillaryConfigMap(new MetricsAndLogging(metricsCM, null), emptySet(), emptySet(), false);
    when(mockCmOps.getAsync(kafkaNamespace, KafkaCluster.metricAndLogConfigsName(kafkaName))).thenReturn(Future.succeededFuture(metricsCm));
    when(mockCmOps.getAsync(kafkaNamespace, metricsCMName)).thenReturn(Future.succeededFuture(metricsCM));
    when(mockCmOps.getAsync(kafkaNamespace, differentMetricsCMName)).thenReturn(Future.succeededFuture(metricsCM));
    when(mockCmOps.getAsync(anyString(), eq(JmxTrans.jmxTransConfigName(kafkaName)))).thenReturn(Future.succeededFuture(new ConfigMapBuilder().withNewMetadata().withResourceVersion("123").endMetadata().build()));
    ArgumentCaptor<Route> routeCaptor = ArgumentCaptor.forClass(Route.class);
    ArgumentCaptor<String> routeNameCaptor = ArgumentCaptor.forClass(String.class);
    if (openShift) {
        when(mockRouteOps.reconcile(any(), eq(kafkaNamespace), routeNameCaptor.capture(), routeCaptor.capture())).thenReturn(Future.succeededFuture(ReconcileResult.created(new Route())));
    }
    KafkaAssemblyOperator ops = new KafkaAssemblyOperator(vertx, new PlatformFeaturesAvailability(openShift, kubernetesVersion), certManager, passwordGenerator, supplier, config);
    // Now try to create a KafkaCluster based on this CM
    Checkpoint async = context.checkpoint();
    ops.createOrUpdate(new Reconciliation("test-trigger", Kafka.RESOURCE_KIND, kafkaNamespace, kafkaName), kafka).onComplete(context.succeeding(v -> context.verify(() -> {
        // We expect a headless and headful service
        Set<String> expectedServices = set(ZookeeperCluster.headlessServiceName(kafkaName), ZookeeperCluster.serviceName(kafkaName), KafkaCluster.serviceName(kafkaName), KafkaCluster.headlessServiceName(kafkaName));
        if (kafkaListeners != null) {
            List<GenericKafkaListener> externalListeners = ListenersUtils.externalListeners(kafkaListeners);
            for (GenericKafkaListener listener : externalListeners) {
                expectedServices.add(ListenersUtils.backwardsCompatibleBootstrapServiceName(kafkaName, listener));
                for (int i = 0; i < kafkaCluster.getReplicas(); i++) {
                    expectedServices.add(ListenersUtils.backwardsCompatibleBrokerServiceName(kafkaName, i, listener));
                }
            }
        }
        List<Service> capturedServices = serviceCaptor.getAllValues();
        assertThat(capturedServices.stream().filter(Objects::nonNull).map(svc -> svc.getMetadata().getName()).collect(Collectors.toSet()).size(), is(expectedServices.size()));
        assertThat(capturedServices.stream().filter(Objects::nonNull).map(svc -> svc.getMetadata().getName()).collect(Collectors.toSet()), is(expectedServices));
        // Assertions on the statefulset
        List<StatefulSet> capturedSs = ssCaptor.getAllValues();
        // We expect a statefulSet for kafka and zookeeper...
        assertThat(capturedSs.stream().map(sts -> sts.getMetadata().getName()).collect(Collectors.toSet()), is(set(KafkaCluster.kafkaClusterName(kafkaName), ZookeeperCluster.zookeeperClusterName(kafkaName))));
        // expected Secrets with certificates
        assertThat(new TreeSet<>(secretsMap.keySet()), is(new TreeSet<>(expectedSecrets)));
        // Check PDBs
        assertThat(pdbCaptor.getAllValues(), hasSize(2));
        assertThat(pdbCaptor.getAllValues().stream().map(sts -> sts.getMetadata().getName()).collect(Collectors.toSet()), is(set(KafkaCluster.kafkaClusterName(kafkaName), ZookeeperCluster.zookeeperClusterName(kafkaName))));
        // Check PVCs
        assertThat(pvcCaptor.getAllValues(), hasSize(expectedPvcs.size()));
        assertThat(pvcCaptor.getAllValues().stream().map(pvc -> pvc.getMetadata().getName()).collect(Collectors.toSet()), is(expectedPvcs));
        for (PersistentVolumeClaim pvc : pvcCaptor.getAllValues()) {
            assertThat(pvc.getMetadata().getAnnotations(), hasKey(AbstractModel.ANNO_STRIMZI_IO_DELETE_CLAIM));
        }
        // Verify deleted routes
        if (openShift) {
            Set<String> expectedRoutes = set(KafkaCluster.serviceName(kafkaName));
            for (int i = 0; i < kafkaCluster.getReplicas(); i++) {
                expectedRoutes.add(KafkaCluster.externalServiceName(kafkaName, i));
            }
            assertThat(captured(routeNameCaptor), is(expectedRoutes));
        } else {
            assertThat(routeNameCaptor.getAllValues(), hasSize(0));
        }
        async.flag();
    })));
}
Also used : RouteIngressBuilder(io.fabric8.openshift.api.model.RouteIngressBuilder) JmxTransQueryTemplateBuilder(io.strimzi.api.kafka.model.template.JmxTransQueryTemplateBuilder) ArgumentMatchers(org.mockito.ArgumentMatchers) KafkaJmxOptions(io.strimzi.api.kafka.model.KafkaJmxOptions) ArgumentMatchers.eq(org.mockito.ArgumentMatchers.eq) PodDisruptionBudget(io.fabric8.kubernetes.api.model.policy.v1.PodDisruptionBudget) AfterAll(org.junit.jupiter.api.AfterAll) PersistentClaimStorage(io.strimzi.api.kafka.model.storage.PersistentClaimStorage) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) RouteStatusBuilder(io.fabric8.openshift.api.model.RouteStatusBuilder) KafkaResources(io.strimzi.api.kafka.model.KafkaResources) BeforeAll(org.junit.jupiter.api.BeforeAll) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) Mockito.doAnswer(org.mockito.Mockito.doAnswer) ResourceOperatorSupplier(io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier) AbstractModel(io.strimzi.operator.cluster.model.AbstractModel) StatefulSetOperator(io.strimzi.operator.cluster.operator.resource.StatefulSetOperator) KafkaJmxOptionsBuilder(io.strimzi.api.kafka.model.KafkaJmxOptionsBuilder) KafkaVersion(io.strimzi.operator.cluster.model.KafkaVersion) Set(java.util.Set) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) EphemeralStorage(io.strimzi.api.kafka.model.storage.EphemeralStorage) PasswordGenerator(io.strimzi.operator.common.PasswordGenerator) PersistentVolumeClaim(io.fabric8.kubernetes.api.model.PersistentVolumeClaim) RouteOperator(io.strimzi.operator.common.operator.resource.RouteOperator) EntityOperator(io.strimzi.operator.cluster.model.EntityOperator) ClusterCa(io.strimzi.operator.cluster.model.ClusterCa) PersistentVolumeClaimBuilder(io.fabric8.kubernetes.api.model.PersistentVolumeClaimBuilder) EntityUserOperatorSpecBuilder(io.strimzi.api.kafka.model.EntityUserOperatorSpecBuilder) ClusterOperatorConfig(io.strimzi.operator.cluster.ClusterOperatorConfig) StatefulSetBuilder(io.fabric8.kubernetes.api.model.apps.StatefulSetBuilder) VertxTestContext(io.vertx.junit5.VertxTestContext) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) IngressOperator(io.strimzi.operator.common.operator.resource.IngressOperator) EntityUserOperator(io.strimzi.operator.cluster.model.EntityUserOperator) KafkaBuilder(io.strimzi.api.kafka.model.KafkaBuilder) TreeSet(java.util.TreeSet) ArrayList(java.util.ArrayList) PersistentClaimStorageBuilder(io.strimzi.api.kafka.model.storage.PersistentClaimStorageBuilder) KafkaVersionTestUtils(io.strimzi.operator.cluster.KafkaVersionTestUtils) CrdOperator(io.strimzi.operator.common.operator.resource.CrdOperator) Matchers.hasSize(org.hamcrest.Matchers.hasSize) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) KafkaJmxAuthenticationPasswordBuilder(io.strimzi.api.kafka.model.KafkaJmxAuthenticationPasswordBuilder) KubernetesVersion(io.strimzi.operator.KubernetesVersion) Vertx(io.vertx.core.Vertx) JmxTransSpecBuilder(io.strimzi.api.kafka.model.JmxTransSpecBuilder) Mockito.times(org.mockito.Mockito.times) StatefulSet(io.fabric8.kubernetes.api.model.apps.StatefulSet) PvcOperator(io.strimzi.operator.common.operator.resource.PvcOperator) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) ConfigMapBuilder(io.fabric8.kubernetes.api.model.ConfigMapBuilder) Reconciliation(io.strimzi.operator.common.Reconciliation) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) 
Mockito.never(org.mockito.Mockito.never) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) CoreMatchers.is(org.hamcrest.CoreMatchers.is) Storage(io.strimzi.api.kafka.model.storage.Storage) BiFunction(java.util.function.BiFunction) Timeout(io.vertx.junit5.Timeout) Matchers.hasKey(org.hamcrest.Matchers.hasKey) KafkaExporter(io.strimzi.operator.cluster.model.KafkaExporter) Route(io.fabric8.openshift.api.model.Route) PodOperator(io.strimzi.operator.common.operator.resource.PodOperator) ResourceUtils(io.strimzi.operator.cluster.ResourceUtils) KafkaExporterSpec(io.strimzi.api.kafka.model.KafkaExporterSpec) MethodSource(org.junit.jupiter.params.provider.MethodSource) ListenersUtils(io.strimzi.operator.cluster.model.ListenersUtils) Collections.emptyList(java.util.Collections.emptyList) DeploymentOperator(io.strimzi.operator.common.operator.resource.DeploymentOperator) SecretOperator(io.strimzi.operator.common.operator.resource.SecretOperator) ClientsCa(io.strimzi.operator.cluster.model.ClientsCa) VertxExtension(io.vertx.junit5.VertxExtension) Future(io.vertx.core.Future) Collectors(java.util.stream.Collectors) EntityTopicOperator(io.strimzi.operator.cluster.model.EntityTopicOperator) Objects(java.util.Objects) List(java.util.List) Labels(io.strimzi.operator.common.model.Labels) StrimziPodSet(io.strimzi.api.kafka.model.StrimziPodSet) Secret(io.fabric8.kubernetes.api.model.Secret) Optional(java.util.Optional) Checkpoint(io.vertx.junit5.Checkpoint) PodDisruptionBudgetOperator(io.strimzi.operator.common.operator.resource.PodDisruptionBudgetOperator) PlatformFeaturesAvailability(io.strimzi.operator.PlatformFeaturesAvailability) MockCertManager(io.strimzi.operator.common.operator.MockCertManager) EntityOperatorSpecBuilder(io.strimzi.api.kafka.model.EntityOperatorSpecBuilder) JmxTrans(io.strimzi.operator.cluster.model.JmxTrans) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) EntityTopicOperatorSpecBuilder(io.strimzi.api.kafka.model.EntityTopicOperatorSpecBuilder) KafkaStatus(io.strimzi.api.kafka.model.status.KafkaStatus) ArgumentMatchers.anyLong(org.mockito.ArgumentMatchers.anyLong) NetworkPolicyOperator(io.strimzi.operator.common.operator.resource.NetworkPolicyOperator) SingleVolumeStorage(io.strimzi.api.kafka.model.storage.SingleVolumeStorage) HashMap(java.util.HashMap) ZookeeperCluster(io.strimzi.operator.cluster.model.ZookeeperCluster) VolumeUtils(io.strimzi.operator.cluster.model.VolumeUtils) AtomicReference(java.util.concurrent.atomic.AtomicReference) MetricsAndLogging(io.strimzi.operator.common.MetricsAndLogging) HashSet(java.util.HashSet) JmxPrometheusExporterMetrics(io.strimzi.api.kafka.model.JmxPrometheusExporterMetrics) ServiceOperator(io.strimzi.operator.common.operator.resource.ServiceOperator) ArgumentCaptor(org.mockito.ArgumentCaptor) KafkaCluster(io.strimzi.operator.cluster.model.KafkaCluster) ClusterOperator(io.strimzi.operator.cluster.ClusterOperator) ConfigMapOperator(io.strimzi.operator.common.operator.resource.ConfigMapOperator) InlineLogging(io.strimzi.api.kafka.model.InlineLogging) TestUtils(io.strimzi.test.TestUtils) ReconcileResult(io.strimzi.operator.common.operator.resource.ReconcileResult) Collections.singletonMap(java.util.Collections.singletonMap) Service(io.fabric8.kubernetes.api.model.Service) RouteStatus(io.fabric8.openshift.api.model.RouteStatus) 
StatefulSetDiff(io.strimzi.operator.cluster.operator.resource.StatefulSetDiff) ArgumentMatchers.anyInt(org.mockito.ArgumentMatchers.anyInt) EntityOperatorSpec(io.strimzi.api.kafka.model.EntityOperatorSpec) CruiseControl(io.strimzi.operator.cluster.model.CruiseControl) Collections.emptyMap(java.util.Collections.emptyMap) NodeOperator(io.strimzi.operator.common.operator.resource.NodeOperator) StrimziPodSetList(io.strimzi.api.kafka.StrimziPodSetList) Collections.emptySet(java.util.Collections.emptySet) JmxTransOutputDefinitionTemplateBuilder(io.strimzi.api.kafka.model.template.JmxTransOutputDefinitionTemplateBuilder) TestUtils.set(io.strimzi.test.TestUtils.set) Mockito.when(org.mockito.Mockito.when) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) KubernetesClient(io.fabric8.kubernetes.client.KubernetesClient) NetworkPolicy(io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy) Kafka(io.strimzi.api.kafka.model.Kafka) Deployment(io.fabric8.kubernetes.api.model.apps.Deployment) Collections(java.util.Collections)
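
The createPvcs helper invoked above is not included in this snippet. As a rough, hypothetical sketch only, inferred from the call sites (namespace, storage, replica count, and a naming BiFunction) rather than taken from the actual test class, it might look like the following; it assumes a single persistent-claim volume with storage id 0 and ignores JBOD layouts.

private Map<String, PersistentVolumeClaim> createPvcs(String namespace, Storage storage, int replicas,
                                                      BiFunction<Integer, Integer, String> pvcNameFunction) {
    Map<String, PersistentVolumeClaim> pvcs = new HashMap<>();
    // Hypothetical simplification: only plain PersistentClaimStorage, one volume with storage id 0.
    if (storage instanceof PersistentClaimStorage) {
        for (int replica = 0; replica < replicas; replica++) {
            String pvcName = pvcNameFunction.apply(replica, 0);
            pvcs.put(pvcName, new PersistentVolumeClaimBuilder()
                    .withNewMetadata()
                        .withNamespace(namespace)
                        .withName(pvcName)
                    .endMetadata()
                    .build());
        }
    }
    return pvcs;
}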

Example 18 with GenericKafkaListener

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener in the strimzi project by strimzi.

From the class CustomResourceStatusIsolatedST, method setup.

@BeforeAll
void setup(ExtensionContext extensionContext) {
    clusterOperator.unInstall();
    clusterOperator = new SetupClusterOperator.SetupClusterOperatorBuilder().withExtensionContext(BeforeAllOnce.getSharedExtensionContext()).withNamespace(Constants.INFRA_NAMESPACE).withOperationTimeout(Constants.CO_OPERATION_TIMEOUT_SHORT).createInstallation().runInstallation();
    GenericKafkaListener plain = new GenericKafkaListenerBuilder().withName(Constants.PLAIN_LISTENER_DEFAULT_NAME).withPort(9092).withType(KafkaListenerType.INTERNAL).withTls(false).build();
    GenericKafkaListener tls = new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093).withType(KafkaListenerType.INTERNAL).withTls(true).build();
    GenericKafkaListener nodePort = new GenericKafkaListenerBuilder().withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9094).withType(KafkaListenerType.NODEPORT).withTls(false).build();
    List<GenericKafkaListener> listeners;
    if (Environment.isNamespaceRbacScope()) {
        listeners = asList(plain, tls);
    } else {
        listeners = asList(plain, tls, nodePort);
    }
    KafkaBuilder kafkaBuilder = KafkaTemplates.kafkaEphemeral(CUSTOM_RESOURCE_STATUS_CLUSTER_NAME, 3, 3).editSpec().editKafka().withListeners(listeners).endKafka().endSpec();
    String kafkaClientsName = Constants.INFRA_NAMESPACE + "-shared-" + Constants.KAFKA_CLIENTS;
    resourceManager.createResource(extensionContext, kafkaBuilder.build());
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(CUSTOM_RESOURCE_STATUS_CLUSTER_NAME, TOPIC_NAME).build());
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
    topicOperatorReconciliationInterval = KafkaResource.kafkaClient().inNamespace(Constants.INFRA_NAMESPACE).withName(CUSTOM_RESOURCE_STATUS_CLUSTER_NAME).get().getSpec().getEntityOperator().getTopicOperator().getReconciliationIntervalSeconds() * 1_000 * 2 + 5_000;
}
Also used : GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) KafkaBuilder(io.strimzi.api.kafka.model.KafkaBuilder) StringContains.containsString(org.hamcrest.core.StringContains.containsString) BeforeAll(org.junit.jupiter.api.BeforeAll)
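
Because the only difference between the two branches is the NodePort listener, the same list can be built incrementally; the sketch below is merely an equivalent restructuring of the code above and assumes java.util.ArrayList is available.

List<GenericKafkaListener> listeners = new ArrayList<>(asList(plain, tls));
if (!Environment.isNamespaceRbacScope()) {
    // The NodePort listener is only added when the installation is not namespace-RBAC-scoped.
    listeners.add(nodePort);
}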

Example 19 with GenericKafkaListener

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener in the strimzi project by strimzi.

From the class KafkaST, method testRegenerateCertExternalAddressChange.

@ParallelNamespaceTest
@Tag(LOADBALANCER_SUPPORTED)
void testRegenerateCertExternalAddressChange(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(namespace, extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final LabelSelector kafkaSelector = KafkaResource.getLabelSelector(clusterName, KafkaResources.kafkaStatefulSetName(clusterName));
    LOGGER.info("Creating kafka without external listener");
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(clusterName, 3, 1).build());
    final String brokerSecret = clusterName + "-kafka-brokers";
    Secret secretsWithoutExt = kubeClient(namespaceName).getSecret(namespaceName, brokerSecret);
    LOGGER.info("Editing kafka with external listener");
    KafkaResource.replaceKafkaResourceInSpecificNamespace(clusterName, kafka -> {
        List<GenericKafkaListener> lst = asList(
                new GenericKafkaListenerBuilder()
                        .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME).withPort(9092).withType(KafkaListenerType.INTERNAL).withTls(false)
                        .build(),
                new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9094).withType(KafkaListenerType.LOADBALANCER).withTls(true)
                        .withNewConfiguration().withFinalizers(LB_FINALIZERS).endConfiguration()
                        .build());
        kafka.getSpec().getKafka().setListeners(lst);
    }, namespaceName);
    RollingUpdateUtils.waitTillComponentHasRolled(namespaceName, kafkaSelector, 3, PodUtils.podSnapshot(namespaceName, kafkaSelector));
    Secret secretsWithExt = kubeClient(namespaceName).getSecret(namespaceName, brokerSecret);
    LOGGER.info("Checking secrets");
    kubeClient(namespaceName).listPodsByPrefixInName(namespaceName, KafkaResources.kafkaStatefulSetName(clusterName)).forEach(kafkaPod -> {
        String kafkaPodName = kafkaPod.getMetadata().getName();
        assertThat(secretsWithExt.getData().get(kafkaPodName + ".crt"), is(not(secretsWithoutExt.getData().get(kafkaPodName + ".crt"))));
        assertThat(secretsWithExt.getData().get(kafkaPodName + ".key"), is(not(secretsWithoutExt.getData().get(kafkaPodName + ".key"))));
    });
}
Also used : Secret(io.fabric8.kubernetes.api.model.Secret) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) Matchers.containsString(org.hamcrest.Matchers.containsString) Matchers.emptyOrNullString(org.hamcrest.Matchers.emptyOrNullString) TestUtils.fromYamlString(io.strimzi.test.TestUtils.fromYamlString) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
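
Beyond comparing the raw Secret data, the regenerated certificates can also be inspected directly. The helper below is a hypothetical sketch (it is not part of the test above) that decodes a broker certificate stored base64-encoded under "<podName>.crt" in the Secret and returns its Subject Alternative Names, which is where the new load balancer address would be expected to appear; it uses only standard JDK classes.

import java.io.ByteArrayInputStream;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.Base64;
import java.util.Collection;
import java.util.List;

// Hypothetical helper: parse the certificate from the broker Secret and list its SANs.
static Collection<List<?>> subjectAlternativeNames(String base64Cert) throws Exception {
    byte[] certBytes = Base64.getDecoder().decode(base64Cert);
    X509Certificate cert = (X509Certificate) CertificateFactory.getInstance("X.509")
            .generateCertificate(new ByteArrayInputStream(certBytes));
    return cert.getSubjectAlternativeNames();
}

// Example usage (hypothetical): subjectAlternativeNames(secretsWithExt.getData().get(kafkaPodName + ".crt"))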

Example 20 with GenericKafkaListener

Use of io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener in the strimzi project by strimzi.

From the class OauthScopeIsolatedST, method testClientScopeKafkaSetIncorrectly.

@IsolatedTest("Modification of shared Kafka cluster")
void testClientScopeKafkaSetIncorrectly(ExtensionContext extensionContext) throws UnexpectedException {
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
    final String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final LabelSelector kafkaSelector = KafkaResource.getLabelSelector(oauthClusterName, KafkaResources.kafkaStatefulSetName(oauthClusterName));
    KafkaClients oauthInternalClientChecksJob = new KafkaClientsBuilder()
            .withNamespaceName(INFRA_NAMESPACE)
            .withProducerName(producerName)
            .withConsumerName(consumerName)
            .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + scopeListenerPort)
            .withTopicName(topicName)
            .withMessageCount(MESSAGE_COUNT)
            .withAdditionalConfig(additionalOauthConfig)
            .build();
    // re-configuring Kafka listener to have client scope assigned to null
    KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
        List<GenericKafkaListener> scopeListeners = kafka.getSpec().getKafka().getListeners().stream().filter(listener -> listener.getName().equals(scopeListener)).collect(Collectors.toList());
        ((KafkaListenerAuthenticationOAuth) scopeListeners.get(0).getAuth()).setClientScope(null);
        kafka.getSpec().getKafka().getListeners().set(0, scopeListeners.get(0));
    }, INFRA_NAMESPACE);
    RollingUpdateUtils.waitForComponentAndPodsReady(INFRA_NAMESPACE, kafkaSelector, 1);
    // verification phase: the client should fail here because clientScope is set to null
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
    resourceManager.createResource(extensionContext, oauthInternalClientChecksJob.producerStrimzi());
    // The client should fail here: the listener requires scope 'test' in the JWT token, but it was temporarily
    // configured without clientScope, so the token obtained (using the clientId and secret passed via SASL/PLAIN
    // in the client's name) carries no scope claim.
    ClientUtils.waitForClientTimeout(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
    JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    // rollback previous configuration
    // re-configuring Kafka listener to have client scope assigned to 'test'
    KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
        List<GenericKafkaListener> scopeListeners = kafka.getSpec().getKafka().getListeners().stream().filter(listener -> listener.getName().equals(scopeListener)).collect(Collectors.toList());
        ((KafkaListenerAuthenticationOAuth) scopeListeners.get(0).getAuth()).setClientScope("test");
        kafka.getSpec().getKafka().getListeners().set(0, scopeListeners.get(0));
    }, INFRA_NAMESPACE);
    RollingUpdateUtils.waitForComponentAndPodsReady(INFRA_NAMESPACE, kafkaSelector, 1);
}
Also used : KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest) CoreMatchers(org.hamcrest.CoreMatchers) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) KafkaClientsTemplates(io.strimzi.systemtest.templates.crd.KafkaClientsTemplates) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) KafkaConnectTemplates(io.strimzi.systemtest.templates.crd.KafkaConnectTemplates) CONNECT(io.strimzi.systemtest.Constants.CONNECT) KafkaResource(io.strimzi.systemtest.resources.crd.KafkaResource) Level(org.apache.logging.log4j.Level) ResourceManager.kubeClient(io.strimzi.systemtest.resources.ResourceManager.kubeClient) ExtensionContext(org.junit.jupiter.api.extension.ExtensionContext) INFRA_NAMESPACE(io.strimzi.systemtest.Constants.INFRA_NAMESPACE) AfterAll(org.junit.jupiter.api.AfterAll) PodUtils(io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils) KafkaResources(io.strimzi.api.kafka.model.KafkaResources) KubeClusterResource(io.strimzi.test.k8s.KubeClusterResource) BeforeAll(org.junit.jupiter.api.BeforeAll) Tag(org.junit.jupiter.api.Tag) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) StUtils(io.strimzi.systemtest.utils.StUtils) KafkaTemplates(io.strimzi.systemtest.templates.crd.KafkaTemplates) RollingUpdateUtils(io.strimzi.systemtest.utils.RollingUpdateUtils) IsolatedSuite(io.strimzi.systemtest.annotations.IsolatedSuite) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) JobUtils(io.strimzi.systemtest.utils.kubeUtils.controllers.JobUtils) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) UnexpectedException(java.rmi.UnexpectedException) OAUTH(io.strimzi.systemtest.Constants.OAUTH) Collectors(java.util.stream.Collectors) KafkaListenerAuthenticationOAuth(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuth) ClientUtils(io.strimzi.systemtest.utils.ClientUtils) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest) KeycloakUtils(io.strimzi.systemtest.utils.specific.KeycloakUtils) List(java.util.List) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) KafkaTopicTemplates(io.strimzi.systemtest.templates.crd.KafkaTopicTemplates) REGRESSION(io.strimzi.systemtest.Constants.REGRESSION) KafkaConnectResources(io.strimzi.api.kafka.model.KafkaConnectResources) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) LabelSelector(io.fabric8.kubernetes.api.model.LabelSelector) KafkaListenerAuthenticationOAuth(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuth) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest)
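
For reference, a hypothetical sketch of declaring a scope-enforcing listener up front with the builder API, rather than mutating an existing listener as the test does. The listener name, port, type, and TLS setting are assumptions (scopeListener and scopeListenerPort are fields of the test class and are not shown here), and withClientScope on the builder is inferred from the setClientScope call used above.

GenericKafkaListener scopedListener = new GenericKafkaListenerBuilder()
        .withName("scope")                        // assumed name
        .withPort(9098)                           // assumed port
        .withType(KafkaListenerType.INTERNAL)     // assumed type
        .withTls(false)                           // assumed TLS setting
        .withAuth(new KafkaListenerAuthenticationOAuthBuilder()
                .withClientScope("test")          // the scope the listener requires in the JWT token
                .build())
        .build();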

Aggregations

GenericKafkaListener (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener): 160 usages
GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder): 116 usages
ParallelTest (io.strimzi.test.annotations.ParallelTest): 102 usages
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 50 usages
ArrayList (java.util.ArrayList): 38 usages
GenericKafkaListenerConfigurationBrokerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerConfigurationBrokerBuilder): 18 usages
Matchers.containsString (org.hamcrest.Matchers.containsString): 16 usages
KafkaListenerAuthenticationOAuth (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuth): 12 usages
KafkaListenerAuthenticationOAuthBuilder (io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuthBuilder): 12 usages
Kafka (io.strimzi.api.kafka.model.Kafka): 10 usages
LabelSelector (io.fabric8.kubernetes.api.model.LabelSelector): 8 usages
HTTPIngressPathBuilder (io.fabric8.kubernetes.api.model.networking.v1.HTTPIngressPathBuilder): 8 usages
Ingress (io.fabric8.kubernetes.api.model.networking.v1.Ingress): 8 usages
IngressTLSBuilder (io.fabric8.kubernetes.api.model.networking.v1.IngressTLSBuilder): 8 usages
HashMap (java.util.HashMap): 8 usages
List (java.util.List): 8 usages
Collectors (java.util.stream.Collectors): 8 usages
NetworkPolicyIngressRule (io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyIngressRule): 6 usages
NetworkPolicyIngressRuleBuilder (io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicyIngressRuleBuilder): 6 usages
KafkaResources (io.strimzi.api.kafka.model.KafkaResources): 6 usages