Search in sources:

Example 6 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From the class HttpBridgeTlsST, method testSendSimpleMessageTls.

@ParallelTest
void testSendSimpleMessageTls(ExtensionContext extensionContext) {
    // Create topic
    String topicName = KafkaTopicUtils.generateRandomNameOfTopic();
    BridgeClients kafkaBridgeClientJobProduce = new BridgeClientsBuilder(kafkaBridgeClientJob).withTopicName(topicName).build();
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(httpBridgeTlsClusterName, topicName)
        .editMetadata()
            .withNamespace(namespace)
        .endMetadata()
        .build());
    resourceManager.createResource(extensionContext, kafkaBridgeClientJobProduce.producerStrimziBridge());
    ClientUtils.waitForClientSuccess(producerName, namespace, MESSAGE_COUNT);
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(httpBridgeTlsClusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withKafkaUsername(sharedKafkaUserName)
        .withUsingPodName(kafkaClientsPodName)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    assertThat(internalKafkaClient.receiveMessagesTls(), is(MESSAGE_COUNT));
}
Also used: BridgeClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) BridgeClients(io.strimzi.systemtest.kafkaclients.internalClients.BridgeClients) ParallelTest(io.strimzi.systemtest.annotations.ParallelTest)
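
A note on the assertion pattern: here only the consuming side is verified with receiveMessagesTls(), because the messages were produced through the HTTP Bridge job. When the same InternalKafkaClient both produces and consumes, the later examples call checkProducedAndConsumedMessages instead. A minimal sketch of that round-trip variant, using only builder methods that appear in these examples:

// Sketch: sendMessagesTls() and receiveMessagesTls() return the number of
// messages actually exchanged, so one call can assert that the counts match.
// Assumes the same TLS client settings as the builder above.
InternalKafkaClient roundTripClient = new InternalKafkaClient.Builder()
    .withTopicName(topicName)
    .withNamespaceName(namespace)
    .withClusterName(httpBridgeTlsClusterName)
    .withMessageCount(MESSAGE_COUNT)
    .withSecurityProtocol(SecurityProtocol.SSL)
    .withKafkaUsername(sharedKafkaUserName)
    .withUsingPodName(kafkaClientsPodName)
    .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .build();

// Asserts produced count == consumed count (the pattern used in Examples 7, 8, and 10).
roundTripClient.checkProducedAndConsumedMessages(
    roundTripClient.sendMessagesTls(),
    roundTripClient.receiveMessagesTls()
);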

Example 7 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From the class ConnectIsolatedST, method testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication.

@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName).build();
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
        .editSpec()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withNewTls()
                .addNewTrustedCertificate()
                    .withSecretName(clusterName + "-cluster-ca-cert")
                    .withCertificate("ca.crt")
                .endTrustedCertificate()
            .endTls()
            .withBootstrapServers(clusterName + "-kafka-bootstrap:9093")
            .withNewKafkaClientAuthenticationScramSha512()
                .withUsername(userName)
                .withNewPasswordSecret()
                    .withSecretName(userName)
                    .withPassword("password")
                .endPasswordSecret()
            .endKafkaClientAuthenticationScramSha512()
        .endSpec()
        .build());
    final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
    final String kafkaConnectLogs = kubeClient(namespaceName).logs(kafkaConnectPodName);
    final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
    LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
    assertThat(kafkaConnectLogs, not(containsString("ERROR")));
    LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", kafkaClientsPodName, topicName);
    KafkaConnectorUtils.createFileSinkConnector(namespaceName, kafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, namespaceName, 8083));
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName + "-second", kafkaUser).build());
    final String kafkaClientsSecondPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName + "-second").get(0).getMetadata().getName();
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsSecondPodName)
        .withTopicName(topicName)
        .withNamespaceName(namespaceName)
        .withClusterName(clusterName)
        .withKafkaUsername(userName)
        .withMessageCount(MESSAGE_COUNT)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();
    internalKafkaClient.checkProducedAndConsumedMessages(
        internalKafkaClient.sendMessagesTls(),
        internalKafkaClient.receiveMessagesTls()
    );
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
Also used: KafkaListenerAuthenticationScramSha512(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) ConfigMapVolumeSourceBuilder(io.fabric8.kubernetes.api.model.ConfigMapVolumeSourceBuilder) SecretVolumeSourceBuilder(io.fabric8.kubernetes.api.model.SecretVolumeSourceBuilder) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) PasswordSecretSourceBuilder(io.strimzi.api.kafka.model.PasswordSecretSourceBuilder) ResourceRequirementsBuilder(io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder) ConfigMapBuilder(io.fabric8.kubernetes.api.model.ConfigMapBuilder) SecretKeySelectorBuilder(io.fabric8.kubernetes.api.model.SecretKeySelectorBuilder) ConfigMapKeySelectorBuilder(io.fabric8.kubernetes.api.model.ConfigMapKeySelectorBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) Matchers.containsString(org.hamcrest.Matchers.containsString) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
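
The inline withNewTls().addNewTrustedCertificate() block above pins Connect's trust to the cluster CA certificate that Strimzi generates in the <cluster>-cluster-ca-cert secret. The same trust section could plausibly be written with the CertSecretSourceBuilder that appears in the import list; a hedged sketch, not taken from the test itself:

// Sketch: trusted-certificate config built with CertSecretSourceBuilder (imported
// above) instead of the inline addNewTrustedCertificate() block. Assumes the
// standard sundrio-generated withTrustedCertificates(...) setter.
CertSecretSource clusterCaCert = new CertSecretSourceBuilder()
    .withSecretName(clusterName + "-cluster-ca-cert")  // Strimzi's cluster CA secret
    .withCertificate("ca.crt")                         // key inside that secret
    .build();
// ...then in the Connect spec: .withNewTls().withTrustedCertificates(clusterCaCert).endTls()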

Example 8 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From the class ConnectIsolatedST, method testKafkaConnectWithPlainAndScramShaAuthentication.

@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testKafkaConnectWithPlainAndScramShaAuthentication(ExtensionContext extensionContext) {
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    // Kafka with a single plain (non-TLS) listener secured by SCRAM-SHA-512
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
        .editSpec()
            .editKafka()
                .withListeners(new GenericKafkaListenerBuilder()
                    .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                    .withPort(9092)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(false)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
            .endKafka()
        .endSpec()
        .build());
    KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName).build();
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
    resourceManager.createResource(extensionContext, kafkaUser);
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
        .withNewSpec()
            .withBootstrapServers(KafkaResources.plainBootstrapAddress(clusterName))
            .withNewKafkaClientAuthenticationScramSha512()
                .withUsername(userName)
                .withPasswordSecret(new PasswordSecretSourceBuilder()
                    .withSecretName(userName)
                    .withPassword("password")
                    .build())
            .endKafkaClientAuthenticationScramSha512()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .withVersion(Environment.ST_KAFKA_VERSION)
            .withReplicas(1)
        .endSpec()
        .build());
    final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
    final String kafkaConnectLogs = kubeClient(namespaceName).logs(kafkaConnectPodName);
    final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
    KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodName);
    LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
    assertThat(kafkaConnectLogs, not(containsString("ERROR")));
    LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", kafkaClientsPodName, topicName);
    KafkaConnectorUtils.createFileSinkConnector(namespaceName, kafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, namespaceName, 8083));
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, false, kafkaClientsName + "-second", kafkaUser).build());
    final String kafkaClientsSecondPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName + "-second").get(0).getMetadata().getName();
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsSecondPodName)
        .withTopicName(topicName)
        .withNamespaceName(namespaceName)
        .withClusterName(clusterName)
        .withKafkaUsername(userName)
        .withMessageCount(MESSAGE_COUNT)
        .withListenerName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
        .build();
    internalKafkaClient.checkProducedAndConsumedMessages(
        internalKafkaClient.sendMessagesPlain(),
        internalKafkaClient.receiveMessagesPlain()
    );
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
Also used: KafkaListenerAuthenticationScramSha512(io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) ConfigMapVolumeSourceBuilder(io.fabric8.kubernetes.api.model.ConfigMapVolumeSourceBuilder) SecretVolumeSourceBuilder(io.fabric8.kubernetes.api.model.SecretVolumeSourceBuilder) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) PasswordSecretSourceBuilder(io.strimzi.api.kafka.model.PasswordSecretSourceBuilder) ResourceRequirementsBuilder(io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder) ConfigMapBuilder(io.fabric8.kubernetes.api.model.ConfigMapBuilder) SecretKeySelectorBuilder(io.fabric8.kubernetes.api.model.SecretKeySelectorBuilder) ConfigMapKeySelectorBuilder(io.fabric8.kubernetes.api.model.ConfigMapKeySelectorBuilder) SecretBuilder(io.fabric8.kubernetes.api.model.SecretBuilder) InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) Matchers.containsString(org.hamcrest.Matchers.containsString) KafkaUser(io.strimzi.api.kafka.model.KafkaUser) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest) Tag(org.junit.jupiter.api.Tag)
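
Examples 7 and 8 differ mainly in the listener: TLS on port 9093 versus plain on port 9092, both authenticated with SCRAM-SHA-512, with the Connect spec's bootstrap address and TLS section changing accordingly. The two listener definitions, extracted side by side as a sketch assembled from the code above:

// TLS listener (Example 7): encrypted internal traffic on 9093.
GenericKafkaListener tlsListener = new GenericKafkaListenerBuilder()
    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .withPort(9093)
    .withType(KafkaListenerType.INTERNAL)
    .withTls(true)
    .withAuth(new KafkaListenerAuthenticationScramSha512())
    .build();

// Plain listener (Example 8): unencrypted internal traffic on 9092, same SCRAM auth.
GenericKafkaListener plainListener = new GenericKafkaListenerBuilder()
    .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
    .withPort(9092)
    .withType(KafkaListenerType.INTERNAL)
    .withTls(false)
    .withAuth(new KafkaListenerAuthenticationScramSha512())
    .build();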

Example 9 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From the class LogDumpScriptIsolatedST, method dumpPartitions.

@IsolatedTest
void dumpPartitions(ExtensionContext context) {
    String clusterName = mapWithClusterNames.get(context.getDisplayName());
    String groupId = "my-group";
    String partitionNumber = "0";
    String outPath = USER_PATH + "/target/" + clusterName;
    resourceManager.createResource(context, KafkaTemplates.kafkaPersistent(clusterName, 1, 1).editMetadata().withNamespace(INFRA_NAMESPACE).endMetadata().build());
    String clientsPodName = deployAndGetInternalClientsPodName(context);
    InternalKafkaClient clients = buildInternalClients(context, clientsPodName, groupId, 10);
    String topicName = mapWithTestTopics.get(context.getDisplayName());
    // send messages and consume them
    clients.sendMessagesPlain();
    clients.receiveMessagesPlain();
    // dry run
    LOGGER.info("Print partition segments from cluster {}/{}", INFRA_NAMESPACE, clusterName);
    String[] printCmd = new String[] {
        USER_PATH + "/../tools/log-dump/run.sh", "partition",
        "--namespace", INFRA_NAMESPACE,
        "--cluster", clusterName,
        "--topic", topicName,
        "--partition", partitionNumber,
        "--dry-run"
    };
    Exec.exec(Level.INFO, printCmd);
    assertThat("Output directory created in dry mode", Files.notExists(Paths.get(outPath)));
    // partition dump
    LOGGER.info("Dump topic partition from cluster {}/{}", INFRA_NAMESPACE, clusterName);
    String[] dumpPartCmd = new String[] {
        USER_PATH + "/../tools/log-dump/run.sh", "partition",
        "--namespace", INFRA_NAMESPACE,
        "--cluster", clusterName,
        "--topic", topicName,
        "--partition", partitionNumber,
        "--out-path", outPath
    };
    Exec.exec(Level.INFO, dumpPartCmd);
    assertThat("No output directory created", Files.exists(Paths.get(outPath)));
    String dumpPartFilePath = outPath + "/" + topicName + "/kafka-0-" + topicName + "-" + partitionNumber + "/00000000000000000000.log";
    assertThat("No partition file created", Files.exists(Paths.get(dumpPartFilePath)));
    assertThat("Empty partition file", new File(dumpPartFilePath).length() > 0);
    // __consumer_offsets dump
    LOGGER.info("Dump consumer offsets partition from cluster {}/{}", INFRA_NAMESPACE, clusterName);
    String[] dumpCgCmd = new String[] {
        USER_PATH + "/../tools/log-dump/run.sh", "cg_offsets",
        "--namespace", INFRA_NAMESPACE,
        "--cluster", clusterName,
        "--group-id", groupId,
        "--out-path", outPath
    };
    Exec.exec(Level.INFO, dumpCgCmd);
    assertThat("No output directory created", Files.exists(Paths.get(outPath)));
    String dumpCgFilePath = outPath + "/__consumer_offsets/kafka-0-__consumer_offsets-12/00000000000000000000.log";
    assertThat("No partition file created", Files.exists(Paths.get(dumpCgFilePath)));
    assertThat("Empty partition file", new File(dumpCgFilePath).length() > 0);
}
Also used: InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) File(java.io.File) IsolatedTest(io.strimzi.test.annotations.IsolatedTest)
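
The three Exec.exec invocations differ only in the run.sh subcommand and flags. A small helper that builds the partition-dump argument list could factor out the duplication; a hedged sketch, where the helper itself is hypothetical but the flags are exactly those used in the test above (imports: java.util.List, java.util.ArrayList):

// Sketch: build the log-dump command for either dry-run or real dump mode.
// Not part of the Strimzi test framework.
static String[] logDumpPartitionCmd(String namespace, String cluster, String topic,
                                    String partition, String outPath, boolean dryRun) {
    List<String> cmd = new ArrayList<>(List.of(
        USER_PATH + "/../tools/log-dump/run.sh", "partition",
        "--namespace", namespace,
        "--cluster", cluster,
        "--topic", topic,
        "--partition", partition));
    if (dryRun) {
        cmd.add("--dry-run");                        // print segments, write nothing
    } else {
        cmd.addAll(List.of("--out-path", outPath));  // dump segments under outPath
    }
    return cmd.toArray(new String[0]);
}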

Example 10 with InternalKafkaClient

Use of io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient in project strimzi by strimzi.

From the class MetricsIsolatedST, method testKafkaExporterDataAfterExchange.

@IsolatedTest
@Tag(ACCEPTANCE)
@Tag(INTERNAL_CLIENTS_USED)
void testKafkaExporterDataAfterExchange(ExtensionContext extensionContext) {
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withUsingPodName(kafkaClientsPodName)
        .withTopicName(kafkaExporterTopic)
        .withNamespaceName(INFRA_NAMESPACE)
        .withClusterName(metricsClusterName)
        .withMessageCount(5000)
        .withListenerName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
        .build();
    internalKafkaClient.checkProducedAndConsumedMessages(
        internalKafkaClient.sendMessagesPlain(),
        internalKafkaClient.receiveMessagesPlain()
    );
    kafkaExporterMetricsData = collector.toBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withComponentType(ComponentType.KafkaExporter)
        .build()
        .collectMetricsFromPods();
    TestUtils.waitFor("Kafka Exporter will contain correct metrics", Constants.GLOBAL_POLL_INTERVAL, GLOBAL_TIMEOUT, () -> {
        try {
            assertThat("Kafka Exporter metrics should be non-empty", kafkaExporterMetricsData.size() > 0);
            kafkaExporterMetricsData.forEach((key, value) -> {
                assertThat("Value from collected metric should be non-empty", !value.isEmpty());
                assertThat(value, CoreMatchers.containsString("kafka_consumergroup_current_offset"));
                assertThat(value, CoreMatchers.containsString("kafka_consumergroup_lag"));
                assertThat(value, CoreMatchers.containsString("kafka_topic_partitions{topic=\"" + kafkaExporterTopic + "\"} 7"));
            });
            return true;
        } catch (Exception e) {
            return false;
        }
    });
}
Also used: InternalKafkaClient(io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) IsolatedTest(io.strimzi.systemtest.annotations.IsolatedTest) Tag(org.junit.jupiter.api.Tag)
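
The containsString checks match raw Prometheus text exposition, including the literal sample value (the trailing " 7" is the expected partition count for the topic). Where the expected value may change, extracting the number is less brittle; a minimal sketch using only java.util.regex (the helper is hypothetical, not part of the Strimzi metrics collector):

// Sketch: extract a gauge value from Prometheus text format, e.g.
// 'kafka_topic_partitions{topic="my-topic"} 7'.
// Needs java.util.OptionalDouble and java.util.regex.{Pattern, Matcher}.
static OptionalDouble metricValue(String scrape, String metric, String topic) {
    Pattern p = Pattern.compile(
        Pattern.quote(metric) + "\\{topic=\"" + Pattern.quote(topic) + "\"\\}\\s+([0-9.eE+-]+)");
    Matcher m = p.matcher(scrape);
    return m.find() ? OptionalDouble.of(Double.parseDouble(m.group(1))) : OptionalDouble.empty();
}

// e.g. assertThat(metricValue(value, "kafka_topic_partitions", kafkaExporterTopic).orElse(-1), is(7.0));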

Aggregations

InternalKafkaClient (io.strimzi.systemtest.kafkaclients.clients.InternalKafkaClient): 160 usages
ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest): 100 usages
KafkaUser (io.strimzi.api.kafka.model.KafkaUser): 94 usages
Tag (org.junit.jupiter.api.Tag): 94 usages
GenericKafkaListenerBuilder (io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder): 86 usages
Matchers.containsString (org.hamcrest.Matchers.containsString): 60 usages
SecretBuilder (io.fabric8.kubernetes.api.model.SecretBuilder): 46 usages
LabelSelector (io.fabric8.kubernetes.api.model.LabelSelector): 42 usages
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 42 usages
ResourceRequirementsBuilder (io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder): 34 usages
HashMap (java.util.HashMap): 28 usages
Secret (io.fabric8.kubernetes.api.model.Secret): 26 usages
ParallelTest (io.strimzi.systemtest.annotations.ParallelTest): 24 usages
ExternalKafkaClient (io.strimzi.systemtest.kafkaclients.externalClients.ExternalKafkaClient): 24 usages
KafkaTopic (io.strimzi.api.kafka.model.KafkaTopic): 18 usages
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder): 18 usages
Quantity (io.fabric8.kubernetes.api.model.Quantity): 16 usages
IsolatedTest (io.strimzi.systemtest.annotations.IsolatedTest): 16 usages
ConfigMapBuilder (io.fabric8.kubernetes.api.model.ConfigMapBuilder): 14 usages
ConfigMapKeySelectorBuilder (io.fabric8.kubernetes.api.model.ConfigMapKeySelectorBuilder): 14 usages