Use of io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder in project strimzi by strimzi.
Class OauthPlainIsolatedST, method testProducerConsumerBridge.
@Description("As a oauth bridge, I should be able to send messages to bridge endpoint.")
@ParallelTest
@Tag(BRIDGE)
void testProducerConsumerBridge(ExtensionContext extensionContext) {
String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
KafkaOauthClients oauthExampleClients = new KafkaOauthClientsBuilder().withNamespaceName(INFRA_NAMESPACE).withProducerName(producerName).withConsumerName(consumerName).withBootstrapAddress(KafkaResources.plainBootstrapAddress(oauthClusterName)).withTopicName(topicName).withMessageCount(MESSAGE_COUNT).withOauthClientId(OAUTH_CLIENT_NAME).withOauthClientSecret(OAUTH_CLIENT_SECRET).withOauthTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).build();
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
resourceManager.createResource(extensionContext, oauthExampleClients.producerStrimziOauthPlain());
ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
resourceManager.createResource(extensionContext, oauthExampleClients.consumerStrimziOauthPlain());
ClientUtils.waitForClientSuccess(consumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, consumerName);
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, kafkaClientsName).build());
// needed for a verification of oauth configuration
InlineLogging ilDebug = new InlineLogging();
ilDebug.setLoggers(Map.of("rootLogger.level", "DEBUG"));
resourceManager.createResource(extensionContext, KafkaBridgeTemplates.kafkaBridge(oauthClusterName, KafkaResources.plainBootstrapAddress(oauthClusterName), 1).editMetadata().withNamespace(INFRA_NAMESPACE).endMetadata().editSpec().withNewKafkaClientAuthenticationOAuth().withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri()).withClientId("kafka-bridge").withNewClientSecret().withSecretName(BRIDGE_OAUTH_SECRET).withKey(OAUTH_KEY).endClientSecret().withConnectTimeoutSeconds(CONNECT_TIMEOUT_S).withReadTimeoutSeconds(READ_TIMEOUT_S).endKafkaClientAuthenticationOAuth().withLogging(ilDebug).endSpec().build());
final String kafkaBridgePodName = kubeClient(INFRA_NAMESPACE).listPods(INFRA_NAMESPACE, oauthClusterName, Labels.STRIMZI_KIND_LABEL, KafkaBridge.RESOURCE_KIND).get(0).getMetadata().getName();
final String kafkaBridgeLogs = KubeClusterResource.cmdKubeClient(INFRA_NAMESPACE).execInCurrentNamespace(Level.DEBUG, "logs", kafkaBridgePodName).out();
verifyOauthConfiguration(kafkaBridgeLogs);
String bridgeProducerName = "bridge-producer-" + clusterName;
BridgeClients kafkaBridgeClientJob = new BridgeClientsBuilder().withNamespaceName(INFRA_NAMESPACE).withProducerName(bridgeProducerName).withBootstrapAddress(KafkaBridgeResources.serviceName(oauthClusterName)).withTopicName(topicName).withMessageCount(MESSAGE_COUNT).withPort(HTTP_BRIDGE_DEFAULT_PORT).withDelayMs(1000).withPollInterval(1000).build();
resourceManager.createResource(extensionContext, kafkaBridgeClientJob.producerStrimziBridge());
ClientUtils.waitForClientSuccess(bridgeProducerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, bridgeProducerName);
}
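The test above only runs the bridge producer Job against the HTTP endpoint. A matching bridge consumer Job could be built with the same builder, reusing the variables from the test; this is a minimal sketch, not part of the original test, and the bridgeConsumerName variable is hypothetical:

String bridgeConsumerName = "bridge-consumer-" + clusterName; // hypothetical name, not in the original test

BridgeClients kafkaBridgeConsumerJob = new BridgeClientsBuilder()
    .withNamespaceName(INFRA_NAMESPACE)
    .withConsumerName(bridgeConsumerName)
    .withBootstrapAddress(KafkaBridgeResources.serviceName(oauthClusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withPort(HTTP_BRIDGE_DEFAULT_PORT)
    .withDelayMs(1000)
    .withPollInterval(1000)
    .build();

// Deploy the consumer Job, wait for it to read all messages, then clean it up
resourceManager.createResource(extensionContext, kafkaBridgeConsumerJob.consumerStrimziBridge());
ClientUtils.waitForClientSuccess(bridgeConsumerName, INFRA_NAMESPACE, MESSAGE_COUNT);
JobUtils.deleteJobWithWait(INFRA_NAMESPACE, bridgeConsumerName);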
Use of io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder in project strimzi by strimzi.
Class MetricsIsolatedST, method testKafkaBridgeMetrics.
@ParallelTest
@Tag(BRIDGE)
@Tag(ACCEPTANCE)
void testKafkaBridgeMetrics(ExtensionContext extensionContext) {
    String producerName = "bridge-producer";
    String consumerName = "bridge-consumer";

    MetricsCollector bridgeCollector = collector.toBuilder()
        .withComponentName(BRIDGE_CLUSTER)
        .withComponentType(ComponentType.KafkaBridge)
        .build();

    // Deploy the bridge producer and consumer client Jobs
    BridgeClients kafkaBridgeClientJob = new BridgeClientsBuilder()
        .withNamespaceName(INFRA_NAMESPACE)
        .withProducerName(producerName)
        .withConsumerName(consumerName)
        .withBootstrapAddress(KafkaBridgeResources.serviceName(BRIDGE_CLUSTER))
        .withTopicName(bridgeTopic)
        .withMessageCount(MESSAGE_COUNT)
        .withPort(Constants.HTTP_BRIDGE_DEFAULT_PORT)
        .withDelayMs(200)
        .withPollInterval(200)
        .build();

    resourceManager.createResource(extensionContext, kafkaBridgeClientJob.producerStrimziBridge());
    resourceManager.createResource(extensionContext, kafkaBridgeClientJob.consumerStrimziBridge());

    TestUtils.waitFor("KafkaProducer metrics will be available", Constants.GLOBAL_POLL_INTERVAL, Constants.GLOBAL_TIMEOUT, () -> {
        LOGGER.info("Looking for 'strimzi_bridge_kafka_producer_count' in bridge metrics");
        kafkaBridgeMetricsData = bridgeCollector.collectMetricsFromPods();
        Pattern producerCountPattern = Pattern.compile("strimzi_bridge_kafka_producer_count\\{.*,} ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
        ArrayList<Double> producerCountValues = MetricsCollector.collectSpecificMetric(producerCountPattern, kafkaBridgeMetricsData);
        return producerCountValues.stream().mapToDouble(i -> i).count() == (double) 1;
    });

    TestUtils.waitFor("KafkaConsumer metrics will be available", Constants.GLOBAL_POLL_INTERVAL, Constants.GLOBAL_TIMEOUT, () -> {
        LOGGER.info("Looking for 'strimzi_bridge_kafka_consumer_connection_count' in bridge metrics");
        kafkaBridgeMetricsData = bridgeCollector.collectMetricsFromPods();
        Pattern consumerConnectionsPattern = Pattern.compile("strimzi_bridge_kafka_consumer_connection_count\\{.*,} ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
        ArrayList<Double> consumerConnectionsValues = MetricsCollector.collectSpecificMetric(consumerConnectionsPattern, kafkaBridgeMetricsData);
        return consumerConnectionsValues.stream().mapToDouble(i -> i).count() > 0;
    });

    assertThat("Collected KafkaBridge metrics don't contain JVM metrics", kafkaBridgeMetricsData.values().toString().contains("jvm"));
    assertThat("Collected KafkaBridge metrics don't contain HTTP metrics", kafkaBridgeMetricsData.values().toString().contains("strimzi_bridge_http_server"));

    Pattern bridgeResponse = Pattern.compile("system_cpu_count ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
    ArrayList<Double> values = MetricsCollector.collectSpecificMetric(bridgeResponse, kafkaBridgeMetricsData);
    assertThat(values.stream().mapToDouble(i -> i).sum(), is((double) 1));
}
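The two waitFor blocks above extract a single numeric value from the Prometheus text exposition served by the bridge pod. A standalone sketch of the same regex extraction, using an illustrative metrics line rather than output captured from a real pod:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BridgeMetricRegexDemo {
    public static void main(String[] args) {
        // Same pattern as in the test: metric name, a label block ending in ",}", then the value
        Pattern producerCountPattern = Pattern.compile("strimzi_bridge_kafka_producer_count\\{.*,} ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
        String sampleLine = "strimzi_bridge_kafka_producer_count{clientId=\"my-bridge\",} 1.0"; // illustrative sample line

        Matcher matcher = producerCountPattern.matcher(sampleLine);
        if (matcher.find()) {
            double producerCount = Double.parseDouble(matcher.group(1)); // 1.0
            System.out.println("producer count = " + producerCount);
        }
    }
}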
Use of io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder in project strimzi by strimzi.
Class HttpBridgeTlsST, method testSendSimpleMessageTls.
@ParallelTest
void testSendSimpleMessageTls(ExtensionContext extensionContext) {
    // Create topic
    String topicName = KafkaTopicUtils.generateRandomNameOfTopic();

    BridgeClients kafkaBridgeClientJobProduce = new BridgeClientsBuilder(kafkaBridgeClientJob)
        .withTopicName(topicName)
        .build();

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(httpBridgeTlsClusterName, topicName)
        .editMetadata().withNamespace(namespace).endMetadata()
        .build());

    // Produce messages through the HTTP bridge
    resourceManager.createResource(extensionContext, kafkaBridgeClientJobProduce.producerStrimziBridge());
    ClientUtils.waitForClientSuccess(producerName, namespace, MESSAGE_COUNT);

    // Consume the messages back from Kafka over TLS and verify the count
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(httpBridgeTlsClusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withKafkaUsername(sharedKafkaUserName)
        .withUsingPodName(kafkaClientsPodName)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();

    assertThat(internalKafkaClient.receiveMessagesTls(), is(MESSAGE_COUNT));
}
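Both HttpBridgeTlsST tests copy a shared base client with new BridgeClientsBuilder(kafkaBridgeClientJob) and only override the topic. A sketch of what that shared kafkaBridgeClientJob field might look like; this is an assumption pieced together from the builder calls used elsewhere on this page, not the actual class setup:

// Hypothetical shared base client, built once in the test class setup
BridgeClients kafkaBridgeClientJob = new BridgeClientsBuilder()
    .withNamespaceName(namespace)
    .withProducerName(producerName)
    .withConsumerName(consumerName)
    .withBootstrapAddress(KafkaBridgeResources.serviceName(httpBridgeTlsClusterName))
    .withMessageCount(MESSAGE_COUNT)
    .withPort(Constants.HTTP_BRIDGE_DEFAULT_PORT)
    .build();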
Use of io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder in project strimzi by strimzi.
Class HttpBridgeKafkaExternalListenersST, method testWeirdUsername.
@SuppressWarnings({"checkstyle:MethodLength"})
private void testWeirdUsername(ExtensionContext extensionContext, String weirdUserName, KafkaListenerAuthentication auth, KafkaBridgeSpec spec, SecurityProtocol securityProtocol) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());

    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3, 1)
        .editMetadata().withNamespace(namespace).endMetadata()
        .editSpec()
            .editKafka()
                .withListeners(
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                        .withPort(9093)
                        .withType(KafkaListenerType.INTERNAL)
                        .withTls(true)
                        .withAuth(auth)
                        .build(),
                    new GenericKafkaListenerBuilder()
                        .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                        .withPort(9094)
                        .withType(KafkaListenerType.NODEPORT)
                        .withTls(true)
                        .withAuth(auth)
                        .build())
            .endKafka()
        .endSpec()
        .build());

    BridgeClients kafkaBridgeClientJob = new BridgeClientsBuilder()
        .withProducerName(clusterName + "-" + producerName)
        .withConsumerName(clusterName + "-" + consumerName)
        .withBootstrapAddress(KafkaBridgeResources.serviceName(clusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withPort(Constants.HTTP_BRIDGE_DEFAULT_PORT)
        .withNamespaceName(namespace)
        .build();

    // Create topic
    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName)
        .editMetadata().withNamespace(namespace).endMetadata()
        .build());

    // Create user
    if (auth.getType().equals(Constants.TLS_LISTENER_DEFAULT_NAME)) {
        resourceManager.createResource(extensionContext, KafkaUserTemplates.tlsUser(clusterName, weirdUserName)
            .editMetadata().withNamespace(namespace).endMetadata()
            .build());
    } else {
        resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, weirdUserName)
            .editMetadata().withNamespace(namespace).endMetadata()
            .build());
    }

    final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespace, true, kafkaClientsName).build());

    // Deploy http bridge
    resourceManager.createResource(extensionContext, KafkaBridgeTemplates.kafkaBridge(clusterName, KafkaResources.tlsBootstrapAddress(clusterName), 1)
        .editMetadata().withNamespace(namespace).endMetadata()
        .withNewSpecLike(spec)
            .withBootstrapServers(KafkaResources.tlsBootstrapAddress(clusterName))
            .withNewHttp(Constants.HTTP_BRIDGE_DEFAULT_PORT)
            .withNewConsumer()
                .addToConfig(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
            .endConsumer()
        .endSpec()
        .build());

    final Service service = KafkaBridgeUtils.createBridgeNodePortService(clusterName, namespace, BRIDGE_EXTERNAL_SERVICE);
    ServiceResource.createServiceResource(extensionContext, service, namespace);

    resourceManager.createResource(extensionContext, kafkaBridgeClientJob.consumerStrimziBridge());

    final String kafkaProducerExternalName = "kafka-producer-external" + new Random().nextInt(Integer.MAX_VALUE);

    final List<ListenerStatus> listenerStatusList = KafkaResource.kafkaClient().inNamespace(namespace).withName(clusterName).get().getStatus().getListeners();
    final String externalBootstrapServers = listenerStatusList.stream()
        .filter(listener -> listener.getType().equals(Constants.EXTERNAL_LISTENER_DEFAULT_NAME))
        .findFirst()
        .orElseThrow(RuntimeException::new)
        .getBootstrapServers();

    final KafkaClients externalKafkaProducer = new KafkaClientsBuilder()
        .withProducerName(kafkaProducerExternalName)
        .withBootstrapAddress(externalBootstrapServers)
        .withNamespaceName(namespace)
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .build();

    if (auth.getType().equals(Constants.TLS_LISTENER_DEFAULT_NAME)) {
        // tls producer
        resourceManager.createResource(extensionContext, externalKafkaProducer.producerTlsStrimzi(clusterName, weirdUserName));
    } else {
        // scram-sha producer
        resourceManager.createResource(extensionContext, externalKafkaProducer.producerScramShaStrimzi(clusterName, weirdUserName));
    }

    ClientUtils.waitForClientSuccess(kafkaProducerExternalName, namespace, MESSAGE_COUNT);

    // delete kafka producer job
    JobUtils.deleteJobWithWait(namespace, kafkaProducerExternalName);

    ClientUtils.waitForClientSuccess(clusterName + "-" + consumerName, namespace, MESSAGE_COUNT);
}
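Because testWeirdUsername is a private helper, each @ParallelTest supplies the user name, listener authentication, bridge spec, and security protocol. A hypothetical TLS caller might look like the sketch below; the argument values are illustrative, and a real caller would also configure the bridge's TLS trust and client authentication in the spec:

@ParallelTest
void testTlsAuthWithWeirdUsername(ExtensionContext extensionContext) {
    // Illustrative "weird" user name; the real tests use names full of special characters
    String weirdUserName = "@my-weird.user;name!";

    testWeirdUsername(extensionContext, weirdUserName,
        new KafkaListenerAuthenticationTls(),
        new KafkaBridgeSpecBuilder().build(), // a real spec would also set TLS trust and client authentication
        SecurityProtocol.SSL);
}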
Use of io.strimzi.systemtest.kafkaclients.internalClients.BridgeClientsBuilder in project strimzi by strimzi.
Class HttpBridgeTlsST, method testReceiveSimpleMessageTls.
@ParallelTest
void testReceiveSimpleMessageTls(ExtensionContext extensionContext) {
    String topicName = KafkaTopicUtils.generateRandomNameOfTopic();

    BridgeClients kafkaBridgeClientJobConsume = new BridgeClientsBuilder(kafkaBridgeClientJob)
        .withTopicName(topicName)
        .build();

    resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(httpBridgeTlsClusterName, topicName)
        .editMetadata().withNamespace(namespace).endMetadata()
        .build());

    // Start the bridge consumer first, then send messages to Kafka
    resourceManager.createResource(extensionContext, kafkaBridgeClientJobConsume.consumerStrimziBridge());

    // Send messages to Kafka over TLS
    InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(httpBridgeTlsClusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withKafkaUsername(sharedKafkaUserName)
        .withUsingPodName(kafkaClientsPodName)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();

    assertThat(internalKafkaClient.sendMessagesTls(), is(MESSAGE_COUNT));
    ClientUtils.waitForClientSuccess(consumerName, namespace, MESSAGE_COUNT);
}
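The other examples on this page delete finished client Jobs explicitly; the same cleanup could be appended here once the bridge consumer succeeds, reusing the JobUtils call shown above:

// Optional cleanup, mirroring the explicit Job deletion used in the other examples
JobUtils.deleteJobWithWait(namespace, consumerName);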