Use of io.strimzi.systemtest.annotations.ParallelTest in the strimzi/strimzi project: class HttpBridgeKafkaExternalListenersST, method testScramShaAuthWithWeirdUsername.
@ParallelTest
void testScramShaAuthWithWeirdUsername(ExtensionContext extensionContext) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());

    // User name deliberately contains '.' and is longer than 64 characters to
    // exercise SCRAM-SHA authentication with an unusual principal name.
    final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasd.asdasidioiqweioqiweooioqieioqieoqieooi";

    // Password secret referencing that user, wired into the bridge's SCRAM-SHA-512 auth spec.
    final PasswordSecretSource passwordSecret = new PasswordSecretSource();
    passwordSecret.setSecretName(weirdUserName);
    passwordSecret.setPassword("password");

    // Cluster CA certificate so the bridge trusts the TLS listener.
    final CertSecretSource certSecret = new CertSecretSource();
    certSecret.setCertificate("ca.crt");
    certSecret.setSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName));

    final KafkaBridgeSpec bridgeSpec = new KafkaBridgeSpecBuilder()
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(weirdUserName)
            .withPasswordSecret(passwordSecret)
        .endKafkaClientAuthenticationScramSha512()
        .withNewTls()
            .withTrustedCertificates(certSecret)
        .endTls()
        .build();

    testWeirdUsername(extensionContext, weirdUserName, new KafkaListenerAuthenticationScramSha512(), bridgeSpec, SecurityProtocol.SASL_SSL);
}
Use of io.strimzi.systemtest.annotations.ParallelTest in the strimzi/strimzi project: class HttpBridgeTlsST, method testSendSimpleMessageTls.
@ParallelTest
void testSendSimpleMessageTls(ExtensionContext extensionContext) {
    // Fresh topic name per run so parallel executions do not collide.
    final String topicName = KafkaTopicUtils.generateRandomNameOfTopic();

    final BridgeClients producerClients = new BridgeClientsBuilder(kafkaBridgeClientJob)
        .withTopicName(topicName)
        .build();

    resourceManager.createResource(extensionContext,
        KafkaTopicTemplates.topic(httpBridgeTlsClusterName, topicName)
            .editMetadata()
                .withNamespace(namespace)
            .endMetadata()
            .build());

    // Produce through the HTTP bridge and wait until the producer job finishes.
    resourceManager.createResource(extensionContext, producerClients.producerStrimziBridge());
    ClientUtils.waitForClientSuccess(producerName, namespace, MESSAGE_COUNT);

    // Consume over TLS directly from Kafka and verify every bridged message arrived.
    final InternalKafkaClient tlsConsumer = new InternalKafkaClient.Builder()
        .withTopicName(topicName)
        .withNamespaceName(namespace)
        .withClusterName(httpBridgeTlsClusterName)
        .withMessageCount(MESSAGE_COUNT)
        .withSecurityProtocol(SecurityProtocol.SSL)
        .withKafkaUsername(sharedKafkaUserName)
        .withUsingPodName(kafkaClientsPodName)
        .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
        .build();

    assertThat(tlsConsumer.receiveMessagesTls(), is(MESSAGE_COUNT));
}
Use of io.strimzi.systemtest.annotations.ParallelTest in the strimzi/strimzi project: class MetricsIsolatedST, method testKafkaConnectResponse.
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testKafkaConnectResponse() {
    // Scrape metrics from the KafkaConnect pods.
    kafkaConnectMetricsData = collector.toBuilder()
        .withComponentType(ComponentType.KafkaConnect)
        .build()
        .collectMetricsFromPods();

    final Pattern responsePattern = Pattern.compile(
        "kafka_connect_node_response_total\\{clientid=\".*\",.*} ([\\d.][^\\n]+)",
        Pattern.CASE_INSENSITIVE);
    final ArrayList<Double> samples = MetricsCollector.collectSpecificMetric(responsePattern, kafkaConnectMetricsData);

    // At least one response must have been counted across all scraped pods.
    assertThat("KafkaConnect response count doesn't match expected value",
        samples.stream().mapToDouble(Double::doubleValue).sum() > 0);
}
Use of io.strimzi.systemtest.annotations.ParallelTest in the strimzi/strimzi project: class MetricsIsolatedST, method testKafkaExporterDifferentSetting.
@ParallelTest
void testKafkaExporterDifferentSetting() throws InterruptedException, ExecutionException, IOException {
    final LabelSelector exporterSelector =
        kubeClient().getDeploymentSelectors(INFRA_NAMESPACE, KafkaExporterResources.deploymentName(metricsClusterName));

    // With default CR settings both exporter filters should match everything (".*").
    String runScriptContent =
        getExporterRunScript(kubeClient().listPods(INFRA_NAMESPACE, exporterSelector).get(0).getMetadata().getName());
    assertThat("Exporter starting script has wrong setting than it's specified in CR", runScriptContent.contains("--group.filter=\".*\""));
    assertThat("Exporter starting script has wrong setting than it's specified in CR", runScriptContent.contains("--topic.filter=\".*\""));

    final Map<String, String> exporterSnapshot =
        DeploymentUtils.depSnapshot(INFRA_NAMESPACE, KafkaExporterResources.deploymentName(metricsClusterName));

    // Narrow both regexes in the Kafka CR, then wait for the exporter deployment to roll.
    KafkaResource.replaceKafkaResourceInSpecificNamespace(metricsClusterName, kafka -> {
        kafka.getSpec().getKafkaExporter().setGroupRegex("my-group.*");
        kafka.getSpec().getKafkaExporter().setTopicRegex(topicName);
    }, INFRA_NAMESPACE);
    DeploymentUtils.waitTillDepHasRolled(INFRA_NAMESPACE, KafkaExporterResources.deploymentName(metricsClusterName), 1, exporterSnapshot);

    // The rolled pod's start script must now carry the narrowed filters.
    runScriptContent =
        getExporterRunScript(kubeClient().listPods(INFRA_NAMESPACE, exporterSelector).get(0).getMetadata().getName());
    assertThat("Exporter starting script has wrong setting than it's specified in CR", runScriptContent.contains("--group.filter=\"my-group.*\""));
    assertThat("Exporter starting script has wrong setting than it's specified in CR", runScriptContent.contains("--topic.filter=\"" + topicName + "\""));
}
Use of io.strimzi.systemtest.annotations.ParallelTest in the strimzi/strimzi project: class MetricsIsolatedST, method testMirrorMaker2Metrics.
@ParallelTest
@Tag(MIRROR_MAKER2)
@Tag(CONNECT_COMPONENTS)
@Tag(ACCEPTANCE)
void testMirrorMaker2Metrics() {
    // Scrape metrics from the MirrorMaker2 pods.
    kafkaMirrorMaker2MetricsData = collector.toBuilder()
        .withComponentName(MIRROR_MAKER_CLUSTER)
        .withComponentType(ComponentType.KafkaMirrorMaker2)
        .build()
        .collectMetricsFromPods();

    // MM2 runs on the Connect runtime: expect exactly 3 connectors in total...
    Pattern metricPattern = Pattern.compile("kafka_connect_worker_connector_count ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
    ArrayList<Double> samples = MetricsCollector.collectSpecificMetric(metricPattern, kafkaMirrorMaker2MetricsData);
    assertThat(samples.stream().mapToDouble(Double::doubleValue).sum(), is(3.0));

    // ...and exactly 1 worker task.
    metricPattern = Pattern.compile("kafka_connect_worker_task_count ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
    samples = MetricsCollector.collectSpecificMetric(metricPattern, kafkaMirrorMaker2MetricsData);
    assertThat(samples.stream().mapToDouble(Double::doubleValue).sum(), is(1.0));
}
Aggregations