Use of io.strimzi.systemtest.Constants.CONNECT in project strimzi by strimzi.
Class MetricsIsolatedST, method testKafkaConnectResponse:
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testKafkaConnectResponse() {
    kafkaConnectMetricsData = collector.toBuilder()
        .withComponentType(ComponentType.KafkaConnect)
        .build()
        .collectMetricsFromPods();
    Pattern connectResponse = Pattern.compile("kafka_connect_node_response_total\\{clientid=\".*\",.*} ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
    ArrayList<Double> values = MetricsCollector.collectSpecificMetric(connectResponse, kafkaConnectMetricsData);
    assertThat("KafkaConnect response count doesn't match expected value", values.stream().mapToDouble(i -> i).sum() > 0);
}
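The assertion works by capturing the numeric sample that follows each kafka_connect_node_response_total{...} series in the Prometheus-formatted scrape output and summing the captures. Below is a minimal, self-contained sketch of that extraction step; the sample payload and the collectValues helper are illustrative stand-ins, not the strimzi MetricsCollector API.

import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ConnectMetricRegexSketch {

    // Collects the first capture group of every match as a double.
    static ArrayList<Double> collectValues(Pattern pattern, String metricsDump) {
        ArrayList<Double> values = new ArrayList<>();
        Matcher matcher = pattern.matcher(metricsDump);
        while (matcher.find()) {
            values.add(Double.parseDouble(matcher.group(1).trim()));
        }
        return values;
    }

    public static void main(String[] args) {
        // Illustrative scrape output; the real data comes from the Connect pods' metrics endpoint.
        String metricsDump =
            "kafka_connect_node_response_total{clientid=\"connect-1\",node=\"node-0\"} 42.0\n"
            + "kafka_connect_node_response_total{clientid=\"connect-1\",node=\"node-1\"} 17.0\n";

        Pattern connectResponse = Pattern.compile(
            "kafka_connect_node_response_total\\{clientid=\".*\",.*} ([\\d.][^\\n]+)",
            Pattern.CASE_INSENSITIVE);

        double sum = collectValues(connectResponse, metricsDump).stream()
            .mapToDouble(Double::doubleValue)
            .sum();
        System.out.println("total responses: " + sum); // 59.0 for the sample above
    }
}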
Use of io.strimzi.systemtest.Constants.CONNECT in project strimzi by strimzi.
Class MetricsIsolatedST, method testKafkaConnectRequests:
@ParallelTest
@Tag(ACCEPTANCE)
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testKafkaConnectRequests() {
    kafkaConnectMetricsData = collector.toBuilder()
        .withComponentType(ComponentType.KafkaConnect)
        .build()
        .collectMetricsFromPods();
    Pattern connectRequests = Pattern.compile("kafka_connect_node_request_total\\{clientid=\".*\",} ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
    ArrayList<Double> values = MetricsCollector.collectSpecificMetric(connectRequests, kafkaConnectMetricsData);
    assertThat("KafkaConnect requests count doesn't match expected value", values.stream().mapToDouble(i -> i).sum() > 0);
}
Use of io.strimzi.systemtest.Constants.CONNECT in project strimzi-kafka-operator by strimzi.
Class ConnectBuilderIsolatedST, method testBuildFailsWithWrongChecksumOfArtifact:
@ParallelTest
void testBuildFailsWithWrongChecksumOfArtifact(ExtensionContext extensionContext) {
    String connectClusterName = mapWithClusterNames.get(extensionContext.getDisplayName()) + "-connect";
    String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
    final String imageName = getImageNameForTestCase();
    Plugin pluginWithWrongChecksum = new PluginBuilder()
        .withName("connector-with-wrong-checksum")
        .withArtifacts(new JarArtifactBuilder()
            .withUrl(ECHO_SINK_JAR_URL)
            .withSha512sum(ECHO_SINK_JAR_WRONG_CHECKSUM)
            .build())
        .build();
    resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
    String kafkaClientsPodName = kubeClient(INFRA_NAMESPACE).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
    resourceManager.createResource(extensionContext, false,
        KafkaConnectTemplates.kafkaConnect(extensionContext, connectClusterName, INFRA_NAMESPACE, INFRA_NAMESPACE, 1)
            .editMetadata()
                .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
            .endMetadata()
            .editOrNewSpec()
                .withNewBuild()
                    .withPlugins(pluginWithWrongChecksum)
                    .withNewDockerOutput()
                        .withImage(imageName)
                    .endDockerOutput()
                .endBuild()
            .endSpec()
            .build());
    KafkaConnectUtils.waitForConnectNotReady(connectClusterName);
    KafkaConnectUtils.waitUntilKafkaConnectStatusConditionContainsMessage(connectClusterName, INFRA_NAMESPACE, "The Kafka Connect build failed(.*)?");
    LOGGER.info("Checking if KafkaConnect status condition contains message about build failure");
    KafkaConnect kafkaConnect = KafkaConnectResource.kafkaConnectClient().inNamespace(INFRA_NAMESPACE).withName(connectClusterName).get();
    LOGGER.info("Deploying network policies for KafkaConnect");
    NetworkPolicyResource.deployNetworkPolicyForResource(extensionContext, kafkaConnect, KafkaConnectResources.deploymentName(connectClusterName));
    Condition connectCondition = kafkaConnect.getStatus().getConditions().stream().findFirst().orElseThrow();
    assertTrue(connectCondition.getMessage().matches("The Kafka Connect build failed(.*)?"));
    assertThat(connectCondition.getType(), is(NotReady.toString()));
    LOGGER.info("Replacing plugin's checksum with right one");
    KafkaConnectResource.replaceKafkaConnectResource(connectClusterName, kC -> {
        Plugin pluginWithRightChecksum = new PluginBuilder()
            .withName("connector-with-right-checksum")
            .withArtifacts(new JarArtifactBuilder()
                .withUrl(ECHO_SINK_JAR_URL)
                .withSha512sum(ECHO_SINK_JAR_CHECKSUM)
                .build())
            .build();
        kC.getSpec().getBuild().getPlugins().remove(0);
        kC.getSpec().getBuild().getPlugins().add(pluginWithRightChecksum);
    });
    KafkaConnectUtils.waitForConnectReady(connectClusterName);
    LOGGER.info("Checking if KafkaConnect API contains EchoSink connector");
    String plugins = cmdKubeClient().execInPod(kafkaClientsPodName, "curl", "-X", "GET",
        "http://" + KafkaConnectResources.serviceName(connectClusterName) + ":8083/connector-plugins").out();
    assertTrue(plugins.contains(ECHO_SINK_CLASS_NAME));
    LOGGER.info("Checking if KafkaConnect resource contains EchoSink connector in status");
    kafkaConnect = KafkaConnectResource.kafkaConnectClient().inNamespace(INFRA_NAMESPACE).withName(connectClusterName).get();
    assertTrue(kafkaConnect.getStatus().getConnectorPlugins().stream().anyMatch(connectorPlugin -> connectorPlugin.getConnectorClass().contains(ECHO_SINK_CLASS_NAME)));
}
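The first half of this test fails the build because the declared sha512sum does not match the digest of the downloaded echo-sink artifact; once the checksum is replaced with the correct value, the build succeeds and the EchoSink connector class appears both in the Connect REST API and in the KafkaConnect status. As a rough, standalone illustration of that kind of digest comparison (plain JDK MessageDigest only; this is not the operator's actual build code, and the values are made up):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Sha512CheckSketch {

    // Hex-encodes a SHA-512 digest so it can be compared with a declared sha512sum.
    static String sha512Hex(byte[] content) throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("SHA-512").digest(content);
        StringBuilder hex = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        byte[] downloadedArtifact = "pretend this is the downloaded connector jar".getBytes(StandardCharsets.UTF_8);
        String declaredChecksum = "0000"; // deliberately wrong, analogous to ECHO_SINK_JAR_WRONG_CHECKSUM

        boolean matches = sha512Hex(downloadedArtifact).equals(declaredChecksum);
        System.out.println(matches ? "checksum OK, build continues" : "checksum mismatch, build fails");
    }
}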
Use of io.strimzi.systemtest.Constants.CONNECT in project strimzi-kafka-operator by strimzi.
Class MetricsIsolatedST, method testKafkaConnectIoNetwork:
@ParallelTest
@Tag(CONNECT)
@Tag(CONNECT_COMPONENTS)
void testKafkaConnectIoNetwork() {
    kafkaConnectMetricsData = collector.toBuilder()
        .withComponentType(ComponentType.KafkaConnect)
        .build()
        .collectMetricsFromPods();
    Pattern connectIoNetwork = Pattern.compile("kafka_connect_network_io_total\\{clientid=\".*\",} ([\\d.][^\\n]+)", Pattern.CASE_INSENSITIVE);
    ArrayList<Double> values = MetricsCollector.collectSpecificMetric(connectIoNetwork, kafkaConnectMetricsData);
    assertThat("KafkaConnect IO network count doesn't match expected value", values.stream().mapToDouble(i -> i).sum() > 0);
}