Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
The class HttpBridgeKafkaExternalListenersST, method testScramShaAuthWithWeirdUsername.
@ParallelTest
void testScramShaAuthWithWeirdUsername(ExtensionContext extensionContext) {
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
// Create a user with an unusual name: it contains '.' and is longer than 64 characters -> SCRAM-SHA-512
final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasd.asdasidioiqweioqiweooioqieioqieoqieooi";
// Initialize PasswordSecretSource to set as the passwordSecret in the KafkaBridge spec
final PasswordSecretSource passwordSecret = new PasswordSecretSource();
passwordSecret.setSecretName(weirdUserName);
passwordSecret.setPassword("password");
// Initialize CertSecretSource with the cluster CA certificate and secret name for the bridge's TLS configuration
CertSecretSource certSecret = new CertSecretSource();
certSecret.setCertificate("ca.crt");
certSecret.setSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName));
KafkaBridgeSpec bridgeSpec = new KafkaBridgeSpecBuilder()
    .withNewKafkaClientAuthenticationScramSha512()
        .withUsername(weirdUserName)
        .withPasswordSecret(passwordSecret)
    .endKafkaClientAuthenticationScramSha512()
    .withNewTls()
        .withTrustedCertificates(certSecret)
    .endTls()
    .build();
testWeirdUsername(extensionContext, weirdUserName, new KafkaListenerAuthenticationScramSha512(), bridgeSpec, SecurityProtocol.SASL_SSL);
}
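For context, a minimal sketch of the broker-side counterpart that such a bridge spec authenticates against. The listener name, port, and type below are illustrative assumptions (the actual listener for this test is configured inside the testWeirdUsername helper, which is not shown here); the pattern follows the GenericKafkaListenerBuilder usage in the other tests in this listing.
// Hedged sketch: an internal TLS listener secured by SCRAM-SHA-512 authentication.
// Listener name, port, and type are assumptions, not taken from testWeirdUsername itself.
GenericKafkaListener scramTlsListener = new GenericKafkaListenerBuilder()
    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .withPort(9093)
    .withType(KafkaListenerType.INTERNAL)
    .withTls(true)
    .withAuth(new KafkaListenerAuthenticationScramSha512())   // the class this page documents
    .build();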
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
The class ConnectIsolatedST, method testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication.
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName).build();
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, kafkaUser);
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .editSpec()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withNewTls()
            .addNewTrustedCertificate()
                .withSecretName(clusterName + "-cluster-ca-cert")
                .withCertificate("ca.crt")
            .endTrustedCertificate()
        .endTls()
        .withBootstrapServers(clusterName + "-kafka-bootstrap:9093")
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(userName)
            .withNewPasswordSecret()
                .withSecretName(userName)
                .withPassword("password")
            .endPasswordSecret()
        .endKafkaClientAuthenticationScramSha512()
    .endSpec()
    .build());
final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
final String kafkaConnectLogs = kubeClient(namespaceName).logs(kafkaConnectPodName);
final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
assertThat(kafkaConnectLogs, not(containsString("ERROR")));
LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", kafkaClientsPodName, topicName);
KafkaConnectorUtils.createFileSinkConnector(namespaceName, kafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, namespaceName, 8083));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName + "-second", kafkaUser).build());
final String kafkaClientsSecondPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName + "-second").get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsSecondPodName)
    .withTopicName(topicName)
    .withNamespaceName(namespaceName)
    .withClusterName(clusterName)
    .withKafkaUsername(userName)
    .withMessageCount(MESSAGE_COUNT)
    .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .build();
internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
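The same client-side authentication can also be assembled from standalone model objects instead of the inline withNewKafkaClientAuthenticationScramSha512() block. A minimal sketch, assuming the setter-based API of PasswordSecretSource shown earlier on this page and matching setters on KafkaClientAuthenticationScramSha512:
// Sketch only: builds the SCRAM-SHA-512 client authentication equivalent to the inline block above.
// The secret name is the KafkaUser name; "password" is the key inside that secret.
PasswordSecretSource connectPassword = new PasswordSecretSource();
connectPassword.setSecretName(userName);
connectPassword.setPassword("password");
KafkaClientAuthenticationScramSha512 connectAuth = new KafkaClientAuthenticationScramSha512();
connectAuth.setUsername(userName);
connectAuth.setPasswordSecret(connectPassword);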
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
The class ConnectIsolatedST, method testKafkaConnectWithPlainAndScramShaAuthentication.
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testKafkaConnectWithPlainAndScramShaAuthentication(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
// Deploy Kafka with a plain (non-TLS) internal listener secured by SCRAM-SHA-512 authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                .withPort(9092)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(false)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName).build();
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, userName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .withNewSpec()
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(clusterName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(userName)
            .withPasswordSecret(new PasswordSecretSourceBuilder()
                .withSecretName(userName)
                .withPassword("password")
                .build())
        .endKafkaClientAuthenticationScramSha512()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withVersion(Environment.ST_KAFKA_VERSION)
        .withReplicas(1)
    .endSpec()
    .build());
final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
final String kafkaConnectLogs = kubeClient(namespaceName).logs(kafkaConnectPodName);
final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodName);
LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
assertThat(kafkaConnectLogs, not(containsString("ERROR")));
LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", kafkaClientsPodName, topicName);
KafkaConnectorUtils.createFileSinkConnector(namespaceName, kafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, namespaceName, 8083));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, false, kafkaClientsName + "-second", kafkaUser).build());
final String kafkaClientsSecondPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName + "-second").get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsSecondPodName)
    .withTopicName(topicName)
    .withNamespaceName(namespaceName)
    .withClusterName(clusterName)
    .withKafkaUsername(userName)
    .withMessageCount(MESSAGE_COUNT)
    .withListenerName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
    .build();
internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesPlain(), internalKafkaClient.receiveMessagesPlain());
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
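For reference, a minimal sketch of the Secret layout that withPasswordSecret(...) above points at: a single key named "password" holding a base64-encoded value, which is the layout the User Operator produces for a SCRAM-SHA-512 KafkaUser. The literal value below is illustrative.
// Sketch only: the shape of the password Secret referenced by the PasswordSecretSourceBuilder above.
Secret userPasswordSecret = new SecretBuilder()
    .withNewMetadata()
        .withName(userName)                    // must match withSecretName(userName)
    .endMetadata()
    .addToData("password", "cGFzc3dvcmQ=")     // base64 of "password"; the key must match withPassword("password")
    .build();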
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
The class ConnectIsolatedST, method testKafkaConnectWithScramShaAuthenticationRolledAfterPasswordChanged.
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
// Changing the password in the referenced Secret should trigger a rolling update of the Connect deployment
void testKafkaConnectWithScramShaAuthenticationRolledAfterPasswordChanged(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                .withPort(9092)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(false)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
Secret passwordSecret = new SecretBuilder()
    .withNewMetadata()
        .withName("custom-pwd-secret")
    .endMetadata()
    .addToData("pwd", "MTIzNDU2Nzg5")
    .build();
kubeClient(namespaceName).createSecret(passwordSecret);
KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
    .editSpec()
        .withNewKafkaUserScramSha512ClientAuthentication()
            .withNewPassword()
                .withNewValueFrom()
                    .withNewSecretKeyRef("pwd", "custom-pwd-secret", false)
                .endValueFrom()
            .endPassword()
        .endKafkaUserScramSha512ClientAuthentication()
    .endSpec()
    .build();
resourceManager.createResource(extensionContext, kafkaUser);
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, userName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .withNewSpec()
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(clusterName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(userName)
            .withPasswordSecret(new PasswordSecretSourceBuilder()
                .withSecretName(userName)
                .withPassword("password")
                .build())
        .endKafkaClientAuthenticationScramSha512()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withVersion(Environment.ST_KAFKA_VERSION)
        .withReplicas(1)
    .endSpec()
    .build());
final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodName);
Map<String, String> connectSnapshot = DeploymentUtils.depSnapshot(namespaceName, KafkaConnectResources.deploymentName(clusterName));
String newPassword = "bmVjb0ppbmVob05lelNwcmF2bnlQYXNzd29yZA==";
Secret newPasswordSecret = new SecretBuilder()
    .withNewMetadata()
        .withName("new-custom-pwd-secret")
    .endMetadata()
    .addToData("pwd", newPassword)
    .build();
kubeClient(namespaceName).createSecret(newPasswordSecret);
kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
    .editSpec()
        .withNewKafkaUserScramSha512ClientAuthentication()
            .withNewPassword()
                .withNewValueFrom()
                    .withNewSecretKeyRef("pwd", "new-custom-pwd-secret", false)
                .endValueFrom()
            .endPassword()
        .endKafkaUserScramSha512ClientAuthentication()
    .endSpec()
    .build();
resourceManager.createResource(extensionContext, kafkaUser);
DeploymentUtils.waitTillDepHasRolled(namespaceName, KafkaConnectResources.deploymentName(clusterName), 1, connectSnapshot);
final String kafkaConnectPodNameAfterRU = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodNameAfterRU);
}
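As a small clarification, the base64 literals used in this test decode as follows (decoded here with java.util.Base64; the encoded strings are taken verbatim from the test above):
// Requires java.util.Base64 and java.nio.charset.StandardCharsets.
String initialPassword = new String(java.util.Base64.getDecoder().decode("MTIzNDU2Nzg5"), java.nio.charset.StandardCharsets.UTF_8);
// -> "123456789" (the "pwd" key of custom-pwd-secret)
String changedPassword = new String(java.util.Base64.getDecoder().decode("bmVjb0ppbmVob05lelNwcmF2bnlQYXNzd29yZA=="), java.nio.charset.StandardCharsets.UTF_8);
// -> "necoJinehoNezSpravnyPassword" (the "pwd" key of new-custom-pwd-secret)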
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
The class MirrorMaker2IsolatedST, method testMirrorMaker2TlsAndScramSha512Auth.
/**
* Tests mirroring messages with MirrorMaker 2.0 over a TLS transport using SCRAM-SHA-512 authentication.
*/
@SuppressWarnings({ "checkstyle:MethodLength" })
@ParallelNamespaceTest
void testMirrorMaker2TlsAndScramSha512Auth(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
String sourceTopicName = "availability-topic-source-" + mapWithTestTopics.get(extensionContext.getDisplayName());
String targetTopicName = "availability-topic-target-" + mapWithTestTopics.get(extensionContext.getDisplayName());
String topicSourceNameMirrored = kafkaClusterSourceName + "." + sourceTopicName;
String topicSourceName = MIRRORMAKER2_TOPIC_NAME + "-" + rng.nextInt(Integer.MAX_VALUE);
String topicTargetName = kafkaClusterSourceName + "." + topicSourceName;
String kafkaUserSource = clusterName + "-my-user-source";
String kafkaUserTarget = clusterName + "-my-user-target";
// Deploy source Kafka with a TLS listener and SCRAM-SHA-512 authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
// Deploy target Kafka with a TLS listener and SCRAM-SHA-512 authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                .withPort(9093)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(true)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
// Deploy topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName, 3).build());
// Create Kafka user for source cluster
KafkaUser userSource = KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSource).build();
resourceManager.createResource(extensionContext, userSource);
// Create Kafka user for target cluster
KafkaUser userTarget = KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTarget).build();
resourceManager.createResource(extensionContext, userTarget);
// Initialize PasswordSecretSource to set this as PasswordSecret in MirrorMaker2 spec
PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
passwordSecretSource.setSecretName(kafkaUserSource);
passwordSecretSource.setPassword("password");
// Initialize PasswordSecretSource to set this as PasswordSecret in MirrorMaker2 spec
PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
passwordSecretTarget.setSecretName(kafkaUserTarget);
passwordSecretTarget.setPassword("password");
// Initialize CertSecretSource with certificate and secret names for source
CertSecretSource certSecretSource = new CertSecretSource();
certSecretSource.setCertificate("ca.crt");
certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
// Initialize CertSecretSource with certificate and secret names for target
CertSecretSource certSecretTarget = new CertSecretSource();
certSecretTarget.setCertificate("ca.crt");
certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
// Deploy client
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName, userSource, userTarget).build());
final String kafkaClientsPodName = kubeClient().listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsPodName)
    .withTopicName(sourceTopicName)
    .withNamespaceName(namespaceName)
    .withClusterName(kafkaClusterSourceName)
    .withKafkaUsername(userSource.getMetadata().getName())
    .withMessageCount(messagesCount)
    .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .build();
LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", sourceTopicName, kafkaClusterSourceName, messagesCount);
// Check brokers availability
internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", targetTopicName, kafkaClusterTargetName, userTarget.getMetadata().getName());
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(targetTopicName).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", targetTopicName, kafkaClusterTargetName, messagesCount);
internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
// Deploy MirrorMaker 2.0 with TLS and SCRAM-SHA-512 authentication
KafkaMirrorMaker2ClusterSpec sourceClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
    .withAlias(kafkaClusterSourceName)
    .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
    .withNewKafkaClientAuthenticationScramSha512()
        .withUsername(kafkaUserSource)
        .withPasswordSecret(passwordSecretSource)
    .endKafkaClientAuthenticationScramSha512()
    .withNewTls()
        .withTrustedCertificates(certSecretSource)
    .endTls()
    .build();
KafkaMirrorMaker2ClusterSpec targetClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
    .withAlias(kafkaClusterTargetName)
    .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName))
    .withNewKafkaClientAuthenticationScramSha512()
        .withUsername(kafkaUserTarget)
        .withPasswordSecret(passwordSecretTarget)
    .endKafkaClientAuthenticationScramSha512()
    .withNewTls()
        .withTrustedCertificates(certSecretTarget)
    .endTls()
    .addToConfig("config.storage.replication.factor", -1)
    .addToConfig("offset.storage.replication.factor", -1)
    .addToConfig("status.storage.replication.factor", -1)
    .build();
resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
    .editSpec()
        .withClusters(targetClusterWithScramSha512Auth, sourceClusterWithScramSha512Auth)
        .editFirstMirror()
            .withTopicsExcludePattern("availability.*")
        .endMirror()
    .endSpec()
    .build());
LOGGER.info("Setting topic to {}, cluster to {} and changing user to {}", topicSourceName, kafkaClusterSourceName, userSource.getMetadata().getName());
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicSourceName).withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).build();
LOGGER.info("Sending messages to - topic {}, cluster {} and message count of {}", topicSourceName, kafkaClusterSourceName, messagesCount);
int sent = internalKafkaClient.sendMessagesTls();
internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
LOGGER.info("Changing to target - topic {}, cluster {}, user {}", topicTargetName, kafkaClusterTargetName, userTarget.getMetadata().getName());
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetName).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
LOGGER.info("Now messages should be mirrored to target topic and cluster");
internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
LOGGER.info("Messages successfully mirrored");
KafkaTopicUtils.waitForKafkaTopicCreation(namespaceName, topicTargetName);
KafkaTopic mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicTargetName).get();
assertThat(mirroredTopic.getSpec().getPartitions(), is(3));
assertThat(mirroredTopic.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL), is(kafkaClusterTargetName));
mirroredTopic = KafkaTopicResource.kafkaTopicClient().inNamespace(namespaceName).withName(topicSourceNameMirrored).get();
assertThat(mirroredTopic, nullValue());
}
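The final assertions rely on MirrorMaker 2.0's default replication policy, which prefixes a mirrored topic with the source cluster alias and a dot. A minimal sketch of that naming, using only variables defined in this test:
// Sketch only: how the asserted topic names are derived under the default replication policy.
// topicTargetName is expected on the target cluster with the source topic's 3 partitions, while
// topics matching "availability.*" are excluded by withTopicsExcludePattern(...), so
// topicSourceNameMirrored must never be created there.
String mirroredName = kafkaClusterSourceName + "." + topicSourceName;        // equals topicTargetName
String excludedName = kafkaClusterSourceName + "." + sourceTopicName;        // equals topicSourceNameMirrored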