Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
From the class MirrorMaker2IsolatedST, method testKMM2RollAfterSecretsCertsUpdateScramsha.
/**
* Test mirroring messages by MirrorMaker 2.0 over a TLS transport using SCRAM-SHA-512 authentication
* while the user SCRAM passwords and the cluster and clients CA certificates are changed.
*/
@ParallelNamespaceTest
@SuppressWarnings({ "checkstyle:MethodLength" })
void testKMM2RollAfterSecretsCertsUpdateScramsha(ExtensionContext extensionContext) {
TestStorage testStorage = new TestStorage(extensionContext);
String kafkaClusterSourceName = testStorage.getClusterName() + "-source";
String kafkaClusterTargetName = testStorage.getClusterName() + "-target";
String topicSourceNameA = MIRRORMAKER2_TOPIC_NAME + "-a-" + rng.nextInt(Integer.MAX_VALUE);
String topicSourceNameB = MIRRORMAKER2_TOPIC_NAME + "-b-" + rng.nextInt(Integer.MAX_VALUE);
String topicTargetNameA = kafkaClusterSourceName + "." + topicSourceNameA;
String topicTargetNameB = kafkaClusterSourceName + "." + topicSourceNameB;
String kafkaUserSourceName = testStorage.getClusterName() + "-my-user-source";
String kafkaUserTargetName = testStorage.getClusterName() + "-my-user-target";
String kafkaTlsScramListenerName = "tlsscram";
// Deploy source kafka with TLS listeners using SCRAM-SHA-512 and TLS authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(kafkaClusterSourceName, 1, 1)
    .editSpec().editKafka()
        .withListeners(
            new GenericKafkaListenerBuilder().withName(kafkaTlsScramListenerName).withPort(9093).withType(KafkaListenerType.INTERNAL)
                .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build(),
            new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9094).withType(KafkaListenerType.INTERNAL)
                .withTls(true).withAuth(new KafkaListenerAuthenticationTls()).build())
    .endKafka().endSpec()
    .build());
// Deploy target kafka with TLS listeners using SCRAM-SHA-512 and TLS authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaPersistent(kafkaClusterTargetName, 1, 1)
    .editSpec().editKafka()
        .withListeners(
            new GenericKafkaListenerBuilder().withName(kafkaTlsScramListenerName).withPort(9093).withType(KafkaListenerType.INTERNAL)
                .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build(),
            new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9094).withType(KafkaListenerType.INTERNAL)
                .withTls(true).withAuth(new KafkaListenerAuthenticationTls()).build())
    .endKafka().endSpec()
    .build());
// Deploy topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceNameA).build(), KafkaTopicTemplates.topic(kafkaClusterTargetName, topicSourceNameB).build());
// Create Kafka user for source and target cluster
KafkaUser userSource = KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSourceName).build();
KafkaUser userTarget = KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTargetName).build();
resourceManager.createResource(extensionContext, userSource, userTarget);
// Initialize PasswordSecretSource to set this as PasswordSecret in Source/Target MirrorMaker2 spec
PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
passwordSecretSource.setSecretName(kafkaUserSourceName);
passwordSecretSource.setPassword("password");
PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
passwordSecretTarget.setSecretName(kafkaUserTargetName);
passwordSecretTarget.setPassword("password");
// Initialize CertSecretSource with certificate and secret names for source
CertSecretSource certSecretSource = new CertSecretSource();
certSecretSource.setCertificate("ca.crt");
certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
// Initialize CertSecretSource with certificate and secret names for target
CertSecretSource certSecretTarget = new CertSecretSource();
certSecretTarget.setCertificate("ca.crt");
certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
// Deploy client
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(testStorage.getNamespaceName(), true, testStorage.getKafkaClientsName(), userSource, userTarget).build());
String kafkaClientsPodName = kubeClient().listPodsByPrefixInName(testStorage.getKafkaClientsName()).get(0).getMetadata().getName();
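// Build the MirrorMaker2 cluster specs for source and target with SCRAM-SHA-512 client authentication and TLS trust in each cluster's CA certificate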
KafkaMirrorMaker2ClusterSpec sourceClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
    .withAlias(kafkaClusterSourceName)
    .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterSourceName))
    .withNewKafkaClientAuthenticationScramSha512().withUsername(kafkaUserSourceName).withPasswordSecret(passwordSecretSource).endKafkaClientAuthenticationScramSha512()
    .withNewTls().withTrustedCertificates(certSecretSource).endTls()
    .build();
KafkaMirrorMaker2ClusterSpec targetClusterWithScramSha512Auth = new KafkaMirrorMaker2ClusterSpecBuilder()
    .withAlias(kafkaClusterTargetName)
    .withBootstrapServers(KafkaResources.tlsBootstrapAddress(kafkaClusterTargetName))
    .withNewKafkaClientAuthenticationScramSha512().withUsername(kafkaUserTargetName).withPasswordSecret(passwordSecretTarget).endKafkaClientAuthenticationScramSha512()
    .withNewTls().withTrustedCertificates(certSecretTarget).endTls()
    .addToConfig("config.storage.replication.factor", -1)
    .addToConfig("offset.storage.replication.factor", -1)
    .addToConfig("status.storage.replication.factor", -1)
    .build();
resourceManager.createResource(extensionContext, KafkaMirrorMaker2Templates.kafkaMirrorMaker2(testStorage.getClusterName(), kafkaClusterTargetName, kafkaClusterSourceName, 1, true)
    .editSpec()
        .withClusters(targetClusterWithScramSha512Auth, sourceClusterWithScramSha512Auth)
        .editFirstMirror().editSourceConnector().addToConfig("refresh.topics.interval.seconds", 1).endSourceConnector().endMirror()
    .endSpec()
    .build());
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsPodName).withTopicName(topicSourceNameA).withNamespaceName(testStorage.getNamespaceName())
    .withClusterName(kafkaClusterSourceName).withKafkaUsername(kafkaUserSourceName).withMessageCount(messagesCount)
    .withListenerName(kafkaTlsScramListenerName)
    .build();
int sent = internalKafkaClient.sendMessagesTls();
internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetNameA).withClusterName(kafkaClusterTargetName).withKafkaUsername(kafkaUserTargetName).withConsumerGroupName(ClientUtils.generateRandomConsumerGroup()).build();
LOGGER.info("Now messages should be mirrored to target topic and cluster");
internalKafkaClient.checkProducedAndConsumedMessages(sent, internalKafkaClient.receiveMessagesTls());
LOGGER.info("Messages successfully mirrored");
String kmm2DeploymentName = KafkaMirrorMaker2Resources.deploymentName(testStorage.getClusterName());
Map<String, String> mmSnapshot = DeploymentUtils.depSnapshot(testStorage.getNamespaceName(), kmm2DeploymentName);
LOGGER.info("Changing KafkaUser sha-password on KMM2 Source and make sure it rolled");
Secret passwordSource = new SecretBuilder().withNewMetadata().withName(kafkaUserSourceName).endMetadata().addToData("password", "c291cmNlLXBhc3N3b3Jk").build();
kubeClient().patchSecret(testStorage.getNamespaceName(), kafkaUserSourceName, passwordSource);
mmSnapshot = DeploymentUtils.waitTillDepHasRolled(testStorage.getNamespaceName(), kmm2DeploymentName, 1, mmSnapshot);
LOGGER.info("Changing KafkaUser sha-password on KMM2 Target");
Secret passwordTarget = new SecretBuilder().withNewMetadata().withName(kafkaUserTargetName).endMetadata().addToData("password", "dGFyZ2V0LXBhc3N3b3Jk").build();
kubeClient().patchSecret(testStorage.getNamespaceName(), kafkaUserTargetName, passwordTarget);
DeploymentUtils.waitTillDepHasRolled(testStorage.getNamespaceName(), kmm2DeploymentName, 1, mmSnapshot);
LOGGER.info("Recreate kafkaClients pod with new passwords.");
resourceManager.deleteResource(kubeClient().namespace(testStorage.getNamespaceName()).getDeployment(testStorage.getKafkaClientsName()));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(testStorage.getNamespaceName(), true, testStorage.getKafkaClientsName(), userSource, userTarget).build());
kafkaClientsPodName = kubeClient().listPodsByPrefixInName(testStorage.getKafkaClientsName()).get(0).getMetadata().getName();
internalKafkaClient = internalKafkaClient.toBuilder().withUsingPodName(kafkaClientsPodName).withTopicName(topicSourceNameB).withClusterName(kafkaClusterSourceName).withKafkaUsername(kafkaUserSourceName).withListenerName(kafkaTlsScramListenerName).build();
sent = internalKafkaClient.sendMessagesTls();
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTargetNameB).withClusterName(kafkaClusterTargetName).withKafkaUsername(kafkaUserTargetName).withConsumerGroupName(ClientUtils.generateRandomConsumerGroup()).build();
LOGGER.info("Now messages should be mirrored to target topic and cluster");
internalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(sent);
LOGGER.info("Messages successfully mirrored");
}
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi by strimzi.
From the class MirrorMakerIsolatedST, method testMirrorMakerTlsScramSha.
/**
* Test mirroring messages by MirrorMaker over a TLS transport using SCRAM-SHA-512 authentication.
*/
@ParallelNamespaceTest
@SuppressWarnings("checkstyle:methodlength")
void testMirrorMakerTlsScramSha(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
String kafkaUserSource = clusterName + "-my-user-source";
String kafkaUserTarget = clusterName + "-my-user-target";
// Deploy source kafka with tls listener and SCRAM-SHA authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
    .editSpec().editKafka()
        .withListeners(new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093).withType(KafkaListenerType.INTERNAL)
            .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build())
    .endKafka().endSpec()
    .build());
// Deploy target kafka with tls listener and SCRAM-SHA authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
    .editSpec().editKafka()
        .withListeners(new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093).withType(KafkaListenerType.INTERNAL)
            .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build())
    .endKafka().endSpec()
    .build());
// Deploy topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicName).build());
// Create Kafka user for the source cluster
KafkaUser userSource = KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSource).build();
// Create Kafka user for the target cluster
KafkaUser userTarget = KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTarget).build();
resourceManager.createResource(extensionContext, userSource);
resourceManager.createResource(extensionContext, userTarget);
// Initialize PasswordSecretSource to set this as PasswordSecret in Mirror Maker spec
PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
passwordSecretSource.setSecretName(kafkaUserSource);
passwordSecretSource.setPassword("password");
// Initialize PasswordSecretSource to set this as PasswordSecret in Mirror Maker spec
PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
passwordSecretTarget.setSecretName(kafkaUserTarget);
passwordSecretTarget.setPassword("password");
// Initialize CertSecretSource with certificate and secret names for consumer
CertSecretSource certSecretSource = new CertSecretSource();
certSecretSource.setCertificate("ca.crt");
certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
// Initialize CertSecretSource with certificate and secret names for producer
CertSecretSource certSecretTarget = new CertSecretSource();
certSecretTarget.setCertificate("ca.crt");
certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
// Deploy client
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, clusterName + "-" + Constants.KAFKA_CLIENTS, userSource, userTarget).build());
final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
String baseTopic = mapWithTestTopics.get(extensionContext.getDisplayName());
String topicTestName1 = baseTopic + "-test-1";
String topicTestName2 = baseTopic + "-test-2";
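// Create the two additional test topics used in the broker availability checks below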
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName1).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName2).build());
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsPodName).withTopicName(topicTestName1).withNamespaceName(namespaceName)
    .withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).withMessageCount(messagesCount)
    .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .build();
// Check brokers availability
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicTestName2).withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
// Deploy Mirror Maker with TLS and ScramSha512
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterSourceName, kafkaClusterTargetName, ClientUtils.generateRandomConsumerGroup(), 1, true)
    .editSpec()
        .editConsumer()
            .withNewKafkaClientAuthenticationScramSha512().withUsername(kafkaUserSource).withPasswordSecret(passwordSecretSource).endKafkaClientAuthenticationScramSha512()
            .withNewTls().withTrustedCertificates(certSecretSource).endTls()
        .endConsumer()
        .editProducer()
            .withNewKafkaClientAuthenticationScramSha512().withUsername(kafkaUserTarget).withPasswordSecret(passwordSecretTarget).endKafkaClientAuthenticationScramSha512()
            .withNewTls().withTrustedCertificates(certSecretTarget).endTls()
        .endProducer()
    .endSpec()
    .build());
internalKafkaClient = internalKafkaClient.toBuilder().withTopicName(topicName).withClusterName(kafkaClusterSourceName).withKafkaUsername(userSource.getMetadata().getName()).build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
InternalKafkaClient newInternalKafkaClient = internalKafkaClient.toBuilder().withClusterName(kafkaClusterTargetName).withKafkaUsername(userTarget.getMetadata().getName()).build();
newInternalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(internalKafkaClient.getMessageCount());
}
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi-kafka-operator by strimzi.
From the class KafkaBrokerConfigurationBuilder, method configureAuthentication.
/**
* Configures authentication for a Kafka listener. This method is used only internally.
*
* @param listenerName Name of the listener as used in the Kafka broker configuration file.
* @param securityProtocol List of security protocols enabled in the broker. The method will add the security
*                         protocol configuration for this listener to this list (e.g. SASL_PLAINTEXT).
* @param tls              Flag indicating whether this listener uses TLS or not
* @param auth             The authentication configuration from the Kafka CR
*/
private void configureAuthentication(String listenerName, List<String> securityProtocol, boolean tls, KafkaListenerAuthentication auth) {
String listenerNameInProperty = listenerName.toLowerCase(Locale.ENGLISH);
String listenerNameInEnvVar = listenerName.replace("-", "_");
if (auth instanceof KafkaListenerAuthenticationOAuth) {
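// OAuth listeners use a SASL-based security protocol and the Strimzi OAuth callback handlers for the OAUTHBEARER and/or PLAIN mechanisms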
securityProtocol.add(String.format("%s:%s", listenerName, getSecurityProtocol(tls, true)));
KafkaListenerAuthenticationOAuth oauth = (KafkaListenerAuthenticationOAuth) auth;
List<String> options = new ArrayList<>();
options.addAll(getOAuthOptions(oauth));
if (oauth.getClientSecret() != null) {
options.add("oauth.client.secret=\"${STRIMZI_" + listenerNameInEnvVar + "_OAUTH_CLIENT_SECRET}\"");
}
if (oauth.getTlsTrustedCertificates() != null && oauth.getTlsTrustedCertificates().size() > 0) {
options.add(String.format("oauth.ssl.truststore.location=\"/tmp/kafka/oauth-%s.truststore.p12\"", listenerNameInProperty));
options.add("oauth.ssl.truststore.password=\"${CERTS_STORE_PASSWORD}\"");
options.add("oauth.ssl.truststore.type=\"PKCS12\"");
}
StringBuilder enabledMechanisms = new StringBuilder();
if (oauth.isEnableOauthBearer()) {
writer.println(String.format("listener.name.%s.oauthbearer.sasl.server.callback.handler.class=io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler", listenerNameInProperty));
writer.println(String.format("listener.name.%s.oauthbearer.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required unsecuredLoginStringClaim_sub=\"thePrincipalName\" %s;", listenerNameInProperty, String.join(" ", options)));
enabledMechanisms.append("OAUTHBEARER");
}
if (oauth.isEnablePlain()) {
addOption(options, ServerPlainConfig.OAUTH_TOKEN_ENDPOINT_URI, oauth.getTokenEndpointUri());
writer.println(String.format("listener.name.%s.plain.sasl.server.callback.handler.class=io.strimzi.kafka.oauth.server.plain.JaasServerOauthOverPlainValidatorCallbackHandler", listenerNameInProperty));
writer.println(String.format("listener.name.%s.plain.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required %s;", listenerNameInProperty, String.join(" ", options)));
if (enabledMechanisms.length() > 0) {
enabledMechanisms.append(",");
}
enabledMechanisms.append("PLAIN");
}
writer.println(String.format("listener.name.%s.sasl.enabled.mechanisms=%s", listenerNameInProperty, enabledMechanisms));
if (oauth.getMaxSecondsWithoutReauthentication() != null) {
writer.println(String.format("listener.name.%s.connections.max.reauth.ms=%s", listenerNameInProperty, 1000 * oauth.getMaxSecondsWithoutReauthentication()));
}
writer.println();
} else if (auth instanceof KafkaListenerAuthenticationScramSha512) {
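// SCRAM-SHA-512 listeners get a SASL-based security protocol, a ScramLoginModule JAAS config and SCRAM-SHA-512 as the only enabled mechanism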
securityProtocol.add(String.format("%s:%s", listenerName, getSecurityProtocol(tls, true)));
writer.println(String.format("listener.name.%s.scram-sha-512.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required;", listenerNameInProperty));
writer.println(String.format("listener.name.%s.sasl.enabled.mechanisms=SCRAM-SHA-512", listenerNameInProperty));
writer.println();
} else if (auth instanceof KafkaListenerAuthenticationTls) {
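// TLS client authentication requires client certificates and points the listener at the clients truststore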
securityProtocol.add(String.format("%s:%s", listenerName, getSecurityProtocol(tls, false)));
writer.println(String.format("listener.name.%s.ssl.client.auth=required", listenerNameInProperty));
writer.println(String.format("listener.name.%s.ssl.truststore.location=/tmp/kafka/clients.truststore.p12", listenerNameInProperty));
writer.println(String.format("listener.name.%s.ssl.truststore.password=${CERTS_STORE_PASSWORD}", listenerNameInProperty));
writer.println(String.format("listener.name.%s.ssl.truststore.type=PKCS12", listenerNameInProperty));
writer.println();
} else if (auth instanceof KafkaListenerAuthenticationCustom) {
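// Custom authentication passes the user-provided listener configuration through as listener-scoped properties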
KafkaListenerAuthenticationCustom customAuth = (KafkaListenerAuthenticationCustom) auth;
securityProtocol.add(String.format("%s:%s", listenerName, getSecurityProtocol(tls, customAuth.isSasl())));
KafkaListenerCustomAuthConfiguration config = new KafkaListenerCustomAuthConfiguration(reconciliation, customAuth.getListenerConfig().entrySet());
config.asOrderedProperties().asMap().forEach((key, value) -> writer.println(String.format("listener.name.%s.%s=%s", listenerNameInProperty, key, value)));
} else {
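// No authentication configured: only the security protocol mapping (without SASL) is added for this listener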
securityProtocol.add(String.format("%s:%s", listenerName, getSecurityProtocol(tls, false)));
}
}
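For orientation, the SCRAM-SHA-512 branch above writes broker properties of the following shape. The listener name tls-9093 is only an assumed example (the method lowercases whatever listener name it is given); the property keys and values are taken directly from the format strings in the code:
listener.name.tls-9093.scram-sha-512.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required;
listener.name.tls-9093.sasl.enabled.mechanisms=SCRAM-SHA-512
In addition, the listener-to-protocol mapping passed in through securityProtocol receives an entry pairing the original listener name with SASL_SSL when TLS is enabled, or a SASL_PLAINTEXT variant when it is not (assuming getSecurityProtocol follows the usual Kafka protocol names, as the Javadoc example suggests).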
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi-kafka-operator by strimzi.
From the class ConnectIsolatedST, method testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication.
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testSecretsWithKafkaConnectWithTlsAndScramShaAuthentication(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
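// Deploy Kafka with an internal TLS listener secured by SCRAM-SHA-512 authentication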
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec().editKafka()
        .withListeners(new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093).withType(KafkaListenerType.INTERNAL)
            .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build())
    .endKafka().endSpec()
    .build());
KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName).build();
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, kafkaUser);
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .editSpec()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withNewTls()
            .addNewTrustedCertificate().withSecretName(clusterName + "-cluster-ca-cert").withCertificate("ca.crt").endTrustedCertificate()
        .endTls()
        .withBootstrapServers(clusterName + "-kafka-bootstrap:9093")
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(userName)
            .withNewPasswordSecret().withSecretName(userName).withPassword("password").endPasswordSecret()
        .endKafkaClientAuthenticationScramSha512()
    .endSpec()
    .build());
final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
final String kafkaConnectLogs = kubeClient(namespaceName).logs(kafkaConnectPodName);
final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
assertThat(kafkaConnectLogs, not(containsString("ERROR")));
LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", kafkaClientsPodName, topicName);
KafkaConnectorUtils.createFileSinkConnector(namespaceName, kafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, namespaceName, 8083));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, kafkaClientsName + "-second", kafkaUser).build());
final String kafkaClientsSecondPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName + "-second").get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsSecondPodName).withTopicName(topicName).withNamespaceName(namespaceName)
    .withClusterName(clusterName).withKafkaUsername(userName).withMessageCount(MESSAGE_COUNT)
    .withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
    .build();
internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesTls(), internalKafkaClient.receiveMessagesTls());
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
Use of io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512 in project strimzi-kafka-operator by strimzi.
From the class ConnectIsolatedST, method testConnectScramShaAuthWithWeirdUserName.
@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(CONNECTOR_OPERATOR)
@ParallelNamespaceTest
void testConnectScramShaAuthWithWeirdUserName(ExtensionContext extensionContext) {
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
// Create weird named user with . and more than 64 chars -> SCRAM-SHA
final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasdsadasdasdasdasdgasgadfasdad";
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec().editKafka()
        .withListeners(
            new GenericKafkaListenerBuilder().withName(Constants.TLS_LISTENER_DEFAULT_NAME).withPort(9093).withType(KafkaListenerType.INTERNAL)
                .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build(),
            new GenericKafkaListenerBuilder().withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME).withPort(9094).withType(KafkaListenerType.NODEPORT)
                .withTls(true).withAuth(new KafkaListenerAuthenticationScramSha512()).build())
    .endKafka().endSpec()
    .build());
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, weirdUserName).build());
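// Deploy KafkaConnect authenticating against the TLS bootstrap address with SCRAM-SHA-512 as the weird-named user, trusting the cluster CA certificate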
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .editMetadata()
        .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
    .endMetadata()
    .editOrNewSpec()
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(clusterName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(weirdUserName)
            .withPasswordSecret(new PasswordSecretSourceBuilder().withSecretName(weirdUserName).withPassword("password").build())
        .endKafkaClientAuthenticationScramSha512()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withNewTls()
            .withTrustedCertificates(new CertSecretSourceBuilder().withCertificate("ca.crt").withSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName)).build())
        .endTls()
    .endSpec()
    .build());
testConnectAuthorizationWithWeirdUserName(extensionContext, clusterName, weirdUserName, SecurityProtocol.SASL_SSL, topicName);
}