Use of io.strimzi.api.kafka.model.PasswordSecretSourceBuilder in project strimzi by strimzi.
From class ConnectIsolatedST, method testKafkaConnectWithPlainAndScramShaAuthentication.
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
void testKafkaConnectWithPlainAndScramShaAuthentication(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
// Deploy Kafka with the plain (9092) listener secured by SCRAM-SHA-512 authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                .withPort(9092)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(false)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName).build();
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, userName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .withNewSpec()
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(clusterName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(userName)
            .withPasswordSecret(new PasswordSecretSourceBuilder()
                .withSecretName(userName)
                .withPassword("password")
                .build())
        .endKafkaClientAuthenticationScramSha512()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withVersion(Environment.ST_KAFKA_VERSION)
        .withReplicas(1)
    .endSpec()
    .build());
final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
final String kafkaConnectLogs = kubeClient(namespaceName).logs(kafkaConnectPodName);
final String kafkaClientsPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodName);
LOGGER.info("Verifying that KafkaConnect pod logs don't contain ERRORs");
assertThat(kafkaConnectLogs, not(containsString("ERROR")));
LOGGER.info("Creating FileStreamSink connector via pod {} with topic {}", kafkaClientsPodName, topicName);
KafkaConnectorUtils.createFileSinkConnector(namespaceName, kafkaClientsPodName, topicName, Constants.DEFAULT_SINK_FILE_PATH, KafkaConnectResources.url(clusterName, namespaceName, 8083));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, false, kafkaClientsName + "-second", kafkaUser).build());
final String kafkaClientsSecondPodName = kubeClient(namespaceName).listPodsByPrefixInName(kafkaClientsName + "-second").get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
    .withUsingPodName(kafkaClientsSecondPodName)
    .withTopicName(topicName)
    .withNamespaceName(namespaceName)
    .withClusterName(clusterName)
    .withKafkaUsername(userName)
    .withMessageCount(MESSAGE_COUNT)
    .withListenerName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
    .build();
internalKafkaClient.checkProducedAndConsumedMessages(internalKafkaClient.sendMessagesPlain(), internalKafkaClient.receiveMessagesPlain());
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, Constants.DEFAULT_SINK_FILE_PATH, "99");
}
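For reference, a minimal standalone sketch of the PasswordSecretSource built above: withSecretName() points at the Kubernetes Secret holding the credential (the test reuses the Secret that the User Operator generates for the KafkaUser, which carries the user's name) and withPassword() names the key inside that Secret. The secret name below is illustrative, not taken from the test.

import io.strimzi.api.kafka.model.PasswordSecretSource;
import io.strimzi.api.kafka.model.PasswordSecretSourceBuilder;

// Illustrative only: "my-scram-user" stands in for the generated KafkaUser/Secret name.
PasswordSecretSource connectPassword = new PasswordSecretSourceBuilder()
    .withSecretName("my-scram-user")   // Secret created by the User Operator for the KafkaUser
    .withPassword("password")          // key within that Secret, as in the test above
    .build();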
Use of io.strimzi.api.kafka.model.PasswordSecretSourceBuilder in project strimzi by strimzi.
From class ConnectIsolatedST, method testKafkaConnectWithScramShaAuthenticationRolledAfterPasswordChanged.
@ParallelNamespaceTest
@Tag(INTERNAL_CLIENTS_USED)
// Changing the password in the Secret should trigger a rolling update of the Connect pod
void testKafkaConnectWithScramShaAuthenticationRolledAfterPasswordChanged(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String userName = mapWithTestUsers.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(new GenericKafkaListenerBuilder()
                .withName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
                .withPort(9092)
                .withType(KafkaListenerType.INTERNAL)
                .withTls(false)
                .withAuth(new KafkaListenerAuthenticationScramSha512())
                .build())
        .endKafka()
    .endSpec()
    .build());
Secret passwordSecret = new SecretBuilder()
    .withNewMetadata()
        .withName("custom-pwd-secret")
    .endMetadata()
    .addToData("pwd", "MTIzNDU2Nzg5")
    .build();
kubeClient(namespaceName).createSecret(passwordSecret);
KafkaUser kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
    .editSpec()
        .withNewKafkaUserScramSha512ClientAuthentication()
            .withNewPassword()
                .withNewValueFrom()
                    .withNewSecretKeyRef("pwd", "custom-pwd-secret", false)
                .endValueFrom()
            .endPassword()
        .endKafkaUserScramSha512ClientAuthentication()
    .endSpec()
    .build();
resourceManager.createResource(extensionContext, kafkaUser);
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, userName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .withNewSpec()
        .withBootstrapServers(KafkaResources.plainBootstrapAddress(clusterName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(userName)
            .withPasswordSecret(new PasswordSecretSourceBuilder()
                .withSecretName(userName)
                .withPassword("password")
                .build())
        .endKafkaClientAuthenticationScramSha512()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withVersion(Environment.ST_KAFKA_VERSION)
        .withReplicas(1)
    .endSpec()
    .build());
final String kafkaConnectPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodName);
Map<String, String> connectSnapshot = DeploymentUtils.depSnapshot(namespaceName, KafkaConnectResources.deploymentName(clusterName));
String newPassword = "bmVjb0ppbmVob05lelNwcmF2bnlQYXNzd29yZA==";
Secret newPasswordSecret = new SecretBuilder()
    .withNewMetadata()
        .withName("new-custom-pwd-secret")
    .endMetadata()
    .addToData("pwd", newPassword)
    .build();
kubeClient(namespaceName).createSecret(newPasswordSecret);
kafkaUser = KafkaUserTemplates.scramShaUser(clusterName, userName)
    .editSpec()
        .withNewKafkaUserScramSha512ClientAuthentication()
            .withNewPassword()
                .withNewValueFrom()
                    .withNewSecretKeyRef("pwd", "new-custom-pwd-secret", false)
                .endValueFrom()
            .endPassword()
        .endKafkaUserScramSha512ClientAuthentication()
    .endSpec()
    .build();
resourceManager.createResource(extensionContext, kafkaUser);
DeploymentUtils.waitTillDepHasRolled(namespaceName, KafkaConnectResources.deploymentName(clusterName), 1, connectSnapshot);
final String kafkaConnectPodNameAfterRU = kubeClient(namespaceName).listPodsByPrefixInName(KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
KafkaConnectUtils.waitUntilKafkaConnectRestApiIsAvailable(namespaceName, kafkaConnectPodNameAfterRU);
}
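The custom password Secrets above store their values under data, which must be base64-encoded; "MTIzNDU2Nzg5" is simply "123456789". A small sketch of producing such a value with the standard JDK (not part of the test itself):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

// Secret.data values must be base64-encoded plain text.
String encodedPwd = Base64.getEncoder()
    .encodeToString("123456789".getBytes(StandardCharsets.UTF_8));   // -> "MTIzNDU2Nzg5"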
Use of io.strimzi.api.kafka.model.PasswordSecretSourceBuilder in project strimzi-kafka-operator by strimzi.
From class ConnectIsolatedST, method testConnectScramShaAuthWithWeirdUserName.
@Tag(NODEPORT_SUPPORTED)
@Tag(EXTERNAL_CLIENTS_USED)
@Tag(CONNECTOR_OPERATOR)
@ParallelNamespaceTest
void testConnectScramShaAuthWithWeirdUserName(ExtensionContext extensionContext) {
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
// Create a user with an unusual name: it contains '.' and is longer than 64 characters -> SCRAM-SHA
final String weirdUserName = "jjglmahyijoambryleyxjjglmahy.ijoambryleyxjjglmahyijoambryleyxasdsadasdasdasdasdgasgadfasdad";
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3)
    .editSpec()
        .editKafka()
            .withListeners(
                new GenericKafkaListenerBuilder()
                    .withName(Constants.TLS_LISTENER_DEFAULT_NAME)
                    .withPort(9093)
                    .withType(KafkaListenerType.INTERNAL)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build(),
                new GenericKafkaListenerBuilder()
                    .withName(Constants.EXTERNAL_LISTENER_DEFAULT_NAME)
                    .withPort(9094)
                    .withType(KafkaListenerType.NODEPORT)
                    .withTls(true)
                    .withAuth(new KafkaListenerAuthenticationScramSha512())
                    .build())
        .endKafka()
    .endSpec()
    .build());
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(false, kafkaClientsName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(clusterName, topicName).build());
resourceManager.createResource(extensionContext, KafkaUserTemplates.scramShaUser(clusterName, weirdUserName).build());
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1)
    .editMetadata()
        .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
    .endMetadata()
    .editOrNewSpec()
        .withBootstrapServers(KafkaResources.tlsBootstrapAddress(clusterName))
        .withNewKafkaClientAuthenticationScramSha512()
            .withUsername(weirdUserName)
            .withPasswordSecret(new PasswordSecretSourceBuilder()
                .withSecretName(weirdUserName)
                .withPassword("password")
                .build())
        .endKafkaClientAuthenticationScramSha512()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .withNewTls()
            .withTrustedCertificates(new CertSecretSourceBuilder()
                .withCertificate("ca.crt")
                .withSecretName(KafkaResources.clusterCaCertificateSecretName(clusterName))
                .build())
        .endTls()
    .endSpec()
    .build());
testConnectAuthorizationWithWeirdUserName(extensionContext, clusterName, weirdUserName, SecurityProtocol.SASL_SSL, topicName);
}
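The TLS side of the same Connect spec, shown standalone: CertSecretSourceBuilder points the Connect client at the cluster CA certificate Secret published by the Cluster Operator, under the ca.crt key. The cluster name below is illustrative.

import io.strimzi.api.kafka.model.CertSecretSource;
import io.strimzi.api.kafka.model.CertSecretSourceBuilder;
import io.strimzi.api.kafka.model.KafkaResources;

// Trust the cluster CA certificate generated by the Cluster Operator.
CertSecretSource clusterCaCert = new CertSecretSourceBuilder()
    .withCertificate("ca.crt")                                                    // key inside the Secret
    .withSecretName(KafkaResources.clusterCaCertificateSecretName("my-cluster"))  // "my-cluster-cluster-ca-cert"
    .build();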
Use of io.strimzi.api.kafka.model.PasswordSecretSourceBuilder in project cos-fleetshard by bf2fc6cc711aee1a0c2a.
From class DebeziumOperandController, method doReify.
@Override
protected List<HasMetadata> doReify(ManagedConnector connector, DebeziumShardMetadata shardMetadata,
        ConnectorConfiguration<ObjectNode> connectorConfiguration, ServiceAccountSpec serviceAccountSpec) {
final Map<String, String> secretsData = createSecretsData(connectorConfiguration.getConnectorSpec());
final Secret secret = new SecretBuilder()
    .withMetadata(new ObjectMetaBuilder()
        .withName(connector.getMetadata().getName() + Resources.CONNECTOR_SECRET_SUFFIX)
        .build())
    .addToData(EXTERNAL_CONFIG_FILE, asBytesBase64(secretsData))
    .addToData(KAFKA_CLIENT_SECRET_KEY, serviceAccountSpec.getClientSecret())
    .build();
final KafkaConnectSpecBuilder kcsb = new KafkaConnectSpecBuilder()
    .withReplicas(1)
    .withBootstrapServers(connector.getSpec().getDeployment().getKafka().getUrl())
    .withKafkaClientAuthenticationPlain(new KafkaClientAuthenticationPlainBuilder()
        .withUsername(serviceAccountSpec.getClientId())
        .withPasswordSecret(new PasswordSecretSourceBuilder()
            .withSecretName(secret.getMetadata().getName())
            .withPassword(KAFKA_CLIENT_SECRET_KEY)
            .build())
        .build())
    .addToConfig(DebeziumConstants.DEFAULT_CONFIG_OPTIONS)
    .addToConfig(new TreeMap<>(configuration.kafkaConnect().config()))
    .addToConfig("group.id", connector.getMetadata().getName())
    .addToConfig("key.converter", configuration.keyConverter())
    .addToConfig("value.converter", configuration.valueConverter())
    .addToConfig("offset.storage.topic", connector.getMetadata().getName() + "-offset")
    .addToConfig("config.storage.topic", connector.getMetadata().getName() + "-config")
    .addToConfig("status.storage.topic", connector.getMetadata().getName() + "-status")
    .addToConfig("connector.secret.name", secret.getMetadata().getName())
    .addToConfig("connector.secret.checksum", Secrets.computeChecksum(secret))
    .withTls(new ClientTlsBuilder()
        .withTrustedCertificates(Collections.emptyList())
        .build())
    .withExternalConfiguration(new ExternalConfigurationBuilder()
        .addToVolumes(new ExternalConfigurationVolumeSourceBuilder()
            .withName(EXTERNAL_CONFIG_DIRECTORY)
            .withSecret(new SecretVolumeSourceBuilder()
                .withSecretName(secret.getMetadata().getName())
                .build())
            .build())
        .build());
kcsb.withImage(shardMetadata.getContainerImage());
final KafkaConnect kc = new KafkaConnectBuilder()
    .withApiVersion(Constants.RESOURCE_GROUP_NAME + "/" + KafkaConnect.CONSUMED_VERSION)
    .withMetadata(new ObjectMetaBuilder()
        .withName(connector.getMetadata().getName())
        .addToAnnotations(STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
        .build())
    .withSpec(kcsb.build())
    .build();
final KafkaConnector kctr = new KafkaConnectorBuilder()
    .withApiVersion(Constants.RESOURCE_GROUP_NAME + "/" + KafkaConnector.CONSUMED_VERSION)
    .withMetadata(new ObjectMetaBuilder()
        .withName(connector.getMetadata().getName())
        .addToLabels(STRIMZI_DOMAIN + "cluster", connector.getMetadata().getName())
        .build())
    .withSpec(new KafkaConnectorSpecBuilder()
        .withClassName(shardMetadata.getConnectorClass())
        .withTasksMax(1)
        .withPause(false)
        .withConfig(createConfig(configuration, connectorConfiguration.getConnectorSpec()))
        .build())
    .build();
return List.of(secret, kc, kctr);
}
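For context, the authentication-relevant portion of the KafkaConnectSpec assembled in doReify() reduces to the sketch below. It assumes the io.strimzi.api.kafka.model package layout used above; the bootstrap address, username, and secret/key names are illustrative stand-ins for the connector-derived values.

import io.strimzi.api.kafka.model.KafkaConnectSpec;
import io.strimzi.api.kafka.model.KafkaConnectSpecBuilder;
import io.strimzi.api.kafka.model.PasswordSecretSourceBuilder;
import io.strimzi.api.kafka.model.authentication.KafkaClientAuthenticationPlainBuilder;

// SASL/PLAIN authentication whose password is read from a Kubernetes Secret:
// withSecretName() names the per-connector Secret, withPassword() the key inside it.
KafkaConnectSpec authSketch = new KafkaConnectSpecBuilder()
    .withBootstrapServers("my-kafka-bootstrap:443")               // illustrative
    .withKafkaClientAuthenticationPlain(new KafkaClientAuthenticationPlainBuilder()
        .withUsername("service-account-client-id")                // serviceAccountSpec.getClientId()
        .withPasswordSecret(new PasswordSecretSourceBuilder()
            .withSecretName("my-connector-secret")                // secret.getMetadata().getName()
            .withPassword("kafka-client-secret")                  // KAFKA_CLIENT_SECRET_KEY
            .build())
        .build())
    .build();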