use of io.strimzi.api.kafka.model.CertAndKeySecretSource in projects strimzi and strimzi-kafka-operator by strimzi.
the class KafkaBrokerConfigurationBuilder method withListeners.
/**
 * Configures the listeners based on the listeners enabled by the users in the Kafka CR.
 *
 * @param clusterName                   Name of the cluster (important for the advertised hostnames)
 * @param namespace                     Namespace (important for generating the advertised hostname)
 * @param kafkaListeners                The listeners configuration from the Kafka CR
 * @param controlPlaneListenerActive    Activates the control plane listener (the listener is always configured,
 *                                      but this flag tells Kafka to use it for control plane communication)
 *
 * @return  Returns the builder instance
 */
public KafkaBrokerConfigurationBuilder withListeners(String clusterName, String namespace, List<GenericKafkaListener> kafkaListeners, boolean controlPlaneListenerActive) {
    List<String> listeners = new ArrayList<>();
    List<String> advertisedListeners = new ArrayList<>();
    List<String> securityProtocol = new ArrayList<>();

    // Control Plane listener
    listeners.add("CONTROLPLANE-9090://0.0.0.0:9090");
    advertisedListeners.add(String.format("CONTROLPLANE-9090://%s:9090",
            // Pod name constructed to be templatable for each individual ordinal
            DnsNameGenerator.podDnsNameWithoutClusterDomain(namespace, KafkaResources.brokersServiceName(clusterName), KafkaResources.kafkaStatefulSetName(clusterName) + "-${STRIMZI_BROKER_ID}")));
    securityProtocol.add("CONTROLPLANE-9090:SSL");
    configureControlPlaneListener();

    // Replication listener
    listeners.add("REPLICATION-9091://0.0.0.0:9091");
    advertisedListeners.add(String.format("REPLICATION-9091://%s:9091",
            // Pod name constructed to be templatable for each individual ordinal
            DnsNameGenerator.podDnsNameWithoutClusterDomain(namespace, KafkaResources.brokersServiceName(clusterName), KafkaResources.kafkaStatefulSetName(clusterName) + "-${STRIMZI_BROKER_ID}")));
    securityProtocol.add("REPLICATION-9091:SSL");
    configureReplicationListener();

    for (GenericKafkaListener listener : kafkaListeners) {
        int port = listener.getPort();
        String listenerName = ListenersUtils.identifier(listener).toUpperCase(Locale.ENGLISH);
        String envVarListenerName = ListenersUtils.envVarIdentifier(listener);

        printSectionHeader("Listener configuration: " + listenerName);

        listeners.add(listenerName + "://0.0.0.0:" + port);
        advertisedListeners.add(String.format("%s://${STRIMZI_%s_ADVERTISED_HOSTNAME}:${STRIMZI_%s_ADVERTISED_PORT}", listenerName, envVarListenerName, envVarListenerName));

        configureAuthentication(listenerName, securityProtocol, listener.isTls(), listener.getAuth());
        configureListener(listenerName, listener.getConfiguration());

        if (listener.isTls()) {
            CertAndKeySecretSource customServerCert = null;
            if (listener.getConfiguration() != null) {
                customServerCert = listener.getConfiguration().getBrokerCertChainAndKey();
            }
            configureTls(listenerName, customServerCert);
        }

        writer.println();
    }

    configureOAuthPrincipalBuilderIfNeeded(writer, kafkaListeners);

    printSectionHeader("Common listener configuration");
    writer.println("listeners=" + String.join(",", listeners));
    writer.println("advertised.listeners=" + String.join(",", advertisedListeners));
    writer.println("listener.security.protocol.map=" + String.join(",", securityProtocol));

    if (controlPlaneListenerActive) {
        writer.println("control.plane.listener.name=CONTROLPLANE-9090");
    }

    writer.println("inter.broker.listener.name=REPLICATION-9091");
    writer.println("sasl.enabled.mechanisms=");
    writer.println("ssl.secure.random.implementation=SHA1PRNG");
    writer.println("ssl.endpoint.identification.algorithm=HTTPS");
    writer.println();

    return this;
}
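To illustrate the per-listener entries generated in the loop above, the following minimal sketch traces a single listener through the same string templates. The identifier values "TLS-9093" and "TLS_9093" are assumptions modelled on the CONTROLPLANE-9090/REPLICATION-9091 naming pattern for a listener named "tls" on port 9093; they are not taken from ListenersUtils itself.

// Minimal, self-contained sketch (assumed identifiers) of the per-listener entries:
public class ListenerEntrySketch {
    public static void main(String[] args) {
        // Assumed values; in the builder they come from ListenersUtils.identifier(listener)
        // and ListenersUtils.envVarIdentifier(listener).
        String listenerName = "TLS-9093";
        String envVarListenerName = "TLS_9093";
        int port = 9093;

        String listenerEntry = listenerName + "://0.0.0.0:" + port;
        String advertisedEntry = String.format(
                "%s://${STRIMZI_%s_ADVERTISED_HOSTNAME}:${STRIMZI_%s_ADVERTISED_PORT}",
                listenerName, envVarListenerName, envVarListenerName);

        System.out.println(listenerEntry);   // TLS-9093://0.0.0.0:9093
        System.out.println(advertisedEntry); // TLS-9093://${STRIMZI_TLS_9093_ADVERTISED_HOSTNAME}:${STRIMZI_TLS_9093_ADVERTISED_PORT}
    }
}

The advertised hostname and port are left as ${STRIMZI_..._ADVERTISED_*} placeholders because each broker resolves them from its own environment variables at startup.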
use of io.strimzi.api.kafka.model.CertAndKeySecretSource in projects strimzi and strimzi-kafka-operator by strimzi.
the class KafkaCluster method getNonDataVolumes.
/**
 * Generates list of non-data volumes used by Kafka Pods. This includes tmp volumes, mounted secrets and config
 * maps.
 *
 * @param isOpenShift   Indicates whether we are on OpenShift or not
 *
 * @return  List of non-data volumes used by the Kafka pods
 */
private List<Volume> getNonDataVolumes(boolean isOpenShift) {
    List<Volume> volumeList = new ArrayList<>();

    if (rack != null || isExposedWithNodePort()) {
        volumeList.add(VolumeUtils.createEmptyDirVolume(INIT_VOLUME_NAME, "1Mi", "Memory"));
    }

    volumeList.add(createTempDirVolume());
    volumeList.add(VolumeUtils.createSecretVolume(CLUSTER_CA_CERTS_VOLUME, AbstractModel.clusterCaCertSecretName(cluster), isOpenShift));
    volumeList.add(VolumeUtils.createSecretVolume(BROKER_CERTS_VOLUME, KafkaCluster.brokersSecretName(cluster), isOpenShift));
    volumeList.add(VolumeUtils.createSecretVolume(CLIENT_CA_CERTS_VOLUME, KafkaCluster.clientsCaCertSecretName(cluster), isOpenShift));
    volumeList.add(VolumeUtils.createConfigMapVolume(logAndMetricsConfigVolumeName, ancillaryConfigMapName));
    volumeList.add(VolumeUtils.createEmptyDirVolume("ready-files", "1Ki", "Memory"));

    for (GenericKafkaListener listener : listeners) {
        if (listener.isTls() && listener.getConfiguration() != null && listener.getConfiguration().getBrokerCertChainAndKey() != null) {
            CertAndKeySecretSource secretSource = listener.getConfiguration().getBrokerCertChainAndKey();

            Map<String, String> items = new HashMap<>(2);
            items.put(secretSource.getKey(), "tls.key");
            items.put(secretSource.getCertificate(), "tls.crt");

            volumeList.add(VolumeUtils.createSecretVolume("custom-" + ListenersUtils.identifier(listener) + "-certs", secretSource.getSecretName(), items, isOpenShift));
        }

        if (isListenerWithOAuth(listener)) {
            KafkaListenerAuthenticationOAuth oauth = (KafkaListenerAuthenticationOAuth) listener.getAuth();
            volumeList.addAll(AuthenticationUtils.configureOauthCertificateVolumes("oauth-" + ListenersUtils.identifier(listener), oauth.getTlsTrustedCertificates(), isOpenShift));
        }

        if (isListenerWithCustomAuth(listener)) {
            KafkaListenerAuthenticationCustom custom = (KafkaListenerAuthenticationCustom) listener.getAuth();
            volumeList.addAll(AuthenticationUtils.configureGenericSecretVolumes("custom-listener-" + ListenersUtils.identifier(listener), custom.getSecrets(), isOpenShift));
        }
    }

    if (authorization instanceof KafkaAuthorizationKeycloak) {
        KafkaAuthorizationKeycloak keycloakAuthz = (KafkaAuthorizationKeycloak) authorization;
        volumeList.addAll(AuthenticationUtils.configureOauthCertificateVolumes("authz-keycloak", keycloakAuthz.getTlsTrustedCertificates(), isOpenShift));
    }

    return volumeList;
}
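For context on the custom-certificate branch above, here is a hedged sketch of how a CertAndKeySecretSource might be populated and how its fields feed the tls.key/tls.crt item mapping used for the mounted secret volume. The secret name and key names are illustrative assumptions, and the setter calls mirror the getters used in the method rather than any specific Strimzi example.

// Hedged sketch: populating a CertAndKeySecretSource and deriving the same
// key-to-file mapping used when mounting the custom listener certificate.
import io.strimzi.api.kafka.model.CertAndKeySecretSource;

import java.util.HashMap;
import java.util.Map;

public class CustomCertVolumeSketch {
    public static void main(String[] args) {
        CertAndKeySecretSource secretSource = new CertAndKeySecretSource();
        secretSource.setSecretName("my-listener-certs"); // assumed Secret name
        secretSource.setCertificate("listener.crt");     // assumed key holding the certificate chain
        secretSource.setKey("listener.key");             // assumed key holding the private key

        // Same mapping as in getNonDataVolumes(): the Secret keys are remapped to the
        // fixed file names tls.key and tls.crt inside the mounted volume.
        Map<String, String> items = new HashMap<>(2);
        items.put(secretSource.getKey(), "tls.key");
        items.put(secretSource.getCertificate(), "tls.crt");

        items.forEach((secretKey, fileName) ->
                System.out.println(secretKey + " -> " + fileName));
    }
}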