Use of io.strimzi.api.kafka.model.EntityUserOperatorSpecBuilder in project strimzi by strimzi.
From the class EntityUserOperatorTest, method testFromCrdCaValidityAndRenewal:
@ParallelTest
public void testFromCrdCaValidityAndRenewal() {
    EntityUserOperatorSpec entityUserOperatorSpec = new EntityUserOperatorSpecBuilder().build();
    EntityOperatorSpec entityOperatorSpec = new EntityOperatorSpecBuilder()
            .withUserOperator(entityUserOperatorSpec)
            .build();
    CertificateAuthority ca = new CertificateAuthority();
    ca.setValidityDays(42);
    ca.setRenewalDays(69);
    Kafka customValues = new KafkaBuilder(ResourceUtils.createKafka(namespace, cluster, replicas, image, healthDelay, healthTimeout))
            .editSpec()
                .withEntityOperator(entityOperatorSpec)
                .withClientsCa(ca)
            .endSpec()
            .build();
    EntityUserOperator entityUserOperator = EntityUserOperator.fromCrd(
            new Reconciliation("test", resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName()), customValues);
    Kafka defaultValues = new KafkaBuilder(ResourceUtils.createKafka(namespace, cluster, replicas, image, healthDelay, healthTimeout))
            .editSpec()
                .withEntityOperator(entityOperatorSpec)
            .endSpec()
            .build();
    EntityUserOperator entityUserOperator2 = EntityUserOperator.fromCrd(
            new Reconciliation("test", resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName()), defaultValues);
    assertThat(entityUserOperator.getClientsCaValidityDays(), is(42L));
    assertThat(entityUserOperator.getClientsCaRenewalDays(), is(69L));
    assertThat(entityUserOperator2.getClientsCaValidityDays(), is(Long.valueOf(CertificateAuthority.DEFAULT_CERTS_VALIDITY_DAYS)));
    assertThat(entityUserOperator2.getClientsCaRenewalDays(), is(Long.valueOf(CertificateAuthority.DEFAULT_CERTS_RENEWAL_DAYS)));
}
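The test above configures the clients CA with plain setters. For comparison, the same object can be expressed in the fluent style used for the other specs in this test; the snippet below is a minimal sketch assuming the generated CertificateAuthorityBuilder is available on the classpath, and is not code taken from the test itself.

    // Minimal sketch, assuming io.strimzi.api.kafka.model.CertificateAuthorityBuilder
    // (the generated builder for CertificateAuthority) is available; it expresses the
    // same clients CA configuration as the setter calls above.
    CertificateAuthority ca = new CertificateAuthorityBuilder()
            .withValidityDays(42)   // custom validity instead of DEFAULT_CERTS_VALIDITY_DAYS
            .withRenewalDays(69)    // custom renewal window instead of DEFAULT_CERTS_RENEWAL_DAYS
            .build();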
Use of io.strimzi.api.kafka.model.EntityUserOperatorSpecBuilder in projects strimzi and strimzi-kafka-operator by strimzi.
From the class EntityUserOperatorTest, method testFromCrdDefault:
@ParallelTest
public void testFromCrdDefault() {
    EntityUserOperatorSpec entityUserOperatorSpec = new EntityUserOperatorSpecBuilder().build();
    EntityOperatorSpec entityOperatorSpec = new EntityOperatorSpecBuilder()
            .withUserOperator(entityUserOperatorSpec)
            .build();
    Kafka resource = new KafkaBuilder(ResourceUtils.createKafka(namespace, cluster, replicas, image, healthDelay, healthTimeout))
            .editSpec()
                .withEntityOperator(entityOperatorSpec)
            .endSpec()
            .build();
    EntityUserOperator entityUserOperator = EntityUserOperator.fromCrd(
            new Reconciliation("test", resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName()), resource);
    assertThat(entityUserOperator.getWatchedNamespace(), is(namespace));
    assertThat(entityUserOperator.getImage(), is("quay.io/strimzi/operator:latest"));
    assertThat(entityUserOperator.getReconciliationIntervalMs(), is(EntityUserOperatorSpec.DEFAULT_FULL_RECONCILIATION_INTERVAL_SECONDS * 1000));
    assertThat(entityUserOperator.readinessProbeOptions.getInitialDelaySeconds(), is(EntityUserOperatorSpec.DEFAULT_HEALTHCHECK_DELAY));
    assertThat(entityUserOperator.readinessProbeOptions.getTimeoutSeconds(), is(EntityUserOperatorSpec.DEFAULT_HEALTHCHECK_TIMEOUT));
    assertThat(entityUserOperator.livenessProbeOptions.getInitialDelaySeconds(), is(EntityUserOperatorSpec.DEFAULT_HEALTHCHECK_DELAY));
    assertThat(entityUserOperator.livenessProbeOptions.getTimeoutSeconds(), is(EntityUserOperatorSpec.DEFAULT_HEALTHCHECK_TIMEOUT));
    assertThat(entityUserOperator.getKafkaBootstrapServers(), is(EntityUserOperator.defaultBootstrapServers(cluster)));
    assertThat(entityUserOperator.getLogging(), is(nullValue()));
    assertThat(entityUserOperator.getSecretPrefix(), is(EntityUserOperatorSpec.DEFAULT_SECRET_PREFIX));
}
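The defaults test only exercises an empty EntityUserOperatorSpecBuilder. For contrast, a non-default spec would be built by setting the corresponding fields on the builder before wrapping it in the EntityOperatorSpec. The sketch below is illustrative only: the with* method names are assumed from the generated builder API, and the namespace, image and prefix values are hypothetical.

    // Minimal sketch, not taken from the test: assumes the generated with* methods
    // on EntityUserOperatorSpecBuilder match the spec fields checked above.
    EntityUserOperatorSpec customSpec = new EntityUserOperatorSpecBuilder()
            .withWatchedNamespace("my-other-namespace")    // hypothetical namespace, overrides the Kafka CR namespace
            .withImage("quay.io/strimzi/operator:custom")  // hypothetical image, overrides the default image
            .withReconciliationIntervalSeconds(60)         // overrides DEFAULT_FULL_RECONCILIATION_INTERVAL_SECONDS
            .withSecretPrefix("kafka-")                    // hypothetical prefix, overrides DEFAULT_SECRET_PREFIX
            .build();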
Use of io.strimzi.api.kafka.model.EntityUserOperatorSpecBuilder in projects strimzi and strimzi-kafka-operator by strimzi.
From the class KafkaAssemblyOperatorTest, method data:
public static Iterable<Params> data() {
    boolean[] metricsOpenShiftAndEntityOperatorOptions = { true, false };
    SingleVolumeStorage[] storageConfig = {
        new EphemeralStorage(),
        new PersistentClaimStorageBuilder().withSize("123").withStorageClass("foo").withDeleteClaim(true).build()
    };
    List<Map<String, Object>> configs = asList(null, emptyMap(), singletonMap("foo", "bar"));
    List<Params> result = new ArrayList<>();
    for (boolean metricsOpenShiftAndEntityOperator : metricsOpenShiftAndEntityOperatorOptions) {
        for (Map<String, Object> config : configs) {
            for (SingleVolumeStorage storage : storageConfig) {
                EntityOperatorSpec eoConfig;
                if (metricsOpenShiftAndEntityOperator) {
                    eoConfig = new EntityOperatorSpecBuilder()
                            .withUserOperator(new EntityUserOperatorSpecBuilder().build())
                            .withTopicOperator(new EntityTopicOperatorSpecBuilder().build())
                            .build();
                } else {
                    eoConfig = null;
                }
                List<GenericKafkaListener> listeners = new ArrayList<>(3);
                listeners.add(new GenericKafkaListenerBuilder().withName("plain").withPort(9092).withType(KafkaListenerType.INTERNAL).withTls(false)
                        .withNewKafkaListenerAuthenticationScramSha512Auth().endKafkaListenerAuthenticationScramSha512Auth().build());
                listeners.add(new GenericKafkaListenerBuilder().withName("tls").withPort(9093).withType(KafkaListenerType.INTERNAL).withTls(true)
                        .withNewKafkaListenerAuthenticationTlsAuth().endKafkaListenerAuthenticationTlsAuth().build());
                if (metricsOpenShiftAndEntityOperator) {
                    // On OpenShift, use Routes
                    listeners.add(new GenericKafkaListenerBuilder().withName("external").withPort(9094).withType(KafkaListenerType.ROUTE).withTls(true)
                            .withNewKafkaListenerAuthenticationTlsAuth().endKafkaListenerAuthenticationTlsAuth().build());
                } else {
                    // On Kube, use nodeports
                    listeners.add(new GenericKafkaListenerBuilder().withName("external").withPort(9094).withType(KafkaListenerType.NODEPORT).withTls(true)
                            .withNewKafkaListenerAuthenticationTlsAuth().endKafkaListenerAuthenticationTlsAuth().build());
                }
                result.add(new Params(metricsOpenShiftAndEntityOperator, metricsOpenShiftAndEntityOperator, listeners, config, config, storage, storage, eoConfig));
            }
        }
    }
    return result;
}
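The data() provider above only builds the parameter matrix. As a usage note, a JUnit 5 parameterized test would consume it roughly as shown below; the test method is a hypothetical sketch rather than one of the actual KafkaAssemblyOperatorTest methods, and it assumes JUnit 5's org.junit.jupiter.params.ParameterizedTest and org.junit.jupiter.params.provider.MethodSource are on the classpath.

    // Hypothetical consumer of data(); each Params instance carries one combination of
    // the OpenShift/metrics flag, listener set, config map, storage and Entity Operator spec.
    @ParameterizedTest
    @MethodSource("data")
    public void testEachCombination(Params params) {
        // Placeholder body: a real test would run the assembly operator with this combination.
        assertThat(params, is(notNullValue()));
    }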