use of io.fabric8.kubernetes.api.model.storage.StorageClass in project strimzi by strimzi.
the class Storage method fromJson.
/**
 * Returns a Storage instance from a corresponding JSON representation
 *
 * @param json  storage JSON representation
 * @return Storage instance
 */
public static Storage fromJson(JsonObject json) {
    String type = json.getString(Storage.TYPE_FIELD);
    if (type == null) {
        throw new IllegalArgumentException("Storage '" + Storage.TYPE_FIELD + "' is mandatory");
    }

    Storage storage = new Storage(StorageType.from(type));

    String size = json.getString(Storage.SIZE_FIELD);
    if (size != null) {
        storage.withSize(new Quantity(size));
    }

    String storageClass = json.getString(Storage.STORAGE_CLASS_FIELD);
    if (storageClass != null) {
        storage.withClass(storageClass);
    }

    if (json.getValue(Storage.DELETE_CLAIM_FIELD) instanceof Boolean) {
        boolean isDeleteClaim = json.getBoolean(Storage.DELETE_CLAIM_FIELD);
        storage.withDeleteClaim(isDeleteClaim);
    }

    JsonObject selector = json.getJsonObject(Storage.SELECTOR_FIELD);
    if (selector != null) {
        JsonObject matchLabelsJson = selector.getJsonObject(Storage.SELECTOR_MATCH_LABELS_FIELD);
        Map<String, String> matchLabels = matchLabelsJson.getMap().entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> String.valueOf(e.getValue())));
        // match-expressions aren't supported yet, so the first argument is null
        storage.withSelector(new LabelSelector(null, matchLabels));
    }

    return storage;
}
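For reference, a minimal usage sketch of the parser above, assuming the Vert.x JsonObject API. The "persistent-claim" type value and the "100Gi" size are illustrative assumptions; the field names are referenced through the same Storage constants the method reads.
// Hypothetical usage sketch (not part of the Strimzi sources): build a JSON
// representation using the same field constants fromJson reads, then parse it.
// The "persistent-claim" type value and the "100Gi" size are assumed values.
JsonObject json = new JsonObject()
        .put(Storage.TYPE_FIELD, "persistent-claim")
        .put(Storage.SIZE_FIELD, "100Gi")
        .put(Storage.STORAGE_CLASS_FIELD, "mysc")
        .put(Storage.DELETE_CLAIM_FIELD, true);

Storage storage = Storage.fromJson(json);
// The resulting Storage carries the parsed type, size, storage class and delete-claim settings.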
use of io.fabric8.kubernetes.api.model.storage.StorageClass in project strimzi by strimzi.
the class VolumeResizingTest method testVolumesBoundExpandableStorageClass.
@Test
public void testVolumesBoundExpandableStorageClass() {
    Kafka kafka = getKafkaCrd();
    KafkaCluster kafkaCluster = KafkaCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);

    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false);

    // Mock the PVC Operator
    PvcOperator mockPvcOps = supplier.pvcOperations;
    List<PersistentVolumeClaim> realPvcs = kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage());
    for (PersistentVolumeClaim pvc : realPvcs) {
        pvc.getSpec().getResources().getRequests().put("storage", new Quantity("10Gi"));
        pvc.setStatus(new PersistentVolumeClaimStatusBuilder()
                .withPhase("Bound")
                .withCapacity(pvc.getSpec().getResources().getRequests())
                .build());
    }

    when(mockPvcOps.getAsync(eq(namespace), ArgumentMatchers.startsWith("data-"))).thenAnswer(invocation -> {
        String pvcName = invocation.getArgument(1);
        return Future.succeededFuture(realPvcs.stream().filter(pvc -> pvcName.equals(pvc.getMetadata().getName())).findFirst().orElse(null));
    });

    ArgumentCaptor<PersistentVolumeClaim> pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class);
    when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture());

    // Mock the StorageClass Operator
    StorageClassOperator mockSco = supplier.storageClassOperations;
    when(mockSco.getAsync(eq("mysc"))).thenAnswer(invocation -> {
        StorageClass sc = new StorageClassBuilder()
                .withNewMetadata()
                    .withName("mysc")
                .endMetadata()
                .withAllowVolumeExpansion(true)
                .build();
        return Future.succeededFuture(sc);
    });

    MockKafkaAssemblyOperator kao = new MockKafkaAssemblyOperator(vertx, new PlatformFeaturesAvailability(false, kubernetesVersion), certManager, passwordGenerator, supplier, config);

    kao.resizeVolumes(new Reconciliation("test-trigger", Kafka.RESOURCE_KIND, namespace, clusterName), kafka, kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage()), kafkaCluster).onComplete(res -> {
        assertThat(res.succeeded(), is(true));
        assertThat(pvcCaptor.getAllValues().size(), is(3));
        assertThat(pvcCaptor.getAllValues(), is(kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage())));
    });
}
use of io.fabric8.kubernetes.api.model.storage.StorageClass in project strimzi by strimzi.
the class VolumeResizingTest method testVolumesResized.
@Test
public void testVolumesResized() {
    Kafka kafka = getKafkaCrd();
    KafkaCluster kafkaCluster = KafkaCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);

    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false);

    // Mock the PVC Operator
    PvcOperator mockPvcOps = supplier.pvcOperations;
    List<PersistentVolumeClaim> realPvcs = kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage());
    for (PersistentVolumeClaim pvc : realPvcs) {
        pvc.setStatus(new PersistentVolumeClaimStatusBuilder()
                .withPhase("Bound")
                .withCapacity(singletonMap("storage", new Quantity("20Gi")))
                .build());
    }

    when(mockPvcOps.getAsync(eq(namespace), ArgumentMatchers.startsWith("data-"))).thenAnswer(invocation -> {
        String pvcName = invocation.getArgument(1);
        return Future.succeededFuture(realPvcs.stream().filter(pvc -> pvcName.equals(pvc.getMetadata().getName())).findFirst().orElse(null));
    });

    ArgumentCaptor<PersistentVolumeClaim> pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class);
    when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture());

    // Mock the StorageClass Operator
    StorageClassOperator mockSco = supplier.storageClassOperations;
    when(mockSco.getAsync(eq("mysc"))).thenAnswer(invocation -> {
        StorageClass sc = new StorageClassBuilder()
                .withNewMetadata()
                    .withName("mysc")
                .endMetadata()
                .withAllowVolumeExpansion(true)
                .build();
        return Future.succeededFuture(sc);
    });

    MockKafkaAssemblyOperator kao = new MockKafkaAssemblyOperator(vertx, new PlatformFeaturesAvailability(false, kubernetesVersion), certManager, passwordGenerator, supplier, config);

    kao.resizeVolumes(new Reconciliation("test-trigger", Kafka.RESOURCE_KIND, namespace, clusterName), kafka, kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage()), kafkaCluster).onComplete(res -> {
        assertThat(res.succeeded(), is(true));
        assertThat(pvcCaptor.getAllValues().size(), is(3));
        assertThat(pvcCaptor.getAllValues(), is(kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage())));
    });
}
use of io.fabric8.kubernetes.api.model.storage.StorageClass in project strimzi by strimzi.
the class VolumeResizingTest method testNoExistingVolumes.
@Test
public void testNoExistingVolumes() {
    Kafka kafka = getKafkaCrd();
    KafkaCluster kafkaCluster = KafkaCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);

    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false);

    // Mock the PVC Operator
    PvcOperator mockPvcOps = supplier.pvcOperations;
    when(mockPvcOps.getAsync(eq(namespace), ArgumentMatchers.startsWith("data-"))).thenReturn(Future.succeededFuture());

    ArgumentCaptor<PersistentVolumeClaim> pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class);
    when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture());

    // Mock the StorageClass Operator
    StorageClassOperator mockSco = supplier.storageClassOperations;
    when(mockSco.getAsync(eq("mysc"))).thenAnswer(invocation -> {
        StorageClass sc = new StorageClassBuilder()
                .withNewMetadata()
                    .withName("mysc")
                .endMetadata()
                .withAllowVolumeExpansion(true)
                .build();
        return Future.succeededFuture(sc);
    });

    MockKafkaAssemblyOperator kao = new MockKafkaAssemblyOperator(vertx, new PlatformFeaturesAvailability(false, kubernetesVersion), certManager, passwordGenerator, supplier, config);

    kao.resizeVolumes(new Reconciliation("test-trigger", Kafka.RESOURCE_KIND, namespace, clusterName), kafka, kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage()), kafkaCluster).onComplete(res -> {
        assertThat(res.succeeded(), is(true));
        assertThat(pvcCaptor.getAllValues().size(), is(3));
        assertThat(pvcCaptor.getAllValues(), is(kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage())));
    });
}
use of io.fabric8.kubernetes.api.model.storage.StorageClass in project strimzi by strimzi.
the class VolumeResizingTest method testVolumesResizing.
@Test
public void testVolumesResizing() {
    Kafka kafka = getKafkaCrd();
    KafkaCluster kafkaCluster = KafkaCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, kafka, VERSIONS);

    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false);

    // Mock the PVC Operator
    PvcOperator mockPvcOps = supplier.pvcOperations;
    List<PersistentVolumeClaim> realPvcs = kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage());
    for (PersistentVolumeClaim pvc : realPvcs) {
        pvc.setStatus(new PersistentVolumeClaimStatusBuilder()
                .withPhase("Bound")
                .withConditions(new PersistentVolumeClaimConditionBuilder()
                        .withStatus("True")
                        .withType("Resizing")
                        .build())
                .withCapacity(singletonMap("storage", new Quantity("10Gi")))
                .build());
    }

    when(mockPvcOps.getAsync(eq(namespace), ArgumentMatchers.startsWith("data-"))).thenAnswer(invocation -> {
        String pvcName = invocation.getArgument(1);
        return Future.succeededFuture(realPvcs.stream().filter(pvc -> pvcName.equals(pvc.getMetadata().getName())).findFirst().orElse(null));
    });

    ArgumentCaptor<PersistentVolumeClaim> pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class);
    when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture());

    // Mock the StorageClass Operator
    StorageClassOperator mockSco = supplier.storageClassOperations;
    when(mockSco.getAsync(eq("mysc"))).thenAnswer(invocation -> {
        StorageClass sc = new StorageClassBuilder()
                .withNewMetadata()
                    .withName("mysc")
                .endMetadata()
                .withAllowVolumeExpansion(true)
                .build();
        return Future.succeededFuture(sc);
    });

    MockKafkaAssemblyOperator kao = new MockKafkaAssemblyOperator(vertx, new PlatformFeaturesAvailability(false, kubernetesVersion), certManager, passwordGenerator, supplier, config);

    kao.resizeVolumes(new Reconciliation("test-trigger", Kafka.RESOURCE_KIND, namespace, clusterName), kafka, kafkaCluster.generatePersistentVolumeClaims(kafka.getSpec().getKafka().getStorage()), kafkaCluster).onComplete(res -> {
        assertThat(res.succeeded(), is(true));
        // The volumes are resizing => no reconciliation
        assertThat(pvcCaptor.getAllValues().size(), is(0));
    });
}