Search in sources :

Example 31 with Kafka

use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kas-fleetshard by bf2fc6cc711aee1a0c2a.

From the class KafkaClusterTest, the method testManagedKafkaToKafkaWithSizeChanges:

@Test
void testManagedKafkaToKafkaWithSizeChanges() throws IOException {
    // Remember the active configuration so it can be restored afterwards.
    KafkaInstanceConfiguration original = kafkaCluster.getKafkaConfiguration();
    try {
        // Deep-copy the configuration via a JSON round-trip so mutations below
        // never leak into the shared instance.
        ObjectMapper mapper = new ObjectMapper();
        String serialized = mapper.writeValueAsString(original);
        KafkaInstanceConfiguration copy = mapper.readValue(serialized, KafkaInstanceConfiguration.class);
        copy.getKafka().setOneInstancePerNode(false);
        copy.getKafka().setColocateWithZookeeper(false);
        copy.getExporter().setColocateWithZookeeper(false);
        kafkaCluster.setKafkaConfiguration(copy);

        Kafka kafka = kafkaCluster.kafkaFrom(exampleManagedKafka("60Gi"), null);

        // should not change to a smaller size
        Kafka reduced = kafkaCluster.kafkaFrom(exampleManagedKafka("40Gi"), kafka);
        diffToExpected(reduced, "/expected/strimzi.yml");

        // should change to a larger size
        Kafka larger = kafkaCluster.kafkaFrom(exampleManagedKafka("80Gi"), kafka);
        diffToExpected(larger, "/expected/strimzi.yml", "[{\"op\":\"replace\",\"path\":\"/spec/kafka/config/client.quota.callback.static.storage.soft\",\"value\":\"28633115306\"},{\"op\":\"replace\",\"path\":\"/spec/kafka/config/client.quota.callback.static.storage.hard\",\"value\":\"28675058306\"},{\"op\":\"replace\",\"path\":\"/spec/kafka/storage/volumes/0/size\",\"value\":\"39412476546\"}]");
    } finally {
        kafkaCluster.setKafkaConfiguration(original);
    }
}
Also used : ManagedKafkaUtils.exampleManagedKafka(org.bf2.operator.utils.ManagedKafkaUtils.exampleManagedKafka) Kafka(io.strimzi.api.kafka.model.Kafka) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) QuarkusTest(io.quarkus.test.junit.QuarkusTest) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Example 32 with Kafka

use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project kas-fleetshard by bf2fc6cc711aee1a0c2a.

From the class LogCollector, the method saveClusterState:

/**
 * Dumps the current cluster state — node descriptions, events, PVs, operator
 * routes/services/pods, managed Kafka resources and operator logs — into
 * individual files under the given directory, for post-mortem debugging.
 *
 * @param logpath directory the log/yaml files are written into (must exist)
 * @throws IOException if any of the output files cannot be written
 */
private static void saveClusterState(Path logpath) throws IOException {
    KubeClient kube = KubeClient.getInstance();
    Files.writeString(logpath.resolve("describe-cluster-nodes.log"), kube.cmdClient().exec(false, false, "describe", "nodes").out());
    Files.writeString(logpath.resolve("all-events.log"), kube.cmdClient().exec(false, false, "get", "events", "--all-namespaces").out());
    Files.writeString(logpath.resolve("pvs.log"), kube.cmdClient().exec(false, false, "describe", "pv").out());
    Files.writeString(logpath.resolve("operator-routes.yml"), kube.cmdClient().exec(false, false, "get", "routes", "-n", FleetShardOperatorManager.OPERATOR_NS, "-o", "yaml").out());
    Files.writeString(logpath.resolve("operator-services.yml"), kube.cmdClient().exec(false, false, "get", "service", "-n", FleetShardOperatorManager.OPERATOR_NS, "-o", "yaml").out());
    Files.writeString(logpath.resolve("kas-fleetshard-operator-pods.yml"), kube.cmdClient().exec(false, false, "get", "pod", "-l", "app=" + FleetShardOperatorManager.OPERATOR_NAME, "--all-namespaces", "-o", "yaml").out());
    Files.writeString(logpath.resolve("strimzi-kafka-pods.yml"), kube.cmdClient().exec(false, false, "get", "pod", "-l", "app.kubernetes.io/managed-by=strimzi-cluster-operator", "--all-namespaces", "-o", "yaml").out());
    Files.writeString(logpath.resolve("managedkafkas.yml"), kube.cmdClient().exec(false, false, "get", "managedkafka", "--all-namespaces", "-o", "yaml").out());
    Files.writeString(logpath.resolve("kafkas.yml"), kube.cmdClient().exec(false, false, "get", "kafka", "-l", "app.kubernetes.io/managed-by=" + FleetShardOperatorManager.OPERATOR_NAME, "--all-namespaces", "-o", "yaml").out());
    Files.writeString(logpath.resolve("pods-managed-by-operator.yml"), kube.cmdClient().exec(false, false, "get", "pods", "-l", "app.kubernetes.io/managed-by=" + FleetShardOperatorManager.OPERATOR_NAME, "--all-namespaces", "-o", "yaml").out());
    Files.writeString(logpath.resolve("operator-namespace-events.yml"), kube.cmdClient().exec(false, false, "get", "events", "-n", FleetShardOperatorManager.OPERATOR_NS).out());
    Files.writeString(logpath.resolve("operator.log"), kube.cmdClient().exec(false, false, "logs", "deployment/" + FleetShardOperatorManager.OPERATOR_NAME, "-n", FleetShardOperatorManager.OPERATOR_NS).out());
    Files.writeString(logpath.resolve("sync.log"), kube.cmdClient().exec(false, false, "logs", "deployment/" + FleetShardOperatorManager.SYNC_NAME, "-n", FleetShardOperatorManager.OPERATOR_NS).out());
    StrimziOperatorManager.getStrimziOperatorPods().forEach(pod -> {
        try {
            Files.writeString(logpath.resolve(pod.getMetadata().getName() + ".log"), kube.cmdClient().exec(false, false, "logs", pod.getMetadata().getName(), "--tail", "-1", "-n", pod.getMetadata().getNamespace()).out());
        } catch (Exception e) {
            // Best-effort: one unreadable pod must not abort the whole dump, but
            // include the throwable so the root cause is not silently lost.
            LOGGER.warn("Cannot get logs from pod {} in namespace {}", pod.getMetadata().getName(), pod.getMetadata().getNamespace(), e);
        }
    });
}
Also used : KubeClient(org.bf2.test.k8s.KubeClient) IOException(java.io.IOException)

Example 33 with Kafka

use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project cos-fleetshard by bf2fc6cc711aee1a0c2a.

From the class DebeziumOperandControllerTest, the method reify:

// Verifies that reifying a Debezium connector deployment produces the expected
// Kubernetes resources (KafkaConnect, KafkaConnector, Secret) and that secret
// values are referenced indirectly rather than inlined.
@Test
void reify() {
    // Mocked client: doReify only builds resource objects; nothing should hit a real cluster.
    KubernetesClient kubernetesClient = Mockito.mock(KubernetesClient.class);
    DebeziumOperandController controller = new DebeziumOperandController(kubernetesClient, CONFIGURATION);
    // Base64-encoded secrets, as they would arrive from the control plane.
    final String kcsB64 = Base64.getEncoder().encodeToString("kcs".getBytes(StandardCharsets.UTF_8));
    final String pwdB64 = Base64.getEncoder().encodeToString("orderpw".getBytes(StandardCharsets.UTF_8));
    // Connector spec mimicking a Debezium Postgres outbox configuration.
    var spec = Serialization.jsonMapper().createObjectNode().put("database.hostname", "orderdb").put("database.port", "5432").put("database.user", "orderuser").put("database.dbname", "orderdb").put("database.server.name", "dbserver1").put("schema.include.list", "purchaseorder").put("table.include.list", "purchaseorder.outboxevent").put("tombstones.on.delete", "false").put("key.converter", "org.apache.kafka.connect.storage.StringConverter").put("value.converter", "org.apache.kafka.connect.storage.StringConverter").put("transforms", "saga").put("transforms.saga.type", "io.debezium.transforms.outbox.EventRouter").put("transforms.saga.route.topic.replacement", "${routedByValue}.request").put("poll.interval.ms", "100").put("consumer.interceptor.classes", "io.opentracing.contrib.kafka.TracingConsumerInterceptor").put("producer.interceptor.classes", "io.opentracing.contrib.kafka.TracingProducerInterceptor");
    spec.with("data_shape").put("key", "JSON").put("value", "JSON");
    // Password supplied as a base64 "kind" so it must be externalized, not inlined.
    spec.with("database.password").put("kind", "base64").put("value", pwdB64);
    var resources = controller.doReify(new ManagedConnectorBuilder().withMetadata(new ObjectMetaBuilder().withName(DEFAULT_MANAGED_CONNECTOR_ID).build()).withSpec(new ManagedConnectorSpecBuilder().withConnectorId(DEFAULT_MANAGED_CONNECTOR_ID).withDeploymentId(DEFAULT_DEPLOYMENT_ID).withDeployment(new DeploymentSpecBuilder().withConnectorTypeId(DEFAULT_CONNECTOR_TYPE_ID).withSecret("secret").withKafka(new KafkaSpecBuilder().withUrl(DEFAULT_KAFKA_SERVER).build()).withConnectorResourceVersion(DEFAULT_CONNECTOR_REVISION).withDeploymentResourceVersion(DEFAULT_DEPLOYMENT_REVISION).withDesiredState(DESIRED_STATE_READY).build()).build()).build(), new org.bf2.cos.fleetshard.operator.debezium.DebeziumShardMetadataBuilder().withContainerImage(DEFAULT_CONNECTOR_IMAGE).withConnectorClass(PG_CLASS).build(), new ConnectorConfiguration<>(spec, ObjectNode.class), new ServiceAccountSpecBuilder().withClientId(DEFAULT_KAFKA_CLIENT_ID).withClientSecret(kcsB64).build());
    // All three resource kinds must be produced.
    assertThat(resources).anyMatch(DebeziumOperandSupport::isKafkaConnect).anyMatch(DebeziumOperandSupport::isKafkaConnector).anyMatch(DebeziumOperandSupport::isSecret);
    assertThat(resources).filteredOn(DebeziumOperandSupport::isKafkaConnect).hasSize(1).first().isInstanceOfSatisfying(KafkaConnect.class, kc -> {
        assertThat(kc.getSpec().getImage()).isEqualTo(DEFAULT_CONNECTOR_IMAGE);
    });
    // The password must be referenced via the external-configuration file provider.
    assertThat(resources).filteredOn(DebeziumOperandSupport::isKafkaConnector).hasSize(1).first().isInstanceOfSatisfying(KafkaConnector.class, kc -> {
        assertThat(kc.getSpec().getConfig()).containsEntry("database.password", "${file:/opt/kafka/external-configuration/" + DebeziumConstants.EXTERNAL_CONFIG_DIRECTORY + "/" + EXTERNAL_CONFIG_FILE + ":database.password}");
    });
}
Also used : DeploymentSpecBuilder(org.bf2.cos.fleetshard.api.DeploymentSpecBuilder) KubernetesClient(io.fabric8.kubernetes.client.KubernetesClient) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ManagedConnectorSpecBuilder(org.bf2.cos.fleetshard.api.ManagedConnectorSpecBuilder) ObjectMetaBuilder(io.fabric8.kubernetes.api.model.ObjectMetaBuilder) ManagedConnectorBuilder(org.bf2.cos.fleetshard.api.ManagedConnectorBuilder) KafkaSpecBuilder(org.bf2.cos.fleetshard.api.KafkaSpecBuilder) ServiceAccountSpecBuilder(org.bf2.cos.fleetshard.api.ServiceAccountSpecBuilder) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Example 34 with Kafka

use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project cos-fleetshard by bf2fc6cc711aee1a0c2a.

From the class CamelOperandSupport, the method createSteps:

/**
 * Builds the ordered list of processor kamelets for a connector: an optional
 * decoder step (driven by the "consumes" format), the user-configured
 * processor steps, and an optional encoder step (driven by the "produces"
 * format). As a side effect, fills {@code props} with the per-step kamelet
 * properties and the Kafka key/value (de)serializer settings.
 *
 * @param connector              the managed connector (used to detect a schema registry)
 * @param connectorConfiguration connector configuration (data shape + processors)
 * @param shardMetadata          shard metadata providing defaults and kamelet ids
 * @param props                  mutable map that receives kamelet/serde properties (mutated!)
 * @return the ordered step definitions
 * @throws IllegalArgumentException for an unsupported format or unknown processor
 */
public static List<ProcessorKamelet> createSteps(ManagedConnector connector, ConnectorConfiguration<ObjectNode> connectorConfiguration, CamelShardMetadata shardMetadata, Map<String, String> props) {
    // Data-shape overrides from the configuration win over the shard metadata defaults.
    String consumes = Optional.ofNullable(connectorConfiguration.getDataShapeSpec()).map(spec -> spec.at("/consumes/format")).filter(node -> !node.isMissingNode()).map(JsonNode::asText).orElse(shardMetadata.getConsumes());
    String produces = Optional.ofNullable(connectorConfiguration.getDataShapeSpec()).map(spec -> spec.at("/produces/format")).filter(node -> !node.isMissingNode()).map(JsonNode::asText).orElse(shardMetadata.getProduces());
    final ArrayNode steps = connectorConfiguration.getProcessorsSpec();
    // +2 leaves room for the optional decoder and encoder steps.
    final List<ProcessorKamelet> stepDefinitions = new ArrayList<>(steps.size() + 2);
    // Running step index; gives each kamelet instance a unique, ordered name.
    int i = 0;
    // 1) Optional decoder step, selected by the input ("consumes") format.
    if (consumes != null) {
        switch(consumes) {
            case "application/json":
                {
                    String stepName = stepName(i, "cos-decoder-json-action");
                    stepDefinitions.add(new ProcessorKamelet("cos-decoder-json-action", stepName));
                    if (shardMetadata.getConsumesClass() != null) {
                        props.put(kameletProperty("cos-decoder-json-action", stepName, "contentClass"), shardMetadata.getConsumesClass());
                    }
                    i++;
                }
                break;
            case "avro/binary":
                {
                    String stepName = stepName(i, "cos-decoder-avro-action");
                    stepDefinitions.add(new ProcessorKamelet("cos-decoder-avro-action", stepName));
                    if (shardMetadata.getConsumesClass() != null) {
                        props.put(kameletProperty("cos-decoder-avro-action", stepName, "contentClass"), shardMetadata.getConsumesClass());
                    }
                    i++;
                }
                break;
            case "application/x-java-object":
                {
                    String stepName = stepName(i, "cos-decoder-pojo-action");
                    stepDefinitions.add(new ProcessorKamelet("cos-decoder-pojo-action", stepName));
                    // POJO decoding needs the target mime type, taken from the output format.
                    if (produces != null) {
                        props.put(kameletProperty("cos-decoder-pojo-action", stepName, "mimeType"), produces);
                    }
                    i++;
                }
                break;
            case "text/plain":
            case "application/octet-stream":
                // Pass-through formats: no decoder step needed.
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + consumes);
        }
    }
    // 2) User-configured processor steps, resolved through the shard metadata kamelet map.
    for (JsonNode step : steps) {
        var element = step.fields().next();
        String templateId = shardMetadata.getKamelets().getProcessors().get(element.getKey());
        if (templateId == null) {
            throw new IllegalArgumentException("Unknown processor: " + element.getKey());
        }
        stepDefinitions.add(new ProcessorKamelet(templateId, stepName(i, templateId)));
        configureStep(props, (ObjectNode) element.getValue(), i, shardMetadata.getKamelets().getProcessors().get(element.getKey()));
        i++;
    }
    // 3) Optional encoder step, selected by the output ("produces") format.
    if (produces != null) {
        switch(produces) {
            case "application/json":
                {
                    String stepName = stepName(i, "cos-encoder-json-action");
                    stepDefinitions.add(new ProcessorKamelet("cos-encoder-json-action", stepName));
                    if (shardMetadata.getProducesClass() != null) {
                        props.put(kameletProperty("cos-encoder-json-action", stepName, "contentClass"), shardMetadata.getProducesClass());
                    }
                }
                break;
            case "avro/binary":
                {
                    String stepName = stepName(i, "cos-encoder-avro-action");
                    stepDefinitions.add(new ProcessorKamelet("cos-encoder-avro-action", stepName));
                    if (shardMetadata.getProducesClass() != null) {
                        props.put(kameletProperty("cos-encoder-avro-action", stepName, "contentClass"), shardMetadata.getProducesClass());
                    }
                }
                break;
            case "text/plain":
                {
                    stepDefinitions.add(new ProcessorKamelet("cos-encoder-string-action", stepName(i, "cos-encoder-string-action")));
                }
                break;
            case "application/octet-stream":
                {
                    stepDefinitions.add(new ProcessorKamelet("cos-encoder-bytearray-action", stepName(i, "cos-encoder-bytearray-action")));
                }
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + produces);
        }
    }
    // If it is a sink, then it consumes from kafka
    if (isSink(shardMetadata)) {
        // Default deserializer; overridden below when a schema registry is available.
        props.put(String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.bytes.ByteArrayDeserializer");
        if ("application/json".equals(consumes) && hasSchemaRegistry(connector)) {
            props.put(String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.json.JsonDeserializer");
        }
        // NOTE(review): the JSON branch above keys off `consumes` while this Avro
        // branch keys off `produces`; for a sink (reading from Kafka) one would
        // expect both to depend on `consumes` — confirm whether this asymmetry
        // is intentional before changing it.
        if ("avro/binary".equals(produces) && hasSchemaRegistry(connector)) {
            props.put(String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.avro.AvroDeserializer");
        }
    }
    // If it is a source, then it produces to kafka
    if (isSource(shardMetadata)) {
        // Default serializer; overridden below when a schema registry is available.
        props.put(String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.bytes.ByteArraySerializer");
        if ("application/json".equals(produces) && hasSchemaRegistry(connector)) {
            props.put(String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.json.JsonSerializer");
        }
        if ("avro/binary".equals(produces) && hasSchemaRegistry(connector)) {
            props.put(String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.avro.AvroSerializer");
        }
    }
    return stepDefinitions;
}
Also used : ConnectorStatusSpec(org.bf2.cos.fleetshard.api.ConnectorStatusSpec) ERROR_HANDLER_LOG_TYPE(org.bf2.cos.fleetshard.operator.camel.CamelConstants.ERROR_HANDLER_LOG_TYPE) StringUtils(org.apache.commons.lang3.StringUtils) KameletBindingStatus(org.bf2.cos.fleetshard.operator.camel.model.KameletBindingStatus) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ArrayList(java.util.ArrayList) ERROR_HANDLER_STOP_URI(org.bf2.cos.fleetshard.operator.camel.CamelConstants.ERROR_HANDLER_STOP_URI) Serialization(io.fabric8.kubernetes.client.utils.Serialization) Locale(java.util.Locale) Map(java.util.Map) JsonNode(com.fasterxml.jackson.databind.JsonNode) ERROR_HANDLER_SINK_CHANNEL_TYPE(org.bf2.cos.fleetshard.operator.camel.CamelConstants.ERROR_HANDLER_SINK_CHANNEL_TYPE) ManagedConnector(org.bf2.cos.fleetshard.api.ManagedConnector) ConnectorConfiguration(org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration) ERROR_HANDLER_DEAD_LETTER_CHANNEL_KAMELET(org.bf2.cos.fleetshard.operator.camel.CamelConstants.ERROR_HANDLER_DEAD_LETTER_CHANNEL_KAMELET) CamelShardMetadata(org.bf2.cos.fleetshard.operator.camel.model.CamelShardMetadata) Iterator(java.util.Iterator) ProcessorKamelet(org.bf2.cos.fleetshard.operator.camel.model.ProcessorKamelet) CaseUtils(org.apache.commons.text.CaseUtils) ServiceAccountSpec(org.bf2.cos.fleetshard.api.ServiceAccountSpec) KameletBinding(org.bf2.cos.fleetshard.operator.camel.model.KameletBinding) StandardCharsets(java.nio.charset.StandardCharsets) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) Base64(java.util.Base64) List(java.util.List) Condition(io.fabric8.kubernetes.api.model.Condition) CONNECTOR_TYPE_SOURCE(org.bf2.cos.fleetshard.operator.camel.CamelConstants.CONNECTOR_TYPE_SOURCE) ERROR_HANDLER_DEAD_LETTER_CHANNEL_KAMELET_ID(org.bf2.cos.fleetshard.operator.camel.CamelConstants.ERROR_HANDLER_DEAD_LETTER_CHANNEL_KAMELET_ID) KubernetesClient(io.fabric8.kubernetes.client.KubernetesClient) Optional(java.util.Optional) 
CONNECTOR_TYPE_SINK(org.bf2.cos.fleetshard.operator.camel.CamelConstants.CONNECTOR_TYPE_SINK) Kamelets(org.bf2.cos.fleetshard.operator.camel.model.Kamelets) JacksonUtil.iterator(org.bf2.cos.fleetshard.support.json.JacksonUtil.iterator) ProcessorKamelet(org.bf2.cos.fleetshard.operator.camel.model.ProcessorKamelet) ArrayList(java.util.ArrayList) JsonNode(com.fasterxml.jackson.databind.JsonNode) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode)

Example 35 with Kafka

use of org.bf2.operator.operands.KafkaInstanceConfiguration.Kafka in project cos-fleetshard by bf2fc6cc711aee1a0c2a.

From the class AbstractApicurioConverter, the method getAdditionalConfig:

/**
 * Builds the Apicurio-registry related configuration entries for a
 * schema-based connector: auth service/realm, registry URL, client
 * credentials and auto-register flags.
 *
 * @param config             the managed connector whose deployment must carry a schema registry
 * @param serviceAccountSpec service account providing the registry client id
 * @return the additional configuration entries (mutable map)
 * @throws IllegalArgumentException if no valid schema registry URL is configured
 */
@Override
public Map<String, String> getAdditionalConfig(ManagedConnector config, ServiceAccountSpec serviceAccountSpec) {
    // Validate input before building anything; IllegalArgumentException is still a
    // RuntimeException, so existing callers catching RuntimeException keep working.
    SchemaRegistrySpec schemaRegistrySpec = config.getSpec().getDeployment().getSchemaRegistry();
    if (null == schemaRegistrySpec || null == schemaRegistrySpec.getUrl() || schemaRegistrySpec.getUrl().isBlank()) {
        throw new IllegalArgumentException("Can't create a schema-based connector without providing a valid 'schema_registry'");
    }
    Map<String, String> additionalConfig = new HashMap<>();
    additionalConfig.put("apicurio.auth.service.url", APICURIO_AUTH_SERVICE_URL);
    additionalConfig.put("apicurio.auth.realm", "rhoas");
    additionalConfig.put("apicurio.registry.url", schemaRegistrySpec.getUrl());
    additionalConfig.put("apicurio.auth.client.id", serviceAccountSpec.getClientId());
    // The client secret is resolved at runtime from the mounted external configuration.
    additionalConfig.put("apicurio.auth.client.secret", "${dir:/opt/kafka/external-configuration/" + DebeziumConstants.EXTERNAL_CONFIG_DIRECTORY + ":" + DebeziumConstants.KAFKA_CLIENT_SECRET_KEY + "}");
    additionalConfig.put("apicurio.registry.auto-register", "true");
    additionalConfig.put("apicurio.registry.find-latest", "true");
    return additionalConfig;
}
Also used : HashMap(java.util.HashMap) SchemaRegistrySpec(org.bf2.cos.fleetshard.api.SchemaRegistrySpec)

Aggregations

ManagedKafka (org.bf2.operator.resources.v1alpha1.ManagedKafka)45 Kafka (io.strimzi.api.kafka.model.Kafka)31 Test (org.junit.jupiter.api.Test)24 List (java.util.List)19 ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)19 QuarkusTest (io.quarkus.test.junit.QuarkusTest)18 Map (java.util.Map)18 ArrayList (java.util.ArrayList)17 Inject (javax.inject.Inject)13 Quantity (io.fabric8.kubernetes.api.model.Quantity)12 Optional (java.util.Optional)11 Collections (java.util.Collections)10 Collectors (java.util.stream.Collectors)10 HashMap (java.util.HashMap)9 Objects (java.util.Objects)9 StrimziManager (org.bf2.operator.managers.StrimziManager)9 Logger (org.jboss.logging.Logger)9 KubernetesClient (io.fabric8.kubernetes.client.KubernetesClient)8 IOException (java.io.IOException)8 ManagedKafkaUtils.exampleManagedKafka (org.bf2.operator.utils.ManagedKafkaUtils.exampleManagedKafka)8