
Example 26 with ConfigModel

use of io.strimzi.kafka.config.model.ConfigModel in project strimzi-kafka-operator by strimzi.

the class ConfigModelTest method testShortValidation.

@Test
public void testShortValidation() {
    ConfigModel cm = new ConfigModel();
    cm.setType(Type.SHORT);
    assertThat(cm.validate("test", "1"), is(emptyList()));
    assertThat(cm.validate("test", Short.valueOf(Short.MAX_VALUE).toString()), is(emptyList()));
    assertThat(cm.validate("test", Short.valueOf(Short.MIN_VALUE).toString()), is(emptyList()));
    assertThat(cm.validate("test", Integer.valueOf((int) Short.MAX_VALUE + 1).toString()), is(singletonList("test has value '32768' which is not a short")));
    assertThat(cm.validate("test", Integer.valueOf((int) Short.MIN_VALUE - 1).toString()), is(singletonList("test has value '-32769' which is not a short")));
    cm.setMinimum(0);
    assertThat(cm.validate("test", "-1"), is(singletonList("test has value -1 which less than the minimum value 0")));
    cm.setMaximum(1);
    assertThat(cm.validate("test", "2"), is(singletonList("test has value 2 which greater than the maximum value 1")));
}
Also used : ConfigModel(io.strimzi.kafka.config.model.ConfigModel) Test(org.junit.jupiter.api.Test)
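
The same range validation shown in this test can also be driven outside JUnit. Below is a minimal sketch that reuses only the API visible above (ConfigModel, Type, setMinimum/setMaximum and validate); the property name my.example.property and the INT bounds are illustrative, not taken from the Strimzi sources.

import io.strimzi.kafka.config.model.ConfigModel;
import io.strimzi.kafka.config.model.Type;

import java.util.List;

public class ConfigModelValidationSketch {
    public static void main(String[] args) {
        // Hypothetical model for an integer broker property with a bounded range
        ConfigModel cm = new ConfigModel();
        cm.setType(Type.INT);
        // values below 1 or above 100 should produce validation errors
        cm.setMinimum(1);
        cm.setMaximum(100);

        // validate() returns an empty list for acceptable values
        // and a list of human-readable error strings otherwise
        for (String candidate : List.of("50", "0", "101")) {
            List<String> errors = cm.validate("my.example.property", candidate);
            System.out.println(candidate + " -> " + (errors.isEmpty() ? "valid" : errors.toString()));
        }
    }
}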

Example 27 with ConfigModel

use of io.strimzi.kafka.config.model.ConfigModel in project strimzi-kafka-operator by strimzi.

the class KafkaBrokerConfigurationDiff method diff.

/**
 * Computes the diff between two maps. Entries in IGNORABLE_PROPERTIES are skipped.
 * @param brokerId id of compared broker
 * @param desired desired configuration, may be null if the related ConfigMap does not exist yet or no changes are required
 * @param brokerConfigs current configuration
 * @param configModel default configuration for {@code kafkaVersion} of broker
 * @return Collection of AlterConfigOp containing all entries in the desired configuration that differ from the current configuration
 */
private Collection<AlterConfigOp> diff(int brokerId, String desired, Config brokerConfigs, Map<String, ConfigModel> configModel) {
    if (brokerConfigs == null || desired == null) {
        return Collections.emptyList();
    }
    Map<String, String> currentMap;
    Collection<AlterConfigOp> updatedCE = new ArrayList<>();
    currentMap = brokerConfigs.entries().stream().collect(Collectors.toMap(ConfigEntry::name, configEntry -> configEntry.value() == null ? "null" : configEntry.value()));
    OrderedProperties orderedProperties = new OrderedProperties();
    orderedProperties.addStringPairs(desired);
    Map<String, String> desiredMap = orderedProperties.asMap();
    fillPlaceholderValue(desiredMap, Integer.toString(brokerId));
    JsonNode source = patchMapper().configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true).valueToTree(currentMap);
    JsonNode target = patchMapper().configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true).valueToTree(desiredMap);
    JsonNode jsonDiff = JsonDiff.asJson(source, target);
    for (JsonNode d : jsonDiff) {
        String pathValue = d.get("path").asText();
        String pathValueWithoutSlash = pathValue.substring(1);
        Optional<ConfigEntry> optEntry = brokerConfigs.entries().stream().filter(configEntry -> configEntry.name().equals(pathValueWithoutSlash)).findFirst();
        String op = d.get("op").asText();
        if (optEntry.isPresent()) {
            ConfigEntry entry = optEntry.get();
            if ("remove".equals(op)) {
                removeProperty(configModel, updatedCE, pathValueWithoutSlash, entry);
            } else if ("replace".equals(op)) {
                // entry is in the current, desired is updated value
                updateOrAdd(entry.name(), configModel, desiredMap, updatedCE);
            }
        } else {
            if ("add".equals(op)) {
                // entry is not in the current, it is added
                updateOrAdd(pathValueWithoutSlash, configModel, desiredMap, updatedCE);
            }
        }
        if ("remove".equals(op)) {
            // there are a lot of properties set by default; not having them in the desired configuration would cause very noisy log output
            LOGGER.traceCr(reconciliation, "Kafka Broker {} Config Differs : {}", brokerId, d);
            LOGGER.traceCr(reconciliation, "Current Kafka Broker Config path {} has value {}", pathValueWithoutSlash, lookupPath(source, pathValue));
            LOGGER.traceCr(reconciliation, "Desired Kafka Broker Config path {} has value {}", pathValueWithoutSlash, lookupPath(target, pathValue));
        } else {
            LOGGER.debugCr(reconciliation, "Kafka Broker {} Config Differs : {}", brokerId, d);
            LOGGER.debugCr(reconciliation, "Current Kafka Broker Config path {} has value {}", pathValueWithoutSlash, lookupPath(source, pathValue));
            LOGGER.debugCr(reconciliation, "Desired Kafka Broker Config path {} has value {}", pathValueWithoutSlash, lookupPath(target, pathValue));
        }
    }
    return updatedCE;
}
Also used : Scope(io.strimzi.kafka.config.model.Scope) AbstractJsonDiff(io.strimzi.operator.common.operator.resource.AbstractJsonDiff) Config(org.apache.kafka.clients.admin.Config) ReconciliationLogger(io.strimzi.operator.common.ReconciliationLogger) OrderedProperties(io.strimzi.operator.common.model.OrderedProperties) Collection(java.util.Collection) KafkaVersion(io.strimzi.operator.cluster.model.KafkaVersion) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) JsonDiff(io.fabric8.zjsonpatch.JsonDiff) Collectors(java.util.stream.Collectors) ArrayList(java.util.ArrayList) ConfigModel(io.strimzi.kafka.config.model.ConfigModel) Reconciliation(io.strimzi.operator.common.Reconciliation) AlterConfigOp(org.apache.kafka.clients.admin.AlterConfigOp) PatchUtils.patchMapper(io.fabric8.kubernetes.client.internal.PatchUtils.patchMapper) KafkaConfiguration(io.strimzi.operator.cluster.model.KafkaConfiguration) Map(java.util.Map) Optional(java.util.Optional) JsonNode(com.fasterxml.jackson.databind.JsonNode) SerializationFeature(com.fasterxml.jackson.databind.SerializationFeature) Pattern(java.util.regex.Pattern) Collections(java.util.Collections)
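
The heart of this method is the RFC 6902 style diff that zjsonpatch computes between the current and desired configuration maps. The following is a self-contained sketch of that mechanism under simplifying assumptions: it uses a plain Jackson ObjectMapper in place of Strimzi's patchMapper() helper, and the property values are made up for illustration.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import io.fabric8.zjsonpatch.JsonDiff;

import java.util.Map;

public class BrokerConfigDiffSketch {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper()
                .configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);

        // "current" broker configuration, as it would be reported by the Kafka Admin API (illustrative values)
        Map<String, String> current = Map.of(
                "num.network.threads", "3",
                "log.retention.ms", "604800000");

        // "desired" configuration, as it would come from the operator (illustrative values)
        Map<String, String> desired = Map.of(
                "num.network.threads", "6",
                "compression.type", "gzip");

        JsonNode source = mapper.valueToTree(current);
        JsonNode target = mapper.valueToTree(desired);

        // Each diff entry carries an "op" (add/remove/replace) and a "path" such as "/num.network.threads",
        // which is why diff() strips the leading slash before looking the property up in the broker config
        for (JsonNode d : JsonDiff.asJson(source, target)) {
            System.out.println(d.get("op").asText() + " " + d.get("path").asText());
        }
    }
}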

Example 28 with ConfigModel

use of io.strimzi.kafka.config.model.ConfigModel in project strimzi-kafka-operator by strimzi.

the class DynamicConfSharedST method generateTestCases.

/**
 * Method which dynamically generates test cases based on the Kafka version
 * @param kafkaVersion specific kafka version
 * @return Map of generated test cases
 */
@SuppressWarnings({ "checkstyle:CyclomaticComplexity" })
private static Map<String, Object> generateTestCases(String kafkaVersion) {
    Map<String, ConfigModel> dynamicProperties = KafkaUtils.getDynamicConfigurationProperties(kafkaVersion);
    Map<String, Object> testCases = new HashMap<>();
    dynamicProperties.forEach((key, value) -> {
        Type type = value.getType();
        Object stochasticChosenValue;
        switch(type) {
            case STRING:
                switch(key) {
                    case "compression.type":
                        List<String> compressionTypes = Arrays.asList("snappy", "gzip", "lz4", "zstd");
                        stochasticChosenValue = compressionTypes.get(ThreadLocalRandom.current().nextInt(0, compressionTypes.size() - 1));
                        break;
                    case "log.message.timestamp.type":
                        stochasticChosenValue = "LogAppendTime";
                        break;
                    case "ssl.protocol":
                        stochasticChosenValue = "TLSv1.1";
                        break;
                    default:
                        stochasticChosenValue = " ";
                }
                testCases.put(key, stochasticChosenValue);
                break;
            case INT:
            case LONG:
                switch(key) {
                    case "num.recovery.threads.per.data.dir":
                    case "log.cleaner.threads":
                    case "num.network.threads":
                    case "min.insync.replicas":
                    case "num.replica.fetchers":
                    case "num.partitions":
                        stochasticChosenValue = ThreadLocalRandom.current().nextInt(2, 3);
                        break;
                    case "log.cleaner.io.buffer.load.factor":
                    case "log.retention.ms":
                    case "max.connections":
                    case "max.connections.per.ip":
                    case "background.threads":
                        stochasticChosenValue = ThreadLocalRandom.current().nextInt(4, 20);
                        break;
                    default:
                        stochasticChosenValue = ThreadLocalRandom.current().nextInt(100, 50_000);
                }
                testCases.put(key, stochasticChosenValue);
                break;
            case DOUBLE:
                switch(key) {
                    case "log.cleaner.min.cleanable.dirty.ratio":
                    case "log.cleaner.min.cleanable.ratio":
                        stochasticChosenValue = ThreadLocalRandom.current().nextDouble(0, 1);
                        break;
                    default:
                        stochasticChosenValue = ThreadLocalRandom.current().nextDouble(1, 20);
                }
                testCases.put(key, stochasticChosenValue);
                break;
            case BOOLEAN:
                switch(key) {
                    case "unclean.leader.election.enable":
                    case "log.preallocate":
                        stochasticChosenValue = true;
                        break;
                    case "log.message.downconversion.enable":
                        stochasticChosenValue = false;
                        break;
                    default:
                        stochasticChosenValue = ThreadLocalRandom.current().nextInt(2) == 0;
                }
                testCases.put(key, stochasticChosenValue);
                break;
            case LIST:
                // log.cleanup.policy = [delete, compact] -> default delete
                switch(key) {
                    case "log.cleanup.policy":
                        stochasticChosenValue = "compact";
                        break;
                    case "ssl.enabled.protocols":
                        stochasticChosenValue = "TLSv1.1";
                        break;
                    default:
                        stochasticChosenValue = " ";
                }
                testCases.put(key, stochasticChosenValue);
        }
        // skipping these configurations, which do not appear in the kafka pod (TODO: investigate why!)
        testCases.remove("num.recovery.threads.per.data.dir");
        testCases.remove("num.io.threads");
        testCases.remove("log.cleaner.dedupe.buffer.size");
        testCases.remove("num.partitions");
        // skipping these configurations because they cause exceptions
        testCases.remove("ssl.cipher.suites");
        testCases.remove("zookeeper.connection.timeout.ms");
        testCases.remove("zookeeper.connect");
    });
    return testCases;
}
Also used : Type(io.strimzi.kafka.config.model.Type) ConfigModel(io.strimzi.kafka.config.model.ConfigModel) HashMap(java.util.HashMap)
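
To show how a map like this might be consumed, here is a short sketch under an explicit assumption: the generated values are rendered as key=value pairs, the way a system test might log them or feed them into a broker configuration. The class name and the three sample entries are illustrative, not the output of a real generateTestCases run.

import java.util.Map;
import java.util.TreeMap;

public class DynamicConfigRenderSketch {
    public static void main(String[] args) {
        // Stand-in for the Map<String, Object> returned by generateTestCases(kafkaVersion); values are made up
        Map<String, Object> testCases = new TreeMap<>();
        testCases.put("unclean.leader.election.enable", true);
        testCases.put("compression.type", "gzip");
        testCases.put("background.threads", 12);

        // Render each chosen value as a key=value line
        StringBuilder config = new StringBuilder();
        testCases.forEach((key, value) ->
                config.append(key).append('=').append(value).append(System.lineSeparator()));
        System.out.print(config);
    }
}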

Aggregations

ConfigModel (io.strimzi.kafka.config.model.ConfigModel) - 28 usages
Test (org.junit.jupiter.api.Test) - 18 usages
Scope (io.strimzi.kafka.config.model.Scope) - 4 usages
Type (io.strimzi.kafka.config.model.Type) - 4 usages
ArrayList (java.util.ArrayList) - 4 usages
Map (java.util.Map) - 4 usages
JsonNode (com.fasterxml.jackson.databind.JsonNode) - 2 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) - 2 usages
SerializationFeature (com.fasterxml.jackson.databind.SerializationFeature) - 2 usages
PatchUtils.patchMapper (io.fabric8.kubernetes.client.internal.PatchUtils.patchMapper) - 2 usages
JsonDiff (io.fabric8.zjsonpatch.JsonDiff) - 2 usages
ConfigModels (io.strimzi.kafka.config.model.ConfigModels) - 2 usages
KafkaConfiguration (io.strimzi.operator.cluster.model.KafkaConfiguration) - 2 usages
KafkaVersion (io.strimzi.operator.cluster.model.KafkaVersion) - 2 usages
Reconciliation (io.strimzi.operator.common.Reconciliation) - 2 usages
ReconciliationLogger (io.strimzi.operator.common.ReconciliationLogger) - 2 usages
OrderedProperties (io.strimzi.operator.common.model.OrderedProperties) - 2 usages
AbstractJsonDiff (io.strimzi.operator.common.operator.resource.AbstractJsonDiff) - 2 usages
File (java.io.File) - 2 usages
Method (java.lang.reflect.Method) - 2 usages