Search in sources :

Example 26 with ConfigDef

Use of org.apache.kafka.common.config.ConfigDef in the Apache Kafka project.

From class ExpiringCredentialRefreshingLoginTest, method refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime.

/**
 * Builds a refresh config whose login refresh fires at exactly the given
 * fraction of the credential lifetime: jitter is pinned to zero so the
 * window factor alone determines the refresh point.
 *
 * @param refreshWindowFactor fraction of the credential lifetime at which to refresh
 * @param minPeriodSeconds minimum allowed refresh period, in seconds
 * @param bufferSeconds buffer to maintain before credential expiration, in seconds
 * @param clientReloginAllowedBeforeLogout whether the client may re-login before logging out
 * @return the parsed {@link ExpiringCredentialRefreshConfig}
 */
private static ExpiringCredentialRefreshConfig refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(double refreshWindowFactor, short minPeriodSeconds, short bufferSeconds, boolean clientReloginAllowedBeforeLogout) {
    Map<Object, Object> refreshRelatedConfigs = new HashMap<>();
    refreshRelatedConfigs.put(SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_JITTER, 0);
    refreshRelatedConfigs.put(SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_FACTOR, refreshWindowFactor);
    refreshRelatedConfigs.put(SaslConfigs.SASL_LOGIN_REFRESH_MIN_PERIOD_SECONDS, minPeriodSeconds);
    refreshRelatedConfigs.put(SaslConfigs.SASL_LOGIN_REFRESH_BUFFER_SECONDS, bufferSeconds);
    // Parse through a ConfigDef with client-side SASL support so defaults and
    // validation match what a Kafka client would apply.
    Map<String, Object> parsedConfigs = new ConfigDef().withClientSaslSupport().parse(refreshRelatedConfigs);
    return new ExpiringCredentialRefreshConfig(parsedConfigs, clientReloginAllowedBeforeLogout);
}
Also used : HashMap(java.util.HashMap) ConfigDef(org.apache.kafka.common.config.ConfigDef)

Example 27 with ConfigDef

Use of org.apache.kafka.common.config.ConfigDef in the Apache Kafka project.

From class AbstractHerder, method validateConnectorConfig.

/**
 * Validates a connector configuration end-to-end: resolves the connector class,
 * applies the base + plugin-enriched ConfigDef, runs the connector's own
 * {@code validate()}, and additionally validates any client-override sections
 * (producer for sources; consumer and, when a DLQ topic is set, admin for sinks).
 *
 * FIX(review): the consumer and admin client-override sections were previously
 * validated against {@code ProducerConfig.configDef()}; they are now validated
 * against the consumer and admin-client ConfigDefs respectively.
 *
 * @param connectorProps raw connector properties (may be transformed before validation)
 * @param doLog whether the intermediate AbstractConfig should log its values
 * @return the merged validation result for the connector and client-override configs
 * @throws BadRequestException if the connector class is missing, or the connector's
 *         {@code config()}/{@code validate()} return null
 */
ConfigInfos validateConnectorConfig(Map<String, String> connectorProps, boolean doLog) {
    // Resolve externalized config values (e.g. secrets) before validating.
    if (worker.configTransformer() != null) {
        connectorProps = worker.configTransformer().transform(connectorProps);
    }
    String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
    if (connType == null)
        throw new BadRequestException("Connector config " + connectorProps + " contains no connector type");
    Connector connector = getConnector(connType);
    org.apache.kafka.connect.health.ConnectorType connectorType;
    // Swap in the connector's plugin classloader for the duration of validation.
    ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector);
    try {
        ConfigDef baseConfigDef;
        if (connector instanceof SourceConnector) {
            baseConfigDef = SourceConnectorConfig.configDef();
            connectorType = org.apache.kafka.connect.health.ConnectorType.SOURCE;
        } else {
            baseConfigDef = SinkConnectorConfig.configDef();
            SinkConnectorConfig.validate(connectorProps);
            connectorType = org.apache.kafka.connect.health.ConnectorType.SINK;
        }
        ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false);
        Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig(connector, enrichedConfigDef, connectorProps);
        // Surface explicit null values as per-property validation errors rather than NPEs later.
        connectorProps.entrySet().stream().filter(e -> e.getValue() == null).map(Map.Entry::getKey).forEach(prop -> validatedConnectorConfig.computeIfAbsent(prop, ConfigValue::new).addErrorMessage("Null value can not be supplied as the configuration value."));
        List<ConfigValue> configValues = new ArrayList<>(validatedConnectorConfig.values());
        Map<String, ConfigKey> configKeys = new LinkedHashMap<>(enrichedConfigDef.configKeys());
        Set<String> allGroups = new LinkedHashSet<>(enrichedConfigDef.groups());
        // do custom connector-specific validation
        ConfigDef configDef = connector.config();
        if (null == configDef) {
            throw new BadRequestException(String.format("%s.config() must return a ConfigDef that is not null.", connector.getClass().getName()));
        }
        Config config = connector.validate(connectorProps);
        if (null == config) {
            throw new BadRequestException(String.format("%s.validate() must return a Config that is not null.", connector.getClass().getName()));
        }
        configKeys.putAll(configDef.configKeys());
        allGroups.addAll(configDef.groups());
        configValues.addAll(config.configValues());
        ConfigInfos configInfos = generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups));
        AbstractConfig connectorConfig = new AbstractConfig(new ConfigDef(), connectorProps, doLog);
        String connName = connectorProps.get(ConnectorConfig.NAME_CONFIG);
        ConfigInfos producerConfigInfos = null;
        ConfigInfos consumerConfigInfos = null;
        ConfigInfos adminConfigInfos = null;
        if (connectorType.equals(org.apache.kafka.connect.health.ConnectorType.SOURCE)) {
            producerConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_PRODUCER_OVERRIDES_PREFIX, connectorConfig, ProducerConfig.configDef(), connector.getClass(), connectorType, ConnectorClientConfigRequest.ClientType.PRODUCER, connectorClientConfigOverridePolicy);
            return mergeConfigInfos(connType, configInfos, producerConfigInfos);
        } else {
            // Consumer overrides must be validated against the consumer ConfigDef,
            // not the producer's (fully qualified to avoid touching imports).
            consumerConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_CONSUMER_OVERRIDES_PREFIX, connectorConfig, org.apache.kafka.clients.consumer.ConsumerConfig.configDef(), connector.getClass(), connectorType, ConnectorClientConfigRequest.ClientType.CONSUMER, connectorClientConfigOverridePolicy);
            // check if topic for dead letter queue exists
            String topic = connectorProps.get(SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG);
            if (topic != null && !topic.isEmpty()) {
                // Admin overrides (used for DLQ topic management) are validated
                // against the admin-client ConfigDef.
                adminConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_ADMIN_OVERRIDES_PREFIX, connectorConfig, org.apache.kafka.clients.admin.AdminClientConfig.configDef(), connector.getClass(), connectorType, ConnectorClientConfigRequest.ClientType.ADMIN, connectorClientConfigOverridePolicy);
            }
        }
        return mergeConfigInfos(connType, configInfos, consumerConfigInfos, adminConfigInfos);
    } finally {
        // Always restore the previous classloader, even when validation throws.
        Plugins.compareAndSwapLoaders(savedLoader);
    }
}
Also used : LinkedHashSet(java.util.LinkedHashSet) SourceConnector(org.apache.kafka.connect.source.SourceConnector) Connector(org.apache.kafka.connect.connector.Connector) ConfigValue(org.apache.kafka.common.config.ConfigValue) ConfigKey(org.apache.kafka.common.config.ConfigDef.ConfigKey) Config(org.apache.kafka.common.config.Config) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) AbstractConfig(org.apache.kafka.common.config.AbstractConfig) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) SourceConnector(org.apache.kafka.connect.source.SourceConnector) AbstractConfig(org.apache.kafka.common.config.AbstractConfig) BadRequestException(org.apache.kafka.connect.runtime.rest.errors.BadRequestException) ConfigDef(org.apache.kafka.common.config.ConfigDef) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap) LinkedHashMap(java.util.LinkedHashMap) ConfigInfos(org.apache.kafka.connect.runtime.rest.entities.ConfigInfos)

Example 28 with ConfigDef

Use of org.apache.kafka.common.config.ConfigDef in the Apache Kafka project.

From class MirrorConnectorConfigTest, method testNonMutationOfConfigDef.

@Test
public void testNonMutationOfConfigDef() {
    // Properties that belong only to the task-level ConfigDef.
    Collection<String> taskOnlyProps = Arrays.asList(MirrorConnectorConfig.TASK_TOPIC_PARTITIONS, MirrorConnectorConfig.TASK_CONSUMER_GROUPS);
    // Sanity check to make sure that these properties are actually defined for the task config,
    // and that the task config class has been loaded and statically initialized by the JVM
    ConfigDef taskConfigDef = MirrorTaskConfig.TASK_CONFIG_DEF;
    for (String taskSpecificProperty : taskOnlyProps) {
        assertTrue(taskConfigDef.names().contains(taskSpecificProperty), taskSpecificProperty + " should be defined for task ConfigDef");
    }
    // Ensure that the task config class hasn't accidentally modified the connector config
    ConfigDef connectorConfigDef = MirrorConnectorConfig.CONNECTOR_CONFIG_DEF;
    for (String taskSpecificProperty : taskOnlyProps) {
        assertFalse(connectorConfigDef.names().contains(taskSpecificProperty), taskSpecificProperty + " should not be defined for connector ConfigDef");
    }
}
Also used : ConfigDef(org.apache.kafka.common.config.ConfigDef) Test(org.junit.jupiter.api.Test)

Example 29 with ConfigDef

Use of org.apache.kafka.common.config.ConfigDef in the Apache Kafka project.

From class QuotaConfigs, method ipConfigs.

/**
 * Builds the ConfigDef for per-IP quota overrides: a single non-negative
 * connection-rate limit defaulting to Integer.MAX_VALUE (effectively unlimited).
 *
 * @return a freshly constructed ConfigDef describing the per-IP quota settings
 */
public static ConfigDef ipConfigs() {
    // define() is fluent and returns the same ConfigDef instance.
    return new ConfigDef().define(IP_CONNECTION_RATE_OVERRIDE_CONFIG, ConfigDef.Type.INT, Integer.MAX_VALUE, ConfigDef.Range.atLeast(0), ConfigDef.Importance.MEDIUM, IP_CONNECTION_RATE_DOC);
}
Also used : ConfigDef(org.apache.kafka.common.config.ConfigDef)

Example 30 with ConfigDef

Use of org.apache.kafka.common.config.ConfigDef in the Apache Kafka project.

From class ConfigurationControlManager, method isSplittable.

/**
 * Reports whether the named config on the given resource type is a LIST-typed
 * config (and therefore can be split into individual entries).
 *
 * @param type the resource type whose ConfigDef to consult
 * @param key the configuration key to look up
 * @return true only when the key is known for the resource type and declared as LIST
 */
boolean isSplittable(ConfigResource.Type type, String key) {
    ConfigDef resourceDef = configDefs.get(type);
    if (resourceDef == null) {
        // Unknown resource type: nothing to split.
        return false;
    }
    ConfigKey keyDef = resourceDef.configKeys().get(key);
    return keyDef != null && keyDef.type == ConfigDef.Type.LIST;
}
Also used : ConfigKey(org.apache.kafka.common.config.ConfigDef.ConfigKey) ConfigDef(org.apache.kafka.common.config.ConfigDef)

Aggregations

ConfigDef (org.apache.kafka.common.config.ConfigDef)69 Config (org.apache.kafka.common.config.Config)29 Test (org.junit.Test)28 Connector (org.apache.kafka.connect.connector.Connector)27 SourceConnector (org.apache.kafka.connect.source.SourceConnector)26 ConnectorConfig (org.apache.kafka.connect.runtime.ConnectorConfig)24 SinkConnector (org.apache.kafka.connect.sink.SinkConnector)23 HashMap (java.util.HashMap)20 ConfigValue (org.apache.kafka.common.config.ConfigValue)19 PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest)19 TaskConfig (org.apache.kafka.connect.runtime.TaskConfig)17 Map (java.util.Map)14 SinkConnectorConfig (org.apache.kafka.connect.runtime.SinkConnectorConfig)14 WorkerConfig (org.apache.kafka.connect.runtime.WorkerConfig)13 List (java.util.List)12 ArrayList (java.util.ArrayList)10 Herder (org.apache.kafka.connect.runtime.Herder)10 ConnectorInfo (org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo)10 BadRequestException (org.apache.kafka.connect.runtime.rest.errors.BadRequestException)10 AbstractConfig (org.apache.kafka.common.config.AbstractConfig)9