
Example 11 with ConfigInfos

use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache.

the class AbstractHerder method validateConnectorConfig.

@Override
public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback, boolean doLog) {
    connectorExecutor.submit(() -> {
        try {
            ConfigInfos result = validateConnectorConfig(connectorProps, doLog);
            callback.onCompletion(null, result);
        } catch (Throwable t) {
            callback.onCompletion(t, null);
        }
    });
}
Also used : ConfigInfos(org.apache.kafka.connect.runtime.rest.entities.ConfigInfos)
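The callback-based API above is typically driven with a Future-style adapter. Below is a hedged usage sketch (not from the Kafka sources), assuming a Herder instance named herder and Kafka Connect's FutureCallback utility, which implements both Callback&lt;T&gt; and Future&lt;T&gt;:

// Hedged usage sketch: drive the asynchronous validation API and block for the result.
// Assumes a Herder instance named `herder`; FutureCallback is
// org.apache.kafka.connect.util.FutureCallback.
FutureCallback<ConfigInfos> validationCallback = new FutureCallback<>();
herder.validateConnectorConfig(connectorProps, validationCallback, false);
// get() rethrows any Throwable passed to onCompletion as an ExecutionException
ConfigInfos result = validationCallback.get(90, TimeUnit.SECONDS);
if (result.errorCount() > 0) {
    // surface per-field validation errors to the caller via result.values()
}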

Example 12 with ConfigInfos

use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache.

the class AbstractHerder method validateConnectorConfig.

ConfigInfos validateConnectorConfig(Map<String, String> connectorProps, boolean doLog) {
    if (worker.configTransformer() != null) {
        connectorProps = worker.configTransformer().transform(connectorProps);
    }
    String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
    if (connType == null)
        throw new BadRequestException("Connector config " + connectorProps + " contains no connector type");
    Connector connector = getConnector(connType);
    org.apache.kafka.connect.health.ConnectorType connectorType;
    ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector);
    try {
        ConfigDef baseConfigDef;
        if (connector instanceof SourceConnector) {
            baseConfigDef = SourceConnectorConfig.configDef();
            connectorType = org.apache.kafka.connect.health.ConnectorType.SOURCE;
        } else {
            baseConfigDef = SinkConnectorConfig.configDef();
            SinkConnectorConfig.validate(connectorProps);
            connectorType = org.apache.kafka.connect.health.ConnectorType.SINK;
        }
        ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false);
        Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig(connector, enrichedConfigDef, connectorProps);
        connectorProps.entrySet().stream()
                .filter(e -> e.getValue() == null)
                .map(Map.Entry::getKey)
                .forEach(prop -> validatedConnectorConfig.computeIfAbsent(prop, ConfigValue::new)
                        .addErrorMessage("Null value can not be supplied as the configuration value."));
        List<ConfigValue> configValues = new ArrayList<>(validatedConnectorConfig.values());
        Map<String, ConfigKey> configKeys = new LinkedHashMap<>(enrichedConfigDef.configKeys());
        Set<String> allGroups = new LinkedHashSet<>(enrichedConfigDef.groups());
        // do custom connector-specific validation
        ConfigDef configDef = connector.config();
        if (null == configDef) {
            throw new BadRequestException(String.format("%s.config() must return a ConfigDef that is not null.", connector.getClass().getName()));
        }
        Config config = connector.validate(connectorProps);
        if (null == config) {
            throw new BadRequestException(String.format("%s.validate() must return a Config that is not null.", connector.getClass().getName()));
        }
        configKeys.putAll(configDef.configKeys());
        allGroups.addAll(configDef.groups());
        configValues.addAll(config.configValues());
        ConfigInfos configInfos = generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups));
        AbstractConfig connectorConfig = new AbstractConfig(new ConfigDef(), connectorProps, doLog);
        String connName = connectorProps.get(ConnectorConfig.NAME_CONFIG);
        ConfigInfos producerConfigInfos = null;
        ConfigInfos consumerConfigInfos = null;
        ConfigInfos adminConfigInfos = null;
        if (connectorType.equals(org.apache.kafka.connect.health.ConnectorType.SOURCE)) {
            producerConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_PRODUCER_OVERRIDES_PREFIX,
                    connectorConfig, ProducerConfig.configDef(), connector.getClass(), connectorType,
                    ConnectorClientConfigRequest.ClientType.PRODUCER, connectorClientConfigOverridePolicy);
            return mergeConfigInfos(connType, configInfos, producerConfigInfos);
        } else {
            consumerConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_CONSUMER_OVERRIDES_PREFIX,
                    connectorConfig, ConsumerConfig.configDef(), connector.getClass(), connectorType,
                    ConnectorClientConfigRequest.ClientType.CONSUMER, connectorClientConfigOverridePolicy);
            // check if topic for dead letter queue exists
            String topic = connectorProps.get(SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG);
            if (topic != null && !topic.isEmpty()) {
                adminConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_ADMIN_OVERRIDES_PREFIX,
                        connectorConfig, AdminClientConfig.configDef(), connector.getClass(), connectorType,
                        ConnectorClientConfigRequest.ClientType.ADMIN, connectorClientConfigOverridePolicy);
            }
        }
        return mergeConfigInfos(connType, configInfos, consumerConfigInfos, adminConfigInfos);
    } finally {
        Plugins.compareAndSwapLoaders(savedLoader);
    }
}
Also used : LinkedHashSet(java.util.LinkedHashSet) SourceConnector(org.apache.kafka.connect.source.SourceConnector) Connector(org.apache.kafka.connect.connector.Connector) ConfigValue(org.apache.kafka.common.config.ConfigValue) ConfigKey(org.apache.kafka.common.config.ConfigDef.ConfigKey) Config(org.apache.kafka.common.config.Config) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) AdminClientConfig(org.apache.kafka.clients.admin.AdminClientConfig) AbstractConfig(org.apache.kafka.common.config.AbstractConfig) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) BadRequestException(org.apache.kafka.connect.runtime.rest.errors.BadRequestException) ConfigDef(org.apache.kafka.common.config.ConfigDef) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap) ConfigInfos(org.apache.kafka.connect.runtime.rest.entities.ConfigInfos)
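For illustration, a hedged sketch of sink connector properties that exercise the sink branch above: consumer overrides are always validated for sinks, and a non-empty dead letter queue topic additionally triggers validation of admin client overrides (the class name and values are hypothetical):

// Hypothetical sink connector properties.
Map<String, String> connectorProps = new HashMap<>();
connectorProps.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, "com.example.MySinkConnector");
connectorProps.put(ConnectorConfig.NAME_CONFIG, "my-sink");
// consumer.override.* keys are checked against ConsumerConfig.configDef()
connectorProps.put(ConnectorConfig.CONNECTOR_CLIENT_CONSUMER_OVERRIDES_PREFIX + "max.poll.records", "500");
// a non-empty DLQ topic causes admin.override.* keys to be validated as well
connectorProps.put(SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG, "my-sink-dlq");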

Example 13 with ConfigInfos

use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache.

the class AbstractHerder method mergeConfigInfos.

private static ConfigInfos mergeConfigInfos(String connType, ConfigInfos... configInfosList) {
    int errorCount = 0;
    List<ConfigInfo> configInfoList = new LinkedList<>();
    Set<String> groups = new LinkedHashSet<>();
    for (ConfigInfos configInfos : configInfosList) {
        if (configInfos != null) {
            errorCount += configInfos.errorCount();
            configInfoList.addAll(configInfos.values());
            groups.addAll(configInfos.groups());
        }
    }
    return new ConfigInfos(connType, errorCount, new ArrayList<>(groups), configInfoList);
}
Also used : LinkedHashSet(java.util.LinkedHashSet) ConfigInfo(org.apache.kafka.connect.runtime.rest.entities.ConfigInfo) LinkedList(java.util.LinkedList) ConfigInfos(org.apache.kafka.connect.runtime.rest.entities.ConfigInfos)
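To illustrate the merge semantics, a minimal sketch using the ConfigInfos constructor seen above (mergeConfigInfos itself is private, so this only mirrors its behavior): error counts are summed and group names are unioned in encounter order.

// Sketch only: two partial validation results for the same connector type (hypothetical names).
ConfigInfos connectorResult = new ConfigInfos("com.example.MySinkConnector", 1,
        Arrays.asList("Common", "Transforms"), Collections.<ConfigInfo>emptyList());
ConfigInfos consumerResult = new ConfigInfos("com.example.MySinkConnector", 2,
        Collections.singletonList("Common"), Collections.<ConfigInfo>emptyList());
// Merging these would yield errorCount() == 3 and groups() == ["Common", "Transforms"];
// the LinkedHashSet drops the duplicate group name while preserving insertion order.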

Example 14 with ConfigInfos

use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache.

the class EmbeddedConnectCluster method validateConnectorConfig.

/**
 * Validate a given connector configuration. Whether the configuration is valid or
 * contains configuration errors, an instance of {@link ConfigInfos} is returned. If the
 * validation request itself fails, an exception is thrown.
 *
 * @param connClassName the name of the connector class
 * @param connConfig    the intended configuration
 * @throws ConnectRestException if the REST API returns an error status
 * @throws ConnectException if the configuration fails to serialize/deserialize or if the request fails to send
 */
public ConfigInfos validateConnectorConfig(String connClassName, Map<String, String> connConfig) {
    String url = endpointForResource(String.format("connector-plugins/%s/config/validate", connClassName));
    String response = putConnectorConfig(url, connConfig);
    ConfigInfos configInfos;
    try {
        configInfos = new ObjectMapper().readValue(response, ConfigInfos.class);
    } catch (IOException e) {
        throw new ConnectException("Unable deserialize response into a ConfigInfos object");
    }
    return configInfos;
}
Also used : IOException(java.io.IOException) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) ConfigInfos(org.apache.kafka.connect.runtime.rest.entities.ConfigInfos) ConnectException(org.apache.kafka.connect.errors.ConnectException)
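A hedged usage sketch for an integration test, assuming a started EmbeddedConnectCluster named connect; the connector class name and topic are hypothetical:

// Assumes `connect` is a started EmbeddedConnectCluster; class name and topic are hypothetical.
Map<String, String> config = new HashMap<>();
config.put("connector.class", "com.example.MySinkConnector");
config.put("name", "my-sink");
config.put("topics", "validation-test-topic");
ConfigInfos validation = connect.validateConnectorConfig("com.example.MySinkConnector", config);
// Inspect validation.errorCount() and validation.values() for per-field error messages.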

Example 15 with ConfigInfos

use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache.

the class ConnectorPluginsResourceTest method testValidateConfigWithSimpleName.

@Test
public void testValidateConfigWithSimpleName() throws Throwable {
    Capture<Callback<ConfigInfos>> configInfosCallback = EasyMock.newCapture();
    herder.validateConnectorConfig(EasyMock.eq(props), EasyMock.capture(configInfosCallback), EasyMock.anyBoolean());
    PowerMock.expectLastCall().andAnswer((IAnswer<ConfigInfos>) () -> {
        ConfigDef connectorConfigDef = ConnectorConfig.configDef();
        List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(props);
        Connector connector = new ConnectorPluginsResourceTestConnector();
        Config config = connector.validate(props);
        ConfigDef configDef = connector.config();
        Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys();
        List<ConfigValue> configValues = config.configValues();
        Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys);
        resultConfigKeys.putAll(connectorConfigDef.configKeys());
        configValues.addAll(connectorConfigValues);
        ConfigInfos configInfos = AbstractHerder.generateResult(ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test"));
        configInfosCallback.getValue().onCompletion(null, configInfos);
        return null;
    });
    PowerMock.replayAll();
    // make a request to connector-plugins resource using just the simple class name.
    ConfigInfos configInfos = connectorPluginsResource.validateConfigs(ConnectorPluginsResourceTestConnector.class.getSimpleName(), props);
    assertEquals(CONFIG_INFOS.name(), configInfos.name());
    assertEquals(0, configInfos.errorCount());
    assertEquals(CONFIG_INFOS.groups(), configInfos.groups());
    assertEquals(new HashSet<>(CONFIG_INFOS.values()), new HashSet<>(configInfos.values()));
    PowerMock.verifyAll();
}
Also used : TestSourceConnector(org.apache.kafka.connect.runtime.TestSourceConnector) SourceConnector(org.apache.kafka.connect.source.SourceConnector) VerifiableSourceConnector(org.apache.kafka.connect.tools.VerifiableSourceConnector) MockSinkConnector(org.apache.kafka.connect.tools.MockSinkConnector) MockSourceConnector(org.apache.kafka.connect.tools.MockSourceConnector) VerifiableSinkConnector(org.apache.kafka.connect.tools.VerifiableSinkConnector) MockConnector(org.apache.kafka.connect.tools.MockConnector) Connector(org.apache.kafka.connect.connector.Connector) SchemaSourceConnector(org.apache.kafka.connect.tools.SchemaSourceConnector) SinkConnector(org.apache.kafka.connect.sink.SinkConnector) TestSinkConnector(org.apache.kafka.connect.runtime.TestSinkConnector) Callback(org.apache.kafka.connect.util.Callback) WorkerConfig(org.apache.kafka.connect.runtime.WorkerConfig) ConnectorConfig(org.apache.kafka.connect.runtime.ConnectorConfig) Config(org.apache.kafka.common.config.Config) Arrays.asList(java.util.Arrays.asList) List(java.util.List) LinkedList(java.util.LinkedList) ConfigDef(org.apache.kafka.common.config.ConfigDef) Map(java.util.Map) HashMap(java.util.HashMap) ConfigInfos(org.apache.kafka.connect.runtime.rest.entities.ConfigInfos) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)

Aggregations

ConfigInfos (org.apache.kafka.connect.runtime.rest.entities.ConfigInfos) 29
HashMap (java.util.HashMap) 23
Test (org.junit.Test) 20
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest) 20
Map (java.util.Map) 16
ConfigDef (org.apache.kafka.common.config.ConfigDef) 13
ConfigValue (org.apache.kafka.common.config.ConfigValue) 13
Connector (org.apache.kafka.connect.connector.Connector) 13
SourceConnector (org.apache.kafka.connect.source.SourceConnector) 13
List (java.util.List) 12
LinkedList (java.util.LinkedList) 11
ArrayList (java.util.ArrayList) 10
ConfigInfo (org.apache.kafka.connect.runtime.rest.entities.ConfigInfo) 9
Config (org.apache.kafka.common.config.Config) 8
ConnectorConfig (org.apache.kafka.connect.runtime.ConnectorConfig) 7
BadRequestException (org.apache.kafka.connect.runtime.rest.errors.BadRequestException) 7
SinkConnector (org.apache.kafka.connect.sink.SinkConnector) 7
MockConnector (org.apache.kafka.connect.tools.MockConnector) 7
MockSinkConnector (org.apache.kafka.connect.tools.MockSinkConnector) 7
MockSourceConnector (org.apache.kafka.connect.tools.MockSourceConnector) 7