Use of org.apache.kafka.common.config.AbstractConfig in project kafka by apache.
The class AbstractHerder, method validateConnectorConfig.
ConfigInfos validateConnectorConfig(Map<String, String> connectorProps, boolean doLog) {
    if (worker.configTransformer() != null) {
        connectorProps = worker.configTransformer().transform(connectorProps);
    }
    String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
    if (connType == null)
        throw new BadRequestException("Connector config " + connectorProps + " contains no connector type");
    Connector connector = getConnector(connType);
    org.apache.kafka.connect.health.ConnectorType connectorType;
    ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector);
    try {
        ConfigDef baseConfigDef;
        if (connector instanceof SourceConnector) {
            baseConfigDef = SourceConnectorConfig.configDef();
            connectorType = org.apache.kafka.connect.health.ConnectorType.SOURCE;
        } else {
            baseConfigDef = SinkConnectorConfig.configDef();
            SinkConnectorConfig.validate(connectorProps);
            connectorType = org.apache.kafka.connect.health.ConnectorType.SINK;
        }
        ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false);
        Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig(connector, enrichedConfigDef, connectorProps);
        connectorProps.entrySet().stream()
                .filter(e -> e.getValue() == null)
                .map(Map.Entry::getKey)
                .forEach(prop -> validatedConnectorConfig.computeIfAbsent(prop, ConfigValue::new)
                        .addErrorMessage("Null value can not be supplied as the configuration value."));
        List<ConfigValue> configValues = new ArrayList<>(validatedConnectorConfig.values());
        Map<String, ConfigKey> configKeys = new LinkedHashMap<>(enrichedConfigDef.configKeys());
        Set<String> allGroups = new LinkedHashSet<>(enrichedConfigDef.groups());
        // do custom connector-specific validation
        ConfigDef configDef = connector.config();
        if (null == configDef) {
            throw new BadRequestException(String.format("%s.config() must return a ConfigDef that is not null.", connector.getClass().getName()));
        }
        Config config = connector.validate(connectorProps);
        if (null == config) {
            throw new BadRequestException(String.format("%s.validate() must return a Config that is not null.", connector.getClass().getName()));
        }
        configKeys.putAll(configDef.configKeys());
        allGroups.addAll(configDef.groups());
        configValues.addAll(config.configValues());
        ConfigInfos configInfos = generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups));
        AbstractConfig connectorConfig = new AbstractConfig(new ConfigDef(), connectorProps, doLog);
        String connName = connectorProps.get(ConnectorConfig.NAME_CONFIG);
        ConfigInfos producerConfigInfos = null;
        ConfigInfos consumerConfigInfos = null;
        ConfigInfos adminConfigInfos = null;
        if (connectorType.equals(org.apache.kafka.connect.health.ConnectorType.SOURCE)) {
            producerConfigInfos = validateClientOverrides(
                    connName,
                    ConnectorConfig.CONNECTOR_CLIENT_PRODUCER_OVERRIDES_PREFIX,
                    connectorConfig,
                    ProducerConfig.configDef(),
                    connector.getClass(),
                    connectorType,
                    ConnectorClientConfigRequest.ClientType.PRODUCER,
                    connectorClientConfigOverridePolicy);
            return mergeConfigInfos(connType, configInfos, producerConfigInfos);
        } else {
            consumerConfigInfos = validateClientOverrides(
                    connName,
                    ConnectorConfig.CONNECTOR_CLIENT_CONSUMER_OVERRIDES_PREFIX,
                    connectorConfig,
                    ConsumerConfig.configDef(),
                    connector.getClass(),
                    connectorType,
                    ConnectorClientConfigRequest.ClientType.CONSUMER,
                    connectorClientConfigOverridePolicy);
            // check if topic for dead letter queue exists
            String topic = connectorProps.get(SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG);
            if (topic != null && !topic.isEmpty()) {
                adminConfigInfos = validateClientOverrides(
                        connName,
                        ConnectorConfig.CONNECTOR_CLIENT_ADMIN_OVERRIDES_PREFIX,
                        connectorConfig,
                        AdminClientConfig.configDef(),
                        connector.getClass(),
                        connectorType,
                        ConnectorClientConfigRequest.ClientType.ADMIN,
                        connectorClientConfigOverridePolicy);
            }
        }
        return mergeConfigInfos(connType, configInfos, consumerConfigInfos, adminConfigInfos);
    } finally {
        Plugins.compareAndSwapLoaders(savedLoader);
    }
}
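The AbstractConfig constructed above uses an empty ConfigDef, so it validates nothing itself; it merely wraps the raw connector properties so that client override settings can later be pulled back out by prefix and checked against the relevant client's ConfigDef. A minimal sketch of that pattern, using a hypothetical class name and made-up property values rather than the herder's code:

import java.util.Map;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;

// Minimal sketch, not the herder's own code: an AbstractConfig built over an empty
// ConfigDef performs no validation; it only holds the raw connector properties so
// prefixed client overrides can be extracted again via originalsWithPrefix().
public class ClientOverrideSketch {
    public static void main(String[] args) {
        Map<String, String> connectorProps = Map.of(
                "name", "my-sink",                          // hypothetical connector name
                "producer.override.linger.ms", "100");      // hypothetical producer override
        AbstractConfig connectorConfig =
                new AbstractConfig(new ConfigDef(), connectorProps, false);

        // "producer.override." is the value of ConnectorConfig.CONNECTOR_CLIENT_PRODUCER_OVERRIDES_PREFIX;
        // the prefix is stripped, leaving {linger.ms=100} to be validated as a producer setting.
        Map<String, Object> producerOverrides =
                connectorConfig.originalsWithPrefix("producer.override.");
        System.out.println(producerOverrides);
    }
}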
Use of org.apache.kafka.common.config.AbstractConfig in project kafka by apache.
The class FileStreamSinkConnector, method start.
@Override
public void start(Map<String, String> props) {
    AbstractConfig parsedConfig = new AbstractConfig(CONFIG_DEF, props);
    filename = parsedConfig.getString(FILE_CONFIG);
}
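CONFIG_DEF and FILE_CONFIG are defined elsewhere in the connector. A self-contained sketch of the same parse-on-start pattern, with an illustrative ConfigDef, doc string, and file path rather than the connector's actual definitions:

import java.util.Map;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

// Sketch only: a ConfigDef declares the expected keys, and AbstractConfig parses,
// validates, and type-converts the raw property map handed to the connector.
public class FileConfigSketch {
    static final String FILE_CONFIG = "file";
    static final ConfigDef CONFIG_DEF = new ConfigDef()
            .define(FILE_CONFIG, Type.STRING, null, Importance.HIGH,
                    "Destination file name (illustrative doc string)");

    public static void main(String[] args) {
        AbstractConfig parsed =
                new AbstractConfig(CONFIG_DEF, Map.of(FILE_CONFIG, "/tmp/out.txt"));
        String filename = parsed.getString(FILE_CONFIG);  // typed accessor backed by the ConfigDef
        System.out.println(filename);
    }
}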
Use of org.apache.kafka.common.config.AbstractConfig in project kafka by apache.
The class PluginsTest, method shouldShareStaticValuesBetweenSamePlugin.
@Test
public void shouldShareStaticValuesBetweenSamePlugin() {
    // Plugins are not isolated from other instances of their own class.
    TestPlugins.assertAvailable();
    Converter firstPlugin = plugins.newPlugin(
            TestPlugins.ALIASED_STATIC_FIELD,
            new AbstractConfig(new ConfigDef(), Collections.emptyMap()),
            Converter.class);
    assertInstanceOf(SamplingTestPlugin.class, firstPlugin, "Cannot collect samples");
    Converter secondPlugin = plugins.newPlugin(
            TestPlugins.ALIASED_STATIC_FIELD,
            new AbstractConfig(new ConfigDef(), Collections.emptyMap()),
            Converter.class);
    assertInstanceOf(SamplingTestPlugin.class, secondPlugin, "Cannot collect samples");
    assertSame(
            ((SamplingTestPlugin) firstPlugin).otherSamples(),
            ((SamplingTestPlugin) secondPlugin).otherSamples());
}
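The assertSame holds because both instances come from the same plugin classloader, and a static field belongs to the loaded class rather than to either instance. A small illustrative sketch of that point, using a hypothetical class with no Connect dependencies:

// Sketch only (hypothetical class, not the test plugin): static state is per loaded
// class, so two instances created through the same classloader observe the same value.
// Connect's plugin isolation is per-classloader, not per-instance.
public class StaticSharingSketch {
    static class CountingPlugin {
        static int instances = 0;            // one copy per loaded class, not per object
        CountingPlugin() { instances++; }
    }

    public static void main(String[] args) {
        CountingPlugin first = new CountingPlugin();
        CountingPlugin second = new CountingPlugin();
        System.out.println(CountingPlugin.instances);  // prints 2: both share the static field
    }
}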
Use of org.apache.kafka.common.config.AbstractConfig in project kafka by apache.
The class PluginsTest, method newPluginShouldInstantiateWithPluginClassLoader.
@Test
public void newPluginShouldInstantiateWithPluginClassLoader() {
    TestPlugins.assertAvailable();
    Converter plugin = plugins.newPlugin(
            TestPlugins.ALIASED_STATIC_FIELD,
            new AbstractConfig(new ConfigDef(), Collections.emptyMap()),
            Converter.class);
    assertInstanceOf(SamplingTestPlugin.class, plugin, "Cannot collect samples");
    Map<String, SamplingTestPlugin> samples = ((SamplingTestPlugin) plugin).flatten();
    assertPluginClassLoaderAlwaysActive(samples);
}
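The assertion works because the sampling plugin records the classloader that was active while it was being constructed. A hedged sketch of that idea, using a hypothetical class rather than Connect's SamplingTestPlugin:

// Sketch only: a plugin can capture the thread context classloader in its constructor;
// a test can then check that instantiation happened under the expected plugin classloader.
public class LoaderCaptureSketch {
    static class CapturingPlugin {
        final ClassLoader constructedWith = Thread.currentThread().getContextClassLoader();
    }

    public static void main(String[] args) {
        CapturingPlugin plugin = new CapturingPlugin();
        // In PluginsTest the expectation would be a PluginClassLoader; here we just print it.
        System.out.println(plugin.constructedWith);
    }
}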
Use of org.apache.kafka.common.config.AbstractConfig in project kafka by apache.
The class PluginsTest, method shouldThrowIfPluginThrows.
@Test
public void shouldThrowIfPluginThrows() {
    TestPlugins.assertAvailable();
    assertThrows(ConnectException.class, () -> plugins.newPlugin(
            TestPlugins.ALWAYS_THROW_EXCEPTION,
            new AbstractConfig(new ConfigDef(), Collections.emptyMap()),
            Converter.class));
}