Use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache — class AbstractHerder, method validateConnectorConfig (asynchronous overload).
@Override
public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback, boolean doLog) {
    // Run validation off the request thread; the callback receives either the
    // validation result or the failure cause, never both.
    connectorExecutor.submit(() -> {
        try {
            callback.onCompletion(null, validateConnectorConfig(connectorProps, doLog));
        } catch (Throwable t) {
            // Surface any failure (including unexpected errors) to the caller.
            callback.onCompletion(t, null);
        }
    });
}
Use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache — class AbstractHerder, method validateConnectorConfig (synchronous implementation).
/**
 * Synchronously validates a connector configuration: resolves the connector class,
 * validates the config against the enriched base ConfigDef plus the connector's own
 * {@code config()}/{@code validate()}, then validates any client override sections.
 *
 * @param connectorProps the raw connector configuration (may be transformed first)
 * @param doLog whether the resulting AbstractConfig should log its values
 * @return the merged validation result for the connector and its client overrides
 * @throws BadRequestException if the connector class is missing, or the connector
 *         returns a null ConfigDef/Config
 */
ConfigInfos validateConnectorConfig(Map<String, String> connectorProps, boolean doLog) {
    // Apply config providers (externalized secrets etc.) before validating.
    if (worker.configTransformer() != null) {
        connectorProps = worker.configTransformer().transform(connectorProps);
    }
    String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
    if (connType == null)
        throw new BadRequestException("Connector config " + connectorProps + " contains no connector type");
    Connector connector = getConnector(connType);
    org.apache.kafka.connect.health.ConnectorType connectorType;
    // Swap to the connector's plugin classloader while touching connector code.
    ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector);
    try {
        ConfigDef baseConfigDef;
        if (connector instanceof SourceConnector) {
            baseConfigDef = SourceConnectorConfig.configDef();
            connectorType = org.apache.kafka.connect.health.ConnectorType.SOURCE;
        } else {
            baseConfigDef = SinkConnectorConfig.configDef();
            SinkConnectorConfig.validate(connectorProps);
            connectorType = org.apache.kafka.connect.health.ConnectorType.SINK;
        }
        ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false);
        Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig(connector, enrichedConfigDef, connectorProps);
        // Null values are never valid configuration values; report each as an error.
        connectorProps.entrySet().stream()
                .filter(e -> e.getValue() == null)
                .map(Map.Entry::getKey)
                .forEach(prop -> validatedConnectorConfig.computeIfAbsent(prop, ConfigValue::new)
                        .addErrorMessage("Null value can not be supplied as the configuration value."));
        List<ConfigValue> configValues = new ArrayList<>(validatedConnectorConfig.values());
        Map<String, ConfigKey> configKeys = new LinkedHashMap<>(enrichedConfigDef.configKeys());
        Set<String> allGroups = new LinkedHashSet<>(enrichedConfigDef.groups());
        // do custom connector-specific validation
        ConfigDef configDef = connector.config();
        if (null == configDef) {
            throw new BadRequestException(String.format("%s.config() must return a ConfigDef that is not null.", connector.getClass().getName()));
        }
        Config config = connector.validate(connectorProps);
        if (null == config) {
            throw new BadRequestException(String.format("%s.validate() must return a Config that is not null.", connector.getClass().getName()));
        }
        configKeys.putAll(configDef.configKeys());
        allGroups.addAll(configDef.groups());
        configValues.addAll(config.configValues());
        ConfigInfos configInfos = generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups));
        AbstractConfig connectorConfig = new AbstractConfig(new ConfigDef(), connectorProps, doLog);
        String connName = connectorProps.get(ConnectorConfig.NAME_CONFIG);
        ConfigInfos producerConfigInfos = null;
        ConfigInfos consumerConfigInfos = null;
        ConfigInfos adminConfigInfos = null;
        if (connectorType.equals(org.apache.kafka.connect.health.ConnectorType.SOURCE)) {
            // Source connectors only write with a producer.
            producerConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_PRODUCER_OVERRIDES_PREFIX, connectorConfig, ProducerConfig.configDef(), connector.getClass(), connectorType, ConnectorClientConfigRequest.ClientType.PRODUCER, connectorClientConfigOverridePolicy);
            return mergeConfigInfos(connType, configInfos, producerConfigInfos);
        } else {
            // FIX: consumer overrides must be validated against ConsumerConfig,
            // not ProducerConfig (was a copy-paste error).
            consumerConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_CONSUMER_OVERRIDES_PREFIX, connectorConfig, ConsumerConfig.configDef(), connector.getClass(), connectorType, ConnectorClientConfigRequest.ClientType.CONSUMER, connectorClientConfigOverridePolicy);
            // check if topic for dead letter queue exists
            String topic = connectorProps.get(SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG);
            if (topic != null && !topic.isEmpty()) {
                // FIX: admin overrides must be validated against AdminClientConfig,
                // not ProducerConfig (was a copy-paste error).
                adminConfigInfos = validateClientOverrides(connName, ConnectorConfig.CONNECTOR_CLIENT_ADMIN_OVERRIDES_PREFIX, connectorConfig, AdminClientConfig.configDef(), connector.getClass(), connectorType, ConnectorClientConfigRequest.ClientType.ADMIN, connectorClientConfigOverridePolicy);
            }
        }
        return mergeConfigInfos(connType, configInfos, consumerConfigInfos, adminConfigInfos);
    } finally {
        // Always restore the original classloader, even on validation failure.
        Plugins.compareAndSwapLoaders(savedLoader);
    }
}
Use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache — class AbstractHerder, method mergeConfigInfos.
/**
 * Merges several partial validation results into a single ConfigInfos.
 * Null entries (sections that were not validated) are skipped; error counts
 * are summed, values are concatenated, and groups are deduplicated while
 * preserving first-seen order.
 */
private static ConfigInfos mergeConfigInfos(String connType, ConfigInfos... configInfosList) {
    int totalErrors = 0;
    List<ConfigInfo> mergedValues = new ArrayList<>();
    Set<String> mergedGroups = new LinkedHashSet<>();
    for (ConfigInfos infos : configInfosList) {
        if (infos == null) {
            continue;
        }
        totalErrors += infos.errorCount();
        mergedValues.addAll(infos.values());
        mergedGroups.addAll(infos.groups());
    }
    return new ConfigInfos(connType, totalErrors, new ArrayList<>(mergedGroups), mergedValues);
}
Use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache — class EmbeddedConnectCluster, method validateConnectorConfig.
/**
 * Validate a given connector configuration. If the configuration validates or
 * has a configuration error, an instance of {@link ConfigInfos} is returned. If the validation fails
 * an exception is thrown.
 *
 * @param connClassName the name of the connector class
 * @param connConfig the intended configuration
 * @throws ConnectRestException if the REST api returns error status
 * @throws ConnectException if the configuration fails to serialize/deserialize or if the request failed to send
 */
public ConfigInfos validateConnectorConfig(String connClassName, Map<String, String> connConfig) {
    String url = endpointForResource(String.format("connector-plugins/%s/config/validate", connClassName));
    String response = putConnectorConfig(url, connConfig);
    try {
        return new ObjectMapper().readValue(response, ConfigInfos.class);
    } catch (IOException e) {
        // FIX: chain the cause so callers can diagnose why deserialization
        // failed (it was previously dropped), and correct the message grammar.
        throw new ConnectException("Unable to deserialize response into a ConfigInfos object", e);
    }
}
Use of org.apache.kafka.connect.runtime.rest.entities.ConfigInfos in project kafka by apache — class ConnectorPluginsResourceTest, method testValidateConfigWithSimpleName.
@Test
public void testValidateConfigWithSimpleName() throws Throwable {
// Capture the callback the resource passes to the herder so the test can
// complete it with a locally computed validation result.
Capture<Callback<ConfigInfos>> configInfosCallback = EasyMock.newCapture();
herder.validateConnectorConfig(EasyMock.eq(props), EasyMock.capture(configInfosCallback), EasyMock.anyBoolean());
// Stub the herder: instead of real validation, build the expected ConfigInfos
// from the base connector ConfigDef plus the test connector's own config.
PowerMock.expectLastCall().andAnswer((IAnswer<ConfigInfos>) () -> {
ConfigDef connectorConfigDef = ConnectorConfig.configDef();
List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(props);
Connector connector = new ConnectorPluginsResourceTestConnector();
Config config = connector.validate(props);
ConfigDef configDef = connector.config();
Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys();
List<ConfigValue> configValues = config.configValues();
// Merge base keys/values with the connector-specific ones, mirroring what
// AbstractHerder.validateConnectorConfig does for a real connector.
Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys);
resultConfigKeys.putAll(connectorConfigDef.configKeys());
configValues.addAll(connectorConfigValues);
ConfigInfos configInfos = AbstractHerder.generateResult(ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test"));
// Deliver the result through the captured callback (no error).
configInfosCallback.getValue().onCompletion(null, configInfos);
return null;
});
PowerMock.replayAll();
// make a request to connector-plugins resource using just the simple class name.
ConfigInfos configInfos = connectorPluginsResource.validateConfigs(ConnectorPluginsResourceTestConnector.class.getSimpleName(), props);
// The resource must resolve the simple name to the full class and return the
// same result the herder produced (value order is not guaranteed, hence sets).
assertEquals(CONFIG_INFOS.name(), configInfos.name());
assertEquals(0, configInfos.errorCount());
assertEquals(CONFIG_INFOS.groups(), configInfos.groups());
assertEquals(new HashSet<>(CONFIG_INFOS.values()), new HashSet<>(configInfos.values()));
PowerMock.verifyAll();
}
Aggregations