Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache.
The class Plugins, method newConfigProvider:
public ConfigProvider newConfigProvider(AbstractConfig config, String providerPrefix, ClassLoaderUsage classLoaderUsage) {
    String classPropertyName = providerPrefix + ".class";
    Map<String, String> originalConfig = config.originalsStrings();
    if (!originalConfig.containsKey(classPropertyName)) {
        // This configuration does not define the config provider via the specified property name
        return null;
    }
    Class<? extends ConfigProvider> klass = null;
    switch (classLoaderUsage) {
        case CURRENT_CLASSLOADER:
            // Attempt to load first with the current classloader, and plugins as a fallback.
            klass = pluginClassFromConfig(config, classPropertyName, ConfigProvider.class, delegatingLoader.configProviders());
            break;
        case PLUGINS:
            // Attempt to load with the plugin class loader, which uses the current classloader as a fallback
            String configProviderClassOrAlias = originalConfig.get(classPropertyName);
            try {
                klass = pluginClass(delegatingLoader, configProviderClassOrAlias, ConfigProvider.class);
            } catch (ClassNotFoundException e) {
                throw new ConnectException("Failed to find any class that implements ConfigProvider and which name matches "
                        + configProviderClassOrAlias + ", available ConfigProviders are: "
                        + pluginNames(delegatingLoader.configProviders()));
            }
            break;
    }
    if (klass == null) {
        throw new ConnectException("Unable to initialize the ConfigProvider specified in '" + classPropertyName + "'");
    }
    // Configure the ConfigProvider
    String configPrefix = providerPrefix + ".param.";
    Map<String, Object> configProviderConfig = config.originalsWithPrefix(configPrefix);
    ConfigProvider plugin;
    ClassLoader savedLoader = compareAndSwapLoaders(klass.getClassLoader());
    try {
        plugin = newPlugin(klass);
        plugin.configure(configProviderConfig);
    } finally {
        compareAndSwapLoaders(savedLoader);
    }
    return plugin;
}
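In a worker configuration, providers live under the config.providers.* namespace, and providerPrefix is the per-provider prefix such as config.providers.file. The sketch below mirrors how the method derives the .class property and strips the .param. prefix, much as config.originalsWithPrefix(configPrefix) does; the provider alias "file", the class, and the secrets path are illustrative assumptions, not taken from the snippet above.

import java.util.Map;
import java.util.stream.Collectors;

public class ConfigProviderPrefixDemo {
    public static void main(String[] args) {
        // Hypothetical worker properties; the "file" alias and the path are made up for the example.
        Map<String, String> worker = Map.of(
                "config.providers", "file",
                "config.providers.file.class", "org.apache.kafka.common.config.provider.FileConfigProvider",
                "config.providers.file.param.secrets", "/tmp/worker-secrets.properties");

        String providerPrefix = "config.providers.file";
        String classPropertyName = providerPrefix + ".class";
        String configPrefix = providerPrefix + ".param.";

        // The class name the method would try to load for this provider.
        System.out.println(worker.get(classPropertyName));

        // The same prefix stripping that originalsWithPrefix(configPrefix) performs:
        // only ".param." keys survive, with the prefix removed, and would be passed to configure().
        Map<String, String> providerConfig = worker.entrySet().stream()
                .filter(e -> e.getKey().startsWith(configPrefix))
                .collect(Collectors.toMap(e -> e.getKey().substring(configPrefix.length()), Map.Entry::getValue));
        System.out.println(providerConfig);
    }
}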
Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache.
The class KafkaConfigBackingStore, method putTaskConfigs:
/**
 * Write these task configurations and associated commit messages, unless an inconsistency is found that indicates
 * that we would be leaving one of the referenced connectors with an inconsistent state.
 *
 * @param connector the connector to write task configuration
 * @param configs list of task configurations for the connector
 * @throws ConnectException if the task configurations do not resolve inconsistencies found in the existing root
 *         and task configurations.
 */
@Override
public void putTaskConfigs(String connector, List<Map<String, String>> configs) {
    // Make sure we are caught up to the end of the config log before writing, so there is no
    // outstanding lagging data left to consume.
    try {
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write root configuration to Kafka: ", e);
        throw new ConnectException("Error writing root configuration to Kafka", e);
    }
    int taskCount = configs.size();
    // Start sending all the individual updates
    int index = 0;
    for (Map<String, String> taskConfig : configs) {
        Struct connectConfig = new Struct(TASK_CONFIGURATION_V0);
        connectConfig.put("properties", taskConfig);
        byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig);
        log.debug("Writing configuration for connector '{}' task {}", connector, index);
        ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index);
        configLog.send(TASK_KEY(connectorTaskId), serializedConfig);
        index++;
    }
    // Finally, send the commit record that updates the task count and applies the new configs,
    // then wait until we have read back to the end of the log
    try {
        // Read to end to ensure all the task configs have been written
        if (taskCount > 0) {
            configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        }
        // Write the commit message
        Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0);
        connectConfig.put("tasks", taskCount);
        byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig);
        log.debug("Writing commit for connector '{}' with {} tasks.", connector, taskCount);
        configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig);
        // Read to end to ensure all the commit messages have been written
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write root configuration to Kafka: ", e);
        throw new ConnectException("Error writing root configuration to Kafka", e);
    }
}
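The method leans on configLog.readToEnd() as a barrier: once before writing anything, once between the task records and the commit record, and once after the commit. A minimal sketch of that wait-with-timeout pattern follows, with a plain CompletableFuture standing in for the KafkaBasedLog future and an illustrative timeout value; it is not the store's own code.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class ReadToEndBarrierDemo {
    // Stand-in for READ_TO_END_TIMEOUT_MS; the value here is illustrative.
    static final long READ_TO_END_TIMEOUT_MS = 30_000L;

    // Block until the log has been read to the end, converting checked failures into an
    // unchecked exception, as putTaskConfigs does with ConnectException.
    static void awaitReadToEnd(Future<Void> readToEnd) {
        try {
            readToEnd.get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            throw new RuntimeException("Error reading config log to end", e);
        }
    }

    public static void main(String[] args) {
        // An already-completed future models a config log that is fully caught up.
        awaitReadToEnd(CompletableFuture.completedFuture(null));
        System.out.println("config log caught up; safe to write the commit record");
    }
}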
Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache.
The class KafkaConfigBackingStore, method removeConnectorConfig:
/**
* Remove configuration for a given connector.
* @param connector name of the connector to remove
*/
@Override
public void removeConnectorConfig(String connector) {
    log.debug("Removing connector configuration for connector '{}'", connector);
    try {
        configLog.send(CONNECTOR_KEY(connector), null);
        configLog.send(TARGET_STATE_KEY(connector), null);
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to remove connector configuration from Kafka: ", e);
        throw new ConnectException("Error removing connector configuration from Kafka", e);
    }
}
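Sending a null value produces a tombstone record on the compacted config topic, which is how the connector and target-state entries are eventually deleted. The sketch below shows the same idea with a plain producer, assuming a broker on localhost:9092; the topic name and record keys are illustrative rather than the store's exact key format.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class TombstoneDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", ByteArraySerializer.class.getName());
        try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(props)) {
            // A null value is a tombstone: after log compaction, the key disappears from the topic.
            // Topic and keys below are illustrative.
            producer.send(new ProducerRecord<>("connect-configs", "connector-my-connector", null));
            producer.send(new ProducerRecord<>("connect-configs", "target-state-my-connector", null));
            producer.flush();
        }
    }
}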
Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache.
The class KafkaConfigBackingStore, method putRestartRequest:
@Override
public void putRestartRequest(RestartRequest restartRequest) {
    log.debug("Writing {} to Kafka", restartRequest);
    String key = RESTART_KEY(restartRequest.connectorName());
    Struct value = new Struct(RESTART_REQUEST_V0);
    value.put(INCLUDE_TASKS_FIELD_NAME, restartRequest.includeTasks());
    value.put(ONLY_FAILED_FIELD_NAME, restartRequest.onlyFailed());
    byte[] serializedValue = converter.fromConnectData(topic, value.schema(), value);
    try {
        configLog.send(key, serializedValue);
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write {} to Kafka: ", restartRequest, e);
        throw new ConnectException("Error writing " + restartRequest + " to Kafka", e);
    }
}
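The value is built with the Connect data API and serialized through the store's Converter. Below is a minimal sketch of that step using the public JsonConverter from connect-json; the schema name, field names, and topic are illustrative stand-ins for RESTART_REQUEST_V0 and its field constants, not the store's actual definitions.

import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.json.JsonConverter;

public class RestartRecordDemo {
    public static void main(String[] args) {
        // Illustrative schema standing in for RESTART_REQUEST_V0.
        Schema schema = SchemaBuilder.struct().name("RestartRequest")
                .field("include-tasks", Schema.BOOLEAN_SCHEMA)
                .field("only-failed", Schema.BOOLEAN_SCHEMA)
                .build();
        Struct value = new Struct(schema)
                .put("include-tasks", true)
                .put("only-failed", false);

        // Serialize the Struct the same way the store does with its configured Converter.
        JsonConverter converter = new JsonConverter();
        converter.configure(Map.of("schemas.enable", false), false);
        byte[] serialized = converter.fromConnectData("connect-configs", schema, value);
        System.out.println(new String(serialized, StandardCharsets.UTF_8));
    }
}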
Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache.
The class KafkaConfigBackingStore, method updateConnectorConfig:
private void updateConnectorConfig(String connector, byte[] serializedConfig) {
    try {
        configLog.send(CONNECTOR_KEY(connector), serializedConfig);
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write connector configuration to Kafka: ", e);
        throw new ConnectException("Error writing connector configuration to Kafka", e);
    }
}