Use of org.apache.kafka.connect.storage.HeaderConverter in project apache-kafka-on-k8s by banzaicloud.
The class Plugins, method newHeaderConverter.
/**
* If the given configuration defines a {@link HeaderConverter} using the named configuration property, return a new configured
* instance.
*
* @param config the configuration containing the {@link HeaderConverter}'s configuration; may not be null
* @param classPropertyName the name of the property that contains the name of the {@link HeaderConverter} class; may not be null
* @param classLoaderUsage which classloader should be used
* @return the instantiated and configured {@link HeaderConverter}; null if the configuration did not define the specified property
* @throws ConnectException if the {@link HeaderConverter} implementation class could not be found
*/
public HeaderConverter newHeaderConverter(AbstractConfig config, String classPropertyName, ClassLoaderUsage classLoaderUsage) {
    if (!config.originals().containsKey(classPropertyName)) {
        // This configuration does not define the header converter via the specified property name
        return null;
    }
    HeaderConverter plugin = null;
    switch (classLoaderUsage) {
        case CURRENT_CLASSLOADER:
            // Attempt to load first with the current classloader, and plugins as a fallback.
            // Note: we can't use config.getConfiguredInstance because we have to remove the property prefixes
            // before calling configure(...)
            plugin = getInstance(config, classPropertyName, HeaderConverter.class);
            break;
        case PLUGINS:
            // Attempt to load with the plugin class loader, which uses the current classloader as a fallback
            String converterClassOrAlias = config.getClass(classPropertyName).getName();
            Class<? extends HeaderConverter> klass;
            try {
                klass = pluginClass(delegatingLoader, converterClassOrAlias, HeaderConverter.class);
            } catch (ClassNotFoundException e) {
                throw new ConnectException(
                        "Failed to find any class that implements HeaderConverter and which name matches "
                                + converterClassOrAlias + ", available header converters are: "
                                + pluginNames(delegatingLoader.headerConverters()));
            }
            plugin = newPlugin(klass);
    }
    if (plugin == null) {
        throw new ConnectException("Unable to instantiate the HeaderConverter specified in '" + classPropertyName + "'");
    }
    String configPrefix = classPropertyName + ".";
    Map<String, Object> converterConfig = config.originalsWithPrefix(configPrefix);
    converterConfig.put(ConverterConfig.TYPE_CONFIG, ConverterType.HEADER.getName());
    plugin.configure(converterConfig);
    return plugin;
}
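The closing lines above are what make prefixed converter properties work: anything in the original config under the "<classPropertyName>." prefix is stripped of that prefix and handed to the converter's configure(...) call, along with a converter.type entry identifying it as a header converter. A minimal sketch of that behavior using plain maps instead of Kafka's AbstractConfig (the schemas.enable property below is purely illustrative):

import java.util.HashMap;
import java.util.Map;

public class HeaderConverterConfigSketch {

    // Mimics the effect of AbstractConfig.originalsWithPrefix: keep only entries under the prefix, prefix removed.
    static Map<String, Object> originalsWithPrefix(Map<String, Object> originals, String prefix) {
        Map<String, Object> stripped = new HashMap<>();
        for (Map.Entry<String, Object> entry : originals.entrySet()) {
            if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) {
                stripped.put(entry.getKey().substring(prefix.length()), entry.getValue());
            }
        }
        return stripped;
    }

    public static void main(String[] args) {
        Map<String, Object> workerProps = new HashMap<>();
        workerProps.put("header.converter", "org.apache.kafka.connect.storage.SimpleHeaderConverter");
        workerProps.put("header.converter.schemas.enable", "false"); // hypothetical converter-specific property

        Map<String, Object> converterConfig = originalsWithPrefix(workerProps, "header.converter.");
        converterConfig.put("converter.type", "header"); // the values ConverterConfig.TYPE_CONFIG and ConverterType.HEADER.getName() resolve to

        // converterConfig now holds {schemas.enable=false, converter.type=header}: the map handed to plugin.configure(...)
        System.out.println(converterConfig);
    }
}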
Use of org.apache.kafka.connect.storage.HeaderConverter in project apache-kafka-on-k8s by banzaicloud.
The class Worker, method startTask.
/**
* Start a task managed by this worker.
*
* @param id the task ID.
* @param connProps the connector properties.
* @param taskProps the task's properties.
* @param statusListener a listener for the runtime status transitions of the task.
* @param initialState the initial state of the connector.
* @return true if the task started successfully.
*/
public boolean startTask(ConnectorTaskId id, Map<String, String> connProps, Map<String, String> taskProps, TaskStatus.Listener statusListener, TargetState initialState) {
    log.info("Creating task {}", id);
    if (tasks.containsKey(id))
        throw new ConnectException("Task already exists in this worker: " + id);
    final WorkerTask workerTask;
    ClassLoader savedLoader = plugins.currentThreadLoader();
    try {
        final ConnectorConfig connConfig = new ConnectorConfig(plugins, connProps);
        String connType = connConfig.getString(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
        ClassLoader connectorLoader = plugins.delegatingLoader().connectorLoader(connType);
        savedLoader = Plugins.compareAndSwapLoaders(connectorLoader);
        final TaskConfig taskConfig = new TaskConfig(taskProps);
        final Class<? extends Task> taskClass = taskConfig.getClass(TaskConfig.TASK_CLASS_CONFIG).asSubclass(Task.class);
        final Task task = plugins.newTask(taskClass);
        log.info("Instantiated task {} with version {} of type {}", id, task.version(), taskClass.getName());
        // By keeping the connector's specific classloader on this thread here, we first
        // search for converters within the connector dependencies.
        // If any of these aren't found, the connector didn't configure specific converters,
        // so we should instantiate them based on the worker configuration.
        Converter keyConverter = plugins.newConverter(connConfig, WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
        Converter valueConverter = plugins.newConverter(connConfig, WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
        HeaderConverter headerConverter = plugins.newHeaderConverter(connConfig, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
        if (keyConverter == null) {
            keyConverter = plugins.newConverter(config, WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
        }
        if (valueConverter == null) {
            valueConverter = plugins.newConverter(config, WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
        }
        if (headerConverter == null) {
            headerConverter = plugins.newHeaderConverter(config, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
        }
        workerTask = buildWorkerTask(connConfig, id, task, statusListener, initialState, keyConverter, valueConverter, headerConverter, connectorLoader);
        workerTask.initialize(taskConfig);
        Plugins.compareAndSwapLoaders(savedLoader);
    } catch (Throwable t) {
        log.error("Failed to start task {}", id, t);
        // Can't be put in a finally block because the loader needs to be swapped back before
        // the call on statusListener
        Plugins.compareAndSwapLoaders(savedLoader);
        workerMetricsGroup.recordTaskFailure();
        statusListener.onFailure(id, t);
        return false;
    }
    WorkerTask existing = tasks.putIfAbsent(id, workerTask);
    if (existing != null)
        throw new ConnectException("Task already exists in this worker: " + id);
    executor.submit(workerTask);
    if (workerTask instanceof WorkerSourceTask) {
        sourceTaskOffsetCommitter.schedule(id, (WorkerSourceTask) workerTask);
    }
    workerMetricsGroup.recordTaskSuccess();
    return true;
}
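The converter fallback in this method is what lets a single connector override the worker-wide converter defaults. As a sketch, a connector configuration shaped like the following (connector and converter class names are hypothetical) makes the CURRENT_CLASSLOADER lookup succeed for the header converter, so the worker-level PLUGINS fallback is skipped for that converter, while the key and value converters still come from the worker config:

import java.util.HashMap;
import java.util.Map;

public class ConnectorConverterOverrideSketch {
    public static void main(String[] args) {
        // Hypothetical connector properties; only "header.converter" matters for the fallback above.
        Map<String, String> connProps = new HashMap<>();
        connProps.put("name", "example-connector");
        connProps.put("connector.class", "com.example.ExampleSourceConnector");
        connProps.put("tasks.max", "1");
        connProps.put("header.converter", "com.example.CustomHeaderConverter");
        connProps.put("header.converter.some.option", "some-value"); // hypothetical converter-specific property

        // Because "header.converter" is set here, newHeaderConverter(connConfig, ...) returns a non-null
        // instance loaded through the connector's classloader; "key.converter" and "value.converter" are
        // absent, so those two fall back to the worker config via ClassLoaderUsage.PLUGINS.
        System.out.println(connProps);
    }
}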
Use of org.apache.kafka.connect.storage.HeaderConverter in project kafka by apache.
The class PluginsTest, method shouldInstantiateAndConfigureDefaultHeaderConverter.
@Test
public void shouldInstantiateAndConfigureDefaultHeaderConverter() {
    props.remove(WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG);
    createConfig();
    // Because it's not explicitly set on the supplied configuration, the logic that uses the current
    // classloader for the connector will exit immediately, and so this method always returns null
    HeaderConverter headerConverter = plugins.newHeaderConverter(config, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
    assertNull(headerConverter);
    // But we should always find it (or the worker's default) when using the plugins classloader ...
    headerConverter = plugins.newHeaderConverter(config, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
    assertNotNull(headerConverter);
    assertTrue(headerConverter instanceof SimpleHeaderConverter);
}
Use of org.apache.kafka.connect.storage.HeaderConverter in project kafka by apache.
The class PluginsTest, method newHeaderConverterShouldConfigureWithPluginClassLoader.
@Test
public void newHeaderConverterShouldConfigureWithPluginClassLoader() {
    TestPlugins.assertAvailable();
    props.put(WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, TestPlugins.SAMPLING_HEADER_CONVERTER);
    ClassLoader classLoader = plugins.delegatingLoader().pluginClassLoader(TestPlugins.SAMPLING_HEADER_CONVERTER);
    ClassLoader savedLoader = Plugins.compareAndSwapLoaders(classLoader);
    createConfig();
    Plugins.compareAndSwapLoaders(savedLoader);
    HeaderConverter plugin = plugins.newHeaderConverter(config, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
    assertInstanceOf(SamplingTestPlugin.class, plugin, "Cannot collect samples");
    Map<String, SamplingTestPlugin> samples = ((SamplingTestPlugin) plugin).flatten();
    // HeaderConverter::configure was called
    assertTrue(samples.containsKey("configure"));
    assertPluginClassLoaderAlwaysActive(samples);
}
Use of org.apache.kafka.connect.storage.HeaderConverter in project kafka by apache.
The class Worker, method startTask.
/**
* Start a task managed by this worker.
*
* @param id the task ID.
* @param configState the snapshot of cluster configuration state used when building the task.
* @param connProps the connector properties.
* @param taskProps the task's properties.
* @param statusListener a listener for the runtime status transitions of the task.
* @param initialState the initial state of the connector.
* @return true if the task started successfully.
*/
public boolean startTask(ConnectorTaskId id, ClusterConfigState configState, Map<String, String> connProps, Map<String, String> taskProps, TaskStatus.Listener statusListener, TargetState initialState) {
    final WorkerTask workerTask;
    final TaskStatus.Listener taskStatusListener = workerMetricsGroup.wrapStatusListener(statusListener);
    try (LoggingContext loggingContext = LoggingContext.forTask(id)) {
        log.info("Creating task {}", id);
        if (tasks.containsKey(id))
            throw new ConnectException("Task already exists in this worker: " + id);
        connectorStatusMetricsGroup.recordTaskAdded(id);
        ClassLoader savedLoader = plugins.currentThreadLoader();
        try {
            String connType = connProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
            ClassLoader connectorLoader = plugins.delegatingLoader().connectorLoader(connType);
            savedLoader = Plugins.compareAndSwapLoaders(connectorLoader);
            final ConnectorConfig connConfig = new ConnectorConfig(plugins, connProps);
            final TaskConfig taskConfig = new TaskConfig(taskProps);
            final Class<? extends Task> taskClass = taskConfig.getClass(TaskConfig.TASK_CLASS_CONFIG).asSubclass(Task.class);
            final Task task = plugins.newTask(taskClass);
            log.info("Instantiated task {} with version {} of type {}", id, task.version(), taskClass.getName());
            // By keeping the connector's specific classloader on this thread here, we first
            // search for converters within the connector dependencies.
            // If any of these aren't found, the connector didn't configure specific converters,
            // so we should instantiate them based on the worker configuration.
            Converter keyConverter = plugins.newConverter(connConfig, WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
            Converter valueConverter = plugins.newConverter(connConfig, WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
            HeaderConverter headerConverter = plugins.newHeaderConverter(connConfig, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
            if (keyConverter == null) {
                keyConverter = plugins.newConverter(config, WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
                log.info("Set up the key converter {} for task {} using the worker config", keyConverter.getClass(), id);
            } else {
                log.info("Set up the key converter {} for task {} using the connector config", keyConverter.getClass(), id);
            }
            if (valueConverter == null) {
                valueConverter = plugins.newConverter(config, WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
                log.info("Set up the value converter {} for task {} using the worker config", valueConverter.getClass(), id);
            } else {
                log.info("Set up the value converter {} for task {} using the connector config", valueConverter.getClass(), id);
            }
            if (headerConverter == null) {
                headerConverter = plugins.newHeaderConverter(config, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
                log.info("Set up the header converter {} for task {} using the worker config", headerConverter.getClass(), id);
            } else {
                log.info("Set up the header converter {} for task {} using the connector config", headerConverter.getClass(), id);
            }
            workerTask = buildWorkerTask(configState, connConfig, id, task, taskStatusListener, initialState, keyConverter, valueConverter, headerConverter, connectorLoader);
            workerTask.initialize(taskConfig);
            Plugins.compareAndSwapLoaders(savedLoader);
        } catch (Throwable t) {
            log.error("Failed to start task {}", id, t);
            // Can't be put in a finally block because the loader needs to be swapped back before
            // the call on statusListener
            Plugins.compareAndSwapLoaders(savedLoader);
            connectorStatusMetricsGroup.recordTaskRemoved(id);
            taskStatusListener.onFailure(id, t);
            return false;
        }
        WorkerTask existing = tasks.putIfAbsent(id, workerTask);
        if (existing != null)
            throw new ConnectException("Task already exists in this worker: " + id);
        executor.submit(workerTask);
        if (workerTask instanceof WorkerSourceTask) {
            sourceTaskOffsetCommitter.schedule(id, (WorkerSourceTask) workerTask);
        }
        return true;
    }
}
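For orientation, a sketch of a call site for this method; in the real runtime the herder supplies these arguments, and the connector name as well as the variables worker, configState, connProps, taskProps, and statusListener are assumed to exist in the surrounding code:

// Hypothetical call site; TargetState.STARTED asks the worker to start the task running immediately.
ConnectorTaskId taskId = new ConnectorTaskId("example-connector", 0);
boolean started = worker.startTask(taskId, configState, connProps, taskProps, statusListener, TargetState.STARTED);
if (!started) {
    // startTask has already invoked onFailure(...) on the wrapped status listener and logged the cause.
}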