Example 61 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the Apache Kafka project.

From the class Worker, method connectorClientConfigOverrides.

private static Map<String, Object> connectorClientConfigOverrides(ConnectorTaskId id, ConnectorConfig connConfig, Class<? extends Connector> connectorClass, String clientConfigPrefix, ConnectorType connectorType, ConnectorClientConfigRequest.ClientType clientType, ConnectorClientConfigOverridePolicy connectorClientConfigOverridePolicy) {
    Map<String, Object> clientOverrides = connConfig.originalsWithPrefix(clientConfigPrefix);
    ConnectorClientConfigRequest connectorClientConfigRequest = new ConnectorClientConfigRequest(id.connector(), connectorType, connectorClass, clientOverrides, clientType);
    List<ConfigValue> configValues = connectorClientConfigOverridePolicy.validate(connectorClientConfigRequest);
    List<ConfigValue> errorConfigs = configValues.stream().filter(configValue -> configValue.errorMessages().size() > 0).collect(Collectors.toList());
    // These should be caught when the herder validates the connector configuration, but just in case
    if (errorConfigs.size() > 0) {
        throw new ConnectException("Client Config Overrides not allowed " + errorConfigs);
    }
    return clientOverrides;
}
Also used : LoggingContext(org.apache.kafka.connect.util.LoggingContext) SinkUtils(org.apache.kafka.connect.util.SinkUtils) JsonConverterConfig(org.apache.kafka.connect.json.JsonConverterConfig) Plugins(org.apache.kafka.connect.runtime.isolation.Plugins) LoggerFactory(org.slf4j.LoggerFactory) ConnectorType(org.apache.kafka.connect.health.ConnectorType) OffsetStorageWriter(org.apache.kafka.connect.storage.OffsetStorageWriter) ErrorHandlingMetrics(org.apache.kafka.connect.runtime.errors.ErrorHandlingMetrics) CloseableOffsetStorageReader(org.apache.kafka.connect.storage.CloseableOffsetStorageReader) ErrorReporter(org.apache.kafka.connect.runtime.errors.ErrorReporter) Converter(org.apache.kafka.connect.storage.Converter) Map(java.util.Map) DeadLetterQueueReporter(org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter) OffsetBackingStore(org.apache.kafka.connect.storage.OffsetBackingStore) OffsetStorageReader(org.apache.kafka.connect.storage.OffsetStorageReader) ClassLoaderUsage(org.apache.kafka.connect.runtime.isolation.Plugins.ClassLoaderUsage) ConnectUtils(org.apache.kafka.connect.util.ConnectUtils) TopicCreationGroup(org.apache.kafka.connect.util.TopicCreationGroup) Time(org.apache.kafka.common.utils.Time) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) ConfigValue(org.apache.kafka.common.config.ConfigValue) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) List(java.util.List) SinkRecord(org.apache.kafka.connect.sink.SinkRecord) Task(org.apache.kafka.connect.connector.Task) JsonConverter(org.apache.kafka.connect.json.JsonConverter) ConnectorClientConfigOverridePolicy(org.apache.kafka.connect.connector.policy.ConnectorClientConfigOverridePolicy) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) SourceTask(org.apache.kafka.connect.source.SourceTask) OffsetStorageReaderImpl(org.apache.kafka.connect.storage.OffsetStorageReaderImpl) Connector(org.apache.kafka.connect.connector.Connector) ConfigProvider(org.apache.kafka.common.config.provider.ConfigProvider) ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) TopicAdmin(org.apache.kafka.connect.util.TopicAdmin) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) LogReporter(org.apache.kafka.connect.runtime.errors.LogReporter) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) HeaderConverter(org.apache.kafka.connect.storage.HeaderConverter) WorkerErrantRecordReporter(org.apache.kafka.connect.runtime.errors.WorkerErrantRecordReporter) MetricGroup(org.apache.kafka.connect.runtime.ConnectMetrics.MetricGroup) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ExecutorService(java.util.concurrent.ExecutorService) SinkTask(org.apache.kafka.connect.sink.SinkTask) Utils(org.apache.kafka.common.utils.Utils) Callback(org.apache.kafka.connect.util.Callback) Logger(org.slf4j.Logger) ConnectorClientConfigRequest(org.apache.kafka.connect.connector.policy.ConnectorClientConfigRequest) AdminClientConfig(org.apache.kafka.clients.admin.AdminClientConfig) ClusterConfigState(org.apache.kafka.connect.runtime.distributed.ClusterConfigState) MetricNameTemplate(org.apache.kafka.common.MetricNameTemplate) TimeUnit(java.util.concurrent.TimeUnit) ConnectException(org.apache.kafka.connect.errors.ConnectException) 
RetryWithToleranceOperator(org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator) Collections(java.util.Collections)
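
The ConnectException here is thrown only when the configured ConnectorClientConfigOverridePolicy reports errors for the overrides found under the client config prefix. Below is a minimal sketch of such a policy, similar in spirit to Connect's built-in "none" behaviour: it flags every attempted override, so the Worker method above would reject the connector's client overrides. The class name DenyAllOverridePolicy and the error message are illustrative, not part of Kafka.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.kafka.common.config.ConfigValue;
import org.apache.kafka.connect.connector.policy.ConnectorClientConfigOverridePolicy;
import org.apache.kafka.connect.connector.policy.ConnectorClientConfigRequest;

public class DenyAllOverridePolicy implements ConnectorClientConfigOverridePolicy {

    @Override
    public List<ConfigValue> validate(ConnectorClientConfigRequest request) {
        // Report every attempted override as an error; no overrides means no errors.
        return request.clientProps().keySet().stream().map(name -> {
            ConfigValue value = new ConfigValue(name);
            value.addErrorMessage("Client configuration overrides are not allowed by this policy.");
            return value;
        }).collect(Collectors.toList());
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // This sketch needs no configuration of its own.
    }

    @Override
    public void close() {
        // Nothing to clean up.
    }
}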

Example 62 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the Apache Kafka project.

From the class ConnectClusterStateImpl, method connectors.

@Override
public Collection<String> connectors() {
    FutureCallback<Collection<String>> connectorsCallback = new FutureCallback<>();
    herder.connectors(connectorsCallback);
    try {
        return connectorsCallback.get(herderRequestTimeoutMs, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        throw new ConnectException("Failed to retrieve list of connectors", e);
    }
}
Also used : Collection(java.util.Collection) ExecutionException(java.util.concurrent.ExecutionException) FutureCallback(org.apache.kafka.connect.util.FutureCallback) TimeoutException(java.util.concurrent.TimeoutException) ConnectException(org.apache.kafka.connect.errors.ConnectException)
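
The catch block collapses the three checked exceptions that FutureCallback.get can throw into a single ConnectException. The helper below is a sketch of the same pattern pulled into a reusable method; the class name CallbackWaiter, the method name, and the message wording are illustrative.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.util.FutureCallback;

public final class CallbackWaiter {

    private CallbackWaiter() {
    }

    // Blocks until the callback completes, converting interruption, execution
    // failures, and timeouts into a single ConnectException for the caller.
    public static <T> T waitFor(FutureCallback<T> callback, long timeoutMs, String action) {
        try {
            return callback.get(timeoutMs, TimeUnit.MILLISECONDS);
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            throw new ConnectException("Failed to " + action, e);
        }
    }
}

A call site would mirror the example above: pass a FutureCallback to herder.connectors(...), then return CallbackWaiter.waitFor(callback, herderRequestTimeoutMs, "retrieve list of connectors").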

Example 63 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the Apache Kafka project.

From the class Plugins, method newInternalConverter.

/**
 * Load an internal converter, used by the worker for (de)serializing data in internal topics.
 *
 * @param isKey           whether the converter is a key converter
 * @param className       the class name of the converter
 * @param converterConfig the properties to configure the converter with
 * @return the instantiated and configured {@link Converter}; never null
 * @throws ConnectException if the {@link Converter} implementation class could not be found
 */
public Converter newInternalConverter(boolean isKey, String className, Map<String, String> converterConfig) {
    Class<? extends Converter> klass;
    try {
        klass = pluginClass(delegatingLoader, className, Converter.class);
    } catch (ClassNotFoundException e) {
        throw new ConnectException("Failed to load internal converter class " + className);
    }
    Converter plugin;
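    // Instantiate and configure the converter with its plugin classloader active,
    // then restore the previous classloader in the finally block.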
    ClassLoader savedLoader = compareAndSwapLoaders(klass.getClassLoader());
    try {
        plugin = newPlugin(klass);
        plugin.configure(converterConfig, isKey);
    } finally {
        compareAndSwapLoaders(savedLoader);
    }
    return plugin;
}
Also used : HeaderConverter(org.apache.kafka.connect.storage.HeaderConverter) Converter(org.apache.kafka.connect.storage.Converter) ConnectException(org.apache.kafka.connect.errors.ConnectException)
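
A sketch of a possible caller, written as a standalone helper so its assumptions are explicit: it takes the Plugins instance as a parameter and uses the JsonConverter class and the schemas.enable=false setting that Connect applies to its internal topics. The helper name and the rethrown message are illustrative; the imports are the Plugins, Converter, JsonConverter, ConnectException, Map, and Collections classes already listed in these examples.

static Converter newInternalKeyConverter(Plugins plugins) {
    // "schemas.enable" is JsonConverterConfig.SCHEMAS_ENABLE_CONFIG; internal topics use schemaless JSON.
    Map<String, String> config = Collections.singletonMap("schemas.enable", "false");
    try {
        return plugins.newInternalConverter(true, JsonConverter.class.getName(), config);
    } catch (ConnectException e) {
        // Add caller-specific context before rethrowing the failure.
        throw new ConnectException("Unable to create the internal key converter", e);
    }
}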

Example 64 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the Apache Kafka project.

From the class DistributedHerder, method halt.

// public for testing
public void halt() {
    synchronized (this) {
        // Clean up any connectors and tasks that are still running.
        log.info("Stopping connectors and tasks that are still assigned to this worker.");
        List<Callable<Void>> callables = new ArrayList<>();
        for (String connectorName : new ArrayList<>(worker.connectorNames())) {
            callables.add(getConnectorStoppingCallable(connectorName));
        }
        for (ConnectorTaskId taskId : new ArrayList<>(worker.taskIds())) {
            callables.add(getTaskStoppingCallable(taskId));
        }
        startAndStop(callables);
        member.stop();
        // Explicitly fail any outstanding requests so they actually get a response and get an
        // understandable reason for their failure.
        DistributedHerderRequest request = requests.pollFirst();
        while (request != null) {
            request.callback().onCompletion(new ConnectException("Worker is shutting down"), null);
            request = requests.pollFirst();
        }
        stopServices();
    }
}
Also used : ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) ArrayList(java.util.ArrayList) Callable(java.util.concurrent.Callable) ConnectException(org.apache.kafka.connect.errors.ConnectException)
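
Each outstanding request's callback receives the ConnectException above together with a null result. The lambda below sketches what such a Callback might look like on the receiving end; the variable name and the logging choices are illustrative, and an SLF4J Logger named log is assumed.

Callback<Void> requestCallback = (error, result) -> {
    if (error instanceof ConnectException) {
        // Expected during shutdown: the request was rejected, not processed.
        log.warn("Request was not processed: {}", error.getMessage());
    } else if (error != null) {
        log.error("Request failed unexpectedly", error);
    } else {
        log.info("Request completed");
    }
};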

Example 65 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the Apache Kafka project.

From the class Plugins, method newPlugin.

public <T> T newPlugin(String klassName, AbstractConfig config, Class<T> pluginKlass) {
    T plugin;
    Class<? extends T> klass;
    try {
        klass = pluginClass(delegatingLoader, klassName, pluginKlass);
    } catch (ClassNotFoundException e) {
        String msg = String.format("Failed to find any class that implements %s and which " + "name matches %s", pluginKlass, klassName);
        throw new ConnectException(msg);
    }
    ClassLoader savedLoader = compareAndSwapLoaders(klass.getClassLoader());
    try {
        plugin = newPlugin(klass);
        if (plugin instanceof Versioned) {
            Versioned versionedPlugin = (Versioned) plugin;
            if (Utils.isBlank(versionedPlugin.version())) {
                throw new ConnectException("Version not defined for '" + klassName + "'");
            }
        }
        if (plugin instanceof Configurable) {
            ((Configurable) plugin).configure(config.originals());
        }
    } finally {
        compareAndSwapLoaders(savedLoader);
    }
    return plugin;
}
Also used : Versioned(org.apache.kafka.connect.components.Versioned) Configurable(org.apache.kafka.common.Configurable) ConnectException(org.apache.kafka.connect.errors.ConnectException)
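
A minimal sketch of a plugin class that passes both checks in newPlugin: it implements Versioned with a non-blank version and Configurable so that configure receives config.originals(). The class name ExamplePlugin is illustrative and not part of Kafka.

import java.util.Map;
import org.apache.kafka.common.Configurable;
import org.apache.kafka.connect.components.Versioned;

public class ExamplePlugin implements Configurable, Versioned {

    private Map<String, ?> originals;

    @Override
    public String version() {
        // A blank version would make newPlugin throw ConnectException.
        return "1.0.0";
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // Invoked by newPlugin with config.originals() because the plugin is Configurable.
        this.originals = configs;
    }
}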

Aggregations

ConnectException (org.apache.kafka.connect.errors.ConnectException): 184
HashMap (java.util.HashMap): 38
IOException (java.io.IOException): 28
Map (java.util.Map): 28
ArrayList (java.util.ArrayList): 23
Test (org.junit.Test): 23
ExecutionException (java.util.concurrent.ExecutionException): 22
TimeoutException (java.util.concurrent.TimeoutException): 17
SQLException (java.sql.SQLException): 16
SourceRecord (org.apache.kafka.connect.source.SourceRecord): 14
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 14
Connector (org.apache.kafka.connect.connector.Connector): 12
ConfigException (org.apache.kafka.common.config.ConfigException): 11
TopicPartition (org.apache.kafka.common.TopicPartition): 10
ConnectorTaskId (org.apache.kafka.connect.util.ConnectorTaskId): 10
Collection (java.util.Collection): 8
HashSet (java.util.HashSet): 8
Set (java.util.Set): 8
NotFoundException (org.apache.kafka.connect.errors.NotFoundException): 8
SinkRecord (org.apache.kafka.connect.sink.SinkRecord): 8