Search in sources :

Example 1 with LoggingContext

use of org.apache.kafka.connect.util.LoggingContext in project kafka by apache.

In class Worker: the method startTask.

/**
 * Start a task managed by this worker.
 *
 * @param id the task ID.
 * @param configState a snapshot of the cluster configuration, passed through to the worker task.
 * @param connProps the connector properties.
 * @param taskProps the task's properties.
 * @param statusListener a listener for the runtime status transitions of the task.
 * @param initialState the initial state of the connector.
 * @return true if the task started successfully; false if startup failed (the failure is
 *         reported via the status listener rather than thrown to the caller).
 */
public boolean startTask(ConnectorTaskId id, ClusterConfigState configState, Map<String, String> connProps, Map<String, String> taskProps, TaskStatus.Listener statusListener, TargetState initialState) {
    final WorkerTask workerTask;
    // Wrap the listener so worker-level task metrics are updated on every status transition.
    final TaskStatus.Listener taskStatusListener = workerMetricsGroup.wrapStatusListener(statusListener);
    // Scope all logging in this method to the task's logging context.
    try (LoggingContext loggingContext = LoggingContext.forTask(id)) {
        log.info("Creating task {}", id);
        if (tasks.containsKey(id))
            throw new ConnectException("Task already exists in this worker: " + id);
        connectorStatusMetricsGroup.recordTaskAdded(id);
        // Remember the current context classloader so it can be restored after plugin work.
        ClassLoader savedLoader = plugins.currentThreadLoader();
        try {
            String connType = connProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
            ClassLoader connectorLoader = plugins.delegatingLoader().connectorLoader(connType);
            // Swap to the connector's plugin classloader; plugin classes are instantiated below
            // with this loader active.
            savedLoader = Plugins.compareAndSwapLoaders(connectorLoader);
            final ConnectorConfig connConfig = new ConnectorConfig(plugins, connProps);
            final TaskConfig taskConfig = new TaskConfig(taskProps);
            final Class<? extends Task> taskClass = taskConfig.getClass(TaskConfig.TASK_CLASS_CONFIG).asSubclass(Task.class);
            final Task task = plugins.newTask(taskClass);
            log.info("Instantiated task {} with version {} of type {}", id, task.version(), taskClass.getName());
            // By maintaining connector's specific class loader for this thread here, we first
            // search for converters within the connector dependencies.
            // If any of these aren't found, that means the connector didn't configure specific converters,
            // so we should instantiate based upon the worker configuration
            Converter keyConverter = plugins.newConverter(connConfig, WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
            Converter valueConverter = plugins.newConverter(connConfig, WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
            HeaderConverter headerConverter = plugins.newHeaderConverter(connConfig, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.CURRENT_CLASSLOADER);
            // null from the connector-scoped lookup means "not configured on the connector";
            // fall back to the worker config for each converter independently.
            if (keyConverter == null) {
                keyConverter = plugins.newConverter(config, WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
                log.info("Set up the key converter {} for task {} using the worker config", keyConverter.getClass(), id);
            } else {
                log.info("Set up the key converter {} for task {} using the connector config", keyConverter.getClass(), id);
            }
            if (valueConverter == null) {
                valueConverter = plugins.newConverter(config, WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
                log.info("Set up the value converter {} for task {} using the worker config", valueConverter.getClass(), id);
            } else {
                log.info("Set up the value converter {} for task {} using the connector config", valueConverter.getClass(), id);
            }
            if (headerConverter == null) {
                headerConverter = plugins.newHeaderConverter(config, WorkerConfig.HEADER_CONVERTER_CLASS_CONFIG, ClassLoaderUsage.PLUGINS);
                log.info("Set up the header converter {} for task {} using the worker config", headerConverter.getClass(), id);
            } else {
                log.info("Set up the header converter {} for task {} using the connector config", headerConverter.getClass(), id);
            }
            workerTask = buildWorkerTask(configState, connConfig, id, task, taskStatusListener, initialState, keyConverter, valueConverter, headerConverter, connectorLoader);
            workerTask.initialize(taskConfig);
            // Restore the previous context classloader now that plugin instantiation is done.
            Plugins.compareAndSwapLoaders(savedLoader);
        } catch (Throwable t) {
            log.error("Failed to start task {}", id, t);
            // Can't be put in a finally block because it needs to be swapped before the call on
            // statusListener
            Plugins.compareAndSwapLoaders(savedLoader);
            connectorStatusMetricsGroup.recordTaskRemoved(id);
            taskStatusListener.onFailure(id, t);
            return false;
        }
        WorkerTask existing = tasks.putIfAbsent(id, workerTask);
        if (existing != null)
            throw new ConnectException("Task already exists in this worker: " + id);
        // Hand the task to the worker's executor; source tasks additionally get scheduled
        // periodic offset commits.
        executor.submit(workerTask);
        if (workerTask instanceof WorkerSourceTask) {
            sourceTaskOffsetCommitter.schedule(id, (WorkerSourceTask) workerTask);
        }
        return true;
    }
}
Also used : Task(org.apache.kafka.connect.connector.Task) SourceTask(org.apache.kafka.connect.source.SourceTask) SinkTask(org.apache.kafka.connect.sink.SinkTask) LoggingContext(org.apache.kafka.connect.util.LoggingContext) Converter(org.apache.kafka.connect.storage.Converter) JsonConverter(org.apache.kafka.connect.json.JsonConverter) HeaderConverter(org.apache.kafka.connect.storage.HeaderConverter) HeaderConverter(org.apache.kafka.connect.storage.HeaderConverter) ConnectException(org.apache.kafka.connect.errors.ConnectException)

Example 2 with LoggingContext

use of org.apache.kafka.connect.util.LoggingContext in project kafka by apache.

In class Worker: the method connectorTaskConfigs.

/**
 * Get a list of updated task properties for the tasks of this connector.
 *
 * @param connName the connector name.
 * @param connConfig the connector's configuration.
 * @return a list of updated tasks properties, one map per task.
 */
public List<Map<String, String>> connectorTaskConfigs(String connName, ConnectorConfig connConfig) {
    final List<Map<String, String>> taskConfigList = new ArrayList<>();
    // Scope all logging in this method to the connector's logging context.
    try (LoggingContext loggingContext = LoggingContext.forConnector(connName)) {
        log.trace("Reconfiguring connector tasks for {}", connName);
        final WorkerConnector workerConnector = connectors.get(connName);
        if (workerConnector == null) {
            throw new ConnectException("Connector " + connName + " not found in this worker.");
        }
        final int maxTasks = connConfig.getInt(ConnectorConfig.TASKS_MAX_CONFIG);
        final Map<String, String> connectorOriginals = connConfig.originalsStrings();
        final Connector connector = workerConnector.connector();
        ClassLoader previousLoader = plugins.currentThreadLoader();
        try {
            // Invoke connector.taskConfigs() with the connector's own classloader active.
            previousLoader = Plugins.compareAndSwapLoaders(workerConnector.loader());
            final String taskClassName = connector.taskClass().getName();
            for (Map<String, String> generatedProps : connector.taskConfigs(maxTasks)) {
                // Ensure we don't modify the connector's copy of the config
                final Map<String, String> taskConfig = new HashMap<>(generatedProps);
                taskConfig.put(TaskConfig.TASK_CLASS_CONFIG, taskClassName);
                // Sink task configs inherit topics / topics.regex from the connector config.
                if (connectorOriginals.containsKey(SinkTask.TOPICS_CONFIG)) {
                    taskConfig.put(SinkTask.TOPICS_CONFIG, connectorOriginals.get(SinkTask.TOPICS_CONFIG));
                }
                if (connectorOriginals.containsKey(SinkTask.TOPICS_REGEX_CONFIG)) {
                    taskConfig.put(SinkTask.TOPICS_REGEX_CONFIG, connectorOriginals.get(SinkTask.TOPICS_REGEX_CONFIG));
                }
                taskConfigList.add(taskConfig);
            }
        } finally {
            // Always restore the previous context classloader.
            Plugins.compareAndSwapLoaders(previousLoader);
        }
    }
    return taskConfigList;
}
Also used : Connector(org.apache.kafka.connect.connector.Connector) LoggingContext(org.apache.kafka.connect.util.LoggingContext) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap) ConnectException(org.apache.kafka.connect.errors.ConnectException)

Example 3 with LoggingContext

use of org.apache.kafka.connect.util.LoggingContext in project kafka by apache.

In class Worker: the method awaitStopTask.

/**
 * Wait up to {@code timeout} ms for the given task to stop, cancelling it if it does not,
 * then remove its metrics and record its removal.
 */
private void awaitStopTask(ConnectorTaskId taskId, long timeout) {
    // Scope all logging in this method to the task's logging context.
    try (LoggingContext loggingContext = LoggingContext.forTask(taskId)) {
        final WorkerTask removedTask = tasks.remove(taskId);
        if (removedTask == null) {
            log.warn("Ignoring await stop request for non-present task {}", taskId);
            return;
        }
        final boolean stoppedGracefully = removedTask.awaitStop(timeout);
        if (stoppedGracefully) {
            log.debug("Graceful stop of task {} succeeded.", removedTask.id());
        } else {
            log.error("Graceful stop of task {} failed.", removedTask.id());
            removedTask.cancel();
        }
        try {
            removedTask.removeMetrics();
        } finally {
            // Record removal even if metric cleanup throws.
            connectorStatusMetricsGroup.recordTaskRemoved(taskId);
        }
    }
}
Also used : LoggingContext(org.apache.kafka.connect.util.LoggingContext)

Example 4 with LoggingContext

use of org.apache.kafka.connect.util.LoggingContext in project kafka by apache.

In class Worker: the method awaitStopConnector.

/**
 * Wait up to {@code timeout} ms for the given connector to shut down, cancelling it if it
 * becomes unresponsive. The connector is removed from this worker's registry either way.
 */
private void awaitStopConnector(String connName, long timeout) {
    // Scope all logging in this method to the connector's logging context.
    try (LoggingContext loggingContext = LoggingContext.forConnector(connName)) {
        WorkerConnector connector = connectors.remove(connName);
        if (connector == null) {
            log.warn("Ignoring await stop request for non-present connector {}", connName);
            return;
        }
        if (!connector.awaitShutdown(timeout)) {
            // Note: replaced mis-encoded typographic quotes (‘{}’) with plain ASCII quotes
            // so the log message renders consistently regardless of terminal encoding.
            log.error("Connector '{}' failed to properly shut down, has become unresponsive, and " + "may be consuming external resources. Correct the configuration for " + "this connector or remove the connector. After fixing the connector, it " + "may be necessary to restart this worker to release any consumed " + "resources.", connName);
            connector.cancel();
        } else {
            log.debug("Graceful stop of connector {} succeeded.", connName);
        }
    }
}
Also used : LoggingContext(org.apache.kafka.connect.util.LoggingContext)

Example 5 with LoggingContext

use of org.apache.kafka.connect.util.LoggingContext in project kafka by apache.

In class Worker: the method startConnector.

/**
 * Start a connector managed by this worker.
 *
 * @param connName the connector name.
 * @param connProps the properties of the connector.
 * @param ctx the connector runtime context.
 * @param statusListener a listener for the runtime status transitions of the connector.
 * @param initialState the initial state of the connector.
 * @param onConnectorStateChange invoked when the initial state change of the connector is
 *        completed; on startup failure it is invoked with the error instead.
 */
public void startConnector(String connName, Map<String, String> connProps, CloseableConnectorContext ctx, ConnectorStatus.Listener statusListener, TargetState initialState, Callback<TargetState> onConnectorStateChange) {
    // Wrap the listener so worker-level connector metrics are updated on status transitions.
    final ConnectorStatus.Listener connectorStatusListener = workerMetricsGroup.wrapStatusListener(statusListener);
    // Scope all logging in this method to the connector's logging context.
    try (LoggingContext loggingContext = LoggingContext.forConnector(connName)) {
        if (connectors.containsKey(connName)) {
            onConnectorStateChange.onCompletion(new ConnectException("Connector with name " + connName + " already exists"), null);
            return;
        }
        final WorkerConnector workerConnector;
        // Remember the current context classloader so it can be restored after plugin work.
        ClassLoader savedLoader = plugins.currentThreadLoader();
        try {
            // By the time we arrive here, CONNECTOR_CLASS_CONFIG has been validated already
            // Getting this value from the unparsed map will allow us to instantiate the
            // right config (source or sink)
            final String connClass = connProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
            ClassLoader connectorLoader = plugins.delegatingLoader().connectorLoader(connClass);
            // Swap to the connector's plugin classloader for instantiation below.
            savedLoader = Plugins.compareAndSwapLoaders(connectorLoader);
            log.info("Creating connector {} of type {}", connName, connClass);
            final Connector connector = plugins.newConnector(connClass);
            // Sink and source connectors are parsed with different config classes.
            final ConnectorConfig connConfig = ConnectUtils.isSinkConnector(connector) ? new SinkConnectorConfig(plugins, connProps) : new SourceConnectorConfig(plugins, connProps, config.topicCreationEnable());
            final OffsetStorageReader offsetReader = new OffsetStorageReaderImpl(offsetBackingStore, connName, internalKeyConverter, internalValueConverter);
            workerConnector = new WorkerConnector(connName, connector, connConfig, ctx, metrics, connectorStatusListener, offsetReader, connectorLoader);
            log.info("Instantiated connector {} with version {} of type {}", connName, connector.version(), connector.getClass());
            workerConnector.transitionTo(initialState, onConnectorStateChange);
            // Restore the previous context classloader now that plugin instantiation is done.
            Plugins.compareAndSwapLoaders(savedLoader);
        } catch (Throwable t) {
            log.error("Failed to start connector {}", connName, t);
            // Can't be put in a finally block because it needs to be swapped before the call on
            // statusListener
            Plugins.compareAndSwapLoaders(savedLoader);
            connectorStatusListener.onFailure(connName, t);
            onConnectorStateChange.onCompletion(t, null);
            return;
        }
        WorkerConnector existing = connectors.putIfAbsent(connName, workerConnector);
        if (existing != null) {
            onConnectorStateChange.onCompletion(new ConnectException("Connector with name " + connName + " already exists"), null);
            // Don't need to do any cleanup of the workerConnector (such as calling
            // shutdown() on it) here because it hasn't actually started running yet
            return;
        }
        executor.submit(workerConnector);
        log.info("Finished creating connector {}", connName);
    }
}
Also used : Connector(org.apache.kafka.connect.connector.Connector) LoggingContext(org.apache.kafka.connect.util.LoggingContext) OffsetStorageReaderImpl(org.apache.kafka.connect.storage.OffsetStorageReaderImpl) CloseableOffsetStorageReader(org.apache.kafka.connect.storage.CloseableOffsetStorageReader) OffsetStorageReader(org.apache.kafka.connect.storage.OffsetStorageReader) ConnectException(org.apache.kafka.connect.errors.ConnectException)

Aggregations

LoggingContext (org.apache.kafka.connect.util.LoggingContext)10 ConnectException (org.apache.kafka.connect.errors.ConnectException)3 Connector (org.apache.kafka.connect.connector.Connector)2 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1 Map (java.util.Map)1 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)1 ConcurrentMap (java.util.concurrent.ConcurrentMap)1 Task (org.apache.kafka.connect.connector.Task)1 JsonConverter (org.apache.kafka.connect.json.JsonConverter)1 SinkTask (org.apache.kafka.connect.sink.SinkTask)1 SourceTask (org.apache.kafka.connect.source.SourceTask)1 CloseableOffsetStorageReader (org.apache.kafka.connect.storage.CloseableOffsetStorageReader)1 Converter (org.apache.kafka.connect.storage.Converter)1 HeaderConverter (org.apache.kafka.connect.storage.HeaderConverter)1 OffsetStorageReader (org.apache.kafka.connect.storage.OffsetStorageReader)1 OffsetStorageReaderImpl (org.apache.kafka.connect.storage.OffsetStorageReaderImpl)1