
Example 26 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the apache/kafka project.

The class FileOffsetBackingStore, method save().

protected void save() {
    try {
        ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(file));
        Map<byte[], byte[]> raw = new HashMap<>();
        for (Map.Entry<ByteBuffer, ByteBuffer> mapEntry : data.entrySet()) {
            byte[] key = (mapEntry.getKey() != null) ? mapEntry.getKey().array() : null;
            byte[] value = (mapEntry.getValue() != null) ? mapEntry.getValue().array() : null;
            raw.put(key, value);
        }
        os.writeObject(raw);
        os.close();
    } catch (IOException e) {
        throw new ConnectException(e);
    }
}
Also used : HashMap(java.util.HashMap) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) ObjectOutputStream(java.io.ObjectOutputStream) Map(java.util.Map) ByteBuffer(java.nio.ByteBuffer) ConnectException(org.apache.kafka.connect.errors.ConnectException)
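
A minimal standalone sketch of the same pattern, assuming a hypothetical OffsetFileWriter class whose offsetFile and offsets fields are invented for illustration: raw byte[] offsets are serialized with an ObjectOutputStream, and any IOException is wrapped in ConnectException, which is unchecked, so callers deal with a single runtime exception type. Unlike the snippet above, it uses try-with-resources so the stream is closed even when writeObject fails.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.errors.ConnectException;

public class OffsetFileWriter {

    private final File offsetFile;                      // hypothetical target file
    private final Map<ByteBuffer, ByteBuffer> offsets;  // hypothetical in-memory offsets

    public OffsetFileWriter(File offsetFile, Map<ByteBuffer, ByteBuffer> offsets) {
        this.offsetFile = offsetFile;
        this.offsets = offsets;
    }

    public void save() {
        // try-with-resources closes the stream even if writeObject throws
        try (ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(offsetFile))) {
            Map<byte[], byte[]> raw = new HashMap<>();
            for (Map.Entry<ByteBuffer, ByteBuffer> entry : offsets.entrySet()) {
                raw.put(entry.getKey() == null ? null : entry.getKey().array(),
                        entry.getValue() == null ? null : entry.getValue().array());
            }
            os.writeObject(raw);
        } catch (IOException e) {
            // ConnectException is unchecked, giving callers one failure type to handle
            throw new ConnectException(e);
        }
    }
}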

Example 27 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the apache/kafka project.

The class FileOffsetBackingStore, method load().

@SuppressWarnings("unchecked")
private void load() {
    try {
        ObjectInputStream is = new ObjectInputStream(new FileInputStream(file));
        Object obj = is.readObject();
        if (!(obj instanceof HashMap))
            throw new ConnectException("Expected HashMap but found " + obj.getClass());
        Map<byte[], byte[]> raw = (Map<byte[], byte[]>) obj;
        data = new HashMap<>();
        for (Map.Entry<byte[], byte[]> mapEntry : raw.entrySet()) {
            ByteBuffer key = (mapEntry.getKey() != null) ? ByteBuffer.wrap(mapEntry.getKey()) : null;
            ByteBuffer value = (mapEntry.getValue() != null) ? ByteBuffer.wrap(mapEntry.getValue()) : null;
            data.put(key, value);
        }
        is.close();
    } catch (FileNotFoundException | EOFException e) {
    // FileNotFoundException: Ignore, may be new.
    // EOFException: Ignore, this means the file was missing or corrupt
    } catch (IOException | ClassNotFoundException e) {
        throw new ConnectException(e);
    }
}
Also used : HashMap(java.util.HashMap) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) ByteBuffer(java.nio.ByteBuffer) FileInputStream(java.io.FileInputStream) EOFException(java.io.EOFException) Map(java.util.Map) ObjectInputStream(java.io.ObjectInputStream) ConnectException(org.apache.kafka.connect.errors.ConnectException)
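
A matching load counterpart, again only a sketch with invented names (OffsetFileReader, offsetFile): a missing or empty file is treated as "no offsets yet" and yields an empty map, while genuine I/O or deserialization failures are wrapped in ConnectException. It mirrors the snippet above but uses try-with-resources and pairs with the save sketch shown earlier, so offsets written there round-trip back as ByteBuffer keys and values.

import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.errors.ConnectException;

public class OffsetFileReader {

    @SuppressWarnings("unchecked")
    public static Map<ByteBuffer, ByteBuffer> load(File offsetFile) {
        Map<ByteBuffer, ByteBuffer> data = new HashMap<>();
        try (ObjectInputStream is = new ObjectInputStream(new FileInputStream(offsetFile))) {
            Object obj = is.readObject();
            if (!(obj instanceof HashMap))
                throw new ConnectException("Expected HashMap but found " + obj.getClass());
            Map<byte[], byte[]> raw = (Map<byte[], byte[]>) obj;
            for (Map.Entry<byte[], byte[]> entry : raw.entrySet()) {
                data.put(entry.getKey() == null ? null : ByteBuffer.wrap(entry.getKey()),
                         entry.getValue() == null ? null : ByteBuffer.wrap(entry.getValue()));
            }
        } catch (FileNotFoundException | EOFException e) {
            // No usable file yet: return an empty map instead of failing
        } catch (IOException | ClassNotFoundException e) {
            throw new ConnectException(e);
        }
        return data;
    }
}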

Example 28 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the apache/kafka project.

The class Worker, method buildWorkerTask().

private WorkerTask buildWorkerTask(ConnectorConfig connConfig, ConnectorTaskId id, Task task, TaskStatus.Listener statusListener, TargetState initialState, Converter keyConverter, Converter valueConverter) {
    // Decide which type of worker task we need based on the type of task.
    if (task instanceof SourceTask) {
        TransformationChain<SourceRecord> transformationChain = new TransformationChain<>(connConfig.<SourceRecord>transformations());
        OffsetStorageReader offsetReader = new OffsetStorageReaderImpl(offsetBackingStore, id.connector(), internalKeyConverter, internalValueConverter);
        OffsetStorageWriter offsetWriter = new OffsetStorageWriter(offsetBackingStore, id.connector(), internalKeyConverter, internalValueConverter);
        KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(producerProps);
        return new WorkerSourceTask(id, (SourceTask) task, statusListener, initialState, keyConverter, valueConverter, transformationChain, producer, offsetReader, offsetWriter, config, time);
    } else if (task instanceof SinkTask) {
        TransformationChain<SinkRecord> transformationChain = new TransformationChain<>(connConfig.<SinkRecord>transformations());
        return new WorkerSinkTask(id, (SinkTask) task, statusListener, initialState, config, keyConverter, valueConverter, transformationChain, time);
    } else {
        log.error("Tasks must be a subclass of either SourceTask or SinkTask", task);
        throw new ConnectException("Tasks must be a subclass of either SourceTask or SinkTask");
    }
}
Also used : OffsetStorageWriter(org.apache.kafka.connect.storage.OffsetStorageWriter) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) SinkTask(org.apache.kafka.connect.sink.SinkTask) SinkRecord(org.apache.kafka.connect.sink.SinkRecord) SourceRecord(org.apache.kafka.connect.source.SourceRecord) OffsetStorageReaderImpl(org.apache.kafka.connect.storage.OffsetStorageReaderImpl) SourceTask(org.apache.kafka.connect.source.SourceTask) OffsetStorageReader(org.apache.kafka.connect.storage.OffsetStorageReader) ConnectException(org.apache.kafka.connect.errors.ConnectException)
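
The instanceof dispatch at the heart of buildWorkerTask can be isolated into a small sketch (TaskTypeDispatcher is an invented name): a Task that is neither a SourceTask nor a SinkTask is rejected with a ConnectException rather than being silently ignored.

import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.sink.SinkTask;
import org.apache.kafka.connect.source.SourceTask;

public class TaskTypeDispatcher {

    // Returns a label for the task type, mirroring the dispatch above;
    // anything else is rejected with ConnectException.
    public static String classify(Task task) {
        if (task instanceof SourceTask) {
            return "source";
        } else if (task instanceof SinkTask) {
            return "sink";
        } else {
            throw new ConnectException("Tasks must be a subclass of either SourceTask or SinkTask: "
                    + task.getClass().getName());
        }
    }
}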

Example 29 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the apache/kafka project.

The class Worker, method startTask().

/**
 * Start a task managed by this worker.
 *
 * @param id the task ID.
 * @param connProps the connector properties.
 * @param taskProps the task properties.
 * @param statusListener a listener for the runtime status transitions of the task.
 * @param initialState the initial state of the task.
 * @return true if the task started successfully.
 */
public boolean startTask(ConnectorTaskId id, Map<String, String> connProps, Map<String, String> taskProps, TaskStatus.Listener statusListener, TargetState initialState) {
    log.info("Creating task {}", id);
    if (tasks.containsKey(id))
        throw new ConnectException("Task already exists in this worker: " + id);
    final WorkerTask workerTask;
    try {
        final ConnectorConfig connConfig = new ConnectorConfig(connProps);
        final TaskConfig taskConfig = new TaskConfig(taskProps);
        final Class<? extends Task> taskClass = taskConfig.getClass(TaskConfig.TASK_CLASS_CONFIG).asSubclass(Task.class);
        final Task task = connectorFactory.newTask(taskClass);
        log.info("Instantiated task {} with version {} of type {}", id, task.version(), taskClass.getName());
        Converter keyConverter = connConfig.getConfiguredInstance(WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, Converter.class);
        if (keyConverter != null)
            keyConverter.configure(connConfig.originalsWithPrefix("key.converter."), true);
        else
            keyConverter = defaultKeyConverter;
        Converter valueConverter = connConfig.getConfiguredInstance(WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, Converter.class);
        if (valueConverter != null)
            valueConverter.configure(connConfig.originalsWithPrefix("value.converter."), false);
        else
            valueConverter = defaultValueConverter;
        workerTask = buildWorkerTask(connConfig, id, task, statusListener, initialState, keyConverter, valueConverter);
        workerTask.initialize(taskConfig);
    } catch (Throwable t) {
        log.error("Failed to start task {}", id, t);
        statusListener.onFailure(id, t);
        return false;
    }
    WorkerTask existing = tasks.putIfAbsent(id, workerTask);
    if (existing != null)
        throw new ConnectException("Task already exists in this worker: " + id);
    executor.submit(workerTask);
    if (workerTask instanceof WorkerSourceTask) {
        sourceTaskOffsetCommitter.schedule(id, (WorkerSourceTask) workerTask);
    }
    return true;
}
Also used : SinkTask(org.apache.kafka.connect.sink.SinkTask) Task(org.apache.kafka.connect.connector.Task) SourceTask(org.apache.kafka.connect.source.SourceTask) Converter(org.apache.kafka.connect.storage.Converter) ConnectException(org.apache.kafka.connect.errors.ConnectException)
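
The duplicate-task guard in startTask combines a cheap containsKey check with an atomic putIfAbsent, so a concurrent caller that races past the first check is still caught. A minimal sketch of that pattern, with an invented TaskRegistry class standing in for the Worker's tasks map:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.kafka.connect.errors.ConnectException;

public class TaskRegistry<K, V> {

    private final ConcurrentMap<K, V> tasks = new ConcurrentHashMap<>();

    public void register(K id, V task) {
        // Fast pre-check for the common error case
        if (tasks.containsKey(id))
            throw new ConnectException("Task already exists in this worker: " + id);
        // putIfAbsent is atomic, so a concurrent registration of the same id is also rejected
        V existing = tasks.putIfAbsent(id, task);
        if (existing != null)
            throw new ConnectException("Task already exists in this worker: " + id);
    }
}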

Example 30 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in the apache/kafka project.

The class Worker, method startConnector().

/**
 * Start a connector managed by this worker.
 *
 * @param connName the connector name.
 * @param connProps the properties of the connector.
 * @param ctx the connector runtime context.
 * @param statusListener a listener for the runtime status transitions of the connector.
 * @param initialState the initial state of the connector.
 * @return true if the connector started successfully.
 */
public boolean startConnector(String connName, Map<String, String> connProps, ConnectorContext ctx, ConnectorStatus.Listener statusListener, TargetState initialState) {
    if (connectors.containsKey(connName))
        throw new ConnectException("Connector with name " + connName + " already exists");
    final WorkerConnector workerConnector;
    try {
        final ConnectorConfig connConfig = new ConnectorConfig(connProps);
        final String connClass = connConfig.getString(ConnectorConfig.CONNECTOR_CLASS_CONFIG);
        log.info("Creating connector {} of type {}", connName, connClass);
        final Connector connector = connectorFactory.newConnector(connClass);
        workerConnector = new WorkerConnector(connName, connector, ctx, statusListener);
        log.info("Instantiated connector {} with version {} of type {}", connName, connector.version(), connector.getClass());
        workerConnector.initialize(connConfig);
        workerConnector.transitionTo(initialState);
    } catch (Throwable t) {
        log.error("Failed to start connector {}", connName, t);
        statusListener.onFailure(connName, t);
        return false;
    }
    WorkerConnector existing = connectors.putIfAbsent(connName, workerConnector);
    if (existing != null)
        throw new ConnectException("Connector with name " + connName + " already exists");
    log.info("Finished creating connector {}", connName);
    return true;
}
Also used : Connector(org.apache.kafka.connect.connector.Connector) ConnectException(org.apache.kafka.connect.errors.ConnectException)
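
Both startTask and startConnector share the same error-handling shape: catch Throwable during startup, report it through a status listener, and return false instead of propagating. A compact sketch of that shape, with an invented FailureListener interface standing in for ConnectorStatus.Listener:

public class StartupGuard {

    // Invented callback standing in for the runtime's status listeners
    public interface FailureListener {
        void onFailure(String name, Throwable cause);
    }

    // Run a startup action; any failure is reported to the listener and
    // converted into a boolean result rather than rethrown to the caller.
    public static boolean tryStart(String name, Runnable startAction, FailureListener listener) {
        try {
            startAction.run();
            return true;
        } catch (Throwable t) {
            listener.onFailure(name, t);
            return false;
        }
    }
}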

Aggregations

ConnectException (org.apache.kafka.connect.errors.ConnectException): 42
HashMap (java.util.HashMap): 7
Map (java.util.Map): 7
ArrayList (java.util.ArrayList): 6
TimeoutException (java.util.concurrent.TimeoutException): 6
IOException (java.io.IOException): 5
Connector (org.apache.kafka.connect.connector.Connector): 5
ExecutionException (java.util.concurrent.ExecutionException): 4
NotFoundException (org.apache.kafka.connect.errors.NotFoundException): 4
ConnectorTaskId (org.apache.kafka.connect.util.ConnectorTaskId): 4
Test (org.junit.Test): 4
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 4
ByteBuffer (java.nio.ByteBuffer): 3
AlreadyExistsException (org.apache.kafka.connect.errors.AlreadyExistsException): 3
BadRequestException (org.apache.kafka.connect.runtime.rest.errors.BadRequestException): 3
SinkRecord (org.apache.kafka.connect.sink.SinkRecord): 3
SourceRecord (org.apache.kafka.connect.source.SourceRecord): 3
ThreadedTest (org.apache.kafka.connect.util.ThreadedTest): 3
BufferedReader (java.io.BufferedReader): 2
FileInputStream (java.io.FileInputStream): 2