Example 36 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache, from the class WorkerTest, method testReconfigureConnectorTasks.

@Test
public void testReconfigureConnectorTasks() throws Exception {
    expectStartStorage();
    // Create
    Connector connector = PowerMock.createMock(Connector.class);
    ConnectorContext ctx = PowerMock.createMock(ConnectorContext.class);
    EasyMock.expect(connectorFactory.newConnector(WorkerTestConnector.class.getName())).andReturn(connector);
    EasyMock.expect(connector.version()).andReturn("1.0");
    Map<String, String> props = new HashMap<>();
    props.put(SinkConnectorConfig.TOPICS_CONFIG, "foo,bar");
    props.put(ConnectorConfig.TASKS_MAX_CONFIG, "1");
    props.put(ConnectorConfig.NAME_CONFIG, CONNECTOR_ID);
    props.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, WorkerTestConnector.class.getName());
    connector.initialize(EasyMock.anyObject(ConnectorContext.class));
    EasyMock.expectLastCall();
    connector.start(props);
    EasyMock.expectLastCall();
    connectorStatusListener.onStartup(CONNECTOR_ID);
    EasyMock.expectLastCall();
    // Reconfigure
    EasyMock.<Class<? extends Task>>expect(connector.taskClass()).andReturn(TestSourceTask.class);
    Map<String, String> taskProps = new HashMap<>();
    taskProps.put("foo", "bar");
    EasyMock.expect(connector.taskConfigs(2)).andReturn(Arrays.asList(taskProps, taskProps));
    // Remove
    connector.stop();
    EasyMock.expectLastCall();
    connectorStatusListener.onShutdown(CONNECTOR_ID);
    EasyMock.expectLastCall();
    expectStopStorage();
    PowerMock.replayAll();
    worker = new Worker(WORKER_ID, new MockTime(), connectorFactory, config, offsetBackingStore);
    worker.start();
    assertEquals(Collections.emptySet(), worker.connectorNames());
    worker.startConnector(CONNECTOR_ID, props, ctx, connectorStatusListener, TargetState.STARTED);
    assertEquals(new HashSet<>(Arrays.asList(CONNECTOR_ID)), worker.connectorNames());
    try {
        worker.startConnector(CONNECTOR_ID, props, ctx, connectorStatusListener, TargetState.STARTED);
        fail("Should have thrown exception when trying to add connector with same name.");
    } catch (ConnectException e) {
    // expected
    }
    List<Map<String, String>> taskConfigs = worker.connectorTaskConfigs(CONNECTOR_ID, 2, Arrays.asList("foo", "bar"));
    Map<String, String> expectedTaskProps = new HashMap<>();
    expectedTaskProps.put("foo", "bar");
    expectedTaskProps.put(TaskConfig.TASK_CLASS_CONFIG, TestSourceTask.class.getName());
    expectedTaskProps.put(SinkTask.TOPICS_CONFIG, "foo,bar");
    assertEquals(2, taskConfigs.size());
    assertEquals(expectedTaskProps, taskConfigs.get(0));
    assertEquals(expectedTaskProps, taskConfigs.get(1));
    worker.stopConnector(CONNECTOR_ID);
    assertEquals(Collections.emptySet(), worker.connectorNames());
    // Nothing should be left, so this should effectively be a nop
    worker.stop();
    PowerMock.verifyAll();
}
Also used: Connector (org.apache.kafka.connect.connector.Connector), ConnectorContext (org.apache.kafka.connect.connector.ConnectorContext), HashMap (java.util.HashMap), Map (java.util.Map), MockTime (org.apache.kafka.connect.util.MockTime), ConnectException (org.apache.kafka.connect.errors.ConnectException), ThreadedTest (org.apache.kafka.connect.util.ThreadedTest), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
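
For context, the mocked expectations above mirror what a minimal test connector has to provide: a task class and one config map per requested task. The sketch below is illustrative only (the real WorkerTestConnector is defined inside WorkerTest, and imports are omitted to match the snippets above); it shows the shape of a Connector that would satisfy the taskClass() and taskConfigs(int) calls as mocked.

// Illustrative sketch only; the actual WorkerTestConnector lives in WorkerTest.
public static class SampleSourceConnector extends SourceConnector {

    private Map<String, String> props;

    @Override
    public String version() {
        return "1.0";
    }

    @Override
    public void start(Map<String, String> props) {
        this.props = props;
    }

    @Override
    public Class<? extends Task> taskClass() {
        return TestSourceTask.class;
    }

    @Override
    public List<Map<String, String>> taskConfigs(int maxTasks) {
        // One identical config per requested task, matching what the mock above returns.
        List<Map<String, String>> configs = new ArrayList<>(maxTasks);
        for (int i = 0; i < maxTasks; i++)
            configs.add(new HashMap<>(props));
        return configs;
    }

    @Override
    public void stop() {
    }

    @Override
    public ConfigDef config() {
        return new ConfigDef();
    }
}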

Example 37 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache, from the class WorkerTest, method testStartAndStopConnector.

@Test
public void testStartAndStopConnector() throws Exception {
    expectStartStorage();
    // Create
    Connector connector = PowerMock.createMock(Connector.class);
    ConnectorContext ctx = PowerMock.createMock(ConnectorContext.class);
    EasyMock.expect(connectorFactory.newConnector(WorkerTestConnector.class.getName())).andReturn(connector);
    EasyMock.expect(connector.version()).andReturn("1.0");
    Map<String, String> props = new HashMap<>();
    props.put(SinkConnectorConfig.TOPICS_CONFIG, "foo,bar");
    props.put(ConnectorConfig.TASKS_MAX_CONFIG, "1");
    props.put(ConnectorConfig.NAME_CONFIG, CONNECTOR_ID);
    props.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, WorkerTestConnector.class.getName());
    connector.initialize(EasyMock.anyObject(ConnectorContext.class));
    EasyMock.expectLastCall();
    connector.start(props);
    EasyMock.expectLastCall();
    connectorStatusListener.onStartup(CONNECTOR_ID);
    EasyMock.expectLastCall();
    // Remove
    connector.stop();
    EasyMock.expectLastCall();
    connectorStatusListener.onShutdown(CONNECTOR_ID);
    EasyMock.expectLastCall();
    expectStopStorage();
    PowerMock.replayAll();
    worker = new Worker(WORKER_ID, new MockTime(), connectorFactory, config, offsetBackingStore);
    worker.start();
    assertEquals(Collections.emptySet(), worker.connectorNames());
    worker.startConnector(CONNECTOR_ID, props, ctx, connectorStatusListener, TargetState.STARTED);
    assertEquals(new HashSet<>(Arrays.asList(CONNECTOR_ID)), worker.connectorNames());
    try {
        worker.startConnector(CONNECTOR_ID, props, ctx, connectorStatusListener, TargetState.STARTED);
        fail("Should have thrown exception when trying to add connector with same name.");
    } catch (ConnectException e) {
    // expected
    }
    worker.stopConnector(CONNECTOR_ID);
    assertEquals(Collections.emptySet(), worker.connectorNames());
    // Nothing should be left, so this should effectively be a nop
    worker.stop();
    PowerMock.verifyAll();
}
Also used: Connector (org.apache.kafka.connect.connector.Connector), HashMap (java.util.HashMap), ConnectorContext (org.apache.kafka.connect.connector.ConnectorContext), MockTime (org.apache.kafka.connect.util.MockTime), ConnectException (org.apache.kafka.connect.errors.ConnectException), ThreadedTest (org.apache.kafka.connect.util.ThreadedTest), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
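
The duplicate startConnector call is expected to fail because the Worker keeps running connectors keyed by name. A plausible sketch of that guard, with illustrative field and variable names rather than the actual Worker internals:

// Hypothetical guard inside Worker.startConnector(); names are illustrative.
if (connectors.containsKey(connName))
    throw new ConnectException("Connector with name " + connName + " already exists");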

Example 38 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache, from the class RestServer, method stop.

public void stop() {
    log.info("Stopping REST server");
    try {
        jettyServer.stop();
        jettyServer.join();
    } catch (Exception e) {
        throw new ConnectException("Unable to stop REST server", e);
    } finally {
        jettyServer.destroy();
    }
    log.info("REST server stopped");
}
Also used: ConnectRestException (org.apache.kafka.connect.runtime.rest.errors.ConnectRestException), IOException (java.io.IOException), ConnectException (org.apache.kafka.connect.errors.ConnectException)
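
This method wraps Jetty's checked exceptions in the unchecked ConnectException, so callers only have to handle one exception type during shutdown. A minimal caller-side sketch, assuming a restServer field and an slf4j logger (both names are assumptions for illustration):

try {
    restServer.stop();
} catch (ConnectException e) {
    // jettyServer.destroy() has already run in the finally block above,
    // so log the failure and continue shutting down the rest of the worker.
    log.error("Failed to stop REST server cleanly", e);
}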

Example 39 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache, from the class OffsetStorageReaderImpl, method offsets.

@Override
@SuppressWarnings("unchecked")
public <T> Map<Map<String, T>, Map<String, Object>> offsets(Collection<Map<String, T>> partitions) {
    // Serialize keys so backing store can work with them
    Map<ByteBuffer, Map<String, T>> serializedToOriginal = new HashMap<>(partitions.size());
    for (Map<String, T> key : partitions) {
        try {
            // Offsets are treated as schemaless, their format is only validated here (and the returned value below)
            OffsetUtils.validateFormat(key);
            byte[] keySerialized = keyConverter.fromConnectData(namespace, null, Arrays.asList(namespace, key));
            ByteBuffer keyBuffer = (keySerialized != null) ? ByteBuffer.wrap(keySerialized) : null;
            serializedToOriginal.put(keyBuffer, key);
        } catch (Throwable t) {
            log.error("CRITICAL: Failed to serialize partition key when getting offsets for task with " + "namespace {}. No value for this data will be returned, which may break the " + "task or cause it to skip some data.", namespace, t);
        }
    }
    // Get serialized key -> serialized value from backing store
    Map<ByteBuffer, ByteBuffer> raw;
    try {
        raw = backingStore.get(serializedToOriginal.keySet(), null).get();
    } catch (Exception e) {
        log.error("Failed to fetch offsets from namespace {}: ", namespace, e);
        throw new ConnectException("Failed to fetch offsets.", e);
    }
    // Deserialize all the values and map back to the original keys
    Map<Map<String, T>, Map<String, Object>> result = new HashMap<>(partitions.size());
    for (Map.Entry<ByteBuffer, ByteBuffer> rawEntry : raw.entrySet()) {
        try {
            // Since null could be a valid key, explicitly check whether map contains the key
            if (!serializedToOriginal.containsKey(rawEntry.getKey())) {
                log.error("Should be able to map {} back to a requested partition-offset key, backing " + "store may have returned invalid data", rawEntry.getKey());
                continue;
            }
            Map<String, T> origKey = serializedToOriginal.get(rawEntry.getKey());
            SchemaAndValue deserializedSchemaAndValue = valueConverter.toConnectData(namespace, rawEntry.getValue() != null ? rawEntry.getValue().array() : null);
            Object deserializedValue = deserializedSchemaAndValue.value();
            OffsetUtils.validateFormat(deserializedValue);
            result.put(origKey, (Map<String, Object>) deserializedValue);
        } catch (Throwable t) {
            log.error("CRITICAL: Failed to deserialize offset data when getting offsets for task with" + " namespace {}. No value for this data will be returned, which may break the " + "task or cause it to skip some data. This could either be due to an error in " + "the connector implementation or incompatible schema.", namespace, t);
        }
    }
    return result;
}
Also used: HashMap (java.util.HashMap), Map (java.util.Map), ByteBuffer (java.nio.ByteBuffer), SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue), ConnectException (org.apache.kafka.connect.errors.ConnectException)
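
Connector code normally reaches this method through the OffsetStorageReader handed to a source task. A minimal sketch of the typical call from SourceTask.start(), assuming a file-style partition keyed by "filename" with a "position" offset (the config and map keys are examples, not a required schema):

@Override
public void start(Map<String, String> props) {
    String filename = props.get("file");  // illustrative config key
    Map<String, String> partition = Collections.singletonMap("filename", filename);
    // May throw ConnectException if the backing store cannot be reached.
    Map<String, Object> offset = context.offsetStorageReader().offset(partition);
    long position = 0L;
    if (offset != null && offset.get("position") != null)
        position = (Long) offset.get("position");
    // ... resume reading from 'position'
}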

Example 40 with ConnectException

Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache, from the class ConnectorsResourceTest, method testListConnectorsNotSynced.

@Test(expected = ConnectException.class)
public void testListConnectorsNotSynced() throws Throwable {
    final Capture<Callback<Collection<String>>> cb = Capture.newInstance();
    herder.connectors(EasyMock.capture(cb));
    expectAndCallbackException(cb, new ConnectException("not synced"));
    PowerMock.replayAll();
    // throws
    connectorsResource.listConnectors(FORWARD);
}
Also used: Callback (org.apache.kafka.connect.util.Callback), ConnectException (org.apache.kafka.connect.errors.ConnectException), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
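
The helper expectAndCallbackException is defined elsewhere in ConnectorsResourceTest and is not shown on this page. A plausible sketch of what such a helper does is to complete the captured herder callback with the supplied error, which listConnectors then rethrows:

// Plausible sketch only; the real helper lives in ConnectorsResourceTest.
private <T> void expectAndCallbackException(final Capture<Callback<T>> cb, final Throwable error) {
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            cb.getValue().onCompletion(error, null);
            return null;
        }
    });
}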

Aggregations

ConnectException (org.apache.kafka.connect.errors.ConnectException): 42 uses
HashMap (java.util.HashMap): 7 uses
Map (java.util.Map): 7 uses
ArrayList (java.util.ArrayList): 6 uses
TimeoutException (java.util.concurrent.TimeoutException): 6 uses
IOException (java.io.IOException): 5 uses
Connector (org.apache.kafka.connect.connector.Connector): 5 uses
ExecutionException (java.util.concurrent.ExecutionException): 4 uses
NotFoundException (org.apache.kafka.connect.errors.NotFoundException): 4 uses
ConnectorTaskId (org.apache.kafka.connect.util.ConnectorTaskId): 4 uses
Test (org.junit.Test): 4 uses
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 4 uses
ByteBuffer (java.nio.ByteBuffer): 3 uses
AlreadyExistsException (org.apache.kafka.connect.errors.AlreadyExistsException): 3 uses
BadRequestException (org.apache.kafka.connect.runtime.rest.errors.BadRequestException): 3 uses
SinkRecord (org.apache.kafka.connect.sink.SinkRecord): 3 uses
SourceRecord (org.apache.kafka.connect.source.SourceRecord): 3 uses
ThreadedTest (org.apache.kafka.connect.util.ThreadedTest): 3 uses
BufferedReader (java.io.BufferedReader): 2 uses
FileInputStream (java.io.FileInputStream): 2 uses