Example 61 with ExecutionException

use of java.util.concurrent.ExecutionException in project kafka by apache.

the class KafkaConfigBackingStore method putTaskConfigs.

/**
     * Write these task configurations and associated commit messages, unless an inconsistency is found that indicates
     * that we would be leaving one of the referenced connectors with an inconsistent state.
     *
     * @param connector the connector whose task configurations are being written
     * @param configs list of task configurations for the connector
     * @throws ConnectException if the task configurations do not resolve inconsistencies found in the existing root
     *                          and task configurations.
     */
@Override
public void putTaskConfigs(String connector, List<Map<String, String>> configs) {
    // Make sure we're at the end of the log. We should be the only writer, but we want to make sure we don't have
    // any outstanding lagging data to consume.
    try {
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write root configuration to Kafka: ", e);
        throw new ConnectException("Error writing root configuration to Kafka", e);
    }
    int taskCount = configs.size();
    // Start sending all the individual updates
    int index = 0;
    for (Map<String, String> taskConfig : configs) {
        Struct connectConfig = new Struct(TASK_CONFIGURATION_V0);
        connectConfig.put("properties", taskConfig);
        byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig);
        log.debug("Writing configuration for task " + index + " configuration: " + taskConfig);
        ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index);
        configLog.send(TASK_KEY(connectorTaskId), serializedConfig);
        index++;
    }
    // Finally, send the commit to update the number of tasks and apply the new configs, then wait until we read to
    // the end of the log
    try {
        // Read to end to ensure all the task configs have been written
        if (taskCount > 0) {
            configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        }
        // Write the commit message
        Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0);
        connectConfig.put("tasks", taskCount);
        byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig);
        log.debug("Writing commit for connector " + connector + " with " + taskCount + " tasks.");
        configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig);
        // Read to end to ensure all the commit messages have been written
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write root configuration to Kafka: ", e);
        throw new ConnectException("Error writing root configuration to Kafka", e);
    }
}
Also used : ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) ExecutionException(java.util.concurrent.ExecutionException) TimeoutException(java.util.concurrent.TimeoutException) ConnectException(org.apache.kafka.connect.errors.ConnectException) Struct(org.apache.kafka.connect.data.Struct)
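
The pattern above recurs throughout KafkaConfigBackingStore: block on configLog.readToEnd() with a timeout, and translate InterruptedException, ExecutionException, and TimeoutException into a single ConnectException. A minimal, generic sketch of that idiom (the FutureWait helper, its method name, and its messages are illustrative, not part of the Kafka codebase); one common refinement shown here is restoring the interrupt flag when InterruptedException is caught:

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Illustrative helper (not from the Kafka codebase): wait for a Future with a timeout
// and rethrow any of the three checked failures as one unchecked exception.
public final class FutureWait {

    private FutureWait() {
    }

    public static <T> T awaitOrThrow(Future<T> future, long timeoutMs) {
        try {
            return future.get(timeoutMs, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers further up the stack can still observe it.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for future", e);
        } catch (ExecutionException e) {
            // The underlying failure is the wrapped cause, not the ExecutionException itself.
            throw new RuntimeException("Operation failed", e.getCause());
        } catch (TimeoutException e) {
            throw new RuntimeException("Timed out waiting for future", e);
        }
    }
}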

Example 62 with ExecutionException

use of java.util.concurrent.ExecutionException in project kafka by apache.

the class KafkaConfigBackingStore method updateConnectorConfig.

private void updateConnectorConfig(String connector, byte[] serializedConfig) {
    try {
        configLog.send(CONNECTOR_KEY(connector), serializedConfig);
        configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        log.error("Failed to write connector configuration to Kafka: ", e);
        throw new ConnectException("Error writing connector configuration to Kafka", e);
    }
}
Also used : ExecutionException(java.util.concurrent.ExecutionException) TimeoutException(java.util.concurrent.TimeoutException) ConnectException(org.apache.kafka.connect.errors.ConnectException)

Example 63 with ExecutionException

use of java.util.concurrent.ExecutionException in project kafka by apache.

the class MockProducerTest method testManualCompletion.

@Test
public void testManualCompletion() throws Exception {
    MockProducer<byte[], byte[]> producer = new MockProducer<>(false, new MockSerializer(), new MockSerializer());
    ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(topic, "key1".getBytes(), "value1".getBytes());
    ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(topic, "key2".getBytes(), "value2".getBytes());
    Future<RecordMetadata> md1 = producer.send(record1);
    assertFalse("Send shouldn't have completed", md1.isDone());
    Future<RecordMetadata> md2 = producer.send(record2);
    assertFalse("Send shouldn't have completed", md2.isDone());
    assertTrue("Complete the first request", producer.completeNext());
    assertFalse("Requst should be successful", isError(md1));
    assertFalse("Second request still incomplete", md2.isDone());
    IllegalArgumentException e = new IllegalArgumentException("blah");
    assertTrue("Complete the second request with an error", producer.errorNext(e));
    try {
        md2.get();
        fail("Expected error to be thrown");
    } catch (ExecutionException err) {
        assertEquals(e, err.getCause());
    }
    assertFalse("No more requests to complete", producer.completeNext());
    Future<RecordMetadata> md3 = producer.send(record1);
    Future<RecordMetadata> md4 = producer.send(record2);
    assertTrue("Requests should not be completed.", !md3.isDone() && !md4.isDone());
    producer.flush();
    assertTrue("Requests should be completed.", md3.isDone() && md4.isDone());
}
Also used : MockSerializer(org.apache.kafka.test.MockSerializer) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
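
The try / fail / catch block above is the classic JUnit 4 way of asserting that md2.get() throws an ExecutionException wrapping the error passed to errorNext. Assuming JUnit 4.13+ (or JUnit 5), where assertThrows is available, the same check can be written more compactly; this is an alternative sketch, not how the Kafka test itself is written:

// Requires: import static org.junit.Assert.assertThrows; (JUnit 4.13+)
ExecutionException thrown = assertThrows(ExecutionException.class, md2::get);
assertEquals(e, thrown.getCause());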

Example 64 with ExecutionException

use of java.util.concurrent.ExecutionException in project kafka by apache.

the class DistributedHerderTest method testRestartUnknownTask.

@Test
public void testRestartUnknownTask() throws Exception {
    // get the initial assignment
    EasyMock.expect(member.memberId()).andStubReturn("member");
    expectRebalance(1, Collections.<String>emptyList(), Collections.<ConnectorTaskId>emptyList());
    expectPostRebalanceCatchup(SNAPSHOT);
    member.poll(EasyMock.anyInt());
    PowerMock.expectLastCall();
    member.wakeup();
    PowerMock.expectLastCall();
    member.ensureActive();
    PowerMock.expectLastCall();
    member.poll(EasyMock.anyInt());
    PowerMock.expectLastCall();
    PowerMock.replayAll();
    FutureCallback<Void> callback = new FutureCallback<>();
    herder.tick();
    herder.restartTask(new ConnectorTaskId("blah", 0), callback);
    herder.tick();
    try {
        callback.get(1000L, TimeUnit.MILLISECONDS);
        fail("Expected NotLeaderException to be raised");
    } catch (ExecutionException e) {
        assertTrue(e.getCause() instanceof NotFoundException);
    }
    PowerMock.verifyAll();
}
Also used : ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) NotFoundException(org.apache.kafka.connect.errors.NotFoundException) ExecutionException(java.util.concurrent.ExecutionException) FutureCallback(org.apache.kafka.connect.util.FutureCallback) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
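
FutureCallback here is Kafka Connect's bridge between the herder's asynchronous callback API and a Future: restartTask reports its outcome through the callback, and a failure reported that way surfaces as an ExecutionException from get(), which the test unwraps to inspect the cause. A rough, illustrative sketch of that mechanism built on CompletableFuture (not Kafka's actual FutureCallback implementation):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Illustrative only: a callback-backed future. A failure passed to the callback
// surfaces as an ExecutionException wrapping that failure when get() is called.
public class CallbackFuture<T> {

    private final CompletableFuture<T> delegate = new CompletableFuture<>();

    // Invoked by the asynchronous operation when it finishes.
    public void onCompletion(Throwable error, T result) {
        if (error != null) {
            delegate.completeExceptionally(error);
        } else {
            delegate.complete(result);
        }
    }

    public T get(long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        return delegate.get(timeout, unit);
    }
}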

Example 65 with ExecutionException

use of java.util.concurrent.ExecutionException in project kafka by apache.

the class StandaloneHerderTest method testRestartTaskFailureOnStart.

@Test
public void testRestartTaskFailureOnStart() throws Exception {
    ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0);
    expectAdd(SourceSink.SOURCE);
    Map<String, String> connectorConfig = connectorConfig(SourceSink.SOURCE);
    expectConfigValidation(connectorConfig);
    worker.stopAndAwaitTask(taskId);
    EasyMock.expectLastCall();
    worker.startTask(taskId, connectorConfig, taskConfig(SourceSink.SOURCE), herder, TargetState.STARTED);
    EasyMock.expectLastCall().andReturn(false);
    PowerMock.replayAll();
    herder.putConnectorConfig(CONNECTOR_NAME, connectorConfig, false, createCallback);
    FutureCallback<Void> cb = new FutureCallback<>();
    herder.restartTask(taskId, cb);
    try {
        cb.get(1000L, TimeUnit.MILLISECONDS);
        fail("Expected restart callback to raise an exception");
    } catch (ExecutionException exception) {
        assertEquals(ConnectException.class, exception.getCause().getClass());
    }
    PowerMock.verifyAll();
}
Also used : ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) ExecutionException(java.util.concurrent.ExecutionException) FutureCallback(org.apache.kafka.connect.util.FutureCallback) ConnectException(org.apache.kafka.connect.errors.ConnectException) Test(org.junit.Test)

Aggregations

ExecutionException (java.util.concurrent.ExecutionException) 1341
IOException (java.io.IOException) 367
Test (org.junit.Test) 335
TimeoutException (java.util.concurrent.TimeoutException) 258
ArrayList (java.util.ArrayList) 237
Future (java.util.concurrent.Future) 218
ExecutorService (java.util.concurrent.ExecutorService) 152
CountDownLatch (java.util.concurrent.CountDownLatch) 103
List (java.util.List) 98
CancellationException (java.util.concurrent.CancellationException) 98
Callable (java.util.concurrent.Callable) 97
Test (org.testng.annotations.Test) 78
HashMap (java.util.HashMap) 69
Map (java.util.Map) 65
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean) 64
RejectedExecutionException (java.util.concurrent.RejectedExecutionException) 63
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 56
ParallelTest (com.hazelcast.test.annotation.ParallelTest) 47
QuickTest (com.hazelcast.test.annotation.QuickTest) 47
UncheckedExecutionException (com.google.common.util.concurrent.UncheckedExecutionException) 46
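
As the aggregation suggests, ExecutionException almost always appears alongside Future, ExecutorService, and TimeoutException. A minimal, self-contained example (not taken from any of the projects above) of how the exception arises and why its cause must be unwrapped:

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Any exception thrown inside a submitted task is captured by the executor and rethrown,
// wrapped in ExecutionException, when Future.get() is called.
public class ExecutionExceptionDemo {

    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        Callable<String> failingTask = () -> {
            throw new IllegalStateException("failure inside the task");
        };
        Future<String> future = executor.submit(failingTask);
        try {
            future.get();
        } catch (ExecutionException e) {
            // The original failure is available as the cause.
            System.out.println("Task failed: " + e.getCause());
        } finally {
            executor.shutdown();
        }
    }
}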