Example 46 with StreamsException

Use of org.apache.kafka.streams.errors.StreamsException in project kafka by apache.

In the class StreamThreadTest, method runAndVerifyFailedStreamThreadRecording:

public void runAndVerifyFailedStreamThreadRecording(final boolean shouldFail) {
    final Consumer<byte[], byte[]> consumer = EasyMock.createNiceMock(Consumer.class);
    final ConsumerGroupMetadata consumerGroupMetadata = mock(ConsumerGroupMetadata.class);
    expect(consumer.groupMetadata()).andStubReturn(consumerGroupMetadata);
    expect(consumerGroupMetadata.groupInstanceId()).andReturn(Optional.empty());
    EasyMock.replay(consumer, consumerGroupMetadata);
    final TaskManager taskManager = EasyMock.createNiceMock(TaskManager.class);
    expect(taskManager.producerClientIds()).andStubReturn(Collections.emptySet());
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, StreamsConfig.METRICS_LATEST, mockTime);
    final TopologyMetadata topologyMetadata = new TopologyMetadata(internalTopologyBuilder, config);
    topologyMetadata.buildAndRewriteTopology();
    final StreamThread thread = new StreamThread(
        mockTime, config, null, consumer, consumer,
        null, null, taskManager, streamsMetrics, topologyMetadata,
        CLIENT_ID, new LogContext(""), new AtomicInteger(),
        new AtomicLong(Long.MAX_VALUE), new LinkedList<>(), null,
        (e, b) -> { }, null) {

        @Override
        void runOnce() {
            // Shut down after a single iteration; optionally fail to exercise the failed-stream-threads metric.
            setState(StreamThread.State.PENDING_SHUTDOWN);
            if (shouldFail) {
                throw new StreamsException(Thread.currentThread().getName());
            }
        }
    };
    EasyMock.replay(taskManager);
    thread.updateThreadMetadata("metadata");
    thread.run();
    // The client-level "failed-stream-threads" metric should count the thread only if it failed.
    final Metric failedThreads = StreamsTestUtils.getMetricByName(metrics.metrics(), "failed-stream-threads", "stream-metrics");
    assertThat(failedThreads.metricValue(), is(shouldFail ? 1.0 : 0.0));
}
Also used: ConsumerGroupMetadata (org.apache.kafka.clients.consumer.ConsumerGroupMetadata), AtomicLong (java.util.concurrent.atomic.AtomicLong), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), StreamsException (org.apache.kafka.streams.errors.StreamsException), LogContext (org.apache.kafka.common.utils.LogContext), Metric (org.apache.kafka.common.Metric), KafkaMetric (org.apache.kafka.common.metrics.KafkaMetric), StreamsMetricsImpl (org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl)
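
The failed-stream-threads metric asserted above counts stream threads that die with an uncaught exception such as this StreamsException. As a point of reference, here is a minimal sketch of how an application might react to such failures; the application id, bootstrap server, and topology are placeholders and are not taken from the test above.

import java.util.Properties;

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse;

public class FailedThreadHandlingSketch {

    public static void main(final String[] args) {
        // Placeholder configuration, not taken from the test above.
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "failed-thread-demo");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        // Placeholder topology: copy one topic to another.
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input-topic").to("output-topic");

        final KafkaStreams streams = new KafkaStreams(builder.build(), props);

        // Replace any stream thread that dies with an uncaught exception (for example a
        // StreamsException); each such failure is reflected in the failed-stream-threads metric.
        streams.setUncaughtExceptionHandler(exception -> StreamThreadExceptionResponse.REPLACE_THREAD);

        streams.start();
    }
}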

Example 47 with StreamsException

Use of org.apache.kafka.streams.errors.StreamsException in project kafka by apache.

In the class StoreChangelogReaderTest, method shouldThrowIfEndOffsetsFail:

@Test
public void shouldThrowIfEndOffsetsFail() {
    EasyMock.expect(storeMetadata.offset()).andReturn(10L).anyTimes();
    EasyMock.replay(activeStateManager, storeMetadata, store);
    // Admin client whose end-offset lookup always fails, simulating a broker-side error during restoration.
    final MockAdminClient adminClient = new MockAdminClient() {

        @Override
        public ListOffsetsResult listOffsets(final Map<TopicPartition, OffsetSpec> topicPartitionOffsets, final ListOffsetsOptions options) {
            throw kaboom;
        }
    };
    adminClient.updateEndOffsets(Collections.singletonMap(tp, 0L));
    final StoreChangelogReader changelogReader = new StoreChangelogReader(time, config, logContext, adminClient, consumer, callback);
    changelogReader.register(tp, activeStateManager);
    final StreamsException thrown = assertThrows(StreamsException.class, () -> changelogReader.restore(Collections.emptyMap()));
    assertEquals(kaboom, thrown.getCause());
}
Also used: ListOffsetsOptions (org.apache.kafka.clients.admin.ListOffsetsOptions), StreamsException (org.apache.kafka.streams.errors.StreamsException), MockAdminClient (org.apache.kafka.clients.admin.MockAdminClient), Utils.mkMap (org.apache.kafka.common.utils.Utils.mkMap), Map (java.util.Map), Collections.singletonMap (java.util.Collections.singletonMap), Test (org.junit.Test)

Example 48 with StreamsException

Use of org.apache.kafka.streams.errors.StreamsException in project kafka by apache.

In the class StoreChangelogReaderTest, method shouldThrowIfPositionFail:

@Test
public void shouldThrowIfPositionFail() {
    final TaskId taskId = new TaskId(0, 0);
    EasyMock.expect(activeStateManager.taskId()).andReturn(taskId);
    EasyMock.expect(storeMetadata.offset()).andReturn(10L).anyTimes();
    EasyMock.replay(activeStateManager, storeMetadata, store);
    // Consumer whose position() lookup always fails while the changelog is being restored.
    final MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(OffsetResetStrategy.EARLIEST) {

        @Override
        public long position(final TopicPartition partition) {
            throw kaboom;
        }
    };
    adminClient.updateEndOffsets(Collections.singletonMap(tp, 10L));
    final StoreChangelogReader changelogReader = new StoreChangelogReader(time, config, logContext, adminClient, consumer, callback);
    changelogReader.register(tp, activeStateManager);
    final StreamsException thrown = assertThrows(StreamsException.class, () -> changelogReader.restore(Collections.singletonMap(taskId, mock(Task.class))));
    assertEquals(kaboom, thrown.getCause());
}
Also used: TaskId (org.apache.kafka.streams.processor.TaskId), TopicPartition (org.apache.kafka.common.TopicPartition), StreamsException (org.apache.kafka.streams.errors.StreamsException), MockConsumer (org.apache.kafka.clients.consumer.MockConsumer), Test (org.junit.Test)

Example 49 with StreamsException

Use of org.apache.kafka.streams.errors.StreamsException in project kafka by apache.

In the class StoreChangelogReaderTest, method shouldThrowIfUnsubscribeFail:

@Test
public void shouldThrowIfUnsubscribeFail() {
    EasyMock.replay(stateManager, storeMetadata, store);
    // Consumer whose unsubscribe() call always fails, so clearing the changelog reader cannot complete cleanly.
    final MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(OffsetResetStrategy.EARLIEST) {

        @Override
        public void unsubscribe() {
            throw kaboom;
        }
    };
    final StoreChangelogReader changelogReader = new StoreChangelogReader(time, config, logContext, adminClient, consumer, callback);
    final StreamsException thrown = assertThrows(StreamsException.class, changelogReader::clear);
    assertEquals(kaboom, thrown.getCause());
}
Also used: StreamsException (org.apache.kafka.streams.errors.StreamsException), MockConsumer (org.apache.kafka.clients.consumer.MockConsumer), Test (org.junit.Test)

Example 50 with StreamsException

Use of org.apache.kafka.streams.errors.StreamsException in project kafka by apache.

In the class StoreChangelogReaderTest, method shouldThrowIfCommittedOffsetsFail:

@Test
public void shouldThrowIfCommittedOffsetsFail() {
    final TaskId taskId = new TaskId(0, 0);
    EasyMock.expect(stateManager.taskId()).andReturn(taskId);
    EasyMock.expect(stateManager.changelogAsSource(tp)).andReturn(true).anyTimes();
    EasyMock.expect(storeMetadata.offset()).andReturn(10L).anyTimes();
    EasyMock.replay(stateManager, storeMetadata, store);
    // Main consumer whose committed-offset lookup always fails; committed offsets are consulted here because the changelog is registered as a source topic.
    final MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(OffsetResetStrategy.EARLIEST) {

        @Override
        public Map<TopicPartition, OffsetAndMetadata> committed(final Set<TopicPartition> partitions) {
            throw kaboom;
        }
    };
    adminClient.updateEndOffsets(Collections.singletonMap(tp, 10L));
    final StoreChangelogReader changelogReader = new StoreChangelogReader(time, config, logContext, adminClient, consumer, callback);
    changelogReader.setMainConsumer(consumer);
    changelogReader.register(tp, stateManager);
    final StreamsException thrown = assertThrows(StreamsException.class, () -> changelogReader.restore(Collections.singletonMap(taskId, mock(Task.class))));
    assertEquals(kaboom, thrown.getCause());
}
Also used: TaskId (org.apache.kafka.streams.processor.TaskId), Utils.mkSet (org.apache.kafka.common.utils.Utils.mkSet), Set (java.util.Set), TopicPartition (org.apache.kafka.common.TopicPartition), StreamsException (org.apache.kafka.streams.errors.StreamsException), OffsetAndMetadata (org.apache.kafka.clients.consumer.OffsetAndMetadata), MockConsumer (org.apache.kafka.clients.consumer.MockConsumer), Test (org.junit.Test)
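
Examples 47 through 50 all follow the same pattern: override a single method of a mock client so that it throws kaboom (an exception defined elsewhere in StoreChangelogReaderTest and not shown in these snippets), then assert that the changelog reader surfaces it as the cause of a StreamsException. A self-contained sketch of that wrap-and-assert pattern, assuming a plain KafkaException as the injected failure and a hypothetical restoreLikeOperation standing in for the changelog reader, looks like this:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;

import org.apache.kafka.common.KafkaException;
import org.apache.kafka.streams.errors.StreamsException;
import org.junit.Test;

public class CauseWrappingSketchTest {

    // Stand-in for the kaboom fixture referenced by the examples above.
    private final KafkaException kaboom = new KafkaException("KABOOM!");

    // Stand-in for a component that, like StoreChangelogReader, wraps client failures.
    private void restoreLikeOperation() {
        try {
            throw kaboom;
        } catch (final KafkaException fatal) {
            throw new StreamsException("restore failed", fatal);
        }
    }

    @Test
    public void shouldWrapInjectedFailureAsCause() {
        final StreamsException thrown = assertThrows(StreamsException.class, this::restoreLikeOperation);
        assertEquals(kaboom, thrown.getCause());
    }
}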

Aggregations

StreamsException (org.apache.kafka.streams.errors.StreamsException): 186
Test (org.junit.Test): 90
KafkaException (org.apache.kafka.common.KafkaException): 41
TopicPartition (org.apache.kafka.common.TopicPartition): 38
TimeoutException (org.apache.kafka.common.errors.TimeoutException): 36
HashMap (java.util.HashMap): 27
Map (java.util.Map): 25
HashSet (java.util.HashSet): 18
Properties (java.util.Properties): 17
TaskId (org.apache.kafka.streams.processor.TaskId): 14
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 13
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 12
ArrayList (java.util.ArrayList): 11
ExecutionException (java.util.concurrent.ExecutionException): 11
TaskMigratedException (org.apache.kafka.streams.errors.TaskMigratedException): 11
IOException (java.io.IOException): 10
Set (java.util.Set): 10
LogContext (org.apache.kafka.common.utils.LogContext): 10
MockTime (org.apache.kafka.common.utils.MockTime): 10
StateStore (org.apache.kafka.streams.processor.StateStore): 10