Example 56 with Arrays.asList

Use of java.util.Arrays.asList in project kafka by apache.

From the class RequestResponseTest, the method testSerialization:

@Test
public void testSerialization() {
    Map<ApiKeys, List<Short>> toSkip = new HashMap<>();
    // It's not possible to create a MetadataRequest v0 via the builder
    toSkip.put(METADATA, singletonList((short) 0));
    // DescribeLogDirsResponse v0, v1 and v2 don't have a top level error field
    toSkip.put(DESCRIBE_LOG_DIRS, Arrays.asList((short) 0, (short) 1, (short) 2));
    // ElectLeaders v0 does not have a top-level error field; when accessed, it defaults to NONE
    toSkip.put(ELECT_LEADERS, singletonList((short) 0));
    for (ApiKeys apikey : ApiKeys.values()) {
        for (short version : apikey.allVersions()) {
            if (toSkip.containsKey(apikey) && toSkip.get(apikey).contains(version))
                continue;
            AbstractRequest request = getRequest(apikey, version);
            checkRequest(request);
            checkErrorResponse(request, unknownServerException);
            checkResponse(getResponse(apikey, version), version);
        }
    }
}
Also used : ApiKeys(org.apache.kafka.common.protocol.ApiKeys) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) Collections.singletonList(java.util.Collections.singletonList) Arrays.asList(java.util.Arrays.asList) LinkedList(java.util.LinkedList) ArrayList(java.util.ArrayList) Collections.emptyList(java.util.Collections.emptyList) List(java.util.List) Test(org.junit.jupiter.api.Test)
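
The mix of singletonList for one skipped version and Arrays.asList for several is the usual way to build these small, fixed lists. Below is a minimal, self-contained sketch of the same skip-map pattern; the Api enum and the version range are hypothetical stand-ins for Kafka's ApiKeys and apikey.allVersions(), not the real API.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SkipMapSketch {

    // Hypothetical stand-in for org.apache.kafka.common.protocol.ApiKeys
    enum Api { METADATA, DESCRIBE_LOG_DIRS, ELECT_LEADERS }

    public static void main(String[] args) {
        Map<Api, List<Short>> toSkip = new HashMap<>();
        // A single skipped version: singletonList is immutable and allocation-light
        toSkip.put(Api.METADATA, Collections.singletonList((short) 0));
        // Several skipped versions: Arrays.asList wraps the varargs array in a fixed-size view
        toSkip.put(Api.DESCRIBE_LOG_DIRS, Arrays.asList((short) 0, (short) 1, (short) 2));
        toSkip.put(Api.ELECT_LEADERS, Collections.singletonList((short) 0));
        for (Api api : Api.values()) {
            // Assumed version range; the real test iterates apikey.allVersions()
            for (short version = 0; version <= 2; version++) {
                if (toSkip.containsKey(api) && toSkip.get(api).contains(version))
                    continue; // skip known-unbuildable combinations
                System.out.println(api + " v" + version + " exercised");
            }
        }
    }
}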

Example 57 with Arrays.asList

Use of java.util.Arrays.asList in project kafka by apache.

From the class KStreamImplTest, the method testNumProcesses:

// specifically testing the deprecated variant
@SuppressWarnings({ "unchecked", "deprecation" })
@Test
public void testNumProcesses() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> source1 = builder.stream(Arrays.asList("topic-1", "topic-2"), stringConsumed);
    final KStream<String, String> source2 = builder.stream(Arrays.asList("topic-3", "topic-4"), stringConsumed);
    final KStream<String, String> stream1 = source1.filter((key, value) -> true).filterNot((key, value) -> false);
    final KStream<String, Integer> stream2 = stream1.mapValues((ValueMapper<String, Integer>) Integer::valueOf);
    final KStream<String, Integer> stream3 = source2.flatMapValues((ValueMapper<String, Iterable<Integer>>) value -> Collections.singletonList(Integer.valueOf(value)));
    final KStream<String, Integer>[] streams2 = stream2.branch((key, value) -> (value % 2) == 0, (key, value) -> true);
    final KStream<String, Integer>[] streams3 = stream3.branch((key, value) -> (value % 2) == 0, (key, value) -> true);
    final int anyWindowSize = 1;
    final StreamJoined<String, Integer, Integer> joined = StreamJoined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer());
    final KStream<String, Integer> stream4 = streams2[0].join(streams3[0], Integer::sum, JoinWindows.of(ofMillis(anyWindowSize)), joined);
    streams2[1].join(streams3[1], Integer::sum, JoinWindows.of(ofMillis(anyWindowSize)), joined);
    stream4.to("topic-5");
    streams2[1].through("topic-6").process(new MockProcessorSupplier<>());
    streams2[1].repartition().process(new MockProcessorSupplier<>());
    assertEquals(2 + // sources
        2 + // stream1
        1 + // stream2
        1 + // stream3
        1 + 2 + // streams2
        1 + 2 + // streams3
        5 * 2 + // stream2-stream3 joins
        1 + // to
        2 + // through
        1 + // process
        3 + // repartition
        1, // process
        TopologyWrapper.getInternalTopologyBuilder(builder.build()).setApplicationId("X").buildTopology().processors().size());
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) CoreMatchers.is(org.hamcrest.CoreMatchers.is) Arrays(java.util.Arrays) ValueTransformerSupplier(org.apache.kafka.streams.kstream.ValueTransformerSupplier) Produced(org.apache.kafka.streams.kstream.Produced) IsInstanceOf.instanceOf(org.hamcrest.core.IsInstanceOf.instanceOf) Stores(org.apache.kafka.streams.state.Stores) Repartitioned(org.apache.kafka.streams.kstream.Repartitioned) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) Joined(org.apache.kafka.streams.kstream.Joined) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Matcher(java.util.regex.Matcher) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ProcessorSupplier(org.apache.kafka.streams.processor.api.ProcessorSupplier) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TransformerSupplier(org.apache.kafka.streams.kstream.TransformerSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) TopologyWrapper(org.apache.kafka.streams.TopologyWrapper) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Map(java.util.Map) SourceNode(org.apache.kafka.streams.processor.internals.SourceNode) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) MockMapper(org.apache.kafka.test.MockMapper) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) TopicNameExtractor(org.apache.kafka.streams.processor.TopicNameExtractor) KeyValue(org.apache.kafka.streams.KeyValue) Instant(java.time.Instant) Bytes(org.apache.kafka.common.utils.Bytes) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) List(java.util.List) Predicate(org.apache.kafka.streams.kstream.Predicate) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) ValueJoiner(org.apache.kafka.streams.kstream.ValueJoiner) Materialized(org.apache.kafka.streams.kstream.Materialized) Pattern(java.util.regex.Pattern) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) Duration.ofMillis(java.time.Duration.ofMillis) Topology(org.apache.kafka.streams.Topology) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) HashMap(java.util.HashMap) KStream(org.apache.kafka.streams.kstream.KStream) Function(java.util.function.Function) StreamJoined(org.apache.kafka.streams.kstream.StreamJoined) ArrayList(java.util.ArrayList) ValueJoinerWithKey(org.apache.kafka.streams.kstream.ValueJoinerWithKey) JoinWindows(org.apache.kafka.streams.kstream.JoinWindows) Named(org.apache.kafka.streams.kstream.Named) ValueTransformer(org.apache.kafka.streams.kstream.ValueTransformer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Collections.emptyMap(java.util.Collections.emptyMap) KTable(org.apache.kafka.streams.kstream.KTable) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Properties(java.util.Properties) 
Consumed(org.apache.kafka.streams.kstream.Consumed) Transformer(org.apache.kafka.streams.kstream.Transformer) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) ValueTransformerWithKeySupplier(org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) TimeUnit(java.util.concurrent.TimeUnit) FailOnInvalidTimestamp(org.apache.kafka.streams.processor.FailOnInvalidTimestamp) Assert.assertNull(org.junit.Assert.assertNull) GlobalKTable(org.apache.kafka.streams.kstream.GlobalKTable) IsNull.notNullValue(org.hamcrest.core.IsNull.notNullValue) ValueTransformerWithKey(org.apache.kafka.streams.kstream.ValueTransformerWithKey) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
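
builder.stream accepts the fixed-size view returned by Arrays.asList directly because it only reads the topic names. A short, Kafka-free sketch of what that view allows and forbids:

import java.util.Arrays;
import java.util.List;

public class AsListViewSketch {
    public static void main(String[] args) {
        String[] topics = {"topic-1", "topic-2"};
        List<String> view = Arrays.asList(topics);
        // The list is backed by the array: set() writes through to it
        view.set(0, "topic-0");
        System.out.println(topics[0]); // prints topic-0
        // But the size is fixed: structural modification throws
        try {
            view.add("topic-3");
        } catch (UnsupportedOperationException e) {
            System.out.println("Arrays.asList returns a fixed-size list");
        }
    }
}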

Example 58 with Arrays.asList

Use of java.util.Arrays.asList in project kafka by apache.

From the class FetcherTest, the method testMultipleAbortMarkers:

@Test
public void testMultipleAbortMarkers() {
    buildFetcher(OffsetResetStrategy.EARLIEST, new ByteArrayDeserializer(), new ByteArrayDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    int currentOffset = 0;
    currentOffset += appendTransactionalRecords(buffer, 1L, currentOffset, new SimpleRecord(time.milliseconds(), "abort1-1".getBytes(), "value".getBytes()), new SimpleRecord(time.milliseconds(), "abort1-2".getBytes(), "value".getBytes()));
    currentOffset += abortTransaction(buffer, 1L, currentOffset);
    // Duplicate abort -- should be ignored.
    currentOffset += abortTransaction(buffer, 1L, currentOffset);
    // Now commit a transaction.
    currentOffset += appendTransactionalRecords(buffer, 1L, currentOffset, new SimpleRecord(time.milliseconds(), "commit1-1".getBytes(), "value".getBytes()), new SimpleRecord(time.milliseconds(), "commit1-2".getBytes(), "value".getBytes()));
    commitTransaction(buffer, 1L, currentOffset);
    buffer.flip();
    List<FetchResponseData.AbortedTransaction> abortedTransactions = Collections.singletonList(new FetchResponseData.AbortedTransaction().setProducerId(1).setFirstOffset(0));
    MemoryRecords records = MemoryRecords.readableRecords(buffer);
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    assertFalse(fetcher.hasCompletedFetches());
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(records, abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> fetchedRecords = fetchedRecords();
    assertTrue(fetchedRecords.containsKey(tp0));
    assertEquals(fetchedRecords.get(tp0).size(), 2);
    List<ConsumerRecord<byte[], byte[]>> fetchedConsumerRecords = fetchedRecords.get(tp0);
    Set<String> committedKeys = new HashSet<>(Arrays.asList("commit1-1", "commit1-2"));
    Set<String> actuallyCommittedKeys = new HashSet<>();
    for (ConsumerRecord<byte[], byte[]> consumerRecord : fetchedConsumerRecords) {
        actuallyCommittedKeys.add(new String(consumerRecord.key(), StandardCharsets.UTF_8));
    }
    assertEquals(actuallyCommittedKeys, committedKeys);
}
Also used : ByteBuffer(java.nio.ByteBuffer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) FetchResponseData(org.apache.kafka.common.message.FetchResponseData) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) Collections.singletonList(java.util.Collections.singletonList) Arrays.asList(java.util.Arrays.asList) ArrayList(java.util.ArrayList) Collections.emptyList(java.util.Collections.emptyList) List(java.util.List) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) MemoryRecords(org.apache.kafka.common.record.MemoryRecords) HashSet(java.util.HashSet) Test(org.junit.jupiter.api.Test)
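
Copying Arrays.asList into a HashSet, as the committedKeys comparison does, makes the final assertion independent of the order in which records were fetched. The same idiom in isolation:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetComparisonSketch {
    public static void main(String[] args) {
        Set<String> expected = new HashSet<>(Arrays.asList("commit1-1", "commit1-2"));
        Set<String> actual = new HashSet<>(Arrays.asList("commit1-2", "commit1-1"));
        // Set equality ignores ordering; List equality would not
        System.out.println(expected.equals(actual)); // true
        System.out.println(Arrays.asList("a", "b").equals(Arrays.asList("b", "a"))); // false
    }
}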

Example 59 with Arrays.asList

Use of java.util.Arrays.asList in project kafka by apache.

From the class FetcherTest, the method testReadCommittedAbortMarkerWithNoData:

@Test
public void testReadCommittedAbortMarkerWithNoData() {
    buildFetcher(OffsetResetStrategy.EARLIEST, new StringDeserializer(), new StringDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    long producerId = 1L;
    abortTransaction(buffer, producerId, 5L);
    appendTransactionalRecords(buffer, producerId, 6L, new SimpleRecord("6".getBytes(), null), new SimpleRecord("7".getBytes(), null), new SimpleRecord("8".getBytes(), null));
    commitTransaction(buffer, producerId, 9L);
    buffer.flip();
    // send the fetch
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    // Prepare the response. The aborted transactions begin at offsets which are no longer in the log
    List<FetchResponseData.AbortedTransaction> abortedTransactions = Collections.singletonList(new FetchResponseData.AbortedTransaction().setProducerId(producerId).setFirstOffset(0L));
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(MemoryRecords.readableRecords(buffer), abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetchedRecords();
    assertTrue(allFetchedRecords.containsKey(tp0));
    List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
    assertEquals(3, fetchedRecords.size());
    assertEquals(Arrays.asList(6L, 7L, 8L), collectRecordOffsets(fetchedRecords));
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ByteBuffer(java.nio.ByteBuffer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) FetchResponseData(org.apache.kafka.common.message.FetchResponseData) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) Collections.singletonList(java.util.Collections.singletonList) Arrays.asList(java.util.Arrays.asList) ArrayList(java.util.ArrayList) Collections.emptyList(java.util.Collections.emptyList) List(java.util.List) Test(org.junit.jupiter.api.Test)
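
The final assertion works because Arrays.asList(6L, 7L, 8L) autoboxes each long literal into an element of a List<Long>. One related pitfall worth noting, sketched below: a primitive array passed to Arrays.asList becomes a single element, not a list of its contents.

import java.util.Arrays;
import java.util.List;

public class AsListBoxingSketch {
    public static void main(String[] args) {
        // Varargs of long literals: each boxes to Long, giving three elements
        List<Long> offsets = Arrays.asList(6L, 7L, 8L);
        System.out.println(offsets.size()); // 3
        // A primitive array is one varargs argument: one element of type long[]
        long[] raw = {6L, 7L, 8L};
        List<long[]> wrapped = Arrays.asList(raw);
        System.out.println(wrapped.size()); // 1
    }
}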

Example 60 with Arrays.asList

Use of java.util.Arrays.asList in project kafka by apache.

From the class WorkerSinkTaskTest, the method testPollRedeliveryWithConsumerRebalance:

@Test
public void testPollRedeliveryWithConsumerRebalance() throws Exception {
    createTask(initialState);
    expectInitializeTask();
    expectTaskGetTopic(true);
    expectPollInitialAssignment();
    // If a retriable exception is thrown, we should redeliver the same batch, pausing the consumer in the meantime
    expectConsumerPoll(1);
    expectConversionAndTransformation(1);
    sinkTask.put(EasyMock.anyObject());
    EasyMock.expectLastCall().andThrow(new RetriableException("retry"));
    // Pause
    EasyMock.expect(consumer.assignment()).andReturn(INITIAL_ASSIGNMENT);
    consumer.pause(INITIAL_ASSIGNMENT);
    PowerMock.expectLastCall();
    // Empty consumer poll (all partitions are paused) with rebalance; one new partition is assigned
    EasyMock.expect(consumer.poll(Duration.ofMillis(EasyMock.anyLong()))).andAnswer(() -> {
        rebalanceListener.getValue().onPartitionsRevoked(Collections.emptySet());
        rebalanceListener.getValue().onPartitionsAssigned(Collections.singleton(TOPIC_PARTITION3));
        return ConsumerRecords.empty();
    });
    Set<TopicPartition> newAssignment = new HashSet<>(Arrays.asList(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3));
    EasyMock.expect(consumer.assignment()).andReturn(newAssignment).times(3);
    EasyMock.expect(consumer.position(TOPIC_PARTITION3)).andReturn(FIRST_OFFSET);
    sinkTask.open(Collections.singleton(TOPIC_PARTITION3));
    EasyMock.expectLastCall();
    // All partitions are re-paused in order to pause any newly-assigned partitions so that redelivery efforts can continue
    consumer.pause(newAssignment);
    EasyMock.expectLastCall();
    sinkTask.put(EasyMock.anyObject());
    EasyMock.expectLastCall().andThrow(new RetriableException("retry"));
    // Next delivery attempt fails again
    expectConsumerPoll(0);
    sinkTask.put(EasyMock.anyObject());
    EasyMock.expectLastCall().andThrow(new RetriableException("retry"));
    // Non-empty consumer poll; all initially-assigned partitions are revoked in rebalance, and new partitions are allowed to resume
    ConsumerRecord<byte[], byte[]> newRecord = new ConsumerRecord<>(TOPIC, PARTITION3, FIRST_OFFSET, RAW_KEY, RAW_VALUE);
    EasyMock.expect(consumer.poll(Duration.ofMillis(EasyMock.anyLong()))).andAnswer(() -> {
        rebalanceListener.getValue().onPartitionsRevoked(INITIAL_ASSIGNMENT);
        rebalanceListener.getValue().onPartitionsAssigned(Collections.emptyList());
        return new ConsumerRecords<>(Collections.singletonMap(TOPIC_PARTITION3, Collections.singletonList(newRecord)));
    });
    newAssignment = Collections.singleton(TOPIC_PARTITION3);
    EasyMock.expect(consumer.assignment()).andReturn(new HashSet<>(newAssignment)).times(3);
    final Map<TopicPartition, OffsetAndMetadata> offsets = INITIAL_ASSIGNMENT.stream().collect(Collectors.toMap(Function.identity(), tp -> new OffsetAndMetadata(FIRST_OFFSET)));
    sinkTask.preCommit(offsets);
    EasyMock.expectLastCall().andReturn(offsets);
    sinkTask.close(INITIAL_ASSIGNMENT);
    EasyMock.expectLastCall();
    // All partitions are resumed, as all previously paused-for-redelivery partitions were revoked
    newAssignment.forEach(tp -> {
        consumer.resume(Collections.singleton(tp));
        EasyMock.expectLastCall();
    });
    expectConversionAndTransformation(1);
    sinkTask.put(EasyMock.anyObject());
    EasyMock.expectLastCall();
    PowerMock.replayAll();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    workerTask.iteration();
    workerTask.iteration();
    workerTask.iteration();
    workerTask.iteration();
    workerTask.iteration();
    PowerMock.verifyAll();
}
Also used : Arrays(java.util.Arrays) MockTime(org.apache.kafka.common.utils.MockTime) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) Schema(org.apache.kafka.connect.data.Schema) Collections.singleton(java.util.Collections.singleton) Arrays.asList(java.util.Arrays.asList) RecordBatch(org.apache.kafka.common.record.RecordBatch) Converter(org.apache.kafka.connect.storage.Converter) After(org.junit.After) Duration(java.time.Duration) Map(java.util.Map) OffsetCommitCallback(org.apache.kafka.clients.consumer.OffsetCommitCallback) MetricName(org.apache.kafka.common.MetricName) Assert.fail(org.junit.Assert.fail) IExpectationSetters(org.easymock.IExpectationSetters) TimestampType(org.apache.kafka.common.record.TimestampType) TopicPartition(org.apache.kafka.common.TopicPartition) Time(org.apache.kafka.common.utils.Time) WakeupException(org.apache.kafka.common.errors.WakeupException) Collection(java.util.Collection) Set(java.util.Set) RetryWithToleranceOperatorTest(org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperatorTest) PowerMock(org.powermock.api.easymock.PowerMock) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) CountDownLatch(java.util.concurrent.CountDownLatch) List(java.util.List) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Assert.assertFalse(org.junit.Assert.assertFalse) SinkRecord(org.apache.kafka.connect.sink.SinkRecord) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Optional(java.util.Optional) Pattern(java.util.regex.Pattern) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) Whitebox(org.powermock.reflect.Whitebox) ConnectorTaskId(org.apache.kafka.connect.util.ConnectorTaskId) Headers(org.apache.kafka.common.header.Headers) RunWith(org.junit.runner.RunWith) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Assert.assertSame(org.junit.Assert.assertSame) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) StandaloneConfig(org.apache.kafka.connect.runtime.standalone.StandaloneConfig) HeaderConverter(org.apache.kafka.connect.storage.HeaderConverter) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) MetricGroup(org.apache.kafka.connect.runtime.ConnectMetrics.MetricGroup) PowerMockRunner(org.powermock.modules.junit4.PowerMockRunner) PowerMockIgnore(org.powermock.core.classloader.annotations.PowerMockIgnore) StringConverter(org.apache.kafka.connect.storage.StringConverter) ExecutorService(java.util.concurrent.ExecutorService) SinkConnector(org.apache.kafka.connect.sink.SinkConnector) SinkTask(org.apache.kafka.connect.sink.SinkTask) Before(org.junit.Before) Capture(org.easymock.Capture) Iterator(java.util.Iterator) PluginClassLoader(org.apache.kafka.connect.runtime.isolation.PluginClassLoader) SchemaAndValue(org.apache.kafka.connect.data.SchemaAndValue) ClusterConfigState(org.apache.kafka.connect.runtime.distributed.ClusterConfigState) Mock(org.powermock.api.easymock.annotation.Mock) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) EasyMock(org.easymock.EasyMock) Assert.assertNotEquals(org.junit.Assert.assertNotEquals) StatusBackingStore(org.apache.kafka.connect.storage.StatusBackingStore) 
TimeUnit(java.util.concurrent.TimeUnit) RetriableException(org.apache.kafka.connect.errors.RetriableException) CaptureType(org.easymock.CaptureType) Assert.assertNull(org.junit.Assert.assertNull) ConnectException(org.apache.kafka.connect.errors.ConnectException) SinkTaskMetricsGroup(org.apache.kafka.connect.runtime.WorkerSinkTask.SinkTaskMetricsGroup) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
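
newAssignment is built as new HashSet<>(Arrays.asList(...)) rather than kept as the raw Arrays.asList view: the copy is an independent, mutable set with hash-based lookups, which a consumer assignment needs. The difference in a short sketch:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MutableSetSketch {
    public static void main(String[] args) {
        List<String> fixed = Arrays.asList("tp-0", "tp-1", "tp-2");
        // Copying detaches from the fixed-size view and permits mutation
        Set<String> assignment = new HashSet<>(fixed);
        assignment.add("tp-3");
        assignment.remove("tp-0");
        System.out.println(assignment.size()); // 3, after one add and one remove
        System.out.println(fixed); // unchanged: [tp-0, tp-1, tp-2]
    }
}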

Aggregations

Arrays.asList (java.util.Arrays.asList): 65
List (java.util.List): 61
ArrayList (java.util.ArrayList): 48
Test (org.junit.Test): 32
Arrays (java.util.Arrays): 19
Collections.singletonList (java.util.Collections.singletonList): 18
Map (java.util.Map): 15
Test (org.junit.jupiter.api.Test): 15
HashMap (java.util.HashMap): 14
Collection (java.util.Collection): 12
Collectors (java.util.stream.Collectors): 12
Method (java.lang.reflect.Method): 9
Optional (java.util.Optional): 9
Collections.emptyList (java.util.Collections.emptyList): 8
TopicPartition (org.apache.kafka.common.TopicPartition): 8
Collections (java.util.Collections): 7
Employee (com.artezio.arttime.datamodel.Employee): 6
HourType (com.artezio.arttime.datamodel.HourType): 6
Hours (com.artezio.arttime.datamodel.Hours): 6
Project (com.artezio.arttime.datamodel.Project): 6