Search in sources :

Example 1 with Acknowledgment

Usage of org.springframework.kafka.support.Acknowledgment in the spring-kafka project (spring-projects):

From class KafkaMessageListenerContainerTests, method testCommitFailsOnRevoke.

// Verifies container behavior when a commit fails during partition revocation:
// the user-supplied rebalance listener acknowledges the pending record from
// onPartitionsRevokedBeforeCommit, the mocked commitSync throws
// CommitFailedException on every call after the first, and the container keeps
// running with only the partition that remains assigned after the rebalance.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
void testCommitFailsOnRevoke() throws Exception {
    ConsumerFactory<Integer, String> cf = mock(ConsumerFactory.class);
    Consumer<Integer, String> consumer = mock(Consumer.class);
    given(cf.createConsumer(eq("grp"), eq("clientId"), isNull(), any())).willReturn(consumer);
    Map<String, Object> cfProps = new LinkedHashMap<>();
    given(cf.getConfigurationProperties()).willReturn(cfProps);
    // Two partitions of topic "foo", two records each (offsets 0 and 1).
    final Map<TopicPartition, List<ConsumerRecord<Integer, String>>> records = new HashMap<>();
    TopicPartition topicPartition0 = new TopicPartition("foo", 0);
    records.put(topicPartition0, Arrays.asList(new ConsumerRecord<>("foo", 0, 0L, 1, "foo"), new ConsumerRecord<>("foo", 0, 1L, 1, "bar")));
    records.put(new TopicPartition("foo", 1), Arrays.asList(new ConsumerRecord<>("foo", 1, 0L, 1, "foo"), new ConsumerRecord<>("foo", 1, 1L, 1, "bar")));
    ConsumerRecords<Integer, String> consumerRecords = new ConsumerRecords<>(records);
    ConsumerRecords<Integer, String> emptyRecords = new ConsumerRecords<>(Collections.emptyMap());
    AtomicBoolean first = new AtomicBoolean(true);
    AtomicInteger rebalance = new AtomicInteger();
    AtomicReference<ConsumerRebalanceListener> rebal = new AtomicReference<>();
    CountDownLatch latch = new CountDownLatch(2);
    // poll() drives the scripted rebalances: the first call assigns both
    // partitions, the second revokes partition 0 only. The record batch is
    // delivered exactly once (the 'first' flag); later polls return empty.
    given(consumer.poll(any(Duration.class))).willAnswer(i -> {
        Thread.sleep(50);
        int call = rebalance.getAndIncrement();
        if (call == 0) {
            rebal.get().onPartitionsRevoked(Collections.emptyList());
            rebal.get().onPartitionsAssigned(records.keySet());
        } else if (call == 1) {
            rebal.get().onPartitionsRevoked(Collections.singletonList(topicPartition0));
            rebal.get().onPartitionsAssigned(Collections.emptyList());
        }
        latch.countDown();
        return first.getAndSet(false) ? consumerRecords : emptyRecords;
    });
    // Capture the ConsumerRebalanceListener the container passes to subscribe()
    // so the poll() answer above can invoke it.
    willAnswer(invoc -> {
        rebal.set(invoc.getArgument(1));
        return null;
    }).given(consumer).subscribe(any(Collection.class), any(ConsumerRebalanceListener.class));
    List<Map<TopicPartition, OffsetAndMetadata>> commits = new ArrayList<>();
    AtomicBoolean firstCommit = new AtomicBoolean(true);
    AtomicInteger commitCount = new AtomicInteger();
    // First commitSync succeeds; every subsequent one throws CommitFailedException.
    willAnswer(invoc -> {
        commits.add(invoc.getArgument(0, Map.class));
        if (!firstCommit.getAndSet(false)) {
            throw new CommitFailedException();
        }
        return null;
    }).given(consumer).commitSync(any(), any());
    ContainerProperties containerProps = new ContainerProperties("foo");
    containerProps.setGroupId("grp");
    containerProps.setAckMode(AckMode.MANUAL);
    containerProps.setClientId("clientId");
    containerProps.setIdleEventInterval(100L);
    AtomicReference<Acknowledgment> acknowledgment = new AtomicReference<>();
    // MANUAL ack mode: the listener only stores the Acknowledgment; it is
    // acknowledged from the revocation callback below, so a commit is attempted
    // while the revoke is being processed.
    containerProps.setMessageListener((AcknowledgingMessageListener<Object, Object>) (rec, ack) -> acknowledgment.set(ack));
    containerProps.setConsumerRebalanceListener(new ConsumerAwareRebalanceListener() {

        @Override
        public void onPartitionsRevokedBeforeCommit(Consumer<?, ?> consumer, Collection<TopicPartition> partitions) {
            if (acknowledgment.get() != null) {
                acknowledgment.get().acknowledge();
            }
        }
    });
    Properties consumerProps = new Properties();
    containerProps.setKafkaConsumerProperties(consumerProps);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    container.start();
    // Wait for both scripted polls; after the second rebalance only
    // partition 1 should remain assigned.
    assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(container.getAssignedPartitions()).hasSize(1);
    container.stop();
}
Also used : Arrays(java.util.Arrays) ArgumentMatchers.eq(org.mockito.ArgumentMatchers.eq) JsonDeserializer(org.springframework.kafka.support.serializer.JsonDeserializer) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) Reason(org.springframework.kafka.event.ConsumerStoppedEvent.Reason) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) BDDMockito.given(org.mockito.BDDMockito.given) BeforeAll(org.junit.jupiter.api.BeforeAll) Duration(java.time.Duration) Map(java.util.Map) FixedBackOff(org.springframework.util.backoff.FixedBackOff) ApplicationEventPublisher(org.springframework.context.ApplicationEventPublisher) EmbeddedKafka(org.springframework.kafka.test.context.EmbeddedKafka) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) LogAccessor(org.springframework.core.log.LogAccessor) OffsetAndTimestamp(org.apache.kafka.clients.consumer.OffsetAndTimestamp) Executors(java.util.concurrent.Executors) CountDownLatch(java.util.concurrent.CountDownLatch) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Acknowledgment(org.springframework.kafka.support.Acknowledgment) LogFactory(org.apache.commons.logging.LogFactory) Mockito.mock(org.mockito.Mockito.mock) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) AuthenticationException(org.apache.kafka.common.errors.AuthenticationException) RetriableCommitFailedException(org.apache.kafka.clients.consumer.RetriableCommitFailedException) ArgumentMatchers.anyMap(org.mockito.ArgumentMatchers.anyMap) Mockito.spy(org.mockito.Mockito.spy) EmbeddedKafkaBroker(org.springframework.kafka.test.EmbeddedKafkaBroker) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) NonResponsiveConsumerEvent(org.springframework.kafka.event.NonResponsiveConsumerEvent) 
ConsumerFactory(org.springframework.kafka.core.ConsumerFactory) TopicPartitionOffset(org.springframework.kafka.support.TopicPartitionOffset) Nullable(org.springframework.lang.Nullable) FilteringMessageListenerAdapter(org.springframework.kafka.listener.adapter.FilteringMessageListenerAdapter) ThreadPoolTaskScheduler(org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler) ArgumentMatchers.isNull(org.mockito.ArgumentMatchers.isNull) KafkaTestUtils(org.springframework.kafka.test.utils.KafkaTestUtils) Properties(java.util.Properties) AckMode(org.springframework.kafka.listener.ContainerProperties.AckMode) FencedInstanceIdException(org.apache.kafka.common.errors.FencedInstanceIdException) Mockito.times(org.mockito.Mockito.times) JsonSerializer(org.springframework.kafka.support.serializer.JsonSerializer) ApplicationEvent(org.springframework.context.ApplicationEvent) Mockito.never(org.mockito.Mockito.never) Assertions.assertThatIllegalArgumentException(org.assertj.core.api.Assertions.assertThatIllegalArgumentException) CommitFailedException(org.apache.kafka.clients.consumer.CommitFailedException) AssignmentCommitOption(org.springframework.kafka.listener.ContainerProperties.AssignmentCommitOption) Level(org.springframework.kafka.support.LogIfLevelEnabled.Level) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) BDDMockito.willThrow(org.mockito.BDDMockito.willThrow) WakeupException(org.apache.kafka.common.errors.WakeupException) Collection(java.util.Collection) ConsumerStoppingEvent(org.springframework.kafka.event.ConsumerStoppingEvent) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Collectors(java.util.stream.Collectors) RebalanceInProgressException(org.apache.kafka.common.errors.RebalanceInProgressException) 
SeekPosition(org.springframework.kafka.support.TopicPartitionOffset.SeekPosition) Test(org.junit.jupiter.api.Test) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) ErrorHandlingDeserializer(org.springframework.kafka.support.serializer.ErrorHandlingDeserializer) List(java.util.List) Mockito.inOrder(org.mockito.Mockito.inOrder) Entry(java.util.Map.Entry) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Pattern(java.util.regex.Pattern) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) ConsumerStoppedEvent(org.springframework.kafka.event.ConsumerStoppedEvent) ProducerFactory(org.springframework.kafka.core.ProducerFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) DirectFieldAccessor(org.springframework.beans.DirectFieldAccessor) AtomicReference(java.util.concurrent.atomic.AtomicReference) HashSet(java.util.HashSet) ArgumentCaptor(org.mockito.ArgumentCaptor) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ContainerTestUtils(org.springframework.kafka.test.utils.ContainerTestUtils) Assertions.assertThatIllegalStateException(org.assertj.core.api.Assertions.assertThatIllegalStateException) InOrder(org.mockito.InOrder) ConsumerPausedEvent(org.springframework.kafka.event.ConsumerPausedEvent) Iterator(java.util.Iterator) AuthorizationException(org.apache.kafka.common.errors.AuthorizationException) EmbeddedKafkaCondition(org.springframework.kafka.test.condition.EmbeddedKafkaCondition) BDDMockito.willAnswer(org.mockito.BDDMockito.willAnswer) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) MethodInterceptor(org.aopalliance.intercept.MethodInterceptor) TopicAuthorizationException(org.apache.kafka.common.errors.TopicAuthorizationException) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) BitSet(java.util.BitSet) 
ConsumerResumedEvent(org.springframework.kafka.event.ConsumerResumedEvent) Collections(java.util.Collections) LinkedHashMap(java.util.LinkedHashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Properties(java.util.Properties) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) LinkedHashMap(java.util.LinkedHashMap) Acknowledgment(org.springframework.kafka.support.Acknowledgment) ArrayList(java.util.ArrayList) List(java.util.List) AtomicReference(java.util.concurrent.atomic.AtomicReference) Duration(java.time.Duration) CountDownLatch(java.util.concurrent.CountDownLatch) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TopicPartition(org.apache.kafka.common.TopicPartition) Collection(java.util.Collection) Map(java.util.Map) ArgumentMatchers.anyMap(org.mockito.ArgumentMatchers.anyMap) LinkedHashMap(java.util.LinkedHashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) RetriableCommitFailedException(org.apache.kafka.clients.consumer.RetriableCommitFailedException) CommitFailedException(org.apache.kafka.clients.consumer.CommitFailedException) Test(org.junit.jupiter.api.Test)

Example 2 with Acknowledgment

Usage of org.springframework.kafka.support.Acknowledgment in the spring-kafka project (spring-projects):

From class KafkaMessageListenerContainerTests, method testInOrderAckPauseUntilAcked.

// Exercises MANUAL ack mode with asyncAcks enabled (out-of-order acknowledgments):
// records 3, 2, 1 of the first poll are acked before record 0, so the container
// pauses the partition until offset 0 is acknowledged, after which offsets 0-3
// are committed as a single commit of offset 4; acking the fifth record then
// commits offset 5. Invoked with both a record listener and a batch listener
// via the 'batch' flag.
// NOTE(review): the 'ackMode' parameter is never used — AckMode.MANUAL is set
// unconditionally below; confirm whether the parameter can be removed.
@SuppressWarnings("unchecked")
private void testInOrderAckPauseUntilAcked(AckMode ackMode, boolean batch) throws Exception {
    ConsumerFactory<Integer, String> cf = mock(ConsumerFactory.class);
    Consumer<Integer, String> consumer = mock(Consumer.class);
    given(cf.createConsumer(eq("grp"), eq("clientId"), isNull(), any())).willReturn(consumer);
    // First poll delivers four records (offsets 0-3); second poll delivers one
    // record (offset 4); later polls are empty.
    Map<TopicPartition, List<ConsumerRecord<Integer, String>>> records1 = new HashMap<>();
    records1.put(new TopicPartition("foo", 0), Arrays.asList(new ConsumerRecord<>("foo", 0, 0L, 1, "foo"), new ConsumerRecord<>("foo", 0, 1L, 1, "bar"), new ConsumerRecord<>("foo", 0, 2L, 1, "baz"), new ConsumerRecord<>("foo", 0, 3L, 1, "qux")));
    ConsumerRecords<Integer, String> consumerRecords1 = new ConsumerRecords<>(records1);
    Map<TopicPartition, List<ConsumerRecord<Integer, String>>> records2 = new HashMap<>();
    records2.put(new TopicPartition("foo", 0), Arrays.asList(new ConsumerRecord<>("foo", 0, 4L, 1, "fiz")));
    ConsumerRecords<Integer, String> consumerRecords2 = new ConsumerRecords<>(records2);
    ConsumerRecords<Integer, String> empty = new ConsumerRecords<>(Collections.emptyMap());
    AtomicBoolean paused = new AtomicBoolean();
    AtomicBoolean polledWhilePaused = new AtomicBoolean();
    AtomicReference<Collection<TopicPartition>> pausedParts = new AtomicReference<>(Collections.emptySet());
    final CountDownLatch pauseLatch = new CountDownLatch(1);
    // Track pause()/resume() so that paused() and the poll() answer below stay
    // consistent with the container's pause requests.
    willAnswer(inv -> {
        paused.set(true);
        pausedParts.set(inv.getArgument(0));
        pauseLatch.countDown();
        return null;
    }).given(consumer).pause(any());
    willAnswer(inv -> {
        paused.set(false);
        pausedParts.set(Collections.emptySet());
        return null;
    }).given(consumer).resume(any());
    willAnswer(inv -> {
        return pausedParts.get();
    }).given(consumer).paused();
    willAnswer(inv -> {
        return Collections.singleton(new TopicPartition("foo", 0));
    }).given(consumer).assignment();
    AtomicInteger polled = new AtomicInteger();
    given(consumer.poll(any(Duration.class))).willAnswer(i -> {
        Thread.sleep(50);
        if (paused.get()) {
            // Remember that the container kept polling while paused (asserted below).
            polledWhilePaused.set(true);
            return empty;
        } else {
            if (polled.incrementAndGet() == 1) {
                return consumerRecords1;
            } else if (polled.get() == 2) {
                return consumerRecords2;
            }
            return empty;
        }
    });
    TopicPartitionOffset topicPartition = new TopicPartitionOffset("foo", 0);
    ContainerProperties containerProps = new ContainerProperties(topicPartition);
    containerProps.setGroupId("grp");
    containerProps.setAckMode(AckMode.MANUAL);
    // asyncAcks lets the listener acknowledge records out of delivery order.
    containerProps.setAsyncAcks(true);
    containerProps.setCommitLogLevel(Level.WARN);
    final CountDownLatch latch1 = new CountDownLatch(4);
    final CountDownLatch latch2 = new CountDownLatch(5);
    final List<Acknowledgment> acks = new ArrayList<>();
    if (batch) {
        // Batch variant: acknowledge the batch only once all five records
        // (both polls) have been observed.
        BatchAcknowledgingMessageListener<Integer, String> batchML = (data, ack) -> {
            acks.add(ack);
            data.forEach(rec -> {
                latch1.countDown();
                latch2.countDown();
                if (latch2.getCount() == 0) {
                    ack.acknowledge();
                }
            });
        };
        containerProps.setMessageListener(batchML);
    } else {
        // Record variant: once all four records of the first poll arrived, ack
        // them in reverse order (3, 2, 1) leaving a gap at offset 0 — record 0
        // is acked later from the test body; the fifth record is acked when the
        // second latch reaches zero.
        AcknowledgingMessageListener<Integer, String> messageListener = (data, ack) -> {
            latch1.countDown();
            latch2.countDown();
            acks.add(ack);
            if (latch1.getCount() == 0 && records1.values().size() > 0 && records1.values().iterator().next().size() == 4) {
                acks.get(3).acknowledge();
                acks.get(2).acknowledge();
                acks.get(1).acknowledge();
            }
            if (latch2.getCount() == 0) {
                acks.get(4).acknowledge();
            }
        };
        containerProps.setMessageListener(messageListener);
    }
    final CountDownLatch commitLatch = new CountDownLatch(2);
    final List<Long> committed = new ArrayList<>();
    // Record every committed offset for the order/value assertions below.
    willAnswer(inv -> {
        Map<TopicPartition, OffsetAndMetadata> offsets = inv.getArgument(0);
        committed.add(offsets.values().iterator().next().offset());
        commitLatch.countDown();
        return null;
    }).given(consumer).commitSync(anyMap(), any());
    containerProps.setClientId("clientId");
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    container.start();
    assertThat(latch1.await(10, TimeUnit.SECONDS)).isTrue();
    // The unacked gap at offset 0 must have triggered a pause.
    assertThat(pauseLatch.await(10, TimeUnit.SECONDS)).isTrue();
    // Acking record 0 fills the gap, releasing the commit of offset 4.
    acks.get(0).acknowledge();
    assertThat(latch2.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(commitLatch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(committed.get(0)).isEqualTo(4L);
    assertThat(committed.get(1)).isEqualTo(5L);
    assertThat(polledWhilePaused.get()).isTrue();
    // Exactly two commits: offset 4 (records 0-3) then offset 5 (record 4).
    verify(consumer, times(2)).commitSync(any(), any());
    verify(consumer).commitSync(Map.of(new TopicPartition("foo", 0), new OffsetAndMetadata(4L)), Duration.ofMinutes(1));
    verify(consumer).commitSync(Map.of(new TopicPartition("foo", 0), new OffsetAndMetadata(5L)), Duration.ofMinutes(1));
    verify(consumer).pause(any());
    verify(consumer).resume(any());
    container.stop();
}
Also used : Arrays(java.util.Arrays) ArgumentMatchers.eq(org.mockito.ArgumentMatchers.eq) JsonDeserializer(org.springframework.kafka.support.serializer.JsonDeserializer) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) Reason(org.springframework.kafka.event.ConsumerStoppedEvent.Reason) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) BDDMockito.given(org.mockito.BDDMockito.given) BeforeAll(org.junit.jupiter.api.BeforeAll) Duration(java.time.Duration) Map(java.util.Map) FixedBackOff(org.springframework.util.backoff.FixedBackOff) ApplicationEventPublisher(org.springframework.context.ApplicationEventPublisher) EmbeddedKafka(org.springframework.kafka.test.context.EmbeddedKafka) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) LogAccessor(org.springframework.core.log.LogAccessor) OffsetAndTimestamp(org.apache.kafka.clients.consumer.OffsetAndTimestamp) Executors(java.util.concurrent.Executors) CountDownLatch(java.util.concurrent.CountDownLatch) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Acknowledgment(org.springframework.kafka.support.Acknowledgment) LogFactory(org.apache.commons.logging.LogFactory) Mockito.mock(org.mockito.Mockito.mock) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) AuthenticationException(org.apache.kafka.common.errors.AuthenticationException) RetriableCommitFailedException(org.apache.kafka.clients.consumer.RetriableCommitFailedException) ArgumentMatchers.anyMap(org.mockito.ArgumentMatchers.anyMap) Mockito.spy(org.mockito.Mockito.spy) EmbeddedKafkaBroker(org.springframework.kafka.test.EmbeddedKafkaBroker) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) NonResponsiveConsumerEvent(org.springframework.kafka.event.NonResponsiveConsumerEvent) 
ConsumerFactory(org.springframework.kafka.core.ConsumerFactory) TopicPartitionOffset(org.springframework.kafka.support.TopicPartitionOffset) Nullable(org.springframework.lang.Nullable) FilteringMessageListenerAdapter(org.springframework.kafka.listener.adapter.FilteringMessageListenerAdapter) ThreadPoolTaskScheduler(org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler) ArgumentMatchers.isNull(org.mockito.ArgumentMatchers.isNull) KafkaTestUtils(org.springframework.kafka.test.utils.KafkaTestUtils) Properties(java.util.Properties) AckMode(org.springframework.kafka.listener.ContainerProperties.AckMode) FencedInstanceIdException(org.apache.kafka.common.errors.FencedInstanceIdException) Mockito.times(org.mockito.Mockito.times) JsonSerializer(org.springframework.kafka.support.serializer.JsonSerializer) ApplicationEvent(org.springframework.context.ApplicationEvent) Mockito.never(org.mockito.Mockito.never) Assertions.assertThatIllegalArgumentException(org.assertj.core.api.Assertions.assertThatIllegalArgumentException) CommitFailedException(org.apache.kafka.clients.consumer.CommitFailedException) AssignmentCommitOption(org.springframework.kafka.listener.ContainerProperties.AssignmentCommitOption) Level(org.springframework.kafka.support.LogIfLevelEnabled.Level) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) BDDMockito.willThrow(org.mockito.BDDMockito.willThrow) WakeupException(org.apache.kafka.common.errors.WakeupException) Collection(java.util.Collection) ConsumerStoppingEvent(org.springframework.kafka.event.ConsumerStoppingEvent) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Collectors(java.util.stream.Collectors) RebalanceInProgressException(org.apache.kafka.common.errors.RebalanceInProgressException) 
SeekPosition(org.springframework.kafka.support.TopicPartitionOffset.SeekPosition) Test(org.junit.jupiter.api.Test) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) ErrorHandlingDeserializer(org.springframework.kafka.support.serializer.ErrorHandlingDeserializer) List(java.util.List) Mockito.inOrder(org.mockito.Mockito.inOrder) Entry(java.util.Map.Entry) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Pattern(java.util.regex.Pattern) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) ConsumerStoppedEvent(org.springframework.kafka.event.ConsumerStoppedEvent) ProducerFactory(org.springframework.kafka.core.ProducerFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) DirectFieldAccessor(org.springframework.beans.DirectFieldAccessor) AtomicReference(java.util.concurrent.atomic.AtomicReference) HashSet(java.util.HashSet) ArgumentCaptor(org.mockito.ArgumentCaptor) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ContainerTestUtils(org.springframework.kafka.test.utils.ContainerTestUtils) Assertions.assertThatIllegalStateException(org.assertj.core.api.Assertions.assertThatIllegalStateException) InOrder(org.mockito.InOrder) ConsumerPausedEvent(org.springframework.kafka.event.ConsumerPausedEvent) Iterator(java.util.Iterator) AuthorizationException(org.apache.kafka.common.errors.AuthorizationException) EmbeddedKafkaCondition(org.springframework.kafka.test.condition.EmbeddedKafkaCondition) BDDMockito.willAnswer(org.mockito.BDDMockito.willAnswer) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) MethodInterceptor(org.aopalliance.intercept.MethodInterceptor) TopicAuthorizationException(org.apache.kafka.common.errors.TopicAuthorizationException) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) BitSet(java.util.BitSet) 
ConsumerResumedEvent(org.springframework.kafka.event.ConsumerResumedEvent) Collections(java.util.Collections) LinkedHashMap(java.util.LinkedHashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) Acknowledgment(org.springframework.kafka.support.Acknowledgment) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) ArrayList(java.util.ArrayList) List(java.util.List) TopicPartitionOffset(org.springframework.kafka.support.TopicPartitionOffset) AtomicReference(java.util.concurrent.atomic.AtomicReference) Duration(java.time.Duration) CountDownLatch(java.util.concurrent.CountDownLatch) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TopicPartition(org.apache.kafka.common.TopicPartition) Collection(java.util.Collection)

Example 3 with Acknowledgment

Usage of org.springframework.kafka.support.Acknowledgment in the spring-kafka project (spring-projects):

From class KafkaMessageListenerContainerTests, method testDelegateType.

// Integration test (embedded Kafka broker; 'embeddedKafka' and 'topic3' are
// class fields declared elsewhere in this test class). Verifies that the
// container detects the delegate listener type at runtime: a plain
// MessageListener and a doubly-nested FilteringMessageListenerAdapter are
// detected as SIMPLE, while a wrapped AcknowledgingConsumerAwareMessageListener
// is detected as ACKNOWLEDGING_CONSUMER_AWARE. Also verifies that the
// ConsumerStoppingEvent exposes per-partition positions on stop, and that a
// redundant stop() returns promptly.
@Test
public void testDelegateType() throws Exception {
    Map<String, Object> props = KafkaTestUtils.consumerProps("delegate", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic3);
    containerProps.setShutdownTimeout(60_000L);
    final AtomicReference<StackTraceElement[]> trace = new AtomicReference<>();
    final CountDownLatch latch1 = new CountDownLatch(1);
    // Capture the call stack at delivery time; the stack-based assertions are
    // commented out below because frames are environment dependent.
    containerProps.setMessageListener((MessageListener<Integer, String>) record -> {
        trace.set(new RuntimeException().getStackTrace());
        latch1.countDown();
    });
    ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
    scheduler.setPoolSize(10);
    scheduler.initialize();
    containerProps.setConsumerTaskExecutor(scheduler);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    container.setBeanName("delegate");
    AtomicReference<List<TopicPartitionOffset>> offsets = new AtomicReference<>();
    // On ConsumerStoppingEvent, snapshot the consumer position of every
    // assigned partition; asserted after the first stop() below.
    container.setApplicationEventPublisher(e -> {
        if (e instanceof ConsumerStoppingEvent) {
            ConsumerStoppingEvent event = (ConsumerStoppingEvent) e;
            offsets.set(event.getPartitions().stream().map(p -> new TopicPartitionOffset(p.topic(), p.partition(), event.getConsumer().position(p, Duration.ofMillis(10_000)))).collect(Collectors.toList()));
        }
    });
    assertThat(container.getGroupId()).isEqualTo("delegate");
    container.start();
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic3);
    template.sendDefault(0, 0, "foo");
    template.flush();
    assertThat(latch1.await(10, TimeUnit.SECONDS)).isTrue();
    // Stack traces are environment dependent - verified in eclipse
    // assertThat(trace.get()[1].getMethodName()).contains("invokeRecordListener");
    container.stop();
    // Partition 0 received the record, so its position advanced to 1; all
    // other partitions stay at 0.
    List<TopicPartitionOffset> list = offsets.get();
    assertThat(list).isNotNull();
    list.forEach(tpio -> {
        if (tpio.getPartition() == 0) {
            assertThat(tpio.getOffset()).isEqualTo(1);
        } else {
            assertThat(tpio.getOffset()).isEqualTo(0);
        }
    });
    final CountDownLatch latch2 = new CountDownLatch(1);
    // Filtering adapter around a plain listener; the d -> false predicate
    // filters nothing out (records still reach the delegate).
    FilteringMessageListenerAdapter<Integer, String> filtering = new FilteringMessageListenerAdapter<>(m -> {
        trace.set(new RuntimeException().getStackTrace());
        latch2.countDown();
    }, d -> false);
    // two levels of nesting
    filtering = new FilteringMessageListenerAdapter<>(filtering, d -> false);
    container.getContainerProperties().setMessageListener(filtering);
    container.start();
    // A doubly-nested plain listener is still detected as SIMPLE.
    assertThat(KafkaTestUtils.getPropertyValue(container, "listenerConsumer.listenerType")).isEqualTo(ListenerType.SIMPLE);
    template.sendDefault(0, 0, "foo");
    assertThat(latch2.await(10, TimeUnit.SECONDS)).isTrue();
    // verify that the container called the right method - avoiding the creation of an Acknowledgment
    // assertThat(trace.get()[1].getMethodName()).contains("onMessage"); // onMessage(d, a, c) (inner)
    // assertThat(trace.get()[2].getMethodName()).contains("onMessage"); // bridge
    // assertThat(trace.get()[3].getMethodName()).contains("onMessage"); // onMessage(d, a, c) (outer)
    // assertThat(trace.get()[4].getMethodName()).contains("onMessage"); // onMessage(d)
    // assertThat(trace.get()[5].getMethodName()).contains("onMessage"); // bridge
    // assertThat(trace.get()[6].getMethodName()).contains("invokeRecordListener");
    container.stop();
    final CountDownLatch latch3 = new CountDownLatch(1);
    // Wrapping an AcknowledgingConsumerAwareMessageListener changes the
    // detected listener type.
    filtering = new FilteringMessageListenerAdapter<>((AcknowledgingConsumerAwareMessageListener<Integer, String>) (d, a, c) -> {
        trace.set(new RuntimeException().getStackTrace());
        latch3.countDown();
    }, d -> false);
    container.getContainerProperties().setMessageListener(filtering);
    container.start();
    assertThat(KafkaTestUtils.getPropertyValue(container, "listenerConsumer.listenerType")).isEqualTo(ListenerType.ACKNOWLEDGING_CONSUMER_AWARE);
    template.sendDefault(0, 0, "foo");
    assertThat(latch3.await(10, TimeUnit.SECONDS)).isTrue();
    // verify that the container called the 3 arg method directly
    // int i = 0;
    // if (trace.get()[1].getClassName().endsWith("AcknowledgingConsumerAwareMessageListener")) {
    // // this frame does not appear in eclise, but does in gradle.\
    // i++;
    // }
    // assertThat(trace.get()[i + 1].getMethodName()).contains("onMessage"); // onMessage(d, a, c)
    // assertThat(trace.get()[i + 2].getMethodName()).contains("onMessage"); // bridge
    // assertThat(trace.get()[i + 3].getMethodName()).contains("invokeRecordListener");
    container.stop();
    // Stopping an already-stopped container must return well within 5 seconds.
    long t = System.currentTimeMillis();
    container.stop();
    assertThat(System.currentTimeMillis() - t).isLessThan(5000L);
    pf.destroy();
    scheduler.shutdown();
}
Also used : Arrays(java.util.Arrays) ArgumentMatchers.eq(org.mockito.ArgumentMatchers.eq) JsonDeserializer(org.springframework.kafka.support.serializer.JsonDeserializer) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) Reason(org.springframework.kafka.event.ConsumerStoppedEvent.Reason) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) BDDMockito.given(org.mockito.BDDMockito.given) BeforeAll(org.junit.jupiter.api.BeforeAll) Duration(java.time.Duration) Map(java.util.Map) FixedBackOff(org.springframework.util.backoff.FixedBackOff) ApplicationEventPublisher(org.springframework.context.ApplicationEventPublisher) EmbeddedKafka(org.springframework.kafka.test.context.EmbeddedKafka) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) LogAccessor(org.springframework.core.log.LogAccessor) OffsetAndTimestamp(org.apache.kafka.clients.consumer.OffsetAndTimestamp) Executors(java.util.concurrent.Executors) CountDownLatch(java.util.concurrent.CountDownLatch) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Acknowledgment(org.springframework.kafka.support.Acknowledgment) LogFactory(org.apache.commons.logging.LogFactory) Mockito.mock(org.mockito.Mockito.mock) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) AuthenticationException(org.apache.kafka.common.errors.AuthenticationException) RetriableCommitFailedException(org.apache.kafka.clients.consumer.RetriableCommitFailedException) ArgumentMatchers.anyMap(org.mockito.ArgumentMatchers.anyMap) Mockito.spy(org.mockito.Mockito.spy) EmbeddedKafkaBroker(org.springframework.kafka.test.EmbeddedKafkaBroker) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) NonResponsiveConsumerEvent(org.springframework.kafka.event.NonResponsiveConsumerEvent) 
ConsumerFactory(org.springframework.kafka.core.ConsumerFactory) TopicPartitionOffset(org.springframework.kafka.support.TopicPartitionOffset) Nullable(org.springframework.lang.Nullable) FilteringMessageListenerAdapter(org.springframework.kafka.listener.adapter.FilteringMessageListenerAdapter) ThreadPoolTaskScheduler(org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler) ArgumentMatchers.isNull(org.mockito.ArgumentMatchers.isNull) KafkaTestUtils(org.springframework.kafka.test.utils.KafkaTestUtils) Properties(java.util.Properties) AckMode(org.springframework.kafka.listener.ContainerProperties.AckMode) FencedInstanceIdException(org.apache.kafka.common.errors.FencedInstanceIdException) Mockito.times(org.mockito.Mockito.times) JsonSerializer(org.springframework.kafka.support.serializer.JsonSerializer) ApplicationEvent(org.springframework.context.ApplicationEvent) Mockito.never(org.mockito.Mockito.never) Assertions.assertThatIllegalArgumentException(org.assertj.core.api.Assertions.assertThatIllegalArgumentException) CommitFailedException(org.apache.kafka.clients.consumer.CommitFailedException) AssignmentCommitOption(org.springframework.kafka.listener.ContainerProperties.AssignmentCommitOption) Level(org.springframework.kafka.support.LogIfLevelEnabled.Level) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) BDDMockito.willThrow(org.mockito.BDDMockito.willThrow) WakeupException(org.apache.kafka.common.errors.WakeupException) Collection(java.util.Collection) ConsumerStoppingEvent(org.springframework.kafka.event.ConsumerStoppingEvent) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Collectors(java.util.stream.Collectors) RebalanceInProgressException(org.apache.kafka.common.errors.RebalanceInProgressException) 
SeekPosition(org.springframework.kafka.support.TopicPartitionOffset.SeekPosition) Test(org.junit.jupiter.api.Test) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) ErrorHandlingDeserializer(org.springframework.kafka.support.serializer.ErrorHandlingDeserializer) List(java.util.List) Mockito.inOrder(org.mockito.Mockito.inOrder) Entry(java.util.Map.Entry) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Pattern(java.util.regex.Pattern) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) ConsumerStoppedEvent(org.springframework.kafka.event.ConsumerStoppedEvent) ProducerFactory(org.springframework.kafka.core.ProducerFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) DirectFieldAccessor(org.springframework.beans.DirectFieldAccessor) AtomicReference(java.util.concurrent.atomic.AtomicReference) HashSet(java.util.HashSet) ArgumentCaptor(org.mockito.ArgumentCaptor) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ContainerTestUtils(org.springframework.kafka.test.utils.ContainerTestUtils) Assertions.assertThatIllegalStateException(org.assertj.core.api.Assertions.assertThatIllegalStateException) InOrder(org.mockito.InOrder) ConsumerPausedEvent(org.springframework.kafka.event.ConsumerPausedEvent) Iterator(java.util.Iterator) AuthorizationException(org.apache.kafka.common.errors.AuthorizationException) EmbeddedKafkaCondition(org.springframework.kafka.test.condition.EmbeddedKafkaCondition) BDDMockito.willAnswer(org.mockito.BDDMockito.willAnswer) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) MethodInterceptor(org.aopalliance.intercept.MethodInterceptor) TopicAuthorizationException(org.apache.kafka.common.errors.TopicAuthorizationException) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) BitSet(java.util.BitSet) 
ConsumerResumedEvent(org.springframework.kafka.event.ConsumerResumedEvent) Collections(java.util.Collections) ThreadPoolTaskScheduler(org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler) ArrayList(java.util.ArrayList) List(java.util.List) ConsumerStoppingEvent(org.springframework.kafka.event.ConsumerStoppingEvent) FilteringMessageListenerAdapter(org.springframework.kafka.listener.adapter.FilteringMessageListenerAdapter) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) TopicPartitionOffset(org.springframework.kafka.support.TopicPartitionOffset) AtomicReference(java.util.concurrent.atomic.AtomicReference) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) CountDownLatch(java.util.concurrent.CountDownLatch) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Test(org.junit.jupiter.api.Test)

Example 4 with Acknowledgment

Use of org.springframework.kafka.support.Acknowledgment in the spring-kafka project (spring-projects).

This example is taken from the class MessagingMessageListenerAdapterTests, method testFallbackType().

@Test
void testFallbackType() {
    // Local adapter subclass that simply forwards each record through
    // toMessagingMessage(), so the configured converter is exercised.
    final class FallbackAdapter extends MessagingMessageListenerAdapter<String, String> implements AcknowledgingMessageListener<String, String> {

        private FallbackAdapter() {
            super(null, null);
        }

        @Override
        public void onMessage(ConsumerRecord<String, String> record, Acknowledgment acknowledgment) {
            toMessagingMessage(record, acknowledgment, null);
        }
    }
    // Mock collaborators first, then wire up the adapter under test.
    RecordMessageConverter mockConverter = mock(RecordMessageConverter.class);
    Acknowledgment ackMock = mock(Acknowledgment.class);
    ConsumerRecord<String, String> record = new ConsumerRecord<>("foo", 1, 1L, null, null);
    willReturn(new GenericMessage<>("foo")).given(mockConverter).toMessage(record, ackMock, null, String.class);
    FallbackAdapter listenerAdapter = new FallbackAdapter();
    listenerAdapter.setFallbackType(String.class);
    listenerAdapter.setMessageConverter(mockConverter);
    listenerAdapter.onMessage(record, ackMock);
    // The fallback type set on the adapter must be propagated to the converter.
    verify(mockConverter).toMessage(record, ackMock, null, String.class);
}
Also used : RecordMessageConverter(org.springframework.kafka.support.converter.RecordMessageConverter) Acknowledgment(org.springframework.kafka.support.Acknowledgment) AcknowledgingMessageListener(org.springframework.kafka.listener.AcknowledgingMessageListener) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Test(org.junit.jupiter.api.Test)

Example 5 with Acknowledgment

Use of org.springframework.kafka.support.Acknowledgment in the spring-kafka project (spring-projects).

This example is taken from the class KafkaTemplateTests, method testWithMessage().

@Test
void testWithMessage() {
    // Verifies that KafkaTemplate.send(Message) maps spring-messaging headers to
    // Kafka record headers (and back via MessagingMessageConverter).
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    try {
        // autoFlush = true so each send is immediately visible to the consumer.
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
        // RECEIVED_TOPIC is an inbound-only header and must NOT be propagated to the record.
        Message<String> message1 = MessageBuilder.withPayload("foo-message").setHeader(KafkaHeaders.TOPIC, INT_KEY_TOPIC).setHeader(KafkaHeaders.PARTITION_ID, 0).setHeader("foo", "bar").setHeader(KafkaHeaders.RECEIVED_TOPIC, "dummy").build();
        template.send(message1);
        ConsumerRecord<Integer, String> r1 = KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC);
        assertThat(r1).has(value("foo-message"));
        // Exactly two record headers are expected: the custom "foo" header and the
        // JSON type-id header added by the default header mapper.
        Iterator<Header> iterator = r1.headers().iterator();
        assertThat(iterator.hasNext()).isTrue();
        Header next = iterator.next();
        assertThat(next.key()).isEqualTo("foo");
        assertThat(new String(next.value())).isEqualTo("bar");
        assertThat(iterator.hasNext()).isTrue();
        next = iterator.next();
        assertThat(next.key()).isEqualTo(DefaultKafkaHeaderMapper.JSON_TYPES);
        assertThat(iterator.hasNext()).as("Expected no more headers").isFalse();
        // A TIMESTAMP header must be used as the record's (CREATE_TIME) timestamp.
        Message<String> message2 = MessageBuilder.withPayload("foo-message-2").setHeader(KafkaHeaders.TOPIC, INT_KEY_TOPIC).setHeader(KafkaHeaders.PARTITION_ID, 0).setHeader(KafkaHeaders.TIMESTAMP, 1487694048615L).setHeader("foo", "bar").build();
        template.send(message2);
        ConsumerRecord<Integer, String> r2 = KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC);
        assertThat(r2).has(value("foo-message-2"));
        assertThat(r2).has(timestamp(1487694048615L));
        // Inbound conversion: the record plus ack/consumer must surface as message headers.
        MessagingMessageConverter messageConverter = new MessagingMessageConverter();
        Acknowledgment ack = mock(Acknowledgment.class);
        Consumer<?, ?> mockConsumer = mock(Consumer.class);
        KafkaUtils.setConsumerGroupId("test.group.id");
        try {
            Message<?> recordToMessage = messageConverter.toMessage(r2, ack, mockConsumer, String.class);
            assertThat(recordToMessage.getHeaders().get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
            assertThat(recordToMessage.getHeaders().get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(1487694048615L);
            assertThat(recordToMessage.getHeaders().get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(INT_KEY_TOPIC);
            assertThat(recordToMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT)).isSameAs(ack);
            assertThat(recordToMessage.getHeaders().get(KafkaHeaders.CONSUMER)).isSameAs(mockConsumer);
            assertThat(recordToMessage.getHeaders().get("foo")).isEqualTo("bar");
            assertThat(recordToMessage.getPayload()).isEqualTo("foo-message-2");
            assertThat(recordToMessage.getHeaders().get(KafkaHeaders.GROUP_ID)).isEqualTo("test.group.id");
        }
        finally {
            // Always clear the thread-bound group id; a failed assertion must not
            // leak it into subsequent tests on this thread.
            KafkaUtils.clearConsumerGroupId();
        }
    }
    finally {
        // Always release the producer factory, even when an assertion fails.
        pf.destroy();
    }
}
Also used : MessagingMessageConverter(org.springframework.kafka.support.converter.MessagingMessageConverter) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Header(org.apache.kafka.common.header.Header) Acknowledgment(org.springframework.kafka.support.Acknowledgment) Test(org.junit.jupiter.api.Test)

Aggregations

Acknowledgment (org.springframework.kafka.support.Acknowledgment)25 Test (org.junit.jupiter.api.Test)20 DefaultKafkaConsumerFactory (org.springframework.kafka.core.DefaultKafkaConsumerFactory)17 DefaultKafkaProducerFactory (org.springframework.kafka.core.DefaultKafkaProducerFactory)15 KafkaTemplate (org.springframework.kafka.core.KafkaTemplate)15 ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord)12 MessageHeaders (org.springframework.messaging.MessageHeaders)12 CountDownLatch (java.util.concurrent.CountDownLatch)11 QueueChannel (org.springframework.integration.channel.QueueChannel)11 ContainerProperties (org.springframework.kafka.listener.ContainerProperties)11 KafkaMessageListenerContainer (org.springframework.kafka.listener.KafkaMessageListenerContainer)11 Message (org.springframework.messaging.Message)11 Type (java.lang.reflect.Type)10 List (java.util.List)10 ErrorMessage (org.springframework.messaging.support.ErrorMessage)10 ArrayList (java.util.ArrayList)8 HashMap (java.util.HashMap)8 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)8 MessagingMessageConverter (org.springframework.kafka.support.converter.MessagingMessageConverter)8 Duration (java.time.Duration)7