
Example 1 with ProducerFactory

Use of org.springframework.kafka.core.ProducerFactory in the spring-kafka project by spring-projects.

From the class KafkaMessageListenerContainerTests, the method testDefinedPartitions:

@Test
public void testDefinedPartitions() throws Exception {
    this.logger.info("Start defined parts");
    Map<String, Object> props = KafkaTestUtils.consumerProps("test13", "false", embeddedKafka);
    TopicPartitionOffset topic1Partition0 = new TopicPartitionOffset(topic13, 0, 0L);
    CountDownLatch initialConsumersLatch = new CountDownLatch(2);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props) {

        @Override
        protected KafkaConsumer<Integer, String> createKafkaConsumer(Map<String, Object> configs) {
            assertThat(configs).containsKey(ConsumerConfig.MAX_POLL_RECORDS_CONFIG);
            return new KafkaConsumer<Integer, String>(props) {

                @Override
                public ConsumerRecords<Integer, String> poll(Duration timeout) {
                    try {
                        return super.poll(timeout);
                    } finally {
                        initialConsumersLatch.countDown();
                    }
                }
            };
        }
    };
    ContainerProperties container1Props = new ContainerProperties(topic1Partition0);
    CountDownLatch latch1 = new CountDownLatch(2);
    container1Props.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("defined part: " + message);
        latch1.countDown();
    });
    Properties defaultProperties = new Properties();
    defaultProperties.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "42");
    Properties consumerProperties = new Properties(defaultProperties);
    container1Props.setKafkaConsumerProperties(consumerProperties);
    CountDownLatch stubbingComplete1 = new CountDownLatch(1);
    KafkaMessageListenerContainer<Integer, String> container1 = spyOnContainer(new KafkaMessageListenerContainer<>(cf, container1Props), stubbingComplete1);
    container1.setBeanName("b1");
    container1.start();
    CountDownLatch stopLatch1 = new CountDownLatch(1);
    willAnswer(invocation -> {
        try {
            return invocation.callRealMethod();
        } finally {
            stopLatch1.countDown();
        }
    }).given(spyOnConsumer(container1)).commitSync(anyMap(), any());
    stubbingComplete1.countDown();
    TopicPartitionOffset topic1Partition1 = new TopicPartitionOffset(topic13, 1, 0L);
    ContainerProperties container2Props = new ContainerProperties(topic1Partition1);
    CountDownLatch latch2 = new CountDownLatch(2);
    container2Props.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("defined part: " + message);
        latch2.countDown();
    });
    container2Props.setKafkaConsumerProperties(consumerProperties);
    CountDownLatch stubbingComplete2 = new CountDownLatch(1);
    KafkaMessageListenerContainer<Integer, String> container2 = spyOnContainer(new KafkaMessageListenerContainer<>(cf, container2Props), stubbingComplete2);
    container2.setBeanName("b2");
    container2.start();
    CountDownLatch stopLatch2 = new CountDownLatch(1);
    willAnswer(invocation -> {
        try {
            return invocation.callRealMethod();
        } finally {
            stopLatch2.countDown();
        }
    }).given(spyOnConsumer(container2)).commitSync(anyMap(), any());
    stubbingComplete2.countDown();
    assertThat(initialConsumersLatch.await(20, TimeUnit.SECONDS)).isTrue();
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic13);
    template.sendDefault(0, 0, "foo");
    template.sendDefault(1, 2, "bar");
    template.sendDefault(0, 0, "baz");
    template.sendDefault(1, 2, "qux");
    template.flush();
    assertThat(latch1.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(latch2.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(stopLatch1.await(60, TimeUnit.SECONDS)).isTrue();
    container1.stop();
    assertThat(stopLatch2.await(60, TimeUnit.SECONDS)).isTrue();
    container2.stop();
    cf = new DefaultKafkaConsumerFactory<>(props);
    // reset earliest
    ContainerProperties container3Props = new ContainerProperties(topic1Partition0, topic1Partition1);
    CountDownLatch latch3 = new CountDownLatch(4);
    container3Props.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("defined part e: " + message);
        latch3.countDown();
    });
    final CountDownLatch listenerConsumerAvailableLatch = new CountDownLatch(1);
    final CountDownLatch listenerConsumerStartLatch = new CountDownLatch(1);
    CountDownLatch stubbingComplete3 = new CountDownLatch(1);
    KafkaMessageListenerContainer<Integer, String> resettingContainer = spyOnContainer(new KafkaMessageListenerContainer<Integer, String>(cf, container3Props), stubbingComplete3);
    stubSetRunning(listenerConsumerAvailableLatch, listenerConsumerStartLatch, resettingContainer);
    resettingContainer.setBeanName("b3");
    Executors.newSingleThreadExecutor().submit(resettingContainer::start);
    CountDownLatch stopLatch3 = new CountDownLatch(1);
    assertThat(listenerConsumerAvailableLatch.await(60, TimeUnit.SECONDS)).isTrue();
    willAnswer(invocation -> {
        try {
            return invocation.callRealMethod();
        } finally {
            stopLatch3.countDown();
        }
    }).given(spyOnConsumer(resettingContainer)).commitSync(anyMap(), any());
    stubbingComplete3.countDown();
    listenerConsumerStartLatch.countDown();
    assertThat(latch3.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(stopLatch3.await(60, TimeUnit.SECONDS)).isTrue();
    resettingContainer.stop();
    assertThat(latch3.getCount()).isEqualTo(0L);
    cf = new DefaultKafkaConsumerFactory<>(props);
    // reset beginning for part 0, minus one for part 1
    topic1Partition0 = new TopicPartitionOffset(topic13, 0, -1000L);
    topic1Partition1 = new TopicPartitionOffset(topic13, 1, -1L);
    ContainerProperties container4Props = new ContainerProperties(topic1Partition0, topic1Partition1);
    CountDownLatch latch4 = new CountDownLatch(3);
    AtomicReference<String> receivedMessage = new AtomicReference<>();
    container4Props.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("defined part 0, -1: " + message);
        receivedMessage.set(message.value());
        latch4.countDown();
    });
    CountDownLatch stubbingComplete4 = new CountDownLatch(1);
    resettingContainer = spyOnContainer(new KafkaMessageListenerContainer<>(cf, container4Props), stubbingComplete4);
    resettingContainer.setBeanName("b4");
    resettingContainer.start();
    CountDownLatch stopLatch4 = new CountDownLatch(1);
    willAnswer(invocation -> {
        try {
            return invocation.callRealMethod();
        } finally {
            stopLatch4.countDown();
        }
    }).given(spyOnConsumer(resettingContainer)).commitSync(anyMap(), any());
    stubbingComplete4.countDown();
    assertThat(latch4.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(stopLatch4.await(60, TimeUnit.SECONDS)).isTrue();
    resettingContainer.stop();
    assertThat(receivedMessage.get()).isIn("baz", "qux");
    assertThat(latch4.getCount()).isEqualTo(0L);
    // reset plus one
    template.sendDefault(0, 0, "FOO");
    template.sendDefault(1, 2, "BAR");
    template.flush();
    topic1Partition0 = new TopicPartitionOffset(topic13, 0, 1L);
    topic1Partition1 = new TopicPartitionOffset(topic13, 1, 1L);
    ContainerProperties container5Props = new ContainerProperties(topic1Partition0, topic1Partition1);
    final CountDownLatch latch5 = new CountDownLatch(4);
    final List<String> messages = new ArrayList<>();
    container5Props.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("defined part 1: " + message);
        messages.add(message.value());
        latch5.countDown();
    });
    CountDownLatch stubbingComplete5 = new CountDownLatch(1);
    resettingContainer = spyOnContainer(new KafkaMessageListenerContainer<>(cf, container5Props), stubbingComplete5);
    resettingContainer.setBeanName("b5");
    resettingContainer.start();
    CountDownLatch stopLatch5 = new CountDownLatch(1);
    willAnswer(invocation -> {
        try {
            return invocation.callRealMethod();
        } finally {
            stopLatch5.countDown();
        }
    }).given(spyOnConsumer(resettingContainer)).commitSync(anyMap(), any());
    stubbingComplete5.countDown();
    assertThat(latch5.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(stopLatch5.await(60, TimeUnit.SECONDS)).isTrue();
    resettingContainer.stop();
    assertThat(messages).contains("baz", "qux", "FOO", "BAR");
    this.logger.info("+++++++++++++++++++++ Start relative reset");
    template.sendDefault(0, 0, "BAZ");
    template.sendDefault(1, 2, "QUX");
    template.sendDefault(0, 0, "FIZ");
    template.sendDefault(1, 2, "BUZ");
    template.flush();
    topic1Partition0 = new TopicPartitionOffset(topic13, 0, 1L, true);
    topic1Partition1 = new TopicPartitionOffset(topic13, 1, -1L, true);
    ContainerProperties container6Props = new ContainerProperties(topic1Partition0, topic1Partition1);
    final CountDownLatch latch6 = new CountDownLatch(4);
    final List<String> messages6 = new ArrayList<>();
    container6Props.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("defined part relative: " + message);
        messages6.add(message.value());
        latch6.countDown();
    });
    CountDownLatch stubbingComplete6 = new CountDownLatch(1);
    resettingContainer = spyOnContainer(new KafkaMessageListenerContainer<>(cf, container6Props), stubbingComplete6);
    resettingContainer.setBeanName("b6");
    resettingContainer.start();
    CountDownLatch stopLatch6 = new CountDownLatch(1);
    willAnswer(invocation -> {
        try {
            return invocation.callRealMethod();
        } finally {
            stopLatch6.countDown();
        }
    }).given(spyOnConsumer(resettingContainer)).commitSync(anyMap(), any());
    stubbingComplete6.countDown();
    assertThat(latch6.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(stopLatch6.await(60, TimeUnit.SECONDS)).isTrue();
    resettingContainer.stop();
    assertThat(messages6).hasSize(4);
    assertThat(messages6).contains("FIZ", "BAR", "QUX", "BUZ");
    this.logger.info("Stop auto parts");
}
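
Stripped of the test scaffolding (spies, latches, and the repeated offset resets), the pattern this test exercises is a listener container bound to explicit TopicPartitionOffset assignments, fed by a KafkaTemplate built from a ProducerFactory. The sketch below shows just that core wiring under stated assumptions: the broker address, topic name, group id, and class name are illustrative, not values from the test.

import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.support.TopicPartitionOffset;

public class DefinedPartitionsSketch {

    public static void main(String[] args) throws Exception {
        // Consumer assigned to an explicit partition/offset instead of subscribing to the topic.
        Map<String, Object> consumerProps = Map.of(
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",   // assumed broker
                ConsumerConfig.GROUP_ID_CONFIG, "sketch-group",              // assumed group id
                ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false,
                ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class,
                ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);

        // Read partition 0 of "demo-topic" starting at offset 0.
        ContainerProperties containerProps =
                new ContainerProperties(new TopicPartitionOffset("demo-topic", 0, 0L));
        containerProps.setMessageListener(
                (MessageListener<Integer, String>) message -> System.out.println("got: " + message.value()));
        KafkaMessageListenerContainer<Integer, String> container =
                new KafkaMessageListenerContainer<>(cf, containerProps);
        container.start();

        // ProducerFactory-backed KafkaTemplate publishing to the partition the container reads.
        Map<String, Object> producerProps = Map.of(
                ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
                ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class,
                ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(producerProps);
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
        template.setDefaultTopic("demo-topic");
        template.sendDefault(0, 1, "hello");   // partition 0, key 1, value "hello"
        template.flush();

        Thread.sleep(2_000);                   // crude wait so the listener can log the record
        container.stop();
    }
}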

Example 2 with ProducerFactory

Use of org.springframework.kafka.core.ProducerFactory in the spring-kafka project by spring-projects.

From the class KafkaMessageListenerContainerTests, the method testBatchAck:

@Test
public void testBatchAck() throws Exception {
    logger.info("Start batch ack");
    Map<String, Object> props = KafkaTestUtils.consumerProps("test6", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic7);
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("batch ack: " + message);
    });
    containerProps.setSyncCommits(true);
    containerProps.setAckMode(AckMode.BATCH);
    containerProps.setPollTimeout(100);
    CountDownLatch stubbingComplete = new CountDownLatch(1);
    KafkaMessageListenerContainer<Integer, String> container = spyOnContainer(new KafkaMessageListenerContainer<>(cf, containerProps), stubbingComplete);
    container.setBeanName("testBatchAcks");
    container.start();
    Consumer<?, ?> containerConsumer = spyOnConsumer(container);
    final CountDownLatch firstBatchLatch = new CountDownLatch(1);
    final CountDownLatch latch = new CountDownLatch(2);
    willAnswer(invocation -> {
        Map<TopicPartition, OffsetAndMetadata> map = invocation.getArgument(0);
        for (Entry<TopicPartition, OffsetAndMetadata> entry : map.entrySet()) {
            if (entry.getValue().offset() == 2) {
                firstBatchLatch.countDown();
            }
        }
        try {
            return invocation.callRealMethod();
        } finally {
            for (Entry<TopicPartition, OffsetAndMetadata> entry : map.entrySet()) {
                if (entry.getValue().offset() == 2) {
                    latch.countDown();
                }
            }
        }
    }).given(containerConsumer).commitSync(anyMap(), any());
    stubbingComplete.countDown();
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic7);
    template.sendDefault(0, 0, "foo");
    template.sendDefault(0, 0, "baz");
    template.sendDefault(1, 0, "bar");
    template.sendDefault(1, 0, "qux");
    template.flush();
    assertThat(firstBatchLatch.await(9, TimeUnit.SECONDS)).isTrue();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    Consumer<Integer, String> consumer = cf.createConsumer();
    consumer.assign(Arrays.asList(new TopicPartition(topic7, 0), new TopicPartition(topic7, 1)));
    assertThat(consumer.position(new TopicPartition(topic7, 0))).isEqualTo(2);
    assertThat(consumer.position(new TopicPartition(topic7, 1))).isEqualTo(2);
    container.stop();
    consumer.close();
    logger.info("Stop batch ack");
}
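
The essential configuration in this example is AckMode.BATCH with synchronous commits, so the container calls commitSync once per poll after the listener has processed every record in the batch. A minimal sketch of that configuration follows, assuming a hypothetical topic name ("orders"), a hypothetical class name, and a ConsumerFactory supplied by the caller.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.ContainerProperties.AckMode;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;

public class BatchAckContainerSketch {

    // Builds a container that commits offsets once per poll, after the whole batch is processed.
    public static KafkaMessageListenerContainer<Integer, String> build(ConsumerFactory<Integer, String> cf) {
        ContainerProperties props = new ContainerProperties("orders"); // hypothetical topic name
        props.setMessageListener((MessageListener<Integer, String>) BatchAckContainerSketch::handle);
        props.setSyncCommits(true);        // commitSync(...) so the commit completes before the next poll
        props.setAckMode(AckMode.BATCH);   // one commit per returned batch, which is what the test verifies
        props.setPollTimeout(100);
        return new KafkaMessageListenerContainer<>(cf, props);
    }

    private static void handle(ConsumerRecord<Integer, String> data) {
        System.out.println("batch ack: " + data.value());
    }
}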

Example 3 with ProducerFactory

Use of org.springframework.kafka.core.ProducerFactory in the spring-kafka project by spring-projects.

From the class KafkaMessageListenerContainerTests, the method testExceptionWhenCommitAfterRebalance:

@Test
public void testExceptionWhenCommitAfterRebalance() throws Exception {
    final CountDownLatch rebalanceLatch = new CountDownLatch(2);
    final CountDownLatch consumeFirstLatch = new CountDownLatch(1);
    final CountDownLatch consumeLatch = new CountDownLatch(2);
    Map<String, Object> props = KafkaTestUtils.consumerProps("test19", "false", embeddedKafka);
    props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 3_000);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic19);
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.warn("listener: " + message);
        consumeFirstLatch.countDown();
        if (consumeLatch.getCount() > 1) {
            try {
                Thread.sleep(5_000);
            } catch (InterruptedException e1) {
                Thread.currentThread().interrupt();
            }
        }
        consumeLatch.countDown();
    });
    containerProps.setSyncCommits(true);
    containerProps.setAckMode(AckMode.BATCH);
    containerProps.setPollTimeout(100);
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic19);
    containerProps.setConsumerRebalanceListener(new ConsumerRebalanceListener() {

        @Override
        public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
        }

        @Override
        public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
            logger.warn("rebalance occurred.");
            rebalanceLatch.countDown();
        }
    });
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    container.setBeanName("testContainerException");
    container.start();
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    template.sendDefault(0, 0, "a");
    assertThat(consumeFirstLatch.await(60, TimeUnit.SECONDS)).isTrue();
    // should be rebalanced and consume again
    boolean rebalancedForTooLongBetweenPolls = rebalanceLatch.await(60, TimeUnit.SECONDS);
    int n = 0;
    while (!rebalancedForTooLongBetweenPolls & n++ < 3) {
        // try a few times in case the rebalance was delayed
        template.sendDefault(0, 0, "a");
        rebalancedForTooLongBetweenPolls = rebalanceLatch.await(60, TimeUnit.SECONDS);
    }
    if (!rebalancedForTooLongBetweenPolls) {
        logger.error("Rebalance did not occur - perhaps the CI server is too busy, don't fail the test");
    }
    assertThat(consumeLatch.await(60, TimeUnit.SECONDS)).isTrue();
    container.stop();
}
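
The piece worth lifting out of this test is the ConsumerRebalanceListener registered on ContainerProperties, which is how the test observes the rebalance triggered when the listener exceeds max.poll.interval.ms. Below is a small sketch of wiring such a listener, with console logging standing in for the test's latches; the class and method names are hypothetical.

import java.util.Collection;

import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.common.TopicPartition;
import org.springframework.kafka.listener.ContainerProperties;

public class RebalanceLoggingSketch {

    // Registers a rebalance listener so partition revocations and assignments are visible,
    // the same hook the test uses to count rebalances with a latch.
    public static void addRebalanceLogging(ContainerProperties props) {
        props.setConsumerRebalanceListener(new ConsumerRebalanceListener() {

            @Override
            public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
                System.out.println("revoked: " + partitions);
            }

            @Override
            public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
                System.out.println("assigned: " + partitions);
            }
        });
    }
}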

Example 4 with ProducerFactory

Use of org.springframework.kafka.core.ProducerFactory in the spring-kafka project by spring-projects.

From the class KafkaMessageListenerContainerTests, the method testCommitsAreFlushedOnStop:

@SuppressWarnings("unchecked")
@Test
public void testCommitsAreFlushedOnStop() throws Exception {
    Map<String, Object> props = KafkaTestUtils.consumerProps("flushedOnStop", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = spy(new DefaultKafkaConsumerFactory<>(props));
    AtomicReference<Consumer<Integer, String>> consumer = new AtomicReference<>();
    willAnswer(inv -> {
        consumer.set((Consumer<Integer, String>) spy(inv.callRealMethod()));
        return consumer.get();
    }).given(cf).createConsumer(any(), any(), any(), any());
    ContainerProperties containerProps = new ContainerProperties(topic5);
    containerProps.setAckCount(1);
    // set large values, ensuring that commits don't happen before `stop()`
    containerProps.setAckTime(20000);
    containerProps.setAckCount(20000);
    containerProps.setAckMode(AckMode.COUNT_TIME);
    containerProps.setAssignmentCommitOption(AssignmentCommitOption.ALWAYS);
    final CountDownLatch latch = new CountDownLatch(4);
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        logger.info("flushed: " + message);
        latch.countDown();
    });
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    container.setBeanName("testManualFlushed");
    container.start();
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic5);
    template.sendDefault(0, 0, "foo");
    template.sendDefault(1, 2, "bar");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, 0, "fiz");
    template.sendDefault(1, 2, "buz");
    template.flush();
    // Wait until the listener has received all four records
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    // Verify that just the initial commit is processed before stop
    verify(consumer.get(), times(1)).commitSync(anyMap(), any());
    container.stop();
    // Verify that a commit has been made on stop
    verify(consumer.get(), times(2)).commitSync(anyMap(), any());
}
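
What this test relies on is AckMode.COUNT_TIME with thresholds set so high that no commit fires while the container is running, leaving only the initial assignment commit and the commit flushed by stop(). A sketch of that configuration under those assumptions, applied to a caller-provided ContainerProperties (the class name is hypothetical):

import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.ContainerProperties.AckMode;
import org.springframework.kafka.listener.ContainerProperties.AssignmentCommitOption;

public class DeferredCommitSketch {

    // COUNT_TIME acks with thresholds far beyond what a short run will reach, so the only commits
    // are the initial assignment commit and the one flushed by container.stop().
    public static void configure(ContainerProperties props) {
        props.setAckMode(AckMode.COUNT_TIME);
        props.setAckCount(20_000);
        props.setAckTime(20_000);
        props.setAssignmentCommitOption(AssignmentCommitOption.ALWAYS);
    }
}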

Example 5 with ProducerFactory

Use of org.springframework.kafka.core.ProducerFactory in the spring-kafka project by spring-projects.

From the class TransactionalContainerTests, the method testConsumeAndProduceTransactionGuts:

@SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
private void testConsumeAndProduceTransactionGuts(boolean handleError, AckMode ackMode, EOSMode eosMode, boolean stopWhenFenced) throws Exception {
    Consumer consumer = mock(Consumer.class);
    AtomicBoolean assigned = new AtomicBoolean();
    final TopicPartition topicPartition = new TopicPartition("foo", 0);
    willAnswer(i -> {
        ((ConsumerRebalanceListener) i.getArgument(1)).onPartitionsAssigned(Collections.singletonList(topicPartition));
        assigned.set(true);
        return null;
    }).given(consumer).subscribe(any(Collection.class), any(ConsumerRebalanceListener.class));
    ConsumerRecords records = new ConsumerRecords(Collections.singletonMap(topicPartition, Collections.singletonList(new ConsumerRecord<>("foo", 0, 0, "key", "value"))));
    ConsumerRecords empty = new ConsumerRecords(Collections.emptyMap());
    final AtomicBoolean done = new AtomicBoolean();
    willAnswer(i -> {
        if (done.compareAndSet(false, true)) {
            return records;
        } else {
            Thread.sleep(500);
            return empty;
        }
    }).given(consumer).poll(any(Duration.class));
    ConsumerFactory cf = mock(ConsumerFactory.class);
    willReturn(consumer).given(cf).createConsumer("group", "", null, KafkaTestUtils.defaultPropertyOverrides());
    Producer producer = mock(Producer.class);
    if (stopWhenFenced) {
        willAnswer(inv -> {
            if (assigned.get()) {
                throw new ProducerFencedException("fenced");
            }
            return null;
        }).given(producer).sendOffsetsToTransaction(any(), any(ConsumerGroupMetadata.class));
    }
    given(producer.send(any(), any())).willReturn(new SettableListenableFuture<>());
    final CountDownLatch closeLatch = new CountDownLatch(2);
    willAnswer(i -> {
        closeLatch.countDown();
        return null;
    }).given(producer).close(any());
    ProducerFactory pf = mock(ProducerFactory.class);
    given(pf.isProducerPerConsumerPartition()).willReturn(true);
    given(pf.transactionCapable()).willReturn(true);
    final List<String> transactionalIds = new ArrayList<>();
    willAnswer(i -> {
        transactionalIds.add(TransactionSupport.getTransactionIdSuffix());
        return producer;
    }).given(pf).createProducer(isNull());
    KafkaTransactionManager tm = new KafkaTransactionManager(pf);
    ContainerProperties props = new ContainerProperties("foo");
    props.setAckMode(ackMode);
    props.setGroupId("group");
    props.setTransactionManager(tm);
    props.setAssignmentCommitOption(AssignmentCommitOption.ALWAYS);
    props.setEosMode(eosMode);
    props.setStopContainerWhenFenced(stopWhenFenced);
    ConsumerGroupMetadata consumerGroupMetadata = new ConsumerGroupMetadata("group");
    given(consumer.groupMetadata()).willReturn(consumerGroupMetadata);
    final KafkaTemplate template = new KafkaTemplate(pf);
    if (AckMode.MANUAL_IMMEDIATE.equals(ackMode)) {
        props.setMessageListener((AcknowledgingMessageListener<Object, Object>) (data, acknowledgment) -> {
            template.send("bar", "baz");
            if (handleError) {
                throw new RuntimeException("fail");
            }
            acknowledgment.acknowledge();
        });
    } else {
        props.setMessageListener((MessageListener) m -> {
            template.send("bar", "baz");
            if (handleError) {
                throw new RuntimeException("fail");
            }
        });
    }
    KafkaMessageListenerContainer container = new KafkaMessageListenerContainer<>(cf, props);
    container.setBeanName("commit");
    if (handleError) {
        container.setCommonErrorHandler(new CommonErrorHandler() {
        });
    }
    CountDownLatch stopEventLatch = new CountDownLatch(1);
    AtomicReference<ConsumerStoppedEvent> stopEvent = new AtomicReference<>();
    container.setApplicationEventPublisher(event -> {
        if (event instanceof ConsumerStoppedEvent) {
            stopEvent.set((ConsumerStoppedEvent) event);
            stopEventLatch.countDown();
        }
    });
    container.start();
    assertThat(closeLatch.await(10, TimeUnit.SECONDS)).isTrue();
    InOrder inOrder = inOrder(producer);
    inOrder.verify(producer).beginTransaction();
    inOrder.verify(producer).sendOffsetsToTransaction(Collections.singletonMap(topicPartition, new OffsetAndMetadata(0)), consumerGroupMetadata);
    if (stopWhenFenced) {
        assertThat(stopEventLatch.await(10, TimeUnit.SECONDS)).isTrue();
        assertThat(stopEvent.get().getReason()).isEqualTo(Reason.FENCED);
    } else {
        inOrder.verify(producer).commitTransaction();
        inOrder.verify(producer).close(any());
        inOrder.verify(producer).beginTransaction();
        ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class);
        inOrder.verify(producer).send(captor.capture(), any(Callback.class));
        assertThat(captor.getValue()).isEqualTo(new ProducerRecord("bar", "baz"));
        inOrder.verify(producer).sendOffsetsToTransaction(Collections.singletonMap(topicPartition, new OffsetAndMetadata(1)), consumerGroupMetadata);
        inOrder.verify(producer).commitTransaction();
        inOrder.verify(producer).close(any());
        container.stop();
        verify(pf, times(2)).createProducer(isNull());
        verifyNoMoreInteractions(producer);
        assertThat(transactionalIds.get(0)).isEqualTo("group.foo.0");
        assertThat(transactionalIds.get(0)).isEqualTo("group.foo.0");
        assertThat(stopEventLatch.await(10, TimeUnit.SECONDS)).isTrue();
        assertThat(stopEvent.get().getReason()).isEqualTo(Reason.NORMAL);
    }
    assertThat(stopEvent.get().getSource()).isSameAs(container);
}
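
Outside the mocks, the production-style equivalent of this test is a transaction-capable ProducerFactory plus a KafkaTransactionManager set on the ContainerProperties, so the listener's sends and the consumed offsets commit or roll back together. The sketch below shows that wiring; the broker address, transactional id prefix, group id, and class name are assumptions for illustration.

import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.transaction.KafkaTransactionManager;

public class TransactionalContainerSketch {

    public static KafkaTemplate<String, String> configure(ContainerProperties props) {
        Map<String, Object> producerProps = Map.of(
                ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",          // assumed broker
                ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
                ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        DefaultKafkaProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(producerProps);
        pf.setTransactionIdPrefix("tx-");          // makes the factory transaction-capable
        KafkaTemplate<String, String> template = new KafkaTemplate<>(pf);

        KafkaTransactionManager<String, String> tm = new KafkaTransactionManager<>(pf);
        props.setGroupId("group");                 // assumed group id
        props.setTransactionManager(tm);           // the listener thread runs inside a Kafka transaction
        // Any template.send(...) made from the listener joins that transaction, and the container
        // sends the consumed offsets to the transaction before committing it.
        return template;
    }
}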

Aggregations

ProducerFactory (org.springframework.kafka.core.ProducerFactory): 40
Test (org.junit.jupiter.api.Test): 39
DefaultKafkaProducerFactory (org.springframework.kafka.core.DefaultKafkaProducerFactory): 34
KafkaTemplate (org.springframework.kafka.core.KafkaTemplate): 31
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 30
Duration (java.time.Duration): 29
TopicPartition (org.apache.kafka.common.TopicPartition): 29
CountDownLatch (java.util.concurrent.CountDownLatch): 28
Consumer (org.apache.kafka.clients.consumer.Consumer): 28
ArrayList (java.util.ArrayList): 27
Collection (java.util.Collection): 27
AtomicReference (java.util.concurrent.atomic.AtomicReference): 27
ConsumerRebalanceListener (org.apache.kafka.clients.consumer.ConsumerRebalanceListener): 27
ConsumerFactory (org.springframework.kafka.core.ConsumerFactory): 27
Arrays (java.util.Arrays): 26
List (java.util.List): 26
Map (java.util.Map): 26
Set (java.util.Set): 26
TimeUnit (java.util.concurrent.TimeUnit): 26
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 26