Example 1 with KafkaConsumerProperties

Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.

From class KafkaStreamsBinderUtils, method prepareConsumerBinding:

static void prepareConsumerBinding(String name, String group, ApplicationContext context,
        KafkaTopicProvisioner kafkaTopicProvisioner,
        KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
        ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties,
        RetryTemplate retryTemplate, ConfigurableListableBeanFactory beanFactory,
        String bindingName,
        KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
        StreamsBuilderFactoryBean streamsBuilderFactoryBean) {
    ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties = (ExtendedConsumerProperties) properties;
    if (binderConfigurationProperties.getDeserializationExceptionHandler() == DeserializationExceptionHandler.sendToDlq) {
        extendedConsumerProperties.getExtension().setEnableDlq(true);
    }
    // Check for a deserialization exception handler on the consumer binding, as that takes precedence.
    final DeserializationExceptionHandler deserializationExceptionHandler = properties.getExtension().getDeserializationExceptionHandler();
    if (deserializationExceptionHandler == DeserializationExceptionHandler.sendToDlq) {
        extendedConsumerProperties.getExtension().setEnableDlq(true);
    }
    String[] inputTopics = StringUtils.commaDelimitedListToStringArray(name);
    for (String inputTopic : inputTopics) {
        kafkaTopicProvisioner.provisionConsumerDestination(inputTopic, group, extendedConsumerProperties);
    }
    if (extendedConsumerProperties.getExtension().isEnableDlq()) {
        Map<String, DlqPartitionFunction> partitionFunctions = context.getBeansOfType(DlqPartitionFunction.class, false, false);
        boolean oneFunctionPresent = partitionFunctions.size() == 1;
        Integer dlqPartitions = extendedConsumerProperties.getExtension().getDlqPartitions();
        DlqPartitionFunction partitionFunction = oneFunctionPresent
                ? partitionFunctions.values().iterator().next()
                : DlqPartitionFunction.determineFallbackFunction(dlqPartitions, LOGGER);
        ProducerFactory<byte[], byte[]> producerFactory = getProducerFactory(
                new ExtendedProducerProperties<>(extendedConsumerProperties.getExtension().getDlqProducerProperties()),
                binderConfigurationProperties);
        kafkaStreamsBindingInformationCatalogue.addDlqProducerFactory(streamsBuilderFactoryBean, producerFactory);
        KafkaOperations<byte[], byte[]> kafkaTemplate = new KafkaTemplate<>(producerFactory);
        Map<String, DlqDestinationResolver> dlqDestinationResolvers = context.getBeansOfType(DlqDestinationResolver.class, false, false);
        BiFunction<ConsumerRecord<?, ?>, Exception, TopicPartition> destinationResolver =
                dlqDestinationResolvers.isEmpty()
                        ? (cr, e) -> new TopicPartition(extendedConsumerProperties.getExtension().getDlqName(),
                                partitionFunction.apply(group, cr, e))
                        : (cr, e) -> new TopicPartition(dlqDestinationResolvers.values().iterator().next().apply(cr, e),
                                partitionFunction.apply(group, cr, e));
        DeadLetterPublishingRecoverer kafkaStreamsBinderDlqRecoverer =
                !dlqDestinationResolvers.isEmpty() || !StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName())
                        ? new DeadLetterPublishingRecoverer(kafkaTemplate, destinationResolver)
                        : null;
        for (String inputTopic : inputTopics) {
            if (StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName()) && dlqDestinationResolvers.isEmpty()) {
                destinationResolver = (cr, e) -> new TopicPartition("error." + inputTopic + "." + group, partitionFunction.apply(group, cr, e));
                kafkaStreamsBinderDlqRecoverer = new DeadLetterPublishingRecoverer(kafkaTemplate, destinationResolver);
            }
            SendToDlqAndContinue sendToDlqAndContinue = context.getBean(SendToDlqAndContinue.class);
            sendToDlqAndContinue.addKStreamDlqDispatch(inputTopic, kafkaStreamsBinderDlqRecoverer);
        }
    }
    if (!StringUtils.hasText(properties.getRetryTemplateName())) {
        @SuppressWarnings("unchecked")
        BeanDefinition retryTemplateBeanDefinition = BeanDefinitionBuilder
                .genericBeanDefinition((Class<RetryTemplate>) retryTemplate.getClass(), () -> retryTemplate)
                .getRawBeanDefinition();
        ((BeanDefinitionRegistry) beanFactory).registerBeanDefinition(bindingName + "-RetryTemplate", retryTemplateBeanDefinition);
    }
}
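The method above looks up at most one DlqPartitionFunction and one DlqDestinationResolver bean in the application context. A minimal sketch of supplying both, assuming the functional shapes invoked above ((group, record, exception) -> partition and (record, exception) -> topic); the partition and topic choices are illustrative:

import org.springframework.cloud.stream.binder.kafka.utils.DlqDestinationResolver;
import org.springframework.cloud.stream.binder.kafka.utils.DlqPartitionFunction;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class DlqCustomizationConfig {

    // With exactly one DlqPartitionFunction bean present, prepareConsumerBinding
    // uses it instead of the fallback; here every DLQ record goes to partition 0.
    @Bean
    DlqPartitionFunction dlqPartitionFunction() {
        return (group, record, exception) -> 0;
    }

    // A single DlqDestinationResolver bean overrides the configured dlqName and
    // the default "error.<topic>.<group>" destination.
    @Bean
    DlqDestinationResolver dlqDestinationResolver() {
        return (record, exception) -> record.topic() + ".failures";
    }
}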
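The final block registers the binder-supplied RetryTemplate as "<bindingName>-RetryTemplate" only when the binding does not name one of its own. A minimal sketch of providing a named template instead, assuming the binding's retryTemplateName property is set to myBindingRetryTemplate (the name and policy values are illustrative):

import org.springframework.context.annotation.Bean;
import org.springframework.retry.backoff.FixedBackOffPolicy;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;

@Bean
RetryTemplate myBindingRetryTemplate() {
    RetryTemplate template = new RetryTemplate();
    template.setRetryPolicy(new SimpleRetryPolicy(3)); // at most three delivery attempts
    FixedBackOffPolicy backOff = new FixedBackOffPolicy();
    backOff.setBackOffPeriod(2_000L); // two seconds between attempts
    template.setBackOffPolicy(backOff);
    return template;
}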

Example 2 with KafkaConsumerProperties

Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.

From class ReactorKafkaBinder, method createConsumerEndpoint:

@Override
protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<KafkaConsumerProperties> properties) throws Exception {
    boolean anonymous = !StringUtils.hasText(group);
    String consumerGroup = anonymous ? "anonymous." + UUID.randomUUID().toString() : group;
    Map<String, Object> configs = BindingUtils.createConsumerConfigs(anonymous, consumerGroup, properties, this.configurationProperties);
    if (this.consumerConfigCustomizer != null) {
        this.consumerConfigCustomizer.configure(configs, properties.getBindingName(), destination.getName());
    }
    MessageConverter converter = BindingUtils.getConsumerMessageConverter(getApplicationContext(), properties, this.configurationProperties);
    Assert.isInstanceOf(RecordMessageConverter.class, converter);
    ReceiverOptions<Object, Object> opts = ReceiverOptions.create(configs)
            .addAssignListener(parts -> logger.info("Assigned: " + parts))
            .subscription(Collections.singletonList(destination.getName()));
    class ReactorMessageProducer extends MessageProducerSupport {

        private final List<KafkaReceiver<Object, Object>> receivers = new ArrayList<>();

        ReactorMessageProducer() {
            for (int i = 0; i < properties.getConcurrency(); i++) {
                this.receivers.add(KafkaReceiver.create(opts));
            }
        }

        @SuppressWarnings("unchecked")
        @Override
        protected void doStart() {
            List<Flux<Message<Object>>> fluxes = new ArrayList<>();
            int concurrency = properties.getConcurrency();
            for (int i = 0; i < concurrency; i++) {
                fluxes.add(this.receivers.get(i).receive()
                        .map(record -> (Message<Object>) ((RecordMessageConverter) converter)
                                .toMessage(record, null, null, null)));
            }
            if (concurrency == 1) {
                subscribeToPublisher(fluxes.get(0));
            } else {
                subscribeToPublisher(Flux.merge(fluxes));
            }
        }
    }
    return new ReactorMessageProducer();
}
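Stripped of the binder plumbing, the endpoint above boils down to reactor-kafka's KafkaReceiver. A minimal standalone sketch of the same receive loop; the broker address, group id, and topic are placeholders:

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import reactor.core.publisher.Flux;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.receiver.ReceiverRecord;

Map<String, Object> configs = Map.of(
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
        ConsumerConfig.GROUP_ID_CONFIG, "example-group",
        ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

ReceiverOptions<String, String> options = ReceiverOptions.<String, String>create(configs)
        .subscription(Collections.singletonList("example-topic"));

Flux<ReceiverRecord<String, String>> inbound = KafkaReceiver.create(options).receive();
inbound.subscribe(record -> {
    System.out.println(record.value());
    record.receiverOffset().acknowledge(); // mark the offset ready for commit
});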
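The consumerConfigCustomizer hook above lets an application adjust the raw Kafka consumer configs per binding before the receiver is built. A minimal sketch of such a bean, assuming ConsumerConfigCustomizer keeps the three-argument configure(configs, bindingName, destination) shape called above; the binding name and setting are illustrative:

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.cloud.stream.binder.kafka.support.ConsumerConfigCustomizer;
import org.springframework.context.annotation.Bean;

@Bean
ConsumerConfigCustomizer consumerConfigCustomizer() {
    return (configs, bindingName, destination) -> {
        if ("input-in-0".equals(bindingName)) {
            configs.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100);
        }
    };
}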

Example 3 with KafkaConsumerProperties

Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.

From class KafkaBinderTests, method testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff:

@Test
@SuppressWarnings("unchecked")
void testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff() throws Exception {
    Binder binder = getBinder();
    DirectChannel moduleOutputChannel = createBindableChannel("output", createProducerBindingProperties(createProducerProperties()));
    QueueChannel moduleInputChannel = new QueueChannel();
    Binding<MessageChannel> producerBinding = binder.bindProducer("testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", moduleOutputChannel, createProducerProperties());
    ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
    consumerProperties.getExtension().setAckMode(ContainerProperties.AckMode.MANUAL);
    Binding<MessageChannel> consumerBinding = binder.bindConsumer("testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", "test", moduleInputChannel, consumerProperties);
    String testPayload1 = "foo" + UUID.randomUUID().toString();
    Message<?> message1 = org.springframework.integration.support.MessageBuilder.withPayload(testPayload1.getBytes()).build();
    // Let the consumer actually bind to the producer before sending a message
    binderBindUnbindLatency();
    moduleOutputChannel.send(message1);
    Message<?> receivedMessage = receive(moduleInputChannel);
    assertThat(receivedMessage).isNotNull();
    assertThat(receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT)).isNotNull();
    Acknowledgment acknowledgment = receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment.class);
    try {
        acknowledgment.acknowledge();
    } catch (Exception e) {
        fail("Acknowledge must not throw an exception");
    } finally {
        producerBinding.unbind();
        consumerBinding.unbind();
    }
}
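On the application side, the same manual-acknowledgment flow looks roughly like the functional consumer below, a sketch assuming the binding is configured with ackMode=MANUAL as in the test:

import java.util.function.Consumer;

import org.springframework.context.annotation.Bean;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.Message;

@Bean
Consumer<Message<String>> process() {
    return message -> {
        Acknowledgment ack = message.getHeaders()
                .get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment.class);
        // ... handle the payload ...
        if (ack != null) {
            ack.acknowledge(); // commit the offset only after successful processing
        }
    };
}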

Example 4 with KafkaConsumerProperties

Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.

From class KafkaBinderTests, method testAutoCreateTopicsDisabledOnBinderStillWorksAsLongAsBrokerCreatesTopic:

@Test
@SuppressWarnings("unchecked")
void testAutoCreateTopicsDisabledOnBinderStillWorksAsLongAsBrokerCreatesTopic() throws Exception {
    KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
    configurationProperties.setAutoCreateTopics(false);
    Binder binder = getBinder(configurationProperties);
    BindingProperties producerBindingProperties = createProducerBindingProperties(createProducerProperties());
    DirectChannel output = createBindableChannel("output", producerBindingProperties);
    ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
    DirectChannel input = createBindableChannel("input", createConsumerBindingProperties(consumerProperties));
    String testTopicName = "createdByBroker-" + System.currentTimeMillis();
    Binding<MessageChannel> producerBinding = binder.bindProducer(testTopicName, output, producerBindingProperties.getProducer());
    String testPayload = "foo1-" + UUID.randomUUID().toString();
    output.send(new GenericMessage<>(testPayload));
    Binding<MessageChannel> consumerBinding = binder.bindConsumer(testTopicName, "test", input, consumerProperties);
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<Message<byte[]>> inboundMessageRef = new AtomicReference<>();
    input.subscribe(message1 -> {
        try {
            inboundMessageRef.set((Message<byte[]>) message1);
        } finally {
            latch.countDown();
        }
    });
    Assert.isTrue(latch.await(5, TimeUnit.SECONDS), "Failed to receive message");
    assertThat(inboundMessageRef.get()).isNotNull();
    assertThat(new String(inboundMessageRef.get().getPayload(), StandardCharsets.UTF_8)).isEqualTo(testPayload);
    producerBinding.unbind();
    consumerBinding.unbind();
}
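When neither the binder nor the broker auto-creates topics, the topic must exist before binding. A minimal sketch using the plain Kafka AdminClient, assumed to run inside a method that declares throws Exception; the broker address, topic name, and partition/replica counts are placeholders:

import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

Map<String, Object> adminProps =
        Map.of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
try (AdminClient admin = AdminClient.create(adminProps)) {
    admin.createTopics(List.of(new NewTopic("createdByBroker-example", 1, (short) 1)))
            .all()
            .get(); // block until the topic exists
}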

Example 5 with KafkaConsumerProperties

Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.

From class KafkaBinderTests, method testInternalHeadersNotPropagatedGuts:

public void testInternalHeadersNotPropagatedGuts(String name, String[] headerPatterns, KafkaHeaderMapper mapper) throws Exception {
    KafkaTestBinder binder;
    if (mapper == null) {
        binder = getBinder();
    } else {
        KafkaBinderConfigurationProperties binderConfiguration = createConfigurationProperties();
        binderConfiguration.setHeaderMapperBeanName("headerMapper");
        KafkaTopicProvisioner kafkaTopicProvisioner = new KafkaTopicProvisioner(binderConfiguration, new TestKafkaProperties(), null);
        try {
            kafkaTopicProvisioner.afterPropertiesSet();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        binder = new KafkaTestBinder(binderConfiguration, kafkaTopicProvisioner);
        ((GenericApplicationContext) binder.getApplicationContext()).registerBean("headerMapper", KafkaHeaderMapper.class, () -> mapper);
    }
    ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
    producerProperties.getExtension().setHeaderPatterns(headerPatterns);
    DirectChannel output = createBindableChannel("output", createProducerBindingProperties(producerProperties));
    output.setBeanName(name + ".out");
    Binding<MessageChannel> producerBinding = binder.bindProducer(name + ".1", output, producerProperties);
    QueueChannel input = new QueueChannel();
    input.setBeanName(name + ".in");
    ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
    Binding<MessageChannel> consumerBinding = binder.bindConsumer(name + ".0", name, input, consumerProperties);
    Map<String, Object> producerProps = KafkaTestUtils.producerProps(embeddedKafka);
    KafkaTemplate template = new KafkaTemplate(new DefaultKafkaProducerFactory<>(producerProps));
    template.send(MessageBuilder.withPayload("internalHeaderPropagation")
            .setHeader(KafkaHeaders.TOPIC, name + ".0")
            .setHeader("someHeader", "someValue")
            .build());
    Message<?> consumed = input.receive(10_000);
    if (headerPatterns != null) {
        consumed = MessageBuilder.fromMessage(consumed).setHeader(headerPatterns[0], "bar").build();
    }
    output.send(consumed);
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(name, "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    DefaultKafkaConsumerFactory cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer consumer = cf.createConsumer();
    consumer.assign(Collections.singletonList(new TopicPartition(name + ".1", 0)));
    ConsumerRecords<?, ?> records = consumer.poll(Duration.ofSeconds(10));
    assertThat(records.count()).isEqualTo(1);
    ConsumerRecord<?, ?> received = records.iterator().next();
    assertThat(received.value()).isEqualTo("internalHeaderPropagation".getBytes());
    Header header = received.headers().lastHeader(BinderHeaders.NATIVE_HEADERS_PRESENT);
    assertThat(header).isNull();
    header = received.headers().lastHeader(IntegrationMessageHeaderAccessor.DELIVERY_ATTEMPT);
    assertThat(header).isNull();
    header = received.headers().lastHeader(MessageHeaders.ID);
    assertThat(header).isNull();
    header = received.headers().lastHeader(MessageHeaders.TIMESTAMP);
    assertThat(header).isNull();
    assertThat(received.headers().lastHeader("someHeader")).isNotNull();
    if (headerPatterns != null) {
        assertThat(received.headers().lastHeader(headerPatterns[0])).isNotNull();
    }
    producerBinding.unbind();
    consumerBinding.unbind();
    consumer.close();
}
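The test injects its custom mapper through setHeaderMapperBeanName("headerMapper"). A minimal sketch of one such bean built on spring-kafka's DefaultKafkaHeaderMapper; the raw-mapped header name is illustrative:

import java.util.Map;

import org.springframework.context.annotation.Bean;
import org.springframework.kafka.support.DefaultKafkaHeaderMapper;
import org.springframework.kafka.support.KafkaHeaderMapper;

@Bean("headerMapper")
KafkaHeaderMapper headerMapper() {
    DefaultKafkaHeaderMapper mapper = new DefaultKafkaHeaderMapper();
    // Map "someHeader" as raw bytes rather than JSON.
    mapper.setRawMappedHeaders(Map.of("someHeader", true));
    return mapper;
}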
