
Example 71 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the spring-integration project (spring-projects).

From the class MessageDrivenAdapterTests, method testInboundBatch.

@Test
void testInboundBatch() throws Exception {
    Map<String, Object> props = KafkaTestUtils.consumerProps("test2", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic2);
    containerProps.setIdleEventInterval(100L);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    KafkaMessageDrivenChannelAdapter<Integer, String> adapter = new KafkaMessageDrivenChannelAdapter<>(container, ListenerMode.batch);
    QueueChannel out = new QueueChannel();
    adapter.setOutputChannel(out);
    // Latch proving that the partitions-assigned seek callback is invoked
    final CountDownLatch onPartitionsAssignedCalledLatch = new CountDownLatch(1);
    adapter.setOnPartitionsAssignedSeekCallback((map, consumer) -> onPartitionsAssignedCalledLatch.countDown());
    adapter.afterPropertiesSet();
    // Decorate each converted batch message with an extra test header
    adapter.setBatchMessageConverter(new BatchMessagingMessageConverter() {

        @Override
        public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, Acknowledgment acknowledgment, Consumer<?, ?> consumer, Type type) {
            Message<?> message = super.toMessage(records, acknowledgment, consumer, type);
            return MessageBuilder.fromMessage(message).setHeader("testHeader", "testValue").build();
        }
    });
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic2);
    template.sendDefault(0, 1487694048607L, 1, "foo");
    template.sendDefault(0, 1487694048608L, 1, "bar");
    Message<?> received = out.receive(10000);
    assertThat(received).isNotNull();
    Object payload = received.getPayload();
    assertThat(payload).isInstanceOf(List.class);
    List<?> list = (List<?>) payload;
    assertThat(list.size()).isGreaterThan(0);
    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(Arrays.asList(1, 1));
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(Arrays.asList("testTopic2", "testTopic2"));
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(Arrays.asList(0, 0));
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(Arrays.asList(0L, 1L));
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo(Arrays.asList("CREATE_TIME", "CREATE_TIME"));
    assertThat(headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(Arrays.asList(1487694048607L, 1487694048608L));
    assertThat(headers.get("testHeader")).isEqualTo("testValue");
    assertThat(onPartitionsAssignedCalledLatch.await(10, TimeUnit.SECONDS)).isTrue();
    // Swap in a converter that always fails, to exercise the conversion-error path
    adapter.setMessageConverter(new BatchMessageConverter() {

        @Override
        public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, Acknowledgment acknowledgment, Consumer<?, ?> consumer, Type payloadType) {
            throw new RuntimeException("testError");
        }

        @Override
        public List<ProducerRecord<?, ?>> fromMessage(Message<?> message, String defaultTopic) {
            return null;
        }
    });
    // Conversion failures are routed to this error channel as ErrorMessages carrying the ConversionException
    PollableChannel errors = new QueueChannel();
    adapter.setErrorChannel(errors);
    template.sendDefault(1, "foo");
    template.sendDefault(1, "bar");
    Message<?> error = errors.receive(10000);
    assertThat(error).isNotNull();
    assertThat(error.getPayload()).isInstanceOf(ConversionException.class);
    assertThat(((ConversionException) error.getPayload()).getMessage()).contains("Failed to convert to message");
    assertThat(((ConversionException) error.getPayload()).getRecords()).hasSize(2);
    adapter.stop();
}
Also used : QueueChannel(org.springframework.integration.channel.QueueChannel) ErrorMessage(org.springframework.messaging.support.ErrorMessage) Message(org.springframework.messaging.Message) KafkaMessageListenerContainer(org.springframework.kafka.listener.KafkaMessageListenerContainer) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) Acknowledgment(org.springframework.kafka.support.Acknowledgment) List(java.util.List) MessageHeaders(org.springframework.messaging.MessageHeaders) ConversionException(org.springframework.kafka.support.converter.ConversionException) BatchMessageConverter(org.springframework.kafka.support.converter.BatchMessageConverter) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) CountDownLatch(java.util.concurrent.CountDownLatch) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Type(java.lang.reflect.Type) BatchMessagingMessageConverter(org.springframework.kafka.support.converter.BatchMessagingMessageConverter) PollableChannel(org.springframework.messaging.PollableChannel) Test(org.junit.jupiter.api.Test)
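
For comparison, here is a minimal sketch, not taken from the project above, of wiring the same batch-mode pieces as Spring beans outside of a test; the topic name, group id, bean names, and channel choice are illustrative assumptions, and the ConsumerFactory is assumed to be defined elsewhere.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter;
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter.ListenerMode;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.messaging.MessageChannel;

@Configuration
public class BatchAdapterConfig {

    @Bean
    public KafkaMessageListenerContainer<Integer, String> batchContainer(
            ConsumerFactory<Integer, String> consumerFactory) {
        // ContainerProperties names the topic(s) to subscribe to; "testTopic2" is assumed here
        ContainerProperties containerProps = new ContainerProperties("testTopic2");
        // Assumed group id for the consumer
        containerProps.setGroupId("batch-adapter-group");
        return new KafkaMessageListenerContainer<>(consumerFactory, containerProps);
    }

    @Bean
    public MessageChannel batchInput() {
        return new QueueChannel();
    }

    @Bean
    public KafkaMessageDrivenChannelAdapter<Integer, String> batchAdapter(
            KafkaMessageListenerContainer<Integer, String> batchContainer) {
        // ListenerMode.batch delivers each poll as one Message whose payload is a List of values
        KafkaMessageDrivenChannelAdapter<Integer, String> adapter =
                new KafkaMessageDrivenChannelAdapter<>(batchContainer, ListenerMode.batch);
        adapter.setOutputChannel(batchInput());
        return adapter;
    }
}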

Example 72 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the spring-integration project (spring-projects).

From the class MessageDrivenAdapterTests, method testInboundJsonWithPayload.

@Test
void testInboundJsonWithPayload() {
    Map<String, Object> props = KafkaTestUtils.consumerProps("test6", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, Foo> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic6);
    KafkaMessageListenerContainer<Integer, Foo> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    KafkaMessageDrivenChannelAdapter<Integer, Foo> adapter =
            Kafka.messageDrivenChannelAdapter(container, ListenerMode.record)
                    .recordMessageConverter(new StringJsonMessageConverter())
                    .payloadType(Foo.class)
                    .get();
    QueueChannel out = new QueueChannel();
    adapter.setOutputChannel(out);
    adapter.afterPropertiesSet();
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic6);
    // Map the Spring Integration headers onto Kafka record headers; note that sendDefault()
    // below sends key/value only, so the mapped kHeaders are not attached to the record
    Headers kHeaders = new RecordHeaders();
    MessageHeaders siHeaders = new MessageHeaders(Collections.singletonMap("foo", "bar"));
    new DefaultKafkaHeaderMapper().fromHeaders(siHeaders, kHeaders);
    template.sendDefault(1, "{\"bar\":\"baz\"}");
    Message<?> received = out.receive(10000);
    assertThat(received).isNotNull();
    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic6);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
    assertThat((Long) headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isGreaterThan(0L);
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
    assertThat(received.getPayload()).isInstanceOf(Foo.class);
    assertThat(received.getPayload()).isEqualTo(new Foo("baz"));
    adapter.stop();
}
Also used : QueueChannel(org.springframework.integration.channel.QueueChannel) KafkaHeaders(org.springframework.kafka.support.KafkaHeaders) Headers(org.apache.kafka.common.header.Headers) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) MessageHeaders(org.springframework.messaging.MessageHeaders) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) KafkaMessageListenerContainer(org.springframework.kafka.listener.KafkaMessageListenerContainer) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) DefaultKafkaHeaderMapper(org.springframework.kafka.support.DefaultKafkaHeaderMapper) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) StringJsonMessageConverter(org.springframework.kafka.support.converter.StringJsonMessageConverter) MessageHeaders(org.springframework.messaging.MessageHeaders) Test(org.junit.jupiter.api.Test)
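
Outside of a test, the spec returned by Kafka.messageDrivenChannelAdapter(...) is more typically dropped into a Java DSL flow than unwrapped with .get(). A minimal sketch under that assumption follows; the flow bean and the logging handler are illustrative, IntegrationFlows and IntegrationFlow come from org.springframework.integration.dsl, Kafka comes from org.springframework.integration.kafka.dsl, and newer Spring Integration versions expose the same factory methods on IntegrationFlow directly.

@Bean
public IntegrationFlow fooJsonFlow(KafkaMessageListenerContainer<Integer, Foo> container) {
    return IntegrationFlows
            // Same record-mode, JSON-converting adapter spec as in the test above
            .from(Kafka.messageDrivenChannelAdapter(container, ListenerMode.record)
                    .recordMessageConverter(new StringJsonMessageConverter())
                    .payloadType(Foo.class))
            // Illustrative terminal handler: just log the converted Foo payload
            .handle(message -> System.out.println("Received Foo: " + message.getPayload()))
            .get();
}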

Example 73 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the eventapis project (kloiasoft).

From the class EventListenConfiguration, method operationListenerContainer.

@Bean(name = "operationListenerContainer")
public ConcurrentMessageListenerContainer<String, Operation> operationListenerContainer() {
    Map<String, Object> consumerProperties = eventApisConfiguration.getEventBus().buildConsumerProperties();
    // Turn off the Kafka client's auto-commit; the container commits offsets itself (AckMode.BATCH below)
    consumerProperties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    DefaultKafkaConsumerFactory<String, Operation> operationConsumerFactory = new DefaultKafkaConsumerFactory<>(consumerProperties, new StringDeserializer(), new JsonDeserializer<>(Operation.class));
    ContainerProperties containerProperties = new ContainerProperties(Operation.OPERATION_EVENTS);
    containerProperties.setMessageListener(new MultipleEventMessageListener(eventMessageListeners));
    // Commit offsets after the records returned by each poll() have been processed
    containerProperties.setAckMode(ContainerProperties.AckMode.BATCH);
    ConcurrentMessageListenerContainer<String, Operation> operationListenerContainer = new ConcurrentMessageListenerContainer<>(operationConsumerFactory, containerProperties);
    operationListenerContainer.setBeanName("emon-operations");
    operationListenerContainer.setConcurrency(eventApisConfiguration.getEventBus().getConsumer().getOperationConcurrency());
    return operationListenerContainer;
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) MultipleEventMessageListener(io.splitet.core.api.emon.service.MultipleEventMessageListener) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) ConcurrentMessageListenerContainer(org.springframework.kafka.listener.ConcurrentMessageListenerContainer) Operation(io.splitet.core.pojos.Operation) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) Bean(org.springframework.context.annotation.Bean)
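
For reference, a minimal sketch of a plain record listener that the same ContainerProperties could carry in place of the project-specific MultipleEventMessageListener; the class name and log output are illustrative assumptions.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.MessageListener;
import io.splitet.core.pojos.Operation;

public class LoggingOperationListener implements MessageListener<String, Operation> {

    @Override
    public void onMessage(ConsumerRecord<String, Operation> record) {
        // With AckMode.BATCH the container commits offsets after each poll,
        // so the listener only has to process the record itself
        System.out.println("operation event: key=" + record.key()
                + ", partition=" + record.partition() + ", offset=" + record.offset());
    }
}

It would be registered the same way as in the example above, via containerProperties.setMessageListener(new LoggingOperationListener()).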

Aggregations

ContainerProperties (org.springframework.kafka.listener.ContainerProperties): 73
KafkaMessageListenerContainer (org.springframework.kafka.listener.KafkaMessageListenerContainer): 46
DefaultKafkaConsumerFactory (org.springframework.kafka.core.DefaultKafkaConsumerFactory): 41
Test (org.junit.jupiter.api.Test): 38
DefaultKafkaProducerFactory (org.springframework.kafka.core.DefaultKafkaProducerFactory): 30
KafkaTemplate (org.springframework.kafka.core.KafkaTemplate): 26
QueueChannel (org.springframework.integration.channel.QueueChannel): 21
ErrorMessage (org.springframework.messaging.support.ErrorMessage): 19
Message (org.springframework.messaging.Message): 18
MessageHeaders (org.springframework.messaging.MessageHeaders): 18
CountDownLatch (java.util.concurrent.CountDownLatch): 17
DirectChannel (org.springframework.integration.channel.DirectChannel): 17
MessageChannel (org.springframework.messaging.MessageChannel): 14
Acknowledgment (org.springframework.kafka.support.Acknowledgment): 11
Type (java.lang.reflect.Type): 10
HashMap (java.util.HashMap): 9
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9
BeanFactory (org.springframework.beans.factory.BeanFactory): 9
Bean (org.springframework.context.annotation.Bean): 9
ConcurrentMessageListenerContainer (org.springframework.kafka.listener.ConcurrentMessageListenerContainer): 9