Example 26 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the project spring-integration by spring-projects.

From the class MessageDrivenAdapterTests, method testInboundJson.

@Test
void testInboundJson() {
    Map<String, Object> props = KafkaTestUtils.consumerProps("test3", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic3);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    KafkaMessageDrivenChannelAdapter<Integer, String> adapter = new KafkaMessageDrivenChannelAdapter<>(container);
    adapter.setRecordMessageConverter(new StringJsonMessageConverter());
    QueueChannel out = new QueueChannel();
    adapter.setOutputChannel(out);
    adapter.afterPropertiesSet();
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic3);
    Headers kHeaders = new RecordHeaders();
    MessageHeaders siHeaders = new MessageHeaders(Collections.singletonMap("foo", "bar"));
    new DefaultKafkaHeaderMapper().fromHeaders(siHeaders, kHeaders);
    template.send(new ProducerRecord<>(topic3, 0, 1487694048607L, 1, "{\"bar\":\"baz\"}", kHeaders));
    Message<?> received = out.receive(10000);
    assertThat(received).isNotNull();
    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic3);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(1487694048607L);
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
    assertThat(headers.get("foo")).isEqualTo("bar");
    assertThat(received.getPayload()).isInstanceOf(Map.class);
    adapter.stop();
}
Also used : QueueChannel(org.springframework.integration.channel.QueueChannel) KafkaHeaders(org.springframework.kafka.support.KafkaHeaders) Headers(org.apache.kafka.common.header.Headers) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) MessageHeaders(org.springframework.messaging.MessageHeaders) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) KafkaMessageListenerContainer(org.springframework.kafka.listener.KafkaMessageListenerContainer) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) DefaultKafkaHeaderMapper(org.springframework.kafka.support.DefaultKafkaHeaderMapper) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) StringJsonMessageConverter(org.springframework.kafka.support.converter.StringJsonMessageConverter) Test(org.junit.jupiter.api.Test)
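
On the inbound side, the adapter uses the same header mapping to turn Kafka record headers back into Spring messaging headers, which is why the "foo" header sent above is asserted to come back as "bar". A minimal round-trip sketch of DefaultKafkaHeaderMapper in isolation (the class name and printed output are illustrative, not part of the test):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.springframework.kafka.support.DefaultKafkaHeaderMapper;
import org.springframework.messaging.MessageHeaders;

public class HeaderRoundTripSketch {

    public static void main(String[] args) {
        DefaultKafkaHeaderMapper mapper = new DefaultKafkaHeaderMapper();

        // Outbound: Spring messaging headers -> Kafka record headers (what the test does before sending)
        MessageHeaders siHeaders = new MessageHeaders(Collections.singletonMap("foo", "bar"));
        Headers recordHeaders = new RecordHeaders();
        mapper.fromHeaders(siHeaders, recordHeaders);

        // Inbound: Kafka record headers -> a plain map (what the adapter does for a received record)
        Map<String, Object> restored = new HashMap<>();
        mapper.toHeaders(recordHeaders, restored);
        System.out.println(restored.get("foo")); // expected to print "bar"
    }
}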

Example 27 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the project spring-integration by spring-projects.

From the class MessageDrivenAdapterTests, method testInboundRecordNoRetryRecover.

@Test
void testInboundRecordNoRetryRecover() {
    Map<String, Object> props = KafkaTestUtils.consumerProps("test5", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic5);
    containerProps.setDeliveryAttemptHeader(true);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf, containerProps);
    container.setErrorHandler(new SeekToCurrentErrorHandler());
    KafkaMessageDrivenChannelAdapter<Integer, String> adapter = new KafkaMessageDrivenChannelAdapter<>(container);
    MessageChannel out = new DirectChannel() {

        @Override
        protected boolean doSend(Message<?> message, long timeout) {
            throw new RuntimeException("intended");
        }
    };
    adapter.setOutputChannel(out);
    QueueChannel errorChannel = new QueueChannel();
    adapter.setErrorChannel(errorChannel);
    adapter.setRecoveryCallback(new ErrorMessageSendingRecoverer(errorChannel, new RawRecordHeaderErrorMessageStrategy()));
    adapter.setBindSourceRecord(true);
    adapter.afterPropertiesSet();
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic5);
    template.sendDefault(1, "foo");
    Message<?> received = errorChannel.receive(10000);
    assertThat(received).isInstanceOf(ErrorMessage.class);
    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RAW_DATA)).isNotNull();
    assertThat(headers.get(IntegrationMessageHeaderAccessor.SOURCE_DATA)).isSameAs(headers.get(KafkaHeaders.RAW_DATA));
    Message<?> originalMessage = ((ErrorMessage) received).getOriginalMessage();
    assertThat(originalMessage).isNotNull();
    assertThat(originalMessage.getHeaders().get(IntegrationMessageHeaderAccessor.SOURCE_DATA)).isSameAs(headers.get(KafkaHeaders.RAW_DATA));
    headers = originalMessage.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic5);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
    assertThat(StaticMessageHeaderAccessor.getDeliveryAttempt(originalMessage).get()).isEqualTo(1);
    adapter.stop();
}
Also used : ErrorMessage(org.springframework.messaging.support.ErrorMessage) Message(org.springframework.messaging.Message) QueueChannel(org.springframework.integration.channel.QueueChannel) DirectChannel(org.springframework.integration.channel.DirectChannel) KafkaMessageListenerContainer(org.springframework.kafka.listener.KafkaMessageListenerContainer) ErrorMessageSendingRecoverer(org.springframework.integration.handler.advice.ErrorMessageSendingRecoverer) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) MessageHeaders(org.springframework.messaging.MessageHeaders) SeekToCurrentErrorHandler(org.springframework.kafka.listener.SeekToCurrentErrorHandler) KafkaTemplate(org.springframework.kafka.core.KafkaTemplate) RawRecordHeaderErrorMessageStrategy(org.springframework.integration.kafka.support.RawRecordHeaderErrorMessageStrategy) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) MessageChannel(org.springframework.messaging.MessageChannel) Test(org.junit.jupiter.api.Test)
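
Because the recoverer is configured with RawRecordHeaderErrorMessageStrategy, the failed ConsumerRecord rides along on the ErrorMessage under KafkaHeaders.RAW_DATA, as the assertions above verify. A minimal sketch of an error-channel subscriber that pulls the record back out (the handler class itself is hypothetical):

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandler;

// Hypothetical handler: subscribe it to the adapter's error channel to inspect failures.
public class RawRecordErrorLogger implements MessageHandler {

    @Override
    public void handleMessage(Message<?> errorMessage) {
        // RawRecordHeaderErrorMessageStrategy stores the original ConsumerRecord under this header
        ConsumerRecord<?, ?> failed =
                (ConsumerRecord<?, ?>) errorMessage.getHeaders().get(KafkaHeaders.RAW_DATA);
        if (failed != null) {
            System.out.println("Failed record " + failed.topic() + "-" + failed.partition()
                    + "@" + failed.offset() + ": " + failed.value());
        }
    }
}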

Example 28 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the project eventapis by kloiasoft.

From the class ConsumerOffsetListenerConfiguration, method consumerOffsetListenerContainer.

@Bean("consumerOffsetListenerContainer")
public ConcurrentMessageListenerContainer<byte[], byte[]> consumerOffsetListenerContainer() {
    Map<String, Object> consumerProperties = eventApisConfiguration.getEventBus().buildConsumerProperties();
    consumerProperties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    ContainerProperties containerProperties = new ContainerProperties(CONSUMER_OFFSETS);
    containerProperties.setMessageListener(consumerOffsetListener);
    // Ack on a timer rather than after every record, so the listener's own offset commits do not echo back onto the consumer-offsets topic it is reading
    containerProperties.setAckMode(ContainerProperties.AckMode.TIME);
    containerProperties.setAckTime(3000);
    DefaultKafkaConsumerFactory<byte[], byte[]> operationConsumerFactory = new DefaultKafkaConsumerFactory<>(consumerProperties, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    ConcurrentMessageListenerContainer<byte[], byte[]> consumerOffsetListenerContainer = new ConcurrentMessageListenerContainer<>(operationConsumerFactory, containerProperties);
    consumerOffsetListenerContainer.setConcurrency(5);
    consumerOffsetListenerContainer.setBeanName("consumer-offsets");
    return consumerOffsetListenerContainer;
}
Also used : ContainerProperties(org.springframework.kafka.listener.ContainerProperties) ConcurrentMessageListenerContainer(org.springframework.kafka.listener.ConcurrentMessageListenerContainer) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) Bean(org.springframework.context.annotation.Bean)
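
AckMode.TIME with a 3-second ackTime batches offset commits by time instead of committing per record. For contrast, a minimal sketch of a MANUAL ack container, where the listener acknowledges each record explicitly; the topic name and the externally supplied ConsumerFactory are assumptions:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.AcknowledgingMessageListener;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;

public class ManualAckSketch {

    // Assumes a suitable ConsumerFactory<String, String> is provided by the caller.
    KafkaMessageListenerContainer<String, String> manualAckContainer(ConsumerFactory<String, String> cf) {
        ContainerProperties props = new ContainerProperties("some-topic"); // hypothetical topic
        props.setAckMode(ContainerProperties.AckMode.MANUAL);
        props.setMessageListener((AcknowledgingMessageListener<String, String>) (record, ack) -> {
            process(record);
            ack.acknowledge(); // commit only after the record has been processed
        });
        return new KafkaMessageListenerContainer<>(cf, props);
    }

    private void process(ConsumerRecord<String, String> record) {
        // application logic goes here
    }
}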

Example 29 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the project eventapis by kloiasoft.

From the class EventListenConfiguration, method messageListenerContainer.

@Bean(name = "messageListenerContainer")
public ConcurrentMessageListenerContainer<String, PublishedEventWrapper> messageListenerContainer() {
    Map<String, Object> consumerProperties = eventApisConfiguration.getEventBus().buildConsumerProperties();
    consumerProperties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    consumerProperties.put(ConsumerConfig.METADATA_MAX_AGE_CONFIG, 3000);
    DefaultKafkaConsumerFactory<String, PublishedEventWrapper> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProperties, new StringDeserializer(), new JsonDeserializer<>(PublishedEventWrapper.class));
    ContainerProperties containerProperties = new ContainerProperties(Pattern.compile(eventTopicRegexStr));
    containerProperties.setMessageListener(new MultipleEventMessageListener(eventMessageListeners));
    containerProperties.setAckMode(ContainerProperties.AckMode.BATCH);
    ConcurrentMessageListenerContainer<String, PublishedEventWrapper> messageListenerContainer = new ConcurrentMessageListenerContainer<>(consumerFactory, containerProperties);
    messageListenerContainer.setConcurrency(eventApisConfiguration.getEventBus().getConsumer().getEventConcurrency());
    messageListenerContainer.setBeanName("emon-events");
    return messageListenerContainer;
}
Also used : PublishedEventWrapper(io.splitet.core.kafka.PublishedEventWrapper) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) MultipleEventMessageListener(io.splitet.core.api.emon.service.MultipleEventMessageListener) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) ConcurrentMessageListenerContainer(org.springframework.kafka.listener.ConcurrentMessageListenerContainer) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) Bean(org.springframework.context.annotation.Bean)
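
Unlike the earlier examples, this ContainerProperties is built from a regex Pattern rather than fixed topic names, so the container subscribes to every matching topic; new matching topics are only discovered when the consumer refreshes its metadata, which is presumably why the configuration also lowers metadata.max.age.ms to 3000. A minimal sketch of the pattern-based constructor with a placeholder regex:

import java.util.regex.Pattern;

import org.springframework.kafka.listener.ContainerProperties;

public class TopicPatternSketch {

    ContainerProperties patternProperties() {
        // Subscribes to every existing and future topic matching the pattern;
        // new topics are seen on the consumer's next metadata refresh (metadata.max.age.ms).
        ContainerProperties props = new ContainerProperties(Pattern.compile("event\\..*")); // placeholder regex
        props.setAckMode(ContainerProperties.AckMode.BATCH);
        return props;
    }
}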

Example 30 with ContainerProperties

Use of org.springframework.kafka.listener.ContainerProperties in the project spring-integration by spring-projects.

From the class KafkaProducerMessageHandlerTests, method testOutboundGatewayGuts.

private void testOutboundGatewayGuts(ProducerRecord<?, ?> payload) throws Exception {
    ConsumerFactory<Integer, String> consumerFactory = new DefaultKafkaConsumerFactory<>(KafkaTestUtils.consumerProps(topic5, "false", embeddedKafka));
    ContainerProperties containerProperties = new ContainerProperties(topic6);
    final CountDownLatch assigned = new CountDownLatch(1);
    containerProperties.setConsumerRebalanceListener(new ConsumerRebalanceListener() {

        @Override
        public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
            // empty
        }

        @Override
        public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
            assigned.countDown();
        }
    });
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
    DefaultKafkaProducerFactory<Integer, String> producerFactory = new DefaultKafkaProducerFactory<>(KafkaTestUtils.producerProps(embeddedKafka));
    ReplyingKafkaTemplate<Integer, String, String> template = new ReplyingKafkaTemplate<>(producerFactory, container);
    template.start();
    assertThat(assigned.await(30, TimeUnit.SECONDS)).isTrue();
    KafkaProducerMessageHandler<Integer, String> handler = new KafkaProducerMessageHandler<>(template);
    handler.setBeanFactory(mock(BeanFactory.class));
    QueueChannel replies = new QueueChannel();
    handler.setOutputChannel(replies);
    handler.afterPropertiesSet();
    Message<?> message;
    if (payload == null) {
        message = MessageBuilder.withPayload("foo").setHeader(KafkaHeaders.TOPIC, topic5).setHeader(KafkaHeaders.KEY, 2).setHeader(KafkaHeaders.PARTITION, 1).build();
    } else {
        message = MessageBuilder.withPayload(payload).build();
    }
    handler.handleMessage(message);
    ConsumerRecord<Integer, String> record = KafkaTestUtils.getSingleRecord(consumer, topic5);
    assertThat(record).has(key(2));
    assertThat(record).has(partition(1));
    assertThat(record).has(value("foo"));
    Map<String, Object> headers = new HashMap<>();
    new DefaultKafkaHeaderMapper().toHeaders(record.headers(), headers);
    assertThat(headers.get(KafkaHeaders.REPLY_TOPIC)).isEqualTo(topic6.getBytes());
    ProducerRecord<Integer, String> pr = new ProducerRecord<>(topic6, 0, 1, "FOO", record.headers());
    template.send(pr);
    Message<?> reply = replies.receive(30_000);
    assertThat(reply).isNotNull();
    assertThat(reply.getPayload()).isEqualTo("FOO");
    assertThat(reply.getHeaders().get(KafkaHeaders.TOPIC)).isNull();
    assertThat(reply.getHeaders().get(KafkaHeaders.CORRELATION_ID)).isNull();
    final Message<?> messageToHandle1 = MessageBuilder.withPayload("foo").setHeader(KafkaHeaders.TOPIC, topic5).setHeader(KafkaHeaders.KEY, 2).setHeader(KafkaHeaders.PARTITION, 1).setHeader(KafkaHeaders.REPLY_TOPIC, "bad").build();
    assertThatExceptionOfType(MessageHandlingException.class).isThrownBy(() -> handler.handleMessage(messageToHandle1)).withMessageContaining("The reply topic header [bad] does not match any reply container topic: " + "[" + topic6 + "]");
    final Message<?> messageToHandle2 = MessageBuilder.withPayload("foo").setHeader(KafkaHeaders.TOPIC, topic5).setHeader(KafkaHeaders.KEY, 2).setHeader(KafkaHeaders.PARTITION, 1).setHeader(KafkaHeaders.REPLY_PARTITION, 999).build();
    assertThatExceptionOfType(MessageHandlingException.class).isThrownBy(() -> handler.handleMessage(messageToHandle2)).withMessageContaining("The reply partition header [999] " + "does not match any reply container partition for topic [" + topic6 + "]: [0, 1]");
    template.stop();
    // discard from the test consumer
    KafkaTestUtils.getSingleRecord(consumer, topic6);
    producerFactory.destroy();
}
Also used : QueueChannel(org.springframework.integration.channel.QueueChannel) HashMap(java.util.HashMap) KafkaMessageListenerContainer(org.springframework.kafka.listener.KafkaMessageListenerContainer) DefaultKafkaHeaderMapper(org.springframework.kafka.support.DefaultKafkaHeaderMapper) ContainerProperties(org.springframework.kafka.listener.ContainerProperties) BeanFactory(org.springframework.beans.factory.BeanFactory) ReplyingKafkaTemplate(org.springframework.kafka.requestreply.ReplyingKafkaTemplate) DefaultKafkaConsumerFactory(org.springframework.kafka.core.DefaultKafkaConsumerFactory) CountDownLatch(java.util.concurrent.CountDownLatch) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) DefaultKafkaProducerFactory(org.springframework.kafka.core.DefaultKafkaProducerFactory) TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord)
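
The test drives the reply path by hand, sending the reply record itself; in normal use the ReplyingKafkaTemplate handles the correlation. A minimal request/reply sketch against a template that has already been started with a reply container, as above (the topic name and timeout are placeholders):

import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.requestreply.RequestReplyFuture;

public class RequestReplySketch {

    String sendAndWait(ReplyingKafkaTemplate<Integer, String, String> template) throws Exception {
        ProducerRecord<Integer, String> request = new ProducerRecord<>("requests", "ping"); // placeholder topic
        // The template adds the correlation-id and reply-topic headers, sends the record,
        // and completes the future when a matching reply arrives on its reply container.
        RequestReplyFuture<Integer, String, String> future = template.sendAndReceive(request);
        ConsumerRecord<Integer, String> reply = future.get(30, TimeUnit.SECONDS);
        return reply.value();
    }
}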

Aggregations

ContainerProperties (org.springframework.kafka.listener.ContainerProperties): 73 usages
KafkaMessageListenerContainer (org.springframework.kafka.listener.KafkaMessageListenerContainer): 46 usages
DefaultKafkaConsumerFactory (org.springframework.kafka.core.DefaultKafkaConsumerFactory): 41 usages
Test (org.junit.jupiter.api.Test): 38 usages
DefaultKafkaProducerFactory (org.springframework.kafka.core.DefaultKafkaProducerFactory): 30 usages
KafkaTemplate (org.springframework.kafka.core.KafkaTemplate): 26 usages
QueueChannel (org.springframework.integration.channel.QueueChannel): 21 usages
ErrorMessage (org.springframework.messaging.support.ErrorMessage): 19 usages
Message (org.springframework.messaging.Message): 18 usages
MessageHeaders (org.springframework.messaging.MessageHeaders): 18 usages
CountDownLatch (java.util.concurrent.CountDownLatch): 17 usages
DirectChannel (org.springframework.integration.channel.DirectChannel): 17 usages
MessageChannel (org.springframework.messaging.MessageChannel): 14 usages
Acknowledgment (org.springframework.kafka.support.Acknowledgment): 11 usages
Type (java.lang.reflect.Type): 10 usages
HashMap (java.util.HashMap): 9 usages
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9 usages
BeanFactory (org.springframework.beans.factory.BeanFactory): 9 usages
Bean (org.springframework.context.annotation.Bean): 9 usages
ConcurrentMessageListenerContainer (org.springframework.kafka.listener.ConcurrentMessageListenerContainer): 9 usages