Use of org.springframework.kafka.listener.ContainerProperties in project av-service by dvoraka.
Class KafkaFileClientConfig, method messageListenerContainer:
@Bean
public MessageListenerContainer messageListenerContainer(
        ConsumerFactory<String, DefaultAvMessage> consumerFactory,
        MessageListener<String, AvMessage> messageListener,
        ThreadPoolTaskScheduler kafkaClientThreadPoolTaskScheduler) {

    ContainerProperties props = new ContainerProperties(resultTopic);
    // shouldn't be necessary but the default scheduler is not destroyed after shutdown
    props.setScheduler(kafkaClientThreadPoolTaskScheduler);

    MessageListenerContainer container =
            new ConcurrentMessageListenerContainer<>(consumerFactory, props);
    container.setupMessageListener(messageListener);

    return container;
}
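The kafkaClientThreadPoolTaskScheduler bean injected above is defined elsewhere in av-service and is not part of this snippet. A minimal sketch of such a scheduler bean, with an assumed pool size and thread-name prefix rather than the project's actual values, could look like:

@Bean
public ThreadPoolTaskScheduler kafkaClientThreadPoolTaskScheduler() {
    // assumed settings; the real bean in av-service may be configured differently
    ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
    scheduler.setPoolSize(2);
    scheduler.setThreadNamePrefix("kafka-client-");
    // as a Spring bean it is destroyed with the application context,
    // which is why it is handed to the container instead of the default scheduler
    return scheduler;
}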
Use of org.springframework.kafka.listener.ContainerProperties in project loc-framework by lord-of-code.
Class LocKafkaAutoConfiguration, method kafkaListenerContainerFactory:
@Bean(name = "kafkaListenerContainerFactory")
public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
        ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
        ConsumerFactory<Object, Object> kafkaConsumerFactory) {

    ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
    ContainerProperties containerProperties = factory.getContainerProperties();
    factory.setRecordFilterStrategy(locMessageFilterStrategy());
    factory.setErrorHandler(new LocKafkaConsumerErrorHandler());
    factory.setMessageConverter(recordMessageConverter());
    configurer.configure(factory, kafkaConsumerFactory);
    return factory;
}
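The locMessageFilterStrategy() and recordMessageConverter() helpers are defined elsewhere in LocKafkaAutoConfiguration and are not shown here. Purely as an illustration of the RecordFilterStrategy contract, and not the loc-framework implementation, a filter strategy could be sketched as:

private RecordFilterStrategy<Object, Object> locMessageFilterStrategy() {
    // hypothetical rule: drop records whose value is null; returning true means "filter out"
    return consumerRecord -> consumerRecord.value() == null;
}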
Use of org.springframework.kafka.listener.ContainerProperties in project spring-integration by spring-projects.
Class InboundGatewayTests, method testInboundRetryErrorRecover:
@Test
void testInboundRetryErrorRecover() {
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("replyHandler3", "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    ConsumerFactory<Integer, String> cf2 = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer<Integer, String> consumer = cf2.createConsumer();
    embeddedKafka.consumeFromAnEmbeddedTopic(consumer, topic6);

    Map<String, Object> props = KafkaTestUtils.consumerProps("test3", "false", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic5);
    KafkaMessageListenerContainer<Integer, String> container =
            new KafkaMessageListenerContainer<>(cf, containerProps);

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic5);

    KafkaInboundGateway<Integer, String, String> gateway = new KafkaInboundGateway<>(container, template);
    MessageChannel out = new DirectChannel() {

        @Override
        protected boolean doSend(Message<?> message, long timeout) {
            throw new RuntimeException("intended");
        }

    };
    QueueChannel errors = new QueueChannel();
    gateway.setRequestChannel(out);
    gateway.setErrorChannel(errors);
    gateway.setBeanFactory(mock(BeanFactory.class));
    gateway.setMessageConverter(new MessagingMessageConverter() {

        @Override
        public Message<?> toMessage(ConsumerRecord<?, ?> record, Acknowledgment acknowledgment,
                Consumer<?, ?> con, Type type) {

            Message<?> message = super.toMessage(record, acknowledgment, con, type);
            return MessageBuilder.fromMessage(message)
                    .setHeader("testHeader", "testValue")
                    .setHeader(KafkaHeaders.REPLY_TOPIC, topic6)
                    .setHeader(KafkaHeaders.REPLY_PARTITION, 1)
                    .build();
        }

    });
    gateway.setReplyTimeout(30_000);

    RetryTemplate retryTemplate = new RetryTemplate();
    SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy();
    retryPolicy.setMaxAttempts(2);
    retryTemplate.setRetryPolicy(retryPolicy);
    retryTemplate.setBackOffPolicy(new NoBackOffPolicy());
    gateway.setRetryTemplate(retryTemplate);
    gateway.setRecoveryCallback(
            new ErrorMessageSendingRecoverer(errors, new RawRecordHeaderErrorMessageStrategy()));
    gateway.afterPropertiesSet();
    gateway.start();
    ContainerTestUtils.waitForAssignment(container, 2);

    template.sendDefault(0, 1487694048607L, 1, "foo");

    ErrorMessage em = (ErrorMessage) errors.receive(30_000);
    assertThat(em).isNotNull();
    Message<?> failed = ((MessagingException) em.getPayload()).getFailedMessage();
    assertThat(failed).isNotNull();
    assertThat(failed.getHeaders().get(IntegrationMessageHeaderAccessor.SOURCE_DATA)).isNull();

    MessageChannel reply = (MessageChannel) em.getHeaders().getReplyChannel();
    MessageHeaders headers = failed.getHeaders();
    reply.send(MessageBuilder.withPayload("ERROR").copyHeaders(headers).build());

    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic5);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(1487694048607L);
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
    assertThat(headers.get(KafkaHeaders.REPLY_TOPIC)).isEqualTo(topic6);
    assertThat(headers.get("testHeader")).isEqualTo("testValue");

    ConsumerRecord<Integer, String> record = KafkaTestUtils.getSingleRecord(consumer, topic6);
    assertThat(record).has(partition(1));
    assertThat(record).has(value("ERROR"));

    gateway.stop();
}
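The embeddedKafka broker and the topic5/topic6 constants come from the enclosing test class rather than from this method. A rough sketch of that setup, assuming the JUnit 5 @EmbeddedKafka support from spring-kafka-test (the wiring and topic names in the real test class may differ), might be:

@EmbeddedKafka(partitions = 2, topics = { InboundGatewayTests.topic5, InboundGatewayTests.topic6 })
class InboundGatewayTests {

    static final String topic5 = "testTopic5";   // assumed topic names
    static final String topic6 = "testTopic6";

    static EmbeddedKafkaBroker embeddedKafka;

    @BeforeAll
    static void setup() {
        // the broker started by @EmbeddedKafka is exposed through the JUnit 5 condition
        embeddedKafka = EmbeddedKafkaCondition.getBroker();
    }
}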
Use of org.springframework.kafka.listener.ContainerProperties in project spring-integration by spring-projects.
Class MessageDrivenAdapterTests, method testInboundRecord:
@Test
void testInboundRecord() {
    Map<String, Object> props = KafkaTestUtils.consumerProps("test1", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic1);
    KafkaMessageListenerContainer<Integer, String> container =
            new KafkaMessageListenerContainer<>(cf, containerProps);
    KafkaMessageDrivenChannelAdapter<Integer, String> adapter = new KafkaMessageDrivenChannelAdapter<>(container);
    QueueChannel out = new QueueChannel();
    adapter.setOutputChannel(out);
    adapter.afterPropertiesSet();
    adapter.setRecordMessageConverter(new MessagingMessageConverter() {

        @Override
        public Message<?> toMessage(ConsumerRecord<?, ?> record, Acknowledgment acknowledgment,
                Consumer<?, ?> consumer, Type type) {

            Message<?> message = super.toMessage(record, acknowledgment, consumer, type);
            return MessageBuilder.fromMessage(message).setHeader("testHeader", "testValue").build();
        }

    });
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic1);
    template.sendDefault(0, 1487694048607L, 1, "foo");

    Message<?> received = out.receive(10000);
    assertThat(received).isNotNull();

    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(1487694048607L);
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
    assertThat(headers.get("testHeader")).isEqualTo("testValue");

    template.sendDefault(1, null);
    received = out.receive(10000);
    assertThat(received).isNotNull();
    assertThat(received.getPayload()).isInstanceOf(KafkaNull.class);

    headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(1L);
    assertThat((Long) headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isGreaterThan(0L);
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
    assertThat(headers.get("testHeader")).isEqualTo("testValue");

    adapter.setMessageConverter(new RecordMessageConverter() {

        @Override
        public Message<?> toMessage(ConsumerRecord<?, ?> record, Acknowledgment acknowledgment,
                Consumer<?, ?> consumer, Type type) {

            throw new RuntimeException("testError");
        }

        @Override
        public ProducerRecord<?, ?> fromMessage(Message<?> message, String defaultTopic) {
            return null;
        }

    });
    PollableChannel errors = new QueueChannel();
    adapter.setErrorChannel(errors);
    template.sendDefault(1, "bar");

    Message<?> error = errors.receive(10000);
    assertThat(error).isNotNull();
    assertThat(error.getPayload()).isInstanceOf(ConversionException.class);
    assertThat(((ConversionException) error.getPayload()).getMessage()).contains("Failed to convert to message");
    assertThat(((ConversionException) error.getPayload()).getRecord()).isNotNull();

    adapter.stop();
}
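In the calls above, template.sendDefault(0, 1487694048607L, 1, "foo") takes (partition, timestamp, key, value) and publishes to the template's default topic. For illustration only, the same record could be sent explicitly:

// same record as template.sendDefault(0, 1487694048607L, 1, "foo"), with topic1 as the default topic
template.send(new ProducerRecord<>(topic1, 0, 1487694048607L, 1, "foo"));

That explicit timestamp is what the test later verifies through the KafkaHeaders.RECEIVED_TIMESTAMP header and the CREATE_TIME timestamp type.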
Use of org.springframework.kafka.listener.ContainerProperties in project spring-integration by spring-projects.
Class MessageDrivenAdapterTests, method testInboundRecordRetryRecoverWithoutRecoveryCallback:
/**
 * The recovery callback is not mandatory; if it is not set and retries are exhausted,
 * the last throwable is rethrown to the consumer.
 */
@Test
void testInboundRecordRetryRecoverWithoutRecoveryCallback() throws Exception {
    Map<String, Object> props = KafkaTestUtils.consumerProps("test6", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic6);
    KafkaMessageListenerContainer<Integer, String> container =
            new KafkaMessageListenerContainer<>(cf, containerProps);
    KafkaMessageDrivenChannelAdapter<Integer, String> adapter = new KafkaMessageDrivenChannelAdapter<>(container);
    MessageChannel out = new DirectChannel() {

        @Override
        protected boolean doSend(Message<?> message, long timeout) {
            throw new RuntimeException("intended");
        }

    };
    adapter.setOutputChannel(out);

    RetryTemplate retryTemplate = new RetryTemplate();
    SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy();
    retryPolicy.setMaxAttempts(2);
    retryTemplate.setRetryPolicy(retryPolicy);
    final CountDownLatch retryCountLatch = new CountDownLatch(retryPolicy.getMaxAttempts());
    retryTemplate.registerListener(new RetryListenerSupport() {

        @Override
        public <T, E extends Throwable> void onError(RetryContext context, RetryCallback<T, E> callback,
                Throwable throwable) {

            retryCountLatch.countDown();
        }

    });
    adapter.setRetryTemplate(retryTemplate);
    adapter.afterPropertiesSet();
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic6);
    template.sendDefault(1, "foo");

    assertThat(retryCountLatch.await(10, TimeUnit.SECONDS)).isTrue();

    adapter.stop();
    pf.destroy();
}
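For contrast with the rethrow-to-consumer behavior exercised above, the adapter also accepts a recovery callback, as the gateway test earlier on this page shows. A minimal sketch of that variant (the channel variable name is an assumption):

// hypothetical variant: exhausted retries are routed to an error channel instead of being rethrown
QueueChannel recoveryErrors = new QueueChannel();
adapter.setRecoveryCallback(
        new ErrorMessageSendingRecoverer(recoveryErrors, new RawRecordHeaderErrorMessageStrategy()));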