Use of org.springframework.kafka.listener.ContainerProperties in the spring-integration project (spring-projects):
class MessageDrivenAdapterTests, method testInboundBatch.
@Test
void testInboundBatch() throws Exception {
    // Consumer group "test2", auto-commit enabled, reading topic2 from the earliest offset
    // so the two records sent below are always picked up.
    Map<String, Object> props = KafkaTestUtils.consumerProps("test2", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic2);
    containerProps.setIdleEventInterval(100L);
    KafkaMessageListenerContainer<Integer, String> container =
            new KafkaMessageListenerContainer<>(cf, containerProps);
    KafkaMessageDrivenChannelAdapter<Integer, String> adapter =
            new KafkaMessageDrivenChannelAdapter<>(container, ListenerMode.batch);
    // FIX: install the batch converter BEFORE afterPropertiesSet(). The adapter's init
    // phase wires the listener (and its converter) into the container, so a converter
    // set afterwards may never be consulted and the "testHeader" assertion below could
    // fail. This also matches the ordering used in the upstream spring-integration test.
    adapter.setBatchMessageConverter(new BatchMessagingMessageConverter() {

        @Override
        public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, Acknowledgment acknowledgment,
                Consumer<?, ?> consumer, Type type) {
            Message<?> message = super.toMessage(records, acknowledgment, consumer, type);
            // Decorate every converted batch with a marker header asserted further down.
            return MessageBuilder.fromMessage(message).setHeader("testHeader", "testValue").build();
        }

    });
    QueueChannel out = new QueueChannel();
    adapter.setOutputChannel(out);
    final CountDownLatch onPartitionsAssignedCalledLatch = new CountDownLatch(1);
    adapter.setOnPartitionsAssignedSeekCallback((map, consumer) -> onPartitionsAssignedCalledLatch.countDown());
    adapter.afterPropertiesSet();
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);

    // Publish two records with explicit CREATE_TIME timestamps so the batch header
    // lists (keys, topics, partitions, offsets, timestamps) can be asserted exactly.
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic2);
    template.sendDefault(0, 1487694048607L, 1, "foo");
    template.sendDefault(0, 1487694048608L, 1, "bar");

    Message<?> received = out.receive(10000);
    assertThat(received).isNotNull();
    Object payload = received.getPayload();
    assertThat(payload).isInstanceOf(List.class);
    List<?> list = (List<?>) payload;
    assertThat(list.size()).isGreaterThan(0);
    // In batch mode each Kafka header becomes a List with one entry per record.
    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(Arrays.asList(1, 1));
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(Arrays.asList("testTopic2", "testTopic2"));
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(Arrays.asList(0, 0));
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(Arrays.asList(0L, 1L));
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo(Arrays.asList("CREATE_TIME", "CREATE_TIME"));
    assertThat(headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(Arrays.asList(1487694048607L, 1487694048608L));
    // Proves the custom BatchMessagingMessageConverter above was actually applied.
    assertThat(headers.get("testHeader")).isEqualTo("testValue");
    assertThat(onPartitionsAssignedCalledLatch.await(10, TimeUnit.SECONDS)).isTrue();

    // Second phase: swap in a converter that always throws, and verify the failure is
    // routed to the error channel as a ConversionException carrying both raw records.
    // NOTE(review): this converter is installed while the adapter is running — confirm
    // the listener reads its converter per delivery so the swap takes effect.
    adapter.setMessageConverter(new BatchMessageConverter() {

        @Override
        public Message<?> toMessage(List<ConsumerRecord<?, ?>> records, Acknowledgment acknowledgment,
                Consumer<?, ?> consumer, Type payloadType) {
            throw new RuntimeException("testError");
        }

        @Override
        public List<ProducerRecord<?, ?>> fromMessage(Message<?> message, String defaultTopic) {
            return null;
        }

    });
    PollableChannel errors = new QueueChannel();
    adapter.setErrorChannel(errors);
    template.sendDefault(1, "foo");
    template.sendDefault(1, "bar");
    Message<?> error = errors.receive(10000);
    assertThat(error).isNotNull();
    assertThat(error.getPayload()).isInstanceOf(ConversionException.class);
    assertThat(((ConversionException) error.getPayload()).getMessage()).contains("Failed to convert to message");
    assertThat(((ConversionException) error.getPayload()).getRecords()).hasSize(2);
    adapter.stop();
}
Use of org.springframework.kafka.listener.ContainerProperties in the spring-integration project (spring-projects):
class MessageDrivenAdapterTests, method testInboundJsonWithPayload.
@Test
void testInboundJsonWithPayload() {
    // Consumer group "test6", auto-commit enabled, reading topic6 from the earliest offset.
    Map<String, Object> props = KafkaTestUtils.consumerProps("test6", "true", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, Foo> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic6);
    KafkaMessageListenerContainer<Integer, Foo> container =
            new KafkaMessageListenerContainer<>(cf, containerProps);
    // Java DSL factory: record mode with a JSON converter that deserializes the
    // String payload into the target type Foo.
    KafkaMessageDrivenChannelAdapter<Integer, Foo> adapter =
            Kafka.messageDrivenChannelAdapter(container, ListenerMode.record)
                    .recordMessageConverter(new StringJsonMessageConverter())
                    .payloadType(Foo.class)
                    .get();
    QueueChannel out = new QueueChannel();
    adapter.setOutputChannel(out);
    adapter.afterPropertiesSet();
    adapter.start();
    ContainerTestUtils.waitForAssignment(container, 2);

    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic6);
    // FIX: removed unused locals — a RecordHeaders/MessageHeaders pair was built and
    // mapped via DefaultKafkaHeaderMapper but never attached to the outgoing record,
    // so it had no effect on this test.
    template.sendDefault(1, "{\"bar\":\"baz\"}");

    Message<?> received = out.receive(10000);
    assertThat(received).isNotNull();
    MessageHeaders headers = received.getHeaders();
    assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
    assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic6);
    assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
    assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
    assertThat((Long) headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isGreaterThan(0L);
    assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
    // The JSON string payload must have been converted into a Foo instance.
    assertThat(received.getPayload()).isInstanceOf(Foo.class);
    assertThat(received.getPayload()).isEqualTo(new Foo("baz"));
    adapter.stop();
}
Use of org.springframework.kafka.listener.ContainerProperties in the eventapis project (kloiasoft):
class EventListenConfiguration, method operationListenerContainer.
@Bean(name = "operationListenerContainer")
public ConcurrentMessageListenerContainer<String, Operation> operationListenerContainer() {
    // Consumer settings come from the shared event-bus configuration; auto-commit is
    // switched off because the container commits offsets itself (BATCH ack mode below).
    Map<String, Object> config = eventApisConfiguration.getEventBus().buildConsumerProperties();
    config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

    // String keys, JSON-deserialized Operation values.
    DefaultKafkaConsumerFactory<String, Operation> factory = new DefaultKafkaConsumerFactory<>(
            config, new StringDeserializer(), new JsonDeserializer<>(Operation.class));

    // Listen on the operation-events topic, fanning each record out to all
    // registered event message listeners; acknowledge once per polled batch.
    ContainerProperties listenerProps = new ContainerProperties(Operation.OPERATION_EVENTS);
    listenerProps.setMessageListener(new MultipleEventMessageListener(eventMessageListeners));
    listenerProps.setAckMode(ContainerProperties.AckMode.BATCH);

    ConcurrentMessageListenerContainer<String, Operation> container =
            new ConcurrentMessageListenerContainer<>(factory, listenerProps);
    container.setBeanName("emon-operations");
    container.setConcurrency(eventApisConfiguration.getEventBus().getConsumer().getOperationConcurrency());
    return container;
}
Aggregations