Example usage of org.springframework.kafka.listener.ContainerProperties from the spring-integration project (spring-projects), taken from the class MessageDrivenAdapterTests, method testInboundJson:
@Test
void testInboundJson() {
	// Consumer side: a listener container feeding the message-driven channel adapter.
	Map<String, Object> consumerConfig = KafkaTestUtils.consumerProps("test3", "true", embeddedKafka);
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<Integer, String> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerConfig);
	ContainerProperties containerProperties = new ContainerProperties(topic3);
	KafkaMessageListenerContainer<Integer, String> listenerContainer =
			new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
	KafkaMessageDrivenChannelAdapter<Integer, String> adapter =
			new KafkaMessageDrivenChannelAdapter<>(listenerContainer);
	// JSON converter so the inbound payload arrives as a Map rather than the raw String.
	adapter.setRecordMessageConverter(new StringJsonMessageConverter());
	QueueChannel outputChannel = new QueueChannel();
	adapter.setOutputChannel(outputChannel);
	adapter.afterPropertiesSet();
	adapter.start();
	ContainerTestUtils.waitForAssignment(listenerContainer, 2);
	// Producer side: publish one JSON record carrying a mapped custom "foo" header.
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	ProducerFactory<Integer, String> producerFactory = new DefaultKafkaProducerFactory<>(producerConfig);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(producerFactory);
	template.setDefaultTopic(topic3);
	Headers recordHeaders = new RecordHeaders();
	MessageHeaders outboundHeaders = new MessageHeaders(Collections.singletonMap("foo", "bar"));
	new DefaultKafkaHeaderMapper().fromHeaders(outboundHeaders, recordHeaders);
	template.send(new ProducerRecord<>(topic3, 0, 1487694048607L, 1, "{\"bar\":\"baz\"}", recordHeaders));
	// Verify the converted payload plus the Kafka metadata and mapped headers.
	Message<?> received = outputChannel.receive(10000);
	assertThat(received).isNotNull();
	MessageHeaders headers = received.getHeaders();
	assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
	assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic3);
	assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
	assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
	assertThat(headers.get(KafkaHeaders.RECEIVED_TIMESTAMP)).isEqualTo(1487694048607L);
	assertThat(headers.get(KafkaHeaders.TIMESTAMP_TYPE)).isEqualTo("CREATE_TIME");
	assertThat(headers.get("foo")).isEqualTo("bar");
	assertThat(received.getPayload()).isInstanceOf(Map.class);
	adapter.stop();
}
Example usage of org.springframework.kafka.listener.ContainerProperties from the spring-integration project (spring-projects), taken from the class MessageDrivenAdapterTests, method testInboundRecordNoRetryRecover:
@Test
void testInboundRecordNoRetryRecover() {
	// Consumer side: container with delivery-attempt header enabled and a seek-to-current error handler.
	Map<String, Object> consumerConfig = KafkaTestUtils.consumerProps("test5", "true", embeddedKafka);
	consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	DefaultKafkaConsumerFactory<Integer, String> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerConfig);
	ContainerProperties containerProperties = new ContainerProperties(topic5);
	containerProperties.setDeliveryAttemptHeader(true);
	KafkaMessageListenerContainer<Integer, String> listenerContainer =
			new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
	listenerContainer.setErrorHandler(new SeekToCurrentErrorHandler());
	KafkaMessageDrivenChannelAdapter<Integer, String> adapter =
			new KafkaMessageDrivenChannelAdapter<>(listenerContainer);
	// Output channel that always fails, forcing the recovery path.
	MessageChannel failingChannel = new DirectChannel() {
		@Override
		protected boolean doSend(Message<?> message, long timeout) {
			throw new RuntimeException("intended");
		}
	};
	adapter.setOutputChannel(failingChannel);
	QueueChannel errorChannel = new QueueChannel();
	adapter.setErrorChannel(errorChannel);
	adapter.setRecoveryCallback(
			new ErrorMessageSendingRecoverer(errorChannel, new RawRecordHeaderErrorMessageStrategy()));
	adapter.setBindSourceRecord(true);
	adapter.afterPropertiesSet();
	adapter.start();
	ContainerTestUtils.waitForAssignment(listenerContainer, 2);
	// Producer side: send a single record that will fail delivery downstream.
	Map<String, Object> producerConfig = KafkaTestUtils.producerProps(embeddedKafka);
	ProducerFactory<Integer, String> producerFactory = new DefaultKafkaProducerFactory<>(producerConfig);
	KafkaTemplate<Integer, String> template = new KafkaTemplate<>(producerFactory);
	template.setDefaultTopic(topic5);
	template.sendDefault(1, "foo");
	// The recoverer should publish an ErrorMessage carrying the raw record in both
	// the error message and its bound original message.
	Message<?> received = errorChannel.receive(10000);
	assertThat(received).isInstanceOf(ErrorMessage.class);
	MessageHeaders headers = received.getHeaders();
	assertThat(headers.get(KafkaHeaders.RAW_DATA)).isNotNull();
	assertThat(headers.get(IntegrationMessageHeaderAccessor.SOURCE_DATA))
			.isSameAs(headers.get(KafkaHeaders.RAW_DATA));
	Message<?> originalMessage = ((ErrorMessage) received).getOriginalMessage();
	assertThat(originalMessage).isNotNull();
	assertThat(originalMessage.getHeaders().get(IntegrationMessageHeaderAccessor.SOURCE_DATA))
			.isSameAs(headers.get(KafkaHeaders.RAW_DATA));
	headers = originalMessage.getHeaders();
	assertThat(headers.get(KafkaHeaders.RECEIVED_MESSAGE_KEY)).isEqualTo(1);
	assertThat(headers.get(KafkaHeaders.RECEIVED_TOPIC)).isEqualTo(topic5);
	assertThat(headers.get(KafkaHeaders.RECEIVED_PARTITION_ID)).isEqualTo(0);
	assertThat(headers.get(KafkaHeaders.OFFSET)).isEqualTo(0L);
	assertThat(StaticMessageHeaderAccessor.getDeliveryAttempt(originalMessage).get()).isEqualTo(1);
	adapter.stop();
}
Example usage of org.springframework.kafka.listener.ContainerProperties from the eventapis project (kloiasoft), taken from the class ConsumerOffsetListenerConfiguration, method consumerOffsetListenerContainer:
@Bean("consumerOffsetListenerContainer")
public ConcurrentMessageListenerContainer<byte[], byte[]> consumerOffsetListenerContainer() {
	// Manual offset management: auto-commit is disabled and acks are time-driven.
	Map<String, Object> consumerConfig = eventApisConfiguration.getEventBus().buildConsumerProperties();
	consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
	ContainerProperties containerProperties = new ContainerProperties(CONSUMER_OFFSETS);
	containerProperties.setMessageListener(consumerOffsetListener);
	// Time-based acknowledgements (every 3s) to avoid echoings.
	containerProperties.setAckMode(ContainerProperties.AckMode.TIME);
	containerProperties.setAckTime(3000);
	// Raw byte[] key/value deserialization for the internal offsets topic.
	DefaultKafkaConsumerFactory<byte[], byte[]> offsetConsumerFactory =
			new DefaultKafkaConsumerFactory<>(consumerConfig, new ByteArrayDeserializer(), new ByteArrayDeserializer());
	ConcurrentMessageListenerContainer<byte[], byte[]> container =
			new ConcurrentMessageListenerContainer<>(offsetConsumerFactory, containerProperties);
	container.setConcurrency(5);
	container.setBeanName("consumer-offsets");
	return container;
}
Example usage of org.springframework.kafka.listener.ContainerProperties from the eventapis project (kloiasoft), taken from the class EventListenConfiguration, method messageListenerContainer:
@Bean(name = "messageListenerContainer")
public ConcurrentMessageListenerContainer<String, PublishedEventWrapper> messageListenerContainer() {
	// Consumer configuration: no auto-commit; refresh topic metadata every 3s so the
	// regex subscription picks up newly created event topics.
	Map<String, Object> consumerConfig = eventApisConfiguration.getEventBus().buildConsumerProperties();
	consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
	consumerConfig.put(ConsumerConfig.METADATA_MAX_AGE_CONFIG, 3000);
	DefaultKafkaConsumerFactory<String, PublishedEventWrapper> eventConsumerFactory =
			new DefaultKafkaConsumerFactory<>(consumerConfig, new StringDeserializer(),
					new JsonDeserializer<>(PublishedEventWrapper.class));
	// Subscribe by topic-name pattern and fan out each record to all registered listeners.
	ContainerProperties containerProperties = new ContainerProperties(Pattern.compile(eventTopicRegexStr));
	containerProperties.setMessageListener(new MultipleEventMessageListener(eventMessageListeners));
	containerProperties.setAckMode(ContainerProperties.AckMode.BATCH);
	ConcurrentMessageListenerContainer<String, PublishedEventWrapper> container =
			new ConcurrentMessageListenerContainer<>(eventConsumerFactory, containerProperties);
	container.setConcurrency(eventApisConfiguration.getEventBus().getConsumer().getEventConcurrency());
	container.setBeanName("emon-events");
	return container;
}
Example usage of org.springframework.kafka.listener.ContainerProperties from the spring-integration project (spring-projects), taken from the class KafkaProducerMessageHandlerTests, method testOutboundGatewayGuts:
private void testOutboundGatewayGuts(ProducerRecord<?, ?> payload) throws Exception {
	// Reply container on topic6; a rebalance listener latch tells us when partitions are assigned.
	ConsumerFactory<Integer, String> replyConsumerFactory =
			new DefaultKafkaConsumerFactory<>(KafkaTestUtils.consumerProps(topic5, "false", embeddedKafka));
	ContainerProperties containerProperties = new ContainerProperties(topic6);
	final CountDownLatch assignedLatch = new CountDownLatch(1);
	containerProperties.setConsumerRebalanceListener(new ConsumerRebalanceListener() {

		@Override
		public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
			// empty
		}

		@Override
		public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
			assignedLatch.countDown();
		}

	});
	KafkaMessageListenerContainer<Integer, String> replyContainer =
			new KafkaMessageListenerContainer<>(replyConsumerFactory, containerProperties);
	DefaultKafkaProducerFactory<Integer, String> producerFactory =
			new DefaultKafkaProducerFactory<>(KafkaTestUtils.producerProps(embeddedKafka));
	ReplyingKafkaTemplate<Integer, String, String> template =
			new ReplyingKafkaTemplate<>(producerFactory, replyContainer);
	template.start();
	assertThat(assignedLatch.await(30, TimeUnit.SECONDS)).isTrue();
	KafkaProducerMessageHandler<Integer, String> handler = new KafkaProducerMessageHandler<>(template);
	handler.setBeanFactory(mock(BeanFactory.class));
	QueueChannel replies = new QueueChannel();
	handler.setOutputChannel(replies);
	handler.afterPropertiesSet();
	// Target can come either from the payload record itself or from message headers.
	Message<?> message;
	if (payload == null) {
		message = MessageBuilder.withPayload("foo")
				.setHeader(KafkaHeaders.TOPIC, topic5)
				.setHeader(KafkaHeaders.KEY, 2)
				.setHeader(KafkaHeaders.PARTITION, 1)
				.build();
	}
	else {
		message = MessageBuilder.withPayload(payload).build();
	}
	handler.handleMessage(message);
	// The request record must land on topic5 with the expected key/partition/value
	// and carry the reply-topic header the gateway stamped on it.
	ConsumerRecord<Integer, String> record = KafkaTestUtils.getSingleRecord(consumer, topic5);
	assertThat(record).has(key(2));
	assertThat(record).has(partition(1));
	assertThat(record).has(value("foo"));
	Map<String, Object> mappedHeaders = new HashMap<>();
	new DefaultKafkaHeaderMapper().toHeaders(record.headers(), mappedHeaders);
	assertThat(mappedHeaders.get(KafkaHeaders.REPLY_TOPIC)).isEqualTo(topic6.getBytes());
	// Echo a reply to topic6, propagating the correlation headers from the request.
	ProducerRecord<Integer, String> replyRecord = new ProducerRecord<>(topic6, 0, 1, "FOO", record.headers());
	template.send(replyRecord);
	Message<?> reply = replies.receive(30_000);
	assertThat(reply).isNotNull();
	assertThat(reply.getPayload()).isEqualTo("FOO");
	assertThat(reply.getHeaders().get(KafkaHeaders.TOPIC)).isNull();
	assertThat(reply.getHeaders().get(KafkaHeaders.CORRELATION_ID)).isNull();
	// Mismatched reply topic/partition headers must be rejected up front.
	final Message<?> messageToHandle1 = MessageBuilder.withPayload("foo")
			.setHeader(KafkaHeaders.TOPIC, topic5)
			.setHeader(KafkaHeaders.KEY, 2)
			.setHeader(KafkaHeaders.PARTITION, 1)
			.setHeader(KafkaHeaders.REPLY_TOPIC, "bad")
			.build();
	assertThatExceptionOfType(MessageHandlingException.class)
			.isThrownBy(() -> handler.handleMessage(messageToHandle1))
			.withMessageContaining("The reply topic header [bad] does not match any reply container topic: "
					+ "[" + topic6 + "]");
	final Message<?> messageToHandle2 = MessageBuilder.withPayload("foo")
			.setHeader(KafkaHeaders.TOPIC, topic5)
			.setHeader(KafkaHeaders.KEY, 2)
			.setHeader(KafkaHeaders.PARTITION, 1)
			.setHeader(KafkaHeaders.REPLY_PARTITION, 999)
			.build();
	assertThatExceptionOfType(MessageHandlingException.class)
			.isThrownBy(() -> handler.handleMessage(messageToHandle2))
			.withMessageContaining("The reply partition header [999] "
					+ "does not match any reply container partition for topic [" + topic6 + "]: [0, 1]");
	template.stop();
	// discard from the test consumer
	KafkaTestUtils.getSingleRecord(consumer, topic6);
	producerFactory.destroy();
}
Aggregations