Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.
The class KafkaStreamsBinderUtils, method prepareConsumerBinding.
static void prepareConsumerBinding(String name, String group, ApplicationContext context,
        KafkaTopicProvisioner kafkaTopicProvisioner,
        KafkaStreamsBinderConfigurationProperties binderConfigurationProperties,
        ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties, RetryTemplate retryTemplate,
        ConfigurableListableBeanFactory beanFactory, String bindingName,
        KafkaStreamsBindingInformationCatalogue kafkaStreamsBindingInformationCatalogue,
        StreamsBuilderFactoryBean streamsBuilderFactoryBean) {

    ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties =
            (ExtendedConsumerProperties) properties;
    // A binder-wide sendToDlq handler enables the DLQ for this binding...
    if (binderConfigurationProperties.getDeserializationExceptionHandler() == DeserializationExceptionHandler.sendToDlq) {
        extendedConsumerProperties.getExtension().setEnableDlq(true);
    }
    // ...but check for a deserialization handler on the consumer binding as well, since that takes precedence.
    final DeserializationExceptionHandler deserializationExceptionHandler =
            properties.getExtension().getDeserializationExceptionHandler();
    if (deserializationExceptionHandler == DeserializationExceptionHandler.sendToDlq) {
        extendedConsumerProperties.getExtension().setEnableDlq(true);
    }
    String[] inputTopics = StringUtils.commaDelimitedListToStringArray(name);
    for (String inputTopic : inputTopics) {
        kafkaTopicProvisioner.provisionConsumerDestination(inputTopic, group, extendedConsumerProperties);
    }
    if (extendedConsumerProperties.getExtension().isEnableDlq()) {
        // Use a single application-provided DlqPartitionFunction if present; otherwise fall back
        // to a function derived from the configured number of DLQ partitions.
        Map<String, DlqPartitionFunction> partitionFunctions =
                context.getBeansOfType(DlqPartitionFunction.class, false, false);
        boolean oneFunctionPresent = partitionFunctions.size() == 1;
        Integer dlqPartitions = extendedConsumerProperties.getExtension().getDlqPartitions();
        DlqPartitionFunction partitionFunction = oneFunctionPresent
                ? partitionFunctions.values().iterator().next()
                : DlqPartitionFunction.determineFallbackFunction(dlqPartitions, LOGGER);
        ProducerFactory<byte[], byte[]> producerFactory = getProducerFactory(
                new ExtendedProducerProperties<>(extendedConsumerProperties.getExtension().getDlqProducerProperties()),
                binderConfigurationProperties);
        kafkaStreamsBindingInformationCatalogue.addDlqProducerFactory(streamsBuilderFactoryBean, producerFactory);
        KafkaOperations<byte[], byte[]> kafkaTemplate = new KafkaTemplate<>(producerFactory);
        // An application-provided DlqDestinationResolver overrides the configured dlqName.
        Map<String, DlqDestinationResolver> dlqDestinationResolvers =
                context.getBeansOfType(DlqDestinationResolver.class, false, false);
        BiFunction<ConsumerRecord<?, ?>, Exception, TopicPartition> destinationResolver =
                dlqDestinationResolvers.isEmpty()
                        ? (cr, e) -> new TopicPartition(extendedConsumerProperties.getExtension().getDlqName(),
                                partitionFunction.apply(group, cr, e))
                        : (cr, e) -> new TopicPartition(dlqDestinationResolvers.values().iterator().next().apply(cr, e),
                                partitionFunction.apply(group, cr, e));
        DeadLetterPublishingRecoverer kafkaStreamsBinderDlqRecoverer =
                !dlqDestinationResolvers.isEmpty() || !StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName())
                        ? new DeadLetterPublishingRecoverer(kafkaTemplate, destinationResolver)
                        : null;
        for (String inputTopic : inputTopics) {
            if (StringUtils.isEmpty(extendedConsumerProperties.getExtension().getDlqName())
                    && dlqDestinationResolvers.isEmpty()) {
                // Neither an explicit DLQ name nor a resolver: use the error.<topic>.<group> convention.
                destinationResolver = (cr, e) -> new TopicPartition("error." + inputTopic + "." + group,
                        partitionFunction.apply(group, cr, e));
                kafkaStreamsBinderDlqRecoverer = new DeadLetterPublishingRecoverer(kafkaTemplate, destinationResolver);
            }
            SendToDlqAndContinue sendToDlqAndContinue = context.getBean(SendToDlqAndContinue.class);
            sendToDlqAndContinue.addKStreamDlqDispatch(inputTopic, kafkaStreamsBinderDlqRecoverer);
        }
    }
    // Register the retry template under "<bindingName>-RetryTemplate" unless the binding names its own.
    if (!StringUtils.hasText(properties.getRetryTemplateName())) {
        @SuppressWarnings("unchecked")
        BeanDefinition retryTemplateBeanDefinition = BeanDefinitionBuilder
                .genericBeanDefinition((Class<RetryTemplate>) retryTemplate.getClass(), () -> retryTemplate)
                .getRawBeanDefinition();
        ((BeanDefinitionRegistry) beanFactory).registerBeanDefinition(bindingName + "-RetryTemplate",
                retryTemplateBeanDefinition);
    }
}
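The DLQ branch above looks up optional DlqPartitionFunction and DlqDestinationResolver beans from the application context. A minimal sketch of how an application might supply them; the configuration class name, the bean bodies, and the ".dlq" topic suffix are illustrative assumptions, while DlqPartitionFunction and DlqDestinationResolver are the binder's own callback types:

import org.springframework.cloud.stream.binder.kafka.utils.DlqDestinationResolver;
import org.springframework.cloud.stream.binder.kafka.utils.DlqPartitionFunction;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class DlqRoutingConfig {

    // Found by prepareConsumerBinding via context.getBeansOfType(DlqPartitionFunction.class, ...).
    // Sending every failed record to partition 0 is an illustrative choice.
    @Bean
    public DlqPartitionFunction dlqPartitionFunction() {
        return (group, record, throwable) -> 0;
    }

    // Found via context.getBeansOfType(DlqDestinationResolver.class, ...); when present it
    // overrides both the configured dlqName and the "error.<topic>.<group>" fallback.
    @Bean
    public DlqDestinationResolver dlqDestinationResolver() {
        return (record, exception) -> record.topic() + ".dlq";
    }
}

Per the logic above, a single DlqPartitionFunction bean wins over the fallback function, and a DlqDestinationResolver takes precedence over any configured DLQ name.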
Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.
The class ReactorKafkaBinder, method createConsumerEndpoint.
@Override
protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group,
        ExtendedConsumerProperties<KafkaConsumerProperties> properties) throws Exception {

    boolean anonymous = !StringUtils.hasText(group);
    String consumerGroup = anonymous ? "anonymous." + UUID.randomUUID().toString() : group;
    Map<String, Object> configs = BindingUtils.createConsumerConfigs(anonymous, consumerGroup, properties,
            this.configurationProperties);
    if (this.consumerConfigCustomizer != null) {
        this.consumerConfigCustomizer.configure(configs, properties.getBindingName(), destination.getName());
    }
    MessageConverter converter = BindingUtils.getConsumerMessageConverter(getApplicationContext(), properties,
            this.configurationProperties);
    Assert.isInstanceOf(RecordMessageConverter.class, converter);
    ReceiverOptions<Object, Object> opts = ReceiverOptions.create(configs)
            .addAssignListener(parts -> logger.info("Assigned: " + parts))
            .subscription(Collections.singletonList(destination.getName()));

    class ReactorMessageProducer extends MessageProducerSupport {

        private final List<KafkaReceiver<Object, Object>> receivers = new ArrayList<>();

        ReactorMessageProducer() {
            for (int i = 0; i < properties.getConcurrency(); i++) {
                this.receivers.add(KafkaReceiver.create(opts));
            }
        }

        @SuppressWarnings("unchecked")
        @Override
        protected void doStart() {
            List<Flux<Message<Object>>> fluxes = new ArrayList<>();
            int concurrency = properties.getConcurrency();
            for (int i = 0; i < concurrency; i++) {
                fluxes.add(this.receivers.get(i).receive()
                        .map(record -> (Message<Object>) ((RecordMessageConverter) converter)
                                .toMessage(record, null, null, null)));
            }
            if (concurrency == 1) {
                subscribeToPublisher(fluxes.get(0));
            }
            else {
                subscribeToPublisher(Flux.merge(fluxes));
            }
        }

    }

    return new ReactorMessageProducer();
}
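The inner ReactorMessageProducer creates one KafkaReceiver per unit of concurrency and merges the resulting fluxes into a single stream. A standalone sketch of the same pattern using Reactor Kafka directly; the broker address, group id, and String deserializers are placeholder assumptions:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import reactor.core.publisher.Flux;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.receiver.ReceiverRecord;

public final class ReactorConcurrencyExample {

    public static Flux<ReceiverRecord<String, String>> receiveConcurrently(String topic, int concurrency) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group"); // assumed group id
        configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        ReceiverOptions<String, String> options = ReceiverOptions.<String, String>create(configs)
                .subscription(Collections.singletonList(topic));

        // Mirror of the binder's doStart(): one KafkaReceiver per unit of concurrency,
        // merged into a single Flux when concurrency > 1.
        List<Flux<ReceiverRecord<String, String>>> fluxes = new ArrayList<>();
        for (int i = 0; i < concurrency; i++) {
            fluxes.add(KafkaReceiver.create(options).receive());
        }
        return concurrency == 1 ? fluxes.get(0) : Flux.merge(fluxes);
    }
}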
Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.
The class KafkaBinderTests, method testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff.
@Test
@SuppressWarnings("unchecked")
void testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff() throws Exception {
    Binder binder = getBinder();
    DirectChannel moduleOutputChannel = createBindableChannel("output",
            createProducerBindingProperties(createProducerProperties()));
    QueueChannel moduleInputChannel = new QueueChannel();
    Binding<MessageChannel> producerBinding = binder.bindProducer(
            "testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", moduleOutputChannel, createProducerProperties());
    ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
    consumerProperties.getExtension().setAckMode(ContainerProperties.AckMode.MANUAL);
    Binding<MessageChannel> consumerBinding = binder.bindConsumer(
            "testManualAckSucceedsWhenAutoCommitOffsetIsTurnedOff", "test", moduleInputChannel, consumerProperties);
    String testPayload1 = "foo" + UUID.randomUUID().toString();
    Message<?> message1 = org.springframework.integration.support.MessageBuilder
            .withPayload(testPayload1.getBytes()).build();
    // Let the consumer actually bind to the producer before sending a message
    binderBindUnbindLatency();
    moduleOutputChannel.send(message1);
    Message<?> receivedMessage = receive(moduleInputChannel);
    assertThat(receivedMessage).isNotNull();
    assertThat(receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT)).isNotNull();
    Acknowledgment acknowledgment = receivedMessage.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT,
            Acknowledgment.class);
    try {
        acknowledgment.acknowledge();
    }
    catch (Exception e) {
        fail("Acknowledge must not throw an exception");
    }
    finally {
        producerBinding.unbind();
        consumerBinding.unbind();
    }
}
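The test exercises the manual acknowledgment path end to end: with AckMode.MANUAL the binder stops committing offsets itself and instead exposes the Acknowledgment in a message header. A minimal sketch of the equivalent in an application handler (the configuration class and bean names are hypothetical; the ackMode setting corresponds to the spring.cloud.stream.kafka.bindings.<binding>.consumer.ackMode property):

import java.util.function.Consumer;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.Message;

@Configuration
public class ManualAckConfig {

    @Bean
    public Consumer<Message<byte[]>> process() {
        return message -> {
            // The binder populates this header when ackMode is MANUAL.
            Acknowledgment acknowledgment =
                    message.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment.class);
            // ... handle the payload, then commit the offset explicitly ...
            if (acknowledgment != null) {
                acknowledgment.acknowledge();
            }
        };
    }
}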
Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.
The class KafkaBinderTests, method testAutoCreateTopicsDisabledOnBinderStillWorksAsLongAsBrokerCreatesTopic.
@Test
@SuppressWarnings("unchecked")
void testAutoCreateTopicsDisabledOnBinderStillWorksAsLongAsBrokerCreatesTopic() throws Exception {
    KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
    configurationProperties.setAutoCreateTopics(false);
    Binder binder = getBinder(configurationProperties);
    BindingProperties producerBindingProperties = createProducerBindingProperties(createProducerProperties());
    DirectChannel output = createBindableChannel("output", producerBindingProperties);
    ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
    DirectChannel input = createBindableChannel("input", createConsumerBindingProperties(consumerProperties));
    String testTopicName = "createdByBroker-" + System.currentTimeMillis();
    Binding<MessageChannel> producerBinding = binder.bindProducer(testTopicName, output,
            producerBindingProperties.getProducer());
    String testPayload = "foo1-" + UUID.randomUUID().toString();
    output.send(new GenericMessage<>(testPayload));
    Binding<MessageChannel> consumerBinding = binder.bindConsumer(testTopicName, "test", input, consumerProperties);
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<Message<byte[]>> inboundMessageRef = new AtomicReference<>();
    input.subscribe(message1 -> {
        try {
            inboundMessageRef.set((Message<byte[]>) message1);
        }
        finally {
            latch.countDown();
        }
    });
    Assert.isTrue(latch.await(5, TimeUnit.SECONDS), "Failed to receive message");
    assertThat(inboundMessageRef.get()).isNotNull();
    assertThat(new String(inboundMessageRef.get().getPayload(), StandardCharsets.UTF_8)).isEqualTo(testPayload);
    producerBinding.unbind();
    consumerBinding.unbind();
}
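With autoCreateTopics disabled on the binder, the test depends on the broker itself creating the topic on first use (auto.create.topics.enable=true on the broker). Outside the test, one could confirm broker-side creation with the Kafka AdminClient; this sketch is not part of the test and assumes only a reachable bootstrap server:

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public final class TopicCreationCheck {

    // Returns true if the topic exists on the broker, e.g. because the broker
    // auto-created it when the producer first wrote to it.
    public static boolean topicExists(String bootstrapServers, String topic) throws InterruptedException {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        try (AdminClient admin = AdminClient.create(props)) {
            admin.describeTopics(Collections.singletonList(topic)).all().get();
            return true;
        }
        catch (ExecutionException e) {
            // Typically wraps UnknownTopicOrPartitionException when the topic is absent.
            return false;
        }
    }
}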
Use of org.springframework.cloud.stream.binder.kafka.properties.KafkaConsumerProperties in project spring-cloud-stream by spring-cloud.
The class KafkaBinderTests, method testInternalHeadersNotPropagatedGuts.
public void testInternalHeadersNotPropagatedGuts(String name, String[] headerPatterns, KafkaHeaderMapper mapper)
        throws Exception {

    KafkaTestBinder binder;
    if (mapper == null) {
        binder = getBinder();
    }
    else {
        KafkaBinderConfigurationProperties binderConfiguration = createConfigurationProperties();
        binderConfiguration.setHeaderMapperBeanName("headerMapper");
        KafkaTopicProvisioner kafkaTopicProvisioner = new KafkaTopicProvisioner(binderConfiguration,
                new TestKafkaProperties(), null);
        try {
            kafkaTopicProvisioner.afterPropertiesSet();
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
        binder = new KafkaTestBinder(binderConfiguration, kafkaTopicProvisioner);
        ((GenericApplicationContext) binder.getApplicationContext()).registerBean("headerMapper",
                KafkaHeaderMapper.class, () -> mapper);
    }
    ExtendedProducerProperties<KafkaProducerProperties> producerProperties = createProducerProperties();
    producerProperties.getExtension().setHeaderPatterns(headerPatterns);
    DirectChannel output = createBindableChannel("output", createProducerBindingProperties(producerProperties));
    output.setBeanName(name + ".out");
    Binding<MessageChannel> producerBinding = binder.bindProducer(name + ".1", output, producerProperties);
    QueueChannel input = new QueueChannel();
    input.setBeanName(name + ".in");
    ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
    Binding<MessageChannel> consumerBinding = binder.bindConsumer(name + ".0", name, input, consumerProperties);
    Map<String, Object> producerProps = KafkaTestUtils.producerProps(embeddedKafka);
    KafkaTemplate template = new KafkaTemplate(new DefaultKafkaProducerFactory<>(producerProps));
    template.send(MessageBuilder.withPayload("internalHeaderPropagation")
            .setHeader(KafkaHeaders.TOPIC, name + ".0")
            .setHeader("someHeader", "someValue")
            .build());
    Message<?> consumed = input.receive(10_000);
    if (headerPatterns != null) {
        consumed = MessageBuilder.fromMessage(consumed).setHeader(headerPatterns[0], "bar").build();
    }
    output.send(consumed);
    Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(name, "false", embeddedKafka);
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    DefaultKafkaConsumerFactory cf = new DefaultKafkaConsumerFactory<>(consumerProps);
    Consumer consumer = cf.createConsumer();
    consumer.assign(Collections.singletonList(new TopicPartition(name + ".1", 0)));
    ConsumerRecords<?, ?> records = consumer.poll(Duration.ofSeconds(10));
    assertThat(records.count()).isEqualTo(1);
    ConsumerRecord<?, ?> received = records.iterator().next();
    assertThat(received.value()).isEqualTo("internalHeaderPropagation".getBytes());
    Header header = received.headers().lastHeader(BinderHeaders.NATIVE_HEADERS_PRESENT);
    assertThat(header).isNull();
    header = received.headers().lastHeader(IntegrationMessageHeaderAccessor.DELIVERY_ATTEMPT);
    assertThat(header).isNull();
    header = received.headers().lastHeader(MessageHeaders.ID);
    assertThat(header).isNull();
    header = received.headers().lastHeader(MessageHeaders.TIMESTAMP);
    assertThat(header).isNull();
    assertThat(received.headers().lastHeader("someHeader")).isNotNull();
    if (headerPatterns != null) {
        assertThat(received.headers().lastHeader(headerPatterns[0])).isNotNull();
    }
    producerBinding.unbind();
    consumerBinding.unbind();
    consumer.close();
}
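When a mapper is passed in, the test registers it under the bean name "headerMapper" and points the binder at it via setHeaderMapperBeanName. In a regular application the same wiring might look like the sketch below; the pattern list is an illustrative assumption, and DefaultKafkaHeaderMapper is spring-kafka's stock KafkaHeaderMapper implementation:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.support.DefaultKafkaHeaderMapper;
import org.springframework.kafka.support.KafkaHeaderMapper;

@Configuration
public class HeaderMapperConfig {

    // Referenced by spring.cloud.stream.kafka.binder.headerMapperBeanName=headerMapper,
    // mirroring binderConfiguration.setHeaderMapperBeanName("headerMapper") in the test above.
    // The patterns are illustrative: map everything except the internal id/timestamp headers.
    @Bean(name = "headerMapper")
    public KafkaHeaderMapper headerMapper() {
        return new DefaultKafkaHeaderMapper("!id", "!timestamp", "*");
    }
}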