Use of io.micronaut.inject.ExecutableMethod in project micronaut-kafka by micronaut-projects.
The class KafkaConsumerProcessor, method createConsumerThreadPollLoop:
private void createConsumerThreadPollLoop(final ExecutableMethod<?, ?> method, final ConsumerState consumerState) {
    final boolean isBatch = method.isTrue(KafkaListener.class, "batch");
    final Duration pollTimeout = method.getValue(KafkaListener.class, "pollTimeout", Duration.class)
            .orElseGet(() -> Duration.ofMillis(100));
    final Optional<Argument<?>> consumerArg = Arrays.stream(method.getArguments())
            .filter(arg -> Consumer.class.isAssignableFrom(arg.getType()))
            .findFirst();
    final Optional<Argument<?>> ackArg = Arrays.stream(method.getArguments())
            .filter(arg -> Acknowledgement.class.isAssignableFrom(arg.getType()))
            .findFirst();
    try (Consumer<?, ?> kafkaConsumer = consumerState.kafkaConsumer) {
        final boolean trackPartitions = ackArg.isPresent()
                || consumerState.offsetStrategy == OffsetStrategy.SYNC_PER_RECORD
                || consumerState.offsetStrategy == OffsetStrategy.ASYNC_PER_RECORD;
        final Map<Argument<?>, Object> boundArguments = new HashMap<>(2);
        consumerArg.ifPresent(argument -> boundArguments.put(argument, kafkaConsumer));
        //noinspection InfiniteLoopStatement
        while (true) {
            consumerState.assignments = Collections.unmodifiableSet(kafkaConsumer.assignment());
            if (consumerState.autoPaused) {
                consumerState.pause(consumerState.assignments);
                kafkaConsumer.pause(consumerState.assignments);
            }
            boolean failed = true;
            try {
                consumerState.pauseTopicPartitions();
                final ConsumerRecords<?, ?> consumerRecords = kafkaConsumer.poll(pollTimeout);
                failed = false;
                consumerState.resumeTopicPartitions();
                if (consumerRecords == null || consumerRecords.count() <= 0) {
                    // No consumer records to process
                    continue;
                }
                if (isBatch) {
                    failed = !processConsumerRecordsAsBatch(consumerState, method, boundArguments, consumerRecords);
                } else {
                    failed = !processConsumerRecords(consumerState, method, boundArguments, trackPartitions, ackArg, consumerRecords);
                }
                if (!failed) {
                    if (consumerState.offsetStrategy == OffsetStrategy.SYNC) {
                        try {
                            kafkaConsumer.commitSync();
                        } catch (CommitFailedException e) {
                            handleException(consumerState, null, e);
                        }
                    } else if (consumerState.offsetStrategy == OffsetStrategy.ASYNC) {
                        kafkaConsumer.commitAsync(resolveCommitCallback(consumerState.consumerBean));
                    }
                }
            } catch (WakeupException e) {
                try {
                    if (!failed && consumerState.offsetStrategy != OffsetStrategy.DISABLED) {
                        kafkaConsumer.commitSync();
                    }
                } catch (Throwable ex) {
                    LOG.warn("Error committing Kafka offsets on shutdown: {}", ex.getMessage(), ex);
                }
                throw e;
            } catch (Throwable e) {
                handleException(consumerState, null, e);
            }
        }
    } catch (WakeupException e) {
        // ignore for shutdown
    }
}
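For context, here is a minimal sketch of the kind of listener bean this poll loop drives. It assumes the standard @KafkaListener, @Topic, and Acknowledgement types from micronaut-kafka and micronaut-messaging; the class name, topic name, and configuration values are illustrative, not taken from the project above. The Acknowledgement parameter is what makes trackPartitions true in the loop, and with the offset strategy disabled the method is responsible for acknowledging each record itself.

import io.micronaut.configuration.kafka.annotation.KafkaListener;
import io.micronaut.configuration.kafka.annotation.OffsetStrategy;
import io.micronaut.configuration.kafka.annotation.Topic;
import io.micronaut.messaging.Acknowledgement;

// Hypothetical listener; names and values are illustrative only.
@KafkaListener(offsetStrategy = OffsetStrategy.DISABLED, pollTimeout = "200ms")
public class ProductListener {

    // The Acknowledgement parameter causes the poll loop to track offsets per
    // record (trackPartitions == true) rather than committing whole polls.
    @Topic("products")
    void receive(String body, Acknowledgement acknowledgement) {
        // process the record, then acknowledge it manually
        acknowledgement.ack();
    }
}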
Use of io.micronaut.inject.ExecutableMethod in project micronaut-kafka by micronaut-projects.
The class KafkaConsumerProcessor, method handleResultFlux:
@SuppressWarnings({"SubscriberImplementation", "unchecked"})
private void handleResultFlux(ConsumerState consumerState, ExecutableMethod<?, ?> method, ConsumerRecord<?, ?> consumerRecord,
                              Flux<?> resultFlowable, boolean isBlocking, ConsumerRecords<?, ?> consumerRecords) {
    Flux<RecordMetadata> recordMetadataProducer = resultFlowable.flatMap((Function<Object, Publisher<RecordMetadata>>) value -> {
        if (consumerState.sendToDestinationTopics != null) {
            Object key = consumerRecord.key();
            if (value != null) {
                Producer kafkaProducer;
                if (consumerState.useSendOffsetsToTransaction) {
                    kafkaProducer = transactionalProducerRegistry.getTransactionalProducer(
                            consumerState.producerClientId,
                            consumerState.producerTransactionalId,
                            Argument.of(byte[].class),
                            Argument.of(Object.class));
                } else {
                    kafkaProducer = producerRegistry.getProducer(
                            consumerState.producerClientId == null ? consumerState.groupId : consumerState.producerClientId,
                            Argument.of((Class) (key != null ? key.getClass() : byte[].class)),
                            Argument.of(value.getClass()));
                }
                return Flux.create(emitter -> {
                    try {
                        if (consumerState.useSendOffsetsToTransaction) {
                            try {
                                LOG.trace("Beginning transaction for producer: {}", consumerState.producerTransactionalId);
                                kafkaProducer.beginTransaction();
                            } catch (ProducerFencedException e) {
                                handleProducerFencedException(kafkaProducer, e);
                            }
                        }
                        for (String destinationTopic : consumerState.sendToDestinationTopics) {
                            if (consumerState.isMessagesIterableReturnType) {
                                Iterable<KafkaMessage> messages = (Iterable<KafkaMessage>) value;
                                for (KafkaMessage message : messages) {
                                    ProducerRecord record = createFromMessage(destinationTopic, message);
                                    kafkaProducer.send(record, (metadata, exception) -> {
                                        if (exception != null) {
                                            emitter.error(exception);
                                        } else {
                                            emitter.next(metadata);
                                        }
                                    });
                                }
                            } else {
                                ProducerRecord record;
                                if (consumerState.isMessageReturnType) {
                                    record = createFromMessage(destinationTopic, (KafkaMessage) value);
                                } else {
                                    record = new ProducerRecord(destinationTopic, null, key, value, consumerRecord.headers());
                                }
                                LOG.trace("Sending record: {} for producer: {} {}", record, kafkaProducer, consumerState.producerTransactionalId);
                                kafkaProducer.send(record, (metadata, exception) -> {
                                    if (exception != null) {
                                        emitter.error(exception);
                                    } else {
                                        emitter.next(metadata);
                                    }
                                });
                            }
                        }
                        if (consumerState.useSendOffsetsToTransaction) {
                            Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = new HashMap<>();
                            for (TopicPartition partition : consumerRecords.partitions()) {
                                List<? extends ConsumerRecord<?, ?>> partitionedRecords = consumerRecords.records(partition);
                                long offset = partitionedRecords.get(partitionedRecords.size() - 1).offset();
                                offsetsToCommit.put(partition, new OffsetAndMetadata(offset + 1));
                            }
                            try {
                                LOG.trace("Sending offsets: {} to transaction for producer: {} and customer group id: {}",
                                        offsetsToCommit, consumerState.producerTransactionalId, consumerState.groupId);
                                kafkaProducer.sendOffsetsToTransaction(offsetsToCommit, consumerState.groupId);
                                LOG.trace("Committing transaction for producer: {}", consumerState.producerTransactionalId);
                                kafkaProducer.commitTransaction();
                                LOG.trace("Committed transaction for producer: {}", consumerState.producerTransactionalId);
                            } catch (ProducerFencedException e) {
                                handleProducerFencedException(kafkaProducer, e);
                            }
                        }
                        emitter.complete();
                    } catch (Exception e) {
                        if (consumerState.useSendOffsetsToTransaction) {
                            try {
                                LOG.trace("Aborting transaction for producer: {} because of error: {}",
                                        consumerState.producerTransactionalId, e.getMessage());
                                kafkaProducer.abortTransaction();
                            } catch (ProducerFencedException ex) {
                                handleProducerFencedException(kafkaProducer, ex);
                            }
                        }
                        emitter.error(e);
                    }
                });
            }
            return Flux.empty();
        }
        return Flux.empty();
    });
    recordMetadataProducer = recordMetadataProducer.onErrorResume((Function<Throwable, Publisher<RecordMetadata>>) throwable -> {
        handleException(consumerState.consumerBean, new KafkaListenerException(
                "Error occurred processing record [" + consumerRecord + "] with Kafka reactive consumer [" + method + "]: " + throwable.getMessage(),
                throwable, consumerState.consumerBean, consumerState.kafkaConsumer, consumerRecord));
        if (consumerState.redelivery) {
            LOG.debug("Attempting redelivery of record [{}] following error", consumerRecord);
            Object key = consumerRecord.key();
            Object value = consumerRecord.value();
            if (key != null && value != null) {
                Producer kafkaProducer = producerRegistry.getProducer(
                        consumerState.producerClientId == null ? consumerState.groupId : consumerState.producerClientId,
                        Argument.of(key.getClass()),
                        Argument.of(value.getClass()));
                ProducerRecord record = new ProducerRecord(consumerRecord.topic(), consumerRecord.partition(), key, value, consumerRecord.headers());
                return producerSend(consumerState, kafkaProducer, record).doOnError(ex -> {
                    handleException(consumerState.consumerBean, new KafkaListenerException(
                            "Redelivery failed for record [" + consumerRecord + "] with Kafka reactive consumer [" + method + "]: " + throwable.getMessage(),
                            throwable, consumerState.consumerBean, consumerState.kafkaConsumer, consumerRecord));
                });
            }
        }
        return Flux.empty();
    });
    if (isBlocking) {
        List<RecordMetadata> listRecords = recordMetadataProducer.collectList().block();
        LOG.trace("Method [{}] produced record metadata: {}", method, listRecords);
    } else {
        recordMetadataProducer.subscribe(recordMetadata -> LOG.trace("Method [{}] produced record metadata: {}", method, recordMetadata));
    }
}
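As a point of reference, a reactive listener whose result this method would forward might look like the following sketch. It assumes @SendTo from io.micronaut.messaging.annotation and Reactor's Flux; the class and topic names are invented for illustration. Each value emitted by the returned Flux is sent to the destination topic, and when a transactional producer is configured for the listener, the offsets of the polled records are committed through sendOffsetsToTransaction as the listing above shows.

import io.micronaut.configuration.kafka.annotation.KafkaListener;
import io.micronaut.configuration.kafka.annotation.Topic;
import io.micronaut.messaging.annotation.SendTo;
import reactor.core.publisher.Flux;

// Hypothetical listener; topic names are illustrative only.
@KafkaListener
public class WordCounter {

    // handleResultFlux sends every emitted word to the "words" topic and
    // logs the resulting RecordMetadata, blocking or not per isBlocking.
    @Topic("sentences")
    @SendTo("words")
    Flux<String> split(String sentence) {
        return Flux.fromArray(sentence.split(" "));
    }
}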
Use of io.micronaut.inject.ExecutableMethod in project micronaut-nats by micronaut-projects.
The class NatsConsumerAdvice, method process:
@Override
public void process(BeanDefinition<?> beanDefinition, ExecutableMethod<?, ?> method) {
    if (method.hasAnnotation(NatsListener.class)) {
        AnnotationValue<Subject> subjectAnn = method.getAnnotation(Subject.class);
        if (subjectAnn != null) {
            String subject = subjectAnn.getRequiredValue(String.class);
            String connectionName = method.findAnnotation(NatsConnection.class)
                    .flatMap(conn -> conn.get("connection", String.class))
                    .orElse(NatsConnection.DEFAULT_CONNECTION);
            io.micronaut.context.Qualifier<Object> qualifer = beanDefinition.getAnnotationTypeByStereotype("javax.inject.Qualifier")
                    .map(type -> Qualifiers.byAnnotation(beanDefinition, type))
                    .orElse(null);
            Class<Object> beanType = (Class<Object>) beanDefinition.getBeanType();
            Class<?> returnTypeClass = method.getReturnType().getType();
            boolean isVoid = returnTypeClass == Void.class || returnTypeClass == void.class;
            Object bean = beanContext.findBean(beanType, qualifer)
                    .orElseThrow(() -> new MessageListenerException("Could not find the bean to execute the method " + method));
            Connection connection = beanContext.getBean(Connection.class, Qualifiers.byName(connectionName));
            DefaultExecutableBinder<Message> binder = new DefaultExecutableBinder<>();
            Dispatcher ds = connection.createDispatcher(msg -> {
                BoundExecutable boundExecutable = null;
                try {
                    boundExecutable = binder.bind(method, binderRegistry, msg);
                } catch (Throwable e) {
                    handleException(new NatsListenerException("An error occurred binding the message to the method", e, bean, msg));
                }
                if (boundExecutable != null) {
                    Object returnedValue = boundExecutable.invoke(bean);
                    if (!isVoid && StringUtils.isNotEmpty(msg.getReplyTo())) {
                        byte[] converted = null;
                        if (returnedValue != null) {
                            NatsMessageSerDes serDes = serDesRegistry.findSerdes(method.getReturnType().asArgument())
                                    .map(NatsMessageSerDes.class::cast)
                                    .orElseThrow(() -> new NatsListenerException(
                                            String.format("Could not find a serializer for the body argument of type [%s]", returnedValue.getClass().getName()), bean, msg));
                            converted = serDes.serialize(returnedValue);
                        }
                        connection.publish(msg.getReplyTo(), converted);
                    }
                }
            });
            Optional<String> queueOptional = subjectAnn.get("queue", String.class);
            if (queueOptional.isPresent() && !queueOptional.get().isEmpty()) {
                ds.subscribe(subject, queueOptional.get());
            } else {
                ds.subscribe(subject);
            }
            consumerDispatchers.put(ds, subject);
        }
    }
}
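A listener that this advice would wire up could look roughly like the sketch below. It assumes the @NatsListener and @Subject annotations from micronaut-nats; the subject and queue names are invented. Because the method returns a value, the dispatcher created in process() serializes the result and publishes it to the message's reply-to subject when one is present.

import io.micronaut.nats.annotation.NatsListener;
import io.micronaut.nats.annotation.Subject;

// Hypothetical listener; subject and queue names are illustrative only.
@NatsListener
public class OrderListener {

    // Non-void return type: the reply is serialized and published to
    // msg.getReplyTo() by the dispatcher created in process() above.
    @Subject(value = "orders", queue = "order-workers")
    public String handle(String order) {
        return "received: " + order;
    }
}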
Use of io.micronaut.inject.ExecutableMethod in project micronaut-gcp by micronaut-projects.
The class PubSubConsumerAdvice, method process:
@Override
public void process(BeanDefinition<?> beanDefinition, ExecutableMethod<?, ?> method) {
    if (beanDefinition.hasDeclaredAnnotation(PubSubListener.class)) {
        AnnotationValue<Subscription> subscriptionAnnotation = method.getAnnotation(Subscription.class);
        io.micronaut.context.Qualifier<Object> qualifier = beanDefinition.getAnnotationTypeByStereotype(Qualifier.class)
                .map(type -> Qualifiers.byAnnotation(beanDefinition, type))
                .orElse(null);
        boolean hasAckArg = Arrays.stream(method.getArguments())
                .anyMatch(arg -> Acknowledgement.class.isAssignableFrom(arg.getType()));
        Class<Object> beanType = (Class<Object>) beanDefinition.getBeanType();
        Object bean = beanContext.findBean(beanType, qualifier)
                .orElseThrow(() -> new MessageListenerException("Could not find the bean to execute the method " + method));
        DefaultExecutableBinder<PubSubConsumerState> binder = new DefaultExecutableBinder<>();
        if (subscriptionAnnotation != null) {
            String subscriptionName = subscriptionAnnotation.getRequiredValue(String.class);
            ProjectSubscriptionName projectSubscriptionName = PubSubSubscriptionUtils.toProjectSubscriptionName(subscriptionName, googleCloudConfiguration.getProjectId());
            String defaultContentType = subscriptionAnnotation.stringValue("contentType").orElse(MediaType.APPLICATION_JSON);
            String configuration = subscriptionAnnotation.stringValue("configuration").orElse("");
            MessageReceiver receiver = (PubsubMessage message, AckReplyConsumer ackReplyConsumer) -> {
                String messageContentType = message.getAttributesMap().getOrDefault("Content-Type", "");
                String contentType = Optional.of(messageContentType).filter(StringUtils::isNotEmpty).orElse(defaultContentType);
                DefaultPubSubAcknowledgement pubSubAcknowledgement = new DefaultPubSubAcknowledgement(ackReplyConsumer);
                PubSubConsumerState consumerState = new PubSubConsumerState(message, ackReplyConsumer, projectSubscriptionName, contentType);
                try {
                    BoundExecutable executable = null;
                    try {
                        executable = binder.bind(method, binderRegistry, consumerState);
                    } catch (Exception ex) {
                        handleException(new PubSubMessageReceiverException("Error binding message to the method", ex, bean, consumerState));
                    }
                    // Discard result
                    executable.invoke(bean);
                    if (!hasAckArg) {
                        // if manual ack is not specified we auto ack message after method execution
                        pubSubAcknowledgement.ack();
                    } else {
                        Optional<Object> boundAck = Arrays.stream(executable.getBoundArguments())
                                .filter(o -> (o instanceof DefaultPubSubAcknowledgement))
                                .findFirst();
                        if (boundAck.isPresent()) {
                            DefaultPubSubAcknowledgement manualAck = (DefaultPubSubAcknowledgement) boundAck.get();
                            if (!manualAck.isClientAck()) {
                                logger.warn("Method {} was executed and no message acknowledge detected. Did you forget to invoke ack()/nack()?", method.getName());
                            }
                        }
                    }
                } catch (Exception e) {
                    handleException(new PubSubMessageReceiverException("Error handling message", e, bean, consumerState));
                }
            };
            try {
                this.subscriberFactory.createSubscriber(new SubscriberFactoryConfig(projectSubscriptionName, receiver, configuration, pubSubConfigurationProperties.getSubscribingExecutor()));
            } catch (Exception e) {
                throw new PubSubListenerException("Failed to create subscriber", e);
            }
        }
    }
}
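For comparison, a listener this advice would bind might look like the following sketch, assuming the @PubSubListener and @Subscription annotations from micronaut-gcp; the class and subscription names are illustrative. Declaring an Acknowledgement parameter makes hasAckArg true above, so the receiver skips the automatic ack and expects the method to call ack() or nack() itself.

import io.micronaut.gcp.pubsub.annotation.PubSubListener;
import io.micronaut.gcp.pubsub.annotation.Subscription;
import io.micronaut.messaging.Acknowledgement;

// Hypothetical listener; the subscription name is illustrative only.
@PubSubListener
public class AnimalListener {

    // With an Acknowledgement argument present, the receiver above does not
    // auto-ack; forgetting to call ack()/nack() triggers the warning log.
    @Subscription("animals")
    public void onMessage(byte[] data, Acknowledgement acknowledgement) {
        // process the payload, then acknowledge
        acknowledgement.ack();
    }
}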
Use of io.micronaut.inject.ExecutableMethod in project micronaut-jms by micronaut-projects.
The class AbstractJMSListenerMethodProcessor, method registerListener:
private void registerListener(ExecutableMethod<?, ?> method, String connectionFactoryName, BeanDefinition<?> beanDefinition,
                              AnnotationValue<T> destinationAnnotation, JMSDestinationType type) {
    validateArguments(method);
    final Class<?> targetClass = Stream.of(method.getArguments())
            .filter(arg -> arg.isDeclaredAnnotationPresent(MessageBody.class) || arg.isDeclaredAnnotationPresent(io.micronaut.jms.annotations.Message.class))
            .findAny()
            .map(Argument::getClass)
            .get();
    final String destination = destinationAnnotation.getRequiredValue(String.class);
    final int acknowledgeMode = destinationAnnotation.getRequiredValue("acknowledgeMode", Integer.class);
    final boolean transacted = destinationAnnotation.getRequiredValue("transacted", Boolean.class);
    final JMSListenerContainerFactory listenerFactory = beanContext.findBean(JMSListenerContainerFactory.class)
            .orElseThrow(() -> new IllegalStateException("No JMSListenerFactory configured"));
    final JMSConnectionPool connectionPool = beanContext.getBean(JMSConnectionPool.class, Qualifiers.byName(connectionFactoryName));
    final Object bean = beanContext.findBean(beanDefinition.getBeanType()).get();
    final ExecutorService executor = getExecutorService(destinationAnnotation);
    MessageListener listener = generateAndBindListener(bean, method, executor, CLIENT_ACKNOWLEDGE == acknowledgeMode);
    listenerFactory.registerListener(connectionPool, destination, listener, targetClass, transacted, acknowledgeMode, type);
}
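To round this out, a JMS listener method that registerListener would wire to a container might look like this sketch. It assumes @JMSListener and @Queue from io.micronaut.jms.annotations and @MessageBody from io.micronaut.messaging.annotation; the connection factory bean name and queue name are illustrative. The @MessageBody argument is the one the Stream above resolves, and transacted and acknowledgeMode are read from the destination annotation.

import io.micronaut.jms.annotations.JMSListener;
import io.micronaut.jms.annotations.Queue;
import io.micronaut.messaging.annotation.MessageBody;

// Hypothetical listener; connection factory and queue names are illustrative only.
@JMSListener("activeMqConnectionFactory")
public class MailConsumer {

    // The @MessageBody argument is what registerListener resolves before
    // handing the generated MessageListener to the container factory.
    @Queue("mail-queue")
    public void receive(@MessageBody String body) {
        // handle the message
    }
}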