Example 1 with AnnotationValue

Use of io.micronaut.core.annotation.AnnotationValue in project resilience4j by resilience4j.

From class RateLimiterInterceptor, method intercept:

@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    Optional<AnnotationValue<io.github.resilience4j.micronaut.annotation.RateLimiter>> opt = context.findAnnotation(io.github.resilience4j.micronaut.annotation.RateLimiter.class);
    if (!opt.isPresent()) {
        return context.proceed();
    }
    ExecutableMethod executableMethod = context.getExecutableMethod();
    final String name = executableMethod.stringValue(io.github.resilience4j.micronaut.annotation.RateLimiter.class, "name").orElse("default");
    RateLimiter rateLimiter = this.rateLimiterRegistry.rateLimiter(name);
    InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
    try {
        switch(interceptedMethod.resultType()) {
            case PUBLISHER:
                return interceptedMethod.handleResult(fallbackReactiveTypes(Flowable.fromPublisher(interceptedMethod.interceptResultAsPublisher()).compose(RateLimiterOperator.of(rateLimiter)), context));
            case COMPLETION_STAGE:
                return interceptedMethod.handleResult(fallbackForFuture(rateLimiter.executeCompletionStage(() -> {
                    try {
                        return interceptedMethod.interceptResultAsCompletionStage();
                    } catch (Exception e) {
                        throw new CompletionException(e);
                    }
                }), context));
            case SYNCHRONOUS:
                try {
                    return rateLimiter.executeCheckedSupplier(context::proceed);
                } catch (Throwable exception) {
                    return fallback(context, exception);
                }
            default:
                return interceptedMethod.unsupported();
        }
    } catch (Exception e) {
        return interceptedMethod.handleException(e);
    }
}
Also used : ExecutableMethod(io.micronaut.inject.ExecutableMethod) InterceptedMethod(io.micronaut.aop.InterceptedMethod) RateLimiter(io.github.resilience4j.ratelimiter.RateLimiter) CompletionException(java.util.concurrent.CompletionException) AnnotationValue(io.micronaut.core.annotation.AnnotationValue)
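
For context, this interceptor only applies when the invoked method (or its declaring bean) carries the Micronaut @RateLimiter annotation whose AnnotationValue is read above. A minimal sketch of a bean it would intercept; the limiter name "backend" and the fallback method are illustrative assumptions, not taken from the source:

import io.github.resilience4j.micronaut.annotation.RateLimiter;
import jakarta.inject.Singleton;

@Singleton
public class BackendService {

    // The interceptor reads the "name" member from the annotation metadata and
    // resolves the rate limiter named "backend" from the RateLimiterRegistry.
    @RateLimiter(name = "backend", fallbackMethod = "rateLimitedFallback")
    public String query() {
        return "ok";
    }

    // Candidate for the interceptor's fallback(..) handling when the call is throttled.
    public String rateLimitedFallback() {
        return "rate limited";
    }
}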

Example 2 with AnnotationValue

Use of io.micronaut.core.annotation.AnnotationValue in project resilience4j by resilience4j.

From class BulkheadInterceptor, method intercept:

@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    Optional<AnnotationValue<io.github.resilience4j.micronaut.annotation.Bulkhead>> opt = context.findAnnotation(io.github.resilience4j.micronaut.annotation.Bulkhead.class);
    if (!opt.isPresent()) {
        return context.proceed();
    }
    final io.github.resilience4j.micronaut.annotation.Bulkhead.Type type = opt.get().enumValue("type", io.github.resilience4j.micronaut.annotation.Bulkhead.Type.class).orElse(io.github.resilience4j.micronaut.annotation.Bulkhead.Type.SEMAPHORE);
    if (type == io.github.resilience4j.micronaut.annotation.Bulkhead.Type.THREADPOOL) {
        return handleThreadPoolBulkhead(context, opt.get());
    } else {
        final String name = opt.get().stringValue("name").orElse("default");
        Bulkhead bulkhead = this.bulkheadRegistry.bulkhead(name);
        InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
        try {
            switch(interceptedMethod.resultType()) {
                case PUBLISHER:
                    return interceptedMethod.handleResult(fallbackReactiveTypes(Flowable.fromPublisher(interceptedMethod.interceptResultAsPublisher()).compose(BulkheadOperator.of(bulkhead)), context));
                case COMPLETION_STAGE:
                    return interceptedMethod.handleResult(fallbackForFuture(bulkhead.executeCompletionStage(() -> {
                        try {
                            return interceptedMethod.interceptResultAsCompletionStage();
                        } catch (Exception e) {
                            throw new CompletionException(e);
                        }
                    }), context));
                case SYNCHRONOUS:
                    try {
                        return bulkhead.executeCheckedSupplier(context::proceed);
                    } catch (Throwable exception) {
                        return fallback(context, exception);
                    }
                default:
                    return interceptedMethod.unsupported();
            }
        } catch (Exception e) {
            return interceptedMethod.handleException(e);
        }
    }
}
Also used : InterceptedMethod(io.micronaut.aop.InterceptedMethod) CompletionException(java.util.concurrent.CompletionException) ExecutionException(java.util.concurrent.ExecutionException) AnnotationValue(io.micronaut.core.annotation.AnnotationValue)
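
The enumValue call above is what selects between semaphore and thread-pool isolation. A minimal sketch of a bean this interceptor would handle, assuming an illustrative bulkhead name "downstream"; the "name" and "type" members are the ones the interceptor reads:

import io.github.resilience4j.micronaut.annotation.Bulkhead;
import jakarta.inject.Singleton;

@Singleton
public class DownstreamService {

    // "type" defaults to SEMAPHORE in the interceptor; Type.THREADPOOL would
    // route the call through handleThreadPoolBulkhead(..) instead.
    @Bulkhead(name = "downstream", type = Bulkhead.Type.SEMAPHORE)
    public String call() {
        return "ok";
    }
}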

Example 3 with AnnotationValue

Use of io.micronaut.core.annotation.AnnotationValue in project resilience4j by resilience4j.

From class RetryInterceptor, method intercept:

@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    Optional<AnnotationValue<io.github.resilience4j.micronaut.annotation.Retry>> opt = context.findAnnotation(io.github.resilience4j.micronaut.annotation.Retry.class);
    if (!opt.isPresent()) {
        return context.proceed();
    }
    ExecutableMethod executableMethod = context.getExecutableMethod();
    final String name = executableMethod.stringValue(io.github.resilience4j.micronaut.annotation.Retry.class, "name").orElse("default");
    Retry retry = retryRegistry.retry(name);
    InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
    try {
        switch(interceptedMethod.resultType()) {
            case PUBLISHER:
                return interceptedMethod.handleResult(fallbackReactiveTypes(Flowable.fromPublisher(interceptedMethod.interceptResultAsPublisher()).compose(RetryTransformer.of(retry)), context));
            case COMPLETION_STAGE:
                return interceptedMethod.handleResult(fallbackForFuture(retry.executeCompletionStage(executorService, () -> {
                    try {
                        return interceptedMethod.interceptResultAsCompletionStage();
                    } catch (Exception e) {
                        throw new CompletionException(e);
                    }
                }), context));
            case SYNCHRONOUS:
                try {
                    return retry.executeCheckedSupplier(context::proceed);
                } catch (Throwable exception) {
                    return fallback(context, exception);
                }
            default:
                return interceptedMethod.unsupported();
        }
    } catch (Exception e) {
        return interceptedMethod.handleException(e);
    }
}
Also used : ExecutableMethod(io.micronaut.inject.ExecutableMethod) InterceptedMethod(io.micronaut.aop.InterceptedMethod) CompletionException(java.util.concurrent.CompletionException) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) Retry(io.github.resilience4j.retry.Retry)
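
As with the other interceptors, the retry instance is looked up by the annotation's "name" member. A minimal sketch of a target bean; the name "flaky", the fallbackMethod member, and the recovery method's signature are illustrative assumptions:

import io.github.resilience4j.micronaut.annotation.Retry;
import jakarta.inject.Singleton;

@Singleton
public class FlakyService {

    // The interceptor resolves the retry named "flaky" from the RetryRegistry and
    // wraps this call via executeCheckedSupplier / executeCompletionStage.
    @Retry(name = "flaky", fallbackMethod = "recover")
    public String call() {
        return "ok";
    }

    public String recover() {
        return "fallback";
    }
}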

Example 4 with AnnotationValue

Use of io.micronaut.core.annotation.AnnotationValue in project micronaut-kafka by micronaut-projects.

From class KafkaClientIntroductionAdvice, method getProducer:

@SuppressWarnings("unchecked")
private ProducerState getProducer(MethodInvocationContext<?, ?> context) {
    ProducerKey key = new ProducerKey(context.getTarget(), context.getExecutableMethod());
    return producerMap.computeIfAbsent(key, producerKey -> {
        String clientId = context.stringValue(KafkaClient.class).orElse(null);
        List<ContextSupplier<Iterable<Header>>> headersSuppliers = new LinkedList<>();
        List<AnnotationValue<MessageHeader>> headers = context.getAnnotationValuesByType(MessageHeader.class);
        if (!headers.isEmpty()) {
            List<Header> kafkaHeaders = new ArrayList<>(headers.size());
            for (AnnotationValue<MessageHeader> header : headers) {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    kafkaHeaders.add(new RecordHeader(name, value.getBytes(StandardCharsets.UTF_8)));
                }
            }
            if (!kafkaHeaders.isEmpty()) {
                headersSuppliers.add(ctx -> kafkaHeaders);
            }
        }
        Argument keyArgument = null;
        Argument bodyArgument = null;
        ContextSupplier<String>[] topicSupplier = new ContextSupplier[1];
        topicSupplier[0] = ctx -> ctx.stringValue(Topic.class).filter(StringUtils::isNotEmpty).orElseThrow(() -> new MessagingClientException("No topic specified for method: " + context));
        ContextSupplier<Object> keySupplier = NULL_SUPPLIER;
        ContextSupplier<Object> valueSupplier = NULL_SUPPLIER;
        ContextSupplier<Long> timestampSupplier = NULL_SUPPLIER;
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> partitionFromProducerFn = (ctx, producer) -> null;
        Argument[] arguments = context.getArguments();
        for (int i = 0; i < arguments.length; i++) {
            int finalI = i;
            Argument<Object> argument = arguments[i];
            if (ProducerRecord.class.isAssignableFrom(argument.getType()) || argument.isAnnotationPresent(MessageBody.class)) {
                bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                valueSupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(KafkaKey.class)) {
                keyArgument = argument;
                keySupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(Topic.class)) {
                ContextSupplier<String> prevTopicSupplier = topicSupplier[0];
                topicSupplier[0] = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null) {
                        String topic = o.toString();
                        if (StringUtils.isNotEmpty(topic)) {
                            return topic;
                        }
                    }
                    return prevTopicSupplier.get(ctx);
                };
            } else if (argument.isAnnotationPresent(KafkaTimestamp.class)) {
                timestampSupplier = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o instanceof Long) {
                        return (Long) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartition.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null && Integer.class.isAssignableFrom(o.getClass())) {
                        return (Integer) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartitionKey.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object partitionKey = ctx.getParameterValues()[finalI];
                    if (partitionKey != null) {
                        Serializer serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer == null) {
                            serializer = new ByteArraySerializer();
                        }
                        String topic = topicSupplier[0].get(ctx);
                        byte[] partitionKeyBytes = serializer.serialize(topic, partitionKey);
                        return Utils.toPositive(Utils.murmur2(partitionKeyBytes)) % producer.partitionsFor(topic).size();
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(MessageHeader.class)) {
                final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
                String name = annotationMetadata.stringValue(MessageHeader.class, "name").orElseGet(() -> annotationMetadata.stringValue(MessageHeader.class).orElseGet(argument::getName));
                headersSuppliers.add(ctx -> {
                    Object headerValue = ctx.getParameterValues()[finalI];
                    if (headerValue != null) {
                        Serializer<Object> serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer != null) {
                            try {
                                return Collections.singleton(new RecordHeader(name, serializer.serialize(null, headerValue)));
                            } catch (Exception e) {
                                throw new MessagingClientException("Cannot serialize header argument [" + argument + "] for method [" + ctx + "]: " + e.getMessage(), e);
                            }
                        }
                    }
                    return Collections.emptySet();
                });
            } else {
                if (argument.isContainerType() && Header.class.isAssignableFrom(argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT).getType())) {
                    headersSuppliers.add(ctx -> {
                        Collection<Header> parameterHeaders = (Collection<Header>) ctx.getParameterValues()[finalI];
                        if (parameterHeaders != null) {
                            return parameterHeaders;
                        }
                        return Collections.emptySet();
                    });
                } else {
                    Class argumentType = argument.getType();
                    if (argumentType == Headers.class || argumentType == RecordHeaders.class) {
                        headersSuppliers.add(ctx -> {
                            Headers parameterHeaders = (Headers) ctx.getParameterValues()[finalI];
                            if (parameterHeaders != null) {
                                return parameterHeaders;
                            }
                            return Collections.emptySet();
                        });
                    }
                }
            }
        }
        if (bodyArgument == null) {
            for (int i = 0; i < arguments.length; i++) {
                int finalI = i;
                Argument argument = arguments[i];
                if (!argument.getAnnotationMetadata().hasStereotype(Bindable.class)) {
                    bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                    valueSupplier = ctx -> ctx.getParameterValues()[finalI];
                    break;
                }
            }
            if (bodyArgument == null) {
                throw new MessagingClientException("No valid message body argument found for method: " + context);
            }
        }
        AbstractKafkaProducerConfiguration configuration;
        if (clientId != null) {
            Optional<KafkaProducerConfiguration> namedConfig = beanContext.findBean(KafkaProducerConfiguration.class, Qualifiers.byName(clientId));
            if (namedConfig.isPresent()) {
                configuration = namedConfig.get();
            } else {
                configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
            }
        } else {
            configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
        }
        DefaultKafkaProducerConfiguration<?, ?> newConfiguration = new DefaultKafkaProducerConfiguration<>(configuration);
        Properties newProperties = newConfiguration.getConfig();
        String transactionalId = context.stringValue(KafkaClient.class, "transactionalId").filter(StringUtils::isNotEmpty).orElse(null);
        if (clientId != null) {
            newProperties.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        }
        if (transactionalId != null) {
            newProperties.putIfAbsent(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
        }
        context.getValue(KafkaClient.class, "maxBlock", Duration.class).ifPresent(maxBlock -> newProperties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(maxBlock.toMillis())));
        Integer ack = context.intValue(KafkaClient.class, "acks").orElse(KafkaClient.Acknowledge.DEFAULT);
        if (ack != KafkaClient.Acknowledge.DEFAULT) {
            String acksValue = ack == -1 ? "all" : String.valueOf(ack);
            newProperties.put(ProducerConfig.ACKS_CONFIG, acksValue);
        }
        context.findAnnotation(KafkaClient.class).map(ann -> ann.getProperties("properties", "name")).ifPresent(newProperties::putAll);
        LOG.debug("Creating new KafkaProducer.");
        if (!newProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> keySerializer = newConfiguration.getKeySerializer().orElse(null);
            if (keySerializer == null) {
                if (keyArgument != null) {
                    keySerializer = serdeRegistry.pickSerializer(keyArgument);
                } else {
                    keySerializer = new ByteArraySerializer();
                }
                LOG.debug("Using Kafka key serializer: {}", keySerializer);
                newConfiguration.setKeySerializer((Serializer) keySerializer);
            }
        }
        boolean isBatchSend = context.isTrue(KafkaClient.class, "batch");
        if (!newProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> valueSerializer = newConfiguration.getValueSerializer().orElse(null);
            if (valueSerializer == null) {
                valueSerializer = serdeRegistry.pickSerializer(isBatchSend ? bodyArgument.getFirstTypeVariable().orElse(bodyArgument) : bodyArgument);
                LOG.debug("Using Kafka value serializer: {}", valueSerializer);
                newConfiguration.setValueSerializer((Serializer) valueSerializer);
            }
        }
        Producer<?, ?> producer = beanContext.createBean(Producer.class, newConfiguration);
        boolean transactional = StringUtils.isNotEmpty(transactionalId);
        timestampSupplier = context.isTrue(KafkaClient.class, "timestamp") ? ctx -> System.currentTimeMillis() : timestampSupplier;
        Duration maxBlock = context.getValue(KafkaClient.class, "maxBlock", Duration.class).orElse(null);
        if (transactional) {
            producer.initTransactions();
        }
        ContextSupplier<Collection<Header>> headersSupplier = ctx -> {
            if (headersSuppliers.isEmpty()) {
                return null;
            }
            List<Header> headerList = new ArrayList<>(headersSuppliers.size());
            for (ContextSupplier<Iterable<Header>> supplier : headersSuppliers) {
                for (Header header : supplier.get(ctx)) {
                    headerList.add(header);
                }
            }
            if (headerList.isEmpty()) {
                return null;
            }
            return headerList;
        };
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> finalPartitionFromProducerFn = partitionFromProducerFn;
        ContextSupplier<Integer> partitionSupplier = ctx -> finalPartitionFromProducerFn.apply(ctx, producer);
        return new ProducerState(producer, keySupplier, topicSupplier[0], valueSupplier, timestampSupplier, partitionSupplier, headersSupplier, transactional, transactionalId, maxBlock, isBatchSend, bodyArgument);
    });
}
Also used : Topic(io.micronaut.configuration.kafka.annotation.Topic) Publishers(io.micronaut.core.async.publisher.Publishers) Bindable(io.micronaut.core.bind.annotation.Bindable) Arrays(java.util.Arrays) BeanContext(io.micronaut.context.BeanContext) BiFunction(java.util.function.BiFunction) LoggerFactory(org.slf4j.LoggerFactory) MessageHeader(io.micronaut.messaging.annotation.MessageHeader) KafkaPartition(io.micronaut.configuration.kafka.annotation.KafkaPartition) InterceptorBean(io.micronaut.aop.InterceptorBean) PreDestroy(javax.annotation.PreDestroy) KafkaPartitionKey(io.micronaut.configuration.kafka.annotation.KafkaPartitionKey) Duration(java.time.Duration) Map(java.util.Map) MessagingClientException(io.micronaut.messaging.exceptions.MessagingClientException) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) KafkaClient(io.micronaut.configuration.kafka.annotation.KafkaClient) Qualifiers(io.micronaut.inject.qualifiers.Qualifiers) AbstractKafkaProducerConfiguration(io.micronaut.configuration.kafka.config.AbstractKafkaProducerConfiguration) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) StandardCharsets(java.nio.charset.StandardCharsets) KafkaKey(io.micronaut.configuration.kafka.annotation.KafkaKey) KafkaTimestamp(io.micronaut.configuration.kafka.annotation.KafkaTimestamp) Objects(java.util.Objects) StringUtils(io.micronaut.core.util.StringUtils) List(java.util.List) SerdeRegistry(io.micronaut.configuration.kafka.serde.SerdeRegistry) Header(org.apache.kafka.common.header.Header) MethodInvocationContext(io.micronaut.aop.MethodInvocationContext) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) Optional(java.util.Optional) MessageBody(io.micronaut.messaging.annotation.MessageBody) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Headers(org.apache.kafka.common.header.Headers) CompletableFuture(java.util.concurrent.CompletableFuture) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) InterceptedMethod(io.micronaut.aop.InterceptedMethod) Function(java.util.function.Function) ExecutableMethod(io.micronaut.inject.ExecutableMethod) ArrayList(java.util.ArrayList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) MethodInterceptor(io.micronaut.aop.MethodInterceptor) Nullable(io.micronaut.core.annotation.Nullable) ReturnType(io.micronaut.core.type.ReturnType) Argument(io.micronaut.core.type.Argument) LinkedList(java.util.LinkedList) ConversionService(io.micronaut.core.convert.ConversionService) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) Subscriber(org.reactivestreams.Subscriber) KafkaProducerConfiguration(io.micronaut.configuration.kafka.config.KafkaProducerConfiguration) Utils(org.apache.kafka.common.utils.Utils) Logger(org.slf4j.Logger) Properties(java.util.Properties) Producer(org.apache.kafka.clients.producer.Producer) Publisher(org.reactivestreams.Publisher) Mono(reactor.core.publisher.Mono) TimeUnit(java.util.concurrent.TimeUnit) Flux(reactor.core.publisher.Flux) Serializer(org.apache.kafka.common.serialization.Serializer) Subscription(org.reactivestreams.Subscription) DefaultKafkaProducerConfiguration(io.micronaut.configuration.kafka.config.DefaultKafkaProducerConfiguration) AnnotationMetadata(io.micronaut.core.annotation.AnnotationMetadata) Collections(java.util.Collections)
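
getProducer(..) above is invoked for methods declared on a @KafkaClient interface; the argument loop binds each annotated parameter to one of the suppliers it builds. A minimal sketch of such an interface; the client id, topic, and header name are illustrative assumptions:

import io.micronaut.configuration.kafka.annotation.KafkaClient;
import io.micronaut.configuration.kafka.annotation.KafkaKey;
import io.micronaut.configuration.kafka.annotation.Topic;
import io.micronaut.messaging.annotation.MessageBody;
import io.micronaut.messaging.annotation.MessageHeader;

@KafkaClient("product-client")
public interface ProductClient {

    // @Topic feeds the topicSupplier, @KafkaKey the keySupplier, @MessageBody
    // the valueSupplier, and @MessageHeader one of the headersSuppliers above.
    @Topic("products")
    void send(@KafkaKey String id, @MessageBody String product, @MessageHeader("X-Token") String token);
}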

Example 5 with AnnotationValue

Use of io.micronaut.core.annotation.AnnotationValue in project micronaut-kafka by micronaut-projects.

From class KafkaConsumerProcessor, method process:

@Override
public void process(BeanDefinition<?> beanDefinition, ExecutableMethod<?, ?> method) {
    List<AnnotationValue<Topic>> topicAnnotations = method.getDeclaredAnnotationValuesByType(Topic.class);
    final AnnotationValue<KafkaListener> consumerAnnotation = method.getAnnotation(KafkaListener.class);
    if (CollectionUtils.isEmpty(topicAnnotations)) {
        topicAnnotations = beanDefinition.getDeclaredAnnotationValuesByType(Topic.class);
    }
    if (consumerAnnotation == null || CollectionUtils.isEmpty(topicAnnotations)) {
        // No topics to consume
        return;
    }
    final Class<?> beanType = beanDefinition.getBeanType();
    String groupId = consumerAnnotation.stringValue("groupId").filter(StringUtils::isNotEmpty).orElseGet(() -> applicationConfiguration.getName().orElse(beanType.getName()));
    if (consumerAnnotation.isTrue("uniqueGroupId")) {
        groupId = groupId + "_" + UUID.randomUUID();
    }
    final String clientId = consumerAnnotation.stringValue("clientId").filter(StringUtils::isNotEmpty).orElseGet(() -> applicationConfiguration.getName().map(s -> s + '-' + NameUtils.hyphenate(beanType.getSimpleName())).orElse(null));
    final OffsetStrategy offsetStrategy = consumerAnnotation.enumValue("offsetStrategy", OffsetStrategy.class).orElse(OffsetStrategy.AUTO);
    final AbstractKafkaConsumerConfiguration<?, ?> consumerConfigurationDefaults = beanContext.findBean(AbstractKafkaConsumerConfiguration.class, Qualifiers.byName(groupId)).orElse(defaultConsumerConfiguration);
    final DefaultKafkaConsumerConfiguration<?, ?> consumerConfiguration = new DefaultKafkaConsumerConfiguration<>(consumerConfigurationDefaults);
    final Properties properties = createConsumerProperties(consumerAnnotation, consumerConfiguration, clientId, groupId, offsetStrategy);
    configureDeserializers(method, consumerConfiguration);
    submitConsumerThreads(method, clientId, groupId, offsetStrategy, topicAnnotations, consumerAnnotation, consumerConfiguration, properties, beanType);
}
Also used : AbstractKafkaConsumerConfiguration(io.micronaut.configuration.kafka.config.AbstractKafkaConsumerConfiguration) OffsetStrategy(io.micronaut.configuration.kafka.annotation.OffsetStrategy) Properties(java.util.Properties) KafkaListener(io.micronaut.configuration.kafka.annotation.KafkaListener) DefaultKafkaConsumerConfiguration(io.micronaut.configuration.kafka.config.DefaultKafkaConsumerConfiguration) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) Topic(io.micronaut.configuration.kafka.annotation.Topic)
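
process(..) receives each @KafkaListener bean's executable methods at startup and reads the listener configuration from their annotation metadata. A minimal sketch of a listener it would wire up; the group id and topic are illustrative assumptions:

import io.micronaut.configuration.kafka.annotation.KafkaKey;
import io.micronaut.configuration.kafka.annotation.KafkaListener;
import io.micronaut.configuration.kafka.annotation.OffsetStrategy;
import io.micronaut.configuration.kafka.annotation.Topic;

@KafkaListener(groupId = "product-group", offsetStrategy = OffsetStrategy.AUTO)
public class ProductListener {

    // The processor reads groupId, clientId, uniqueGroupId and offsetStrategy
    // from this annotation's metadata, then subscribes to the @Topic values.
    @Topic("products")
    public void receive(@KafkaKey String id, String product) {
        // handle the record
    }
}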

Aggregations

AnnotationValue (io.micronaut.core.annotation.AnnotationValue): 13 usages
ExecutableMethod (io.micronaut.inject.ExecutableMethod): 10 usages
InterceptedMethod (io.micronaut.aop.InterceptedMethod): 7 usages
BeanContext (io.micronaut.context.BeanContext): 5 usages
StringUtils (io.micronaut.core.util.StringUtils): 5 usages
Qualifiers (io.micronaut.inject.qualifiers.Qualifiers): 5 usages
Singleton (jakarta.inject.Singleton): 5 usages
CompletionException (java.util.concurrent.CompletionException): 5 usages
Logger (org.slf4j.Logger): 5 usages
LoggerFactory (org.slf4j.LoggerFactory): 5 usages
ConversionService (io.micronaut.core.convert.ConversionService): 4 usages
Argument (io.micronaut.core.type.Argument): 4 usages
MessageBody (io.micronaut.messaging.annotation.MessageBody): 4 usages
List (java.util.List): 4 usages
Map (java.util.Map): 4 usages
Optional (java.util.Optional): 4 usages
MethodInterceptor (io.micronaut.aop.MethodInterceptor): 3 usages
MethodInvocationContext (io.micronaut.aop.MethodInvocationContext): 3 usages
ExecutableMethodProcessor (io.micronaut.context.processor.ExecutableMethodProcessor): 3 usages
Publishers (io.micronaut.core.async.publisher.Publishers): 3 usages