
Example 1 with MethodInvocationContext

Use of io.micronaut.aop.MethodInvocationContext in project micronaut-kafka by micronaut-projects.

The class KafkaClientIntroductionAdvice, method getProducer:

@SuppressWarnings("unchecked")
private ProducerState getProducer(MethodInvocationContext<?, ?> context) {
    ProducerKey key = new ProducerKey(context.getTarget(), context.getExecutableMethod());
    return producerMap.computeIfAbsent(key, producerKey -> {
        String clientId = context.stringValue(KafkaClient.class).orElse(null);
        List<ContextSupplier<Iterable<Header>>> headersSuppliers = new LinkedList<>();
        List<AnnotationValue<MessageHeader>> headers = context.getAnnotationValuesByType(MessageHeader.class);
        if (!headers.isEmpty()) {
            List<Header> kafkaHeaders = new ArrayList<>(headers.size());
            for (AnnotationValue<MessageHeader> header : headers) {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    kafkaHeaders.add(new RecordHeader(name, value.getBytes(StandardCharsets.UTF_8)));
                }
            }
            if (!kafkaHeaders.isEmpty()) {
                headersSuppliers.add(ctx -> kafkaHeaders);
            }
        }
        Argument keyArgument = null;
        Argument bodyArgument = null;
        ContextSupplier<String>[] topicSupplier = new ContextSupplier[1];
        topicSupplier[0] = ctx -> ctx.stringValue(Topic.class).filter(StringUtils::isNotEmpty).orElseThrow(() -> new MessagingClientException("No topic specified for method: " + context));
        ContextSupplier<Object> keySupplier = NULL_SUPPLIER;
        ContextSupplier<Object> valueSupplier = NULL_SUPPLIER;
        ContextSupplier<Long> timestampSupplier = NULL_SUPPLIER;
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> partitionFromProducerFn = (ctx, producer) -> null;
        Argument[] arguments = context.getArguments();
        for (int i = 0; i < arguments.length; i++) {
            int finalI = i;
            Argument<Object> argument = arguments[i];
            if (ProducerRecord.class.isAssignableFrom(argument.getType()) || argument.isAnnotationPresent(MessageBody.class)) {
                bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                valueSupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(KafkaKey.class)) {
                keyArgument = argument;
                keySupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(Topic.class)) {
                ContextSupplier<String> prevTopicSupplier = topicSupplier[0];
                topicSupplier[0] = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null) {
                        String topic = o.toString();
                        if (StringUtils.isNotEmpty(topic)) {
                            return topic;
                        }
                    }
                    return prevTopicSupplier.get(ctx);
                };
            } else if (argument.isAnnotationPresent(KafkaTimestamp.class)) {
                timestampSupplier = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o instanceof Long) {
                        return (Long) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartition.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null && Integer.class.isAssignableFrom(o.getClass())) {
                        return (Integer) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartitionKey.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object partitionKey = ctx.getParameterValues()[finalI];
                    if (partitionKey != null) {
                        Serializer serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer == null) {
                            serializer = new ByteArraySerializer();
                        }
                        String topic = topicSupplier[0].get(ctx);
                        byte[] partitionKeyBytes = serializer.serialize(topic, partitionKey);
                        return Utils.toPositive(Utils.murmur2(partitionKeyBytes)) % producer.partitionsFor(topic).size();
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(MessageHeader.class)) {
                final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
                String name = annotationMetadata.stringValue(MessageHeader.class, "name").orElseGet(() -> annotationMetadata.stringValue(MessageHeader.class).orElseGet(argument::getName));
                headersSuppliers.add(ctx -> {
                    Object headerValue = ctx.getParameterValues()[finalI];
                    if (headerValue != null) {
                        Serializer<Object> serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer != null) {
                            try {
                                return Collections.singleton(new RecordHeader(name, serializer.serialize(null, headerValue)));
                            } catch (Exception e) {
                                throw new MessagingClientException("Cannot serialize header argument [" + argument + "] for method [" + ctx + "]: " + e.getMessage(), e);
                            }
                        }
                    }
                    return Collections.emptySet();
                });
            } else {
                if (argument.isContainerType() && Header.class.isAssignableFrom(argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT).getType())) {
                    headersSuppliers.add(ctx -> {
                        Collection<Header> parameterHeaders = (Collection<Header>) ctx.getParameterValues()[finalI];
                        if (parameterHeaders != null) {
                            return parameterHeaders;
                        }
                        return Collections.emptySet();
                    });
                } else {
                    Class argumentType = argument.getType();
                    if (argumentType == Headers.class || argumentType == RecordHeaders.class) {
                        headersSuppliers.add(ctx -> {
                            Headers parameterHeaders = (Headers) ctx.getParameterValues()[finalI];
                            if (parameterHeaders != null) {
                                return parameterHeaders;
                            }
                            return Collections.emptySet();
                        });
                    }
                }
            }
        }
        if (bodyArgument == null) {
            for (int i = 0; i < arguments.length; i++) {
                int finalI = i;
                Argument argument = arguments[i];
                if (!argument.getAnnotationMetadata().hasStereotype(Bindable.class)) {
                    bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                    valueSupplier = ctx -> ctx.getParameterValues()[finalI];
                    break;
                }
            }
            if (bodyArgument == null) {
                throw new MessagingClientException("No valid message body argument found for method: " + context);
            }
        }
        AbstractKafkaProducerConfiguration configuration;
        if (clientId != null) {
            Optional<KafkaProducerConfiguration> namedConfig = beanContext.findBean(KafkaProducerConfiguration.class, Qualifiers.byName(clientId));
            if (namedConfig.isPresent()) {
                configuration = namedConfig.get();
            } else {
                configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
            }
        } else {
            configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
        }
        DefaultKafkaProducerConfiguration<?, ?> newConfiguration = new DefaultKafkaProducerConfiguration<>(configuration);
        Properties newProperties = newConfiguration.getConfig();
        String transactionalId = context.stringValue(KafkaClient.class, "transactionalId").filter(StringUtils::isNotEmpty).orElse(null);
        if (clientId != null) {
            newProperties.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        }
        if (transactionalId != null) {
            newProperties.putIfAbsent(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
        }
        context.getValue(KafkaClient.class, "maxBlock", Duration.class).ifPresent(maxBlock -> newProperties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(maxBlock.toMillis())));
        Integer ack = context.intValue(KafkaClient.class, "acks").orElse(KafkaClient.Acknowledge.DEFAULT);
        if (ack != KafkaClient.Acknowledge.DEFAULT) {
            String acksValue = ack == -1 ? "all" : String.valueOf(ack);
            newProperties.put(ProducerConfig.ACKS_CONFIG, acksValue);
        }
        context.findAnnotation(KafkaClient.class).map(ann -> ann.getProperties("properties", "name")).ifPresent(newProperties::putAll);
        LOG.debug("Creating new KafkaProducer.");
        if (!newProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> keySerializer = newConfiguration.getKeySerializer().orElse(null);
            if (keySerializer == null) {
                if (keyArgument != null) {
                    keySerializer = serdeRegistry.pickSerializer(keyArgument);
                } else {
                    keySerializer = new ByteArraySerializer();
                }
                LOG.debug("Using Kafka key serializer: {}", keySerializer);
                newConfiguration.setKeySerializer((Serializer) keySerializer);
            }
        }
        boolean isBatchSend = context.isTrue(KafkaClient.class, "batch");
        if (!newProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> valueSerializer = newConfiguration.getValueSerializer().orElse(null);
            if (valueSerializer == null) {
                valueSerializer = serdeRegistry.pickSerializer(isBatchSend ? bodyArgument.getFirstTypeVariable().orElse(bodyArgument) : bodyArgument);
                LOG.debug("Using Kafka value serializer: {}", valueSerializer);
                newConfiguration.setValueSerializer((Serializer) valueSerializer);
            }
        }
        Producer<?, ?> producer = beanContext.createBean(Producer.class, newConfiguration);
        boolean transactional = StringUtils.isNotEmpty(transactionalId);
        timestampSupplier = context.isTrue(KafkaClient.class, "timestamp") ? ctx -> System.currentTimeMillis() : timestampSupplier;
        Duration maxBlock = context.getValue(KafkaClient.class, "maxBlock", Duration.class).orElse(null);
        if (transactional) {
            producer.initTransactions();
        }
        ContextSupplier<Collection<Header>> headersSupplier = ctx -> {
            if (headersSuppliers.isEmpty()) {
                return null;
            }
            List<Header> headerList = new ArrayList<>(headersSuppliers.size());
            for (ContextSupplier<Iterable<Header>> supplier : headersSuppliers) {
                for (Header header : supplier.get(ctx)) {
                    headerList.add(header);
                }
            }
            if (headerList.isEmpty()) {
                return null;
            }
            return headerList;
        };
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> finalPartitionFromProducerFn = partitionFromProducerFn;
        ContextSupplier<Integer> partitionSupplier = ctx -> finalPartitionFromProducerFn.apply(ctx, producer);
        return new ProducerState(producer, keySupplier, topicSupplier[0], valueSupplier, timestampSupplier, partitionSupplier, headersSupplier, transactional, transactionalId, maxBlock, isBatchSend, bodyArgument);
    });
}
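
To make the metadata flow concrete, here is a minimal sketch of the kind of @KafkaClient interface this advice intercepts. The interface, topic, and parameter names are hypothetical; the annotations are the micronaut-kafka and micronaut-messaging ones imported below.

@KafkaClient("product-client")
public interface ProductClient {

    // @Topic feeds topicSupplier, @KafkaKey feeds keySupplier, and the
    // @MessageBody argument feeds valueSupplier in getProducer above
    @Topic("awesome-products")
    void sendProduct(@KafkaKey String brand, @MessageBody String name);

    // a @MessageHeader parameter is serialized into a Kafka RecordHeader
    // by one of the headersSuppliers built above
    @Topic("awesome-products")
    void sendProduct(@KafkaKey String brand, @MessageBody String name, @MessageHeader("X-Token") String token);
}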
Also used : Topic(io.micronaut.configuration.kafka.annotation.Topic) Publishers(io.micronaut.core.async.publisher.Publishers) Bindable(io.micronaut.core.bind.annotation.Bindable) Arrays(java.util.Arrays) BeanContext(io.micronaut.context.BeanContext) BiFunction(java.util.function.BiFunction) LoggerFactory(org.slf4j.LoggerFactory) MessageHeader(io.micronaut.messaging.annotation.MessageHeader) KafkaPartition(io.micronaut.configuration.kafka.annotation.KafkaPartition) InterceptorBean(io.micronaut.aop.InterceptorBean) PreDestroy(javax.annotation.PreDestroy) KafkaPartitionKey(io.micronaut.configuration.kafka.annotation.KafkaPartitionKey) Duration(java.time.Duration) Map(java.util.Map) MessagingClientException(io.micronaut.messaging.exceptions.MessagingClientException) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) KafkaClient(io.micronaut.configuration.kafka.annotation.KafkaClient) Qualifiers(io.micronaut.inject.qualifiers.Qualifiers) AbstractKafkaProducerConfiguration(io.micronaut.configuration.kafka.config.AbstractKafkaProducerConfiguration) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) StandardCharsets(java.nio.charset.StandardCharsets) KafkaKey(io.micronaut.configuration.kafka.annotation.KafkaKey) KafkaTimestamp(io.micronaut.configuration.kafka.annotation.KafkaTimestamp) Objects(java.util.Objects) StringUtils(io.micronaut.core.util.StringUtils) List(java.util.List) SerdeRegistry(io.micronaut.configuration.kafka.serde.SerdeRegistry) Header(org.apache.kafka.common.header.Header) MethodInvocationContext(io.micronaut.aop.MethodInvocationContext) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) Optional(java.util.Optional) MessageBody(io.micronaut.messaging.annotation.MessageBody) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Headers(org.apache.kafka.common.header.Headers) CompletableFuture(java.util.concurrent.CompletableFuture) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) InterceptedMethod(io.micronaut.aop.InterceptedMethod) Function(java.util.function.Function) ExecutableMethod(io.micronaut.inject.ExecutableMethod) ArrayList(java.util.ArrayList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) MethodInterceptor(io.micronaut.aop.MethodInterceptor) Nullable(io.micronaut.core.annotation.Nullable) ReturnType(io.micronaut.core.type.ReturnType) Argument(io.micronaut.core.type.Argument) LinkedList(java.util.LinkedList) ConversionService(io.micronaut.core.convert.ConversionService) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) Subscriber(org.reactivestreams.Subscriber) KafkaProducerConfiguration(io.micronaut.configuration.kafka.config.KafkaProducerConfiguration) Utils(org.apache.kafka.common.utils.Utils) Logger(org.slf4j.Logger) Properties(java.util.Properties) Producer(org.apache.kafka.clients.producer.Producer) Publisher(org.reactivestreams.Publisher) Mono(reactor.core.publisher.Mono) TimeUnit(java.util.concurrent.TimeUnit) Flux(reactor.core.publisher.Flux) Serializer(org.apache.kafka.common.serialization.Serializer) Subscription(org.reactivestreams.Subscription) DefaultKafkaProducerConfiguration(io.micronaut.configuration.kafka.config.DefaultKafkaProducerConfiguration) AnnotationMetadata(io.micronaut.core.annotation.AnnotationMetadata) Collections(java.util.Collections)

Example 2 with MethodInvocationContext

Use of io.micronaut.aop.MethodInvocationContext in project micronaut-gcp by micronaut-projects.

The class PubSubClientIntroductionAdvice, method intercept:

@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    if (context.hasAnnotation(Topic.class)) {
        PubSubPublisherState publisherState = publisherStateCache.computeIfAbsent(context.getExecutableMethod(), method -> {
            String projectId = method.stringValue(PubSubClient.class).orElse(googleCloudConfiguration.getProjectId());
            Optional<Argument> orderingArgument = Arrays.stream(method.getArguments()).filter(argument -> argument.getAnnotationMetadata().hasAnnotation(OrderingKey.class)).findFirst();
            String topic = method.stringValue(Topic.class).orElse(context.getName());
            String endpoint = method.stringValue(Topic.class, "endpoint").orElse("");
            String configurationName = method.stringValue(Topic.class, "configuration").orElse("");
            String contentType = method.stringValue(Topic.class, "contentType").orElse(MediaType.APPLICATION_JSON);
            ProjectTopicName projectTopicName = PubSubTopicUtils.toProjectTopicName(topic, projectId);
            Map<String, String> staticMessageAttributes = new HashMap<>();
            List<AnnotationValue<MessageHeader>> headerAnnotations = context.getAnnotationValuesByType(MessageHeader.class);
            headerAnnotations.forEach((header) -> {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    staticMessageAttributes.put(name, value);
                }
            });
            Argument<?> bodyArgument = findBodyArgument(method).orElseThrow(() -> new PubSubClientException("No valid message body argument found for method: " + context.getExecutableMethod()));
            PubSubPublisherState.TopicState topicState = new PubSubPublisherState.TopicState(contentType, projectTopicName, configurationName, endpoint, orderingArgument.isPresent());
            logger.debug("Created a new publisher[{}] for topic: {}", context.getExecutableMethod().getName(), topic);
            PublisherInterface publisher = publisherFactory.createPublisher(new PublisherFactoryConfig(topicState, pubSubConfigurationProperties.getPublishingExecutor()));
            return new PubSubPublisherState(topicState, staticMessageAttributes, bodyArgument, publisher, orderingArgument);
        });
        Map<String, String> messageAttributes = new HashMap<>(publisherState.getStaticMessageAttributes());
        String contentType = publisherState.getTopicState().getContentType();
        Argument<?> bodyArgument = publisherState.getBodyArgument();
        Map<String, Object> parameterValues = context.getParameterValueMap();
        final ReturnType<Object> returnTypeInfo = context.getReturnType();
        ReturnType<Object> returnType = returnTypeInfo;
        Class<?> javaReturnType = returnType.getType();
        Argument[] arguments = context.getArguments();
        for (Argument arg : arguments) {
            AnnotationValue<MessageHeader> headerAnn = arg.getAnnotation(MessageHeader.class);
            if (headerAnn != null) {
                Map.Entry<String, String> entry = getNameAndValue(arg, headerAnn, parameterValues);
                messageAttributes.put(entry.getKey(), entry.getValue());
            }
        }
        PublisherInterface publisher = publisherState.getPublisher();
        Object body = parameterValues.get(bodyArgument.getName());
        PubsubMessage pubsubMessage = null;
        if (body.getClass() == PubsubMessage.class) {
            pubsubMessage = (PubsubMessage) body;
        } else {
            // if target type is byte[] we bypass serdes completely
            byte[] serialized = null;
            if (body.getClass() == byte[].class) {
                serialized = (byte[]) body;
            } else {
                PubSubMessageSerDes serDes = serDesRegistry.find(contentType).orElseThrow(() -> new PubSubClientException("Could not locate a valid SerDes implementation for type: " + contentType));
                serialized = serDes.serialize(body);
            }
            messageAttributes.put("Content-Type", contentType);
            PubsubMessage.Builder messageBuilder = PubsubMessage.newBuilder();
            messageBuilder.setData(ByteString.copyFrom(serialized)).putAllAttributes(messageAttributes);
            if (publisherState.getOrderingArgument().isPresent()) {
                String orderingKey = conversionService.convert(parameterValues.get(publisherState.getOrderingArgument().get().getName()), String.class).orElseThrow(() -> new PubSubClientException("Could not convert argument annotated with @OrderingKey to String type"));
                messageBuilder.setOrderingKey(orderingKey);
            }
            pubsubMessage = messageBuilder.build();
        }
        PubsubMessage finalPubsubMessage = pubsubMessage;
        Mono<String> reactiveResult = Mono.create(sink -> {
            ApiFuture<String> future = publisher.publish(finalPubsubMessage);
            future.addListener(() -> {
                try {
                    final String result = future.get();
                    sink.success(result);
                } catch (Throwable e) {
                    sink.error(e);
                }
            }, executorService);
        });
        if (javaReturnType == void.class || javaReturnType == Void.class) {
            // block until the publish completes; the returned message id is discarded for void methods
            reactiveResult.block();
            return null;
        } else {
            if (returnTypeInfo.isReactive()) {
                return Publishers.convertPublisher(reactiveResult, javaReturnType);
            } else if (returnTypeInfo.isAsync()) {
                return reactiveResult.toFuture();
            } else {
                String result = reactiveResult.block();
                return conversionService.convert(result, javaReturnType).orElseThrow(() -> new PubSubClientException("Could not convert publisher result to method return type: " + javaReturnType));
            }
        }
    } else {
        return context.proceed();
    }
}
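
For context, a plausible @PubSubClient interface that this interceptor would handle. The interface, topic, and parameter names are illustrative; @PubSubClient, @Topic, and @OrderingKey are the micronaut-gcp annotations imported below.

@PubSubClient
public interface AnimalClient {

    // a byte[] body bypasses SerDes entirely, per the branch in intercept above
    @Topic("animals")
    void send(byte[] data);

    // an @OrderingKey parameter is converted to String and applied via
    // messageBuilder.setOrderingKey; the Mono return takes the reactive branch
    @Topic("animals")
    Mono<String> sendOrdered(byte[] data, @OrderingKey String herd);
}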
Also used : PublisherFactory(io.micronaut.gcp.pubsub.support.PublisherFactory) Arrays(java.util.Arrays) Publishers(io.micronaut.core.async.publisher.Publishers) PubSubPublisherState(io.micronaut.gcp.pubsub.support.PubSubPublisherState) LoggerFactory(org.slf4j.LoggerFactory) MessageHeader(io.micronaut.messaging.annotation.MessageHeader) HashMap(java.util.HashMap) PublisherFactoryConfig(io.micronaut.gcp.pubsub.support.PublisherFactoryConfig) ExecutableMethod(io.micronaut.inject.ExecutableMethod) PubSubMessageSerDesRegistry(io.micronaut.gcp.pubsub.serdes.PubSubMessageSerDesRegistry) PubsubMessage(com.google.pubsub.v1.PubsubMessage) PreDestroy(javax.annotation.PreDestroy) TaskExecutors(io.micronaut.scheduling.TaskExecutors) PublisherInterface(com.google.cloud.pubsub.v1.PublisherInterface) PubSubClientException(io.micronaut.gcp.pubsub.exception.PubSubClientException) MethodInterceptor(io.micronaut.aop.MethodInterceptor) PubSubTopicUtils(io.micronaut.gcp.pubsub.support.PubSubTopicUtils) MediaType(io.micronaut.http.MediaType) Map(java.util.Map) ReturnType(io.micronaut.core.type.ReturnType) PubSubConfigurationProperties(io.micronaut.gcp.pubsub.configuration.PubSubConfigurationProperties) ProjectTopicName(com.google.pubsub.v1.ProjectTopicName) Argument(io.micronaut.core.type.Argument) PubSubClient(io.micronaut.gcp.pubsub.annotation.PubSubClient) ConversionService(io.micronaut.core.convert.ConversionService) PubSubMessageSerDes(io.micronaut.gcp.pubsub.serdes.PubSubMessageSerDes) ExecutorService(java.util.concurrent.ExecutorService) Logger(org.slf4j.Logger) GoogleCloudConfiguration(io.micronaut.gcp.GoogleCloudConfiguration) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Singleton(jakarta.inject.Singleton) Mono(reactor.core.publisher.Mono) ApiFuture(com.google.api.core.ApiFuture) ByteString(com.google.protobuf.ByteString) StringUtils(io.micronaut.core.util.StringUtils) AbstractMap(java.util.AbstractMap) List(java.util.List) MethodInvocationContext(io.micronaut.aop.MethodInvocationContext) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) OrderingKey(io.micronaut.gcp.pubsub.annotation.OrderingKey) Optional(java.util.Optional) Topic(io.micronaut.gcp.pubsub.annotation.Topic) MessageBody(io.micronaut.messaging.annotation.MessageBody) Named(jakarta.inject.Named)

Example 3 with MethodInvocationContext

Use of io.micronaut.aop.MethodInvocationContext in project micronaut-micrometer by micronaut-projects.

The class CountedInterceptor, method intercept:

@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    final AnnotationMetadata metadata = context.getAnnotationMetadata();
    final String metricName = metadata.stringValue(Counted.class).orElse(DEFAULT_METRIC_NAME);
    if (StringUtils.isNotEmpty(metricName)) {
        InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
        try {
            InterceptedMethod.ResultType resultType = interceptedMethod.resultType();
            switch(resultType) {
                case PUBLISHER:
                    Object interceptResult = context.proceed();
                    if (interceptResult == null) {
                        return null;
                    }
                    Object reactiveResult;
                    if (context.getReturnType().isSingleResult()) {
                        Mono<?> single = Publishers.convertPublisher(interceptResult, Mono.class);
                        reactiveResult = single.doOnError(throwable -> doCount(metadata, metricName, throwable)).doOnSuccess(o -> doCount(metadata, metricName, null));
                    } else {
                        Flux<?> flowable = Publishers.convertPublisher(interceptResult, Flux.class);
                        reactiveResult = flowable.doOnError(throwable -> doCount(metadata, metricName, throwable)).doOnComplete(() -> doCount(metadata, metricName, null));
                    }
                    return Publishers.convertPublisher(reactiveResult, context.getReturnType().getType());
                case COMPLETION_STAGE:
                    CompletionStage<?> completionStage = interceptedMethod.interceptResultAsCompletionStage();
                    CompletionStage<?> completionStageResult = completionStage.whenComplete((o, throwable) -> doCount(metadata, metricName, throwable));
                    return interceptedMethod.handleResult(completionStageResult);
                case SYNCHRONOUS:
                    final Object result = context.proceed();
                    try {
                        return result;
                    } finally {
                        if (metadata.isFalse(Counted.class, "recordFailuresOnly")) {
                            doCount(metadata, metricName, null);
                        }
                    }
                default:
                    return interceptedMethod.unsupported();
            }
        } catch (Exception e) {
            try {
                return interceptedMethod.handleException(e);
            } finally {
                doCount(metadata, metricName, e);
            }
        }
    }
    return context.proceed();
}
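
As a usage sketch, any bean method annotated with Micrometer's @Counted is wrapped by this interceptor; the service, method, and metric names below are made up.

@Singleton
public class OrderService {

    // each successful call increments the "orders.placed" counter via the
    // SYNCHRONOUS branch above; a thrown exception is counted with an exception tag
    @Counted("orders.placed")
    public String place(String order) {
        return order.toUpperCase();
    }
}

With recordFailuresOnly set to true on @Counted, the SYNCHRONOUS branch skips counting successful invocations and only failures are recorded.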
Also used : Counter(io.micrometer.core.instrument.Counter) RequiresMetrics(io.micronaut.configuration.metrics.annotation.RequiresMetrics) Publishers(io.micronaut.core.async.publisher.Publishers) Mono(reactor.core.publisher.Mono) InterceptedMethod(io.micronaut.aop.InterceptedMethod) InterceptorBean(io.micronaut.aop.InterceptorBean) StringUtils(io.micronaut.core.util.StringUtils) Flux(reactor.core.publisher.Flux) Counted(io.micrometer.core.annotation.Counted) CompletionStage(java.util.concurrent.CompletionStage) EXCEPTION_TAG(io.micrometer.core.aop.TimedAspect.EXCEPTION_TAG) MeterRegistry(io.micrometer.core.instrument.MeterRegistry) MethodInterceptor(io.micronaut.aop.MethodInterceptor) MethodInvocationContext(io.micronaut.aop.MethodInvocationContext) Nullable(io.micronaut.core.annotation.Nullable) AnnotationMetadata(io.micronaut.core.annotation.AnnotationMetadata)

Example 4 with MethodInvocationContext

Use of io.micronaut.aop.MethodInvocationContext in project micronaut-kafka by micronaut-projects.

The class KafkaClientIntroductionAdvice, method buildSendFluxForReactiveValue:

private Flux<Object> buildSendFluxForReactiveValue(MethodInvocationContext<Object, Object> context, ProducerState producerState, Argument<?> returnType, Object value) {
    Flux<?> valueFlowable = Flux.from(Publishers.convertPublisher(value, Publisher.class));
    Class<?> javaReturnType = returnType.getType();
    if (Iterable.class.isAssignableFrom(javaReturnType)) {
        returnType = returnType.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT);
    }
    boolean transactional = producerState.transactional;
    Producer<?, ?> kafkaProducer = producerState.kafkaProducer;
    if (transactional) {
        LOG.trace("Beginning transaction for producer: {}", producerState.transactionalId);
        kafkaProducer.beginTransaction();
    }
    Argument<?> finalReturnType = returnType;
    Flux<Object> sendFlowable = valueFlowable.flatMap(o -> {
        ProducerRecord record = buildProducerRecord(context, producerState, o);
        LOG.trace("@KafkaClient method [{}] Sending producer record: {}", context, record);
        return producerSend(kafkaProducer, record).map(metadata -> convertResult(metadata, finalReturnType, o, producerState.bodyArgument)).onErrorMap(e -> wrapException(context, e));
    });
    if (transactional) {
        sendFlowable = addTransactionalProcessing(producerState, sendFlowable);
    }
    if (producerState.maxBlock != null) {
        sendFlowable = sendFlowable.timeout(producerState.maxBlock);
    }
    return sendFlowable;
}
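
For illustration, a reactive @KafkaClient method whose emitted values would be sent through buildSendFluxForReactiveValue. The interface and topic names are hypothetical; the return type follows the convertResult mapping used above.

@KafkaClient
public interface SensorClient {

    // each element of the readings Flux becomes one ProducerRecord via the
    // flatMap above, and the returned Flux emits one RecordMetadata per send
    @Topic("sensor-readings")
    Flux<RecordMetadata> publish(@KafkaKey String sensorId, @MessageBody Flux<Double> readings);
}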
Also used : the same imports as in Example 1 (this method is from the same class, KafkaClientIntroductionAdvice).

Example 5 with MethodInvocationContext

Use of io.micronaut.aop.MethodInvocationContext in project micronaut-kafka by micronaut-projects.

The class KafkaClientIntroductionAdvice, method buildSendFlux:

private Flux<Object> buildSendFlux(MethodInvocationContext<Object, Object> context, ProducerState producerState, Object value, Argument<?> returnType) {
    ProducerRecord record = buildProducerRecord(context, producerState, value);
    return Flux.defer(() -> {
        boolean transactional = producerState.transactional;
        Producer<?, ?> kafkaProducer = producerState.kafkaProducer;
        if (transactional) {
            LOG.trace("Committing transaction for producer: {}", producerState.transactionalId);
            kafkaProducer.beginTransaction();
        }
        Mono<Object> result = producerSend(kafkaProducer, record).map(metadata -> convertResult(metadata, returnType, value, producerState.bodyArgument)).onErrorMap(e -> wrapException(context, e));
        if (transactional) {
            return addTransactionalProcessing(producerState, result.flux());
        }
        return result;
    });
}
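
Because buildSendFlux wraps a single value, a method like the following hypothetical one would take this path rather than the Flux-body path in Example 4:

@Topic("sensor-readings")
Mono<RecordMetadata> publishOne(@KafkaKey String sensorId, @MessageBody Double reading);

Flux.defer ensures the transaction is begun lazily, only when the returned publisher is actually subscribed to.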
Also used : the same imports as in Example 1 (this method is from the same class, KafkaClientIntroductionAdvice).

Aggregations

MethodInterceptor (io.micronaut.aop.MethodInterceptor): 7 usages
MethodInvocationContext (io.micronaut.aop.MethodInvocationContext): 7 usages
StringUtils (io.micronaut.core.util.StringUtils): 6 usages
ExecutableMethod (io.micronaut.inject.ExecutableMethod): 6 usages
MessageBody (io.micronaut.messaging.annotation.MessageBody): 6 usages
Arrays (java.util.Arrays): 6 usages
Map (java.util.Map): 6 usages
Mono (reactor.core.publisher.Mono): 6 usages
InterceptedMethod (io.micronaut.aop.InterceptedMethod): 5 usages
InterceptorBean (io.micronaut.aop.InterceptorBean): 5 usages
AnnotationValue (io.micronaut.core.annotation.AnnotationValue): 5 usages
Publishers (io.micronaut.core.async.publisher.Publishers): 5 usages
ConversionService (io.micronaut.core.convert.ConversionService): 5 usages
Argument (io.micronaut.core.type.Argument): 5 usages
MessageHeader (io.micronaut.messaging.annotation.MessageHeader): 5 usages
List (java.util.List): 5 usages
Optional (java.util.Optional): 5 usages
Logger (org.slf4j.Logger): 5 usages
LoggerFactory (org.slf4j.LoggerFactory): 5 usages
BeanContext (io.micronaut.context.BeanContext): 4 usages