Search in sources :

Example 1 with Argument

Use of io.micronaut.core.type.Argument in project micronaut-kafka by micronaut-projects.

The class KafkaConsumerProcessor defines the method createConsumerThreadPollLoop:

private void createConsumerThreadPollLoop(final ExecutableMethod<?, ?> method, final ConsumerState consumerState) {
    final boolean isBatch = method.isTrue(KafkaListener.class, "batch");
    final Duration pollTimeout = method.getValue(KafkaListener.class, "pollTimeout", Duration.class).orElseGet(() -> Duration.ofMillis(100));
    final Optional<Argument<?>> consumerArg = Arrays.stream(method.getArguments()).filter(arg -> Consumer.class.isAssignableFrom(arg.getType())).findFirst();
    final Optional<Argument<?>> ackArg = Arrays.stream(method.getArguments()).filter(arg -> Acknowledgement.class.isAssignableFrom(arg.getType())).findFirst();
    // try-with-resources: the Kafka consumer is closed automatically when the poll loop exits
    try (Consumer<?, ?> kafkaConsumer = consumerState.kafkaConsumer) {
        final boolean trackPartitions = ackArg.isPresent() || consumerState.offsetStrategy == OffsetStrategy.SYNC_PER_RECORD || consumerState.offsetStrategy == OffsetStrategy.ASYNC_PER_RECORD;
        final Map<Argument<?>, Object> boundArguments = new HashMap<>(2);
        consumerArg.ifPresent(argument -> boundArguments.put(argument, kafkaConsumer));
        // noinspection InfiniteLoopStatement
        while (true) {
            consumerState.assignments = Collections.unmodifiableSet(kafkaConsumer.assignment());
            if (consumerState.autoPaused) {
                consumerState.pause(consumerState.assignments);
                kafkaConsumer.pause(consumerState.assignments);
            }
            boolean failed = true; // assume failure until the poll and record processing succeed
            try {
                consumerState.pauseTopicPartitions();
                final ConsumerRecords<?, ?> consumerRecords = kafkaConsumer.poll(pollTimeout);
                failed = false;
                consumerState.resumeTopicPartitions();
                if (consumerRecords == null || consumerRecords.count() <= 0) {
                    // No consumer records to process
                    continue;
                }
                if (isBatch) {
                    failed = !processConsumerRecordsAsBatch(consumerState, method, boundArguments, consumerRecords);
                } else {
                    failed = !processConsumerRecords(consumerState, method, boundArguments, trackPartitions, ackArg, consumerRecords);
                }
                if (!failed) {
                    if (consumerState.offsetStrategy == OffsetStrategy.SYNC) {
                        try {
                            kafkaConsumer.commitSync();
                        } catch (CommitFailedException e) {
                            handleException(consumerState, null, e);
                        }
                    } else if (consumerState.offsetStrategy == OffsetStrategy.ASYNC) {
                        kafkaConsumer.commitAsync(resolveCommitCallback(consumerState.consumerBean));
                    }
                }
            } catch (WakeupException e) {
                // consumer.wakeup() signals shutdown: commit outstanding offsets, then rethrow
                try {
                    if (!failed && consumerState.offsetStrategy != OffsetStrategy.DISABLED) {
                        kafkaConsumer.commitSync();
                    }
                } catch (Throwable ex) {
                    LOG.warn("Error committing Kafka offsets on shutdown: {}", ex.getMessage(), ex);
                }
                throw e;
            } catch (Throwable e) {
                handleException(consumerState, null, e);
            }
        }
    } catch (WakeupException e) {
    // ignore for shutdown
    }
}
Also used : Topic(io.micronaut.configuration.kafka.annotation.Topic) Publishers(io.micronaut.core.async.publisher.Publishers) Bindable(io.micronaut.core.bind.annotation.Bindable) Arrays(java.util.Arrays) KafkaAcknowledgement(io.micronaut.configuration.kafka.KafkaAcknowledgement) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) MessagingSystemException(io.micronaut.messaging.exceptions.MessagingSystemException) ConsumerRecordBinderRegistry(io.micronaut.configuration.kafka.bind.ConsumerRecordBinderRegistry) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Duration(java.time.Duration) Map(java.util.Map) OffsetCommitCallback(org.apache.kafka.clients.consumer.OffsetCommitCallback) ArgumentUtils(io.micronaut.core.util.ArgumentUtils) DefaultExecutableBinder(io.micronaut.core.bind.DefaultExecutableBinder) Singleton(jakarta.inject.Singleton) Set(java.util.Set) Acknowledgement(io.micronaut.messaging.Acknowledgement) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) ErrorStrategy(io.micronaut.configuration.kafka.annotation.ErrorStrategy) ExecutableMethodProcessor(io.micronaut.context.processor.ExecutableMethodProcessor) StandardCharsets(java.nio.charset.StandardCharsets) KafkaKey(io.micronaut.configuration.kafka.annotation.KafkaKey) SerdeRegistry(io.micronaut.configuration.kafka.serde.SerdeRegistry) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) AbstractKafkaConsumerConfiguration(io.micronaut.configuration.kafka.config.AbstractKafkaConsumerConfiguration) ProducerRegistry(io.micronaut.configuration.kafka.ProducerRegistry) MessageBody(io.micronaut.messaging.annotation.MessageBody) ConsumerRegistry(io.micronaut.configuration.kafka.ConsumerRegistry) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) KafkaMessage(io.micronaut.configuration.kafka.KafkaMessage) Nullable(io.micronaut.core.annotation.Nullable) ReturnType(io.micronaut.core.type.ReturnType) Argument(io.micronaut.core.type.Argument) Blocking(io.micronaut.core.annotation.Blocking) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) Properties(java.util.Properties) Producer(org.apache.kafka.clients.producer.Producer) OffsetStrategy(io.micronaut.configuration.kafka.annotation.OffsetStrategy) Publisher(org.reactivestreams.Publisher) KafkaListenerException(io.micronaut.configuration.kafka.exceptions.KafkaListenerException) Mono(reactor.core.publisher.Mono) SendTo(io.micronaut.messaging.annotation.SendTo) Flux(reactor.core.publisher.Flux) CommitFailedException(org.apache.kafka.clients.consumer.CommitFailedException) TaskScheduler(io.micronaut.scheduling.TaskScheduler) BeanDefinition(io.micronaut.inject.BeanDefinition) BeanContext(io.micronaut.context.BeanContext) ArrayUtils(io.micronaut.core.util.ArrayUtils) LoggerFactory(org.slf4j.LoggerFactory) PreDestroy(javax.annotation.PreDestroy) TaskExecutors(io.micronaut.scheduling.TaskExecutors) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Locale(java.util.Locale) KafkaListenerExceptionHandler(io.micronaut.configuration.kafka.exceptions.KafkaListenerExceptionHandler) ApplicationConfiguration(io.micronaut.runtime.ApplicationConfiguration) NameUtils(io.micronaut.core.naming.NameUtils) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) 
WakeupException(org.apache.kafka.common.errors.WakeupException) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Qualifiers(io.micronaut.inject.qualifiers.Qualifiers) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) StringUtils(io.micronaut.core.util.StringUtils) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) List(java.util.List) ProducerFencedException(org.apache.kafka.common.errors.ProducerFencedException) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Optional(java.util.Optional) Pattern(java.util.regex.Pattern) Named(jakarta.inject.Named) KafkaListener(io.micronaut.configuration.kafka.annotation.KafkaListener) BatchConsumerRecordsBinderRegistry(io.micronaut.configuration.kafka.bind.batch.BatchConsumerRecordsBinderRegistry) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) ErrorStrategyValue(io.micronaut.configuration.kafka.annotation.ErrorStrategyValue) ScheduledExecutorTaskScheduler(io.micronaut.scheduling.ScheduledExecutorTaskScheduler) TransactionalProducerRegistry(io.micronaut.configuration.kafka.TransactionalProducerRegistry) KafkaDefaultConfiguration(io.micronaut.configuration.kafka.config.KafkaDefaultConfiguration) HashMap(java.util.HashMap) Scheduler(reactor.core.scheduler.Scheduler) Function(java.util.function.Function) ExecutableMethod(io.micronaut.inject.ExecutableMethod) HashSet(java.util.HashSet) ExecutableBinder(io.micronaut.core.bind.ExecutableBinder) Requires(io.micronaut.context.annotation.Requires) Schedulers(reactor.core.scheduler.Schedulers) BoundExecutable(io.micronaut.core.bind.BoundExecutable) ExecutorService(java.util.concurrent.ExecutorService) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) OffsetReset(io.micronaut.configuration.kafka.annotation.OffsetReset) NonNull(io.micronaut.core.annotation.NonNull) IsolationLevel(org.apache.kafka.common.IsolationLevel) CollectionUtils(io.micronaut.core.util.CollectionUtils) DefaultKafkaConsumerConfiguration(io.micronaut.configuration.kafka.config.DefaultKafkaConsumerConfiguration) Collections(java.util.Collections) ConsumerAware(io.micronaut.configuration.kafka.ConsumerAware)
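
The two Arrays.stream(method.getArguments()) lookups at the top of createConsumerThreadPollLoop show the core pattern of this example: each Argument<?> carries the declared parameter type, so the processor can discover Consumer and Acknowledgement parameters without touching java.lang.reflect. A minimal standalone sketch of that lookup (the class and helper name are hypothetical, added only for illustration):

import io.micronaut.core.type.Argument;
import io.micronaut.inject.ExecutableMethod;

import java.util.Arrays;
import java.util.Optional;

final class ArgumentLookup {

    // Find the first method parameter whose declared type is assignable to the
    // requested class, mirroring the consumerArg and ackArg lookups above.
    static Optional<Argument<?>> findArgumentOfType(ExecutableMethod<?, ?> method, Class<?> type) {
        return Arrays.stream(method.getArguments())
                .filter(arg -> type.isAssignableFrom(arg.getType()))
                .findFirst();
    }
}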

Example 2 with Argument

Use of io.micronaut.core.type.Argument in project micronaut-kafka by micronaut-projects.

The class KafkaClientIntroductionAdvice defines the method getProducer:

@SuppressWarnings("unchecked")
private ProducerState getProducer(MethodInvocationContext<?, ?> context) {
    ProducerKey key = new ProducerKey(context.getTarget(), context.getExecutableMethod());
    return producerMap.computeIfAbsent(key, producerKey -> {
        String clientId = context.stringValue(KafkaClient.class).orElse(null);
        List<ContextSupplier<Iterable<Header>>> headersSuppliers = new LinkedList<>();
        List<AnnotationValue<MessageHeader>> headers = context.getAnnotationValuesByType(MessageHeader.class);
        if (!headers.isEmpty()) {
            List<Header> kafkaHeaders = new ArrayList<>(headers.size());
            for (AnnotationValue<MessageHeader> header : headers) {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    kafkaHeaders.add(new RecordHeader(name, value.getBytes(StandardCharsets.UTF_8)));
                }
            }
            if (!kafkaHeaders.isEmpty()) {
                headersSuppliers.add(ctx -> kafkaHeaders);
            }
        }
        Argument keyArgument = null;
        Argument bodyArgument = null;
        ContextSupplier<String>[] topicSupplier = new ContextSupplier[1];
        topicSupplier[0] = ctx -> ctx.stringValue(Topic.class).filter(StringUtils::isNotEmpty).orElseThrow(() -> new MessagingClientException("No topic specified for method: " + context));
        ContextSupplier<Object> keySupplier = NULL_SUPPLIER;
        ContextSupplier<Object> valueSupplier = NULL_SUPPLIER;
        ContextSupplier<Long> timestampSupplier = NULL_SUPPLIER;
        // Default: no explicit partition; replaced below when @KafkaPartition or @KafkaPartitionKey is present
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> partitionFromProducerFn = (ctx, producer) -> null;
        Argument[] arguments = context.getArguments();
        for (int i = 0; i < arguments.length; i++) {
            int finalI = i;
            Argument<Object> argument = arguments[i];
            if (ProducerRecord.class.isAssignableFrom(argument.getType()) || argument.isAnnotationPresent(MessageBody.class)) {
                bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                valueSupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(KafkaKey.class)) {
                keyArgument = argument;
                keySupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(Topic.class)) {
                ContextSupplier<String> prevTopicSupplier = topicSupplier[0];
                topicSupplier[0] = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null) {
                        String topic = o.toString();
                        if (StringUtils.isNotEmpty(topic)) {
                            return topic;
                        }
                    }
                    return prevTopicSupplier.get(ctx);
                };
            } else if (argument.isAnnotationPresent(KafkaTimestamp.class)) {
                timestampSupplier = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o instanceof Long) {
                        return (Long) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartition.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null && Integer.class.isAssignableFrom(o.getClass())) {
                        return (Integer) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartitionKey.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object partitionKey = ctx.getParameterValues()[finalI];
                    if (partitionKey != null) {
                        Serializer serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer == null) {
                            serializer = new ByteArraySerializer();
                        }
                        String topic = topicSupplier[0].get(ctx);
                        byte[] partitionKeyBytes = serializer.serialize(topic, partitionKey);
                        return Utils.toPositive(Utils.murmur2(partitionKeyBytes)) % producer.partitionsFor(topic).size();
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(MessageHeader.class)) {
                final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
                String name = annotationMetadata.stringValue(MessageHeader.class, "name").orElseGet(() -> annotationMetadata.stringValue(MessageHeader.class).orElseGet(argument::getName));
                headersSuppliers.add(ctx -> {
                    Object headerValue = ctx.getParameterValues()[finalI];
                    if (headerValue != null) {
                        Serializer<Object> serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer != null) {
                            try {
                                return Collections.singleton(new RecordHeader(name, serializer.serialize(null, headerValue)));
                            } catch (Exception e) {
                                throw new MessagingClientException("Cannot serialize header argument [" + argument + "] for method [" + ctx + "]: " + e.getMessage(), e);
                            }
                        }
                    }
                    return Collections.emptySet();
                });
            } else {
                if (argument.isContainerType() && Header.class.isAssignableFrom(argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT).getType())) {
                    headersSuppliers.add(ctx -> {
                        Collection<Header> parameterHeaders = (Collection<Header>) ctx.getParameterValues()[finalI];
                        if (parameterHeaders != null) {
                            return parameterHeaders;
                        }
                        return Collections.emptySet();
                    });
                } else {
                    Class argumentType = argument.getType();
                    if (argumentType == Headers.class || argumentType == RecordHeaders.class) {
                        headersSuppliers.add(ctx -> {
                            Headers parameterHeaders = (Headers) ctx.getParameterValues()[finalI];
                            if (parameterHeaders != null) {
                                return parameterHeaders;
                            }
                            return Collections.emptySet();
                        });
                    }
                }
            }
        }
        if (bodyArgument == null) {
            for (int i = 0; i < arguments.length; i++) {
                int finalI = i;
                Argument argument = arguments[i];
                if (!argument.getAnnotationMetadata().hasStereotype(Bindable.class)) {
                    bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                    valueSupplier = ctx -> ctx.getParameterValues()[finalI];
                    break;
                }
            }
            if (bodyArgument == null) {
                throw new MessagingClientException("No valid message body argument found for method: " + context);
            }
        }
        AbstractKafkaProducerConfiguration configuration;
        if (clientId != null) {
            Optional<KafkaProducerConfiguration> namedConfig = beanContext.findBean(KafkaProducerConfiguration.class, Qualifiers.byName(clientId));
            if (namedConfig.isPresent()) {
                configuration = namedConfig.get();
            } else {
                configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
            }
        } else {
            configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
        }
        DefaultKafkaProducerConfiguration<?, ?> newConfiguration = new DefaultKafkaProducerConfiguration<>(configuration);
        Properties newProperties = newConfiguration.getConfig();
        String transactionalId = context.stringValue(KafkaClient.class, "transactionalId").filter(StringUtils::isNotEmpty).orElse(null);
        if (clientId != null) {
            newProperties.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        }
        if (transactionalId != null) {
            newProperties.putIfAbsent(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
        }
        context.getValue(KafkaClient.class, "maxBlock", Duration.class).ifPresent(maxBlock -> newProperties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(maxBlock.toMillis())));
        Integer ack = context.intValue(KafkaClient.class, "acks").orElse(KafkaClient.Acknowledge.DEFAULT);
        if (ack != KafkaClient.Acknowledge.DEFAULT) {
            String acksValue = ack == -1 ? "all" : String.valueOf(ack);
            newProperties.put(ProducerConfig.ACKS_CONFIG, acksValue);
        }
        context.findAnnotation(KafkaClient.class).map(ann -> ann.getProperties("properties", "name")).ifPresent(newProperties::putAll);
        LOG.debug("Creating new KafkaProducer.");
        if (!newProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> keySerializer = newConfiguration.getKeySerializer().orElse(null);
            if (keySerializer == null) {
                if (keyArgument != null) {
                    keySerializer = serdeRegistry.pickSerializer(keyArgument);
                } else {
                    keySerializer = new ByteArraySerializer();
                }
                LOG.debug("Using Kafka key serializer: {}", keySerializer);
                newConfiguration.setKeySerializer((Serializer) keySerializer);
            }
        }
        boolean isBatchSend = context.isTrue(KafkaClient.class, "batch");
        if (!newProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> valueSerializer = newConfiguration.getValueSerializer().orElse(null);
            if (valueSerializer == null) {
                valueSerializer = serdeRegistry.pickSerializer(isBatchSend ? bodyArgument.getFirstTypeVariable().orElse(bodyArgument) : bodyArgument);
                LOG.debug("Using Kafka value serializer: {}", valueSerializer);
                newConfiguration.setValueSerializer((Serializer) valueSerializer);
            }
        }
        Producer<?, ?> producer = beanContext.createBean(Producer.class, newConfiguration);
        boolean transactional = StringUtils.isNotEmpty(transactionalId);
        timestampSupplier = context.isTrue(KafkaClient.class, "timestamp") ? ctx -> System.currentTimeMillis() : timestampSupplier;
        Duration maxBlock = context.getValue(KafkaClient.class, "maxBlock", Duration.class).orElse(null);
        if (transactional) {
            producer.initTransactions();
        }
        ContextSupplier<Collection<Header>> headersSupplier = ctx -> {
            if (headersSuppliers.isEmpty()) {
                return null;
            }
            List<Header> headerList = new ArrayList<>(headersSuppliers.size());
            for (ContextSupplier<Iterable<Header>> supplier : headersSuppliers) {
                for (Header header : supplier.get(ctx)) {
                    headerList.add(header);
                }
            }
            if (headerList.isEmpty()) {
                return null;
            }
            return headerList;
        };
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> finalPartitionFromProducerFn = partitionFromProducerFn;
        ContextSupplier<Integer> partitionSupplier = ctx -> finalPartitionFromProducerFn.apply(ctx, producer);
        return new ProducerState(producer, keySupplier, topicSupplier[0], valueSupplier, timestampSupplier, partitionSupplier, headersSupplier, transactional, transactionalId, maxBlock, isBatchSend, bodyArgument);
    });
}
Also used : Topic(io.micronaut.configuration.kafka.annotation.Topic) Publishers(io.micronaut.core.async.publisher.Publishers) Bindable(io.micronaut.core.bind.annotation.Bindable) Arrays(java.util.Arrays) BeanContext(io.micronaut.context.BeanContext) BiFunction(java.util.function.BiFunction) LoggerFactory(org.slf4j.LoggerFactory) MessageHeader(io.micronaut.messaging.annotation.MessageHeader) KafkaPartition(io.micronaut.configuration.kafka.annotation.KafkaPartition) InterceptorBean(io.micronaut.aop.InterceptorBean) PreDestroy(javax.annotation.PreDestroy) KafkaPartitionKey(io.micronaut.configuration.kafka.annotation.KafkaPartitionKey) Duration(java.time.Duration) Map(java.util.Map) MessagingClientException(io.micronaut.messaging.exceptions.MessagingClientException) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) KafkaClient(io.micronaut.configuration.kafka.annotation.KafkaClient) Qualifiers(io.micronaut.inject.qualifiers.Qualifiers) AbstractKafkaProducerConfiguration(io.micronaut.configuration.kafka.config.AbstractKafkaProducerConfiguration) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) StandardCharsets(java.nio.charset.StandardCharsets) KafkaKey(io.micronaut.configuration.kafka.annotation.KafkaKey) KafkaTimestamp(io.micronaut.configuration.kafka.annotation.KafkaTimestamp) Objects(java.util.Objects) StringUtils(io.micronaut.core.util.StringUtils) List(java.util.List) SerdeRegistry(io.micronaut.configuration.kafka.serde.SerdeRegistry) Header(org.apache.kafka.common.header.Header) MethodInvocationContext(io.micronaut.aop.MethodInvocationContext) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) Optional(java.util.Optional) MessageBody(io.micronaut.messaging.annotation.MessageBody) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Headers(org.apache.kafka.common.header.Headers) CompletableFuture(java.util.concurrent.CompletableFuture) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) InterceptedMethod(io.micronaut.aop.InterceptedMethod) Function(java.util.function.Function) ExecutableMethod(io.micronaut.inject.ExecutableMethod) ArrayList(java.util.ArrayList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) MethodInterceptor(io.micronaut.aop.MethodInterceptor) Nullable(io.micronaut.core.annotation.Nullable) ReturnType(io.micronaut.core.type.ReturnType) Argument(io.micronaut.core.type.Argument) LinkedList(java.util.LinkedList) ConversionService(io.micronaut.core.convert.ConversionService) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) Subscriber(org.reactivestreams.Subscriber) KafkaProducerConfiguration(io.micronaut.configuration.kafka.config.KafkaProducerConfiguration) Utils(org.apache.kafka.common.utils.Utils) Logger(org.slf4j.Logger) Properties(java.util.Properties) Producer(org.apache.kafka.clients.producer.Producer) Publisher(org.reactivestreams.Publisher) Mono(reactor.core.publisher.Mono) TimeUnit(java.util.concurrent.TimeUnit) Flux(reactor.core.publisher.Flux) Serializer(org.apache.kafka.common.serialization.Serializer) Subscription(org.reactivestreams.Subscription) DefaultKafkaProducerConfiguration(io.micronaut.configuration.kafka.config.DefaultKafkaProducerConfiguration) AnnotationMetadata(io.micronaut.core.annotation.AnnotationMetadata) Collections(java.util.Collections)
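
A recurring idiom in getProducer is unwrapping async or reactive parameters to the element type they emit before a serializer is picked. A minimal sketch of just that step, using the same Argument API as above (the class and helper name are hypothetical):

import io.micronaut.core.type.Argument;

final class BodyArguments {

    // Async/reactive parameters (e.g. Mono<T> or CompletableFuture<T>) are unwrapped
    // to their first type variable; Argument.OBJECT_ARGUMENT is the fallback when no
    // generic type is declared.
    static Argument<?> unwrapBody(Argument<?> argument) {
        return argument.isAsyncOrReactive()
                ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT)
                : argument;
    }
}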

Example 3 with Argument

Use of io.micronaut.core.type.Argument in project micronaut-gcp by micronaut-projects.

The class PubSubClientIntroductionAdvice defines the method intercept:

@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    if (context.hasAnnotation(Topic.class)) {
        PubSubPublisherState publisherState = publisherStateCache.computeIfAbsent(context.getExecutableMethod(), method -> {
            String projectId = method.stringValue(PubSubClient.class).orElse(googleCloudConfiguration.getProjectId());
            Optional<Argument> orderingArgument = Arrays.stream(method.getArguments()).filter(argument -> argument.getAnnotationMetadata().hasAnnotation(OrderingKey.class)).findFirst();
            String topic = method.stringValue(Topic.class).orElse(context.getName());
            String endpoint = method.stringValue(Topic.class, "endpoint").orElse("");
            String configurationName = method.stringValue(Topic.class, "configuration").orElse("");
            String contentType = method.stringValue(Topic.class, "contentType").orElse(MediaType.APPLICATION_JSON);
            ProjectTopicName projectTopicName = PubSubTopicUtils.toProjectTopicName(topic, projectId);
            Map<String, String> staticMessageAttributes = new HashMap<>();
            List<AnnotationValue<MessageHeader>> headerAnnotations = context.getAnnotationValuesByType(MessageHeader.class);
            headerAnnotations.forEach((header) -> {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    staticMessageAttributes.put(name, value);
                }
            });
            Argument<?> bodyArgument = findBodyArgument(method).orElseThrow(() -> new PubSubClientException("No valid message body argument found for method: " + context.getExecutableMethod()));
            PubSubPublisherState.TopicState topicState = new PubSubPublisherState.TopicState(contentType, projectTopicName, configurationName, endpoint, orderingArgument.isPresent());
            logger.debug("Created a new publisher[{}] for topic: {}", context.getExecutableMethod().getName(), topic);
            PublisherInterface publisher = publisherFactory.createPublisher(new PublisherFactoryConfig(topicState, pubSubConfigurationProperties.getPublishingExecutor()));
            return new PubSubPublisherState(topicState, staticMessageAttributes, bodyArgument, publisher, orderingArgument);
        });
        Map<String, String> messageAttributes = new HashMap<>(publisherState.getStaticMessageAttributes());
        String contentType = publisherState.getTopicState().getContentType();
        Argument<?> bodyArgument = publisherState.getBodyArgument();
        Map<String, Object> parameterValues = context.getParameterValueMap();
        final ReturnType<Object> returnTypeInfo = context.getReturnType();
        Class<?> javaReturnType = returnTypeInfo.getType();
        Argument[] arguments = context.getArguments();
        for (Argument arg : arguments) {
            AnnotationValue<MessageHeader> headerAnn = arg.getAnnotation(MessageHeader.class);
            if (headerAnn != null) {
                Map.Entry<String, String> entry = getNameAndValue(arg, headerAnn, parameterValues);
                messageAttributes.put(entry.getKey(), entry.getValue());
            }
        }
        PublisherInterface publisher = publisherState.getPublisher();
        Object body = parameterValues.get(bodyArgument.getName());
        PubsubMessage pubsubMessage = null;
        if (body.getClass() == PubsubMessage.class) {
            pubsubMessage = (PubsubMessage) body;
        } else {
            // if target type is byte[] we bypass serdes completely
            byte[] serialized = null;
            if (body.getClass() == byte[].class) {
                serialized = (byte[]) body;
            } else {
                PubSubMessageSerDes serDes = serDesRegistry.find(contentType).orElseThrow(() -> new PubSubClientException("Could not locate a valid SerDes implementation for type: " + contentType));
                serialized = serDes.serialize(body);
            }
            messageAttributes.put("Content-Type", contentType);
            PubsubMessage.Builder messageBuilder = PubsubMessage.newBuilder();
            messageBuilder.setData(ByteString.copyFrom(serialized)).putAllAttributes(messageAttributes);
            if (publisherState.getOrderingArgument().isPresent()) {
                String orderingKey = conversionService.convert(parameterValues.get(publisherState.getOrderingArgument().get().getName()), String.class).orElseThrow(() -> new PubSubClientException("Could not convert argument annotated with @OrderingKey to String type"));
                messageBuilder.setOrderingKey(orderingKey);
            }
            pubsubMessage = messageBuilder.build();
        }
        PubsubMessage finalPubsubMessage = pubsubMessage;
        Mono<String> reactiveResult = Mono.create(sink -> {
            ApiFuture<String> future = publisher.publish(finalPubsubMessage);
            future.addListener(() -> {
                try {
                    final String result = future.get();
                    sink.success(result);
                } catch (Throwable e) {
                    sink.error(e);
                }
            }, executorService);
        });
        if (javaReturnType == void.class || javaReturnType == Void.class) {
            // Block until the publish completes; void methods discard the returned message id
            reactiveResult.block();
            return null;
        } else {
            if (returnTypeInfo.isReactive()) {
                return Publishers.convertPublisher(reactiveResult, javaReturnType);
            } else if (returnTypeInfo.isAsync()) {
                return reactiveResult.toFuture();
            } else {
                String result = reactiveResult.block();
                return conversionService.convert(result, javaReturnType).orElseThrow(() -> new PubSubClientException("Could not convert publisher result to method return type: " + javaReturnType));
            }
        }
    } else {
        return context.proceed();
    }
}
Also used : PublisherFactory(io.micronaut.gcp.pubsub.support.PublisherFactory) Arrays(java.util.Arrays) Publishers(io.micronaut.core.async.publisher.Publishers) PubSubPublisherState(io.micronaut.gcp.pubsub.support.PubSubPublisherState) LoggerFactory(org.slf4j.LoggerFactory) MessageHeader(io.micronaut.messaging.annotation.MessageHeader) HashMap(java.util.HashMap) PublisherFactoryConfig(io.micronaut.gcp.pubsub.support.PublisherFactoryConfig) ExecutableMethod(io.micronaut.inject.ExecutableMethod) PubSubMessageSerDesRegistry(io.micronaut.gcp.pubsub.serdes.PubSubMessageSerDesRegistry) PubsubMessage(com.google.pubsub.v1.PubsubMessage) PreDestroy(javax.annotation.PreDestroy) TaskExecutors(io.micronaut.scheduling.TaskExecutors) PublisherInterface(com.google.cloud.pubsub.v1.PublisherInterface) PubSubClientException(io.micronaut.gcp.pubsub.exception.PubSubClientException) MethodInterceptor(io.micronaut.aop.MethodInterceptor) PubSubTopicUtils(io.micronaut.gcp.pubsub.support.PubSubTopicUtils) MediaType(io.micronaut.http.MediaType) Map(java.util.Map) ReturnType(io.micronaut.core.type.ReturnType) PubSubConfigurationProperties(io.micronaut.gcp.pubsub.configuration.PubSubConfigurationProperties) ProjectTopicName(com.google.pubsub.v1.ProjectTopicName) Argument(io.micronaut.core.type.Argument) PubSubClient(io.micronaut.gcp.pubsub.annotation.PubSubClient) ConversionService(io.micronaut.core.convert.ConversionService) PubSubMessageSerDes(io.micronaut.gcp.pubsub.serdes.PubSubMessageSerDes) ExecutorService(java.util.concurrent.ExecutorService) Logger(org.slf4j.Logger) GoogleCloudConfiguration(io.micronaut.gcp.GoogleCloudConfiguration) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Singleton(jakarta.inject.Singleton) Mono(reactor.core.publisher.Mono) ApiFuture(com.google.api.core.ApiFuture) ByteString(com.google.protobuf.ByteString) StringUtils(io.micronaut.core.util.StringUtils) AbstractMap(java.util.AbstractMap) List(java.util.List) MethodInvocationContext(io.micronaut.aop.MethodInvocationContext) AnnotationValue(io.micronaut.core.annotation.AnnotationValue) OrderingKey(io.micronaut.gcp.pubsub.annotation.OrderingKey) Optional(java.util.Optional) Topic(io.micronaut.gcp.pubsub.annotation.Topic) MessageBody(io.micronaut.messaging.annotation.MessageBody) Named(jakarta.inject.Named)
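
The @OrderingKey lookup near the top of intercept relies on the annotation metadata attached to each Argument. A minimal sketch of that lookup in isolation (the class and helper name are hypothetical, added only for illustration):

import io.micronaut.core.type.Argument;
import io.micronaut.inject.ExecutableMethod;

import java.lang.annotation.Annotation;
import java.util.Arrays;
import java.util.Optional;

final class AnnotatedArguments {

    // Find the first parameter whose annotation metadata carries the given
    // annotation type, mirroring the orderingArgument lookup above.
    static Optional<Argument<?>> findAnnotated(ExecutableMethod<?, ?> method,
                                               Class<? extends Annotation> annotationType) {
        return Arrays.stream(method.getArguments())
                .filter(arg -> arg.getAnnotationMetadata().hasAnnotation(annotationType))
                .findFirst();
    }
}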

Example 4 with Argument

Use of io.micronaut.core.type.Argument in project micronaut-test by micronaut-projects.

The class MicronautJunit5Extension defines the method getArgument:

private Argument<?> getArgument(ParameterContext parameterContext, ApplicationContext applicationContext) {
    try {
        final Executable declaringExecutable = parameterContext.getDeclaringExecutable();
        final int index = parameterContext.getIndex();
        if (declaringExecutable instanceof Constructor) {
            final Class<?> declaringClass = declaringExecutable.getDeclaringClass();
            final BeanDefinition<?> beanDefinition = applicationContext.findBeanDefinition(declaringClass).orElse(null);
            if (beanDefinition != null) {
                final Argument<?>[] arguments = beanDefinition.getConstructor().getArguments();
                if (index < arguments.length) {
                    return arguments[index];
                }
            }
        } else {
            final ExecutableMethod<?, Object> executableMethod = applicationContext.getExecutableMethod(declaringExecutable.getDeclaringClass(), declaringExecutable.getName(), declaringExecutable.getParameterTypes());
            final Argument<?>[] arguments = executableMethod.getArguments();
            if (index < arguments.length) {
                return arguments[index];
            }
        }
    } catch (NoSuchMethodException e) {
        // The declaring executable is not a managed ExecutableMethod, so no Argument metadata is available
        return null;
    }
    return null;
}
Also used : Argument(io.micronaut.core.type.Argument) Constructor(java.lang.reflect.Constructor) Executable(java.lang.reflect.Executable) FieldInjectionPoint(io.micronaut.inject.FieldInjectionPoint)
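
The reason getArgument prefers the context's Argument<?>[] over plain java.lang.reflect.Parameter is that an Argument preserves generic type information at this level of the API. A small standalone illustration using the public Argument.of factory (not part of the extension itself):

import io.micronaut.core.type.Argument;

import java.util.List;

final class ArgumentGenerics {

    public static void main(String[] args) {
        // An Argument captures the raw type together with its type parameters,
        // e.g. List<String> rather than just List.
        Argument<List> listOfStrings = Argument.of(List.class, String.class);
        System.out.println(listOfStrings.getType()); // interface java.util.List
        listOfStrings.getFirstTypeVariable()
                .ifPresent(tv -> System.out.println(tv.getType())); // class java.lang.String
    }
}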

Example 5 with Argument

Use of io.micronaut.core.type.Argument in project micronaut-test by micronaut-projects.

The class MicronautJunit5Extension defines the method resolveQualifier:

/**
 * Build a qualifier for the given argument.
 * @param argument The argument
 * @param <T> The type
 * @return The resolved qualifier
 */
@SuppressWarnings("unchecked")
private static <T> Qualifier<T> resolveQualifier(Argument<?> argument) {
    AnnotationMetadata annotationMetadata = Objects.requireNonNull(argument, "Argument cannot be null").getAnnotationMetadata();
    boolean hasMetadata = annotationMetadata != AnnotationMetadata.EMPTY_METADATA;
    List<String> qualifierTypes = hasMetadata ? annotationMetadata.getAnnotationNamesByStereotype(AnnotationUtil.QUALIFIER) : null;
    if (CollectionUtils.isNotEmpty(qualifierTypes)) {
        if (qualifierTypes.size() == 1) {
            return Qualifiers.byAnnotation(annotationMetadata, qualifierTypes.iterator().next());
        } else {
            final Qualifier[] qualifiers = qualifierTypes.stream().map((type) -> Qualifiers.byAnnotation(annotationMetadata, type)).toArray(Qualifier[]::new);
            return Qualifiers.<T>byQualifiers(qualifiers);
        }
    }
    return null;
}
Also used : java.util(java.util) InterceptedProxy(io.micronaut.aop.InterceptedProxy) TestContext(io.micronaut.test.context.TestContext) Qualifier(io.micronaut.context.Qualifier) Nested(org.junit.jupiter.api.Nested) Constructor(java.lang.reflect.Constructor) ExecutableMethod(io.micronaut.inject.ExecutableMethod) ApplicationContext(io.micronaut.context.ApplicationContext) MockBean(io.micronaut.test.annotation.MockBean) TestInstance(org.junit.jupiter.api.TestInstance) Executable(java.lang.reflect.Executable) Argument(io.micronaut.core.type.Argument) MicronautTest(io.micronaut.test.extensions.junit5.annotation.MicronautTest) Method(java.lang.reflect.Method) Property(io.micronaut.context.annotation.Property) Qualifiers(io.micronaut.inject.qualifiers.Qualifiers) Value(io.micronaut.context.annotation.Value) Field(java.lang.reflect.Field) FieldInjectionPoint(io.micronaut.inject.FieldInjectionPoint) AnnotationSupport(org.junit.platform.commons.support.AnnotationSupport) MicronautTestValue(io.micronaut.test.annotation.MicronautTestValue) AnnotationUtil(io.micronaut.core.annotation.AnnotationUtil) CollectionUtils(io.micronaut.core.util.CollectionUtils) AbstractMicronautExtension(io.micronaut.test.extensions.AbstractMicronautExtension) AnnotationMetadata(io.micronaut.core.annotation.AnnotationMetadata) BeanDefinition(io.micronaut.inject.BeanDefinition) org.junit.jupiter.api.extension(org.junit.jupiter.api.extension) TestPropertyProvider(io.micronaut.test.support.TestPropertyProvider) AnnotatedElement(java.lang.reflect.AnnotatedElement)
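
The qualifier built by resolveQualifier is what ties an Argument back to a concrete bean in the context. A minimal sketch of that hand-off (the class and helper beanFor are hypothetical; it assumes a qualifier resolved as above is passed in):

import io.micronaut.context.ApplicationContext;
import io.micronaut.context.Qualifier;
import io.micronaut.core.type.Argument;

final class QualifiedLookup {

    // Fetch the bean matching the argument's type, narrowed by the qualifier
    // when one was resolved from the argument's annotation metadata.
    static <T> T beanFor(ApplicationContext context, Argument<T> argument, Qualifier<T> qualifier) {
        return qualifier != null
                ? context.getBean(argument.getType(), qualifier)
                : context.getBean(argument.getType());
    }
}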

Aggregations

Argument (io.micronaut.core.type.Argument) : 12 usages
ExecutableMethod (io.micronaut.inject.ExecutableMethod) : 9 usages
AnnotationValue (io.micronaut.core.annotation.AnnotationValue) : 8 usages
Qualifiers (io.micronaut.inject.qualifiers.Qualifiers) : 8 usages
MessageBody (io.micronaut.messaging.annotation.MessageBody) : 8 usages
Arrays (java.util.Arrays) : 8 usages
List (java.util.List) : 8 usages
Map (java.util.Map) : 8 usages
Optional (java.util.Optional) : 8 usages
Logger (org.slf4j.Logger) : 8 usages
LoggerFactory (org.slf4j.LoggerFactory) : 8 usages
BeanContext (io.micronaut.context.BeanContext) : 7 usages
StringUtils (io.micronaut.core.util.StringUtils) : 7 usages
Mono (reactor.core.publisher.Mono) : 7 usages
Publishers (io.micronaut.core.async.publisher.Publishers) : 6 usages
ReturnType (io.micronaut.core.type.ReturnType) : 6 usages
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap) : 6 usages
PreDestroy (javax.annotation.PreDestroy) : 6 usages
MethodInterceptor (io.micronaut.aop.MethodInterceptor) : 5 usages
MethodInvocationContext (io.micronaut.aop.MethodInvocationContext) : 5 usages