
Example 1 with DefaultKafkaProducerConfiguration

Use of io.micronaut.configuration.kafka.config.DefaultKafkaProducerConfiguration in the micronaut-kafka project by micronaut-projects.

From the class KafkaClientIntroductionAdvice, the getProducer method (a sketch of a client interface this advice intercepts follows the listing):

@SuppressWarnings("unchecked")
private ProducerState getProducer(MethodInvocationContext<?, ?> context) {
    ProducerKey key = new ProducerKey(context.getTarget(), context.getExecutableMethod());
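    // One ProducerState is built per (target bean, method) pair and cached for later invocations.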
    return producerMap.computeIfAbsent(key, producerKey -> {
        String clientId = context.stringValue(KafkaClient.class).orElse(null);
        List<ContextSupplier<Iterable<Header>>> headersSuppliers = new LinkedList<>();
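        // Collect static headers declared via @MessageHeader annotations on the method or client interface.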
        List<AnnotationValue<MessageHeader>> headers = context.getAnnotationValuesByType(MessageHeader.class);
        if (!headers.isEmpty()) {
            List<Header> kafkaHeaders = new ArrayList<>(headers.size());
            for (AnnotationValue<MessageHeader> header : headers) {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    kafkaHeaders.add(new RecordHeader(name, value.getBytes(StandardCharsets.UTF_8)));
                }
            }
            if (!kafkaHeaders.isEmpty()) {
                headersSuppliers.add(ctx -> kafkaHeaders);
            }
        }
        Argument keyArgument = null;
        Argument bodyArgument = null;
        ContextSupplier<String>[] topicSupplier = new ContextSupplier[1];
        topicSupplier[0] = ctx -> ctx.stringValue(Topic.class).filter(StringUtils::isNotEmpty).orElseThrow(() -> new MessagingClientException("No topic specified for method: " + context));
        ContextSupplier<Object> keySupplier = NULL_SUPPLIER;
        ContextSupplier<Object> valueSupplier = NULL_SUPPLIER;
        ContextSupplier<Long> timestampSupplier = NULL_SUPPLIER;
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> partitionFromProducerFn = (ctx, producer) -> null;
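        // Bind suppliers for the key, value, topic, timestamp, partition and headers from the method arguments.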
        Argument[] arguments = context.getArguments();
        for (int i = 0; i < arguments.length; i++) {
            int finalI = i;
            Argument<Object> argument = arguments[i];
            if (ProducerRecord.class.isAssignableFrom(argument.getType()) || argument.isAnnotationPresent(MessageBody.class)) {
                bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                valueSupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(KafkaKey.class)) {
                keyArgument = argument;
                keySupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(Topic.class)) {
                ContextSupplier<String> prevTopicSupplier = topicSupplier[0];
                topicSupplier[0] = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null) {
                        String topic = o.toString();
                        if (StringUtils.isNotEmpty(topic)) {
                            return topic;
                        }
                    }
                    return prevTopicSupplier.get(ctx);
                };
            } else if (argument.isAnnotationPresent(KafkaTimestamp.class)) {
                timestampSupplier = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o instanceof Long) {
                        return (Long) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartition.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null && Integer.class.isAssignableFrom(o.getClass())) {
                        return (Integer) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartitionKey.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object partitionKey = ctx.getParameterValues()[finalI];
                    if (partitionKey != null) {
                        Serializer serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer == null) {
                            serializer = new ByteArraySerializer();
                        }
                        String topic = topicSupplier[0].get(ctx);
                        byte[] partitionKeyBytes = serializer.serialize(topic, partitionKey);
                        return Utils.toPositive(Utils.murmur2(partitionKeyBytes)) % producer.partitionsFor(topic).size();
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(MessageHeader.class)) {
                final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
                String name = annotationMetadata.stringValue(MessageHeader.class, "name").orElseGet(() -> annotationMetadata.stringValue(MessageHeader.class).orElseGet(argument::getName));
                headersSuppliers.add(ctx -> {
                    Object headerValue = ctx.getParameterValues()[finalI];
                    if (headerValue != null) {
                        Serializer<Object> serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer != null) {
                            try {
                                return Collections.singleton(new RecordHeader(name, serializer.serialize(null, headerValue)));
                            } catch (Exception e) {
                                throw new MessagingClientException("Cannot serialize header argument [" + argument + "] for method [" + ctx + "]: " + e.getMessage(), e);
                            }
                        }
                    }
                    return Collections.emptySet();
                });
            } else {
                if (argument.isContainerType() && Header.class.isAssignableFrom(argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT).getType())) {
                    headersSuppliers.add(ctx -> {
                        Collection<Header> parameterHeaders = (Collection<Header>) ctx.getParameterValues()[finalI];
                        if (parameterHeaders != null) {
                            return parameterHeaders;
                        }
                        return Collections.emptySet();
                    });
                } else {
                    Class argumentType = argument.getType();
                    if (argumentType == Headers.class || argumentType == RecordHeaders.class) {
                        headersSuppliers.add(ctx -> {
                            Headers parameterHeaders = (Headers) ctx.getParameterValues()[finalI];
                            if (parameterHeaders != null) {
                                return parameterHeaders;
                            }
                            return Collections.emptySet();
                        });
                    }
                }
            }
        }
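        // If no explicit @MessageBody argument was found, fall back to the first argument without a binding annotation.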
        if (bodyArgument == null) {
            for (int i = 0; i < arguments.length; i++) {
                int finalI = i;
                Argument argument = arguments[i];
                if (!argument.getAnnotationMetadata().hasStereotype(Bindable.class)) {
                    bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                    valueSupplier = ctx -> ctx.getParameterValues()[finalI];
                    break;
                }
            }
            if (bodyArgument == null) {
                throw new MessagingClientException("No valid message body argument found for method: " + context);
            }
        }
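        // Resolve the producer configuration: a named configuration matching the client id if present, otherwise the default one.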
        AbstractKafkaProducerConfiguration configuration;
        if (clientId != null) {
            Optional<KafkaProducerConfiguration> namedConfig = beanContext.findBean(KafkaProducerConfiguration.class, Qualifiers.byName(clientId));
            if (namedConfig.isPresent()) {
                configuration = namedConfig.get();
            } else {
                configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
            }
        } else {
            configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
        }
        DefaultKafkaProducerConfiguration<?, ?> newConfiguration = new DefaultKafkaProducerConfiguration<>(configuration);
        Properties newProperties = newConfiguration.getConfig();
        String transactionalId = context.stringValue(KafkaClient.class, "transactionalId").filter(StringUtils::isNotEmpty).orElse(null);
        if (clientId != null) {
            newProperties.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        }
        if (transactionalId != null) {
            newProperties.putIfAbsent(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
        }
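        // Apply maxBlock, acks and any additional properties declared on the @KafkaClient annotation.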
        context.getValue(KafkaClient.class, "maxBlock", Duration.class).ifPresent(maxBlock -> newProperties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(maxBlock.toMillis())));
        Integer ack = context.intValue(KafkaClient.class, "acks").orElse(KafkaClient.Acknowledge.DEFAULT);
        if (ack != KafkaClient.Acknowledge.DEFAULT) {
            String acksValue = ack == -1 ? "all" : String.valueOf(ack);
            newProperties.put(ProducerConfig.ACKS_CONFIG, acksValue);
        }
        context.findAnnotation(KafkaClient.class).map(ann -> ann.getProperties("properties", "name")).ifPresent(newProperties::putAll);
        LOG.debug("Creating new KafkaProducer.");
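        // When no key serializer is configured, pick one from the SerdeRegistry based on the @KafkaKey argument, falling back to ByteArraySerializer.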
        if (!newProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> keySerializer = newConfiguration.getKeySerializer().orElse(null);
            if (keySerializer == null) {
                if (keyArgument != null) {
                    keySerializer = serdeRegistry.pickSerializer(keyArgument);
                } else {
                    keySerializer = new ByteArraySerializer();
                }
                LOG.debug("Using Kafka key serializer: {}", keySerializer);
                newConfiguration.setKeySerializer((Serializer) keySerializer);
            }
        }
        boolean isBatchSend = context.isTrue(KafkaClient.class, "batch");
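        // When no value serializer is configured, pick one based on the body argument (its element type for batch sends).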
        if (!newProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> valueSerializer = newConfiguration.getValueSerializer().orElse(null);
            if (valueSerializer == null) {
                valueSerializer = serdeRegistry.pickSerializer(isBatchSend ? bodyArgument.getFirstTypeVariable().orElse(bodyArgument) : bodyArgument);
                LOG.debug("Using Kafka value serializer: {}", valueSerializer);
                newConfiguration.setValueSerializer((Serializer) valueSerializer);
            }
        }
        Producer<?, ?> producer = beanContext.createBean(Producer.class, newConfiguration);
        boolean transactional = StringUtils.isNotEmpty(transactionalId);
        timestampSupplier = context.isTrue(KafkaClient.class, "timestamp") ? ctx -> System.currentTimeMillis() : timestampSupplier;
        Duration maxBlock = context.getValue(KafkaClient.class, "maxBlock", Duration.class).orElse(null);
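        // Producers with a transactional id must call initTransactions() before first use.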
        if (transactional) {
            producer.initTransactions();
        }
        ContextSupplier<Collection<Header>> headersSupplier = ctx -> {
            if (headersSuppliers.isEmpty()) {
                return null;
            }
            List<Header> headerList = new ArrayList<>(headersSuppliers.size());
            for (ContextSupplier<Iterable<Header>> supplier : headersSuppliers) {
                for (Header header : supplier.get(ctx)) {
                    headerList.add(header);
                }
            }
            if (headerList.isEmpty()) {
                return null;
            }
            return headerList;
        };
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> finalPartitionFromProducerFn = partitionFromProducerFn;
        ContextSupplier<Integer> partitionSupplier = ctx -> finalPartitionFromProducerFn.apply(ctx, producer);
        return new ProducerState(producer, keySupplier, topicSupplier[0], valueSupplier, timestampSupplier, partitionSupplier, headersSupplier, transactional, transactionalId, maxBlock, isBatchSend, bodyArgument);
    });
}
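
For orientation, below is a minimal sketch of the kind of declarative @KafkaClient interface that getProducer builds a cached ProducerState for. The interface name, topic, client id and header name are illustrative only and not part of the micronaut-kafka sources; the annotations are the ones the method above inspects, and the unannotated parameter is picked up as the message body.

import io.micronaut.configuration.kafka.annotation.KafkaClient;
import io.micronaut.configuration.kafka.annotation.KafkaKey;
import io.micronaut.configuration.kafka.annotation.Topic;
import io.micronaut.messaging.annotation.MessageHeader;

// Hypothetical client interface; all names are illustrative.
// The "product-client" id is used for CLIENT_ID_CONFIG and to look up a named producer configuration.
@KafkaClient(value = "product-client", acks = KafkaClient.Acknowledge.ALL)
public interface ProductClient {

    // "brand" becomes the record key, "name" (the only unbound parameter) the message body,
    // and "X-Source" is sent as a per-record header.
    @Topic("products")
    void sendProduct(@KafkaKey String brand, String name, @MessageHeader("X-Source") String source);
}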
Also used:
- io.micronaut.aop: InterceptedMethod, InterceptorBean, MethodInterceptor, MethodInvocationContext
- io.micronaut.configuration.kafka.annotation: KafkaClient, KafkaKey, KafkaPartition, KafkaPartitionKey, KafkaTimestamp, Topic
- io.micronaut.configuration.kafka.config: AbstractKafkaProducerConfiguration, DefaultKafkaProducerConfiguration, KafkaProducerConfiguration
- io.micronaut.configuration.kafka.serde: SerdeRegistry
- io.micronaut.context: BeanContext
- io.micronaut.core.annotation: AnnotationMetadata, AnnotationValue, Nullable
- io.micronaut.core.async.publisher: Publishers
- io.micronaut.core.bind.annotation: Bindable
- io.micronaut.core.convert: ConversionService
- io.micronaut.core.type: Argument, ReturnType
- io.micronaut.core.util: StringUtils
- io.micronaut.inject: ExecutableMethod
- io.micronaut.inject.qualifiers: Qualifiers
- io.micronaut.messaging.annotation: MessageBody, MessageHeader
- io.micronaut.messaging.exceptions: MessagingClientException
- java.nio.charset: StandardCharsets
- java.time: Duration
- java.util: ArrayList, Arrays, Collection, Collections, LinkedList, List, Map, Objects, Optional, Properties
- java.util.concurrent: CompletableFuture, ConcurrentHashMap, TimeUnit
- java.util.function: BiFunction, Function
- javax.annotation: PreDestroy
- org.apache.kafka.clients.producer: Producer, ProducerConfig, ProducerRecord, RecordMetadata
- org.apache.kafka.common.header: Header, Headers
- org.apache.kafka.common.header.internals: RecordHeader, RecordHeaders
- org.apache.kafka.common.serialization: ByteArraySerializer, Serializer
- org.apache.kafka.common.utils: Utils
- org.reactivestreams: Publisher, Subscriber, Subscription
- org.slf4j: Logger, LoggerFactory
- reactor.core.publisher: Flux, Mono
