Use of io.micronaut.core.annotation.AnnotationMetadata in project micronaut-kafka by micronaut-projects.
The class KafkaClientIntroductionAdvice, method getProducer.
@SuppressWarnings("unchecked")
private ProducerState getProducer(MethodInvocationContext<?, ?> context) {
    ProducerKey key = new ProducerKey(context.getTarget(), context.getExecutableMethod());
    return producerMap.computeIfAbsent(key, producerKey -> {
        String clientId = context.stringValue(KafkaClient.class).orElse(null);
        // Collect static headers declared via @MessageHeader on the method or class
        List<ContextSupplier<Iterable<Header>>> headersSuppliers = new LinkedList<>();
        List<AnnotationValue<MessageHeader>> headers = context.getAnnotationValuesByType(MessageHeader.class);
        if (!headers.isEmpty()) {
            List<Header> kafkaHeaders = new ArrayList<>(headers.size());
            for (AnnotationValue<MessageHeader> header : headers) {
                String name = header.stringValue("name").orElse(null);
                String value = header.stringValue().orElse(null);
                if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
                    kafkaHeaders.add(new RecordHeader(name, value.getBytes(StandardCharsets.UTF_8)));
                }
            }
            if (!kafkaHeaders.isEmpty()) {
                headersSuppliers.add(ctx -> kafkaHeaders);
            }
        }
        Argument keyArgument = null;
        Argument bodyArgument = null;
        ContextSupplier<String>[] topicSupplier = new ContextSupplier[1];
        topicSupplier[0] = ctx -> ctx.stringValue(Topic.class)
            .filter(StringUtils::isNotEmpty)
            .orElseThrow(() -> new MessagingClientException("No topic specified for method: " + context));
        ContextSupplier<Object> keySupplier = NULL_SUPPLIER;
        ContextSupplier<Object> valueSupplier = NULL_SUPPLIER;
        ContextSupplier<Long> timestampSupplier = NULL_SUPPLIER;
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> partitionFromProducerFn = (ctx, producer) -> null;
        Argument[] arguments = context.getArguments();
        // Inspect each method argument for Kafka binding annotations and build the matching suppliers
        for (int i = 0; i < arguments.length; i++) {
            int finalI = i;
            Argument<Object> argument = arguments[i];
            if (ProducerRecord.class.isAssignableFrom(argument.getType()) || argument.isAnnotationPresent(MessageBody.class)) {
                bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                valueSupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(KafkaKey.class)) {
                keyArgument = argument;
                keySupplier = ctx -> ctx.getParameterValues()[finalI];
            } else if (argument.isAnnotationPresent(Topic.class)) {
                // A non-empty @Topic argument overrides the annotation-level topic at invocation time
                ContextSupplier<String> prevTopicSupplier = topicSupplier[0];
                topicSupplier[0] = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null) {
                        String topic = o.toString();
                        if (StringUtils.isNotEmpty(topic)) {
                            return topic;
                        }
                    }
                    return prevTopicSupplier.get(ctx);
                };
            } else if (argument.isAnnotationPresent(KafkaTimestamp.class)) {
                timestampSupplier = ctx -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o instanceof Long) {
                        return (Long) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartition.class)) {
                partitionFromProducerFn = (ctx, producer) -> {
                    Object o = ctx.getParameterValues()[finalI];
                    if (o != null && Integer.class.isAssignableFrom(o.getClass())) {
                        return (Integer) o;
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(KafkaPartitionKey.class)) {
                // Derive the partition from the serialized key, mirroring Kafka's default partitioner
                partitionFromProducerFn = (ctx, producer) -> {
                    Object partitionKey = ctx.getParameterValues()[finalI];
                    if (partitionKey != null) {
                        Serializer serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer == null) {
                            serializer = new ByteArraySerializer();
                        }
                        String topic = topicSupplier[0].get(ctx);
                        byte[] partitionKeyBytes = serializer.serialize(topic, partitionKey);
                        return Utils.toPositive(Utils.murmur2(partitionKeyBytes)) % producer.partitionsFor(topic).size();
                    }
                    return null;
                };
            } else if (argument.isAnnotationPresent(MessageHeader.class)) {
                final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
                String name = annotationMetadata.stringValue(MessageHeader.class, "name")
                    .orElseGet(() -> annotationMetadata.stringValue(MessageHeader.class).orElseGet(argument::getName));
                headersSuppliers.add(ctx -> {
                    Object headerValue = ctx.getParameterValues()[finalI];
                    if (headerValue != null) {
                        Serializer<Object> serializer = serdeRegistry.pickSerializer(argument);
                        if (serializer != null) {
                            try {
                                return Collections.singleton(new RecordHeader(name, serializer.serialize(null, headerValue)));
                            } catch (Exception e) {
                                throw new MessagingClientException("Cannot serialize header argument [" + argument + "] for method [" + ctx + "]: " + e.getMessage(), e);
                            }
                        }
                    }
                    return Collections.emptySet();
                });
            } else {
                if (argument.isContainerType() && Header.class.isAssignableFrom(argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT).getType())) {
                    headersSuppliers.add(ctx -> {
                        Collection<Header> parameterHeaders = (Collection<Header>) ctx.getParameterValues()[finalI];
                        if (parameterHeaders != null) {
                            return parameterHeaders;
                        }
                        return Collections.emptySet();
                    });
                } else {
                    Class argumentType = argument.getType();
                    if (argumentType == Headers.class || argumentType == RecordHeaders.class) {
                        headersSuppliers.add(ctx -> {
                            Headers parameterHeaders = (Headers) ctx.getParameterValues()[finalI];
                            if (parameterHeaders != null) {
                                return parameterHeaders;
                            }
                            return Collections.emptySet();
                        });
                    }
                }
            }
        }
        // Fall back to the first non-bindable argument as the message body
        if (bodyArgument == null) {
            for (int i = 0; i < arguments.length; i++) {
                int finalI = i;
                Argument argument = arguments[i];
                if (!argument.getAnnotationMetadata().hasStereotype(Bindable.class)) {
                    bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
                    valueSupplier = ctx -> ctx.getParameterValues()[finalI];
                    break;
                }
            }
            if (bodyArgument == null) {
                throw new MessagingClientException("No valid message body argument found for method: " + context);
            }
        }
        // Resolve the producer configuration, preferring one named after the client id
        AbstractKafkaProducerConfiguration configuration;
        if (clientId != null) {
            Optional<KafkaProducerConfiguration> namedConfig = beanContext.findBean(KafkaProducerConfiguration.class, Qualifiers.byName(clientId));
            if (namedConfig.isPresent()) {
                configuration = namedConfig.get();
            } else {
                configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
            }
        } else {
            configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
        }
        DefaultKafkaProducerConfiguration<?, ?> newConfiguration = new DefaultKafkaProducerConfiguration<>(configuration);
        Properties newProperties = newConfiguration.getConfig();
        String transactionalId = context.stringValue(KafkaClient.class, "transactionalId").filter(StringUtils::isNotEmpty).orElse(null);
        if (clientId != null) {
            newProperties.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        }
        if (transactionalId != null) {
            newProperties.putIfAbsent(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
        }
        context.getValue(KafkaClient.class, "maxBlock", Duration.class)
            .ifPresent(maxBlock -> newProperties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(maxBlock.toMillis())));
        Integer ack = context.intValue(KafkaClient.class, "acks").orElse(KafkaClient.Acknowledge.DEFAULT);
        if (ack != KafkaClient.Acknowledge.DEFAULT) {
            String acksValue = ack == -1 ? "all" : String.valueOf(ack);
            newProperties.put(ProducerConfig.ACKS_CONFIG, acksValue);
        }
        context.findAnnotation(KafkaClient.class).map(ann -> ann.getProperties("properties", "name")).ifPresent(newProperties::putAll);
        LOG.debug("Creating new KafkaProducer.");
        // Pick serializers from the configuration, the serde registry, or fall back to byte arrays
        if (!newProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> keySerializer = newConfiguration.getKeySerializer().orElse(null);
            if (keySerializer == null) {
                if (keyArgument != null) {
                    keySerializer = serdeRegistry.pickSerializer(keyArgument);
                } else {
                    keySerializer = new ByteArraySerializer();
                }
                LOG.debug("Using Kafka key serializer: {}", keySerializer);
                newConfiguration.setKeySerializer((Serializer) keySerializer);
            }
        }
        boolean isBatchSend = context.isTrue(KafkaClient.class, "batch");
        if (!newProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
            Serializer<?> valueSerializer = newConfiguration.getValueSerializer().orElse(null);
            if (valueSerializer == null) {
                valueSerializer = serdeRegistry.pickSerializer(isBatchSend ? bodyArgument.getFirstTypeVariable().orElse(bodyArgument) : bodyArgument);
                LOG.debug("Using Kafka value serializer: {}", valueSerializer);
                newConfiguration.setValueSerializer((Serializer) valueSerializer);
            }
        }
        Producer<?, ?> producer = beanContext.createBean(Producer.class, newConfiguration);
        boolean transactional = StringUtils.isNotEmpty(transactionalId);
        timestampSupplier = context.isTrue(KafkaClient.class, "timestamp") ? ctx -> System.currentTimeMillis() : timestampSupplier;
        Duration maxBlock = context.getValue(KafkaClient.class, "maxBlock", Duration.class).orElse(null);
        if (transactional) {
            producer.initTransactions();
        }
        // Merge all header suppliers into a single supplier evaluated per invocation
        ContextSupplier<Collection<Header>> headersSupplier = ctx -> {
            if (headersSuppliers.isEmpty()) {
                return null;
            }
            List<Header> headerList = new ArrayList<>(headersSuppliers.size());
            for (ContextSupplier<Iterable<Header>> supplier : headersSuppliers) {
                for (Header header : supplier.get(ctx)) {
                    headerList.add(header);
                }
            }
            if (headerList.isEmpty()) {
                return null;
            }
            return headerList;
        };
        BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> finalPartitionFromProducerFn = partitionFromProducerFn;
        ContextSupplier<Integer> partitionSupplier = ctx -> finalPartitionFromProducerFn.apply(ctx, producer);
        return new ProducerState(producer, keySupplier, topicSupplier[0], valueSupplier, timestampSupplier, partitionSupplier, headersSupplier, transactional, transactionalId, maxBlock, isBatchSend, bodyArgument);
    });
}
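For context, here is a minimal sketch of the kind of client interface this advice backs; the interface, topic, and client-id names are hypothetical, but each parameter annotation corresponds to one branch of the argument loop above.

import io.micronaut.configuration.kafka.annotation.KafkaClient;
import io.micronaut.configuration.kafka.annotation.KafkaKey;
import io.micronaut.configuration.kafka.annotation.KafkaPartitionKey;
import io.micronaut.configuration.kafka.annotation.Topic;
import io.micronaut.messaging.annotation.MessageBody;
import io.micronaut.messaging.annotation.MessageHeader;

// Hypothetical client: getProducer builds key, value, header, and partition
// suppliers from these annotations the first time each method is invoked.
@KafkaClient("product-client")
public interface ProductClient {

    // @KafkaKey feeds keySupplier; the un-annotated body falls into valueSupplier
    @Topic("product-events")
    void send(@KafkaKey String id, @MessageBody String product);

    // A per-argument @MessageHeader is serialized into a RecordHeader per call
    @Topic("product-events")
    void sendWithHeader(@KafkaKey String id,
                        @MessageHeader("X-Tenant") String tenant,
                        @MessageBody String product);

    // @KafkaPartitionKey triggers the murmur2-based partition computation
    @Topic("product-events")
    void sendToPartition(@KafkaPartitionKey String region, @MessageBody String product);
}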
Use of io.micronaut.core.annotation.AnnotationMetadata in project micronaut-views by micronaut-projects.
The class ViewsFilter, method resolveMediaType.
/**
 * Resolves the response content type for the matched route.
 * @param response HTTP response
 * @param responseBody HTTP response body
 * @return The resolved content type
 */
@NonNull
protected MediaType resolveMediaType(@NonNull HttpResponse<?> response, @Nullable Object responseBody) {
    Optional<AnnotationMetadata> routeMatch = response.getAttribute(HttpAttributes.ROUTE_MATCH, AnnotationMetadata.class);
    if (!routeMatch.isPresent()) {
        return MediaType.APPLICATION_JSON_TYPE;
    }
    AnnotationMetadata route = routeMatch.get();
    // Prefer an explicit @Produces value; otherwise render HTML for view responses and JSON for everything else
    return route.getValue(Produces.class, MediaType.class)
        .orElse((route.getValue(View.class).isPresent() || responseBody instanceof ModelAndView)
            ? MediaType.TEXT_HTML_TYPE
            : MediaType.APPLICATION_JSON_TYPE);
}
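A short illustration of how this plays out for controller routes (controller path, view name, and handlers are hypothetical):

import io.micronaut.http.annotation.Controller;
import io.micronaut.http.annotation.Get;
import io.micronaut.views.View;

import java.util.Map;

@Controller("/pages")
public class PageController {

    // Route metadata carries @View, so resolveMediaType returns TEXT_HTML_TYPE
    @View("home")
    @Get("/home")
    public Map<String, Object> home() {
        return Map.of("title", "Home");
    }

    // No @View and no @Produces: the method falls back to APPLICATION_JSON_TYPE
    @Get("/data")
    public Map<String, Object> data() {
        return Map.of("value", 42);
    }
}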
Use of io.micronaut.core.annotation.AnnotationMetadata in project micronaut-test by micronaut-projects.
The class MicronautJunit5Extension, method resolveQualifier.
/**
 * Build a qualifier for the given argument.
 * @param argument The argument
 * @param <T> The type
 * @return The resolved qualifier, or {@code null} if the argument carries no qualifier annotations
 */
@SuppressWarnings("unchecked")
private static <T> Qualifier<T> resolveQualifier(Argument<?> argument) {
    AnnotationMetadata annotationMetadata = Objects.requireNonNull(argument, "Argument cannot be null").getAnnotationMetadata();
    boolean hasMetadata = annotationMetadata != AnnotationMetadata.EMPTY_METADATA;
    List<String> qualifierTypes = hasMetadata ? annotationMetadata.getAnnotationNamesByStereotype(AnnotationUtil.QUALIFIER) : null;
    if (CollectionUtils.isNotEmpty(qualifierTypes)) {
        if (qualifierTypes.size() == 1) {
            return Qualifiers.byAnnotation(annotationMetadata, qualifierTypes.iterator().next());
        } else {
            // Combine multiple qualifier annotations into a single composite qualifier
            final Qualifier[] qualifiers = qualifierTypes.stream()
                .map(type -> Qualifiers.byAnnotation(annotationMetadata, type))
                .toArray(Qualifier[]::new);
            return Qualifiers.<T>byQualifiers(qualifiers);
        }
    }
    return null;
}
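This is the kind of qualified test-field injection the method supports; a minimal sketch, assuming a hypothetical PaymentProcessor interface with a bean named "stripe" (recent micronaut-test versions use the jakarta.inject annotations shown here):

import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertNotNull;

@MicronautTest
class PaymentServiceTest {

    // @Named is a qualifier stereotype, so resolveQualifier returns
    // Qualifiers.byAnnotation(...) and the matching bean is injected
    @Inject
    @Named("stripe")
    PaymentProcessor processor; // hypothetical interface

    @Test
    void processorIsInjected() {
        assertNotNull(processor);
    }
}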
Use of io.micronaut.core.annotation.AnnotationMetadata in project micronaut-micrometer by micronaut-projects.
The class CountedInterceptor, method intercept.
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    final AnnotationMetadata metadata = context.getAnnotationMetadata();
    final String metricName = metadata.stringValue(Counted.class).orElse(DEFAULT_METRIC_NAME);
    if (StringUtils.isNotEmpty(metricName)) {
        InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
        try {
            InterceptedMethod.ResultType resultType = interceptedMethod.resultType();
            switch (resultType) {
                case PUBLISHER:
                    Object interceptResult = context.proceed();
                    if (interceptResult == null) {
                        return null;
                    }
                    Object reactiveResult;
                    if (context.getReturnType().isSingleResult()) {
                        Mono<?> single = Publishers.convertPublisher(interceptResult, Mono.class);
                        reactiveResult = single
                            .doOnError(throwable -> doCount(metadata, metricName, throwable))
                            .doOnSuccess(o -> doCount(metadata, metricName, null));
                    } else {
                        Flux<?> flowable = Publishers.convertPublisher(interceptResult, Flux.class);
                        reactiveResult = flowable
                            .doOnError(throwable -> doCount(metadata, metricName, throwable))
                            .doOnComplete(() -> doCount(metadata, metricName, null));
                    }
                    return Publishers.convertPublisher(reactiveResult, context.getReturnType().getType());
                case COMPLETION_STAGE:
                    CompletionStage<?> completionStage = interceptedMethod.interceptResultAsCompletionStage();
                    CompletionStage<?> completionStageResult = completionStage.whenComplete((o, throwable) -> doCount(metadata, metricName, throwable));
                    return interceptedMethod.handleResult(completionStageResult);
                case SYNCHRONOUS:
                    final Object result = context.proceed();
                    try {
                        return result;
                    } finally {
                        // Count successful invocations unless failures-only recording was requested
                        if (metadata.isFalse(Counted.class, "recordFailuresOnly")) {
                            doCount(metadata, metricName, null);
                        }
                    }
                default:
                    return interceptedMethod.unsupported();
            }
        } catch (Exception e) {
            try {
                return interceptedMethod.handleException(e);
            } finally {
                doCount(metadata, metricName, e);
            }
        }
    }
    return context.proceed();
}
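A sketch of a bean this interceptor would apply to (class and metric names are hypothetical); the metric name read above comes from the @Counted value:

import io.micrometer.core.annotation.Counted;
import jakarta.inject.Singleton;

@Singleton
public class OrderService {

    // Each invocation increments "orders.placed"; on failure, doCount
    // records the counter with the exception information instead
    @Counted("orders.placed")
    public void placeOrder(String id) {
        // ... business logic ...
    }
}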
Use of io.micronaut.core.annotation.AnnotationMetadata in project micronaut-micrometer by micronaut-projects.
The class TimedInterceptor, method intercept.
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
    final AnnotationMetadata metadata = context.getAnnotationMetadata();
    final AnnotationValue<TimedSet> timedSet = metadata.getAnnotation(TimedSet.class);
    if (timedSet != null) {
        final List<AnnotationValue<Timed>> timedAnnotations = timedSet.getAnnotations(AnnotationMetadata.VALUE_MEMBER, Timed.class);
        if (!timedAnnotations.isEmpty()) {
            String exceptionClass = "none";
            List<Timer.Sample> syncInvokeSamples = null;
            InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
            try {
                InterceptedMethod.ResultType resultType = interceptedMethod.resultType();
                switch (resultType) {
                    case PUBLISHER:
                        Object interceptResult = context.proceed();
                        if (interceptResult == null) {
                            return null;
                        }
                        Object result;
                        // Samples are started lazily on subscription/request and stopped on completion or error
                        AtomicReference<List<Timer.Sample>> reactiveInvokeSample = new AtomicReference<>();
                        if (context.getReturnType().isSingleResult()) {
                            Mono<?> single = Publishers.convertPublisher(interceptResult, Mono.class);
                            result = single
                                .doOnSubscribe(d -> reactiveInvokeSample.set(initSamples(timedAnnotations)))
                                .doOnError(throwable -> finalizeSamples(timedAnnotations, throwable.getClass().getSimpleName(), reactiveInvokeSample.get()))
                                .doOnSuccess(o -> finalizeSamples(timedAnnotations, "none", reactiveInvokeSample.get()));
                        } else {
                            AtomicReference<String> exceptionClassHolder = new AtomicReference<>("none");
                            Flux<?> flowable = Publishers.convertPublisher(interceptResult, Flux.class);
                            result = flowable
                                .doOnRequest(n -> reactiveInvokeSample.set(initSamples(timedAnnotations)))
                                .doOnError(throwable -> exceptionClassHolder.set(throwable.getClass().getSimpleName()))
                                .doOnComplete(() -> finalizeSamples(timedAnnotations, exceptionClassHolder.get(), reactiveInvokeSample.get()));
                        }
                        return Publishers.convertPublisher(result, context.getReturnType().getType());
                    case COMPLETION_STAGE:
                        List<Timer.Sample> completionStageInvokeSamples = initSamples(timedAnnotations);
                        CompletionStage<?> completionStage = interceptedMethod.interceptResultAsCompletionStage();
                        CompletionStage<?> completionStageResult = completionStage.whenComplete((o, throwable) ->
                            finalizeSamples(timedAnnotations, throwable == null ? "none" : throwable.getClass().getSimpleName(), completionStageInvokeSamples));
                        return interceptedMethod.handleResult(completionStageResult);
                    case SYNCHRONOUS:
                        syncInvokeSamples = initSamples(timedAnnotations);
                        return context.proceed();
                    default:
                        return interceptedMethod.unsupported();
                }
            } catch (Exception e) {
                exceptionClass = e.getClass().getSimpleName();
                return interceptedMethod.handleException(e);
            } finally {
                // Stops samples for the synchronous path; reactive paths finalize in their callbacks
                finalizeSamples(timedAnnotations, exceptionClass, syncInvokeSamples);
            }
        }
    }
    return context.proceed();
}
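TimedSet is the container annotation for repeated @Timed annotations, so a single method can record several timers. A minimal sketch, assuming hypothetical metric names:

import io.micrometer.core.annotation.Timed;
import jakarta.inject.Singleton;

@Singleton
public class ReportService {

    // Both timers are started by initSamples and stopped by finalizeSamples,
    // with an "exception" tag derived from the thrown type's simple name
    @Timed("reports.render")
    @Timed("reports.all")
    public String render(String name) {
        return "report:" + name;
    }
}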