Use of io.micronaut.messaging.annotation.MessageHeader in project micronaut-kafka by micronaut-projects.
From the class KafkaClientIntroductionAdvice, method getProducer:
@SuppressWarnings("unchecked")
private ProducerState getProducer(MethodInvocationContext<?, ?> context) {
ProducerKey key = new ProducerKey(context.getTarget(), context.getExecutableMethod());
return producerMap.computeIfAbsent(key, producerKey -> {
String clientId = context.stringValue(KafkaClient.class).orElse(null);
List<ContextSupplier<Iterable<Header>>> headersSuppliers = new LinkedList<>();
List<AnnotationValue<MessageHeader>> headers = context.getAnnotationValuesByType(MessageHeader.class);
if (!headers.isEmpty()) {
List<Header> kafkaHeaders = new ArrayList<>(headers.size());
for (AnnotationValue<MessageHeader> header : headers) {
String name = header.stringValue("name").orElse(null);
String value = header.stringValue().orElse(null);
if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
kafkaHeaders.add(new RecordHeader(name, value.getBytes(StandardCharsets.UTF_8)));
}
}
if (!kafkaHeaders.isEmpty()) {
headersSuppliers.add(ctx -> kafkaHeaders);
}
}
Argument keyArgument = null;
Argument bodyArgument = null;
ContextSupplier<String>[] topicSupplier = new ContextSupplier[1];
topicSupplier[0] = ctx -> ctx.stringValue(Topic.class).filter(StringUtils::isNotEmpty).orElseThrow(() -> new MessagingClientException("No topic specified for method: " + context));
ContextSupplier<Object> keySupplier = NULL_SUPPLIER;
ContextSupplier<Object> valueSupplier = NULL_SUPPLIER;
ContextSupplier<Long> timestampSupplier = NULL_SUPPLIER;
BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> partitionFromProducerFn = (ctx, producer) -> null;
Argument[] arguments = context.getArguments();
for (int i = 0; i < arguments.length; i++) {
int finalI = i;
Argument<Object> argument = arguments[i];
if (ProducerRecord.class.isAssignableFrom(argument.getType()) || argument.isAnnotationPresent(MessageBody.class)) {
bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
valueSupplier = ctx -> ctx.getParameterValues()[finalI];
} else if (argument.isAnnotationPresent(KafkaKey.class)) {
keyArgument = argument;
keySupplier = ctx -> ctx.getParameterValues()[finalI];
} else if (argument.isAnnotationPresent(Topic.class)) {
ContextSupplier<String> prevTopicSupplier = topicSupplier[0];
topicSupplier[0] = ctx -> {
Object o = ctx.getParameterValues()[finalI];
if (o != null) {
String topic = o.toString();
if (StringUtils.isNotEmpty(topic)) {
return topic;
}
}
return prevTopicSupplier.get(ctx);
};
} else if (argument.isAnnotationPresent(KafkaTimestamp.class)) {
timestampSupplier = ctx -> {
Object o = ctx.getParameterValues()[finalI];
if (o instanceof Long) {
return (Long) o;
}
return null;
};
} else if (argument.isAnnotationPresent(KafkaPartition.class)) {
partitionFromProducerFn = (ctx, producer) -> {
Object o = ctx.getParameterValues()[finalI];
if (o != null && Integer.class.isAssignableFrom(o.getClass())) {
return (Integer) o;
}
return null;
};
} else if (argument.isAnnotationPresent(KafkaPartitionKey.class)) {
partitionFromProducerFn = (ctx, producer) -> {
Object partitionKey = ctx.getParameterValues()[finalI];
if (partitionKey != null) {
Serializer serializer = serdeRegistry.pickSerializer(argument);
if (serializer == null) {
serializer = new ByteArraySerializer();
}
String topic = topicSupplier[0].get(ctx);
byte[] partitionKeyBytes = serializer.serialize(topic, partitionKey);
return Utils.toPositive(Utils.murmur2(partitionKeyBytes)) % producer.partitionsFor(topic).size();
}
return null;
};
} else if (argument.isAnnotationPresent(MessageHeader.class)) {
final AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
String name = annotationMetadata.stringValue(MessageHeader.class, "name").orElseGet(() -> annotationMetadata.stringValue(MessageHeader.class).orElseGet(argument::getName));
headersSuppliers.add(ctx -> {
Object headerValue = ctx.getParameterValues()[finalI];
if (headerValue != null) {
Serializer<Object> serializer = serdeRegistry.pickSerializer(argument);
if (serializer != null) {
try {
return Collections.singleton(new RecordHeader(name, serializer.serialize(null, headerValue)));
} catch (Exception e) {
throw new MessagingClientException("Cannot serialize header argument [" + argument + "] for method [" + ctx + "]: " + e.getMessage(), e);
}
}
}
return Collections.emptySet();
});
} else {
if (argument.isContainerType() && Header.class.isAssignableFrom(argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT).getType())) {
headersSuppliers.add(ctx -> {
Collection<Header> parameterHeaders = (Collection<Header>) ctx.getParameterValues()[finalI];
if (parameterHeaders != null) {
return parameterHeaders;
}
return Collections.emptySet();
});
} else {
Class argumentType = argument.getType();
if (argumentType == Headers.class || argumentType == RecordHeaders.class) {
headersSuppliers.add(ctx -> {
Headers parameterHeaders = (Headers) ctx.getParameterValues()[finalI];
if (parameterHeaders != null) {
return parameterHeaders;
}
return Collections.emptySet();
});
}
}
}
}
if (bodyArgument == null) {
for (int i = 0; i < arguments.length; i++) {
int finalI = i;
Argument argument = arguments[i];
if (!argument.getAnnotationMetadata().hasStereotype(Bindable.class)) {
bodyArgument = argument.isAsyncOrReactive() ? argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT) : argument;
valueSupplier = ctx -> ctx.getParameterValues()[finalI];
break;
}
}
if (bodyArgument == null) {
throw new MessagingClientException("No valid message body argument found for method: " + context);
}
}
AbstractKafkaProducerConfiguration configuration;
if (clientId != null) {
Optional<KafkaProducerConfiguration> namedConfig = beanContext.findBean(KafkaProducerConfiguration.class, Qualifiers.byName(clientId));
if (namedConfig.isPresent()) {
configuration = namedConfig.get();
} else {
configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
}
} else {
configuration = beanContext.getBean(AbstractKafkaProducerConfiguration.class);
}
DefaultKafkaProducerConfiguration<?, ?> newConfiguration = new DefaultKafkaProducerConfiguration<>(configuration);
Properties newProperties = newConfiguration.getConfig();
String transactionalId = context.stringValue(KafkaClient.class, "transactionalId").filter(StringUtils::isNotEmpty).orElse(null);
if (clientId != null) {
newProperties.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, clientId);
}
if (transactionalId != null) {
newProperties.putIfAbsent(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
}
context.getValue(KafkaClient.class, "maxBlock", Duration.class).ifPresent(maxBlock -> newProperties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(maxBlock.toMillis())));
Integer ack = context.intValue(KafkaClient.class, "acks").orElse(KafkaClient.Acknowledge.DEFAULT);
if (ack != KafkaClient.Acknowledge.DEFAULT) {
String acksValue = ack == -1 ? "all" : String.valueOf(ack);
newProperties.put(ProducerConfig.ACKS_CONFIG, acksValue);
}
context.findAnnotation(KafkaClient.class).map(ann -> ann.getProperties("properties", "name")).ifPresent(newProperties::putAll);
LOG.debug("Creating new KafkaProducer.");
if (!newProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
Serializer<?> keySerializer = newConfiguration.getKeySerializer().orElse(null);
if (keySerializer == null) {
if (keyArgument != null) {
keySerializer = serdeRegistry.pickSerializer(keyArgument);
} else {
keySerializer = new ByteArraySerializer();
}
LOG.debug("Using Kafka key serializer: {}", keySerializer);
newConfiguration.setKeySerializer((Serializer) keySerializer);
}
}
boolean isBatchSend = context.isTrue(KafkaClient.class, "batch");
if (!newProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
Serializer<?> valueSerializer = newConfiguration.getValueSerializer().orElse(null);
if (valueSerializer == null) {
valueSerializer = serdeRegistry.pickSerializer(isBatchSend ? bodyArgument.getFirstTypeVariable().orElse(bodyArgument) : bodyArgument);
LOG.debug("Using Kafka value serializer: {}", valueSerializer);
newConfiguration.setValueSerializer((Serializer) valueSerializer);
}
}
Producer<?, ?> producer = beanContext.createBean(Producer.class, newConfiguration);
boolean transactional = StringUtils.isNotEmpty(transactionalId);
timestampSupplier = context.isTrue(KafkaClient.class, "timestamp") ? ctx -> System.currentTimeMillis() : timestampSupplier;
Duration maxBlock = context.getValue(KafkaClient.class, "maxBlock", Duration.class).orElse(null);
if (transactional) {
producer.initTransactions();
}
ContextSupplier<Collection<Header>> headersSupplier = ctx -> {
if (headersSuppliers.isEmpty()) {
return null;
}
List<Header> headerList = new ArrayList<>(headersSuppliers.size());
for (ContextSupplier<Iterable<Header>> supplier : headersSuppliers) {
for (Header header : supplier.get(ctx)) {
headerList.add(header);
}
}
if (headerList.isEmpty()) {
return null;
}
return headerList;
};
BiFunction<MethodInvocationContext<?, ?>, Producer, Integer> finalPartitionFromProducerFn = partitionFromProducerFn;
ContextSupplier<Integer> partitionSupplier = ctx -> finalPartitionFromProducerFn.apply(ctx, producer);
return new ProducerState(producer, keySupplier, topicSupplier[0], valueSupplier, timestampSupplier, partitionSupplier, headersSupplier, transactional, transactionalId, maxBlock, isBatchSend, bodyArgument);
});
}
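For context, a client interface of the kind this advice intercepts might look like the sketch below. The interface, topic, and header names are hypothetical; the annotations are the standard Micronaut Kafka and messaging annotations. The advice resolves the topic from @Topic, the record key from the @KafkaKey argument, the record value from the @MessageBody argument, and both the static type-level and dynamic parameter-level @MessageHeader values.

import io.micronaut.configuration.kafka.annotation.KafkaClient;
import io.micronaut.configuration.kafka.annotation.KafkaKey;
import io.micronaut.configuration.kafka.annotation.Topic;
import io.micronaut.messaging.annotation.MessageBody;
import io.micronaut.messaging.annotation.MessageHeader;

// Hypothetical producer interface; names and topics are illustrative only.
@KafkaClient("product-client")                                  // client id, used for named config lookup and client.id
@MessageHeader(name = "X-Source", value = "catalog-service")    // static header applied to every record
public interface ProductClient {

    @Topic("product-events")
    void send(@KafkaKey String productId,                       // record key; key serializer is picked from this argument
              @MessageBody String event,                        // record value
              @MessageHeader("X-Tenant") String tenant);        // dynamic, per-invocation header
}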
Use of io.micronaut.messaging.annotation.MessageHeader in project micronaut-gcp by micronaut-projects.
From the class PubSubClientIntroductionAdvice, method intercept:
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
if (context.hasAnnotation(Topic.class)) {
PubSubPublisherState publisherState = publisherStateCache.computeIfAbsent(context.getExecutableMethod(), method -> {
String projectId = method.stringValue(PubSubClient.class).orElse(googleCloudConfiguration.getProjectId());
Optional<Argument> orderingArgument = Arrays.stream(method.getArguments()).filter(argument -> argument.getAnnotationMetadata().hasAnnotation(OrderingKey.class)).findFirst();
String topic = method.stringValue(Topic.class).orElse(context.getName());
String endpoint = method.stringValue(Topic.class, "endpoint").orElse("");
String configurationName = method.stringValue(Topic.class, "configuration").orElse("");
String contentType = method.stringValue(Topic.class, "contentType").orElse(MediaType.APPLICATION_JSON);
ProjectTopicName projectTopicName = PubSubTopicUtils.toProjectTopicName(topic, projectId);
Map<String, String> staticMessageAttributes = new HashMap<>();
List<AnnotationValue<MessageHeader>> headerAnnotations = context.getAnnotationValuesByType(MessageHeader.class);
headerAnnotations.forEach((header) -> {
String name = header.stringValue("name").orElse(null);
String value = header.stringValue().orElse(null);
if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
staticMessageAttributes.put(name, value);
}
});
Argument<?> bodyArgument = findBodyArgument(method).orElseThrow(() -> new PubSubClientException("No valid message body argument found for method: " + context.getExecutableMethod()));
PubSubPublisherState.TopicState topicState = new PubSubPublisherState.TopicState(contentType, projectTopicName, configurationName, endpoint, orderingArgument.isPresent());
logger.debug("Created a new publisher[{}] for topic: {}", context.getExecutableMethod().getName(), topic);
PublisherInterface publisher = publisherFactory.createPublisher(new PublisherFactoryConfig(topicState, pubSubConfigurationProperties.getPublishingExecutor()));
return new PubSubPublisherState(topicState, staticMessageAttributes, bodyArgument, publisher, orderingArgument);
});
Map<String, String> messageAttributes = new HashMap<>(publisherState.getStaticMessageAttributes());
String contentType = publisherState.getTopicState().getContentType();
Argument<?> bodyArgument = publisherState.getBodyArgument();
Map<String, Object> parameterValues = context.getParameterValueMap();
final ReturnType<Object> returnTypeInfo = context.getReturnType();
ReturnType<Object> returnType = returnTypeInfo;
Class<?> javaReturnType = returnType.getType();
Argument[] arguments = context.getArguments();
for (Argument arg : arguments) {
AnnotationValue<MessageHeader> headerAnn = arg.getAnnotation(MessageHeader.class);
if (headerAnn != null) {
Map.Entry<String, String> entry = getNameAndValue(arg, headerAnn, parameterValues);
messageAttributes.put(entry.getKey(), entry.getValue());
}
}
PublisherInterface publisher = publisherState.getPublisher();
Object body = parameterValues.get(bodyArgument.getName());
PubsubMessage pubsubMessage = null;
if (body.getClass() == PubsubMessage.class) {
pubsubMessage = (PubsubMessage) body;
} else {
// if target type is byte[] we bypass serdes completely
byte[] serialized = null;
if (body.getClass() == byte[].class) {
serialized = (byte[]) body;
} else {
PubSubMessageSerDes serDes = serDesRegistry.find(contentType).orElseThrow(() -> new PubSubClientException("Could not locate a valid SerDes implementation for type: " + contentType));
serialized = serDes.serialize(body);
}
messageAttributes.put("Content-Type", contentType);
PubsubMessage.Builder messageBuilder = PubsubMessage.newBuilder();
messageBuilder.setData(ByteString.copyFrom(serialized)).putAllAttributes(messageAttributes);
if (publisherState.getOrderingArgument().isPresent()) {
String orderingKey = conversionService.convert(parameterValues.get(publisherState.getOrderingArgument().get().getName()), String.class).orElseThrow(() -> new PubSubClientException("Could not convert argument annotated with @OrderingKey to String type"));
messageBuilder.setOrderingKey(orderingKey);
}
pubsubMessage = messageBuilder.build();
}
PubsubMessage finalPubsubMessage = pubsubMessage;
Mono<String> reactiveResult = Mono.create(sink -> {
ApiFuture<String> future = publisher.publish(finalPubsubMessage);
future.addListener(() -> {
try {
final String result = future.get();
sink.success(result);
} catch (Throwable e) {
sink.error(e);
}
}, executorService);
});
if (javaReturnType == void.class || javaReturnType == Void.class) {
String result = reactiveResult.block();
return null;
} else {
if (returnTypeInfo.isReactive()) {
return Publishers.convertPublisher(reactiveResult, javaReturnType);
} else if (returnTypeInfo.isAsync()) {
return reactiveResult.toFuture();
} else {
String result = reactiveResult.block();
return conversionService.convert(result, javaReturnType).orElseThrow(() -> new PubSubClientException("Could not convert publisher result to method return type: " + javaReturnType));
}
}
} else {
return context.proceed();
}
}
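For context, a Pub/Sub client interface that this interceptor would back might look like the sketch below (interface, topic, and attribute names are hypothetical). A byte[] body bypasses the SerDes registry entirely, the @OrderingKey argument is converted to the message ordering key, parameter-level @MessageHeader values become message attributes, and a void return type makes the call block on the publish future.

import io.micronaut.gcp.pubsub.annotation.OrderingKey;
import io.micronaut.gcp.pubsub.annotation.PubSubClient;
import io.micronaut.gcp.pubsub.annotation.Topic;
import io.micronaut.messaging.annotation.MessageHeader;

// Hypothetical publisher interface; names are illustrative only.
@PubSubClient
public interface OrderClient {

    // byte[] body skips serialization; other body types go through the SerDes
    // matching the topic's contentType (JSON by default).
    @Topic("order-events")
    void send(byte[] payload,
              @OrderingKey String orderId,                       // set as the Pub/Sub ordering key
              @MessageHeader("X-Request-Id") String requestId);  // published as a message attribute
}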
Use of io.micronaut.messaging.annotation.MessageHeader in project micronaut-nats by micronaut-projects.
From the class NatsIntroductionAdvice, method intercept:
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
if (context.hasAnnotation(NatsClient.class)) {
StaticPublisherState publisherState = publisherCache.get(context.getExecutableMethod(), method -> {
if (!method.findAnnotation(NatsClient.class).isPresent()) {
throw new IllegalStateException("No @NatsClient annotation present on method: " + method);
}
Optional<String> subject = method.findAnnotation(Subject.class).flatMap(AnnotationValue::stringValue);
String connection = method.findAnnotation(NatsConnection.class).flatMap(conn -> conn.get("connection", String.class)).orElse(NatsConnection.DEFAULT_CONNECTION);
Argument<?> bodyArgument = findBodyArgument(method).orElseThrow(() -> new NatsClientException("No valid message body argument found for method: " + method));
Headers methodHeaders = new Headers();
List<AnnotationValue<MessageHeader>> headerAnnotations = method.getAnnotationValuesByType(MessageHeader.class);
// set the values in the class first so methods can override
Collections.reverse(headerAnnotations);
headerAnnotations.forEach(header -> {
String name = header.stringValue("name").orElse(null);
String value = header.stringValue().orElse(null);
if (StringUtils.isNotEmpty(name) && StringUtils.isNotEmpty(value)) {
methodHeaders.put(name, value);
}
});
NatsMessageSerDes<?> serDes = serDesRegistry.findSerdes(bodyArgument).orElseThrow(() -> new NatsClientException(String.format("Could not find a serializer for the body argument of type [%s]", bodyArgument.getType().getName())));
ReactivePublisher reactivePublisher;
try {
reactivePublisher = beanContext.getBean(ReactivePublisher.class, Qualifiers.byName(connection));
} catch (Throwable e) {
throw new NatsClientException(String.format("Failed to retrieve a publisher named [%s] to publish messages", connection), e);
}
return new StaticPublisherState(subject.orElse(null), bodyArgument, methodHeaders, method.getReturnType(), connection, serDes, reactivePublisher);
});
NatsMessage.Builder builder = NatsMessage.builder();
Headers headers = publisherState.getHeaders();
Argument[] arguments = context.getArguments();
Map<String, Object> parameterValues = context.getParameterValueMap();
for (Argument argument : arguments) {
AnnotationValue<MessageHeader> headerAnn = argument.getAnnotation(MessageHeader.class);
boolean headersObject = argument.getType() == Headers.class;
if (headerAnn != null) {
Map.Entry<String, List<String>> entry = getNameAndValue(argument, headerAnn, parameterValues);
String name = entry.getKey();
List<String> value = entry.getValue();
headers.put(name, value);
} else if (headersObject) {
Headers dynamicHeaders = (Headers) parameterValues.get(argument.getName());
dynamicHeaders.forEach(headers::put);
}
}
if (!headers.isEmpty()) {
builder.headers(headers);
}
Object body = parameterValues.get(publisherState.getBodyArgument().getName());
byte[] converted = publisherState.getSerDes().serialize(body);
builder = builder.data(converted);
String subject = publisherState.getSubject().orElse(findSubjectKey(context).orElse(null));
builder = builder.subject(subject);
if (subject == null) {
throw new IllegalStateException("No @Subject annotation present on method: " + context.getExecutableMethod());
}
Message message = builder.build();
ReactivePublisher reactivePublisher = publisherState.getReactivePublisher();
InterceptedMethod interceptedMethod = InterceptedMethod.of(context);
try {
boolean rpc = !interceptedMethod.returnTypeValue().isVoid();
Mono<?> reactive;
if (rpc) {
reactive = Mono.from(reactivePublisher.publishAndReply(message)).flatMap(response -> {
Object deserialized = deserialize(response, publisherState.getDataType(), publisherState.getDataType());
if (deserialized == null) {
return Mono.empty();
} else {
return Mono.just(deserialized);
}
});
if (interceptedMethod.resultType() == InterceptedMethod.ResultType.SYNCHRONOUS) {
if (LOG.isDebugEnabled()) {
LOG.debug("Publish is an RPC call. Blocking until a response is received.", context);
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Publish is an RPC call. Publisher will complete when a response is received.", context);
}
reactive = reactive.subscribeOn(scheduler);
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Sending the message.", context);
}
reactive = Mono.from(reactivePublisher.publish(message)).onErrorMap(throwable -> new NatsClientException(String.format("Failed to publish a message with subject: [%s]", subject), throwable, Collections.singletonList(message)));
}
switch(interceptedMethod.resultType()) {
case PUBLISHER:
return interceptedMethod.handleResult(reactive);
case COMPLETION_STAGE:
CompletableFuture<Object> future = new CompletableFuture<>();
reactive.subscribe(new Subscriber<Object>() {
Object value = null;
@Override
public void onSubscribe(Subscription s) {
s.request(1);
}
@Override
public void onNext(Object o) {
value = o;
}
@Override
public void onError(Throwable t) {
future.completeExceptionally(t);
}
@Override
public void onComplete() {
future.complete(value);
}
});
return interceptedMethod.handleResult(future);
case SYNCHRONOUS:
return interceptedMethod.handleResult(reactive.block());
default:
return interceptedMethod.unsupported();
}
} catch (Exception e) {
return interceptedMethod.handleException(e);
}
} else {
return context.proceed();
}
}
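For context, a NATS client interface handled by this advice might look like the sketch below (interface, subject, and header names are hypothetical). The body is serialized through the NatsMessageSerDes registry, parameter-level @MessageHeader values are copied into the NATS message headers, and a non-void return type would turn the publish into an RPC via publishAndReply, with the reply deserialized to the return type.

import io.micronaut.messaging.annotation.MessageHeader;
import io.micronaut.nats.annotation.NatsClient;
import io.micronaut.nats.annotation.Subject;

// Hypothetical client interface; names are illustrative only.
@NatsClient
public interface InventoryClient {

    // void return -> fire-and-forget publish; a non-void return is treated as an RPC.
    @Subject("inventory.updates")
    void publish(String update,                                        // body, serialized via the SerDes registry
                 @MessageHeader("X-Correlation-Id") String correlationId); // added to the NATS message headers
}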