Search in sources :

Example 1 with KafkaMessageContext

Use of org.eclipse.hono.application.client.kafka.KafkaMessageContext in project hono by eclipse.

The class KafkaBasedCommandSenderTest, method sendCommandAndReceiveResponse.

private void sendCommandAndReceiveResponse(final VertxTestContext ctx, final String correlationId, final Integer responseStatus, final String responsePayload, final boolean expectSuccess, final int expectedStatusCode) {
    final Context context = vertx.getOrCreateContext();
    final Promise<Void> onProducerRecordSentPromise = Promise.promise();
    mockProducer = new MockProducer<>(true, new StringSerializer(), new BufferSerializer()) {

        @Override
        public synchronized java.util.concurrent.Future<RecordMetadata> send(final ProducerRecord<String, Buffer> record, final Callback callback) {
            return super.send(record, (metadata, exception) -> {
                callback.onCompletion(metadata, exception);
                context.runOnContext(v -> {
                    // decouple from current execution in order to run after the "send" result handler
                    onProducerRecordSentPromise.complete();
                });
            });
        }
    };
    final var producerFactory = CachingKafkaProducerFactory.testFactory(vertx, (n, c) -> KafkaClientUnitTestHelper.newKafkaProducer(mockProducer));
    commandSender = new KafkaBasedCommandSender(vertx, consumerConfig, producerFactory, producerConfig, NoopTracerFactory.create());
    final Map<String, Object> headerProperties = new HashMap<>();
    headerProperties.put("appKey", "appValue");
    final String command = "setVolume";
    final ConsumerRecord<String, Buffer> commandResponseRecord = commandResponseRecord(tenantId, deviceId, correlationId, responseStatus, Buffer.buffer(responsePayload));
    final String responseTopic = new HonoTopic(HonoTopic.Type.COMMAND_RESPONSE, tenantId).toString();
    final TopicPartition responseTopicPartition = new TopicPartition(responseTopic, 0);
    // let the mock consumer have the command response topic partition assigned once it subscribes
    mockConsumer.setRebalancePartitionAssignmentAfterSubscribe(List.of(responseTopicPartition));
    mockConsumer.updatePartitions(responseTopicPartition, KafkaMockConsumer.DEFAULT_NODE);
    mockConsumer.updateBeginningOffsets(Map.of(responseTopicPartition, 0L));
    mockConsumer.updateEndOffsets(Map.of(responseTopicPartition, 0L));
    onProducerRecordSentPromise.future().onComplete(ar -> {
        LOG.debug("producer record sent, add command response record to mockConsumer");
        // Send a command response with the same correlation id as that of the command
        mockConsumer.addRecord(commandResponseRecord);
    });
    // This correlation id is used for both command and its response.
    commandSender.setCorrelationIdSupplier(() -> correlationId);
    commandSender.setKafkaConsumerSupplier(() -> mockConsumer);
    context.runOnContext(v -> {
        // Send a command to the device
        commandSender.sendCommand(tenantId, deviceId, command, "text/plain", Buffer.buffer("test"), headerProperties).onComplete(ar -> {
            ctx.verify(() -> {
                if (expectSuccess) {
                    // assert that send operation succeeded
                    assertThat(ar.succeeded()).isTrue();
                    // Verify the command response that has been received
                    final DownstreamMessage<KafkaMessageContext> response = ar.result();
                    assertThat(response.getDeviceId()).isEqualTo(deviceId);
                    assertThat(response.getStatus()).isEqualTo(responseStatus);
                    assertThat(response.getPayload().toString()).isEqualTo(responsePayload);
                } else {
                    // assert that send operation failed
                    assertThat(ar.succeeded()).isFalse();
                    assertThat(ar.cause()).isInstanceOf(ServiceInvocationException.class);
                    assertThat(((ServiceInvocationException) ar.cause()).getErrorCode()).isEqualTo(expectedStatusCode);
                    assertThat(ar.cause().getMessage()).isEqualTo(responsePayload);
                }
            });
            ctx.completeNow();
            mockConsumer.close();
            commandSender.stop();
        });
    });
}
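
The helper above is parameterized over the response status and the expected outcome, so both the success and the failure path can reuse it. A minimal sketch of JUnit 5 test methods that might drive it, assuming the test class setup shown above; the method names and status values are illustrative and not taken from the original test class:

@Test
@Timeout(value = 5, timeUnit = TimeUnit.SECONDS)
void testSendCommandSucceedsForOkResponse(final VertxTestContext ctx) {
    // a 200 response with the expected payload should complete the returned future successfully
    sendCommandAndReceiveResponse(ctx, UUID.randomUUID().toString(), 200, "ok", true, 200);
}

@Test
@Timeout(value = 5, timeUnit = TimeUnit.SECONDS)
void testSendCommandFailsForErrorResponse(final VertxTestContext ctx) {
    // an error status is expected to fail the future with a ServiceInvocationException carrying that code
    sendCommandAndReceiveResponse(ctx, UUID.randomUUID().toString(), HttpURLConnection.HTTP_BAD_REQUEST,
            "malformed command", false, HttpURLConnection.HTTP_BAD_REQUEST);
}
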
Also used : HttpURLConnection(java.net.HttpURLConnection) BeforeEach(org.junit.jupiter.api.BeforeEach) MessagingKafkaConsumerConfigProperties(org.eclipse.hono.client.kafka.consumer.MessagingKafkaConsumerConfigProperties) DownstreamMessage(org.eclipse.hono.application.client.DownstreamMessage) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) KafkaMessageContext(org.eclipse.hono.application.client.kafka.KafkaMessageContext) LoggerFactory(org.slf4j.LoggerFactory) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) Context(io.vertx.core.Context) Timeout(io.vertx.junit5.Timeout) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Duration(java.time.Duration) Map(java.util.Map) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TracingMockSupport(org.eclipse.hono.test.TracingMockSupport) JsonObject(io.vertx.core.json.JsonObject) TimestampType(org.apache.kafka.common.record.TimestampType) TopicPartition(org.apache.kafka.common.TopicPartition) KafkaMockConsumer(org.eclipse.hono.kafka.test.KafkaMockConsumer) CachingKafkaProducerFactory(org.eclipse.hono.client.kafka.producer.CachingKafkaProducerFactory) UUID(java.util.UUID) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) MessageHelper(org.eclipse.hono.util.MessageHelper) VertxExtension(io.vertx.junit5.VertxExtension) Test(org.junit.jupiter.api.Test) List(java.util.List) Buffer(io.vertx.core.buffer.Buffer) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Span(io.opentracing.Span) Callback(org.apache.kafka.clients.producer.Callback) VertxTestContext(io.vertx.junit5.VertxTestContext) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) BufferSerializer(io.vertx.kafka.client.serialization.BufferSerializer) HashMap(java.util.HashMap) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) ServiceInvocationException(org.eclipse.hono.client.ServiceInvocationException) ArrayList(java.util.ArrayList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) KafkaClientUnitTestHelper(org.eclipse.hono.kafka.test.KafkaClientUnitTestHelper) Logger(org.slf4j.Logger) Tracer(io.opentracing.Tracer) NoopTracerFactory(io.opentracing.noop.NoopTracerFactory) Promise(io.vertx.core.Promise) Vertx(io.vertx.core.Vertx) Truth.assertThat(com.google.common.truth.Truth.assertThat) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) AfterEach(org.junit.jupiter.api.AfterEach) SendMessageTimeoutException(org.eclipse.hono.client.SendMessageTimeoutException) NoopSpan(io.opentracing.noop.NoopSpan) MockProducer(org.apache.kafka.clients.producer.MockProducer)

Example 2 with KafkaMessageContext

Use of org.eclipse.hono.application.client.kafka.KafkaMessageContext in project hono by eclipse.

The class KafkaBasedCommandSender, method sendCommand.

/**
 * {@inheritDoc}
 *
 * <p>
 * The replyId is not used in the Kafka based implementation and may be set to {@code null};
 * if a value is given, it is ignored.
 * <p>
 * If the timeout duration is {@code null}, the default timeout of
 * {@value DEFAULT_COMMAND_TIMEOUT_IN_MS} ms is used.
 */
@Override
public Future<DownstreamMessage<KafkaMessageContext>> sendCommand(final String tenantId, final String deviceId, final String command, final String contentType, final Buffer data, final String replyId, final Map<String, Object> properties, final Duration timeout, final SpanContext context) {
    Objects.requireNonNull(tenantId);
    Objects.requireNonNull(deviceId);
    Objects.requireNonNull(command);
    final long timeoutInMs = Optional.ofNullable(timeout).map(t -> {
        if (t.isNegative()) {
            throw new IllegalArgumentException("command timeout duration must be >= 0");
        }
        return t.toMillis();
    }).orElse(DEFAULT_COMMAND_TIMEOUT_IN_MS);
    final String correlationId = correlationIdSupplier.get();
    final Span span = TracingHelper.buildChildSpan(tracer, context, "send command and receive response", getClass().getSimpleName()).withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CLIENT).withTag(TracingHelper.TAG_TENANT_ID, tenantId).withTag(TracingHelper.TAG_DEVICE_ID, deviceId).withTag(TracingHelper.TAG_CORRELATION_ID, correlationId).start();
    final ExpiringCommandPromise expiringCommandPromise = new ExpiringCommandPromise(
            correlationId,
            timeoutInMs,
            // remove the corresponding pending response entry if the command times out
            x -> removePendingCommandResponse(tenantId, correlationId),
            span);
    subscribeForCommandResponse(tenantId, span).compose(ok -> {
        // Store the correlation id and the expiring command promise
        pendingCommandResponses.computeIfAbsent(tenantId, k -> new ConcurrentHashMap<>()).put(correlationId, expiringCommandPromise);
        return sendCommand(tenantId, deviceId, command, contentType, data, correlationId, properties, true, "send command", span.context()).onSuccess(sent -> {
            LOGGER.debug("sent command [correlation-id: {}], waiting for response", correlationId);
            span.log("sent command, waiting for response");
        }).onFailure(error -> {
            LOGGER.debug("error sending command", error);
            // only log the error if the command promise hasn't completed yet
            // (once it completes, the span gets finished)
            if (!expiringCommandPromise.future().isComplete()) {
                TracingHelper.logError(span, "error sending command", error);
            }
            removePendingCommandResponse(tenantId, correlationId);
            expiringCommandPromise.tryCompleteAndCancelTimer(Future.failedFuture(error));
        });
    });
    return expiringCommandPromise.future().onComplete(o -> span.finish());
}
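
For context, a minimal sketch of how application code might invoke this method, assuming a started KafkaBasedCommandSender referenced as commandSender and an SLF4J LOGGER; the tenant, device, payload and timeout values are illustrative, and passing null for the span context is an assumption:

final Map<String, Object> properties = Map.of("appKey", "appValue");
commandSender.sendCommand(
        "DEFAULT_TENANT",                  // tenantId (illustrative)
        "4711",                            // deviceId (illustrative)
        "setVolume",                       // command name
        "application/json",                // content type
        Buffer.buffer("{\"level\": 50}"),  // command payload
        null,                              // replyId is ignored by the Kafka based implementation
        properties,                        // additional properties
        Duration.ofSeconds(10),            // per-invocation timeout; null would select the default
        null)                              // parent span context (assumed optional here)
    .onSuccess(response -> LOGGER.info("response from device [{}]: status {}",
            response.getDeviceId(), response.getStatus()))
    .onFailure(t -> LOGGER.warn("command failed or timed out", t));
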
Also used : HttpURLConnection(java.net.HttpURLConnection) MessagingKafkaConsumerConfigProperties(org.eclipse.hono.client.kafka.consumer.MessagingKafkaConsumerConfigProperties) DownstreamMessage(org.eclipse.hono.application.client.DownstreamMessage) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) HonoKafkaConsumer(org.eclipse.hono.client.kafka.consumer.HonoKafkaConsumer) KafkaMessageContext(org.eclipse.hono.application.client.kafka.KafkaMessageContext) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) Supplier(java.util.function.Supplier) Tags(io.opentracing.tag.Tags) CompositeFuture(io.vertx.core.CompositeFuture) StatusCodeMapper(org.eclipse.hono.client.StatusCodeMapper) Duration(java.time.Duration) Map(java.util.Map) TracingHelper(org.eclipse.hono.tracing.TracingHelper) AsyncResult(io.vertx.core.AsyncResult) AbstractKafkaBasedMessageSender(org.eclipse.hono.client.kafka.producer.AbstractKafkaBasedMessageSender) Consumer(org.apache.kafka.clients.consumer.Consumer) Logger(org.slf4j.Logger) Tracer(io.opentracing.Tracer) Promise(io.vertx.core.Promise) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Vertx(io.vertx.core.Vertx) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) UUID(java.util.UUID) KafkaRecordHelper(org.eclipse.hono.client.kafka.KafkaRecordHelper) MessageHelper(org.eclipse.hono.util.MessageHelper) Collectors(java.util.stream.Collectors) Future(io.vertx.core.Future) StandardCharsets(java.nio.charset.StandardCharsets) SpanContext(io.opentracing.SpanContext) Objects(java.util.Objects) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) List(java.util.List) KafkaProducerFactory(org.eclipse.hono.client.kafka.producer.KafkaProducerFactory) Buffer(io.vertx.core.buffer.Buffer) KafkaConsumerRecord(io.vertx.kafka.client.consumer.KafkaConsumerRecord) CommandSender(org.eclipse.hono.application.client.CommandSender) Optional(java.util.Optional) SendMessageTimeoutException(org.eclipse.hono.client.SendMessageTimeoutException) Span(io.opentracing.Span) Handler(io.vertx.core.Handler)

Example 3 with KafkaMessageContext

Use of org.eclipse.hono.application.client.kafka.KafkaMessageContext in project hono by eclipse.

The class KafkaApplicationClientImpl, method createKafkaBasedDownstreamMessageConsumer.

private Future<MessageConsumer> createKafkaBasedDownstreamMessageConsumer(final String tenantId, final HonoTopic.Type type, final Handler<DownstreamMessage<KafkaMessageContext>> messageHandler) {
    Objects.requireNonNull(tenantId);
    Objects.requireNonNull(type);
    Objects.requireNonNull(messageHandler);
    final String topic = new HonoTopic(type, tenantId).toString();
    final Handler<KafkaConsumerRecord<String, Buffer>> recordHandler = record -> {
        messageHandler.handle(new KafkaDownstreamMessage(record));
    };
    final HonoKafkaConsumer consumer = new HonoKafkaConsumer(vertx, Set.of(topic), recordHandler, consumerConfig.getConsumerConfig(type.toString()));
    consumer.setPollTimeout(Duration.ofMillis(consumerConfig.getPollTimeout()));
    Optional.ofNullable(kafkaConsumerSupplier).ifPresent(consumer::setKafkaConsumerSupplier);
    return consumer.start().map(v -> (MessageConsumer) new MessageConsumer() {

        @Override
        public Future<Void> close() {
            return consumer.stop();
        }
    }).onSuccess(consumersToCloseOnStop::add);
}
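
A minimal sketch of a message handler that could be passed to this method, using only the DownstreamMessage accessors that already appear in the examples above; the printed output, the tenantId variable and the choice of HonoTopic.Type.TELEMETRY are illustrative assumptions:

// hypothetical handler: print each downstream message as it arrives
final Handler<DownstreamMessage<KafkaMessageContext>> messageHandler = msg ->
        System.out.println("message from device " + msg.getDeviceId() + ": " + msg.getPayload());

// within KafkaApplicationClientImpl this could then be wired to a concrete topic type, e.g.
createKafkaBasedDownstreamMessageConsumer(tenantId, HonoTopic.Type.TELEMETRY, messageHandler)
        .onSuccess(consumer -> { /* keep the MessageConsumer in order to close() it later */ });
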
Also used : MessagingKafkaConsumerConfigProperties(org.eclipse.hono.client.kafka.consumer.MessagingKafkaConsumerConfigProperties) DownstreamMessage(org.eclipse.hono.application.client.DownstreamMessage) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) HonoKafkaConsumer(org.eclipse.hono.client.kafka.consumer.HonoKafkaConsumer) KafkaMessageContext(org.eclipse.hono.application.client.kafka.KafkaMessageContext) Supplier(java.util.function.Supplier) CompositeFuture(io.vertx.core.CompositeFuture) KafkaApplicationClient(org.eclipse.hono.application.client.kafka.KafkaApplicationClient) Duration(java.time.Duration) LinkedList(java.util.LinkedList) Consumer(org.apache.kafka.clients.consumer.Consumer) Tracer(io.opentracing.Tracer) NoopTracerFactory(io.opentracing.noop.NoopTracerFactory) Vertx(io.vertx.core.Vertx) Set(java.util.Set) Collectors(java.util.stream.Collectors) Future(io.vertx.core.Future) Objects(java.util.Objects) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) List(java.util.List) KafkaProducerFactory(org.eclipse.hono.client.kafka.producer.KafkaProducerFactory) Buffer(io.vertx.core.buffer.Buffer) KafkaConsumerRecord(io.vertx.kafka.client.consumer.KafkaConsumerRecord) Optional(java.util.Optional) MessageConsumer(org.eclipse.hono.application.client.MessageConsumer) Handler(io.vertx.core.Handler)

Aggregations

Tracer (io.opentracing.Tracer): 3
Vertx (io.vertx.core.Vertx): 3
Buffer (io.vertx.core.buffer.Buffer): 3
Duration (java.time.Duration): 3
List (java.util.List): 3
DownstreamMessage (org.eclipse.hono.application.client.DownstreamMessage): 3
KafkaMessageContext (org.eclipse.hono.application.client.kafka.KafkaMessageContext): 3
HonoTopic (org.eclipse.hono.client.kafka.HonoTopic): 3
MessagingKafkaConsumerConfigProperties (org.eclipse.hono.client.kafka.consumer.MessagingKafkaConsumerConfigProperties): 3
Span (io.opentracing.Span): 2
NoopTracerFactory (io.opentracing.noop.NoopTracerFactory): 2
CompositeFuture (io.vertx.core.CompositeFuture): 2
Future (io.vertx.core.Future): 2
Handler (io.vertx.core.Handler): 2
Promise (io.vertx.core.Promise): 2
KafkaConsumerRecord (io.vertx.kafka.client.consumer.KafkaConsumerRecord): 2
HttpURLConnection (java.net.HttpURLConnection): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
Objects (java.util.Objects): 2