Example 36 with Callback

Use of org.apache.kafka.clients.producer.Callback in project brave by openzipkin.

From the class TracingCallbackTest, method onCompletion_shouldKeepContext_whenNotSampled.

@Test
public void onCompletion_shouldKeepContext_whenNotSampled() {
    Span span = tracing.tracer().nextSpan(TraceContextOrSamplingFlags.NOT_SAMPLED);
    Callback delegate = (metadata, exception) -> assertThat(tracing.tracer().currentSpan()).isEqualTo(span);
    Callback tracingCallback = TracingCallback.create(delegate, span, tracing.currentTraceContext());
    tracingCallback.onCompletion(null, null);
}
Also used: TraceContextOrSamplingFlags(brave.propagation.TraceContextOrSamplingFlags) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) Span(brave.Span) Test(org.junit.Test) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) Callback(org.apache.kafka.clients.producer.Callback) Mockito.mock(org.mockito.Mockito.mock) Mockito.verify(org.mockito.Mockito.verify)
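
This test verifies that the delegate Callback still observes the (unsampled) span as the current span when onCompletion runs. A minimal sketch of that wrapping pattern, using only the standard Kafka Callback interface, is shown below; the ContextScope abstraction and class name are illustrative placeholders, not brave's actual internals:

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.RecordMetadata;

// Illustrative wrapper: re-establishes some notion of "current context" around the
// delegate's onCompletion. The ContextScope type is hypothetical; brave's real
// TracingCallback relies on its CurrentTraceContext machinery internally.
final class ContextRestoringCallback implements Callback {

    interface ContextScope extends AutoCloseable {
        @Override
        void close();
    }

    private final Callback delegate;
    private final java.util.function.Supplier<ContextScope> scopeSupplier;

    ContextRestoringCallback(Callback delegate, java.util.function.Supplier<ContextScope> scopeSupplier) {
        this.delegate = delegate;
        this.scopeSupplier = scopeSupplier;
    }

    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        // open the scope so the delegate observes the expected current span, then close it again
        try (ContextScope scope = scopeSupplier.get()) {
            if (delegate != null) {
                delegate.onCompletion(metadata, exception);
            }
        }
    }
}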

Example 37 with Callback

Use of org.apache.kafka.clients.producer.Callback in project brave by openzipkin.

From the class TracingCallbackTest, method on_completion_should_finish_span.

@Test
public void on_completion_should_finish_span() {
    Span span = tracing.tracer().nextSpan().start();
    Callback tracingCallback = TracingCallback.create(null, span, currentTraceContext);
    tracingCallback.onCompletion(createRecordMetadata(), null);
    assertThat(spans.get(0).finishTimestamp()).isNotZero();
}
Also used: Callback(org.apache.kafka.clients.producer.Callback) Span(brave.Span) Test(org.junit.Test)
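
The Callback under test here is the same interface an application hands to KafkaProducer.send. For reference, outside of any tracing instrumentation, a plain usage looks like the sketch below; the bootstrap address and topic name are illustrative:

import java.util.Properties;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class CallbackExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // illustrative address
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // invoked asynchronously once the broker acknowledges (or the send fails)
            Callback callback = (metadata, exception) -> {
                if (exception != null) {
                    System.err.println("send failed: " + exception.getMessage());
                } else {
                    System.out.printf("sent to %s-%d at offset %d%n",
                            metadata.topic(), metadata.partition(), metadata.offset());
                }
            };
            producer.send(new ProducerRecord<>("example-topic", "key", "value"), callback);
        }
    }
}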

Example 38 with Callback

Use of org.apache.kafka.clients.producer.Callback in project hono by eclipse.

From the class KafkaBasedCommandSenderTest, method sendCommandAndReceiveResponse.

private void sendCommandAndReceiveResponse(final VertxTestContext ctx, final String correlationId, final Integer responseStatus, final String responsePayload, final boolean expectSuccess, final int expectedStatusCode) {
    final Context context = vertx.getOrCreateContext();
    final Promise<Void> onProducerRecordSentPromise = Promise.promise();
    mockProducer = new MockProducer<>(true, new StringSerializer(), new BufferSerializer()) {

        @Override
        public synchronized java.util.concurrent.Future<RecordMetadata> send(final ProducerRecord<String, Buffer> record, final Callback callback) {
            return super.send(record, (metadata, exception) -> {
                callback.onCompletion(metadata, exception);
                context.runOnContext(v -> {
                    // decouple from current execution in order to run after the "send" result handler
                    onProducerRecordSentPromise.complete();
                });
            });
        }
    };
    final var producerFactory = CachingKafkaProducerFactory.testFactory(vertx, (n, c) -> KafkaClientUnitTestHelper.newKafkaProducer(mockProducer));
    commandSender = new KafkaBasedCommandSender(vertx, consumerConfig, producerFactory, producerConfig, NoopTracerFactory.create());
    final Map<String, Object> headerProperties = new HashMap<>();
    headerProperties.put("appKey", "appValue");
    final String command = "setVolume";
    final ConsumerRecord<String, Buffer> commandResponseRecord = commandResponseRecord(tenantId, deviceId, correlationId, responseStatus, Buffer.buffer(responsePayload));
    final String responseTopic = new HonoTopic(HonoTopic.Type.COMMAND_RESPONSE, tenantId).toString();
    final TopicPartition responseTopicPartition = new TopicPartition(responseTopic, 0);
    mockConsumer.setRebalancePartitionAssignmentAfterSubscribe(List.of(responseTopicPartition));
    mockConsumer.updatePartitions(responseTopicPartition, KafkaMockConsumer.DEFAULT_NODE);
    mockConsumer.updateBeginningOffsets(Map.of(responseTopicPartition, 0L));
    mockConsumer.updateEndOffsets(Map.of(responseTopicPartition, 0L));
    onProducerRecordSentPromise.future().onComplete(ar -> {
        LOG.debug("producer record sent, add command response record to mockConsumer");
        // Send a command response with the same correlation id as that of the command
        mockConsumer.addRecord(commandResponseRecord);
    });
    // This correlation id is used for both command and its response.
    commandSender.setCorrelationIdSupplier(() -> correlationId);
    commandSender.setKafkaConsumerSupplier(() -> mockConsumer);
    context.runOnContext(v -> {
        // Send a command to the device
        commandSender.sendCommand(tenantId, deviceId, command, "text/plain", Buffer.buffer("test"), headerProperties).onComplete(ar -> {
            ctx.verify(() -> {
                if (expectSuccess) {
                    // assert that send operation succeeded
                    assertThat(ar.succeeded()).isTrue();
                    // Verify the command response that has been received
                    final DownstreamMessage<KafkaMessageContext> response = ar.result();
                    assertThat(response.getDeviceId()).isEqualTo(deviceId);
                    assertThat(response.getStatus()).isEqualTo(responseStatus);
                    assertThat(response.getPayload().toString()).isEqualTo(responsePayload);
                } else {
                    // assert that send operation failed
                    assertThat(ar.succeeded()).isFalse();
                    assertThat(ar.cause()).isInstanceOf(ServiceInvocationException.class);
                    assertThat(((ServiceInvocationException) ar.cause()).getErrorCode()).isEqualTo(expectedStatusCode);
                    assertThat(ar.cause().getMessage()).isEqualTo(responsePayload);
                }
            });
            ctx.completeNow();
            mockConsumer.close();
            commandSender.stop();
        });
    });
}
Also used: HttpURLConnection(java.net.HttpURLConnection) BeforeEach(org.junit.jupiter.api.BeforeEach) MessagingKafkaConsumerConfigProperties(org.eclipse.hono.client.kafka.consumer.MessagingKafkaConsumerConfigProperties) DownstreamMessage(org.eclipse.hono.application.client.DownstreamMessage) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) KafkaMessageContext(org.eclipse.hono.application.client.kafka.KafkaMessageContext) LoggerFactory(org.slf4j.LoggerFactory) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) Context(io.vertx.core.Context) Timeout(io.vertx.junit5.Timeout) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Duration(java.time.Duration) Map(java.util.Map) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TracingMockSupport(org.eclipse.hono.test.TracingMockSupport) JsonObject(io.vertx.core.json.JsonObject) TimestampType(org.apache.kafka.common.record.TimestampType) TopicPartition(org.apache.kafka.common.TopicPartition) KafkaMockConsumer(org.eclipse.hono.kafka.test.KafkaMockConsumer) CachingKafkaProducerFactory(org.eclipse.hono.client.kafka.producer.CachingKafkaProducerFactory) UUID(java.util.UUID) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) MessageHelper(org.eclipse.hono.util.MessageHelper) VertxExtension(io.vertx.junit5.VertxExtension) Test(org.junit.jupiter.api.Test) List(java.util.List) Buffer(io.vertx.core.buffer.Buffer) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Span(io.opentracing.Span) Callback(org.apache.kafka.clients.producer.Callback) VertxTestContext(io.vertx.junit5.VertxTestContext) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) BufferSerializer(io.vertx.kafka.client.serialization.BufferSerializer) HashMap(java.util.HashMap) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) ServiceInvocationException(org.eclipse.hono.client.ServiceInvocationException) ArrayList(java.util.ArrayList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) KafkaClientUnitTestHelper(org.eclipse.hono.kafka.test.KafkaClientUnitTestHelper) Logger(org.slf4j.Logger) Tracer(io.opentracing.Tracer) NoopTracerFactory(io.opentracing.noop.NoopTracerFactory) Promise(io.vertx.core.Promise) Vertx(io.vertx.core.Vertx) Truth.assertThat(com.google.common.truth.Truth.assertThat) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) AfterEach(org.junit.jupiter.api.AfterEach) SendMessageTimeoutException(org.eclipse.hono.client.SendMessageTimeoutException) NoopSpan(io.opentracing.noop.NoopSpan) MockProducer(org.apache.kafka.clients.producer.MockProducer)
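
The override of MockProducer.send above is what lets the test run additional logic after the caller's Callback has completed. Stripped of the Vert.x and Hono specifics, the underlying MockProducer pattern can be sketched roughly as follows; the class name and the Runnable hook are illustrative:

import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

final class HookingMockProducer extends MockProducer<String, String> {

    private final Runnable afterCallback;

    HookingMockProducer(Runnable afterCallback) {
        // autoComplete = true: send() completes immediately and invokes the callback synchronously
        super(true, new StringSerializer(), new StringSerializer());
        this.afterCallback = afterCallback;
    }

    @Override
    public synchronized Future<RecordMetadata> send(ProducerRecord<String, String> record, Callback callback) {
        return super.send(record, (metadata, exception) -> {
            // run the caller's callback first, then the test hook
            if (callback != null) {
                callback.onCompletion(metadata, exception);
            }
            afterCallback.run();
        });
    }
}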

Example 39 with Callback

Use of org.apache.kafka.clients.producer.Callback in project open-kilda by telstra.

From the class KafkaProducerServiceTest, method errorReporting.

@Test
public void errorReporting() throws Exception {
    final ExecutionException error = new ExecutionException("Emulate kafka send error", new IOException());
    Future promise = mock(Future.class);
    expect(promise.get()).andThrow(error).anyTimes();
    replay(promise);
    expect(kafkaProducer.send(anyObject(), anyObject(Callback.class))).andAnswer(new IAnswer<Future<RecordMetadata>>() {

        @Override
        public Future<RecordMetadata> answer() {
            Callback callback = (Callback) getCurrentArguments()[1];
            callback.onCompletion(null, error);
            return promise;
        }
    });
    replay(kafkaProducer);
    subject.sendMessageAndTrack(TOPIC, makePayload());
    verify(kafkaProducer);
// This test does not make any assertions, because its only observable action is an error log message;
// that message can be located in the test's output.
}
Also used: Callback(org.apache.kafka.clients.producer.Callback) Future(java.util.concurrent.Future) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
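
The EasyMock answer above simply pulls the Callback out of the captured arguments and completes it with the emulated error. A roughly equivalent arrangement with Mockito (already imported by several other examples on this page) might look like the sketch below; the helper name and the mocked Future are illustrative:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

class ErrorReportingSketch {

    @SuppressWarnings("unchecked")
    static KafkaProducer<String, String> producerFailingWith(Exception error) {
        KafkaProducer<String, String> producer = mock(KafkaProducer.class);
        Future<RecordMetadata> failedFuture = mock(Future.class);
        when(producer.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(invocation -> {
            // hand the error to the caller's callback, mirroring what the real client does on failure
            Callback callback = invocation.getArgument(1);
            callback.onCompletion(null, error);
            return failedFuture;
        });
        return producer;
    }
}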

Example 40 with Callback

Use of org.apache.kafka.clients.producer.Callback in project kafka by apache.

From the class RecordCollectorTest, method shouldRetryWhenTimeoutExceptionOccursOnSend.

@SuppressWarnings("unchecked")
@Test
public void shouldRetryWhenTimeoutExceptionOccursOnSend() throws Exception {
    final AtomicInteger attempt = new AtomicInteger(0);
    RecordCollectorImpl collector = new RecordCollectorImpl(new MockProducer(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {

        @Override
        public synchronized Future<RecordMetadata> send(final ProducerRecord record, final Callback callback) {
            if (attempt.getAndIncrement() == 0) {
                throw new TimeoutException();
            }
            return super.send(record, callback);
        }
    }, "test");
    collector.send("topic1", "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    final Long offset = collector.offsets().get(new TopicPartition("topic1", 0));
    assertEquals(Long.valueOf(0L), offset);
}
Also used: MockProducer(org.apache.kafka.clients.producer.MockProducer) Callback(org.apache.kafka.clients.producer.Callback) DefaultPartitioner(org.apache.kafka.clients.producer.internals.DefaultPartitioner) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Future(java.util.concurrent.Future) TimeoutException(org.apache.kafka.common.errors.TimeoutException) Test(org.junit.Test)
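
RecordCollectorImpl retries internally when the producer throws a TimeoutException, which is exactly what the overridden send above provokes on the first attempt. A standalone sketch of the same retry-on-timeout idea for a plain producer is shown below; the attempt limit and backoff are illustrative choices, not values taken from Kafka Streams:

import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.errors.TimeoutException;

final class RetryingSender {

    static Future<RecordMetadata> sendWithRetry(Producer<String, String> producer,
                                                ProducerRecord<String, String> record,
                                                int maxAttempts) throws InterruptedException {
        for (int attempt = 1; ; attempt++) {
            try {
                // a Callback is optional here; the retry only concerns the synchronous TimeoutException
                return producer.send(record);
            } catch (TimeoutException e) {
                if (attempt >= maxAttempts) {
                    throw e; // give up after the configured number of attempts
                }
                Thread.sleep(100L * attempt); // illustrative linear backoff
            }
        }
    }
}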

Aggregations

Callback (org.apache.kafka.clients.producer.Callback): 81 usages
Test (org.junit.Test): 47 usages
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 39 usages
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 37 usages
KafkaException (org.apache.kafka.common.KafkaException): 21 usages
Future (java.util.concurrent.Future): 18 usages
TimeoutException (org.apache.kafka.common.errors.TimeoutException): 18 usages
ExecutionException (java.util.concurrent.ExecutionException): 15 usages
ArrayList (java.util.ArrayList): 14 usages
List (java.util.List): 13 usages
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 13 usages
MockProducer (org.apache.kafka.clients.producer.MockProducer): 13 usages
HashMap (java.util.HashMap): 12 usages
Properties (java.util.Properties): 12 usages
DefaultPartitioner (org.apache.kafka.clients.producer.internals.DefaultPartitioner): 12 usages
TopicPartition (org.apache.kafka.common.TopicPartition): 12 usages
Schema (org.apache.kafka.connect.data.Schema): 12 usages
Struct (org.apache.kafka.connect.data.Struct): 12 usages
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 11 usages
StreamsException (org.apache.kafka.streams.errors.StreamsException): 11 usages