
Example 16 with MockProducer

Use of org.apache.kafka.clients.producer.MockProducer in project kafka by apache.

From the class RecordCollectorTest, the method shouldNotAbortTxnOnEOSCloseDirtyIfNothingSent.

@Test
public void shouldNotAbortTxnOnEOSCloseDirtyIfNothingSent() {
    final AtomicBoolean functionCalled = new AtomicBoolean(false);
    final RecordCollector collector = new RecordCollectorImpl(logContext, taskId, new StreamsProducer(eosConfig, "-StreamThread-1", new MockClientSupplier() {

        @Override
        public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) {
            return new MockProducer<byte[], byte[]>(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {

                @Override
                public void abortTransaction() {
                    functionCalled.set(true);
                }
            };
        }
    }, taskId, processId, logContext, Time.SYSTEM), productionExceptionHandler, streamsMetrics);
    collector.closeDirty();
    assertFalse(functionCalled.get());
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) MockProducer(org.apache.kafka.clients.producer.MockProducer) DefaultPartitioner(org.apache.kafka.clients.producer.internals.DefaultPartitioner) MockClientSupplier(org.apache.kafka.test.MockClientSupplier) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) Map(java.util.Map) Test(org.junit.Test)
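
The essence of this test is that MockProducer can be subclassed to observe lifecycle calls such as abortTransaction(). Below is a minimal, self-contained sketch of that pattern; the class name AbortTrackingExample and the use of the plain three-argument MockProducer constructor are illustrative and not part of the original test.

import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.common.serialization.ByteArraySerializer;

public class AbortTrackingExample {

    public static void main(final String[] args) {
        final AtomicBoolean abortCalled = new AtomicBoolean(false);
        // Override abortTransaction() so the test can detect whether the code under test ever aborts.
        final MockProducer<byte[], byte[]> producer =
                new MockProducer<>(true, new ByteArraySerializer(), new ByteArraySerializer()) {
                    @Override
                    public void abortTransaction() {
                        abortCalled.set(true);
                    }
                };
        // The code under test would use 'producer' here; afterwards the flag shows
        // whether an abort was attempted (it should stay false if nothing was sent).
        producer.close();
        System.out.println("abortTransaction() called: " + abortCalled.get());
    }
}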

Example 17 with MockProducer

Use of org.apache.kafka.clients.producer.MockProducer in project kafka by apache.

From the class RecordCollectorTest, the method shouldThrowIfTopicIsUnknownOnSendWithPartitioner.

@Test
public void shouldThrowIfTopicIsUnknownOnSendWithPartitioner() {
    final RecordCollector collector = new RecordCollectorImpl(logContext, taskId, new StreamsProducer(config, processId + "-StreamThread-1", new MockClientSupplier() {

        @Override
        public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) {
            return new MockProducer<byte[], byte[]>(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {

                @Override
                public List<PartitionInfo> partitionsFor(final String topic) {
                    return Collections.emptyList();
                }
            };
        }
    }, null, null, logContext, Time.SYSTEM), productionExceptionHandler, streamsMetrics);
    collector.initialize();
    final StreamsException thrown = assertThrows(StreamsException.class, () -> collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner));
    assertThat(thrown.getMessage(), equalTo("Could not get partition information for topic topic for task 0_0." + " This can happen if the topic does not exist."));
}
Also used : MockProducer(org.apache.kafka.clients.producer.MockProducer) DefaultPartitioner(org.apache.kafka.clients.producer.internals.DefaultPartitioner) MockClientSupplier(org.apache.kafka.test.MockClientSupplier) StreamsException(org.apache.kafka.streams.errors.StreamsException) PartitionInfo(org.apache.kafka.common.PartitionInfo) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) Map(java.util.Map) Test(org.junit.Test)
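
Stripped of the Streams plumbing, the trick in this test is to override partitionsFor() so that topic metadata appears to be missing, which forces the caller down its error path. A minimal sketch, assuming StringSerializer and an illustrative topic name:

import java.util.Collections;
import java.util.List;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.serialization.StringSerializer;

public class UnknownTopicExample {

    public static void main(final String[] args) {
        // Pretend the broker knows nothing about any topic: partitionsFor() returns an empty list,
        // so callers that resolve a partition before sending must fail.
        final MockProducer<String, String> producer =
                new MockProducer<>(true, new StringSerializer(), new StringSerializer()) {
                    @Override
                    public List<PartitionInfo> partitionsFor(final String topic) {
                        return Collections.emptyList();
                    }
                };
        System.out.println("partitions for 'some-topic': " + producer.partitionsFor("some-topic"));
    }
}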

Example 18 with MockProducer

Use of org.apache.kafka.clients.producer.MockProducer in project hono by eclipse.

From the class KafkaBasedCommandSenderTest, the method sendCommandAndReceiveResponse.

private void sendCommandAndReceiveResponse(final VertxTestContext ctx, final String correlationId, final Integer responseStatus, final String responsePayload, final boolean expectSuccess, final int expectedStatusCode) {
    final Context context = vertx.getOrCreateContext();
    final Promise<Void> onProducerRecordSentPromise = Promise.promise();
    mockProducer = new MockProducer<>(true, new StringSerializer(), new BufferSerializer()) {

        @Override
        public synchronized java.util.concurrent.Future<RecordMetadata> send(final ProducerRecord<String, Buffer> record, final Callback callback) {
            return super.send(record, (metadata, exception) -> {
                callback.onCompletion(metadata, exception);
                context.runOnContext(v -> {
                    // decouple from current execution in order to run after the "send" result handler
                    onProducerRecordSentPromise.complete();
                });
            });
        }
    };
    final var producerFactory = CachingKafkaProducerFactory.testFactory(vertx, (n, c) -> KafkaClientUnitTestHelper.newKafkaProducer(mockProducer));
    commandSender = new KafkaBasedCommandSender(vertx, consumerConfig, producerFactory, producerConfig, NoopTracerFactory.create());
    final Map<String, Object> headerProperties = new HashMap<>();
    headerProperties.put("appKey", "appValue");
    final String command = "setVolume";
    final ConsumerRecord<String, Buffer> commandResponseRecord = commandResponseRecord(tenantId, deviceId, correlationId, responseStatus, Buffer.buffer(responsePayload));
    final String responseTopic = new HonoTopic(HonoTopic.Type.COMMAND_RESPONSE, tenantId).toString();
    final TopicPartition responseTopicPartition = new TopicPartition(responseTopic, 0);
    mockConsumer.setRebalancePartitionAssignmentAfterSubscribe(List.of(responseTopicPartition));
    mockConsumer.updatePartitions(responseTopicPartition, KafkaMockConsumer.DEFAULT_NODE);
    mockConsumer.updateBeginningOffsets(Map.of(responseTopicPartition, 0L));
    mockConsumer.updateEndOffsets(Map.of(responseTopicPartition, 0L));
    onProducerRecordSentPromise.future().onComplete(ar -> {
        LOG.debug("producer record sent, add command response record to mockConsumer");
        // Send a command response with the same correlation id as that of the command
        mockConsumer.addRecord(commandResponseRecord);
    });
    // This correlation id is used for both command and its response.
    commandSender.setCorrelationIdSupplier(() -> correlationId);
    commandSender.setKafkaConsumerSupplier(() -> mockConsumer);
    context.runOnContext(v -> {
        // Send a command to the device
        commandSender.sendCommand(tenantId, deviceId, command, "text/plain", Buffer.buffer("test"), headerProperties).onComplete(ar -> {
            ctx.verify(() -> {
                if (expectSuccess) {
                    // assert that send operation succeeded
                    assertThat(ar.succeeded()).isTrue();
                    // Verify the command response that has been received
                    final DownstreamMessage<KafkaMessageContext> response = ar.result();
                    assertThat(response.getDeviceId()).isEqualTo(deviceId);
                    assertThat(response.getStatus()).isEqualTo(responseStatus);
                    assertThat(response.getPayload().toString()).isEqualTo(responsePayload);
                } else {
                    // assert that send operation failed
                    assertThat(ar.succeeded()).isFalse();
                    assertThat(ar.cause()).isInstanceOf(ServiceInvocationException.class);
                    assertThat(((ServiceInvocationException) ar.cause()).getErrorCode()).isEqualTo(expectedStatusCode);
                    assertThat(ar.cause().getMessage()).isEqualTo(responsePayload);
                }
            });
            ctx.completeNow();
            mockConsumer.close();
            commandSender.stop();
        });
    });
}
Also used : HttpURLConnection(java.net.HttpURLConnection) BeforeEach(org.junit.jupiter.api.BeforeEach) MessagingKafkaConsumerConfigProperties(org.eclipse.hono.client.kafka.consumer.MessagingKafkaConsumerConfigProperties) DownstreamMessage(org.eclipse.hono.application.client.DownstreamMessage) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) KafkaMessageContext(org.eclipse.hono.application.client.kafka.KafkaMessageContext) LoggerFactory(org.slf4j.LoggerFactory) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) Context(io.vertx.core.Context) Timeout(io.vertx.junit5.Timeout) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Duration(java.time.Duration) Map(java.util.Map) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TracingMockSupport(org.eclipse.hono.test.TracingMockSupport) JsonObject(io.vertx.core.json.JsonObject) TimestampType(org.apache.kafka.common.record.TimestampType) TopicPartition(org.apache.kafka.common.TopicPartition) KafkaMockConsumer(org.eclipse.hono.kafka.test.KafkaMockConsumer) CachingKafkaProducerFactory(org.eclipse.hono.client.kafka.producer.CachingKafkaProducerFactory) UUID(java.util.UUID) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) MessageHelper(org.eclipse.hono.util.MessageHelper) VertxExtension(io.vertx.junit5.VertxExtension) Test(org.junit.jupiter.api.Test) List(java.util.List) Buffer(io.vertx.core.buffer.Buffer) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Span(io.opentracing.Span) Callback(org.apache.kafka.clients.producer.Callback) VertxTestContext(io.vertx.junit5.VertxTestContext) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) BufferSerializer(io.vertx.kafka.client.serialization.BufferSerializer) HashMap(java.util.HashMap) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) ServiceInvocationException(org.eclipse.hono.client.ServiceInvocationException) ArrayList(java.util.ArrayList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) KafkaClientUnitTestHelper(org.eclipse.hono.kafka.test.KafkaClientUnitTestHelper) Logger(org.slf4j.Logger) Tracer(io.opentracing.Tracer) NoopTracerFactory(io.opentracing.noop.NoopTracerFactory) Promise(io.vertx.core.Promise) Vertx(io.vertx.core.Vertx) Truth.assertThat(com.google.common.truth.Truth.assertThat) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) AfterEach(org.junit.jupiter.api.AfterEach) SendMessageTimeoutException(org.eclipse.hono.client.SendMessageTimeoutException) NoopSpan(io.opentracing.noop.NoopSpan) MockProducer(org.apache.kafka.clients.producer.MockProducer)
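
The central idea in this example is that MockProducer.send() can be overridden to chain a second callback behind the caller's, giving the test a hook that fires once the record has been "sent" (here used to feed the command response into a MockConsumer). A minimal sketch, assuming String values and an illustrative topic name instead of Hono's command topic:

import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

public class SendHookExample {

    public static void main(final String[] args) {
        final MockProducer<String, String> producer =
                new MockProducer<>(true, new StringSerializer(), new StringSerializer()) {
                    @Override
                    public synchronized Future<RecordMetadata> send(final ProducerRecord<String, String> record,
                            final Callback callback) {
                        // Run the caller's callback first, then the test's own hook,
                        // e.g. to add a response record to a MockConsumer.
                        return super.send(record, (metadata, exception) -> {
                            callback.onCompletion(metadata, exception);
                            System.out.println("record sent to " + metadata.topic() + ", inject the response now");
                        });
                    }
                };
        producer.send(new ProducerRecord<>("commands", "device-1", "setVolume"),
                (metadata, exception) -> System.out.println("caller callback invoked"));
    }
}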

Example 19 with MockProducer

Use of org.apache.kafka.clients.producer.MockProducer in project hono by eclipse.

From the class KafkaBasedEventSenderTest, the method testSendEventCreatesCorrectRecord.

/**
 * Verifies that the Kafka record is created as expected.
 *
 * @param ctx The vert.x test context.
 */
@Test
public void testSendEventCreatesCorrectRecord(final VertxTestContext ctx) {
    // GIVEN a sender
    final String contentType = "text/plain";
    final String payload = "the-payload";
    final Map<String, Object> properties = Map.of("foo", "bar", MessageHelper.SYS_HEADER_PROPERTY_TTL, 5);
    final var span = TracingMockSupport.mockSpan();
    final var tracer = TracingMockSupport.mockTracer(span);
    final var mockProducer = KafkaClientUnitTestHelper.newMockProducer(true);
    final var factory = newProducerFactory(mockProducer);
    final var sender = new KafkaBasedEventSender(vertxMock, factory, kafkaProducerConfig, true, tracer);
    // WHEN sending a message
    sender.sendEvent(tenant, device, contentType, Buffer.buffer(payload), properties, null).onComplete(ctx.succeeding(t -> {
        ctx.verify(() -> {
            // THEN the producer record is created from the given values...
            final var producerRecord = mockProducer.history().get(0);
            assertThat(producerRecord.key()).isEqualTo(device.getDeviceId());
            assertThat(producerRecord.topic()).isEqualTo(new HonoTopic(HonoTopic.Type.EVENT, tenant.getTenantId()).toString());
            assertThat(producerRecord.value().toString()).isEqualTo(payload);
            KafkaClientUnitTestHelper.assertUniqueHeaderWithExpectedValue(producerRecord.headers(), "foo", "bar");
            KafkaClientUnitTestHelper.assertUniqueHeaderWithExpectedValue(producerRecord.headers(), MessageHelper.SYS_HEADER_PROPERTY_TTL, 5000L);
            // ...AND contains the standard headers
            KafkaClientUnitTestHelper.assertStandardHeaders(producerRecord, device.getDeviceId(), contentType, QoS.AT_LEAST_ONCE.ordinal());
            verify(span).finish();
        });
        ctx.completeNow();
    }));
}
Also used : Assertions.assertThrows(org.junit.jupiter.api.Assertions.assertThrows) VertxTestContext(io.vertx.junit5.VertxTestContext) BeforeEach(org.junit.jupiter.api.BeforeEach) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) EventBus(io.vertx.core.eventbus.EventBus) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Map(java.util.Map) TracingMockSupport(org.eclipse.hono.test.TracingMockSupport) KafkaClientUnitTestHelper(org.eclipse.hono.kafka.test.KafkaClientUnitTestHelper) Tracer(io.opentracing.Tracer) NoopTracerFactory(io.opentracing.noop.NoopTracerFactory) CachingKafkaProducerFactory(org.eclipse.hono.client.kafka.producer.CachingKafkaProducerFactory) Vertx(io.vertx.core.Vertx) RegistrationAssertion(org.eclipse.hono.util.RegistrationAssertion) Mockito.when(org.mockito.Mockito.when) Truth.assertThat(com.google.common.truth.Truth.assertThat) MessageHelper(org.eclipse.hono.util.MessageHelper) VertxExtension(io.vertx.junit5.VertxExtension) Mockito.verify(org.mockito.Mockito.verify) TenantObject(org.eclipse.hono.util.TenantObject) Test(org.junit.jupiter.api.Test) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) Buffer(io.vertx.core.buffer.Buffer) QoS(org.eclipse.hono.util.QoS) MockProducer(org.apache.kafka.clients.producer.MockProducer) Mockito.mock(org.mockito.Mockito.mock)
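
The assertion side of this test relies on MockProducer.history(), which records every ProducerRecord handed to send(). A minimal sketch of that inspection pattern, assuming String values and an illustrative topic name rather than Hono's HonoTopic:

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class HistoryInspectionExample {

    public static void main(final String[] args) {
        final MockProducer<String, String> producer =
                new MockProducer<>(true, new StringSerializer(), new StringSerializer());
        // The code under test produces a record ...
        producer.send(new ProducerRecord<>("hono.event.my-tenant", "device-1", "the-payload"));
        // ... and the test inspects exactly what was handed to the producer.
        final ProducerRecord<String, String> sent = producer.history().get(0);
        System.out.println("topic=" + sent.topic() + ", key=" + sent.key() + ", value=" + sent.value());
    }
}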

Example 20 with MockProducer

Use of org.apache.kafka.clients.producer.MockProducer in project hono by eclipse.

From the class KafkaBasedTelemetrySenderTest, the method testSendTelemetryCreatesCorrectRecord.

/**
 * Verifies that the Kafka record is created as expected when sending telemetry data.
 *
 * @param qos The quality of service used for sending the message.
 * @param expectedTtl The ttl expected in the message.
 * @param ctx The vert.x test context.
 */
@ParameterizedTest
@CsvSource(value = { "AT_MOST_ONCE,10000", "AT_LEAST_ONCE,60000" })
public void testSendTelemetryCreatesCorrectRecord(final QoS qos, final long expectedTtl, final VertxTestContext ctx) {
    // GIVEN a telemetry sender
    final String payload = "the-payload";
    final String contentType = "text/plain";
    final Map<String, Object> properties = Map.of("foo", "bar");
    final var spanFinished = ctx.checkpoint();
    final var messageHasHeaders = ctx.checkpoint();
    final var span = TracingMockSupport.mockSpan();
    doAnswer(invocation -> {
        spanFinished.flag();
        return null;
    }).when(span).finish();
    final var tracer = TracingMockSupport.mockTracer(span);
    final var mockProducer = KafkaClientUnitTestHelper.newMockProducer(true);
    final var factory = CachingKafkaProducerFactory.testFactory(vertxMock, (n, c) -> KafkaClientUnitTestHelper.newKafkaProducer(mockProducer));
    final var sender = new KafkaBasedTelemetrySender(vertxMock, factory, kafkaProducerConfig, true, tracer);
    tenant.setResourceLimits(new ResourceLimits().setMaxTtlTelemetryQoS0(10L).setMaxTtlTelemetryQoS1(60L));
    // WHEN sending telemetry data
    sender.sendTelemetry(tenant, device, qos, contentType, Buffer.buffer(payload), properties, null).onComplete(ctx.succeeding(t -> {
        ctx.verify(() -> {
            // THEN the producer record is created from the given values...
            final var producerRecord = mockProducer.history().get(0);
            assertThat(producerRecord.key()).isEqualTo(device.getDeviceId());
            assertThat(producerRecord.topic()).isEqualTo(new HonoTopic(HonoTopic.Type.TELEMETRY, tenant.getTenantId()).toString());
            assertThat(producerRecord.value().toString()).isEqualTo(payload);
            KafkaClientUnitTestHelper.assertUniqueHeaderWithExpectedValue(producerRecord.headers(), "foo", "bar");
            KafkaClientUnitTestHelper.assertUniqueHeaderWithExpectedValue(producerRecord.headers(), MessageHelper.SYS_HEADER_PROPERTY_TTL, expectedTtl);
            // ...AND contains the standard headers
            KafkaClientUnitTestHelper.assertStandardHeaders(producerRecord, device.getDeviceId(), contentType, qos.ordinal());
        });
        messageHasHeaders.flag();
    }));
}
Also used : Assertions.assertThrows(org.junit.jupiter.api.Assertions.assertThrows) VertxTestContext(io.vertx.junit5.VertxTestContext) BeforeEach(org.junit.jupiter.api.BeforeEach) CsvSource(org.junit.jupiter.params.provider.CsvSource) MessagingKafkaProducerConfigProperties(org.eclipse.hono.client.kafka.producer.MessagingKafkaProducerConfigProperties) HashMap(java.util.HashMap) EventBus(io.vertx.core.eventbus.EventBus) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Mockito.doAnswer(org.mockito.Mockito.doAnswer) Map(java.util.Map) TracingMockSupport(org.eclipse.hono.test.TracingMockSupport) KafkaClientUnitTestHelper(org.eclipse.hono.kafka.test.KafkaClientUnitTestHelper) ResourceLimits(org.eclipse.hono.util.ResourceLimits) Tracer(io.opentracing.Tracer) NoopTracerFactory(io.opentracing.noop.NoopTracerFactory) CachingKafkaProducerFactory(org.eclipse.hono.client.kafka.producer.CachingKafkaProducerFactory) Vertx(io.vertx.core.Vertx) RegistrationAssertion(org.eclipse.hono.util.RegistrationAssertion) Mockito.when(org.mockito.Mockito.when) Truth.assertThat(com.google.common.truth.Truth.assertThat) MessageHelper(org.eclipse.hono.util.MessageHelper) VertxExtension(io.vertx.junit5.VertxExtension) TenantObject(org.eclipse.hono.util.TenantObject) Test(org.junit.jupiter.api.Test) HonoTopic(org.eclipse.hono.client.kafka.HonoTopic) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Buffer(io.vertx.core.buffer.Buffer) QoS(org.eclipse.hono.util.QoS) MockProducer(org.apache.kafka.clients.producer.MockProducer) Mockito.mock(org.mockito.Mockito.mock)
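
Header assertions such as assertUniqueHeaderWithExpectedValue ultimately boil down to reading headers off the record captured by MockProducer. A minimal sketch using only the Kafka clients API; the topic name, key, and header name are illustrative:

import java.nio.charset.StandardCharsets;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.serialization.StringSerializer;

public class HeaderInspectionExample {

    public static void main(final String[] args) {
        final MockProducer<String, String> producer =
                new MockProducer<>(true, new StringSerializer(), new StringSerializer());
        // The code under test attaches headers when producing ...
        final ProducerRecord<String, String> record =
                new ProducerRecord<>("hono.telemetry.my-tenant", "device-1", "the-payload");
        record.headers().add("foo", "bar".getBytes(StandardCharsets.UTF_8));
        producer.send(record);
        // ... and the test reads them back from the captured record.
        final Header header = producer.history().get(0).headers().lastHeader("foo");
        System.out.println("foo=" + new String(header.value(), StandardCharsets.UTF_8));
    }
}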

Aggregations

MockProducer (org.apache.kafka.clients.producer.MockProducer): 32 uses
Test (org.junit.Test): 25 uses
DefaultPartitioner (org.apache.kafka.clients.producer.internals.DefaultPartitioner): 17 uses
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 14 uses
StreamsException (org.apache.kafka.streams.errors.StreamsException): 14 uses
Callback (org.apache.kafka.clients.producer.Callback): 13 uses
Future (java.util.concurrent.Future): 12 uses
KafkaException (org.apache.kafka.common.KafkaException): 10 uses
Map (java.util.Map): 8 uses
HashMap (java.util.HashMap): 7 uses
Truth.assertThat (com.google.common.truth.Truth.assertThat): 6 uses
Vertx (io.vertx.core.Vertx): 5 uses
Buffer (io.vertx.core.buffer.Buffer): 5 uses
VertxExtension (io.vertx.junit5.VertxExtension): 5 uses
VertxTestContext (io.vertx.junit5.VertxTestContext): 5 uses
TimeoutException (org.apache.kafka.common.errors.TimeoutException): 5 uses
KafkaClientUnitTestHelper (org.eclipse.hono.kafka.test.KafkaClientUnitTestHelper): 5 uses
BeforeEach (org.junit.jupiter.api.BeforeEach): 5 uses
Test (org.junit.jupiter.api.Test): 5 uses
ExtendWith (org.junit.jupiter.api.extension.ExtendWith): 5 uses