
Example 96 with Time

Use of org.apache.kafka.common.utils.Time in the Apache Kafka project.

From class KafkaConsumerTest, method testEnforceRebalanceTriggersRebalanceOnNextPoll.

@Test
public void testEnforceRebalanceTriggersRebalanceOnNextPoll() {
    Time time = new MockTime(1L);
    ConsumerMetadata metadata = createMetadata(subscription);
    MockClient client = new MockClient(time, metadata);
    KafkaConsumer<String, String> consumer = newConsumer(time, client, subscription, metadata, assignor, true, groupInstanceId);
    MockRebalanceListener countingRebalanceListener = new MockRebalanceListener();
    initMetadata(client, Utils.mkMap(Utils.mkEntry(topic, 1), Utils.mkEntry(topic2, 1), Utils.mkEntry(topic3, 1)));
    consumer.subscribe(Arrays.asList(topic, topic2), countingRebalanceListener);
    Node node = metadata.fetch().nodes().get(0);
    prepareRebalance(client, node, assignor, Arrays.asList(tp0, t2p0), null);
    // a first rebalance to get the assignment, we need two poll calls since we need two round trips to finish join / sync-group
    consumer.poll(Duration.ZERO);
    consumer.poll(Duration.ZERO);
    // onPartitionsRevoked is not invoked when first joining the group
    assertEquals(0, countingRebalanceListener.revokedCount);
    assertEquals(1, countingRebalanceListener.assignedCount);
    consumer.enforceRebalance();
    // the next poll should trigger a rebalance
    consumer.poll(Duration.ZERO);
    assertEquals(1, countingRebalanceListener.revokedCount);
}
Also used : ConsumerMetadata(org.apache.kafka.clients.consumer.internals.ConsumerMetadata) MockRebalanceListener(org.apache.kafka.clients.consumer.internals.MockRebalanceListener) Node(org.apache.kafka.common.Node) MockTime(org.apache.kafka.common.utils.MockTime) Time(org.apache.kafka.common.utils.Time) MockClient(org.apache.kafka.clients.MockClient) Test(org.junit.jupiter.api.Test)
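
The MockTime(1L) used above is a deterministic clock that auto-advances by one millisecond every time it is read, so request timeouts and retries progress without any real waiting. A minimal sketch of how such a clock behaves on its own, assuming only the public org.apache.kafka.common.utils.MockTime API (the surrounding main method is illustrative):

import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;

public class MockTimeSketch {
    public static void main(String[] args) {
        // Auto-tick of 1 ms: each read of the clock advances it, even with no real delay.
        Time autoTicking = new MockTime(1L);
        long first = autoTicking.milliseconds();
        long second = autoTicking.milliseconds();
        System.out.println(second - first); // at least 1

        // Without auto-tick, the clock only moves when sleep() is called, and sleep() never blocks.
        MockTime manual = new MockTime();
        long before = manual.milliseconds();
        manual.sleep(5_000L);
        System.out.println(manual.milliseconds() - before); // 5000
    }
}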

Example 97 with Time

Use of org.apache.kafka.common.utils.Time in the Apache Kafka project.

From class KTableSuppressProcessorMetricsTest, method shouldRecordMetricsWithBuiltInMetricsVersionLatest.

@Test
public void shouldRecordMetricsWithBuiltInMetricsVersionLatest() {
    final String storeName = "test-store";
    final StateStore buffer = new InMemoryTimeOrderedKeyValueBuffer.Builder<>(storeName, Serdes.String(), Serdes.Long())
        .withLoggingDisabled()
        .build();
    final KTableImpl<String, ?, Long> mock = EasyMock.mock(KTableImpl.class);
    final Processor<String, Change<Long>, String, Change<Long>> processor =
        new KTableSuppressProcessorSupplier<>(
            (SuppressedInternal<String>) Suppressed.<String>untilTimeLimit(Duration.ofDays(100), maxRecords(1)),
            storeName,
            mock
        ).get();
    streamsConfig.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, StreamsConfig.METRICS_LATEST);
    final MockInternalNewProcessorContext<String, Change<Long>> context = new MockInternalNewProcessorContext<>(streamsConfig, TASK_ID, TestUtils.tempDirectory());
    final Time time = new SystemTime();
    context.setCurrentNode(new ProcessorNode("testNode"));
    context.setSystemTimeMs(time.milliseconds());
    buffer.init((StateStoreContext) context, buffer);
    processor.init(context);
    final long timestamp = 100L;
    context.setRecordMetadata("", 0, 0L);
    context.setTimestamp(timestamp);
    final String key = "longKey";
    final Change<Long> value = new Change<>(null, ARBITRARY_LONG);
    processor.process(new Record<>(key, value, timestamp));
    final MetricName evictionRateMetric = evictionRateMetricLatest;
    final MetricName evictionTotalMetric = evictionTotalMetricLatest;
    final MetricName bufferSizeAvgMetric = bufferSizeAvgMetricLatest;
    final MetricName bufferSizeMaxMetric = bufferSizeMaxMetricLatest;
    final MetricName bufferCountAvgMetric = bufferCountAvgMetricLatest;
    final MetricName bufferCountMaxMetric = bufferCountMaxMetricLatest;
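    // After the first record only the buffer-size/count metrics should have moved; nothing has been evicted yet.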
    {
        final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
        verifyMetric(metrics, evictionRateMetric, is(0.0));
        verifyMetric(metrics, evictionTotalMetric, is(0.0));
        verifyMetric(metrics, bufferSizeAvgMetric, is(21.5));
        verifyMetric(metrics, bufferSizeMaxMetric, is(43.0));
        verifyMetric(metrics, bufferCountAvgMetric, is(0.5));
        verifyMetric(metrics, bufferCountMaxMetric, is(1.0));
    }
    context.setRecordMetadata("", 0, 1L);
    context.setTimestamp(timestamp + 1);
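    // Processing a second record exceeds the maxRecords(1) bound, so the earlier record is evicted.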
    processor.process(new Record<>("key", value, timestamp + 1));
    {
        final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
        verifyMetric(metrics, evictionRateMetric, greaterThan(0.0));
        verifyMetric(metrics, evictionTotalMetric, is(1.0));
        verifyMetric(metrics, bufferSizeAvgMetric, is(41.0));
        verifyMetric(metrics, bufferSizeMaxMetric, is(82.0));
        verifyMetric(metrics, bufferCountAvgMetric, is(1.0));
        verifyMetric(metrics, bufferCountMaxMetric, is(2.0));
    }
}
Also used : StateStore(org.apache.kafka.streams.processor.StateStore) Time(org.apache.kafka.common.utils.Time) SystemTime(org.apache.kafka.common.utils.SystemTime) Change(org.apache.kafka.streams.kstream.internals.Change) MetricName(org.apache.kafka.common.MetricName) ProcessorNode(org.apache.kafka.streams.processor.internals.ProcessorNode) MockInternalNewProcessorContext(org.apache.kafka.test.MockInternalNewProcessorContext) Metric(org.apache.kafka.common.Metric) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) Map(java.util.Map) Test(org.junit.Test)
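
The verifyMetric helper called throughout this test is not shown in the snippet; a plausible reconstruction, offered here only as an assumption about its shape, looks up the metric by name and applies a Hamcrest matcher to its current value:

import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;

import java.util.Map;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.hamcrest.Matcher;

final class MetricAssertions {
    // Hypothetical reconstruction of the helper used above; the real test may differ in detail.
    @SuppressWarnings("unchecked")
    static <T> void verifyMetric(final Map<MetricName, ? extends Metric> metrics,
                                 final MetricName name,
                                 final Matcher<T> matcher) {
        final Metric metric = metrics.get(name);
        assertThat(metric, notNullValue());
        // Metric.metricValue() returns Object; the caller's matcher constrains the expected type.
        assertThat((T) metric.metricValue(), matcher);
    }
}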

Example 98 with Time

Use of org.apache.kafka.common.utils.Time in the Apache Kafka project.

From class KafkaProducerTest, method testTransactionalMethodThrowsWhenSenderClosed.

@Test
public void testTransactionalMethodThrowsWhenSenderClosed() {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    configs.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "this-is-a-transactional-id");
    Time time = new MockTime();
    MetadataResponse initialUpdateResponse = RequestTestUtils.metadataUpdateWith(1, emptyMap());
    ProducerMetadata metadata = newMetadata(0, Long.MAX_VALUE);
    metadata.updateWithCurrentRequestVersion(initialUpdateResponse, false, time.milliseconds());
    MockClient client = new MockClient(time, metadata);
    Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), metadata, client, null, time);
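    // Closing the producer shuts down its sender; transactional calls afterwards are expected to fail fast.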
    producer.close();
    assertThrows(IllegalStateException.class, producer::initTransactions);
}
Also used : ProducerMetadata(org.apache.kafka.clients.producer.internals.ProducerMetadata) HashMap(java.util.HashMap) MetadataResponse(org.apache.kafka.common.requests.MetadataResponse) MockTime(org.apache.kafka.common.utils.MockTime) Time(org.apache.kafka.common.utils.Time) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MockClient(org.apache.kafka.clients.MockClient) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)

Example 99 with Time

Use of org.apache.kafka.common.utils.Time in the Apache Kafka project.

From class KafkaProducerTest, method testCallbackHandlesError.

@Test
public void testCallbackHandlesError() throws Exception {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    configs.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "1000");
    Time time = new MockTime();
    ProducerMetadata producerMetadata = newMetadata(0, Long.MAX_VALUE);
    MockClient client = new MockClient(time, producerMetadata);
    // Invalid topic name due to space
    String invalidTopicName = "topic abc";
    try (Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), producerMetadata, client, null, time)) {
        ProducerRecord<String, String> record = new ProducerRecord<>(invalidTopicName, "HelloKafka");
        // Here's the important piece of the test. Let's make sure that the RecordMetadata we get
        // is non-null and adheres to the onCompletion contract.
        Callback callBack = (recordMetadata, exception) -> {
            assertNotNull(exception);
            assertNotNull(recordMetadata);
            assertNotNull(recordMetadata.topic(), "Topic name should be valid even on send failure");
            assertEquals(invalidTopicName, recordMetadata.topic());
            assertNotNull(recordMetadata.partition(), "Partition should be valid even on send failure");
            assertFalse(recordMetadata.hasOffset());
            assertEquals(ProduceResponse.INVALID_OFFSET, recordMetadata.offset());
            assertFalse(recordMetadata.hasTimestamp());
            assertEquals(RecordBatch.NO_TIMESTAMP, recordMetadata.timestamp());
            assertEquals(-1, recordMetadata.serializedKeySize());
            assertEquals(-1, recordMetadata.serializedValueSize());
            assertEquals(-1, recordMetadata.partition());
        };
        producer.send(record, callBack);
    }
}
Also used : ProducerMetadata(org.apache.kafka.clients.producer.internals.ProducerMetadata) HashMap(java.util.HashMap) MockTime(org.apache.kafka.common.utils.MockTime) Time(org.apache.kafka.common.utils.Time) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MockClient(org.apache.kafka.clients.MockClient) ProduceResponse(org.apache.kafka.common.requests.ProduceResponse) RecordBatch(org.apache.kafka.common.record.RecordBatch) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)
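
The property being verified above, that onCompletion always receives a non-null RecordMetadata even when the send fails, is what lets application callbacks log failures with topic context. A minimal sketch of such a callback, assuming only the public Callback and RecordMetadata API (the producer and record it would be used with are not shown):

import org.apache.kafka.clients.producer.Callback;

final class LoggingCallbacks {
    // Illustrative only: relies on the guarantee exercised above that recordMetadata is
    // non-null in onCompletion even when the send fails.
    static final Callback LOGGING = (recordMetadata, exception) -> {
        if (exception != null) {
            // topic() is preserved on failure; partition(), offset() and timestamp()
            // fall back to sentinel values (-1, INVALID_OFFSET, NO_TIMESTAMP), as asserted above.
            System.err.println("send to " + recordMetadata.topic() + " failed: " + exception);
        } else {
            System.out.println("acked " + recordMetadata.topic() + "-" + recordMetadata.partition()
                    + " @ offset " + recordMetadata.offset());
        }
    };
}

Such a callback would be passed as the second argument to producer.send(record, callback), exactly as in the last line of the test.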

Example 100 with Time

Use of org.apache.kafka.common.utils.Time in the Apache Kafka project.

From class KafkaProducerTest, method testAbortTransaction.

@Test
public void testAbortTransaction() {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "some.id");
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    Time time = new MockTime(1);
    MetadataResponse initialUpdateResponse = RequestTestUtils.metadataUpdateWith(1, singletonMap("topic", 1));
    ProducerMetadata metadata = newMetadata(0, Long.MAX_VALUE);
    MockClient client = new MockClient(time, metadata);
    client.updateMetadata(initialUpdateResponse);
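    // Queue the coordinator lookup, producer-id initialization, and end-transaction responses
    // that initTransactions() and abortTransaction() will trigger.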
    client.prepareResponse(FindCoordinatorResponse.prepareResponse(Errors.NONE, "some.id", NODE));
    client.prepareResponse(initProducerIdResponse(1L, (short) 5, Errors.NONE));
    client.prepareResponse(endTxnResponse(Errors.NONE));
    try (Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), metadata, client, null, time)) {
        producer.initTransactions();
        producer.beginTransaction();
        producer.abortTransaction();
    }
}
Also used : ProducerMetadata(org.apache.kafka.clients.producer.internals.ProducerMetadata) HashMap(java.util.HashMap) MetadataResponse(org.apache.kafka.common.requests.MetadataResponse) MockTime(org.apache.kafka.common.utils.MockTime) Time(org.apache.kafka.common.utils.Time) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MockClient(org.apache.kafka.clients.MockClient) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)
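
The three responses prepared in the test (FindCoordinator, InitProducerId, EndTxn) are the round trips that initTransactions() and abortTransaction() make against a real broker. A minimal sketch of the same transactional lifecycle outside the test harness, with the broker address, topic, and error handling assumed rather than taken from the source:

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class TransactionalProduceSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "some.id");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.initTransactions();           // FindCoordinator + InitProducerId round trips
            try {
                producer.beginTransaction();
                producer.send(new ProducerRecord<>("topic", "key", "value"));
                producer.commitTransaction();      // EndTxn(commit)
            } catch (RuntimeException e) {
                // Simplified: fatal errors such as ProducerFencedException would require close() instead.
                producer.abortTransaction();       // EndTxn(abort), the path the test drives
                throw e;
            }
        }
    }
}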

Aggregations

Classes that appear together with Time (org.apache.kafka.common.utils.Time) across these examples, with occurrence counts:

Time (org.apache.kafka.common.utils.Time): 125
MockTime (org.apache.kafka.common.utils.MockTime): 107
Test (org.junit.jupiter.api.Test): 63
MockClient (org.apache.kafka.clients.MockClient): 55
HashMap (java.util.HashMap): 53
Cluster (org.apache.kafka.common.Cluster): 41
Test (org.junit.Test): 40
Node (org.apache.kafka.common.Node): 39
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 32
MetadataResponse (org.apache.kafka.common.requests.MetadataResponse): 31
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 30
Metadata (org.apache.kafka.clients.Metadata): 28
ProducerMetadata (org.apache.kafka.clients.producer.internals.ProducerMetadata): 25
TopicPartition (org.apache.kafka.common.TopicPartition): 22
PartitionAssignor (org.apache.kafka.clients.consumer.internals.PartitionAssignor): 21
LogContext (org.apache.kafka.common.utils.LogContext): 17
Map (java.util.Map): 14
Properties (java.util.Properties): 14
MetricName (org.apache.kafka.common.MetricName): 14
ExecutionException (java.util.concurrent.ExecutionException): 13