
Example 6 with MetricListener

Use of org.apache.flink.metrics.testutils.MetricListener in project flink by apache.

From the class KafkaSourceReaderMetricsTest, method testCurrentOffsetTracking.

@Test
public void testCurrentOffsetTracking() {
    MetricListener metricListener = new MetricListener();
    final KafkaSourceReaderMetrics kafkaSourceReaderMetrics = new KafkaSourceReaderMetrics(InternalSourceReaderMetricGroup.mock(metricListener.getMetricGroup()));
    // Register the topic partitions so that per-partition offset gauges are created
    kafkaSourceReaderMetrics.registerTopicPartition(FOO_0);
    kafkaSourceReaderMetrics.registerTopicPartition(FOO_1);
    kafkaSourceReaderMetrics.registerTopicPartition(BAR_0);
    kafkaSourceReaderMetrics.registerTopicPartition(BAR_1);
    // Record the latest consumed offset for each partition
    kafkaSourceReaderMetrics.recordCurrentOffset(FOO_0, 15213L);
    kafkaSourceReaderMetrics.recordCurrentOffset(FOO_1, 18213L);
    kafkaSourceReaderMetrics.recordCurrentOffset(BAR_0, 18613L);
    kafkaSourceReaderMetrics.recordCurrentOffset(BAR_1, 15513L);
    // Each partition's currentOffset gauge should reflect the recorded value
    assertCurrentOffset(FOO_0, 15213L, metricListener);
    assertCurrentOffset(FOO_1, 18213L, metricListener);
    assertCurrentOffset(BAR_0, 18613L, metricListener);
    assertCurrentOffset(BAR_1, 15513L, metricListener);
}
Also used: MetricListener (org.apache.flink.metrics.testutils.MetricListener), Test (org.junit.Test)
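
The assertCurrentOffset helper is defined elsewhere in the test class. A minimal sketch of how such an assertion could be written against the MetricListener API is shown below; the group and gauge names ("KafkaSourceReader", "topic", "partition", "currentOffset") are assumptions for illustration, not taken from the snippet.

// Hypothetical sketch of the assertCurrentOffset helper used above.
// Group and gauge names are assumed, not confirmed by the snippet.
private static void assertCurrentOffset(
        TopicPartition tp, long expectedOffset, MetricListener listener) {
    final Optional<Gauge<Long>> gauge =
            listener.getGauge(
                    "KafkaSourceReader",
                    "topic", tp.topic(),
                    "partition", String.valueOf(tp.partition()),
                    "currentOffset");
    assertTrue(gauge.isPresent());
    assertEquals(expectedOffset, (long) gauge.get().getValue());
}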

Example 7 with MetricListener

Use of org.apache.flink.metrics.testutils.MetricListener in project flink by apache.

From the class KafkaSourceReaderTest, method testKafkaSourceMetrics.

@Test
void testKafkaSourceMetrics() throws Exception {
    final MetricListener metricListener = new MetricListener();
    final String groupId = "testKafkaSourceMetrics";
    final TopicPartition tp0 = new TopicPartition(TOPIC, 0);
    final TopicPartition tp1 = new TopicPartition(TOPIC, 1);
    try (KafkaSourceReader<Integer> reader = (KafkaSourceReader<Integer>) createReader(Boundedness.CONTINUOUS_UNBOUNDED, groupId, metricListener.getMetricGroup())) {
        KafkaPartitionSplit split0 = new KafkaPartitionSplit(tp0, KafkaPartitionSplit.EARLIEST_OFFSET);
        KafkaPartitionSplit split1 = new KafkaPartitionSplit(tp1, KafkaPartitionSplit.EARLIEST_OFFSET);
        reader.addSplits(Arrays.asList(split0, split1));
        TestingReaderOutput<Integer> output = new TestingReaderOutput<>();
        pollUntil(reader, output, () -> output.getEmittedRecords().size() == NUM_RECORDS_PER_SPLIT * 2, String.format("Failed to poll %d records until timeout", NUM_RECORDS_PER_SPLIT * 2));
        // Metric "records-consumed-total" of KafkaConsumer should be NUM_RECORDS_PER_SPLIT
        assertThat(getKafkaConsumerMetric("records-consumed-total", metricListener)).isEqualTo(NUM_RECORDS_PER_SPLIT * 2);
        // Current consuming offset should be NUM_RECORDS_PER_SPLIT - 1
        assertThat(getCurrentOffsetMetric(tp0, metricListener)).isEqualTo(NUM_RECORDS_PER_SPLIT - 1);
        assertThat(getCurrentOffsetMetric(tp1, metricListener)).isEqualTo(NUM_RECORDS_PER_SPLIT - 1);
        // No offsets have been committed yet
        assertThat(getCommittedOffsetMetric(tp0, metricListener)).isEqualTo(INITIAL_OFFSET);
        assertThat(getCommittedOffsetMetric(tp1, metricListener)).isEqualTo(INITIAL_OFFSET);
        // Trigger offset commit
        final long checkpointId = 15213L;
        reader.snapshotState(checkpointId);
        waitUtil(() -> {
            try {
                reader.notifyCheckpointComplete(checkpointId);
            } catch (Exception e) {
                throw new RuntimeException("Failed to notify checkpoint complete to reader", e);
            }
            return reader.getOffsetsToCommit().isEmpty();
        }, Duration.ofSeconds(60), Duration.ofSeconds(1), String.format("Offsets are not committed successfully. Dangling offsets: %s", reader.getOffsetsToCommit()));
        // Metric "commit-total" of KafkaConsumer should be greater than 0
        // The exact number of commits is hard to predict because of retries
        MatcherAssert.assertThat(getKafkaConsumerMetric("commit-total", metricListener), Matchers.greaterThan(0L));
        // Committed offset should be NUM_RECORDS_PER_SPLIT
        assertThat(getCommittedOffsetMetric(tp0, metricListener)).isEqualTo(NUM_RECORDS_PER_SPLIT);
        assertThat(getCommittedOffsetMetric(tp1, metricListener)).isEqualTo(NUM_RECORDS_PER_SPLIT);
        // Number of successful commits should be greater than 0
        final Optional<Counter> commitsSucceeded = metricListener.getCounter(KAFKA_SOURCE_READER_METRIC_GROUP, COMMITS_SUCCEEDED_METRIC_COUNTER);
        assertThat(commitsSucceeded).isPresent();
        MatcherAssert.assertThat(commitsSucceeded.get().getCount(), Matchers.greaterThan(0L));
    }
}
Also used: KafkaPartitionSplit (org.apache.flink.connector.kafka.source.split.KafkaPartitionSplit), MetricListener (org.apache.flink.metrics.testutils.MetricListener), TestingReaderOutput (org.apache.flink.connector.testutils.source.reader.TestingReaderOutput), Counter (org.apache.flink.metrics.Counter), TopicPartition (org.apache.kafka.common.TopicPartition), Test (org.junit.jupiter.api.Test)
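
The helpers getKafkaConsumerMetric, getCurrentOffsetMetric and getCommittedOffsetMetric are defined elsewhere in the test class. A minimal sketch of how the committed-offset helper could be built on top of MetricListener follows (getCurrentOffsetMetric would be analogous); the group names ("KafkaSourceReader", "topic", "partition") and the gauge name ("committedOffset") are assumptions for illustration.

// Hypothetical sketch: reads a per-partition committed-offset gauge through the listener.
// Group and gauge names are assumed, not confirmed by the snippet.
private long getCommittedOffsetMetric(TopicPartition tp, MetricListener listener) {
    final Optional<Gauge<Long>> gauge =
            listener.getGauge(
                    "KafkaSourceReader",
                    "topic", tp.topic(),
                    "partition", String.valueOf(tp.partition()),
                    "committedOffset");
    assertThat(gauge).isPresent();
    return gauge.get().getValue();
}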

Example 8 with MetricListener

Use of org.apache.flink.metrics.testutils.MetricListener in project flink by apache.

From the class ElasticsearchWriterITCase, method setUp.

@BeforeEach
void setUp() {
    metricListener = new MetricListener();
    client = new RestHighLevelClient(RestClient.builder(HttpHost.create(ES_CONTAINER.getHttpHostAddress())));
    context = new TestClient(client);
}
Also used: RestHighLevelClient (org.elasticsearch.client.RestHighLevelClient), MetricListener (org.apache.flink.metrics.testutils.MetricListener), BeforeEach (org.junit.jupiter.api.BeforeEach)
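
A matching cleanup is usually paired with this setUp. A minimal sketch that closes the REST client opened above, assuming no other per-test resources need releasing:

@AfterEach
void tearDown() throws IOException {
    // The MetricListener holds no external resources, so only the REST client
    // created in setUp needs to be closed.
    if (client != null) {
        client.close();
    }
}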

Example 9 with MetricListener

Use of org.apache.flink.metrics.testutils.MetricListener in project flink by apache.

From the class MetricListenerTest, method testRegisterMetrics.

@Test
public void testRegisterMetrics() {
    MetricListener metricListener = new MetricListener();
    final MetricGroup metricGroup = metricListener.getMetricGroup();
    // Counter
    final Counter counter = metricGroup.counter(COUNTER_NAME);
    counter.inc(15213);
    final Optional<Counter> registeredCounter = metricListener.getCounter(COUNTER_NAME);
    assertTrue(registeredCounter.isPresent());
    assertEquals(15213L, registeredCounter.get().getCount());
    // Gauge
    metricGroup.gauge(GAUGE_NAME, () -> 15213);
    final Optional<Gauge<Integer>> registeredGauge = metricListener.getGauge(GAUGE_NAME);
    assertTrue(registeredGauge.isPresent());
    assertEquals(Integer.valueOf(15213), registeredGauge.get().getValue());
    // Meter
    metricGroup.meter(METER_NAME, new Meter() {

        @Override
        public void markEvent() {
        }

        @Override
        public void markEvent(long n) {
        }

        @Override
        public double getRate() {
            return 15213.0;
        }

        @Override
        public long getCount() {
            return 18213L;
        }
    });
    final Optional<Meter> registeredMeter = metricListener.getMeter(METER_NAME);
    assertTrue(registeredMeter.isPresent());
    assertEquals(15213.0, registeredMeter.get().getRate(), 0.1);
    assertEquals(18213L, registeredMeter.get().getCount());
    // Histogram
    metricGroup.histogram(HISTOGRAM_NAME, new Histogram() {

        @Override
        public void update(long value) {
        }

        @Override
        public long getCount() {
            return 15213L;
        }

        @Override
        public HistogramStatistics getStatistics() {
            return null;
        }
    });
    final Optional<Histogram> registeredHistogram = metricListener.getHistogram(HISTOGRAM_NAME);
    assertTrue(registeredHistogram.isPresent());
    assertEquals(15213L, registeredHistogram.get().getCount());
}
Also used: Histogram (org.apache.flink.metrics.Histogram), Meter (org.apache.flink.metrics.Meter), MetricGroup (org.apache.flink.metrics.MetricGroup), MetricListener (org.apache.flink.metrics.testutils.MetricListener), Gauge (org.apache.flink.metrics.Gauge), Counter (org.apache.flink.metrics.Counter), HistogramStatistics (org.apache.flink.metrics.HistogramStatistics), Test (org.junit.Test)
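
Every lookup on the listener returns an Optional, so querying a name that was never registered is expected to yield an empty result rather than null. A small sketch of that expectation (the metric name is arbitrary, and the empty-Optional behaviour for unknown names is an assumption based on the Optional return type):

@Test
public void testUnregisteredMetricIsAbsent() {
    final MetricListener metricListener = new MetricListener();
    // Nothing was registered under this name, so the Optional should be empty.
    assertFalse(metricListener.getCounter("neverRegistered").isPresent());
}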

Example 10 with MetricListener

Use of org.apache.flink.metrics.testutils.MetricListener in project flink by apache.

From the class MetricListenerTest, method testRegisterMetricGroup.

@Test
public void testRegisterMetricGroup() {
    MetricListener metricListener = new MetricListener();
    final MetricGroup rootGroup = metricListener.getMetricGroup();
    final MetricGroup groupA1 = rootGroup.addGroup(GROUP_A).addGroup(GROUP_A_1);
    final MetricGroup groupB = rootGroup.addGroup(GROUP_B);
    final MetricGroup groupB1 = groupB.addGroup(GROUP_B_1);
    final MetricGroup groupB2 = groupB.addGroup(GROUP_B_2);
    groupA1.counter(COUNTER_NAME).inc(18213L);
    groupB1.gauge(GAUGE_NAME, () -> 15213L);
    groupB2.counter(COUNTER_NAME).inc(15513L);
    // groupA.groupA_1.testCounter
    final Optional<Counter> counterA = metricListener.getCounter(GROUP_A, GROUP_A_1, COUNTER_NAME);
    assertTrue(counterA.isPresent());
    assertEquals(18213L, counterA.get().getCount());
    // groupB.groupB_1.testGauge
    final Optional<Gauge<Long>> gauge = metricListener.getGauge(GROUP_B, GROUP_B_1, GAUGE_NAME);
    assertTrue(gauge.isPresent());
    assertEquals(15213L, (long) gauge.get().getValue());
    // groupB.groupB_2.testCounter
    final Optional<Counter> counterB = metricListener.getCounter(GROUP_B, GROUP_B_2, COUNTER_NAME);
    assertTrue(counterB.isPresent());
    assertEquals(15513L, counterB.get().getCount());
}
Also used: Counter (org.apache.flink.metrics.Counter), MetricGroup (org.apache.flink.metrics.MetricGroup), MetricListener (org.apache.flink.metrics.testutils.MetricListener), Gauge (org.apache.flink.metrics.Gauge), Test (org.junit.Test)
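
The same pattern carries over from these self-contained tests to the connector tests above: hand metricListener.getMetricGroup() to the code under test, let it register its metrics, then query them back through the listener. A minimal sketch with a hypothetical component (RecordCounter is not part of Flink, purely illustrative):

// Hypothetical component used only for illustration.
final class RecordCounter {
    private final Counter recordsProcessed;

    RecordCounter(MetricGroup metricGroup) {
        this.recordsProcessed = metricGroup.counter("recordsProcessed");
    }

    void process() {
        recordsProcessed.inc();
    }
}

@Test
public void testRecordCounterRegistersMetric() {
    final MetricListener metricListener = new MetricListener();
    final RecordCounter recordCounter = new RecordCounter(metricListener.getMetricGroup());
    recordCounter.process();
    recordCounter.process();

    final Optional<Counter> registered = metricListener.getCounter("recordsProcessed");
    assertTrue(registered.isPresent());
    assertEquals(2L, registered.get().getCount());
}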

Aggregations

MetricListener (org.apache.flink.metrics.testutils.MetricListener): 10
Test (org.junit.Test): 6
Counter (org.apache.flink.metrics.Counter): 5
Gauge (org.apache.flink.metrics.Gauge): 3
KafkaPartitionSplit (org.apache.flink.connector.kafka.source.split.KafkaPartitionSplit): 2
MetricGroup (org.apache.flink.metrics.MetricGroup): 2
TopicPartition (org.apache.kafka.common.TopicPartition): 2
BeforeEach (org.junit.jupiter.api.BeforeEach): 2
Properties (java.util.Properties): 1
KafkaSourceTestEnv (org.apache.flink.connector.kafka.testutils.KafkaSourceTestEnv): 1
TestingReaderOutput (org.apache.flink.connector.testutils.source.reader.TestingReaderOutput): 1
Histogram (org.apache.flink.metrics.Histogram): 1
HistogramStatistics (org.apache.flink.metrics.HistogramStatistics): 1
Meter (org.apache.flink.metrics.Meter): 1
RestHighLevelClient (org.elasticsearch.client.RestHighLevelClient): 1
Test (org.junit.jupiter.api.Test): 1
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 1
EmptySource (org.junit.jupiter.params.provider.EmptySource): 1
ValueSource (org.junit.jupiter.params.provider.ValueSource): 1