Example 36 with Metric

Use of org.apache.kafka.common.Metric in project kafka by apache.

Class TaskManagerTest, method shouldTransmitProducerMetrics.

@Test
public void shouldTransmitProducerMetrics() {
    final MetricName testMetricName = new MetricName("test_metric", "", "", new HashMap<>());
    final Metric testMetric = new KafkaMetric(new Object(), testMetricName, (Measurable) (config, now) -> 0, null, new MockTime());
    final Map<MetricName, Metric> dummyProducerMetrics = singletonMap(testMetricName, testMetric);
    expect(activeTaskCreator.producerMetrics()).andReturn(dummyProducerMetrics);
    replay(activeTaskCreator);
    assertThat(taskManager.producerMetrics(), is(dummyProducerMetrics));
}
Also used : MockTime(org.apache.kafka.common.utils.MockTime) Arrays(java.util.Arrays) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) ConsumerGroupMetadata(org.apache.kafka.clients.consumer.ConsumerGroupMetadata) Mock(org.easymock.Mock) IsEqual.equalTo(org.hamcrest.core.IsEqual.equalTo) KafkaException(org.apache.kafka.common.KafkaException) StreamsException(org.apache.kafka.streams.errors.StreamsException) Collections.singletonList(java.util.Collections.singletonList) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) DeleteRecordsResult(org.apache.kafka.clients.admin.DeleteRecordsResult) Collections.singleton(java.util.Collections.singleton) EasyMock.reset(org.easymock.EasyMock.reset) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) Metric(org.apache.kafka.common.Metric) MetricName(org.apache.kafka.common.MetricName) StreamsMetricsImpl(org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl) Assert.fail(org.junit.Assert.fail) EasyMock.eq(org.easymock.EasyMock.eq) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) StreamsConfigUtils(org.apache.kafka.streams.internals.StreamsConfigUtils) Time(org.apache.kafka.common.utils.Time) Collections.emptyList(java.util.Collections.emptyList) State(org.apache.kafka.streams.processor.internals.Task.State) Collection(java.util.Collection) Utils.mkSet(org.apache.kafka.common.utils.Utils.mkSet) Set(java.util.Set) Measurable(org.apache.kafka.common.metrics.Measurable) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) Matchers.instanceOf(org.hamcrest.Matchers.instanceOf) List(java.util.List) Metrics(org.apache.kafka.common.metrics.Metrics) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Assert.assertFalse(org.junit.Assert.assertFalse) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Optional(java.util.Optional) KafkaMetric(org.apache.kafka.common.metrics.KafkaMetric) LockException(org.apache.kafka.streams.errors.LockException) Matchers.is(org.hamcrest.Matchers.is) MockType(org.easymock.MockType) StreamsConfig(org.apache.kafka.streams.StreamsConfig) TaskId(org.apache.kafka.streams.processor.TaskId) Assert.assertThrows(org.junit.Assert.assertThrows) RunWith(org.junit.runner.RunWith) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) EasyMock.mock(org.easymock.EasyMock.mock) HashMap(java.util.HashMap) Deque(java.util.Deque) AtomicReference(java.util.concurrent.atomic.AtomicReference) RecordsToDelete(org.apache.kafka.clients.admin.RecordsToDelete) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) TaskMigratedException(org.apache.kafka.streams.errors.TaskMigratedException) EasyMock.resetToStrict(org.easymock.EasyMock.resetToStrict) TaskDirectory(org.apache.kafka.streams.processor.internals.StateDirectory.TaskDirectory) Admin(org.apache.kafka.clients.admin.Admin) EasyMockRunner(org.easymock.EasyMockRunner) Collections.singletonMap(java.util.Collections.singletonMap) EasyMock.replay(org.easymock.EasyMock.replay) KafkaFutureImpl(org.apache.kafka.common.internals.KafkaFutureImpl) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) LinkedList(java.util.LinkedList) TaskCorruptedException(org.apache.kafka.streams.errors.TaskCorruptedException) Before(org.junit.Before) EasyMock.anyObject(org.easymock.EasyMock.anyObject) Collections.emptyMap(java.util.Collections.emptyMap) TimeoutException(org.apache.kafka.common.errors.TimeoutException) 
Matchers.empty(org.hamcrest.Matchers.empty) Collections.emptySet(java.util.Collections.emptySet) EasyMock.anyString(org.easymock.EasyMock.anyString) Matchers(org.hamcrest.Matchers) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) ProcessingMode(org.apache.kafka.streams.internals.StreamsConfigUtils.ProcessingMode) EasyMock(org.easymock.EasyMock) EasyMock.expect(org.easymock.EasyMock.expect) File(java.io.File) Utils.union(org.apache.kafka.common.utils.Utils.union) EasyMock.expectLastCall(org.easymock.EasyMock.expectLastCall) Rule(org.junit.Rule) Assert.assertNull(org.junit.Assert.assertNull) StateStore(org.apache.kafka.streams.processor.StateStore) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) CommitFailedException(org.apache.kafka.clients.consumer.CommitFailedException) OffsetCheckpoint(org.apache.kafka.streams.state.internals.OffsetCheckpoint) EasyMock.verify(org.easymock.EasyMock.verify) DummyStreamsConfig(org.apache.kafka.streams.processor.internals.testutil.DummyStreamsConfig) Collections(java.util.Collections) DeletedRecords(org.apache.kafka.clients.admin.DeletedRecords) TemporaryFolder(org.junit.rules.TemporaryFolder) Assert.assertEquals(org.junit.Assert.assertEquals)
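
The assertion above only checks that the map returned by activeTaskCreator.producerMetrics() comes back unchanged from taskManager.producerMetrics(), which suggests a plain pass-through accessor. A minimal sketch of such delegation, using hypothetical class and interface names rather than the actual Kafka internals:

import java.util.Map;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;

public class ProducerMetricsDelegationSketch {

    // Hypothetical stand-in for the real ActiveTaskCreator collaborator.
    interface ProducerMetricsSource {
        Map<MetricName, Metric> producerMetrics();
    }

    private final ProducerMetricsSource source;

    public ProducerMetricsDelegationSketch(final ProducerMetricsSource source) {
        this.source = source;
    }

    // Mirrors what the test asserts: the metrics map is forwarded as-is,
    // without copying or re-keying.
    public Map<MetricName, Metric> producerMetrics() {
        return source.producerMetrics();
    }
}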

Example 37 with Metric

Use of org.apache.kafka.common.Metric in project kafka by apache.

Class ToolsUtils, method printMetrics.

/**
 * Print out the metrics in alphabetical order.
 * @param metrics   the metrics to be printed out
 */
public static void printMetrics(Map<MetricName, ? extends Metric> metrics) {
    if (metrics != null && !metrics.isEmpty()) {
        int maxLengthOfDisplayName = 0;
        TreeMap<String, Object> sortedMetrics = new TreeMap<>();
        for (Metric metric : metrics.values()) {
            MetricName mName = metric.metricName();
            String mergedName = mName.group() + ":" + mName.name() + ":" + mName.tags();
            maxLengthOfDisplayName = maxLengthOfDisplayName < mergedName.length() ? mergedName.length() : maxLengthOfDisplayName;
            sortedMetrics.put(mergedName, metric.metricValue());
        }
        String doubleOutputFormat = "%-" + maxLengthOfDisplayName + "s : %.3f";
        String defaultOutputFormat = "%-" + maxLengthOfDisplayName + "s : %s";
        System.out.println(String.format("\n%-" + maxLengthOfDisplayName + "s   %s", "Metric Name", "Value"));
        for (Map.Entry<String, Object> entry : sortedMetrics.entrySet()) {
            String outputFormat;
            if (entry.getValue() instanceof Double)
                outputFormat = doubleOutputFormat;
            else
                outputFormat = defaultOutputFormat;
            System.out.println(String.format(outputFormat, entry.getKey(), entry.getValue()));
        }
    }
}
Also used : MetricName(org.apache.kafka.common.MetricName) Metric(org.apache.kafka.common.Metric) TreeMap(java.util.TreeMap) Map(java.util.Map)
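
A quick way to exercise printMetrics is to hand it the metrics map of any Kafka client, since KafkaProducer.metrics() already returns a Map<MetricName, ? extends Metric>. A minimal sketch; the broker address is a placeholder, and the ToolsUtils import is assumed to match wherever the class lives in your Kafka version:

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.tools.ToolsUtils; // assumed package; adjust to your Kafka version

public class PrintProducerMetricsExample {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Prints "group:name:tags : value" pairs, sorted and column-aligned
            // exactly as implemented above.
            ToolsUtils.printMetrics(producer.metrics());
        }
    }
}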

Example 38 with Metric

Use of org.apache.kafka.common.Metric in project kafka by apache.

Class RocksDBStoreTest, method shouldVerifyThatPropertyBasedMetricsUseValidPropertyName.

@Test
public void shouldVerifyThatPropertyBasedMetricsUseValidPropertyName() {
    final TaskId taskId = new TaskId(0, 0);
    final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.INFO));
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
    final Properties props = StreamsTestUtils.getStreamsConfig();
    context = EasyMock.niceMock(InternalMockProcessorContext.class);
    EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
    EasyMock.expect(context.taskId()).andStubReturn(taskId);
    EasyMock.expect(context.appConfigs()).andStubReturn(new StreamsConfig(props).originals());
    EasyMock.expect(context.stateDir()).andStubReturn(dir);
    EasyMock.replay(context);
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    final List<String> propertyNames = Arrays.asList("num-entries-active-mem-table", "num-deletes-active-mem-table", "num-entries-imm-mem-tables", "num-deletes-imm-mem-tables", "num-immutable-mem-table", "cur-size-active-mem-table", "cur-size-all-mem-tables", "size-all-mem-tables", "mem-table-flush-pending", "num-running-flushes", "compaction-pending", "num-running-compactions", "estimate-pending-compaction-bytes", "total-sst-files-size", "live-sst-files-size", "num-live-versions", "block-cache-capacity", "block-cache-usage", "block-cache-pinned-usage", "estimate-num-keys", "estimate-table-readers-mem", "background-errors");
    for (final String propertyname : propertyNames) {
        final Metric metric = metrics.metric(new MetricName(propertyname, StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP, "description is not verified", streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)));
        assertThat("Metric " + propertyname + " not found!", metric, notNullValue());
        metric.metricValue();
    }
}
Also used : MetricConfig(org.apache.kafka.common.metrics.MetricConfig) MetricName(org.apache.kafka.common.MetricName) Metrics(org.apache.kafka.common.metrics.Metrics) TaskId(org.apache.kafka.streams.processor.TaskId) Metric(org.apache.kafka.common.Metric) StreamsMetricsImpl(org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl) Properties(java.util.Properties) InternalMockProcessorContext(org.apache.kafka.test.InternalMockProcessorContext) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Test(org.junit.Test)
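
As the test name indicates, each of these Streams metric names doubles as a RocksDB property name once the "rocksdb." prefix is added (for example "rocksdb.num-entries-active-mem-table"). A standalone sketch that queries one such property directly through the RocksDB Java API (rocksdbjni); the database path is a placeholder:

import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

public class RocksDBPropertyProbe {
    public static void main(final String[] args) throws RocksDBException {
        RocksDB.loadLibrary();
        try (Options options = new Options().setCreateIfMissing(true);
             RocksDB db = RocksDB.open(options, "/tmp/rocksdb-property-probe")) { // placeholder path
            // The property name is the Streams metric name prefixed with "rocksdb.".
            final String property = "rocksdb.num-entries-active-mem-table";
            System.out.println(property + " = " + db.getLongProperty(property));
        }
    }
}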

Example 39 with Metric

Use of org.apache.kafka.common.Metric in project kafka by apache.

Class RocksDBStoreTest, method shouldVerifyThatMetricsRecordedFromStatisticsGetMeasurementsFromRocksDB.

@Test
public void shouldVerifyThatMetricsRecordedFromStatisticsGetMeasurementsFromRocksDB() {
    final TaskId taskId = new TaskId(0, 0);
    final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.DEBUG));
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
    context = EasyMock.niceMock(InternalMockProcessorContext.class);
    EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
    EasyMock.expect(context.taskId()).andStubReturn(taskId);
    EasyMock.expect(context.appConfigs()).andStubReturn(new StreamsConfig(StreamsTestUtils.getStreamsConfig()).originals());
    EasyMock.expect(context.stateDir()).andStubReturn(dir);
    final MonotonicProcessorRecordContext processorRecordContext = new MonotonicProcessorRecordContext("test", 0);
    EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.of(processorRecordContext));
    EasyMock.replay(context);
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    final byte[] key = "hello".getBytes();
    final byte[] value = "world".getBytes();
    rocksDBStore.put(Bytes.wrap(key), value);
    streamsMetrics.rocksDBMetricsRecordingTrigger().run();
    final Metric bytesWrittenTotal = metrics.metric(new MetricName("bytes-written-total", StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP, "description is not verified", streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)));
    assertThat((double) bytesWrittenTotal.metricValue(), greaterThan(0d));
}
Also used : MetricConfig(org.apache.kafka.common.metrics.MetricConfig) MetricName(org.apache.kafka.common.MetricName) Metrics(org.apache.kafka.common.metrics.Metrics) TaskId(org.apache.kafka.streams.processor.TaskId) Metric(org.apache.kafka.common.Metric) StreamsMetricsImpl(org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl) InternalMockProcessorContext(org.apache.kafka.test.InternalMockProcessorContext) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Test(org.junit.Test)
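
Note that this test builds its Metrics registry with RecordingLevel.DEBUG, whereas the property-based examples use RecordingLevel.INFO: statistics-based RocksDB metrics such as bytes-written-total are only collected at the DEBUG recording level. In an application this is controlled by the standard metrics.recording.level Streams property; a minimal configuration sketch with placeholder application id and bootstrap servers:

import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;

public class DebugMetricsConfigSketch {
    public static Properties streamsProps() {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "metrics-demo");      // placeholder
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        // Statistics-based RocksDB metrics are recorded only at DEBUG;
        // the default recording level is INFO (property-based gauges only).
        props.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, "DEBUG");
        return props;
    }
}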

Example 40 with Metric

Use of org.apache.kafka.common.Metric in project kafka by apache.

Class RocksDBStoreTest, method shouldVerifyThatMetricsRecordedFromPropertiesGetMeasurementsFromRocksDB.

@Test
public void shouldVerifyThatMetricsRecordedFromPropertiesGetMeasurementsFromRocksDB() {
    final TaskId taskId = new TaskId(0, 0);
    final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.INFO));
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
    context = EasyMock.niceMock(InternalMockProcessorContext.class);
    EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
    EasyMock.expect(context.taskId()).andStubReturn(taskId);
    EasyMock.expect(context.appConfigs()).andStubReturn(new StreamsConfig(StreamsTestUtils.getStreamsConfig()).originals());
    EasyMock.expect(context.stateDir()).andStubReturn(dir);
    final MonotonicProcessorRecordContext processorRecordContext = new MonotonicProcessorRecordContext("test", 0);
    EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.of(processorRecordContext));
    EasyMock.replay(context);
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    final byte[] key = "hello".getBytes();
    final byte[] value = "world".getBytes();
    rocksDBStore.put(Bytes.wrap(key), value);
    final Metric numberOfEntriesActiveMemTable = metrics.metric(new MetricName("num-entries-active-mem-table", StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP, "description is not verified", streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)));
    assertThat(numberOfEntriesActiveMemTable, notNullValue());
    assertThat((BigInteger) numberOfEntriesActiveMemTable.metricValue(), greaterThan(BigInteger.valueOf(0)));
}
Also used : MetricConfig(org.apache.kafka.common.metrics.MetricConfig) MetricName(org.apache.kafka.common.MetricName) Metrics(org.apache.kafka.common.metrics.Metrics) TaskId(org.apache.kafka.streams.processor.TaskId) Metric(org.apache.kafka.common.Metric) StreamsMetricsImpl(org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl) InternalMockProcessorContext(org.apache.kafka.test.InternalMockProcessorContext) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Test(org.junit.Test)

Aggregations

Metric (org.apache.kafka.common.Metric): 42
MetricName (org.apache.kafka.common.MetricName): 24
Test (org.junit.Test): 18
MockTime (org.apache.kafka.common.utils.MockTime): 14
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 13
Map (java.util.Map): 11
Test (org.junit.jupiter.api.Test): 11
Metrics (org.apache.kafka.common.metrics.Metrics): 10
Collections (java.util.Collections): 9
HashMap (java.util.HashMap): 9
Properties (java.util.Properties): 9
KafkaMetric (org.apache.kafka.common.metrics.KafkaMetric): 9
StreamsMetricsImpl (org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl): 9
ArrayList (java.util.ArrayList): 8
TaskId (org.apache.kafka.streams.processor.TaskId): 8
List (java.util.List): 7
TopicPartition (org.apache.kafka.common.TopicPartition): 7
StreamsException (org.apache.kafka.streams.errors.StreamsException): 7
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 7
Utils.mkMap (org.apache.kafka.common.utils.Utils.mkMap): 6