Use of org.apache.kafka.common.metrics.MetricConfig in the Apache Kafka project.
From the class StreamsMetricsImpl, method addClientLevelImmutableMetric:
/**
 * Registers a client-level metric whose value is fixed at registration time.
 *
 * @param name           metric name within {@code CLIENT_LEVEL_GROUP}
 * @param description    human-readable description attached to the metric
 * @param recordingLevel level at which the metric is recorded
 * @param value          the constant value the metric will report
 */
public <T> void addClientLevelImmutableMetric(final String name, final String description, final RecordingLevel recordingLevel, final T value) {
    final MetricConfig config = new MetricConfig().recordLevel(recordingLevel);
    final MetricName clientMetricName = metrics.metricName(name, CLIENT_LEVEL_GROUP, description, clientLevelTagMap());
    // Guard the bookkeeping list so registration and tracking stay in sync.
    synchronized (clientLevelMetrics) {
        metrics.addMetric(clientMetricName, config, new ImmutableMetricValue<>(value));
        clientLevelMetrics.push(clientMetricName);
    }
}
Use of org.apache.kafka.common.metrics.MetricConfig in the Apache Kafka project.
From the class StreamsMetricsImplTest, method shouldAddClientLevelMutableMetric:
@Test
public void shouldAddClientLevelMutableMetric() {
    // Expect the metric to be registered with the given recording level and gauge.
    final Metrics mockMetrics = mock(Metrics.class);
    final RecordingLevel level = RecordingLevel.INFO;
    final Gauge<String> gauge = (cfg, nowMs) -> "mutable-value";
    final MetricConfig expectedConfig = new MetricConfig().recordLevel(level);
    expect(mockMetrics.metricName(METRIC_NAME1, CLIENT_LEVEL_GROUP, DESCRIPTION1, clientLevelTags)).andReturn(metricName1);
    mockMetrics.addMetric(EasyMock.eq(metricName1), eqMetricConfig(expectedConfig), eq(gauge));
    replay(mockMetrics);

    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(mockMetrics, CLIENT_ID, VERSION, time);
    streamsMetrics.addClientLevelMutableMetric(METRIC_NAME1, DESCRIPTION1, level, gauge);

    verify(mockMetrics);
}
Use of org.apache.kafka.common.metrics.MetricConfig in the Apache Kafka project.
From the class StreamsMetricsImplTest, method verifyMetric:
/**
 * Looks up the named metric, records two values on the shared sensor, and
 * asserts the metric then measures the expected value.
 */
private void verifyMetric(final String name, final String description, final double valueToRecord1, final double valueToRecord2, final double expectedMetricValue) {
    final MetricName lookupName = new MetricName(name, group, description, tags);
    final KafkaMetric registered = metrics.metric(lookupName);
    assertThat(registered, is(notNullValue()));
    assertThat(registered.metricName().description(), equalTo(description));

    // Record each value with its own timestamp read, matching the recording path.
    sensor.record(valueToRecord1, time.milliseconds());
    sensor.record(valueToRecord2, time.milliseconds());

    final double measured = registered.measurable().measure(new MetricConfig(), time.milliseconds());
    assertThat(measured, equalTo(expectedMetricValue));
}
Use of org.apache.kafka.common.metrics.MetricConfig in the Apache Kafka project.
From the class RocksDBStoreTest, method shouldVerifyThatPropertyBasedMetricsUseValidPropertyName:
@Test
public void shouldVerifyThatPropertyBasedMetricsUseValidPropertyName() {
    // Use a real registry at INFO level so property-based RocksDB gauges are registered.
    final TaskId taskId = new TaskId(0, 0);
    final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.INFO));
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
    final Properties props = StreamsTestUtils.getStreamsConfig();

    context = EasyMock.niceMock(InternalMockProcessorContext.class);
    EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
    EasyMock.expect(context.taskId()).andStubReturn(taskId);
    EasyMock.expect(context.appConfigs()).andStubReturn(new StreamsConfig(props).originals());
    EasyMock.expect(context.stateDir()).andStubReturn(dir);
    EasyMock.replay(context);

    rocksDBStore.init((StateStoreContext) context, rocksDBStore);

    final List<String> propertyNames = Arrays.asList(
        "num-entries-active-mem-table",
        "num-deletes-active-mem-table",
        "num-entries-imm-mem-tables",
        "num-deletes-imm-mem-tables",
        "num-immutable-mem-table",
        "cur-size-active-mem-table",
        "cur-size-all-mem-tables",
        "size-all-mem-tables",
        "mem-table-flush-pending",
        "num-running-flushes",
        "compaction-pending",
        "num-running-compactions",
        "estimate-pending-compaction-bytes",
        "total-sst-files-size",
        "live-sst-files-size",
        "num-live-versions",
        "block-cache-capacity",
        "block-cache-usage",
        "block-cache-pinned-usage",
        "estimate-num-keys",
        "estimate-table-readers-mem",
        "background-errors"
    );
    for (final String propertyName : propertyNames) {
        final Metric metric = metrics.metric(new MetricName(
            propertyName,
            StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
            "description is not verified",
            streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)
        ));
        assertThat("Metric " + propertyName + " not found!", metric, notNullValue());
        // Reading the value must not throw, i.e. the RocksDB property name is valid.
        metric.metricValue();
    }
}
Use of org.apache.kafka.common.metrics.MetricConfig in the Apache Kafka project.
From the class RocksDBStoreTest, method shouldVerifyThatMetricsRecordedFromStatisticsGetMeasurementsFromRocksDB:
@Test
public void shouldVerifyThatMetricsRecordedFromStatisticsGetMeasurementsFromRocksDB() {
    // Statistics-based RocksDB metrics are only recorded at DEBUG level.
    final TaskId taskId = new TaskId(0, 0);
    final Metrics metrics = new Metrics(new MetricConfig().recordLevel(RecordingLevel.DEBUG));
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, "test-application", StreamsConfig.METRICS_LATEST, time);
    final MonotonicProcessorRecordContext recordContext = new MonotonicProcessorRecordContext("test", 0);

    context = EasyMock.niceMock(InternalMockProcessorContext.class);
    EasyMock.expect(context.metrics()).andStubReturn(streamsMetrics);
    EasyMock.expect(context.taskId()).andStubReturn(taskId);
    EasyMock.expect(context.appConfigs()).andStubReturn(new StreamsConfig(StreamsTestUtils.getStreamsConfig()).originals());
    EasyMock.expect(context.stateDir()).andStubReturn(dir);
    EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.of(recordContext));
    EasyMock.replay(context);

    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    rocksDBStore.put(Bytes.wrap("hello".getBytes()), "world".getBytes());

    // Run a recording pass so measurements are pulled from RocksDB statistics.
    streamsMetrics.rocksDBMetricsRecordingTrigger().run();

    final Metric bytesWrittenTotal = metrics.metric(new MetricName(
        "bytes-written-total",
        StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
        "description is not verified",
        streamsMetrics.storeLevelTagMap(taskId.toString(), METRICS_SCOPE, DB_NAME)
    ));
    assertThat((double) bytesWrittenTotal.metricValue(), greaterThan(0d));
}
Aggregations