Usage example of org.apache.kafka.common.metrics.KafkaMetric from the micrometer project (micrometer-metrics): the method shouldKeepMetersWhenMetricsDoNotChange in class KafkaMetricsTest.
@Test
void shouldKeepMetersWhenMetricsDoNotChange() {
    // Given: a supplier that always yields the same single Kafka metric
    Supplier<Map<MetricName, ? extends Metric>> metricsSupplier = () -> {
        MetricName name = new MetricName("a", "b", "c", new LinkedHashMap<>());
        return Collections.singletonMap(
                name, new KafkaMetric(this, name, new Value(), new MetricConfig(), Time.SYSTEM));
    };
    kafkaMetrics = new KafkaMetrics(metricsSupplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    // When: binding once and then re-checking with an unchanged metric set
    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);
    kafkaMetrics.checkAndBindMetrics(registry);

    // Then: the existing meter is kept and no duplicate is registered
    assertThat(registry.getMeters()).hasSize(1);
}
Usage example of org.apache.kafka.common.metrics.KafkaMetric from the micrometer project (micrometer-metrics): the method shouldRemoveMeterWithLessTags in class KafkaMetricsTest.
@Test
void shouldRemoveMeterWithLessTags() {
    // Given: two metrics sharing the same name — one untagged, one carrying a tag
    Supplier<Map<MetricName, ? extends Metric>> metricsSupplier = () -> {
        MetricName untagged = new MetricName("a", "b", "c", Collections.emptyMap());
        Map<String, String> tagMap = new LinkedHashMap<>();
        tagMap.put("key0", "value0");
        MetricName tagged = new MetricName("a", "b", "c", tagMap);
        Map<MetricName, KafkaMetric> byName = new LinkedHashMap<>();
        byName.put(untagged, new KafkaMetric(this, untagged, new Value(), new MetricConfig(), Time.SYSTEM));
        byName.put(tagged, new KafkaMetric(this, tagged, new Value(), new MetricConfig(), Time.SYSTEM));
        return byName;
    };
    kafkaMetrics = new KafkaMetrics(metricsSupplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    // When
    kafkaMetrics.bindTo(registry);

    // Then: only the more specific meter survives, carrying version + key0 tags
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2);
}
Usage example of org.apache.kafka.common.metrics.KafkaMetric from the micrometer project (micrometer-metrics): the method shouldAddNewMetersWhenMetricsChange in class KafkaMetricsTest.
@Test
void shouldAddNewMetersWhenMetricsChange() {
    // Given: a mutable metric map exposed to the binder through a supplier
    AtomicReference<Map<MetricName, KafkaMetric>> metricsRef = new AtomicReference<>(new LinkedHashMap<>());
    Supplier<Map<MetricName, ? extends Metric>> metricsSupplier = () -> metricsRef.updateAndGet(current -> {
        MetricName name = new MetricName("a0", "b0", "c0", new LinkedHashMap<>());
        current.put(name, new KafkaMetric(this, name, new Value(), new MetricConfig(), Time.SYSTEM));
        return current;
    });
    kafkaMetrics = new KafkaMetrics(metricsSupplier);
    MeterRegistry registry = new SimpleMeterRegistry();
    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);

    // When: a second metric appears in the underlying map
    metricsRef.updateAndGet(current -> {
        MetricName name = new MetricName("a1", "b1", "c1", new LinkedHashMap<>());
        current.put(name, new KafkaMetric(this, name, new Value(), new MetricConfig(), Time.SYSTEM));
        return current;
    });
    kafkaMetrics.checkAndBindMetrics(registry);

    // Then: the new metric is registered alongside the existing one
    assertThat(registry.getMeters()).hasSize(2);
}
Usage example of org.apache.kafka.common.metrics.KafkaMetric from the ksql project (confluentinc): the method shouldRemoveMetric in class RocksDBMetricsCollectorTest.
@Test
public void shouldRemoveMetric() {
    // Given: a registered block-cache-usage metric
    final KafkaMetric metric = mockMetric(
        StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
        RocksDBMetricsCollector.BLOCK_CACHE_USAGE,
        "a",
        BigInteger.valueOf(2));
    collector.metricChange(metric);

    // When: that metric is removed again
    collector.metricRemoval(metric);

    // Then: the aggregated total gauge falls back to zero
    final Gauge<?> gauge =
        verifyAndGetRegisteredMetric(RocksDBMetricsCollector.BLOCK_CACHE_USAGE + "-total");
    assertThat(gauge.value(null, 0), equalTo(BigInteger.valueOf(0)));
}
Usage example of org.apache.kafka.common.metrics.KafkaMetric from the ksql project (confluentinc): the method shouldNotUpdateIfWithinInterval in class RocksDBMetricsCollectorTest.
@Test
public void shouldNotUpdateIfWithinInterval() {
    // Given: a collector holding one metric, configured with a one-hour update interval
    final RocksDBMetricsCollector collector = new RocksDBMetricsCollector();
    final KafkaMetric metric = mockMetric(
        StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP,
        RocksDBMetricsCollector.BLOCK_CACHE_USAGE,
        "a",
        BigInteger.valueOf(2));
    collector.metricChange(metric);
    collector.configure(ImmutableMap.of(
        RocksDBMetricsCollector.UPDATE_INTERVAL_CONFIG, 3600,
        KsqlConfig.KSQL_INTERNAL_METRICS_CONFIG, metrics));
    final Gauge<?> gauge =
        verifyAndGetRegisteredMetric(RocksDBMetricsCollector.BLOCK_CACHE_USAGE + "-total");

    // When: the gauge is read repeatedly within the interval
    for (int i = 0; i < 3; i++) {
        gauge.value(null, 0);
    }

    // Then: the underlying Kafka metric is sampled only once
    verify(metric, times(1)).metricValue();
}
End of aggregated usage examples.