Use of org.apache.kafka.common.Metric in project kafka by apache.
From class MetricsTest, method testPercentilesWithRandomNumbersAndLinearBucketing.
@Test
public void testPercentilesWithRandomNumbersAndLinearBucketing() {
    long seed = new Random().nextLong();
    // 100kB
    int sizeInBytes = 100 * 1000;
    // if values are ms, max is 1000 days
    long maximumValue = 1000 * 24 * 60 * 60 * 1000L;
    try {
        Random prng = new Random(seed);
        // range is [5000, 15000)
        int numberOfValues = 5000 + prng.nextInt(10_000);
        Percentiles percs = new Percentiles(sizeInBytes, maximumValue, BucketSizing.LINEAR,
                new Percentile(metrics.metricName("test.p90", "grp1"), 90),
                new Percentile(metrics.metricName("test.p99", "grp1"), 99));
        MetricConfig config = new MetricConfig().eventWindow(50).samples(2);
        Sensor sensor = metrics.sensor("test", config);
        sensor.add(percs);
        Metric p90 = this.metrics.metrics().get(metrics.metricName("test.p90", "grp1"));
        Metric p99 = this.metrics.metrics().get(metrics.metricName("test.p99", "grp1"));
        final List<Long> values = new ArrayList<>(numberOfValues);
        // record random values
        for (int i = 0; i < numberOfValues; ++i) {
            long value = (Math.abs(prng.nextLong()) - 1) % maximumValue;
            values.add(value);
            sensor.record(value);
        }
        Collections.sort(values);
        int p90Index = (int) Math.ceil(((double) (90 * numberOfValues)) / 100);
        int p99Index = (int) Math.ceil(((double) (99 * numberOfValues)) / 100);
        double expectedP90 = values.get(p90Index - 1);
        double expectedP99 = values.get(p99Index - 1);
        // linear bucketing is coarse, so allow up to 20% relative error
        assertEquals(expectedP90, (Double) p90.metricValue(), expectedP90 / 5);
        assertEquals(expectedP99, (Double) p99.metricValue(), expectedP99 / 5);
    } catch (AssertionError e) {
        throw new AssertionError("Assertion failed in randomized test. Reproduce with seed = " + seed + " .", e);
    }
}
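The test above depends on the metrics registry created in MetricsTest's setup. A minimal, standalone sketch of the same Percentiles pattern using only the public org.apache.kafka.common.metrics API follows; names such as standaloneMetrics and the latency.* metric names are illustrative, and the usual imports from org.apache.kafka.common.metrics and org.apache.kafka.common.metrics.stats are assumed.

Metrics standaloneMetrics = new Metrics(new MetricConfig().eventWindow(50).samples(2));
// 100 kB of buckets, values capped at 60 seconds, linear bucket widths
Percentiles percentiles = new Percentiles(100 * 1000, 60_000, BucketSizing.LINEAR,
        new Percentile(standaloneMetrics.metricName("latency.p90", "example"), 90),
        new Percentile(standaloneMetrics.metricName("latency.p99", "example"), 99));
Sensor latencySensor = standaloneMetrics.sensor("latency");
latencySensor.add(percentiles);
latencySensor.record(42);
// the registered percentile metrics are looked up by the same MetricName used to create them
Metric p90 = standaloneMetrics.metrics().get(standaloneMetrics.metricName("latency.p90", "example"));
System.out.println(p90.metricValue());
standaloneMetrics.close();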
Use of org.apache.kafka.common.Metric in project kafka by apache.
From class FrequenciesTest, method testUseWithMetrics.
@Test
public void testUseWithMetrics() {
    MetricName name1 = name("1");
    MetricName name2 = name("2");
    MetricName name3 = name("3");
    MetricName name4 = name("4");
    Frequencies frequencies = new Frequencies(4, 1.0, 4.0,
            new Frequency(name1, 1.0), new Frequency(name2, 2.0),
            new Frequency(name3, 3.0), new Frequency(name4, 4.0));
    Sensor sensor = metrics.sensor("test", config);
    sensor.add(frequencies);
    Metric metric1 = this.metrics.metrics().get(name1);
    Metric metric2 = this.metrics.metrics().get(name2);
    Metric metric3 = this.metrics.metrics().get(name3);
    Metric metric4 = this.metrics.metrics().get(name4);
    // Record 2 windows worth of values: 1, 2, 3, and 4 in equal proportion
    for (int i = 0; i != 100; ++i) {
        frequencies.record(config, i % 4 + 1, time.milliseconds());
    }
    assertEquals(0.25, (Double) metric1.metricValue(), DELTA);
    assertEquals(0.25, (Double) metric2.metricValue(), DELTA);
    assertEquals(0.25, (Double) metric3.metricValue(), DELTA);
    assertEquals(0.25, (Double) metric4.metricValue(), DELTA);
    // Record 2 windows worth of values: only 1 and 2, in equal proportion
    for (int i = 0; i != 100; ++i) {
        frequencies.record(config, i % 2 + 1, time.milliseconds());
    }
    assertEquals(0.50, (Double) metric1.metricValue(), DELTA);
    assertEquals(0.50, (Double) metric2.metricValue(), DELTA);
    assertEquals(0.00, (Double) metric3.metricValue(), DELTA);
    assertEquals(0.00, (Double) metric4.metricValue(), DELTA);
    // Record 1 window worth of 4.0 values, overlapping the previous window that is half 1.0 and half 2.0
    for (int i = 0; i != 50; ++i) {
        frequencies.record(config, 4.0, time.milliseconds());
    }
    assertEquals(0.25, (Double) metric1.metricValue(), DELTA);
    assertEquals(0.25, (Double) metric2.metricValue(), DELTA);
    assertEquals(0.00, (Double) metric3.metricValue(), DELTA);
    assertEquals(0.50, (Double) metric4.metricValue(), DELTA);
}
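The snippet refers to fields and helpers defined elsewhere in FrequenciesTest (metrics, config, time, DELTA, and name(...)). A rough sketch of what that setup presumably looks like; the concrete values below are illustrative guesses, not copied from the test class.

// Assumed fixtures (illustrative) so the snippet above compiles in isolation
private final Time time = new MockTime();
private final MetricConfig config = new MetricConfig().eventWindow(50).samples(2);
private final Metrics metrics = new Metrics(config, new ArrayList<>(), time, true);
private static final double DELTA = 0.0001;

private MetricName name(String metricName) {
    // build the MetricName in the same registry the sensor publishes to
    return metrics.metricName(metricName, "group-id");
}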
Use of org.apache.kafka.common.Metric in project kafka by apache.
From class KafkaConsumerTest, method testMeasureCommittedDuration.
@Test
public void testMeasureCommittedDuration() {
    long offset1 = 10000;
    // MockTime auto-ticks by 1 second on every time lookup, so the committed() call appears to take ~1s
    Time time = new MockTime(Duration.ofSeconds(1).toMillis());
    SubscriptionState subscription = new SubscriptionState(new LogContext(), OffsetResetStrategy.EARLIEST);
    ConsumerMetadata metadata = createMetadata(subscription);
    MockClient client = new MockClient(time, metadata);
    initMetadata(client, Collections.singletonMap(topic, 2));
    Node node = metadata.fetch().nodes().get(0);
    KafkaConsumer<String, String> consumer = newConsumer(time, client, subscription, metadata, assignor, true, groupInstanceId);
    consumer.assign(singletonList(tp0));
    // lookup coordinator
    client.prepareResponseFrom(FindCoordinatorResponse.prepareResponse(Errors.NONE, groupId, node), node);
    Node coordinator = new Node(Integer.MAX_VALUE - node.id(), node.host(), node.port());
    // fetch offset for one topic
    client.prepareResponseFrom(offsetResponse(Collections.singletonMap(tp0, offset1), Errors.NONE), coordinator);
    consumer.committed(Collections.singleton(tp0)).get(tp0).offset();
    final Metric metric = consumer.metrics().get(consumer.metrics.metricName("committed-time-ns-total", "consumer-metrics"));
    assertTrue((Double) metric.metricValue() >= Duration.ofMillis(999).toNanos());
}
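Note that the lookup builds the MetricName through the consumer's internal metrics field, which is only accessible from within the test. Code outside the client can reach the same value by scanning the map returned by consumer.metrics(), roughly as follows (a sketch, not part of the test):

// Find the total time spent in committed() by metric name and group (null if the metric is absent)
Double committedTimeNs = consumer.metrics().entrySet().stream()
        .filter(e -> "committed-time-ns-total".equals(e.getKey().name())
                && "consumer-metrics".equals(e.getKey().group()))
        .map(e -> (Double) e.getValue().metricValue())
        .findFirst()
        .orElse(null);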
Use of org.apache.kafka.common.Metric in project kafka by apache.
From class StreamThreadTest, method runAndVerifyFailedStreamThreadRecording.
public void runAndVerifyFailedStreamThreadRecording(final boolean shouldFail) {
    final Consumer<byte[], byte[]> consumer = EasyMock.createNiceMock(Consumer.class);
    final ConsumerGroupMetadata consumerGroupMetadata = mock(ConsumerGroupMetadata.class);
    expect(consumer.groupMetadata()).andStubReturn(consumerGroupMetadata);
    expect(consumerGroupMetadata.groupInstanceId()).andReturn(Optional.empty());
    EasyMock.replay(consumer, consumerGroupMetadata);
    final TaskManager taskManager = EasyMock.createNiceMock(TaskManager.class);
    expect(taskManager.producerClientIds()).andStubReturn(Collections.emptySet());
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, StreamsConfig.METRICS_LATEST, mockTime);
    final TopologyMetadata topologyMetadata = new TopologyMetadata(internalTopologyBuilder, config);
    topologyMetadata.buildAndRewriteTopology();
    final StreamThread thread = new StreamThread(
            mockTime, config, null, consumer, consumer, null, null, taskManager, streamsMetrics,
            topologyMetadata, CLIENT_ID, new LogContext(""), new AtomicInteger(),
            new AtomicLong(Long.MAX_VALUE), new LinkedList<>(), null, (e, b) -> { }, null) {

        @Override
        void runOnce() {
            setState(StreamThread.State.PENDING_SHUTDOWN);
            if (shouldFail) {
                throw new StreamsException(Thread.currentThread().getName());
            }
        }
    };
    EasyMock.replay(taskManager);
    thread.updateThreadMetadata("metadata");
    thread.run();
    final Metric failedThreads = StreamsTestUtils.getMetricByName(metrics.metrics(), "failed-stream-threads", "stream-metrics");
    assertThat(failedThreads.metricValue(), is(shouldFail ? 1.0 : 0.0));
}
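Since runAndVerifyFailedStreamThreadRecording is a parameterized helper rather than a @Test, it is presumably invoked from test methods along these lines (the method names below are hypothetical):

@Test
public void shouldNotRecordFailedStreamThreadWhenRunSucceeds() {
    // hypothetical caller: a clean shutdown should leave failed-stream-threads at 0.0
    runAndVerifyFailedStreamThreadRecording(false);
}

@Test
public void shouldRecordFailedStreamThreadWhenRunThrows() {
    // hypothetical caller: a StreamsException from runOnce() should bump failed-stream-threads to 1.0
    runAndVerifyFailedStreamThreadRecording(true);
}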
Use of org.apache.kafka.common.Metric in project kafka by apache.
From class StreamThreadTest, method shouldTransmitTaskManagerMetrics.
@Test
public void shouldTransmitTaskManagerMetrics() {
    final Consumer<byte[], byte[]> consumer = EasyMock.createNiceMock(Consumer.class);
    final ConsumerGroupMetadata consumerGroupMetadata = mock(ConsumerGroupMetadata.class);
    expect(consumer.groupMetadata()).andStubReturn(consumerGroupMetadata);
    expect(consumerGroupMetadata.groupInstanceId()).andReturn(Optional.empty());
    EasyMock.replay(consumer, consumerGroupMetadata);
    final TaskManager taskManager = EasyMock.createNiceMock(TaskManager.class);
    final MetricName testMetricName = new MetricName("test_metric", "", "", new HashMap<>());
    final Metric testMetric = new KafkaMetric(new Object(), testMetricName, (Measurable) (config, now) -> 0, null, new MockTime());
    final Map<MetricName, Metric> dummyProducerMetrics = singletonMap(testMetricName, testMetric);
    expect(taskManager.producerMetrics()).andReturn(dummyProducerMetrics);
    EasyMock.replay(taskManager);
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, StreamsConfig.METRICS_LATEST, mockTime);
    final TopologyMetadata topologyMetadata = new TopologyMetadata(internalTopologyBuilder, config);
    topologyMetadata.buildAndRewriteTopology();
    final StreamThread thread = buildStreamThread(consumer, taskManager, config, topologyMetadata);
    assertThat(dummyProducerMetrics, is(thread.producerMetrics()));
}
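The final assertion compares the whole metrics map; an equivalent per-metric check on the transmitted dummy metric would look roughly like this (illustrative, not part of the test):

Metric transmitted = thread.producerMetrics().get(testMetricName);
assertThat(transmitted.metricName(), is(testMetricName));
// the Measurable supplied above always reports 0, so the transmitted value is 0.0
assertThat(transmitted.metricValue(), is(0.0));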