Use of org.apache.kafka.common.Metric in the Apache Kafka project.
From the class StreamTaskTest, method shouldRecordE2ELatencyOnSourceNodeAndTerminalNodes.
@Test
public void shouldRecordE2ELatencyOnSourceNodeAndTerminalNodes() {
    time = new MockTime(0L, 0L, 0L);
    metrics = new Metrics(new MetricConfig().recordLevel(Sensor.RecordingLevel.INFO), time);
    // Source node that forwards only even keys, so the terminal node sees a strict
    // subset of the records and its e2e-latency metrics diverge from the source's.
    final MockSourceNode<Integer, Integer> evenKeyForwardingSourceNode = new MockSourceNode<Integer, Integer>(intDeserializer, intDeserializer) {
        InternalProcessorContext<Integer, Integer> context;

        @Override
        public void init(final InternalProcessorContext<Integer, Integer> context) {
            this.context = context;
            super.init(context);
        }

        @Override
        public void process(final Record<Integer, Integer> record) {
            if (record.key() % 2 == 0) {
                context.forward(record);
            }
        }
    };
    task = createStatelessTaskWithForwardingTopology(evenKeyForwardingSourceNode);
    task.initializeIfNeeded();
    task.completeRestoration(noOpResetter -> { });

    final String sourceNodeName = evenKeyForwardingSourceNode.name();
    final String terminalNodeName = processorStreamTime.name();

    final Metric sourceAvg = e2eLatencyMetric("avg", sourceNodeName);
    final Metric sourceMin = e2eLatencyMetric("min", sourceNodeName);
    final Metric sourceMax = e2eLatencyMetric("max", sourceNodeName);
    final Metric terminalAvg = e2eLatencyMetric("avg", terminalNodeName);
    final Metric terminalMin = e2eLatencyMetric("min", terminalNodeName);
    final Metric terminalMax = e2eLatencyMetric("max", terminalNodeName);

    // key 0 (even, e2e latency = 10): reaches the terminal node
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(0, 0L)));
    task.process(10L);
    assertLatencyMetrics(sourceAvg, sourceMin, sourceMax, 10.0, 10.0, 10.0);
    assertLatencyMetrics(terminalAvg, terminalMin, terminalMax, 10.0, 10.0, 10.0);

    // key 1 (odd, e2e latency = 15): stops at the source, terminal metrics unchanged
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(1, 0L)));
    task.process(15L);
    assertLatencyMetrics(sourceAvg, sourceMin, sourceMax, 12.5, 10.0, 15.0);
    assertLatencyMetrics(terminalAvg, terminalMin, terminalMax, 10.0, 10.0, 10.0);

    // key 2 (even, e2e latency = 23): reaches the terminal node
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(2, 0L)));
    task.process(23L);
    assertLatencyMetrics(sourceAvg, sourceMin, sourceMax, 16.0, 10.0, 23.0);
    assertLatencyMetrics(terminalAvg, terminalMin, terminalMax, 16.5, 10.0, 23.0);

    // key 3 (odd, e2e latency = 5): stops at the source, terminal metrics unchanged
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(3, 0L)));
    task.process(5L);
    assertLatencyMetrics(sourceAvg, sourceMin, sourceMax, 13.25, 5.0, 23.0);
    assertLatencyMetrics(terminalAvg, terminalMin, terminalMax, 16.5, 10.0, 23.0);
}

// Looks up the record-e2e-latency metric with the given statistic suffix
// ("avg", "min", or "max") for the given processor node of the current task.
private Metric e2eLatencyMetric(final String statSuffix, final String nodeName) {
    return getProcessorMetric("record-e2e-latency", "%s-" + statSuffix, task.id().toString(), nodeName, StreamsConfig.METRICS_LATEST);
}

// Asserts the avg/min/max e2e-latency metric triple in one call, replacing the
// repeated three-line assertion groups.
private static void assertLatencyMetrics(final Metric avg,
                                         final Metric min,
                                         final Metric max,
                                         final double expectedAvg,
                                         final double expectedMin,
                                         final double expectedMax) {
    assertThat(avg.metricValue(), equalTo(expectedAvg));
    assertThat(min.metricValue(), equalTo(expectedMin));
    assertThat(max.metricValue(), equalTo(expectedMax));
}
Use of org.apache.kafka.common.Metric in the Apache Kafka project.
From the class MetricsIntegrationTest, method checkMetricByName.
private void checkMetricByName(final List<Metric> listMetric, final String metricName, final int numMetric) {
    // Collect every metric whose name matches the expected metric name.
    final List<Metric> matchingMetrics = listMetric.stream()
        .filter(metric -> metric.metricName().name().equals(metricName))
        .collect(Collectors.toList());
    // Exactly numMetric instances of this metric must be registered.
    Assert.assertEquals(
        "Size of metrics of type:'" + metricName + "' must be equal to " + numMetric + " but it's equal to " + matchingMetrics.size(),
        numMetric,
        matchingMetrics.size());
    // And each of them must report a non-null value.
    matchingMetrics.forEach(metric ->
        Assert.assertNotNull("Metric:'" + metric.metricName() + "' must be not null", metric.metricValue()));
}
Use of org.apache.kafka.common.Metric in the Apache Kafka project.
From the class ActiveTaskCreatorTest, method addMetric.
// Registers a mock metric with the given name and a fixed value on the producer.
private void addMetric(final MockProducer<?, ?> producer, final String name, final double value) {
    final MetricName mockMetricName = metricName(name);
    // Anonymous Metric that always reports the supplied constant value.
    final Metric constantMetric = new Metric() {
        @Override
        public MetricName metricName() {
            return mockMetricName;
        }

        @Override
        public Object metricValue() {
            return value;
        }
    };
    producer.setMockMetrics(mockMetricName, constantMetric);
}
Use of org.apache.kafka.common.Metric in the Apache Kafka project.
From the class ActiveTaskCreatorTest, method shouldConstructProducerMetricsPerTask.
// Verifies that producerMetrics() surfaces exactly the metrics registered on the task producer.
private void shouldConstructProducerMetricsPerTask() {
    mockClientSupplier.setApplicationIdForProducer("appId");
    createTasks();

    // Register two mock metrics on the first (and only) producer.
    final MetricName metricName1 = new MetricName("test_metric_1", "", "", new HashMap<>());
    final Metric metric1 = new KafkaMetric(new Object(), metricName1, (Measurable) (config, now) -> 0, null, new MockTime());
    final MetricName metricName2 = new MetricName("test_metric_2", "", "", new HashMap<>());
    final Metric metric2 = new KafkaMetric(new Object(), metricName2, (Measurable) (config, now) -> 0, null, new MockTime());
    final MockProducer<?, ?> producer = mockClientSupplier.producers.get(0);
    producer.setMockMetrics(metricName1, metric1);
    producer.setMockMetrics(metricName2, metric2);

    // Both registered metrics, and nothing else, must come back.
    final Map<MetricName, Metric> collectedMetrics = activeTaskCreator.producerMetrics();
    assertThat(collectedMetrics, is(mkMap(mkEntry(metricName1, metric1), mkEntry(metricName2, metric2))));
}
Use of org.apache.kafka.common.Metric in the Apache Kafka project.
From the class ActiveTaskCreatorTest, method shouldConstructThreadProducerMetric.
// Verifies that producerMetrics() exposes the single metric registered on the thread producer.
private void shouldConstructThreadProducerMetric() {
    createTasks();

    // Register one mock metric on the single mocked producer.
    final MetricName threadMetricName = new MetricName("test_metric", "", "", new HashMap<>());
    final Metric threadMetric = new KafkaMetric(new Object(), threadMetricName, (Measurable) (config, now) -> 0, null, new MockTime());
    mockClientSupplier.producers.get(0).setMockMetrics(threadMetricName, threadMetric);
    assertThat(mockClientSupplier.producers.size(), is(1));

    // Exactly that one metric must be reported back.
    final Map<MetricName, Metric> collectedMetrics = activeTaskCreator.producerMetrics();
    assertThat(collectedMetrics.size(), is(1));
    assertThat(collectedMetrics.get(threadMetricName), is(threadMetric));
}
Aggregations