Use of org.apache.beam.sdk.metrics.MetricName in the project beam by apache.
Source: the class SparkBeamMetric, method renderName.
@VisibleForTesting
String renderName(MetricResult<?> metricResult) {
  // Sanitize the step name: map every illegal character (and '.') to '_',
  // then drop a single trailing underscore if the replacement produced one.
  String step = metricResult.step().replaceAll(ILLEGAL_CHARACTERS_AND_PERIOD, "_");
  if (step.endsWith("_")) {
    step = step.substring(0, step.length() - 1);
  }
  // Join step, namespace, and metric name with '.' separators, then sanitize
  // the remaining illegal characters (periods are kept as separators here).
  MetricName name = metricResult.name();
  String joined = step + "." + name.namespace() + "." + name.name();
  return joined.replaceAll(ILLEGAL_CHARACTERS, "_");
}
Use of org.apache.beam.sdk.metrics.MetricName in the project beam by apache.
Source: the class KafkaIOTest, method testSinkMetrics.
@Test
public void testSinkMetrics() throws Exception {
// Simply read from kafka source and write to kafka sink. Then verify the metrics are reported.
int numElements = 1000;
// Guard the shared mock producer so concurrent tests cannot interleave
// records or completion callbacks while this pipeline runs.
synchronized (MOCK_PRODUCER_LOCK) {
// Drop any records left over from a previous test.
MOCK_PRODUCER.clear();
// Background thread that completes the producer's pending sends; the sink
// blocks on send futures, so the pipeline would hang without it.
ProducerSendCompletionThread completionThread = new ProducerSendCompletionThread().start();
String topic = "test";
p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata()).apply("writeToKafka", KafkaIO.<Integer, Long>write().withBootstrapServers("none").withTopic(topic).withKeySerializer(IntegerSerializer.class).withValueSerializer(LongSerializer.class).withProducerFactoryFn(new ProducerFactoryFn()));
PipelineResult result = p.run();
// Query only counters in the sink-metrics namespace and verify the
// "writeToKafka" step reported exactly numElements elements written.
MetricName elementsWritten = SinkMetrics.elementsWritten().getName();
MetricQueryResults metrics = result.metrics().queryMetrics(MetricsFilter.builder().addNameFilter(MetricNameFilter.inNamespace(elementsWritten.namespace())).build());
assertThat(metrics.counters(), hasItem(attemptedMetricsResult(elementsWritten.namespace(), elementsWritten.name(), "writeToKafka", 1000L)));
// Stop the completion thread before releasing the lock.
completionThread.shutdown();
}
}
Use of org.apache.beam.sdk.metrics.MetricName in the project beam by apache.
Source: the class KafkaIOTest, method testUnboundedSourceMetrics.
@Test
public void testUnboundedSourceMetrics() {
  // Run an unbounded Kafka read and verify the source metrics it reports:
  // element/byte counters (overall and per split) plus backlog gauges.
  int elementCount = 1000;
  String stepName = "readFromKafka";
  String split = "0";

  p.apply(stepName, mkKafkaReadTransform(elementCount, new ValueAsTimestampFn()).withoutMetadata());
  PipelineResult pipelineResult = p.run();

  // Resolve the metric names the SDK uses for unbounded-source metrics.
  MetricName elementsRead = SourceMetrics.elementsRead().getName();
  MetricName elementsReadBySplit = SourceMetrics.elementsReadBySplit(split).getName();
  MetricName bytesRead = SourceMetrics.bytesRead().getName();
  MetricName bytesReadBySplit = SourceMetrics.bytesReadBySplit(split).getName();
  MetricName backlogElementsOfSplit = SourceMetrics.backlogElementsOfSplit(split).getName();
  MetricName backlogBytesOfSplit = SourceMetrics.backlogBytesOfSplit(split).getName();

  // Counters: 1000 elements / 12000 bytes, reported both overall and per split.
  MetricQueryResults allMetrics =
      pipelineResult.metrics().queryMetrics(MetricsFilter.builder().build());
  Iterable<MetricResult<Long>> counters = allMetrics.counters();
  assertThat(
      counters,
      hasItem(attemptedMetricsResult(elementsRead.namespace(), elementsRead.name(), stepName, 1000L)));
  assertThat(
      counters,
      hasItem(
          attemptedMetricsResult(
              elementsReadBySplit.namespace(), elementsReadBySplit.name(), stepName, 1000L)));
  assertThat(
      counters,
      hasItem(attemptedMetricsResult(bytesRead.namespace(), bytesRead.name(), stepName, 12000L)));
  assertThat(
      counters,
      hasItem(
          attemptedMetricsResult(
              bytesReadBySplit.namespace(), bytesReadBySplit.name(), stepName, 12000L)));

  // since gauge values may be inconsistent in some environments assert only on their existence.
  MetricQueryResults backlogElementsMetrics =
      pipelineResult
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(
                      MetricNameFilter.named(
                          backlogElementsOfSplit.namespace(), backlogElementsOfSplit.name()))
                  .build());
  assertThat(
      backlogElementsMetrics.gauges(),
      IsIterableWithSize.<MetricResult<GaugeResult>>iterableWithSize(1));

  // since gauge values may be inconsistent in some environments assert only on their existence.
  MetricQueryResults backlogBytesMetrics =
      pipelineResult
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(
                      MetricNameFilter.named(
                          backlogBytesOfSplit.namespace(), backlogBytesOfSplit.name()))
                  .build());
  assertThat(
      backlogBytesMetrics.gauges(),
      IsIterableWithSize.<MetricResult<GaugeResult>>iterableWithSize(1));
}
Aggregations