Use of org.apache.beam.sdk.metrics.GaugeResult in the project beam by apache.
Example from the class FlinkMetricContainer, method updateGauge.
/**
 * Pushes Beam {@code GaugeResult}s into the corresponding Flink gauges.
 *
 * <p>Each result is mapped to a Flink metric name; on a cache hit the existing
 * {@code FlinkGauge} is updated in place, and on a miss a new gauge holding the
 * attempted value is registered with the Flink metric group and cached for
 * subsequent updates.
 *
 * @param gauges attempted gauge results queried from the Beam metrics container
 */
private void updateGauge(Iterable<MetricResult<GaugeResult>> gauges) {
  for (MetricResult<GaugeResult> result : gauges) {
    String name = getFlinkMetricNameString(GAUGE_PREFIX, result);
    GaugeResult attempted = result.attempted();
    FlinkGauge cached = flinkGaugeCache.get(name);
    if (cached != null) {
      // Known metric: just push the latest attempted value.
      cached.update(attempted);
    } else {
      // First time we see this metric: register it with Flink and remember it.
      FlinkGauge registered =
          runtimeContext.getMetricGroup().gauge(name, new FlinkGauge(attempted));
      flinkGaugeCache.put(name, registered);
    }
  }
}
Use of org.apache.beam.sdk.metrics.GaugeResult in the project beam by apache.
Example from the class KafkaIOTest, method testUnboundedSourceMetrics.
/**
 * Reads a fixed number of elements from Kafka and verifies the source metrics
 * reported by the pipeline: exact counter values for elements/bytes read (total
 * and per split), plus the mere existence of the per-split backlog gauges.
 */
@Test
public void testUnboundedSourceMetrics() {
  final int numElements = 1000;
  final String step = "readFromKafka";

  p.apply(step, mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata());
  PipelineResult pipelineResult = p.run();

  final String splitId = "0";
  MetricName elementsRead = SourceMetrics.elementsRead().getName();
  MetricName elementsReadBySplit = SourceMetrics.elementsReadBySplit(splitId).getName();
  MetricName bytesRead = SourceMetrics.bytesRead().getName();
  MetricName bytesReadBySplit = SourceMetrics.bytesReadBySplit(splitId).getName();
  MetricName backlogElementsOfSplit = SourceMetrics.backlogElementsOfSplit(splitId).getName();
  MetricName backlogBytesOfSplit = SourceMetrics.backlogBytesOfSplit(splitId).getName();

  MetricQueryResults allMetrics =
      pipelineResult.metrics().queryMetrics(MetricsFilter.builder().build());
  Iterable<MetricResult<Long>> counters = allMetrics.counters();

  // Counters have deterministic values, so assert on them exactly.
  assertThat(
      counters,
      hasItem(attemptedMetricsResult(elementsRead.namespace(), elementsRead.name(), step, 1000L)));
  assertThat(
      counters,
      hasItem(
          attemptedMetricsResult(
              elementsReadBySplit.namespace(), elementsReadBySplit.name(), step, 1000L)));
  assertThat(
      counters,
      hasItem(attemptedMetricsResult(bytesRead.namespace(), bytesRead.name(), step, 12000L)));
  assertThat(
      counters,
      hasItem(
          attemptedMetricsResult(
              bytesReadBySplit.namespace(), bytesReadBySplit.name(), step, 12000L)));

  MetricQueryResults backlogElementsMetrics =
      pipelineResult
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(
                      MetricNameFilter.named(
                          backlogElementsOfSplit.namespace(), backlogElementsOfSplit.name()))
                  .build());
  // since gauge values may be inconsistent in some environments assert only on their existence.
  assertThat(
      backlogElementsMetrics.gauges(),
      IsIterableWithSize.<MetricResult<GaugeResult>>iterableWithSize(1));

  MetricQueryResults backlogBytesMetrics =
      pipelineResult
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(
                      MetricNameFilter.named(
                          backlogBytesOfSplit.namespace(), backlogBytesOfSplit.name()))
                  .build());
  // since gauge values may be inconsistent in some environments assert only on their existence.
  assertThat(
      backlogBytesMetrics.gauges(),
      IsIterableWithSize.<MetricResult<GaugeResult>>iterableWithSize(1));
}
Use of org.apache.beam.sdk.metrics.GaugeResult in the project beam by apache.
Example from the class SparkBeamMetric, method renderAll.
/**
 * Renders every Beam metric (counters, distributions, gauges) into a flat
 * name-to-value map suitable for reporting to Spark's metrics system.
 *
 * <p>Counters map to their attempted {@code Long} value, distributions fan out
 * into {@code .count}/{@code .sum}/{@code .min}/{@code .max}/{@code .mean}
 * entries, and gauges map to their attempted value.
 *
 * @return map from rendered metric name to its attempted value
 */
Map<String, ?> renderAll() {
  Map<String, Object> metrics = new HashMap<>();
  MetricResults metricResults =
      asAttemptedOnlyMetricResults(MetricsAccumulator.getInstance().value());
  MetricQueryResults metricQueryResults =
      metricResults.queryMetrics(MetricsFilter.builder().build());
  for (MetricResult<Long> metricResult : metricQueryResults.counters()) {
    metrics.put(renderName(metricResult), metricResult.attempted());
  }
  for (MetricResult<DistributionResult> metricResult : metricQueryResults.distributions()) {
    DistributionResult result = metricResult.attempted();
    // Hoisted: renderName is loop-invariant per result; the original recomputed it five times.
    String baseName = renderName(metricResult);
    metrics.put(baseName + ".count", result.count());
    metrics.put(baseName + ".sum", result.sum());
    metrics.put(baseName + ".min", result.min());
    metrics.put(baseName + ".max", result.max());
    metrics.put(baseName + ".mean", result.mean());
  }
  for (MetricResult<GaugeResult> metricResult : metricQueryResults.gauges()) {
    metrics.put(renderName(metricResult), metricResult.attempted().value());
  }
  return metrics;
}
Aggregations