Use of org.apache.beam.sdk.metrics.DistributionResult in project flink by apache.
The class FlinkMetricContainerTest, method testDistributionMonitoringInfoUpdate.
@Test
public void testDistributionMonitoringInfoUpdate() {
    MonitoringInfo userMonitoringInfo =
        new SimpleMonitoringInfoBuilder()
            .setUrn(MonitoringInfoConstants.Urns.USER_DISTRIBUTION_INT64)
            .setLabel(MonitoringInfoConstants.Labels.NAMESPACE, DEFAULT_NAMESPACE)
            .setLabel(MonitoringInfoConstants.Labels.NAME, "myDistribution")
            .setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "anyPTransform")
            .setInt64DistributionValue(DistributionData.create(30, 10, 1, 5))
            .build();
    container.updateMetrics("step", ImmutableList.of(userMonitoringInfo));
    // The one Flink distribution that gets created is a FlinkDistributionGauge; here we verify
    // its initial (and, in this test, final) value.
    verify(metricGroup)
        .gauge(
            eq("myDistribution"),
            argThat(
                (ArgumentMatcher<FlinkMetricContainer.FlinkDistributionGauge>) argument -> {
                    DistributionResult actual = argument.getValue();
                    DistributionResult expected = DistributionResult.create(30, 10, 1, 5);
                    return actual.equals(expected);
                }));
}
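For orientation, a minimal standalone sketch (not taken from the test above) of how DistributionResult packs the four raw statistics the matcher compares: the argument order is (sum, count, min, max), and the mean is derived from sum and count.

import org.apache.beam.sdk.metrics.DistributionResult;

public class DistributionResultSketch {
    public static void main(String[] args) {
        // Same values as the test: sum=30, count=10, min=1, max=5.
        DistributionResult result = DistributionResult.create(30, 10, 1, 5);
        System.out.println(result.getCount()); // 10
        System.out.println(result.getSum());   // 30
        System.out.println(result.getMin());   // 1
        System.out.println(result.getMax());   // 5
        System.out.println(result.getMean());  // 3.0, i.e. sum / count
    }
}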
Use of org.apache.beam.sdk.metrics.DistributionResult in project beam by apache.
The class SparkBeamMetric, method renderAll (instance variant).
Map<String, ?> renderAll() {
    Map<String, Object> metrics = new HashMap<>();
    MetricResults metricResults =
        asAttemptedOnlyMetricResults(MetricsAccumulator.getInstance().value());
    MetricQueryResults metricQueryResults = metricResults.allMetrics();
    for (MetricResult<Long> metricResult : metricQueryResults.getCounters()) {
        metrics.put(renderName(metricResult), metricResult.getAttempted());
    }
    for (MetricResult<DistributionResult> metricResult : metricQueryResults.getDistributions()) {
        DistributionResult result = metricResult.getAttempted();
        metrics.put(renderName(metricResult) + ".count", result.getCount());
        metrics.put(renderName(metricResult) + ".sum", result.getSum());
        metrics.put(renderName(metricResult) + ".min", result.getMin());
        metrics.put(renderName(metricResult) + ".max", result.getMax());
        metrics.put(renderName(metricResult) + ".mean", result.getMean());
    }
    for (MetricResult<GaugeResult> metricResult : metricQueryResults.getGauges()) {
        metrics.put(renderName(metricResult), metricResult.getAttempted().getValue());
    }
    return metrics;
}
Use of org.apache.beam.sdk.metrics.DistributionResult in project beam by apache.
The class SparkBeamMetric, method renderAll (static variant).
static Map<String, ?> renderAll(MetricResults metricResults) {
    Map<String, Object> metrics = new HashMap<>();
    MetricQueryResults metricQueryResults = metricResults.allMetrics();
    for (MetricResult<Long> metricResult : metricQueryResults.getCounters()) {
        metrics.put(renderName(metricResult), metricResult.getAttempted());
    }
    for (MetricResult<DistributionResult> metricResult : metricQueryResults.getDistributions()) {
        DistributionResult result = metricResult.getAttempted();
        metrics.put(renderName(metricResult) + ".count", result.getCount());
        metrics.put(renderName(metricResult) + ".sum", result.getSum());
        metrics.put(renderName(metricResult) + ".min", result.getMin());
        metrics.put(renderName(metricResult) + ".max", result.getMax());
        metrics.put(renderName(metricResult) + ".mean", result.getMean());
    }
    for (MetricResult<GaugeResult> metricResult : metricQueryResults.getGauges()) {
        metrics.put(renderName(metricResult), metricResult.getAttempted().getValue());
    }
    return metrics;
}
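As a rough usage sketch only: the static overload can be fed any Beam MetricResults, for example the one returned by PipelineResult.metrics(). Note that SparkBeamMetric is internal to the Spark runner and renderAll is not public API, so the call below assumes package-level access; the report method is a hypothetical caller added for illustration.

// Hypothetical caller, for illustration only.
static void report(org.apache.beam.sdk.PipelineResult pipelineResult) {
    Map<String, ?> flattened = SparkBeamMetric.renderAll(pipelineResult.metrics());
    // A distribution contributes five entries per rendered name:
    // <name>.count, <name>.sum, <name>.min, <name>.max and <name>.mean.
    flattened.forEach((name, value) -> System.out.println(name + " = " + value));
}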
Use of org.apache.beam.sdk.metrics.DistributionResult in project beam by apache.
The class MetricsGraphiteSink, method writeMetrics.
@Override
public void writeMetrics(MetricQueryResults metricQueryResults) throws Exception {
    final long metricTimestamp = System.currentTimeMillis() / 1000L;
    Socket socket = new Socket(InetAddress.getByName(address), port);
    BufferedWriter writer =
        new BufferedWriter(new OutputStreamWriter(socket.getOutputStream(), charset));
    StringBuilder messagePayload = new StringBuilder();
    Iterable<MetricResult<Long>> counters = metricQueryResults.getCounters();
    Iterable<MetricResult<GaugeResult>> gauges = metricQueryResults.getGauges();
    Iterable<MetricResult<DistributionResult>> distributions =
        metricQueryResults.getDistributions();
    for (MetricResult<Long> counter : counters) {
        messagePayload.append(new CounterMetricMessage(counter, "value", metricTimestamp).toString());
    }
    for (MetricResult<GaugeResult> gauge : gauges) {
        messagePayload.append(new GaugeMetricMessage(gauge, "value").toString());
    }
    // Each distribution is flattened into five messages, one per statistic.
    for (MetricResult<DistributionResult> distribution : distributions) {
        messagePayload.append(new DistributionMetricMessage(distribution, "min", metricTimestamp).toString());
        messagePayload.append(new DistributionMetricMessage(distribution, "max", metricTimestamp).toString());
        messagePayload.append(new DistributionMetricMessage(distribution, "count", metricTimestamp).toString());
        messagePayload.append(new DistributionMetricMessage(distribution, "sum", metricTimestamp).toString());
        messagePayload.append(new DistributionMetricMessage(distribution, "mean", metricTimestamp).toString());
    }
    writer.write(messagePayload.toString());
    writer.flush();
    writer.close();
    socket.close();
}
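For context, Graphite's plaintext protocol expects one line per sample in the form "<metric.path> <value> <timestamp>". The CounterMetricMessage, GaugeMetricMessage and DistributionMetricMessage helpers used above presumably render lines of that shape; the helper below is only a hand-rolled sketch of the wire format, not Beam API.

// Illustrative only: one Graphite plaintext line, terminated by a newline.
static String graphiteLine(String metricPath, long value, long timestampSeconds) {
    return metricPath + " " + value + " " + timestampSeconds + "\n";
}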
Use of org.apache.beam.sdk.metrics.DistributionResult in project beam by apache.
The class PortableMetrics, method convertDistributionMonitoringInfoToDistribution.
private static MetricResult<DistributionResult> convertDistributionMonitoringInfoToDistribution(
        MetricsApi.MonitoringInfo monitoringInfo) {
    Map<String, String> labelsMap = monitoringInfo.getLabelsMap();
    MetricKey key =
        MetricKey.create(
            labelsMap.get(STEP_NAME_LABEL),
            MetricName.named(labelsMap.get(NAMESPACE_LABEL), labelsMap.get(METRIC_NAME_LABEL)));
    DistributionData data = decodeInt64Distribution(monitoringInfo.getPayload());
    DistributionResult result =
        DistributionResult.create(data.sum(), data.count(), data.min(), data.max());
    return MetricResult.create(key, false, result);
}
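A hedged round-trip sketch of the payload handling, assuming MonitoringInfoEncodings supplies an encodeInt64Distribution counterpart to the decodeInt64Distribution call used above; values follow the (sum, count, min, max) order on both DistributionData and DistributionResult.

// Assumed counterpart encoder; only decodeInt64Distribution appears in the snippet above.
DistributionData data = DistributionData.create(30, 10, 1, 5); // sum, count, min, max
ByteString payload = MonitoringInfoEncodings.encodeInt64Distribution(data);
DistributionData decoded = MonitoringInfoEncodings.decodeInt64Distribution(payload);
DistributionResult result =
    DistributionResult.create(decoded.sum(), decoded.count(), decoded.min(), decoded.max());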