use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class TestFrameworkTestRun method verifyMapperJobOutput.
private void verifyMapperJobOutput(Class<?> appClass,
                                   DataSetManager<KeyValueTable> outTableManager) throws Exception {
  KeyValueTable outputTable = outTableManager.get();
  Assert.assertEquals("world", Bytes.toString(outputTable.read("hello")));

  // Verify dataset metrics
  String readCountName = "system." + Constants.Metrics.Name.Dataset.READ_COUNT;
  String writeCountName = "system." + Constants.Metrics.Name.Dataset.WRITE_COUNT;
  Collection<MetricTimeSeries> metrics = getMetricsManager().query(new MetricDataQuery(
    0, System.currentTimeMillis() / 1000, Integer.MAX_VALUE,
    ImmutableMap.of(readCountName, AggregationFunction.SUM,
                    writeCountName, AggregationFunction.SUM),
    ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                    Constants.Metrics.Tag.APP, appClass.getSimpleName(),
                    Constants.Metrics.Tag.MAPREDUCE, DatasetWithMRApp.MAPREDUCE_PROGRAM),
    ImmutableList.<String>of()));

  // Transform the collection of metrics into a map from metrics name to aggregated sum
  Map<String, Long> aggs = Maps.transformEntries(
    Maps.uniqueIndex(metrics, new Function<MetricTimeSeries, String>() {
      @Override
      public String apply(MetricTimeSeries input) {
        return input.getMetricName();
      }
    }),
    new Maps.EntryTransformer<String, MetricTimeSeries, Long>() {
      @Override
      public Long transformEntry(String key, MetricTimeSeries value) {
        Preconditions.checkArgument(value.getTimeValues().size() == 1,
                                    "Expected one value for aggregated sum for metrics %s", key);
        return value.getTimeValues().get(0).getValue();
      }
    });

  Assert.assertEquals(Long.valueOf(1), aggs.get(readCountName));
  Assert.assertEquals(Long.valueOf(1), aggs.get(writeCountName));
}
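The Guava Function/EntryTransformer pair above can be written more compactly with Java 8 streams. The following is only an illustrative sketch, not part of the CDAP test; it assumes the same metrics collection and imports plus java.util.stream.Collectors, and relies on the MetricTimeSeries.getMetricName() and getTimeValues() accessors used above.

  // Sketch: collect each series' single aggregated value into a metric-name -> sum map
  Map<String, Long> aggs = metrics.stream()
    .collect(Collectors.toMap(
      MetricTimeSeries::getMetricName,
      series -> {
        List<TimeValue> timeValues = series.getTimeValues();
        Preconditions.checkArgument(timeValues.size() == 1,
                                    "Expected one value for aggregated sum for metrics %s",
                                    series.getMetricName());
        return timeValues.get(0).getValue();
      }));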
use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class MetricsAdminSubscriberServiceTest method test.
@Test
public void test() throws Exception {
  MetricsAdminSubscriberService adminService = injector.getInstance(MetricsAdminSubscriberService.class);
  adminService.startAndWait();

  // Publish some metrics
  MetricsContext metricsContext = metricsCollectionService.getContext(
    Collections.singletonMap(Constants.Metrics.Tag.NAMESPACE, NamespaceId.SYSTEM.getNamespace()));
  metricsContext.increment("test.increment", 10L);
  metricsContext.gauge("test.gauge", 20L);

  MetricsSystemClient systemClient = injector.getInstance(RemoteMetricsSystemClient.class);

  // Search for metric names
  Tasks.waitFor(true, () -> {
    Set<String> names = new HashSet<>(systemClient.search(metricsContext.getTags()));
    return names.contains("system.test.increment") && names.contains("system.test.gauge");
  }, 10, TimeUnit.SECONDS, 1, TimeUnit.SECONDS);

  // Query for metric values
  Tasks.waitFor(true, () -> {
    Collection<MetricTimeSeries> values = systemClient.query(
      metricsContext.getTags(), Arrays.asList("system.test.increment", "system.test.gauge"));
    // Find and match the values for the increment and gauge
    boolean incMatched = values.stream()
      .filter(timeSeries -> timeSeries.getMetricName().equals("system.test.increment"))
      .flatMap(timeSeries -> timeSeries.getTimeValues().stream())
      .findFirst()
      .filter(timeValue -> timeValue.getValue() == 10L)
      .isPresent();
    boolean gaugeMatched = values.stream()
      .filter(timeSeries -> timeSeries.getMetricName().equals("system.test.gauge"))
      .flatMap(timeSeries -> timeSeries.getTimeValues().stream())
      .findFirst()
      .filter(timeValue -> timeValue.getValue() == 20L)
      .isPresent();
    return incMatched && gaugeMatched;
  }, 10, TimeUnit.SECONDS, 1, TimeUnit.SECONDS);

  // Emit more metrics
  metricsContext.increment("test.increment", 40L);
  metricsContext.gauge("test.gauge", 40L);

  // Query again. Should see the latest aggregates: the increment accumulates (10 + 40 = 50),
  // while the gauge keeps only the latest value (40)
  Tasks.waitFor(true, () -> {
    Collection<MetricTimeSeries> values = systemClient.query(
      metricsContext.getTags(), Arrays.asList("system.test.increment", "system.test.gauge"));
    boolean incMatched = values.stream()
      .filter(timeSeries -> timeSeries.getMetricName().equals("system.test.increment"))
      .flatMap(timeSeries -> timeSeries.getTimeValues().stream())
      .findFirst()
      .filter(timeValue -> timeValue.getValue() == 50L)
      .isPresent();
    boolean gaugeMatched = values.stream()
      .filter(timeSeries -> timeSeries.getMetricName().equals("system.test.gauge"))
      .flatMap(timeSeries -> timeSeries.getTimeValues().stream())
      .findFirst()
      .filter(timeValue -> timeValue.getValue() == 40L)
      .isPresent();
    return incMatched && gaugeMatched;
  }, 10, TimeUnit.SECONDS, 1, TimeUnit.SECONDS);

  // Delete the metrics under the context; the check below expects both the increment
  // and the gauge to be gone afterwards
  systemClient.delete(new MetricDeleteQuery(0, Integer.MAX_VALUE, Collections.emptySet(),
                                            metricsContext.getTags(),
                                            new ArrayList<>(metricsContext.getTags().keySet())));

  Tasks.waitFor(true, () -> {
    Collection<MetricTimeSeries> values = systemClient.query(
      metricsContext.getTags(), Arrays.asList("system.test.increment", "system.test.gauge"));
    // Neither the increment nor the gauge should be present after the delete
    boolean foundInc = values.stream()
      .anyMatch(timeSeries -> timeSeries.getMetricName().equals("system.test.increment"));
    boolean foundGauge = values.stream()
      .anyMatch(timeSeries -> timeSeries.getMetricName().equals("system.test.gauge"));
    return !foundInc && !foundGauge;
  }, 1000, TimeUnit.SECONDS, 1, TimeUnit.SECONDS);

  adminService.stopAndWait();
}
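The value-matching checks in this test repeat the same stream pipeline four times. A small helper along these lines, a sketch built on the same MetricTimeSeries API and not part of the test, would factor it out:

  // Sketch of a helper: true if the queried collection contains a series with the given name
  // whose first time value equals the expected amount
  private static boolean hasValue(Collection<MetricTimeSeries> values, String metricName, long expected) {
    return values.stream()
      .filter(timeSeries -> timeSeries.getMetricName().equals(metricName))
      .flatMap(timeSeries -> timeSeries.getTimeValues().stream())
      .findFirst()
      .filter(timeValue -> timeValue.getValue() == expected)
      .isPresent();
  }

  // Hypothetical usage inside the waitFor lambdas above:
  // return hasValue(values, "system.test.increment", 50L) && hasValue(values, "system.test.gauge", 40L);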
use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class MetricsProcessorServiceTest method assertMetricsResult.
private void assertMetricsResult(MetricStore metricStore, Map<String, String> metricsContext,
                                 Map<String, Long> expected) {
  for (Map.Entry<String, Long> metric : expected.entrySet()) {
    Collection<MetricTimeSeries> queryResult = metricStore.query(
      new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metric.getKey(),
                          AggregationFunction.SUM, metricsContext, ImmutableList.<String>of()));
    MetricTimeSeries timeSeries = Iterables.getOnlyElement(queryResult);
    List<TimeValue> timeValues = timeSeries.getTimeValues();
    TimeValue timeValue = Iterables.getOnlyElement(timeValues);
    Assert.assertEquals(String.format("Actual value of metric: %s does not match expected", metric.getKey()),
                        metric.getValue().longValue(), timeValue.getValue());
  }
}
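A call site would look roughly like the following sketch; the tag values and expected counts are hypothetical placeholders, reusing the dataset read/write metric names from the first example above:

  // Hypothetical usage: tag values and expected counts are placeholders
  Map<String, String> metricsContext = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "MyApp");
  assertMetricsResult(metricStore, metricsContext, ImmutableMap.of(
    "system." + Constants.Metrics.Name.Dataset.READ_COUNT, 1L,
    "system." + Constants.Metrics.Name.Dataset.WRITE_COUNT, 1L));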
use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class WorkflowStatsSLAHttpHandler method getSparkDetails.
private Map<String, Long> getSparkDetails(ProgramId sparkProgram, String runId) throws IOException {
  Map<String, String> context = new HashMap<>();
  context.put(Constants.Metrics.Tag.NAMESPACE, sparkProgram.getNamespace());
  context.put(Constants.Metrics.Tag.APP, sparkProgram.getApplication());
  context.put(Constants.Metrics.Tag.SPARK, sparkProgram.getProgram());
  context.put(Constants.Metrics.Tag.RUN_ID, runId);

  Collection<String> metricNames = metricsSystemClient.search(context);
  Collection<MetricTimeSeries> queryResult = metricsSystemClient.query(context, metricNames);

  Map<String, Long> overallResult = new HashMap<>();
  for (MetricTimeSeries timeSeries : queryResult) {
    overallResult.put(timeSeries.getMetricName(), timeSeries.getTimeValues().get(0).getValue());
  }
  return overallResult;
}
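getSparkDetails assumes every returned series carries at least one time value (getTimeValues().get(0)). A defensive variant of the same loop, sketched here with streams and not part of the handler, would simply skip empty series:

  // Sketch only: same metric-name -> first-value map, but skipping series with no time values
  Map<String, Long> overallResult = queryResult.stream()
    .filter(timeSeries -> !timeSeries.getTimeValues().isEmpty())
    .collect(Collectors.toMap(MetricTimeSeries::getMetricName,
                              timeSeries -> timeSeries.getTimeValues().get(0).getValue()));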
use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class ProfileMetricServiceTest method getMetric.
private long getMetric(MetricStore metricStore, ProgramRunId programRunId, ProfileId profileId,
                       String metricName) {
  Map<String, String> tags = ImmutableMap.<String, String>builder()
    .put(Constants.Metrics.Tag.PROFILE_SCOPE, profileId.getScope().name())
    .put(Constants.Metrics.Tag.PROFILE, profileId.getProfile())
    .put(Constants.Metrics.Tag.NAMESPACE, programRunId.getNamespace())
    .put(Constants.Metrics.Tag.PROGRAM_TYPE, programRunId.getType().getPrettyName())
    .put(Constants.Metrics.Tag.APP, programRunId.getApplication())
    .put(Constants.Metrics.Tag.PROGRAM, programRunId.getProgram())
    .build();
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> result = metricStore.query(query);
  if (result.isEmpty()) {
    return 0;
  }
  List<TimeValue> timeValues = result.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  return timeValues.get(0).getValue();
}
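A hypothetical call site, with the metric name and expected value as placeholders, shows how the helper feeds an assertion:

  // Hypothetical usage: metric name and expected value are placeholders
  long metricValue = getMetric(metricStore, programRunId, profileId, "some.profile.metric");
  Assert.assertEquals(5L, metricValue);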