Use of io.cdap.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.
From the class ProfileServiceTest, method getMetric.
/**
 * Looks up a single profile metric from the metric store.
 *
 * @param metricStore the store to query
 * @param programRunId run whose metric tags should be matched
 * @param profileId profile whose metric tags should be matched
 * @param metricName name of the metric to read
 * @return the first recorded value of the metric, or 0 if no series or no data points exist
 */
private long getMetric(MetricStore metricStore, ProgramRunId programRunId, ProfileId profileId, String metricName) {
  Map<String, String> metricTags = getMetricsTags(programRunId, profileId);
  // Aggregate over the full time range; resolution Integer.MAX_VALUE collapses everything into one bucket
  MetricDataQuery dataQuery =
    new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, metricTags, new ArrayList<>());
  Collection<MetricTimeSeries> series = metricStore.query(dataQuery);
  if (series.isEmpty()) {
    return 0;
  }
  List<TimeValue> points = series.iterator().next().getTimeValues();
  return points.isEmpty() ? 0 : points.get(0).getValue();
}
Use of io.cdap.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.
From the class AbstractCubeTest, method testMetricsAggregationOptionLatest.
@Test
public void testMetricsAggregationOptionLatest() throws Exception {
  // Aggregation over dim1/dim2/dim3, with dim1 required
  Aggregation agg = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableList.of("dim1"));
  int resolution = 1;
  Cube cube = getCube("testAggOptionLatest", new int[] { resolution }, ImmutableMap.of("agg", agg));
  Map<String, String> aggDims = new LinkedHashMap<>();
  aggDims.put("dim1", "tag1");
  aggDims.put("dim2", "tag2");
  aggDims.put("dim3", "tag3");
  // write 100 data points to agg, one per second, gauge value == timestamp
  for (int i = 1; i <= 100; i++) {
    writeGauge(cube, "metric1", i, i, aggDims);
  }
  // query for latest with limit 5: 100 points are split into 5 intervals of 20,
  // and LATEST keeps the last value of each interval: 20, 40, 60, 80, 100
  CubeQuery query = new CubeQuery(null, 0, 200, 1, 5, ImmutableMap.of("metric1", AggregationFunction.SUM), aggDims, Collections.emptyList(), AggregationOption.LATEST, null);
  List<TimeSeries> result = new ArrayList<>(cube.query(query));
  Assert.assertEquals(1, result.size());
  List<TimeValue> timeValues = result.get(0).getTimeValues();
  // Assert the size explicitly; otherwise an empty series would pass the loop below vacuously
  Assert.assertEquals(5, timeValues.size());
  for (int i = 0; i < timeValues.size(); i++) {
    Assert.assertEquals(20 * (i + 1), timeValues.get(i).getValue());
  }
}
Use of io.cdap.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.
From the class ProgramNotificationSubscriberServiceTest, method getMetric.
/**
 * Reads a single profile metric for the given program run from the metric store.
 *
 * @param metricStore the store to query
 * @param programRunId run whose namespace/type/app/program tags should be matched
 * @param profileId profile whose scope and name tags should be matched
 * @param additionalTags extra metric tags appended to the standard ones
 * @param metricName name of the metric to read
 * @return the first recorded value of the metric, or 0 if no series or no data points exist
 */
private long getMetric(MetricStore metricStore, ProgramRunId programRunId, ProfileId profileId, Map<String, String> additionalTags, String metricName) {
  Map<String, String> tags = ImmutableMap.<String, String>builder()
    .put(Constants.Metrics.Tag.PROFILE_SCOPE, profileId.getScope().name())
    .put(Constants.Metrics.Tag.PROFILE, profileId.getProfile())
    .put(Constants.Metrics.Tag.NAMESPACE, programRunId.getNamespace())
    .put(Constants.Metrics.Tag.PROGRAM_TYPE, programRunId.getType().getPrettyName())
    .put(Constants.Metrics.Tag.APP, programRunId.getApplication())
    .put(Constants.Metrics.Tag.PROGRAM, programRunId.getProgram())
    .putAll(additionalTags)
    .build();
  // Aggregate over the full time range; resolution Integer.MAX_VALUE collapses everything into one bucket
  MetricDataQuery query =
    new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> queryResult = metricStore.query(query);
  if (queryResult.isEmpty()) {
    return 0;
  }
  List<TimeValue> dataPoints = queryResult.iterator().next().getTimeValues();
  if (dataPoints.isEmpty()) {
    return 0;
  }
  return dataPoints.get(0).getValue();
}
Use of io.cdap.cdap.api.dataset.lib.cube.TimeValue in project cdap by cdapio.
From the class ProfileServiceTest, method getMetric.
/**
 * Fetches a single profile metric value from the metric store.
 *
 * @param metricStore the store to query
 * @param programRunId run whose metric tags should be matched
 * @param profileId profile whose metric tags should be matched
 * @param metricName name of the metric to read
 * @return the first recorded value of the metric, or 0 if nothing was recorded
 */
private long getMetric(MetricStore metricStore, ProgramRunId programRunId, ProfileId profileId, String metricName) {
  // Aggregate over the full time range; resolution Integer.MAX_VALUE collapses everything into one bucket
  MetricDataQuery dataQuery = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                                  AggregationFunction.SUM,
                                                  getMetricsTags(programRunId, profileId),
                                                  new ArrayList<>());
  Collection<MetricTimeSeries> queried = metricStore.query(dataQuery);
  if (!queried.isEmpty()) {
    List<TimeValue> dataPoints = queried.iterator().next().getTimeValues();
    if (!dataPoints.isEmpty()) {
      return dataPoints.get(0).getValue();
    }
  }
  return 0;
}
Use of io.cdap.cdap.api.dataset.lib.cube.TimeValue in project cdap by cdapio.
From the class DefaultCube, method convertToQueryResult.
/**
 * Converts the raw query result table into a collection of {@link TimeSeries}, applying the
 * query's limit and aggregation option.
 *
 * @param query the cube query, providing the limit, aggregation option, interpolator and resolution
 * @param resultTable rows keyed by grouping dimensions, columns keyed by measure name, cells
 *                    mapping timestamp to value
 * @return one {@link TimeSeries} per (grouping, measure) pair
 * @throws IllegalArgumentException if the query limit is not positive
 */
private Collection<TimeSeries> convertToQueryResult(CubeQuery query, Table<Map<String, String>, String, Map<Long, Long>> resultTable) {
  List<TimeSeries> result = new ArrayList<>();
  // iterating each groupValue dimensions
  for (Map.Entry<Map<String, String>, Map<String, Map<Long, Long>>> row : resultTable.rowMap().entrySet()) {
    // iterating each measure
    for (Map.Entry<String, Map<Long, Long>> measureEntry : row.getValue().entrySet()) {
      // generating time series for a grouping and a measure
      List<TimeValue> timeValues = new ArrayList<>();
      for (Map.Entry<Long, Long> timeValue : measureEntry.getValue().entrySet()) {
        timeValues.add(new TimeValue(timeValue.getKey(), timeValue.getValue()));
      }
      Collections.sort(timeValues);
      List<TimeValue> resultTimeValues = new ArrayList<>();
      AggregationOption aggregationOption = query.getAggregationOption();
      // this should not happen in production, since the check has been made in the handler
      if (query.getLimit() <= 0) {
        // the guard also rejects 0, so the message must say "greater than 0", not "less than 0"
        throw new IllegalArgumentException("The query limit must be greater than 0");
      }
      // option LATEST and SUM.
      if (query.getLimit() < timeValues.size() && PARTITION_AGG_OPTIONS.contains(aggregationOption)) {
        int partitionSize = timeValues.size() / query.getLimit();
        int remainder = timeValues.size() % query.getLimit();
        // drop the first 'remainder' data points so the rest splits into equal partitions
        for (List<TimeValue> interval : Iterables.partition(timeValues.subList(remainder, timeValues.size()), partitionSize)) {
          // for LATEST we only need to get the last data point in the interval
          if (aggregationOption.equals(AggregationOption.LATEST)) {
            resultTimeValues.add(interval.get(interval.size() - 1));
            continue;
          }
          // for SUM we want to sum up all the values in the interval
          if (aggregationOption.equals(AggregationOption.SUM)) {
            long sum = interval.stream().mapToLong(TimeValue::getValue).sum();
            resultTimeValues.add(new TimeValue(interval.get(interval.size() - 1).getTimestamp(), sum));
          }
        }
      } else {
        // TODO: CDAP-15565 remove the interpolation logic since it is never maintained and adds huge complexity
        int count = 0;
        PeekingIterator<TimeValue> timeValueItor = Iterators.peekingIterator(new TimeSeriesInterpolator(timeValues, query.getInterpolator(), query.getResolution()).iterator());
        while (timeValueItor.hasNext()) {
          TimeValue timeValue = timeValueItor.next();
          resultTimeValues.add(new TimeValue(timeValue.getTimestamp(), timeValue.getValue()));
          if (++count >= query.getLimit()) {
            break;
          }
        }
      }
      result.add(new TimeSeries(measureEntry.getKey(), row.getKey(), resultTimeValues));
    }
  }
  return result;
}
Aggregations