Use of io.cdap.cdap.api.dataset.lib.cube.AggregationOption in project cdap by caskdata.
The class MetricsQueryHelper, method setTimeRangeInQueryRequest.
/**
 * Populates the time-range portion of a {@link MetricQueryRequest} from parsed query
 * parameters. Derives start/end/count/resolution/interpolator from the supplied params,
 * then configures the request either as an aggregate (totals) query or a time-series query.
 *
 * @param request the query request to mutate
 * @param queryTimeParams raw query parameters keyed by parameter name; only the first value
 *                        of each parameter is consulted
 * @throws IllegalArgumentException if a non-aggregate query supplies fewer than two of
 *                                  count/start/end
 */
private void setTimeRangeInQueryRequest(MetricQueryRequest request, Map<String, List<String>> queryTimeParams) {
  Long startTs = null;
  if (queryTimeParams.containsKey(PARAM_START_TIME)) {
    startTs = TimeMathParser.parseTimeInSeconds(queryTimeParams.get(PARAM_START_TIME).get(0));
  }
  Long endTs = null;
  if (queryTimeParams.containsKey(PARAM_END_TIME)) {
    endTs = TimeMathParser.parseTimeInSeconds(queryTimeParams.get(PARAM_END_TIME).get(0));
  }
  AggregationOption option = AggregationOption.FALSE;
  if (queryTimeParams.containsKey(PARAM_AGGREGATE)) {
    option = AggregationOption.valueOf(queryTimeParams.get(PARAM_AGGREGATE).get(0).toUpperCase());
  }
  // A query with no time bounds at all is implicitly an aggregate (totals) query.
  boolean aggregate = option.equals(AggregationOption.TRUE) || (startTs == null && endTs == null);
  String resolutionParam =
      queryTimeParams.containsKey(PARAM_RESOLUTION) ? queryTimeParams.get(PARAM_RESOLUTION).get(0) : null;
  Integer resolution = getResolution(resolutionParam, startTs, endTs);
  Interpolator interpolator = null;
  if (queryTimeParams.containsKey(PARAM_INTERPOLATE)) {
    long maxGap = queryTimeParams.containsKey(PARAM_MAX_INTERPOLATE_GAP)
        ? Long.parseLong(queryTimeParams.get(PARAM_MAX_INTERPOLATE_GAP).get(0))
        : Long.MAX_VALUE;
    interpolator = getInterpolator(queryTimeParams.get(PARAM_INTERPOLATE).get(0), maxGap);
  }
  Integer count = null;
  if (queryTimeParams.containsKey(PARAM_COUNT)) {
    count = Integer.valueOf(queryTimeParams.get(PARAM_COUNT).get(0));
    // With an explicit count, derive whichever of the two time bounds is missing.
    if (startTs == null && endTs != null) {
      startTs = endTs - count * resolution;
    } else if (startTs != null && endTs == null) {
      endTs = startTs + count * resolution;
    }
  } else if (startTs != null && endTs != null) {
    // Both bounds given: count is the number of resolution-aligned buckets, inclusive of both ends.
    count = (int) (((endTs / resolution * resolution) - (startTs / resolution * resolution)) / resolution + 1);
  } else if (!aggregate) {
    throw new IllegalArgumentException("At least two of count/start/end parameters are required for time-range queries ");
  }
  if (aggregate) {
    // Sentinel time range (0, 0) with MAX_VALUE resolution marks a totals query downstream.
    request.setTimeRange(0L, 0L, 1, Integer.MAX_VALUE, null, option);
  } else {
    request.setTimeRange(startTs, endTs, count, resolution, interpolator, option);
  }
}
Use of io.cdap.cdap.api.dataset.lib.cube.AggregationOption in project cdap by caskdata.
The class MetricsQueryHelper, method executeQuery.
/**
 * Runs the given metric query against the metric store and decorates the raw result into a
 * {@link MetricQueryResult}.
 *
 * @param queryRequest the fully-populated query request (metrics, tags, group-by, time range)
 * @return the decorated query result
 * @throws IllegalArgumentException if no metrics were supplied or the time-range count is not positive
 * @throws Exception if the underlying store query fails
 */
private MetricQueryResult executeQuery(MetricQueryRequest queryRequest) throws Exception {
  if (queryRequest.getMetrics().isEmpty()) {
    throw new IllegalArgumentException("Missing metrics parameter in the query");
  }
  MetricQueryRequest.TimeRange timeRange = queryRequest.getTimeRange();
  if (timeRange.getCount() <= 0) {
    throw new IllegalArgumentException("Invalid metrics aggregation request, the limit must be greater than 0");
  }
  AggregationOption aggregation = timeRange.getAggregation();
  Map<String, String> sliceByTags = humanToTagNames(transformTagMap(queryRequest.getTags()));
  MetricDataQuery query = new MetricDataQuery(timeRange.getStart(), timeRange.getEnd(),
      timeRange.getResolutionInSeconds(), timeRange.getCount(), toMetrics(queryRequest.getMetrics()),
      sliceByTags, transformGroupByTags(queryRequest.getGroupBy()), aggregation, timeRange.getInterpolate());
  Collection<MetricTimeSeries> series = metricStore.query(query);
  long endTime = timeRange.getEnd();
  // The (MAX_VALUE resolution, end == 0) combination marks an aggregate (totals) query,
  // which carries no real end time; report the query time (now) instead.
  if (timeRange.getResolutionInSeconds() == Integer.MAX_VALUE && endTime == 0) {
    endTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
  }
  return decorate(series, timeRange.getStart(), endTime, timeRange.getResolutionInSeconds());
}
Use of io.cdap.cdap.api.dataset.lib.cube.AggregationOption in project cdap by caskdata.
The class DefaultCube, method convertToQueryResult.
/**
 * Converts the raw per-dimension/per-measure result table of a {@link CubeQuery} into a list of
 * {@link TimeSeries}, applying the query's limit and aggregation option.
 *
 * @param query the cube query that produced the result (supplies limit, aggregation option,
 *              interpolator and resolution)
 * @param resultTable rows keyed by group-by dimension values, columns keyed by measure name,
 *                    cells mapping timestamp to value
 * @return one {@link TimeSeries} per (grouping, measure) pair
 * @throws IllegalArgumentException if the query limit is not positive (the handler is expected
 *                                  to have rejected this already)
 */
private Collection<TimeSeries> convertToQueryResult(CubeQuery query, Table<Map<String, String>, String, Map<Long, Long>> resultTable) {
  List<TimeSeries> result = new ArrayList<>();
  // iterating each groupValue dimensions
  for (Map.Entry<Map<String, String>, Map<String, Map<Long, Long>>> row : resultTable.rowMap().entrySet()) {
    // iterating each measure
    for (Map.Entry<String, Map<Long, Long>> measureEntry : row.getValue().entrySet()) {
      // generating time series for a grouping and a measure
      List<TimeValue> timeValues = new ArrayList<>();
      for (Map.Entry<Long, Long> timeValue : measureEntry.getValue().entrySet()) {
        timeValues.add(new TimeValue(timeValue.getKey(), timeValue.getValue()));
      }
      Collections.sort(timeValues);
      List<TimeValue> resultTimeValues = new ArrayList<>();
      AggregationOption aggregationOption = query.getAggregationOption();
      // this should not happen in production, since the check has been made in the handler
      if (query.getLimit() <= 0) {
        // Message matches the check: a limit of 0 is just as invalid as a negative one.
        throw new IllegalArgumentException("The query limit must be greater than 0");
      }
      // Down-sample only for the partition-based options (LATEST and SUM) and only when there
      // are more data points than the limit allows.
      if (query.getLimit() < timeValues.size() && PARTITION_AGG_OPTIONS.contains(aggregationOption)) {
        int partitionSize = timeValues.size() / query.getLimit();
        int remainder = timeValues.size() % query.getLimit();
        // Drop the first 'remainder' data points so the rest splits into exactly 'limit'
        // partitions of equal size.
        for (List<TimeValue> interval : Iterables.partition(timeValues.subList(remainder, timeValues.size()), partitionSize)) {
          // for LATEST we only need to get the last data point in the interval
          if (aggregationOption.equals(AggregationOption.LATEST)) {
            resultTimeValues.add(interval.get(interval.size() - 1));
            continue;
          }
          // for SUM we want to sum up all the values in the interval, stamped with the
          // interval's last timestamp
          if (aggregationOption.equals(AggregationOption.SUM)) {
            long sum = interval.stream().mapToLong(TimeValue::getValue).sum();
            resultTimeValues.add(new TimeValue(interval.get(interval.size() - 1).getTimestamp(), sum));
          }
        }
      } else {
        // TODO: CDAP-15565 remove the interpolation logic since it is never maintained and adds huge complexity
        int count = 0;
        PeekingIterator<TimeValue> timeValueItor = Iterators.peekingIterator(
            new TimeSeriesInterpolator(timeValues, query.getInterpolator(), query.getResolution()).iterator());
        while (timeValueItor.hasNext()) {
          TimeValue timeValue = timeValueItor.next();
          resultTimeValues.add(new TimeValue(timeValue.getTimestamp(), timeValue.getValue()));
          // Honor the limit even on the interpolated path.
          if (++count >= query.getLimit()) {
            break;
          }
        }
      }
      result.add(new TimeSeries(measureEntry.getKey(), row.getKey(), resultTimeValues));
    }
  }
  return result;
}
Aggregations