Use of co.cask.cdap.api.dataset.lib.cube.TimeSeries in the cdap project by caskdata.
The class DefaultCube, method convertToQueryResult.
private Collection<TimeSeries> convertToQueryResult(CubeQuery query,
                                                    Table<Map<String, String>, String, Map<Long, Long>> resultTable) {
  // Transforms the raw result table (dimension values -> measure name -> ts -> value)
  // into a list of TimeSeries, one per (grouping, measure) pair.
  List<TimeSeries> series = Lists.newArrayList();
  for (Map.Entry<Map<String, String>, Map<String, Map<Long, Long>>> grouping : resultTable.rowMap().entrySet()) {
    for (Map.Entry<String, Map<Long, Long>> measure : grouping.getValue().entrySet()) {
      // collect the raw points and order them chronologically before interpolating
      List<TimeValue> rawPoints = Lists.newArrayList();
      for (Map.Entry<Long, Long> point : measure.getValue().entrySet()) {
        rawPoints.add(new TimeValue(point.getKey(), point.getValue()));
      }
      Collections.sort(rawPoints);
      PeekingIterator<TimeValue> interpolated = Iterators.peekingIterator(
        new TimeSeriesInterpolator(rawPoints, query.getInterpolator(), query.getResolution()).iterator());
      // copy out at most query.getLimit() interpolated points
      List<TimeValue> points = Lists.newArrayList();
      int taken = 0;
      while (interpolated.hasNext()) {
        TimeValue next = interpolated.next();
        points.add(new TimeValue(next.getTimestamp(), next.getValue()));
        if (++taken >= query.getLimit()) {
          break;
        }
      }
      series.add(new TimeSeries(measure.getKey(), grouping.getKey(), points));
    }
  }
  return series;
}
Use of co.cask.cdap.api.dataset.lib.cube.TimeSeries in the cdap project by caskdata.
The class DefaultCube, method query.
@Override
public Collection<TimeSeries> query(CubeQuery query) {
  /*
   * Example query: "dataset read ops for app per dataset", i.e.
   *   SELECT count('read.ops')                    << measure name and type
   *   FROM aggregation1.1min_resolution           << aggregation and resolution
   *   GROUP BY dataset                            << groupByDimensions
   *   WHERE namespace='ns1' AND app='myApp' AND
   *         program='myFlow' AND                  << dimensionValues
   *         ts>=1423370200 AND ts<1423398198      << startTs and endTs
   *   LIMIT 100                                   << limit
   *
   * Execution:
   * 1) (optional, when the query does not name an aggregation) pick the aggregation to read from.
   *    It must contain the dimensions 'namespace', 'app', 'program' and 'dataset'. Ideally (to keep
   *    the scan range small) 'dataset' is at the end, the other dimensions are as close to the
   *    beginning as possible, and as few other "unspecified" dimensions as possible are present.
   *    Say we found the aggregation: 'namespace', 'app', 'program', 'instance', 'dataset'.
   * 2) build a scan over that aggregation. Dimensions the aggregation has but the query leaves
   *    unspecified are scanned as "any":
   *    'namespace'='ns1', 'app'='myApp', 'program'='myFlow', 'instance'=*, 'dataset'=*
   *    plus the specified measure: 'measureName'='read.ops', 'measureType'='COUNTER'.
   * 3) while scanning, build a table: dimension values -> time -> value, applying the measure type
   *    as the aggregate function where needed.
   */
  incrementMetric("cube.query.request.count", 1);
  if (!resolutionToFactTable.containsKey(query.getResolution())) {
    incrementMetric("cube.query.request.failure.count", 1);
    throw new IllegalArgumentException("There's no data aggregated for specified resolution to satisfy the query: " + query.toString());
  }
  // 1) resolve which aggregation to read from
  String aggregationName;
  Aggregation aggregation;
  if (query.getAggregation() == null) {
    ImmutablePair<String, Aggregation> found = findAggregation(query);
    if (found == null) {
      incrementMetric("cube.query.request.failure.count", 1);
      throw new IllegalArgumentException("There's no data aggregated for specified dimensions " + "to satisfy the query: " + query.toString());
    }
    aggregationName = found.getFirst();
    aggregation = found.getSecond();
  } else {
    aggregationName = query.getAggregation();
    aggregation = aggregations.get(query.getAggregation());
    if (aggregation == null) {
      incrementMetric("cube.query.request.failure.count", 1);
      throw new IllegalArgumentException(String.format("Specified aggregation %s is not found in cube aggregations: %s", query.getAggregation(), aggregations.keySet().toString()));
    }
  }
  // record which pre-aggregated view and which resolution served this query
  incrementMetric("cube.query.agg." + aggregationName + ".count", 1);
  incrementMetric("cube.query.res." + query.getResolution() + ".count", 1);
  // 2) build the scan; dimensions absent from the query are set to null, which means "any"
  List<DimensionValue> scanDimensions = Lists.newArrayList();
  for (String dimensionName : aggregation.getDimensionNames()) {
    scanDimensions.add(new DimensionValue(dimensionName, query.getDimensionValues().get(dimensionName)));
  }
  FactScan factScan = new FactScan(query.getStartTs(), query.getEndTs(),
                                   query.getMeasurements().keySet(), scanDimensions);
  // 3) execute the scan and convert the raw table into time series
  FactTable factTable = resolutionToFactTable.get(query.getResolution());
  FactScanner scanner = factTable.scan(factScan);
  Table<Map<String, String>, String, Map<Long, Long>> resultTable = getTimeSeries(query, scanner);
  incrementMetric("cube.query.request.success.count", 1);
  incrementMetric("cube.query.result.size", resultTable.size());
  Collection<TimeSeries> series = convertToQueryResult(query, resultTable);
  incrementMetric("cube.query.result.timeseries.count", series.size());
  return series;
}
Use of co.cask.cdap.api.dataset.lib.cube.TimeSeries in the cdap project by caskdata.
The class AbstractCubeTest, method verifyCountQuery.
// Runs the given count query against the cube and asserts the result matches the
// expected time series (same size, and every returned series is among the expected).
private void verifyCountQuery(Cube cube, String aggregation, long startTs, long endTs, int resolution, String measureName, AggregationFunction aggFunction, Map<String, String> dimValues, List<String> groupByDims, Collection<TimeSeries> expected, Interpolator interpolator) throws Exception {
  CubeQuery query = CubeQuery.builder()
    .select().measurement(measureName, aggFunction)
    .from(aggregation).resolution(resolution, TimeUnit.SECONDS)
    .where().dimensions(dimValues).timeRange(startTs, endTs)
    .groupBy().dimensions(groupByDims)
    .limit(Integer.MAX_VALUE)
    .interpolator(interpolator)
    .build();
  Collection<TimeSeries> actual = cube.query(query);
  // build the mismatch message once; the two original format calls produce the same string
  String mismatch = String.format("expected: %s, found: %s", expected, actual);
  Assert.assertEquals(mismatch, expected.size(), actual.size());
  Assert.assertTrue(mismatch, expected.containsAll(actual));
}
Use of co.cask.cdap.api.dataset.lib.cube.TimeSeries in the cdap project by caskdata.
The class AbstractCubeTest, method testIncrements.
@Test
public void testIncrements() throws Exception {
  // two aggregations: one keyed by dim1 only, one by (dim1, dim2)
  Aggregation byDim1 = new DefaultAggregation(ImmutableList.of("dim1"));
  Aggregation byDim1AndDim2 = new DefaultAggregation(ImmutableList.of("dim1", "dim2"));
  int fineResolution = 1;
  int coarseResolution = 100;
  Cube cube = getCube("myIncCube", new int[] { fineResolution, coarseResolution },
                      ImmutableMap.of("agg1", byDim1, "agg2", byDim1AndDim2));
  // write increments spread over both dimension combinations and resolutions
  writeInc(cube, "metric1", 1, 1, "1", "1");
  writeInc(cube, "metric1", 1, 2, "2", "1");
  writeInc(cube, "metric1", 1, 3, "1", "2");
  writeInc(cube, "metric2", 1, 4, "1", "1");
  writeInc(cube, "metric1", 1, 5, "1", "2");
  writeInc(cube, "metric1", 10, 6, "1", "1");
  writeInc(cube, "metric1", 101, 7, "1", "1");
  // query back: agg1 sums across dim2, agg2 isolates (dim1, dim2)=(1, 1);
  // the coarse resolution rolls points up into 100-second buckets
  verifyCountQuery(cube, "agg1", 0, 150, fineResolution, "metric1", AggregationFunction.SUM,
                   ImmutableMap.of("dim1", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(1, 9, 10, 6, 101, 7))));
  verifyCountQuery(cube, "agg1", 0, 150, coarseResolution, "metric1", AggregationFunction.SUM,
                   ImmutableMap.of("dim1", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(0, 15, 100, 7))));
  verifyCountQuery(cube, "agg2", 0, 150, fineResolution, "metric1", AggregationFunction.SUM,
                   ImmutableMap.of("dim1", "1", "dim2", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(1, 1, 10, 6, 101, 7))));
  verifyCountQuery(cube, "agg2", 0, 150, coarseResolution, "metric1", AggregationFunction.SUM,
                   ImmutableMap.of("dim1", "1", "dim2", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(0, 7, 100, 7))));
}
Use of co.cask.cdap.api.dataset.lib.cube.TimeSeries in the cdap project by caskdata.
The class AbstractCubeTest, method testGauges.
@Test
public void testGauges() throws Exception {
  // two aggregations: one keyed by dim1 only, one by (dim1, dim2)
  Aggregation byDim1 = new DefaultAggregation(ImmutableList.of("dim1"));
  Aggregation byDim1AndDim2 = new DefaultAggregation(ImmutableList.of("dim1", "dim2"));
  int fineResolution = 1;
  int coarseResolution = 100;
  Cube cube = getCube("myGaugeCube", new int[] { fineResolution, coarseResolution },
                      ImmutableMap.of("agg1", byDim1, "agg2", byDim1AndDim2));
  // write gauge values spread over both dimension combinations and resolutions
  writeGauge(cube, "metric1", 1, 1, "1", "1");
  writeGauge(cube, "metric1", 1, 2, "2", "1");
  writeGauge(cube, "metric1", 1, 3, "1", "2");
  writeGauge(cube, "metric2", 1, 4, "1", "1");
  writeGauge(cube, "metric1", 1, 5, "1", "2");
  writeGauge(cube, "metric1", 10, 6, "1", "1");
  writeGauge(cube, "metric1", 101, 7, "1", "1");
  // query back with LATEST: within each bucket the last written value wins
  verifyCountQuery(cube, "agg1", 0, 150, fineResolution, "metric1", AggregationFunction.LATEST,
                   ImmutableMap.of("dim1", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(1, 5, 10, 6, 101, 7))));
  verifyCountQuery(cube, "agg1", 0, 150, coarseResolution, "metric1", AggregationFunction.LATEST,
                   ImmutableMap.of("dim1", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(0, 6, 100, 7))));
  verifyCountQuery(cube, "agg2", 0, 150, fineResolution, "metric1", AggregationFunction.LATEST,
                   ImmutableMap.of("dim1", "1", "dim2", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(1, 1, 10, 6, 101, 7))));
  verifyCountQuery(cube, "agg2", 0, 150, coarseResolution, "metric1", AggregationFunction.LATEST,
                   ImmutableMap.of("dim1", "1", "dim2", "1"), new ArrayList<String>(),
                   ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(),
                                                   timeValues(0, 6, 100, 7))));
}
Aggregations