Use of io.cdap.cdap.api.dataset.lib.cube.CubeDeleteQuery in project cdap by caskdata.
In class AbstractCubeTest, method testBasics:
@Test
public void testBasics() throws Exception {
  Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"),
                                            ImmutableList.of("dim1", "dim2"));
  Aggregation agg2 = new DefaultAggregation(ImmutableList.of("dim1", "dim2"),
                                            ImmutableList.of("dim1"));
  int resolution = 1;
  Cube cube = getCube("myCube", new int[] { resolution }, ImmutableMap.of("agg1", agg1, "agg2", agg2));
  // write some data
  // NOTE: we mostly use different timestamps, as we are interested in checking that increments
  // are combined at query time, not only at persist time
  writeInc(cube, "metric1", 1, 1, "1", "1", "1");
  writeInc(cube, "metric1", 1, 1, "1", "1", "1");
  writeInc(cube, "metric1", 2, 2, null, "1", "1");
  writeInc(cube, "metric1", 3, 3, "1", "2", "1");
  writeInc(cube, "metric1", 3, 5, "1", "2", "3");
  writeInc(cube, "metric1", 3, 7, "2", "1", "1");
  writeInc(cube, "metric1", 4, 4, "1", null, "2");
  writeInc(cube, "metric1", 5, 5, null, null, "1");
  writeInc(cube, "metric1", 6, 6, "1", null, null);
  writeInc(cube, "metric1", 7, 3, "1", "1", null);
  // write using the BatchWritable API
  writeIncViaBatchWritable(cube, "metric1", 8, 2, null, "1", null);
  writeIncViaBatchWritable(cube, "metric1", 9, 1, null, null, null);
  // write in batch
  cube.add(ImmutableList.of(
      getFact("metric1", 10, 2, MeasureType.COUNTER, "1", "1", "1", "1"),
      getFact("metric1", 11, 3, MeasureType.COUNTER, "1", "1", "1", null),
      getFact("metric1", 12, 4, MeasureType.COUNTER, "2", "1", "1", "1"),
      getFact("metric1", 13, 5, MeasureType.COUNTER, null, null, null, "1")));
writeInc(cube, "metric2", 1, 1, "1", "1", "1");
// todo: do some write instead of increments - test those as well
// now let's query!
verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1"), ImmutableList.of("dim2"), ImmutableList.of(new TimeSeries("metric1", dimensionValues("dim2", "1"), timeValues(1, 2, 7, 3, 10, 2, 11, 3)), new TimeSeries("metric1", dimensionValues("dim2", "2"), timeValues(3, 8))));
verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 2, 10, 2, 11, 3))));
verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, new HashMap<String, String>(), ImmutableList.of("dim1"), ImmutableList.of(new TimeSeries("metric1", dimensionValues("dim1", "1"), timeValues(1, 2, 3, 8, 4, 4, 6, 6, 7, 3, 10, 2, 11, 3)), new TimeSeries("metric1", dimensionValues("dim1", "2"), timeValues(3, 7, 12, 4))));
verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim3", "3"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(3, 5))));
// test querying specific aggregations
verifyCountQuery(cube, "agg1", 0, 15, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 2, 3, 8, 7, 3, 10, 2, 11, 3))));
verifyCountQuery(cube, "agg2", 0, 15, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 2, 3, 8, 4, 4, 6, 6, 7, 3, 10, 2, 11, 3))));
// query with different agg functions
verifyCountQuery(cube, "agg1", 0, 15, resolution, "metric1", AggregationFunction.MAX, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 2, 3, 5, 7, 3, 10, 2, 11, 3))));
verifyCountQuery(cube, "agg1", 0, 15, resolution, "metric1", AggregationFunction.MIN, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 2, 3, 3, 7, 3, 10, 2, 11, 3))));
verifyCountQuery(cube, "agg1", 0, 15, resolution, "metric1", AggregationFunction.LATEST, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 2, 3, 5, 7, 3, 10, 2, 11, 3))));
  // delete cube data for "metric1" for dim1->1, dim2->1, dim3->1 for timestamps 1-8 and
  // check that data for other timestamps is still available
  Map<String, String> deleteTags = new LinkedHashMap<>();
  deleteTags.put("dim1", "1");
  deleteTags.put("dim2", "1");
  deleteTags.put("dim3", "1");
  // match only aggregations whose dimension list starts with the delete tag keys
  Predicate<List<String>> predicate =
      aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(deleteTags.keySet())) == 0;
  CubeDeleteQuery query = new CubeDeleteQuery(0, 8, resolution, deleteTags,
                                              Collections.singletonList("metric1"), predicate);
  cube.delete(query);
  verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM,
      ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), ImmutableList.<String>of(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<>(), timeValues(10, 2, 11, 3))));
  // delete cube data for "metric1" for dim1->1 and dim2->1, then check that scanning
  // for dim1->1 and dim2->1 comes back empty
  deleteTags.remove("dim3");
  query = new CubeDeleteQuery(0, 15, resolution, deleteTags, Collections.singletonList("metric1"), predicate);
  cube.delete(query);
  verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM,
      ImmutableMap.of("dim1", "1", "dim2", "1"), ImmutableList.<String>of(), ImmutableList.<TimeSeries>of());
}
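
The delete predicates above hinge on Collections.indexOfSubList returning 0 exactly when the delete tag keys form a prefix of an aggregation's dimension list: with keys (dim1, dim2, dim3) only agg1 matches, and once dim3 is removed both agg1 and agg2 match. A minimal standalone sketch of that prefix check (class name and values are illustrative, not part of the test):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class PrefixPredicateSketch {
  public static void main(String[] args) {
    List<String> deleteKeys = Arrays.asList("dim1", "dim2", "dim3");
    // agg1's dimension list starts with the delete keys, so it is matched
    System.out.println(Collections.indexOfSubList(Arrays.asList("dim1", "dim2", "dim3"), deleteKeys) == 0); // true
    // agg2's dimension list (dim1, dim2) cannot contain a 3-element sublist, so it is skipped
    System.out.println(Collections.indexOfSubList(Arrays.asList("dim1", "dim2"), deleteKeys) == 0); // false
  }
}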
Use of io.cdap.cdap.api.dataset.lib.cube.CubeDeleteQuery in project cdap by caskdata.
In class AbstractCubeTest, method testInterpolate:
@Test
public void testInterpolate() throws Exception {
  Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"),
                                            ImmutableList.of("dim1", "dim2", "dim3"));
  int resolution = 1;
  Cube cube = getCube("myInterpolatedCube", new int[] { resolution }, ImmutableMap.of("agg1", agg1));
  // test step interpolation
  long startTs = 1;
  long endTs = 10;
  writeInc(cube, "metric1", startTs, 5, "1", "1", "1");
  writeInc(cube, "metric1", endTs, 3, "1", "1", "1");
  List<TimeValue> expectedTimeValues = Lists.newArrayList();
  for (long i = startTs; i < endTs; i++) {
    expectedTimeValues.add(new TimeValue(i, 5));
  }
  expectedTimeValues.add(new TimeValue(endTs, 3));
  verifyCountQuery(cube, startTs, endTs, resolution, "metric1", AggregationFunction.SUM,
      ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), expectedTimeValues)),
      new Interpolators.Step());
  Map<String, String> deleteTags = new LinkedHashMap<>();
  deleteTags.put("dim1", "1");
  deleteTags.put("dim2", "1");
  deleteTags.put("dim3", "1");
  Predicate<List<String>> predicate =
      aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(deleteTags.keySet())) == 0;
  CubeDeleteQuery query = new CubeDeleteQuery(startTs, endTs, resolution, deleteTags,
                                              Collections.singletonList("metric1"), predicate);
  cube.delete(query);
  // test small-slope linear interpolation
  startTs = 1;
  endTs = 5;
  writeInc(cube, "metric1", startTs, 5, "1", "1", "1");
  writeInc(cube, "metric1", endTs, 3, "1", "1", "1");
  verifyCountQuery(cube, startTs, endTs, resolution, "metric1", AggregationFunction.SUM,
      ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 5, 2, 5, 3, 4, 4, 4, 5, 3))),
      new Interpolators.Linear());
  query = new CubeDeleteQuery(startTs, endTs, resolution, deleteTags, Collections.singletonList("metric1"), predicate);
  cube.delete(query);
  // test big-slope linear interpolation
  writeInc(cube, "metric1", startTs, 100, "1", "1", "1");
  writeInc(cube, "metric1", endTs, 500, "1", "1", "1");
  verifyCountQuery(cube, startTs, endTs, resolution, "metric1", AggregationFunction.SUM,
      ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 100, 2, 200, 3, 300, 4, 400, 5, 500))),
      new Interpolators.Linear());
  cube.delete(query);
  // test the limit on interpolation
  long limit = 20;
  writeInc(cube, "metric1", 0, 10, "1", "1", "1");
  writeInc(cube, "metric1", limit + 1, 50, "1", "1", "1");
  expectedTimeValues.clear();
  expectedTimeValues.add(new TimeValue(0, 10));
  for (long i = 1; i <= limit; i++) {
    expectedTimeValues.add(new TimeValue(i, 0));
  }
  expectedTimeValues.add(new TimeValue(limit + 1, 50));
  verifyCountQuery(cube, 0, 21, resolution, "metric1", AggregationFunction.SUM,
      ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), expectedTimeValues)),
      new Interpolators.Step(limit));
}
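
The expected series in the linear cases are consistent with straight-line interpolation between the two written points using truncating integer division; the sketch below reproduces timeValues(1, 5, 2, 5, 3, 4, 4, 4, 5, 3) from the small-slope case under that assumption (it is a standalone illustration, not CDAP's Interpolators.Linear implementation):

public class LinearInterpolationSketch {
  // value at ts on the line through (t1, v1) and (t2, v2), using Java's truncating long division
  static long interpolate(long ts, long t1, long v1, long t2, long v2) {
    return v1 + (v2 - v1) * (ts - t1) / (t2 - t1);
  }

  public static void main(String[] args) {
    for (long ts = 1; ts <= 5; ts++) {
      System.out.println(ts + " -> " + interpolate(ts, 1, 5, 5, 3)); // prints 5, 5, 4, 4, 3
    }
  }
}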
Use of io.cdap.cdap.api.dataset.lib.cube.CubeDeleteQuery in project cdap by caskdata.
In class DefaultMetricStore, method deleteMetricsBeforeTimestamp:
private void deleteMetricsBeforeTimestamp(long timestamp, int resolution) {
  CubeDeleteQuery query = new CubeDeleteQuery(0, timestamp, resolution, Collections.emptyMap(),
                                              Collections.emptySet(), strings -> true);
  cube.get().delete(query);
}
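
The empty dimension map, empty metric collection, and always-true predicate make this query match every series older than the timestamp. A hypothetical caller (the method name and retention logic are illustrative, not CDAP code) might use it for retention cleanup:

// hypothetical retention hook: drop everything older than retentionSeconds at one resolution
private void applyRetention(long retentionSeconds, int resolution) {
  long cutoffTs = System.currentTimeMillis() / 1000 - retentionSeconds;
  deleteMetricsBeforeTimestamp(cutoffTs, resolution);
}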
Use of io.cdap.cdap.api.dataset.lib.cube.CubeDeleteQuery in project cdap by caskdata.
In class AbstractCubeTest, method testMetricDeletion:
@Test
public void testMetricDeletion() throws Exception {
  // two aggregation groups with different dimension sets
  Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"),
                                            ImmutableList.of("dim1"));
  Aggregation agg2 = new DefaultAggregation(ImmutableList.of("dim1", "dim3"),
                                            ImmutableList.of("dim3"));
  int resolution = 1;
  Cube cube = getCube("testDeletion", new int[] { resolution }, ImmutableMap.of("agg1", agg1, "agg2", agg2));
  Map<String, String> agg1Dims = new LinkedHashMap<>();
  agg1Dims.put("dim1", "1");
  agg1Dims.put("dim2", "1");
  agg1Dims.put("dim3", "1");
  Map<String, String> agg2Dims = new LinkedHashMap<>();
  agg2Dims.put("dim1", "1");
  agg2Dims.put("dim3", "1");
  // write some data
  writeInc(cube, "metric1", 1, 1, agg1Dims);
  writeInc(cube, "metric2", 3, 3, agg2Dims);
  // verify the data is there
  verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, agg1Dims, ImmutableList.of(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<>(), timeValues(1, 1))));
  verifyCountQuery(cube, 0, 15, resolution, "metric2", AggregationFunction.SUM, agg2Dims, ImmutableList.of(),
      ImmutableList.of(new TimeSeries("metric2", new HashMap<>(), timeValues(3, 3))));
  // delete metrics from agg2
  Predicate<List<String>> predicate =
      aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(agg2Dims.keySet())) == 0;
  CubeDeleteQuery query = new CubeDeleteQuery(0, 15, resolution, agg2Dims, Collections.emptySet(), predicate);
  cube.delete(query);
  // agg1 data should still be there
  verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, agg1Dims, ImmutableList.of(),
      ImmutableList.of(new TimeSeries("metric1", new HashMap<>(), timeValues(1, 1))));
  // agg2 data should be deleted
  verifyCountQuery(cube, 0, 15, resolution, "metric2", AggregationFunction.SUM, agg2Dims, ImmutableList.of(),
      ImmutableList.of());
  // now delete the metrics remaining in agg1
  predicate = aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(agg1Dims.keySet())) == 0;
  query = new CubeDeleteQuery(0, 15, resolution, agg1Dims, Collections.emptySet(), predicate);
  cube.delete(query);
  verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, agg1Dims, ImmutableList.of(),
      ImmutableList.of());
}
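
For contrast with the prefix-scoped deletes above, the same constructor shape can target one metric across every aggregation by passing an empty dimension map and an always-true predicate; a sketch (the time range and metric name are illustrative):

// delete all data for "metric1" in every aggregation, across all dimension values
CubeDeleteQuery wipeMetric = new CubeDeleteQuery(0, Long.MAX_VALUE, resolution,
    Collections.emptyMap(), Collections.singleton("metric1"), aggDims -> true);
cube.delete(wipeMetric);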