
Example 11 with TimeSeries

Use of io.cdap.cdap.api.dataset.lib.cube.TimeSeries in project cdap by caskdata.

From the class AbstractCubeTest, method testMetricsAggregationOptionLatest.

@Test
public void testMetricsAggregationOptionLatest() throws Exception {
    Aggregation agg = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableList.of("dim1"));
    int resolution = 1;
    Cube cube = getCube("testAggOptionLatest", new int[] { resolution }, ImmutableMap.of("agg", agg));
    Map<String, String> aggDims = new LinkedHashMap<>();
    aggDims.put("dim1", "tag1");
    aggDims.put("dim2", "tag2");
    aggDims.put("dim3", "tag3");
    // write 100 data points to agg
    for (int i = 1; i <= 100; i++) {
        writeGauge(cube, "metric1", i, i, aggDims);
    }
    // query with the LATEST aggregation option and a limit of 5: each of the 5 intervals should report the latest value written in it: 20, 40, 60, 80, 100
    CubeQuery query = new CubeQuery(null, 0, 200, 1, 5, ImmutableMap.of("metric1", AggregationFunction.SUM), aggDims, Collections.emptyList(), AggregationOption.LATEST, null);
    List<TimeSeries> result = new ArrayList<>(cube.query(query));
    Assert.assertEquals(1, result.size());
    List<TimeValue> timeValues = result.get(0).getTimeValues();
    for (int i = 0; i < timeValues.size(); i++) {
        Assert.assertEquals(20 * (i + 1), timeValues.get(i).getValue());
    }
}
Also used : TimeSeries(io.cdap.cdap.api.dataset.lib.cube.TimeSeries) ArrayList(java.util.ArrayList) CubeQuery(io.cdap.cdap.api.dataset.lib.cube.CubeQuery) LinkedHashMap(java.util.LinkedHashMap) Cube(io.cdap.cdap.api.dataset.lib.cube.Cube) TimeValue(io.cdap.cdap.api.dataset.lib.cube.TimeValue) Test(org.junit.Test)
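
The writeGauge helper used in this test is defined elsewhere in AbstractCubeTest and is not shown on this page. A minimal sketch of what such a helper might look like, assuming it builds a single CubeFact with a GAUGE measurement; the name and parameter order are inferred from the call sites above, not taken from the source:

private void writeGauge(Cube cube, String measureName, long ts, long value, Map<String, String> dims) {
    // record one fact at the given timestamp: tag it with the dimension values and
    // write the measurement as a gauge, i.e. the latest value within a bucket wins
    cube.add(new CubeFact(ts)
        .addDimensionValues(dims)
        .addMeasurement(measureName, MeasureType.GAUGE, value));
}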

Example 12 with TimeSeries

Use of io.cdap.cdap.api.dataset.lib.cube.TimeSeries in project cdap by caskdata.

From the class AbstractCubeTest, method verifyCountQuery.

private void verifyCountQuery(Cube cube, String aggregation, long startTs, long endTs, int resolution, String measureName, AggregationFunction aggFunction, Map<String, String> dimValues, List<String> groupByDims, Collection<TimeSeries> expected, Interpolator interpolator) throws Exception {
    CubeQuery query = CubeQuery.builder().select().measurement(measureName, aggFunction).from(aggregation).resolution(resolution, TimeUnit.SECONDS).where().dimensions(dimValues).timeRange(startTs, endTs).groupBy().dimensions(groupByDims).limit(Integer.MAX_VALUE).interpolator(interpolator).build();
    Collection<TimeSeries> result = cube.query(query);
    Assert.assertEquals(String.format("expected: %s, found: %s", expected, result), expected.size(), result.size());
    Assert.assertTrue(String.format("expected: %s, found: %s", expected, result), expected.containsAll(result));
}
Also used : TimeSeries(io.cdap.cdap.api.dataset.lib.cube.TimeSeries) CubeQuery(io.cdap.cdap.api.dataset.lib.cube.CubeQuery)
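
The tests in the following examples call a shorter verifyCountQuery overload that omits the aggregation name and the interpolator. A plausible sketch of that overload, assuming it simply delegates to the method above with both optional arguments set to null; the actual delegation is not shown on this page:

private void verifyCountQuery(Cube cube, long startTs, long endTs, int resolution, String measureName, AggregationFunction aggFunction, Map<String, String> dimValues, List<String> groupByDims, Collection<TimeSeries> expected) throws Exception {
    // no explicit aggregation name and no interpolation
    verifyCountQuery(cube, null, startTs, endTs, resolution, measureName, aggFunction, dimValues, groupByDims, expected, null);
}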

Example 13 with TimeSeries

Use of io.cdap.cdap.api.dataset.lib.cube.TimeSeries in project cdap by caskdata.

From the class AbstractCubeTest, method testMetricDeletion.

@Test
public void testMetricDeletion() throws Exception {
    // two aggregation groups with different orders
    Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableList.of("dim1"));
    Aggregation agg2 = new DefaultAggregation(ImmutableList.of("dim1", "dim3"), ImmutableList.of("dim3"));
    int resolution = 1;
    Cube cube = getCube("testDeletion", new int[] { resolution }, ImmutableMap.of("agg1", agg1, "agg2", agg2));
    Map<String, String> agg1Dims = new LinkedHashMap<>();
    agg1Dims.put("dim1", "1");
    agg1Dims.put("dim2", "1");
    agg1Dims.put("dim3", "1");
    Map<String, String> agg2Dims = new LinkedHashMap<>();
    agg2Dims.put("dim1", "1");
    agg2Dims.put("dim3", "1");
    // write some data
    writeInc(cube, "metric1", 1, 1, agg1Dims);
    writeInc(cube, "metric2", 3, 3, agg2Dims);
    // verify data is there
    verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, agg1Dims, ImmutableList.of(), ImmutableList.of(new TimeSeries("metric1", new HashMap<>(), timeValues(1, 1))));
    verifyCountQuery(cube, 0, 15, resolution, "metric2", AggregationFunction.SUM, agg2Dims, ImmutableList.of(), ImmutableList.of(new TimeSeries("metric2", new HashMap<>(), timeValues(3, 3))));
    // delete metrics from agg2
    Predicate<List<String>> predicate = aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(agg2Dims.keySet())) == 0;
    CubeDeleteQuery query = new CubeDeleteQuery(0, 15, resolution, agg2Dims, Collections.emptySet(), predicate);
    cube.delete(query);
    // agg1 data should still be there
    verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, agg1Dims, ImmutableList.of(), ImmutableList.of(new TimeSeries("metric1", new HashMap<>(), timeValues(1, 1))));
    // agg2 data should get deleted
    verifyCountQuery(cube, 0, 15, resolution, "metric2", AggregationFunction.SUM, agg2Dims, ImmutableList.of(), ImmutableList.of());
    // now delete the metrics that remain for agg1
    predicate = aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(agg1Dims.keySet())) == 0;
    query = new CubeDeleteQuery(0, 15, resolution, agg1Dims, Collections.emptySet(), predicate);
    cube.delete(query);
    verifyCountQuery(cube, 0, 15, resolution, "metric1", AggregationFunction.SUM, agg1Dims, ImmutableList.of(), ImmutableList.of());
}
Also used : CubeDeleteQuery(io.cdap.cdap.api.dataset.lib.cube.CubeDeleteQuery) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) Interpolators(io.cdap.cdap.api.dataset.lib.cube.Interpolators) Cube(io.cdap.cdap.api.dataset.lib.cube.Cube) ImmutableMap(com.google.common.collect.ImmutableMap) Predicate(java.util.function.Predicate) Collection(java.util.Collection) AggregationOption(io.cdap.cdap.api.dataset.lib.cube.AggregationOption) CubeQuery(io.cdap.cdap.api.dataset.lib.cube.CubeQuery) CubeFact(io.cdap.cdap.api.dataset.lib.cube.CubeFact) Test(org.junit.Test) TimeSeries(io.cdap.cdap.api.dataset.lib.cube.TimeSeries) Maps(com.google.common.collect.Maps) Interpolator(io.cdap.cdap.api.dataset.lib.cube.Interpolator) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) AggregationFunction(io.cdap.cdap.api.dataset.lib.cube.AggregationFunction) MeasureType(io.cdap.cdap.api.dataset.lib.cube.MeasureType) TimeValue(io.cdap.cdap.api.dataset.lib.cube.TimeValue) Assert(org.junit.Assert) Collections(java.util.Collections)
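
The writeInc and timeValues helpers are likewise part of AbstractCubeTest and are not reproduced here. A hedged sketch of both, assuming writeInc records a COUNTER measurement (so values written into the same resolution bucket are summed) and timeValues turns alternating (timestamp, value) pairs into TimeValue objects; signatures are inferred from the call sites. Example 14 below also uses a writeInc variant that takes the dimension values as trailing varargs mapped onto dim1, dim2, and so on.

private void writeInc(Cube cube, String measureName, long ts, long value, Map<String, String> dims) {
    // COUNTER measurements are added up with other values in the same resolution bucket
    cube.add(new CubeFact(ts)
        .addDimensionValues(dims)
        .addMeasurement(measureName, MeasureType.COUNTER, value));
}

private List<TimeValue> timeValues(long... tsAndValues) {
    // interpret the varargs as (timestamp, value) pairs, e.g. timeValues(1, 9, 10, 6) -> [(1, 9), (10, 6)]
    List<TimeValue> result = new ArrayList<>();
    for (int i = 0; i < tsAndValues.length; i += 2) {
        result.add(new TimeValue(tsAndValues[i], tsAndValues[i + 1]));
    }
    return result;
}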

Example 14 with TimeSeries

Use of io.cdap.cdap.api.dataset.lib.cube.TimeSeries in project cdap by caskdata.

From the class AbstractCubeTest, method testIncrements.

@Test
public void testIncrements() throws Exception {
    Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1"));
    Aggregation agg2 = new DefaultAggregation(ImmutableList.of("dim1", "dim2"));
    int res1 = 1;
    int res100 = 100;
    Cube cube = getCube("myIncCube", new int[] { res1, res100 }, ImmutableMap.of("agg1", agg1, "agg2", agg2));
    // write some data
    writeInc(cube, "metric1", 1, 1, "1", "1");
    writeInc(cube, "metric1", 1, 2, "2", "1");
    writeInc(cube, "metric1", 1, 3, "1", "2");
    writeInc(cube, "metric2", 1, 4, "1", "1");
    writeInc(cube, "metric1", 1, 5, "1", "2");
    writeInc(cube, "metric1", 10, 6, "1", "1");
    writeInc(cube, "metric1", 101, 7, "1", "1");
    // now let's query!
    verifyCountQuery(cube, "agg1", 0, 150, res1, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 9, 10, 6, 101, 7))));
    verifyCountQuery(cube, "agg1", 0, 150, res100, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(0, 15, 100, 7))));
    verifyCountQuery(cube, "agg2", 0, 150, res1, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 1, 10, 6, 101, 7))));
    verifyCountQuery(cube, "agg2", 0, 150, res100, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(0, 7, 100, 7))));
}
Also used : TimeSeries(io.cdap.cdap.api.dataset.lib.cube.TimeSeries) Cube(io.cdap.cdap.api.dataset.lib.cube.Cube) Test(org.junit.Test)
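
The res100 expectations in this test follow from how timestamps are rolled up: each fact's timestamp is floored to a multiple of the resolution, so the writes at timestamps 1 through 10 fall into bucket 0 and the write at timestamp 101 falls into bucket 100. An illustrative sketch of that bucketing rule (not taken from the cube implementation):

private long bucketStart(long tsSeconds, int resolutionSeconds) {
    // a timestamp belongs to the bucket that starts at the nearest lower multiple of the resolution
    return tsSeconds - (tsSeconds % resolutionSeconds);
}
// bucketStart(1, 100) == 0, bucketStart(10, 100) == 0, bucketStart(101, 100) == 100,
// hence the agg1 series at res100 is (0, 9 + 6 = 15) and (100, 7)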

Aggregations

TimeSeries (io.cdap.cdap.api.dataset.lib.cube.TimeSeries): 14
Test (org.junit.Test): 9
Cube (io.cdap.cdap.api.dataset.lib.cube.Cube): 8
CubeQuery (io.cdap.cdap.api.dataset.lib.cube.CubeQuery): 7
ArrayList (java.util.ArrayList): 7
LinkedHashMap (java.util.LinkedHashMap): 7
TimeValue (io.cdap.cdap.api.dataset.lib.cube.TimeValue): 6
HashMap (java.util.HashMap): 5
Map (java.util.Map): 5
AggregationOption (io.cdap.cdap.api.dataset.lib.cube.AggregationOption): 4
CubeFact (io.cdap.cdap.api.dataset.lib.cube.CubeFact): 4
ImmutableList (com.google.common.collect.ImmutableList): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 3
Lists (com.google.common.collect.Lists): 3
Maps (com.google.common.collect.Maps): 3
AggregationFunction (io.cdap.cdap.api.dataset.lib.cube.AggregationFunction): 3
CubeDeleteQuery (io.cdap.cdap.api.dataset.lib.cube.CubeDeleteQuery): 3
Interpolator (io.cdap.cdap.api.dataset.lib.cube.Interpolator): 3
Interpolators (io.cdap.cdap.api.dataset.lib.cube.Interpolators): 3
MeasureType (io.cdap.cdap.api.dataset.lib.cube.MeasureType): 3