
Example 1 with EntityTable

Use of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in project cdap by caskdata.

From the class MetricsDataMigrator, method migrateMetricsTableFromVersion26.

private void migrateMetricsTableFromVersion26(Version version) throws DataMigrationException {
    for (String scope : scopes) {
        EntityTable entityTable = new EntityTable(getOrCreateMetricsTable(String.format("%s.%s", scope, entityTableName), DatasetProperties.EMPTY));
        String scopedMetricsTableName = String.format("%s.%s", scope, metricsTableName);
        MetricsTable metricsTable = getOrCreateMetricsTable(scopedMetricsTableName, DatasetProperties.EMPTY);
        System.out.println("Migrating Metrics Data from table : " + scopedMetricsTableName);
        migrateMetricsData(entityTable, metricsTable, scope, version);
    }
}
Also used: EntityTable (co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable), MetricsTable (co.cask.cdap.data2.dataset2.lib.table.MetricsTable)
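
As a rough sketch of the same construction outside the migrator (the migrator's own getOrCreateMetricsTable helper is not shown here, so this assumes the in-memory MetricsTable implementation used by the tests below; the scope and table name are hypothetical example values):

// Sketch only: in-memory stand-in for the migrator's getOrCreateMetricsTable helper.
String scope = "system";                      // example scope, hypothetical
String entityTableName = "metrics.entity";    // example table name, hypothetical
String scopedEntityTableName = String.format("%s.%s", scope, entityTableName);
InMemoryTableService.create(scopedEntityTableName);
EntityTable entityTable = new EntityTable(new InMemoryMetricsTable(scopedEntityTableName));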

Example 2 with EntityTable

Use of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in project cdap by caskdata.

From the class EntityTableTest, method testGetName.

@Test
public void testGetName() throws Exception {
    InMemoryTableService.create("testGetName");
    MetricsTable table = new InMemoryMetricsTable("testGetName");
    EntityTable entityTable = new EntityTable(table);
    // Create some entities.
    for (int i = 1; i <= 10; i++) {
        Assert.assertEquals((long) i, entityTable.getId("app", "app" + i));
    }
    // Reverse lookup
    for (int i = 1; i <= 10; i++) {
        Assert.assertEquals("app" + i, entityTable.getName(i, "app"));
    }
}
Also used: InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable), MetricsTable (co.cask.cdap.data2.dataset2.lib.table.MetricsTable), Test (org.junit.Test)
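
The round trip exercised above is the core EntityTable contract: getId assigns a small numeric id per (type, name) pair, which the test shows starting at 1, and getName reverses the mapping. A minimal sketch of that usage, again assuming the in-memory MetricsTable and a hypothetical entity name:

// Sketch only: id/name round trip against an in-memory backing table.
InMemoryTableService.create("entityIds");
EntityTable entityTable = new EntityTable(new InMemoryMetricsTable("entityIds"));
// the test above shows the first id handed out for type "app" is 1
long id = entityTable.getId("app", "purchaseApp");
// reverse lookup by id, mirroring the test's int-based lookup; returns "purchaseApp"
String name = entityTable.getName((int) id, "app");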

Example 3 with EntityTable

Use of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in project cdap by caskdata.

From the class FactTableTest, method testPreSplits.

@Test
public void testPreSplits() throws Exception {
    InMemoryTableService.create("presplitEntityTable");
    InMemoryTableService.create("presplitDataTable");
    int resolution = 10;
    int rollTimebaseInterval = 2;
    InMemoryMetricsTable metricsTable = new InMemoryMetricsTable("presplitDataTable");
    FactTable table = new FactTable(metricsTable, new EntityTable(new InMemoryMetricsTable("presplitEntityTable")), resolution, rollTimebaseInterval);
    byte[][] splits = FactTable.getSplits(3);
    long ts = System.currentTimeMillis() / 1000;
    DimensionValue dimVal1 = new DimensionValue("dim1", "value1");
    DimensionValue dimVal2 = new DimensionValue("dim2", "value2");
    DimensionValue dimVal3 = new DimensionValue("dim3", "value3");
    // first agg view: dim1
    table.add(ImmutableList.of(new Fact(ts, ImmutableList.of(dimVal1), new Measurement("metric1", MeasureType.COUNTER, 1))));
    // second agg view: dim1 & dim2
    table.add(ImmutableList.of(new Fact(ts, ImmutableList.of(dimVal1, dimVal2), new Measurement("metric1", MeasureType.COUNTER, 1))));
    // third agg view: dim3
    table.add(ImmutableList.of(new Fact(ts, ImmutableList.of(dimVal3), new Measurement("metric1", MeasureType.COUNTER, 1))));
    // Verify all written records are spread across splits
    Scanner scanner = metricsTable.scan(null, null, null);
    Row row;
    Set<Integer> splitsWithRows = Sets.newHashSet();
    while ((row = scanner.next()) != null) {
        boolean added = false;
        for (int i = 0; i < splits.length; i++) {
            if (Bytes.compareTo(row.getRow(), splits[i]) < 0) {
                splitsWithRows.add(i);
                added = true;
                break;
            }
        }
        if (!added) {
            // falls into last split
            splitsWithRows.add(splits.length);
        }
    }
    Assert.assertEquals(3, splitsWithRows.size());
}
Also used: Measurement (co.cask.cdap.api.dataset.lib.cube.Measurement), Scanner (co.cask.cdap.api.dataset.table.Scanner), DimensionValue (co.cask.cdap.api.dataset.lib.cube.DimensionValue), InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable), Row (co.cask.cdap.api.dataset.table.Row), Test (org.junit.Test)
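
The split check in the scan loop above amounts to finding the first split boundary that the row key sorts before, falling through to the last region if no boundary is larger. A small self-contained sketch of that bucketing logic (using a plain unsigned lexicographic compare instead of the Bytes utility from the test):

// Sketch: map a row key to the index of the pre-split region it falls into.
static int splitIndexFor(byte[] rowKey, byte[][] splits) {
    for (int i = 0; i < splits.length; i++) {
        if (compareUnsigned(rowKey, splits[i]) < 0) {
            // row key sorts before boundary i, so it belongs to region i
            return i;
        }
    }
    // sorts after every boundary: falls into the last region
    return splits.length;
}

// Unsigned lexicographic byte[] comparison; on a shared prefix the shorter array sorts first.
static int compareUnsigned(byte[] a, byte[] b) {
    int len = Math.min(a.length, b.length);
    for (int i = 0; i < len; i++) {
        int cmp = (a[i] & 0xff) - (b[i] & 0xff);
        if (cmp != 0) {
            return cmp;
        }
    }
    return a.length - b.length;
}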

Example 4 with EntityTable

Use of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in project cdap by caskdata.

From the class FactTableTest, method testMaxResolution.

@Test
public void testMaxResolution() throws Exception {
    // we use Integer.MAX_VALUE as resolution to compute all-time total values
    InMemoryTableService.create("TotalsEntityTable");
    InMemoryTableService.create("TotalsDataTable");
    int resolution = Integer.MAX_VALUE;
    // should not matter when resolution is max
    int rollTimebaseInterval = 3600;
    FactTable table = new FactTable(new InMemoryMetricsTable("TotalsDataTable"), new EntityTable(new InMemoryMetricsTable("TotalsEntityTable")), resolution, rollTimebaseInterval);
    // ts is expected in seconds
    long ts = System.currentTimeMillis() / 1000;
    int count = 1000;
    for (int i = 0; i < count; i++) {
        for (int k = 0; k < 10; k++) {
            // each iteration writes one day further out
            writeInc(table, "metric" + k, ts + i * 60 * 60 * 24, i * k, "dim" + k, "value" + k);
        }
    }
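    // each metric k receives increments 0*k, 1*k, ..., (count-1)*k, so with max
    // resolution the all-time total rolls up to k * count * (count - 1) / 2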
    for (int k = 0; k < 10; k++) {
        // a scan over [0, 0] matches all data points: with max resolution they all
        // collapse into the single time bucket at timestamp 0
        FactScan scan = new FactScan(0, 0, "metric" + k, dimValues("dim" + k, "value" + k));
        Table<String, List<DimensionValue>, List<TimeValue>> expected = HashBasedTable.create();
        expected.put("metric" + k, dimValues("dim" + k, "value" + k), ImmutableList.of(new TimeValue(0, k * count * (count - 1) / 2)));
        assertScan(table, expected, scan);
    }
}
Also used: InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable), List (java.util.List), ImmutableList (com.google.common.collect.ImmutableList), TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue), Test (org.junit.Test)

Example 5 with EntityTable

Use of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in project cdap by caskdata.

From the class FactTableTest, method testBasics.

@Test
public void testBasics() throws Exception {
    InMemoryTableService.create("EntityTable");
    InMemoryTableService.create("DataTable");
    int resolution = 10;
    int rollTimebaseInterval = 2;
    FactTable table = new FactTable(new InMemoryMetricsTable("DataTable"), new EntityTable(new InMemoryMetricsTable("EntityTable")), resolution, rollTimebaseInterval);
    // aligned to start of resolution bucket
    // "/1000" because time is expected to be in seconds
    long ts = ((System.currentTimeMillis() / 1000) / resolution) * resolution;
    // testing encoding with multiple dims
    List<DimensionValue> dimensionValues = ImmutableList.of(new DimensionValue("dim1", "value1"), new DimensionValue("dim2", "value2"), new DimensionValue("dim3", "value3"));
    // try adding one by one, all in the same (first) time resolution bucket
    for (int i = 0; i < 5; i++) {
        for (int k = 1; k < 4; k++) {
            // note: "+i" here and below doesn't affect results, just to confirm
            // that data points are rounded to the resolution
            table.add(ImmutableList.of(new Fact(ts + i, dimensionValues, new Measurement("metric" + k, MeasureType.COUNTER, k))));
        }
    }
    // try adding one by one, in different time resolution buckets
    for (int i = 0; i < 3; i++) {
        for (int k = 1; k < 4; k++) {
            table.add(ImmutableList.of(new Fact(ts + resolution * i + i, dimensionValues, new Measurement("metric" + k, MeasureType.COUNTER, 2 * k))));
        }
    }
    // try adding as a list
    // first incs in same (second) time resolution bucket
    List<Fact> aggs = Lists.newArrayList();
    for (int i = 0; i < 7; i++) {
        for (int k = 1; k < 4; k++) {
            aggs.add(new Fact(ts + resolution, dimensionValues, new Measurement("metric" + k, MeasureType.COUNTER, 3 * k)));
        }
    }
    // then incs in different time resolution buckets
    for (int i = 0; i < 3; i++) {
        for (int k = 1; k < 4; k++) {
            aggs.add(new Fact(ts + resolution * i, dimensionValues, new Measurement("metric" + k, MeasureType.COUNTER, 4 * k)));
        }
    }
    table.add(aggs);
    // verify each metric
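    // expected totals per time bucket for metric k:
    //   bucket ts:                5*k (single adds) + 2*k + 4*k (batched adds) = 11*k
    //   bucket ts + resolution:   2*k + 7*3*k + 4*k                            = 27*k
    //   bucket ts + 2*resolution: 2*k + 4*k                                    = 6*k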
    for (int k = 1; k < 4; k++) {
        FactScan scan = new FactScan(ts - 2 * resolution, ts + 3 * resolution, "metric" + k, dimensionValues);
        Table<String, List<DimensionValue>, List<TimeValue>> expected = HashBasedTable.create();
        expected.put("metric" + k, dimensionValues, ImmutableList.of(new TimeValue(ts, 11 * k), new TimeValue(ts + resolution, 27 * k), new TimeValue(ts + 2 * resolution, 6 * k)));
        assertScan(table, expected, scan);
    }
    // verify each metric within a single timeBase
    for (int k = 1; k < 4; k++) {
        FactScan scan = new FactScan(ts, ts + resolution - 1, "metric" + k, dimensionValues);
        Table<String, List<DimensionValue>, List<TimeValue>> expected = HashBasedTable.create();
        expected.put("metric" + k, dimensionValues, ImmutableList.of(new TimeValue(ts, 11 * k)));
        assertScan(table, expected, scan);
    }
    // verify all metrics with fuzzy metric in scan
    Table<String, List<DimensionValue>, List<TimeValue>> expected = HashBasedTable.create();
    for (int k = 1; k < 4; k++) {
        expected.put("metric" + k, dimensionValues, ImmutableList.of(new TimeValue(ts, 11 * k), new TimeValue(ts + resolution, 27 * k), new TimeValue(ts + 2 * resolution, 6 * k)));
    }
    // metric = null means "all"
    FactScan scan = new FactScan(ts - 2 * resolution, ts + 3 * resolution, dimensionValues);
    assertScan(table, expected, scan);
    // delete metric test
    expected.clear();
    // delete the metrics data in the third time bucket (ts + 2 * resolution)
    scan = new FactScan(ts + resolution * 2, ts + resolution * 3, dimensionValues);
    table.delete(scan);
    for (int k = 1; k < 4; k++) {
        expected.put("metric" + k, dimensionValues, ImmutableList.of(new TimeValue(ts, 11 * k), new TimeValue(ts + resolution, 27 * k)));
    }
    // verify deletion
    scan = new FactScan(ts - 2 * resolution, ts + 3 * resolution, dimensionValues);
    assertScan(table, expected, scan);
    // delete metrics for "metric1" in the first time bucket (ts) and verify deletion
    scan = new FactScan(ts, ts + 1, "metric1", dimensionValues);
    table.delete(scan);
    expected.clear();
    expected.put("metric1", dimensionValues, ImmutableList.of(new TimeValue(ts + resolution, 27)));
    scan = new FactScan(ts - 2 * resolution, ts + 3 * resolution, "metric1", dimensionValues);
    assertScan(table, expected, scan);
    // verify the next dims search
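    // findSingleDimensionValue returns, per matching series and given the dimension
    // ordering, the next dimension value that follows the dimensions fixed in the slice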
    Collection<DimensionValue> nextTags = table.findSingleDimensionValue(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableMap.of("dim1", "value1"), ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of(new DimensionValue("dim2", "value2")), nextTags);
    Map<String, String> slice = Maps.newHashMap();
    slice.put("dim1", null);
    nextTags = table.findSingleDimensionValue(ImmutableList.of("dim1", "dim2", "dim3"), slice, ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of(new DimensionValue("dim2", "value2")), nextTags);
    nextTags = table.findSingleDimensionValue(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableMap.of("dim1", "value1", "dim2", "value2"), ts, ts + 3);
    Assert.assertEquals(ImmutableSet.of(new DimensionValue("dim3", "value3")), nextTags);
    // add new dim values
    dimensionValues = ImmutableList.of(new DimensionValue("dim1", "value1"), new DimensionValue("dim2", "value5"), new DimensionValue("dim3", null));
    table.add(ImmutableList.of(new Fact(ts, dimensionValues, new Measurement("metric", MeasureType.COUNTER, 10))));
    dimensionValues = ImmutableList.of(new DimensionValue("dim1", "value1"), new DimensionValue("dim2", null), new DimensionValue("dim3", "value3"));
    table.add(ImmutableList.of(new Fact(ts, dimensionValues, new Measurement("metric", MeasureType.COUNTER, 10))));
    nextTags = table.findSingleDimensionValue(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableMap.of("dim1", "value1"), ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of(new DimensionValue("dim2", "value2"), new DimensionValue("dim2", "value5"), new DimensionValue("dim3", "value3")), nextTags);
    // search for metric names given dims list and verify
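    // "metric1" is absent from the full-slice result because its data point at ts was
    // deleted above; "metric" only appears for broader slices since it was written with
    // different dimension values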
    Collection<String> metricNames = table.findMeasureNames(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableMap.of("dim1", "value1", "dim2", "value2", "dim3", "value3"), ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of("metric2", "metric3"), metricNames);
    metricNames = table.findMeasureNames(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableMap.of("dim1", "value1"), ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of("metric", "metric2", "metric3"), metricNames);
    metricNames = table.findMeasureNames(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableMap.of("dim2", "value2"), ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of("metric2", "metric3"), metricNames);
    metricNames = table.findMeasureNames(ImmutableList.of("dim1", "dim2", "dim3"), slice, ts, ts + 1);
    Assert.assertEquals(ImmutableSet.of("metric", "metric2", "metric3"), metricNames);
}
Also used: Measurement (co.cask.cdap.api.dataset.lib.cube.Measurement), DimensionValue (co.cask.cdap.api.dataset.lib.cube.DimensionValue), InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable), List (java.util.List), ImmutableList (com.google.common.collect.ImmutableList), TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue), Test (org.junit.Test)

Aggregations

InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable): 10
Test (org.junit.Test): 9
MetricsTable (co.cask.cdap.data2.dataset2.lib.table.MetricsTable): 7
DimensionValue (co.cask.cdap.api.dataset.lib.cube.DimensionValue): 4
EntityTable (co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable): 3
ImmutableList (com.google.common.collect.ImmutableList): 3
List (java.util.List): 3
Measurement (co.cask.cdap.api.dataset.lib.cube.Measurement): 2
TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue): 2
FactTable (co.cask.cdap.data2.dataset2.lib.timeseries.FactTable): 2
Row (co.cask.cdap.api.dataset.table.Row): 1
Scanner (co.cask.cdap.api.dataset.table.Scanner): 1
Table (co.cask.cdap.api.dataset.table.Table): 1
ImmutablePair (co.cask.cdap.common.utils.ImmutablePair): 1
FuzzyRowFilter (co.cask.cdap.data2.dataset2.lib.table.FuzzyRowFilter): 1
ArrayList (java.util.ArrayList): 1