Example usage of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in the project cdap by caskdata.
From the class CubeDatasetDefinition, method getDataset:
@Override
public CubeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  // Resolve the entity table embedded under the "entity" sub-specification.
  MetricsTable entityTable =
    metricsTableDef.getDataset(datasetContext, spec.getSpecification("entity"), arguments, classLoader);

  // One backing table per configured resolution; the sub-spec is keyed by the
  // resolution's decimal string form.
  Map<Integer, Table> resolutionTables = Maps.newHashMap();
  for (int res : getResolutions(spec.getProperties())) {
    Table resolutionTable =
      tableDef.getDataset(datasetContext, spec.getSpecification(Integer.toString(res)), arguments, classLoader);
    resolutionTables.put(res, resolutionTable);
  }

  // Aggregations are derived from the same dataset properties as the resolutions.
  return new CubeDataset(spec.getName(), entityTable, resolutionTables, getAggregations(spec.getProperties()));
}
Example usage of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in the project cdap by caskdata.
From the class DefaultCubeTest, method getCube:
@Override
protected Cube getCube(final String name, int[] resolutions, Map<String, ? extends Aggregation> aggregations) {
  // Supplies a fresh in-memory fact table (plus its entity table) for each resolution.
  FactTableSupplier supplier = new FactTableSupplier() {
    @Override
    public FactTable get(int resolution, int rollTime) {
      String entityName = "EntityTable-" + name;
      String dataName = "DataTable-" + name + "-" + resolution;
      InMemoryTableService.create(entityName);
      InMemoryTableService.create(dataName);
      EntityTable entities = new EntityTable(new InMemoryMetricsTable(entityName));
      return new FactTable(new InMemoryMetricsTable(dataName), entities, resolution, rollTime);
    }
  };
  // No aggregation aliases are needed for these tests.
  return new DefaultCube(resolutions, supplier, aggregations, ImmutableMap.<String, AggregationAlias>of());
}
Example usage of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in the project cdap by caskdata.
From the class MetricsDataMigrator, method migrateMetricsTableFromVersion27:
private void migrateMetricsTableFromVersion27(Version version) throws DataMigrationException {
  // Source tables for the 2.7-era layout; getOrCreateMetricsTable creates them if absent.
  EntityTable entities = new EntityTable(getOrCreateMetricsTable(entityTableName, DatasetProperties.EMPTY));
  MetricsTable metrics = getOrCreateMetricsTable(metricsTableName, DatasetProperties.EMPTY);
  System.out.println("Migrating Metrics Data from table : " + metricsTableName);
  // Third argument (scope) is null here — presumably "migrate everything"; confirm against migrateMetricsData.
  migrateMetricsData(entities, metrics, null, version);
}
Example usage of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in the project cdap by caskdata.
From the class EntityTableTest, method testGetId:
@Test
public void testGetId() throws Exception {
  InMemoryTableService.create("testGetId");
  MetricsTable table = new InMemoryMetricsTable("testGetId");
  EntityTable entityTable = new EntityTable(table);

  // Ids are handed out sequentially starting at 1.
  for (long expected = 1; expected <= 10; expected++) {
    Assert.assertEquals(expected, entityTable.getId("app", "app" + expected));
  }
  // Looking the same names up again yields the same ids (served from cache).
  for (long expected = 1; expected <= 10; expected++) {
    Assert.assertEquals(expected, entityTable.getId("app", "app" + expected));
  }
  // A fresh EntityTable over the same storage must reload the identical mapping.
  entityTable = new EntityTable(table);
  for (long expected = 1; expected <= 10; expected++) {
    Assert.assertEquals(expected, entityTable.getId("app", "app" + expected));
  }
  // Each entity type gets its own id space, restarting from 1.
  for (long expected = 1; expected <= 10; expected++) {
    Assert.assertEquals(expected, entityTable.getId("flow", "flow" + expected));
  }
}
Example usage of co.cask.cdap.data2.dataset2.lib.timeseries.EntityTable in the project cdap by caskdata.
From the class EntityTableTest, method testRecycleAfterMaxId:
@Test
public void testRecycleAfterMaxId() throws Exception {
  InMemoryTableService.create("testRecycleId");
  MetricsTable table = new InMemoryMetricsTable("testRecycleId");
  // Construct with a small maxId (101) — per the original test, roughly 100
  // entries fit before ids start being recycled.
  EntityTable entityTable = new EntityTable(table, 101);

  // Register 500 entities; far more than the id space can hold at once.
  for (long n = 1; n <= 500; n++) {
    entityTable.getId("app", "app" + n);
  }
  // The surviving ids 1..100 should map to the most recent entities, app401..app500.
  for (long id = 1; id <= 100; id++) {
    Assert.assertEquals("app" + (400 + id), entityTable.getName(id, "app"));
  }
}
Aggregations