Search in sources :

Example 6 with ColumnListsCache

use of org.apache.drill.exec.store.hive.ColumnListsCache in project drill by apache.

the class TestColumnListCache method testPartitionColumnListAccess.

@Test
public void testPartitionColumnListAccess() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    cache.addOrGet(columns);
    cache.addOrGet(columns);
    columns.add(new FieldSchema("f3", "int", null));
    cache.addOrGet(columns);
    cache.addOrGet(columns);
    columns.add(new FieldSchema("f4", "int", null));
    cache.addOrGet(columns);
    cache.addOrGet(columns);
    // the mutated list produced three distinct entries; index 2 holds f1..f4
    assertEquals(columns, cache.getColumns(2));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) BaseTest(org.apache.drill.test.BaseTest) SlowTest(org.apache.drill.categories.SlowTest) Test(org.junit.Test)
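
These tests exercise just two operations: addOrGet returns the index assigned to a content-equal column list (adding a snapshot of it when unseen), and getColumns returns the list stored at an index or null. As a rough illustration of that contract, here is a minimal sketch, not Drill's ColumnListsCache, with the element type left generic instead of FieldSchema:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative sketch only: a list-deduplicating cache with the addOrGet/getColumns
// behavior exercised by TestColumnListCache. Drill's ColumnListsCache stores
// FieldSchema lists; the element type here is generic for brevity.
public class ListDedupCache<T> {

    // distinct lists in insertion order; the position is the index handed out by addOrGet
    private final List<List<T>> lists = new ArrayList<>();
    // content-based lookup from a list to its index
    private final Map<List<T>, Integer> indexes = new HashMap<>();

    // Returns the index of a list with equal content, storing a defensive copy if unseen.
    public int addOrGet(List<T> columns) {
        List<T> copy = new ArrayList<>(columns); // snapshot, since callers may mutate their list later
        Integer existing = indexes.get(copy);
        if (existing != null) {
            return existing;
        }
        int index = lists.size();
        lists.add(copy);
        indexes.put(copy, index);
        return index;
    }

    // Returns the list stored at the given index, or null if no such index was assigned.
    public List<T> getColumns(int index) {
        return index >= 0 && index < lists.size() ? lists.get(index) : null;
    }
}

Under that contract the assertion in testPartitionColumnListAccess follows directly: the caller mutates one list in place, so the cache sees three distinct contents and the f1..f4 snapshot lands at index 2.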

Example 7 with ColumnListsCache

use of org.apache.drill.exec.store.hive.ColumnListsCache in project drill by apache.

the class TestColumnListCache method testPartitionColumnCaching.

@Test
public void testPartitionColumnCaching() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    // sum of the indexes returned for the first column list
    int indexSum = cache.addOrGet(columns);
    indexSum += cache.addOrGet(columns);
    List<FieldSchema> sameColumns = Lists.newArrayList(columns);
    indexSum += cache.addOrGet(sameColumns);
    List<FieldSchema> otherColumns = Lists.newArrayList();
    otherColumns.add(new FieldSchema("f3", "int", null));
    otherColumns.add(new FieldSchema("f4", "int", null));
    // sum of the indexes returned for the second column list
    int secondIndexSum = cache.addOrGet(otherColumns);
    secondIndexSum += cache.addOrGet(otherColumns);
    List<FieldSchema> sameOtherColumns = Lists.newArrayList();
    sameOtherColumns.add(new FieldSchema("f3", "int", null));
    sameOtherColumns.add(new FieldSchema("f4", "int", null));
    secondIndexSum += cache.addOrGet(sameOtherColumns);
    secondIndexSum += cache.addOrGet(Lists.newArrayList(sameOtherColumns));
    secondIndexSum += cache.addOrGet(otherColumns);
    secondIndexSum += cache.addOrGet(otherColumns);
    indexSum += cache.addOrGet(sameColumns);
    indexSum += cache.addOrGet(columns);
    // only two distinct column lists were added, so index 3 does not exist
    assertNull(cache.getColumns(3));
    // the first list always resolved to index 0 across its five lookups
    assertEquals(0, indexSum);
    // the second list always resolved to index 1 across its six lookups
    assertEquals(6, secondIndexSum);
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) BaseTest(org.apache.drill.test.BaseTest) SlowTest(org.apache.drill.categories.SlowTest) Test(org.junit.Test)
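
The two sums fall out of the same contract: the first list is assigned index 0 and looked up five times, the second is assigned index 1 and looked up six times, and no third index is ever created. Reproducing the arithmetic with the illustrative ListDedupCache sketched after Example 6 (again a stand-in, not the Drill class):

public class IndexSumExample {
    public static void main(String[] args) {
        ListDedupCache<String> cache = new ListDedupCache<>();
        int indexSum = 0;
        for (int i = 0; i < 5; i++) {
            indexSum += cache.addOrGet(java.util.Arrays.asList("f1", "f2"));       // index 0 every time
        }
        int secondIndexSum = 0;
        for (int i = 0; i < 6; i++) {
            secondIndexSum += cache.addOrGet(java.util.Arrays.asList("f3", "f4")); // index 1 every time
        }
        System.out.println(indexSum);            // 0
        System.out.println(secondIndexSum);      // 6
        System.out.println(cache.getColumns(3)); // null: only indices 0 and 1 were assigned
    }
}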

Example 8 with ColumnListsCache

use of org.apache.drill.exec.store.hive.ColumnListsCache in project drill by axbaretto.

the class TestColumnListCache method testPartitionColumnCaching.

@Test
public void testPartitionColumnCaching() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    // sum of the indexes returned for the first column list
    int indexSum = cache.addOrGet(columns);
    indexSum += cache.addOrGet(columns);
    List<FieldSchema> sameColumns = Lists.newArrayList(columns);
    indexSum += cache.addOrGet(sameColumns);
    List<FieldSchema> otherColumns = Lists.newArrayList();
    otherColumns.add(new FieldSchema("f3", "int", null));
    otherColumns.add(new FieldSchema("f4", "int", null));
    // sum of the indexes returned for the second column list
    int secondIndexSum = cache.addOrGet(otherColumns);
    secondIndexSum += cache.addOrGet(otherColumns);
    List<FieldSchema> sameOtherColumns = Lists.newArrayList();
    sameOtherColumns.add(new FieldSchema("f3", "int", null));
    sameOtherColumns.add(new FieldSchema("f4", "int", null));
    secondIndexSum += cache.addOrGet(sameOtherColumns);
    secondIndexSum += cache.addOrGet(Lists.newArrayList(sameOtherColumns));
    secondIndexSum += cache.addOrGet(otherColumns);
    secondIndexSum += cache.addOrGet(otherColumns);
    indexSum += cache.addOrGet(sameColumns);
    indexSum += cache.addOrGet(columns);
    // only two distinct column lists were added, so index 3 does not exist
    assertNull(cache.getColumns(3));
    // the first list always resolved to index 0 across its five lookups
    assertEquals(0, indexSum);
    // the second list always resolved to index 1 across its six lookups
    assertEquals(6, secondIndexSum);
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) SlowTest(org.apache.drill.categories.SlowTest) Test(org.junit.Test)

Example 9 with ColumnListsCache

use of org.apache.drill.exec.store.hive.ColumnListsCache in project drill by axbaretto.

the class TestColumnListCache method testPartitionColumnsIndex.

@Test
public void testPartitionColumnsIndex() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    cache.addOrGet(columns);
    columns.add(new FieldSchema("f3", "int", null));
    // the extra column makes the list content distinct, so it gets the next index
    assertEquals(1, cache.addOrGet(columns));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) SlowTest(org.apache.drill.categories.SlowTest) Test(org.junit.Test)

Example 10 with ColumnListsCache

use of org.apache.drill.exec.store.hive.ColumnListsCache in project drill by apache.

the class TableEntryCacheLoader method load.

@Override
@SuppressWarnings("NullableProblems")
public HiveReadEntry load(TableName key) throws Exception {
    Table table;
    List<Partition> partitions;
    synchronized (client) {
        table = getTable(key);
        partitions = getPartitions(key);
    }
    HiveTableWithColumnCache hiveTable = new HiveTableWithColumnCache(table, new ColumnListsCache(table));
    List<HiveTableWrapper.HivePartitionWrapper> partitionWrappers = getPartitionWrappers(partitions, hiveTable);
    return new HiveReadEntry(new HiveTableWrapper(hiveTable), partitionWrappers);
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) HiveReadEntry(org.apache.drill.exec.store.hive.HiveReadEntry) Table(org.apache.hadoop.hive.metastore.api.Table) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) HiveTableWithColumnCache(org.apache.drill.exec.store.hive.HiveTableWithColumnCache) HiveTableWrapper(org.apache.drill.exec.store.hive.HiveTableWrapper)
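
Since load(TableName) here has the shape of a Guava CacheLoader, the loader is presumably wired into a LoadingCache so that the synchronized metastore round trip only happens on a cache miss. A hedged wiring sketch with simplified key/value types (the loader body, eviction policy, and size bound are assumptions, not taken from the Drill sources):

import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

// Hypothetical wiring: a CacheLoader-backed LoadingCache in the spirit of
// TableEntryCacheLoader. Types are simplified to String; the real code keys
// on TableName and produces HiveReadEntry values.
public class TableEntryCacheExample {

    private final LoadingCache<String, String> tableEntries = CacheBuilder.newBuilder()
            .expireAfterAccess(2, TimeUnit.MINUTES) // assumed eviction policy
            .maximumSize(1000)                      // assumed bound
            .build(new CacheLoader<String, String>() {
                @Override
                public String load(String tableName) {
                    // stand-in for the metastore lookup done by the real loader
                    return "entry for " + tableName;
                }
            });

    public String readEntry(String tableName) {
        // getUnchecked() invokes load() only when the entry is missing or expired
        return tableEntries.getUnchecked(tableName);
    }
}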

Aggregations

ColumnListsCache (org.apache.drill.exec.store.hive.ColumnListsCache): 11
SlowTest (org.apache.drill.categories.SlowTest): 10
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 10
Test (org.junit.Test): 10
BaseTest (org.apache.drill.test.BaseTest): 5
HiveReadEntry (org.apache.drill.exec.store.hive.HiveReadEntry): 1
HiveTableWithColumnCache (org.apache.drill.exec.store.hive.HiveTableWithColumnCache): 1
HiveTableWrapper (org.apache.drill.exec.store.hive.HiveTableWrapper): 1
Partition (org.apache.hadoop.hive.metastore.api.Partition): 1
Table (org.apache.hadoop.hive.metastore.api.Table): 1