
Example 91 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project presto by prestodb.

From class BridgingHiveMetastore, method renameColumn:

@Override
public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName) {
    Optional<org.apache.hadoop.hive.metastore.api.Table> source = delegate.getTable(databaseName, tableName);
    if (!source.isPresent()) {
        throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
    }
    org.apache.hadoop.hive.metastore.api.Table table = source.get();
    for (FieldSchema fieldSchema : table.getPartitionKeys()) {
        if (fieldSchema.getName().equals(oldColumnName)) {
            throw new PrestoException(NOT_SUPPORTED, "Renaming partition columns is not supported");
        }
    }
    for (FieldSchema fieldSchema : table.getSd().getCols()) {
        if (fieldSchema.getName().equals(oldColumnName)) {
            fieldSchema.setName(newColumnName);
        }
    }
    alterTable(databaseName, tableName, table);
}
Also used : MetastoreUtil.toMetastoreApiTable(com.facebook.presto.hive.metastore.MetastoreUtil.toMetastoreApiTable) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) PrestoException(com.facebook.presto.spi.PrestoException) SchemaTableName(com.facebook.presto.spi.SchemaTableName) TableNotFoundException(com.facebook.presto.spi.TableNotFoundException)
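
For context, the shape of the Thrift objects this method mutates can be sketched directly. The snippet below is a minimal, hypothetical example (the database, table, and column names are made up); it only uses the Hive metastore Thrift classes already shown above (Table, StorageDescriptor, FieldSchema) and shows why renaming a regular column is a simple in-place mutation of its FieldSchema, while partition keys live in a separate list that the method refuses to touch.

import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class RenameColumnSketch {
    public static void main(String[] args) {
        // Regular columns live in the table's storage descriptor ...
        StorageDescriptor sd = new StorageDescriptor();
        sd.setCols(Arrays.asList(new FieldSchema("old_name", "string", null)));

        // ... while partition keys are a separate list on the table itself.
        Table table = new Table();
        table.setDbName("example_db");      // hypothetical database name
        table.setTableName("example_table"); // hypothetical table name
        table.setSd(sd);
        table.setPartitionKeys(Arrays.asList(new FieldSchema("ds", "string", null)));

        // Renaming a data column is an in-place mutation of its FieldSchema.
        for (FieldSchema field : table.getSd().getCols()) {
            if (field.getName().equals("old_name")) {
                field.setName("new_name");
            }
        }
        System.out.println(table.getSd().getCols());
    }
}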

Example 92 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project drill by apache.

From class DrillHiveTable, method getRowType:

@Override
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
    List<RelDataType> typeList = Lists.newArrayList();
    List<String> fieldNameList = Lists.newArrayList();
    List<FieldSchema> hiveFields = hiveTable.getColumnListsCache().getColumns(0);
    for (FieldSchema hiveField : hiveFields) {
        fieldNameList.add(hiveField.getName());
        typeList.add(getNullableRelDataTypeFromHiveType(typeFactory, TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType())));
    }
    for (FieldSchema field : hiveTable.getPartitionKeys()) {
        fieldNameList.add(field.getName());
        typeList.add(getNullableRelDataTypeFromHiveType(typeFactory, TypeInfoUtils.getTypeInfoFromTypeString(field.getType())));
    }
    return typeFactory.createStructType(typeList, fieldNameList);
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) RelDataType(org.apache.calcite.rel.type.RelDataType)
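
The heavy lifting here is done by TypeInfoUtils.getTypeInfoFromTypeString, which parses the Hive type string stored in each FieldSchema into a structured TypeInfo before it is mapped to a Calcite RelDataType. A minimal sketch of that parsing step, with a made-up column name and type string (assumes the hive-serde classes are on the classpath):

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringSketch {
    public static void main(String[] args) {
        // FieldSchema stores the column type as a plain Hive type string.
        FieldSchema field = new FieldSchema("prices", "array<decimal(10,2)>", null);

        // TypeInfoUtils parses the string into a structured TypeInfo (a LIST
        // type here), which callers can then map to their own type system.
        TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(field.getType());
        System.out.println(typeInfo.getCategory() + ": " + typeInfo.getTypeName());
    }
}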

Example 93 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project drill by apache.

From class TestColumnListCache, method testPartitionColumnCaching:

@Test
public void testPartitionColumnCaching() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    // sum of all indexes from cache
    int indexSum = cache.addOrGet(columns);
    indexSum += cache.addOrGet(columns);
    List<FieldSchema> sameColumns = Lists.newArrayList(columns);
    indexSum += cache.addOrGet(sameColumns);
    List<FieldSchema> otherColumns = Lists.newArrayList();
    otherColumns.add(new FieldSchema("f3", "int", null));
    otherColumns.add(new FieldSchema("f4", "int", null));
    // sum of all indexes from cache
    int secondIndexSum = cache.addOrGet(otherColumns);
    secondIndexSum += cache.addOrGet(otherColumns);
    List<FieldSchema> sameOtherColumns = Lists.newArrayList();
    sameOtherColumns.add(new FieldSchema("f3", "int", null));
    sameOtherColumns.add(new FieldSchema("f4", "int", null));
    secondIndexSum += cache.addOrGet(sameOtherColumns);
    secondIndexSum += cache.addOrGet(Lists.newArrayList(sameOtherColumns));
    secondIndexSum += cache.addOrGet(otherColumns);
    secondIndexSum += cache.addOrGet(otherColumns);
    indexSum += cache.addOrGet(sameColumns);
    indexSum += cache.addOrGet(columns);
    // added only two kinds of column lists
    assertNull(cache.getColumns(3));
    // sum of the indices of the first column list
    assertEquals(0, indexSum);
    assertEquals(6, secondIndexSum);
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) Test(org.junit.Test)
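
The arithmetic in the assertions follows from one property: addOrGet deduplicates by list content, returning the existing index when an equal list was added before. The first distinct list therefore always resolves to index 0 (so indexSum stays 0), the second distinct list to index 1 (six lookups give secondIndexSum == 6), and only two distinct lists ever exist, so getColumns(3) is null. The sketch below is an illustrative stand-in with the same addOrGet/getColumns contract; it is not the Drill ColumnListsCache implementation.

import java.util.ArrayList;
import java.util.List;

// Illustrative only: a content-deduplicating list cache with the contract
// the test above exercises. Not the actual Drill ColumnListsCache.
class DedupListCache<T> {
    private final List<List<T>> lists = new ArrayList<>();

    // Returns the index of an equal list if one was added before,
    // otherwise stores a defensive copy and returns its new index.
    int addOrGet(List<T> columns) {
        int index = lists.indexOf(columns);
        if (index >= 0) {
            return index;
        }
        lists.add(new ArrayList<>(columns));
        return lists.size() - 1;
    }

    // Returns null for an index that has not been assigned yet.
    List<T> getColumns(int index) {
        return index >= 0 && index < lists.size() ? lists.get(index) : null;
    }
}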

Example 94 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project drill by apache.

From class TestColumnListCache, method testTableColumnsIndex:

@Test
public void testTableColumnsIndex() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    assertEquals(0, cache.addOrGet(columns));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) Test(org.junit.Test)

Example 95 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project drill by apache.

From class TestColumnListCache, method testPartitionColumnListAccess:

@Test
public void testPartitionColumnListAccess() {
    ColumnListsCache cache = new ColumnListsCache();
    List<FieldSchema> columns = Lists.newArrayList();
    columns.add(new FieldSchema("f1", "int", null));
    columns.add(new FieldSchema("f2", "int", null));
    cache.addOrGet(columns);
    cache.addOrGet(columns);
    columns.add(new FieldSchema("f3", "int", null));
    cache.addOrGet(columns);
    cache.addOrGet(columns);
    columns.add(new FieldSchema("f4", "int", null));
    cache.addOrGet(columns);
    cache.addOrGet(columns);
    assertEquals(columns, cache.getColumns(2));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnListsCache(org.apache.drill.exec.store.hive.ColumnListsCache) Test(org.junit.Test)
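
Tracing the index assignments makes the final assertion clearer: each time the mutated list's content differs from everything added before, it gets a fresh index, so the three distinct contents map to 0, 1, and 2, and getColumns(2) returns the four-column list. Using the DedupListCache sketch from Example 93 as a stand-in (the real ColumnListsCache may differ in details):

import java.util.ArrayList;
import java.util.List;

public class ListAccessTrace {
    public static void main(String[] args) {
        DedupListCache<String> cache = new DedupListCache<>();
        List<String> columns = new ArrayList<>();
        columns.add("f1");
        columns.add("f2");
        System.out.println(cache.addOrGet(columns)); // 0: first distinct content
        columns.add("f3");
        System.out.println(cache.addOrGet(columns)); // 1: content changed
        columns.add("f4");
        System.out.println(cache.addOrGet(columns)); // 2: content changed again
        System.out.println(cache.getColumns(2));     // [f1, f2, f3, f4]
    }
}

Note that this only works because the cache stores a copy of each list rather than the caller's reference; otherwise the later add() calls would silently mutate the earlier entries, which the passing test implies the real cache guards against as well.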

Aggregations

FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 407
ArrayList (java.util.ArrayList) 254
Table (org.apache.hadoop.hive.metastore.api.Table) 163
Test (org.junit.Test) 160
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor) 136
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo) 118
Partition (org.apache.hadoop.hive.metastore.api.Partition) 93
HashMap (java.util.HashMap) 69
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 44
List (java.util.List) 42
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 42
ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics) 40
Database (org.apache.hadoop.hive.metastore.api.Database) 40
ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) 39
IOException (java.io.IOException) 36
ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) 36
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 35
ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) 34
Path (org.apache.hadoop.fs.Path) 32
AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats) 32