Search in sources :

Example 51 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

In the class TestMetastoreTransformer, the method testLocationBlank:

@Test
public void testLocationBlank() throws Exception {
    // Build a table whose location is explicitly the empty string.
    Table tbl = new TableBuilder()
            .setTableName("locationBlank")
            .setCols(new ArrayList<FieldSchema>())
            .setLocation("")
            .build(conf);
    // Creation must succeed; the metastore is expected to substitute a
    // default location for the blank one rather than fail.
    client.createTable(tbl);
    Table fetched = client.getTable(tbl.getDbName(), tbl.getTableName().toLowerCase());
    // The generated location's final path segment should be the lower-cased table name.
    assertEquals("locationblank", new File(fetched.getSd().getLocation()).getName());
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) File(java.io.File) Test(org.junit.Test)

Example 52 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

In the class TestObjectStore, the method createPartitionedTable:

/**
 * Creates the DB1 database and the TABLE1 table with 3 partitions
 * (values "a0".."a2" on test_part_col).
 * @param withPrivileges Should we create privileges as well
 * @param withStatistics Should we create statistics as well
 */
private void createPartitionedTable(boolean withPrivileges, boolean withStatistics) throws Exception {
    Database database = new DatabaseBuilder()
            .setName(DB1)
            .setDescription("description")
            .setLocation("locationurl")
            .build(conf);
    try (AutoCloseable ignored = deadline()) {
        objectStore.createDatabase(database);
    }
    Table table = new TableBuilder()
            .setDbName(DB1)
            .setTableName(TABLE1)
            .addCol("test_col1", "int")
            .addCol("test_col2", "int")
            .addPartCol("test_part_col", "int")
            .addCol("test_bucket_col", "int", "test bucket col comment")
            .addCol("test_skewed_col", "int", "test skewed col comment")
            .addCol("test_sort_col", "int", "test sort col comment")
            .build(conf);
    try (AutoCloseable ignored = deadline()) {
        objectStore.createTable(table);
    }
    PrivilegeBag privileges = new PrivilegeBag();
    // Create the three partitions, optionally with privileges and statistics for each.
    for (int i = 0; i < 3; i++) {
        Partition partition = new PartitionBuilder()
                .inTable(table)
                .addValue("a" + i)
                .addSerdeParam("serdeParam", "serdeParamValue")
                .addStorageDescriptorParam("sdParam", "sdParamValue")
                .addBucketCol("test_bucket_col")
                .addSkewedColName("test_skewed_col")
                .addSortCol("test_sort_col", 1)
                .build(conf);
        try (AutoCloseable ignored = deadline()) {
            objectStore.addPartition(partition);
        }
        if (withPrivileges) {
            // One privilege on the partition itself, one on its partition column;
            // both are collected into the bag and granted in a single call below.
            HiveObjectRef partitionRef = new HiveObjectRefBuilder().buildPartitionReference(partition);
            HiveObjectRef partitionColRef =
                    new HiveObjectRefBuilder().buildPartitionColumnReference(table, "test_part_col", partition.getValues());
            PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfoBuilder().setPrivilege("a").build();
            privileges.addToPrivileges(new HiveObjectPrivilegeBuilder()
                    .setHiveObjectRef(partitionRef)
                    .setPrincipleName("a")
                    .setPrincipalType(PrincipalType.USER)
                    .setGrantInfo(grantInfo)
                    .build());
            privileges.addToPrivileges(new HiveObjectPrivilegeBuilder()
                    .setHiveObjectRef(partitionColRef)
                    .setPrincipleName("a")
                    .setPrincipalType(PrincipalType.USER)
                    .setGrantInfo(grantInfo)
                    .build());
        }
        if (withStatistics) {
            // Record a single long-typed statistics object for the partition column.
            ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
            desc.setCatName(table.getCatName());
            desc.setDbName(table.getDbName());
            desc.setTableName(table.getTableName());
            desc.setPartName("test_part_col=a" + i);
            LongColumnStatsData longStats = new LongColumnStatsData();
            longStats.setNumNulls(1);
            longStats.setNumDVs(2);
            longStats.setLowValue(3);
            longStats.setHighValue(4);
            ColumnStatisticsData data = new ColumnStatisticsData();
            data.setLongStats(longStats);
            List<ColumnStatisticsObj> statsObjs = new ArrayList<>(1);
            statsObjs.add(new ColumnStatisticsObj("test_part_col", "int", data));
            ColumnStatistics stats = new ColumnStatistics();
            stats.setStatsDesc(desc);
            stats.setStatsObj(statsObjs);
            stats.setEngine(ENGINE);
            try (AutoCloseable ignored = deadline()) {
                objectStore.updatePartitionColumnStatistics(stats, partition.getValues(), null, -1);
            }
        }
    }
    if (withPrivileges) {
        try (AutoCloseable ignored = deadline()) {
            objectStore.grantPrivileges(privileges);
        }
    }
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) TestHiveMetaStore.createSourceTable(org.apache.hadoop.hive.metastore.TestHiveMetaStore.createSourceTable) PrivilegeGrantInfo(org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) HiveObjectPrivilegeBuilder(org.apache.hadoop.hive.metastore.client.builder.HiveObjectPrivilegeBuilder) ArrayList(java.util.ArrayList) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) HiveObjectRefBuilder(org.apache.hadoop.hive.metastore.client.builder.HiveObjectRefBuilder) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) Database(org.apache.hadoop.hive.metastore.api.Database) PrivilegeGrantInfoBuilder(org.apache.hadoop.hive.metastore.client.builder.PrivilegeGrantInfoBuilder) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData)

Example 53 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

In the class TestPartitionManagement, the method createMetadata:

/**
 * Creates (as needed) a catalog and database, then a table with the given columns,
 * optionally partitioned, and adds one partition per entry of {@code partVals}.
 *
 * @param catName      catalog name; a new catalog is created unless it is the default
 * @param dbName       database name; a new database is created unless it is the default
 * @param tableName    name of the table to create
 * @param partKeys     partition column names, or null for an unpartitioned table
 * @param partKeyTypes partition column types, parallel to {@code partKeys}
 * @param partVals     partition value lists; one partition is created per element
 * @param colMap       regular (non-partition) columns to add to the table
 * @param isOrc        if true, configure the table with ORC input/output formats
 * @return the generated "key=value" partition names (empty for unpartitioned tables)
 * @throws IllegalArgumentException if the partition arguments are inconsistent
 */
private List<String> createMetadata(String catName, String dbName, String tableName, List<String> partKeys, List<String> partKeyTypes, List<List<String>> partVals, Map<String, Column> colMap, boolean isOrc) throws TException {
    if (!DEFAULT_CATALOG_NAME.equals(catName)) {
        Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
        client.createCatalog(cat);
    }
    Database db;
    if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
        DatabaseBuilder dbBuilder = new DatabaseBuilder().setName(dbName);
        dbBuilder.setCatalogName(catName);
        db = dbBuilder.create(client, conf);
    } else {
        db = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
    }
    TableBuilder tb = new TableBuilder().inDb(db).setTableName(tableName);
    if (isOrc) {
        tb.setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat").setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
    }
    for (Column col : colMap.values()) {
        tb.addCol(col.colName, col.colType);
    }
    if (partKeys != null) {
        // Validate partition arguments before touching the metastore.
        if (partKeyTypes == null) {
            throw new IllegalArgumentException("partKeyTypes cannot be null when partKeys is non-null");
        }
        if (partKeys.size() != partKeyTypes.size()) {
            throw new IllegalArgumentException("partKeys and partKeyTypes size should be same");
        }
        if (partVals.isEmpty()) {
            // Fixed typo in the message: "patitioned" -> "partitioned".
            throw new IllegalArgumentException("partVals cannot be empty for partitioned table");
        }
        for (int i = 0; i < partKeys.size(); i++) {
            tb.addPartCol(partKeys.get(i), partKeyTypes.get(i));
        }
    }
    Table table = tb.create(client, conf);
    if (partKeys != null) {
        // Each element of partVals is one partition's full value tuple.
        for (List<String> partVal : partVals) {
            new PartitionBuilder().inTable(table).setValues(partVal).addToTable(client, conf);
        }
    }
    List<String> partNames = new ArrayList<>();
    if (partKeys != null) {
        // NOTE(review): this loop indexes partVals by partition-key position
        // (partVals.get(i) for key i), whereas the creation loop above treats each
        // element of partVals as one partition's value tuple. The two readings only
        // agree for the shapes the current callers pass — verify the intended
        // structure of partVals before reusing this helper.
        for (int i = 0; i < partKeys.size(); i++) {
            String partKey = partKeys.get(i);
            for (String partVal : partVals.get(i)) {
                String partName = partKey + "=" + partVal;
                partNames.add(partName);
            }
        }
    }
    client.flushCache();
    return partNames;
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) ArrayList(java.util.ArrayList) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database)

Example 54 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

In the class TestStats, the method createMetadata:

/**
 * Creates (as needed) a catalog and database, then a table with the given columns,
 * optionally partitioned on a single string column, adds one partition per value in
 * {@code partVals}, and records column statistics for the table or each partition.
 *
 * @return the generated "key=value" partition names (empty for unpartitioned tables)
 */
private List<String> createMetadata(String catName, String dbName, String tableName, String partKey, List<String> partVals, Map<String, Column> colMap) throws TException {
    if (!DEFAULT_CATALOG_NAME.equals(catName) && !NO_CAT.equals(catName)) {
        Catalog catalog = new CatalogBuilder()
                .setName(catName)
                .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
                .build();
        client.createCatalog(catalog);
    }
    Database database;
    if (DEFAULT_DATABASE_NAME.equals(dbName)) {
        database = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
    } else {
        DatabaseBuilder dbBuilder = new DatabaseBuilder().setName(dbName);
        if (!NO_CAT.equals(catName)) {
            dbBuilder.setCatalogName(catName);
        }
        database = dbBuilder.create(client, conf);
    }
    TableBuilder tableBuilder = new TableBuilder().inDb(database).setTableName(tableName);
    for (Column column : colMap.values()) {
        tableBuilder.addCol(column.colName, column.colType);
    }
    if (partKey != null) {
        assert partVals != null && !partVals.isEmpty() : "Must provide partition values for partitioned table";
        tableBuilder.addPartCol(partKey, ColumnType.STRING_TYPE_NAME);
    }
    Table table = tableBuilder.create(client, conf);
    if (partKey != null) {
        for (String value : partVals) {
            new PartitionBuilder().inTable(table).addValue(value).addToTable(client, conf);
        }
    }
    // Build and submit statistics: one entry for the whole table, or one per partition.
    SetPartitionsStatsRequest request = new SetPartitionsStatsRequest();
    List<String> partNames = new ArrayList<>();
    if (partKey == null) {
        request.addToColStats(buildStatsForOneTableOrPartition(catName, dbName, tableName, null, colMap.values()));
    } else {
        for (String value : partVals) {
            String partName = partKey + "=" + value;
            request.addToColStats(buildStatsForOneTableOrPartition(catName, dbName, tableName, partName, colMap.values()));
            partNames.add(partName);
        }
    }
    request.setEngine(ENGINE);
    client.setPartitionColumnStatistics(request);
    return partNames;
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Table(org.apache.hadoop.hive.metastore.api.Table) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) ArrayList(java.util.ArrayList) SetPartitionsStatsRequest(org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog)

Example 55 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

In the class TestHmsServerAuthorization, the method creatEnv:

/**
 * Sets up the test environment; called in each test after the configuration is set.
 * Drops dbName1/dbName2 if present, recreates both in the default catalog, creates
 * TAB1 (unpartitioned) and TAB2 (partitioned by "name") in dbName1, and adds two
 * partitions ("value1", "value2") to TAB2.
 * <p>
 * Note: the method name "creatEnv" (sic) is kept unchanged because existing tests
 * call it by this name.
 *
 * @param conf configuration passed to the metastore builders
 * @throws Exception on any metastore failure
 */
protected void creatEnv(Configuration conf) throws Exception {
    // Start from a clean slate. NOTE(review): the three boolean flags presumably
    // mean deleteData / ignoreUnknownDb / cascade — confirm against
    // IMetaStoreClient.dropDatabase before relying on them.
    client.dropDatabase(dbName1, true, true, true);
    client.dropDatabase(dbName2, true, true, true);
    // The created Database objects were previously bound to unused locals (db1, db2);
    // only the side effect of creation is needed.
    new DatabaseBuilder().setName(dbName1).setCatalogName(Warehouse.DEFAULT_CATALOG_NAME).create(client, conf);
    new DatabaseBuilder().setName(dbName2).setCatalogName(Warehouse.DEFAULT_CATALOG_NAME).create(client, conf);
    new TableBuilder().setDbName(dbName1).setTableName(TAB1).addCol("id", "int").addCol("name", "string").create(client, conf);
    Table tab2 = new TableBuilder().setDbName(dbName1).setTableName(TAB2).addCol("id", "int").addPartCol("name", "string").create(client, conf);
    new PartitionBuilder().inTable(tab2).addValue("value1").addToTable(client, conf);
    new PartitionBuilder().inTable(tab2).addValue("value2").addToTable(client, conf);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Table(org.apache.hadoop.hive.metastore.api.Table) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder)

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)136 Table (org.apache.hadoop.hive.metastore.api.Table)111 Test (org.junit.Test)92 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)81 Database (org.apache.hadoop.hive.metastore.api.Database)40 Partition (org.apache.hadoop.hive.metastore.api.Partition)36 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)35 PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder)33 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)31 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)30 ArrayList (java.util.ArrayList)28 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)27 SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable)25 CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder)23 Path (org.apache.hadoop.fs.Path)19 Catalog (org.apache.hadoop.hive.metastore.api.Catalog)19 Type (org.apache.hadoop.hive.metastore.api.Type)19 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)17 TException (org.apache.thrift.TException)16 IOException (java.io.IOException)15