Search in sources:

Example 11 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestSessionHiveMetastoreClientListPartitionsTempTable, method createTestTable.

/**
 * Creates a temporary test table with fixed "id"/"name" columns and the given
 * string-typed partition columns, optionally tagging it for partition-level
 * privilege testing.
 *
 * @param client metastore client used to persist the table
 * @param dbName database to create the table in
 * @param tableName name of the table to create
 * @param partCols names of the partition columns (all typed "string")
 * @param setPartitionLevelPrivileges whether to set the PART_PRIV table parameter
 * @return the locally built Table object that was sent to the metastore
 * @throws TException on metastore communication failure
 */
@Override
protected Table createTestTable(IMetaStoreClient client, String dbName, String tableName, List<String> partCols, boolean setPartitionLevelPrivileges) throws TException {
    TableBuilder tableBuilder = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tableName)
        .addCol("id", "int")
        .addCol("name", "string")
        .setTemporary(true);
    for (String partitionColumn : partCols) {
        tableBuilder.addPartCol(partitionColumn, "string");
    }
    Table table = tableBuilder.build(conf);
    // Flag the table so tests can exercise partition-level privilege handling.
    if (setPartitionLevelPrivileges) {
        table.putToParameters(PART_PRIV, "true");
    }
    client.createTable(table);
    return table;
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder)

Example 12 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestSessionHiveMetastoreClientAppendPartitionTempTable, method createTable.

/**
 * Creates a temporary table with two fixed columns plus the caller-supplied
 * partition columns, table type and location, then re-reads it from the
 * metastore so the returned object reflects server-side defaults.
 *
 * @param tableName name of the table to create in DB_NAME
 * @param partCols partition column schemas
 * @param tableParams optional table parameters; may be null
 * @param tableType table type (e.g. managed/external)
 * @param location storage location for the table
 * @return the table as fetched back from the metastore
 * @throws Exception on metastore failure
 */
@Override
protected Table createTable(String tableName, List<FieldSchema> partCols, Map<String, String> tableParams, String tableType, String location) throws Exception {
    TableBuilder tableBuilder = new TableBuilder()
        .setDbName(DB_NAME)
        .setTableName(tableName)
        .addCol("test_id", "int", "test col id")
        .addCol("test_value", "string", "test col value")
        .setPartCols(partCols)
        .setType(tableType)
        .setLocation(location)
        .setTemporary(true);
    // Table parameters are optional; only apply them when provided.
    if (tableParams != null) {
        tableBuilder.setTableParams(tableParams);
    }
    tableBuilder.create(getClient(), conf);
    return getClient().getTable(DB_NAME, tableName);
}
Also used : TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder)

Example 13 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestSessionHiveMetastoreClientExchangePartitionsTempTable, method createNonTempTable.

/**
 * Creates a non-temporary (persistent) table with two fixed data columns and the
 * given partition columns, then fetches it back from the metastore.
 *
 * @param dbName database to create the table in
 * @param tableName name of the table
 * @param partCols partition column schemas
 * @param location storage location for the table
 * @return the table as fetched back from the metastore
 * @throws Exception on metastore failure
 */
private Table createNonTempTable(String dbName, String tableName, List<FieldSchema> partCols, String location) throws Exception {
    // Fixed data columns; partition columns come from the caller.
    List<FieldSchema> columns = new ArrayList<>();
    columns.add(new FieldSchema("test_id", INT_COL_TYPE, "test col id"));
    columns.add(new FieldSchema("test_value", "string", "test col value"));
    TableBuilder tableBuilder = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tableName)
        .setCols(columns)
        .setPartCols(partCols)
        .setLocation(location)
        .setTemporary(false);
    tableBuilder.create(getClient(), getMetaStore().getConf());
    return getClient().getTable(dbName, tableName);
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder)

Example 14 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestTablesCreateDropAlterTruncate, method moveTablesBetweenCatalogsOnAlter.

/**
 * Verifies that altering a table to a different catalog is rejected: the
 * alter_table call must throw InvalidOperationException (asserted via the
 * {@code expected} attribute of the @Test annotation).
 */
@Test(expected = InvalidOperationException.class)
public void moveTablesBetweenCatalogsOnAlter() throws TException {
    // Create a dedicated catalog for this test.
    String catalogName = "move_table_between_catalogs_on_alter";
    Catalog catalog = new CatalogBuilder()
        .setName(catalogName)
        .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catalogName))
        .build();
    client.createCatalog(catalog);
    // For this one don't specify a location to make sure it gets put in the catalog directory
    String databaseName = "a_db";
    Database database = new DatabaseBuilder()
        .setName(databaseName)
        .setCatalogName(catalogName)
        .create(client, metaStore.getConf());
    // Create a table in the new catalog, then attempt to move it to the default
    // catalog via alter_table; this is the operation expected to fail.
    String tableName = "non_movable_table";
    Table original = new TableBuilder()
        .inDb(database)
        .setTableName(tableName)
        .addCol("col1", ColumnType.STRING_TYPE_NAME)
        .addCol("col2", ColumnType.INT_TYPE_NAME)
        .create(client, metaStore.getConf());
    Table moved = original.deepCopy();
    moved.setCatName(DEFAULT_CATALOG_NAME);
    client.alter_table(catalogName, databaseName, tableName, moved);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) TestHiveMetaStore.createSourceTable(org.apache.hadoop.hive.metastore.TestHiveMetaStore.createSourceTable) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 15 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestTablesCreateDropAlterTruncate, method setUp.

/**
 * Resets the metastore to a known state and builds the test fixture:
 * testTables[0..4] in the default database (plain tables, a view, a
 * partitioned table, an external table) and testTables[5] in OTHER_DATABASE,
 * plus partitions and data files for the partitioned table.
 *
 * NOTE(review): relies on class fields (client, metaStore, testTables,
 * partitionedTable, externalTable) declared outside this view.
 */
@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE)) {
        client.dropTable(DEFAULT_DATABASE, tableName, true, true, true);
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    // Fixture tables in the default database. Indices matter: tests below
    // reference testTables[3] (partitioned) and testTables[4] (external).
    testTables[0] = new TableBuilder().setTableName("test_table").addCol("test_col", "int").create(client, metaStore.getConf());
    testTables[1] = new TableBuilder().setTableName("test_view").addCol("test_col", "int").setType("VIRTUAL_VIEW").create(client, metaStore.getConf());
    testTables[2] = new TableBuilder().setTableName("test_table_to_find_1").addCol("test_col", "int").create(client, metaStore.getConf());
    testTables[3] = new TableBuilder().setTableName("test_partitioned_table").addCol("test_col1", "int").addCol("test_col2", "int").addPartCol("test_part_col", "int").create(client, metaStore.getConf());
    testTables[4] = new TableBuilder().setTableName("external_table_for_test").addCol("test_col", "int").setLocation(metaStore.getExternalWarehouseRoot() + "/external/table_dir").addTableParam("EXTERNAL", "TRUE").setType("EXTERNAL_TABLE").create(client, metaStore.getConf());
    // One table in a second database, to exercise cross-database behavior.
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    testTables[5] = new TableBuilder().setDbName(OTHER_DATABASE).setTableName("test_table").addCol("test_col", "int").create(client, metaStore.getConf());
    // Create partitions for the partitioned table
    for (int i = 0; i < 2; i++) {
        new PartitionBuilder().inTable(testTables[3]).addValue("a" + i).addToTable(client, metaStore.getConf());
    }
    // Add an external partition too
    new PartitionBuilder().inTable(testTables[3]).addValue("a2").setLocation(metaStore.getWarehouseRoot() + "/external/a2").addToTable(client, metaStore.getConf());
    // Add data files to the partitioned table
    List<Partition> partitions = client.listPartitions(testTables[3].getDbName(), testTables[3].getTableName(), (short) -1);
    for (Partition partition : partitions) {
        Path dataFile = new Path(partition.getSd().getLocation() + "/dataFile");
        metaStore.createFile(dataFile, "100");
    }
    // Reload tables from the MetaStore, and create data files
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getDbName(), testTables[i].getTableName());
        // Only unpartitioned tables with a storage location get a data file here;
        // partitioned-table data files were created per-partition above.
        if (testTables[i].getPartitionKeys().isEmpty()) {
            if (testTables[i].getSd().getLocation() != null) {
                Path dataFile = new Path(testTables[i].getSd().getLocation() + "/dataFile");
                metaStore.createFile(dataFile, "100");
            }
        }
    }
    // Convenience aliases used by individual tests.
    partitionedTable = testTables[3];
    externalTable = testTables[4];
}
Also used : Path(org.apache.hadoop.fs.Path) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Partition(org.apache.hadoop.hive.metastore.api.Partition) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Before(org.junit.Before)

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)136 Table (org.apache.hadoop.hive.metastore.api.Table)111 Test (org.junit.Test)92 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)81 Database (org.apache.hadoop.hive.metastore.api.Database)40 Partition (org.apache.hadoop.hive.metastore.api.Partition)36 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)35 PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder)33 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)31 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)30 ArrayList (java.util.ArrayList)28 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)27 SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable)25 CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder)23 Path (org.apache.hadoop.fs.Path)19 Catalog (org.apache.hadoop.hive.metastore.api.Catalog)19 Type (org.apache.hadoop.hive.metastore.api.Type)19 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)17 TException (org.apache.thrift.TException)16 IOException (java.io.IOException)15