
Example 1 with DatabaseBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.

From the class TestObjectStoreSchemaMethods, method createUniqueDatabaseForTest:

private String createUniqueDatabaseForTest() throws MetaException, InvalidObjectException {
    String dbName = "uniquedbfortest" + dbNum++;
    Database db = new DatabaseBuilder().setName(dbName).setLocation("somewhere").setDescription("descriptive").build();
    objectStore.createDatabase(db);
    return dbName;
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Database (org.apache.hadoop.hive.metastore.api.Database)
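
As a hedged follow-up, the sketch below shows one way a test could consume the database created above: look it up through the same ObjectStore instance, check the description set by the builder, and drop it again. The getDatabase/dropDatabase calls and the JUnit Assert usage are assumptions based on how ObjectStore is used in these tests, not part of the original excerpt.

// Illustrative sketch only: assumes objectStore.getDatabase(String) and
// objectStore.dropDatabase(String) exist in this Hive version, and that
// org.junit.Assert is available. The method name is made up.
private void verifyAndDropUniqueDatabase() throws Exception {
    String dbName = createUniqueDatabaseForTest();
    Database db = objectStore.getDatabase(dbName);
    Assert.assertEquals("descriptive", db.getDescription());
    objectStore.dropDatabase(dbName);
}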

Example 2 with DatabaseBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.

From the class TestAddPartitions, method setUp:

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(DB_NAME, true, true, true);
    metaStore.cleanWarehouseDirs();
    Database db = new DatabaseBuilder().setName(DB_NAME).build();
    client.createDatabase(db);
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), Before (org.junit.Before)
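
The original excerpt only shows the setUp side. A plausible companion tear-down, sketched below under the assumption that the test uses JUnit's @After and that client is a metastore client with a close() method, would drop DB_NAME with the same flags and release the client.

// Hypothetical tear-down, not taken from the Hive sources: mirrors the cleanup
// already done in setUp and closes the metastore client afterwards.
@After
public void tearDown() throws Exception {
    try {
        client.dropDatabase(DB_NAME, true, true, true);
    } finally {
        client.close();
    }
}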

Example 3 with DatabaseBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.

From the class TestAddPartitionsFromPartSpec, method setUp:

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(DB_NAME, true, true, true);
    metaStore.cleanWarehouseDirs();
    Database db = new DatabaseBuilder().setName(DB_NAME).build();
    client.createDatabase(db);
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), Before (org.junit.Before)
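
Since this setUp only creates the database, a partitioned table is still needed before partitions can be added from a PartSpec. The sketch below shows how such a table could be built with TableBuilder, reusing calls that appear in Example 5; the table and column names here are illustrative, not the ones the Hive test actually uses.

// Illustrative only: builds a partitioned table in DB_NAME using TableBuilder
// methods seen elsewhere on this page (setDbName, setTableName, addCol, addPartCol).
private Table createPartitionedTable() throws Exception {
    Table table = new TableBuilder()
            .setDbName(DB_NAME)
            .setTableName("partitioned_table")
            .addCol("test_value", "string")
            .addPartCol("year", "string")
            .build();
    client.createTable(table);
    return client.getTable(DB_NAME, "partitioned_table");
}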

Example 4 with DatabaseBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.

From the class TestAlterPartitions, method createDB:

private void createDB(String dbName) throws TException {
    Database db = new DatabaseBuilder().setName(dbName).build();
    client.createDatabase(db);
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Database (org.apache.hadoop.hive.metastore.api.Database)
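
Beyond setName, the builder also accepts a description and a location, as Example 1 shows. A hedged variant of createDB using those setters could look like the sketch below; the description text and the warehousePath parameter are placeholders, not part of the original test.

// Variant sketch of createDB: setDescription and setLocation are the same
// builder calls used in Example 1; the argument values here are made up.
private void createDescribedDB(String dbName, String warehousePath) throws TException {
    Database db = new DatabaseBuilder()
            .setName(dbName)
            .setDescription("database for partition alter tests")
            .setLocation(warehousePath)
            .build();
    client.createDatabase(db);
}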

Example 5 with DatabaseBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.

From the class TestTablesCreateDropAlterTruncate, method setUp:

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE)) {
        client.dropTable(DEFAULT_DATABASE, tableName, true, true, true);
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    testTables[0] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("test_table")
            .addCol("test_col", "int").build();
    testTables[1] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("test_view")
            .addCol("test_col", "int").setType("VIRTUAL_VIEW").build();
    testTables[2] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("test_table_to_find_1")
            .addCol("test_col", "int").build();
    testTables[3] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("test_partitioned_table")
            .addCol("test_col1", "int").addCol("test_col2", "int")
            .addPartCol("test_part_col", "int").build();
    testTables[4] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("external_table_for_test")
            .addCol("test_col", "int").setLocation(metaStore.getWarehouseRoot() + "/external/table_dir")
            .addTableParam("EXTERNAL", "TRUE").setType("EXTERNAL_TABLE").build();
    client.createDatabase(new DatabaseBuilder().setName(OTHER_DATABASE).build());
    testTables[5] = new TableBuilder().setDbName(OTHER_DATABASE).setTableName("test_table")
            .addCol("test_col", "int").build();
    // Create the tables in the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        client.createTable(testTables[i]);
    }
    // Create partitions for the partitioned table
    for (int i = 0; i < 3; i++) {
        Partition partition = new PartitionBuilder().fromTable(testTables[3]).addValue("a" + i).build();
        client.add_partition(partition);
    }
    // Add data files to the partitioned table
    List<Partition> partitions = client.listPartitions(testTables[3].getDbName(), testTables[3].getTableName(), (short) -1);
    for (Partition partition : partitions) {
        Path dataFile = new Path(partition.getSd().getLocation().toString() + "/dataFile");
        metaStore.createFile(dataFile, "100");
    }
    // Reload tables from the MetaStore, and create data files
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getDbName(), testTables[i].getTableName());
        if (testTables[i].getPartitionKeys().isEmpty()) {
            if (testTables[i].getSd().getLocation() != null) {
                Path dataFile = new Path(testTables[i].getSd().getLocation().toString() + "/dataFile");
                metaStore.createFile(dataFile, "100");
            }
        }
    }
    partitionedTable = testTables[3];
    externalTable = testTables[4];
}
Also used: Path (org.apache.hadoop.fs.Path), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Partition (org.apache.hadoop.hive.metastore.api.Partition), PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Before (org.junit.Before)
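
Given this fixture, a test can lean on the fact that setUp added exactly three partitions to test_partitioned_table. The check below is a sketch of such an assertion, assuming org.junit.Assert and the same listPartitions call already used in setUp; it is not one of the tests actually defined in TestTablesCreateDropAlterTruncate.

// Sketch only: verifies the three partitions created in setUp via the same
// listPartitions(dbName, tableName, maxParts) call used above.
@Test
public void partitionedTableHasThreePartitions() throws Exception {
    List<Partition> parts = client.listPartitions(
            partitionedTable.getDbName(), partitionedTable.getTableName(), (short) -1);
    Assert.assertEquals(3, parts.size());
}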

Aggregations

DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 31
Database (org.apache.hadoop.hive.metastore.api.Database): 26
Test (org.junit.Test): 12
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 9
Before (org.junit.Before): 9
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 8
Partition (org.apache.hadoop.hive.metastore.api.Partition): 6
Table (org.apache.hadoop.hive.metastore.api.Table): 6
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 6
ISchema (org.apache.hadoop.hive.metastore.api.ISchema): 5
ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder): 5
SchemaVersion (org.apache.hadoop.hive.metastore.api.SchemaVersion): 4
SchemaVersionBuilder (org.apache.hadoop.hive.metastore.client.builder.SchemaVersionBuilder): 4
MetastoreUnitTest (org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest): 3
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 3
ArrayList (java.util.ArrayList): 2
HashMap (java.util.HashMap): 2
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2
ListenerEvent (org.apache.hadoop.hive.metastore.events.ListenerEvent): 2
File (java.io.File): 1