Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.
In class TestTablesList, method setUp:
@Before
public void setUp() throws Exception {
  // Get new client
  client = metaStore.getClient();
  // Clean up the database
  client.dropDatabase(OTHER_DATABASE, true, true, true);
  // Drop every table in the default database
  for (String tableName : client.getAllTables(DEFAULT_DATABASE)) {
    client.dropTable(DEFAULT_DATABASE, tableName, true, true, true);
  }
  // Clean up trash
  metaStore.cleanWarehouseDirs();
  testTables[0] = new TableBuilder()
      .setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_0")
      .addCol("test_col", "int").setOwner("Owner1").setLastAccessTime(1000)
      .addTableParam("param1", "value1").build();
  testTables[1] = new TableBuilder()
      .setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_1")
      .addCol("test_col", "int").setOwner("Owner1").setLastAccessTime(2000)
      .addTableParam("param1", "value2").build();
  testTables[2] = new TableBuilder()
      .setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_2")
      .addCol("test_col", "int").setOwner("Owner2").setLastAccessTime(1000)
      .addTableParam("param1", "value2").build();
  testTables[3] = new TableBuilder()
      .setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_3")
      .addCol("test_col", "int").setOwner("Owner3").setLastAccessTime(3000)
      .addTableParam("param1", "value2").build();
  testTables[4] = new TableBuilder()
      .setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_4")
      .addCol("test_col", "int").setOwner("Tester").setLastAccessTime(2500)
      .addTableParam("param1", "value4").build();
  testTables[5] = new TableBuilder()
      .setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_5")
      .addCol("test_col", "int").build();
  client.createDatabase(new DatabaseBuilder().setName(OTHER_DATABASE).build());
  testTables[6] = new TableBuilder()
      .setDbName(OTHER_DATABASE).setTableName("filter_test_table_0")
      .addCol("test_col", "int").setOwner("Owner1").setLastAccessTime(1000)
      .addTableParam("param1", "value1").build();
  // Create the tables in the MetaStore
  for (int i = 0; i < testTables.length; i++) {
    client.createTable(testTables[i]);
  }
  // Reload tables from the MetaStore
  for (int i = 0; i < testTables.length; i++) {
    testTables[i] = client.getTable(testTables[i].getDbName(), testTables[i].getTableName());
  }
}
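The owners, last-access times, and table parameters set up here are the fields that metastore table filters can match on. As a hedged sketch (not part of the test above) of how such a fixture might be queried, assuming the IMetaStoreClient.listTableNamesByFilter method and the hive_metastoreConstants filter keys; the exact filter strings are illustrative:

// Hypothetical queries against the tables created in setUp():
// tables owned by "Owner1" in the default database
List<String> byOwner = client.listTableNamesByFilter(DEFAULT_DATABASE,
    hive_metastoreConstants.HIVE_FILTER_FIELD_OWNER + " = \"Owner1\"", (short) -1);
// tables whose "param1" table parameter equals "value2"
List<String> byParam = client.listTableNamesByFilter(DEFAULT_DATABASE,
    hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS + "param1 = \"value2\"", (short) -1);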
Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.
In class TestDatabases, method testAlterDatabaseNoSuchDatabase:
@Test(expected = NoSuchObjectException.class)
public void testAlterDatabaseNoSuchDatabase() throws Exception {
  Database newDatabase = new DatabaseBuilder().setName("test_database_altered").build();
  client.alterDatabase("no_such_database", newDatabase);
}
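For contrast, a minimal sketch of the successful alter path; the database name and description are illustrative, and it assumes DatabaseBuilder.setDescription and the Thrift-generated copy constructor on Database:

// Hypothetical happy path: alter a database that does exist
Database original = new DatabaseBuilder()
    .setName("test_database_to_alter").setDescription("original description").build();
client.createDatabase(original);
Database altered = new Database(original);          // Thrift copy constructor
altered.setDescription("altered description");
client.alterDatabase(original.getName(), altered);  // no exception expected here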
Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.
In class TestFunctions, method setUp:
@Before
public void setUp() throws Exception {
  // Get new client
  client = metaStore.getClient();
  // Clean up the database
  client.dropDatabase(OTHER_DATABASE, true, true, true);
  for (Function function : client.getAllFunctions().getFunctions()) {
    client.dropFunction(function.getDbName(), function.getFunctionName());
  }
  testFunctions[0] = new FunctionBuilder()
      .setDbName(DEFAULT_DATABASE).setName("test_function_to_find_1")
      .setClass(TEST_FUNCTION_CLASS)
      .addResourceUri(new ResourceUri(ResourceType.JAR, "hdfs:///tmp/jar1.jar"))
      .addResourceUri(new ResourceUri(ResourceType.FILE, "hdfs:///tmp/file1.txt"))
      .addResourceUri(new ResourceUri(ResourceType.ARCHIVE, "hdfs:///tmp/archive1.tgz"))
      .build();
  testFunctions[1] = new FunctionBuilder()
      .setDbName(DEFAULT_DATABASE).setName("test_function_to_find_2")
      .setClass(TEST_FUNCTION_CLASS).build();
  testFunctions[2] = new FunctionBuilder()
      .setDbName(DEFAULT_DATABASE).setName("test_function_hidden_1")
      .setClass(TEST_FUNCTION_CLASS).build();
  client.createDatabase(new DatabaseBuilder().setName(OTHER_DATABASE).build());
  testFunctions[3] = new FunctionBuilder()
      .setDbName(OTHER_DATABASE).setName("test_function_to_find_1")
      .setClass(TEST_FUNCTION_CLASS).build();
  // Create the functions, and reload them from the MetaStore
  for (int i = 0; i < testFunctions.length; i++) {
    client.createFunction(testFunctions[i]);
    testFunctions[i] =
        client.getFunction(testFunctions[i].getDbName(), testFunctions[i].getFunctionName());
  }
}
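The *_to_find_* versus *_hidden_* naming suggests pattern-based lookups. A hedged sketch of how these fixtures might be read back; the pattern string is illustrative, and it assumes IMetaStoreClient.getFunctions(dbName, pattern) returning matching function names:

// Hypothetical lookups against the functions created in setUp():
List<String> found = client.getFunctions(DEFAULT_DATABASE, "test_function_to_find*");
Function reloaded = client.getFunction(DEFAULT_DATABASE, "test_function_to_find_1");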
Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.
In class TestGetPartitions, method createDB:
private void createDB(String dbName) throws TException {
  Database db = new DatabaseBuilder().setName(dbName).build();
  client.createDatabase(db);
}
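A hedged sketch of how such a helper is typically combined with a partitioned table in partition tests; the database and table names are illustrative, and it assumes TableBuilder.addPartCol for declaring partition columns:

// Hypothetical usage of createDB() in a partition test
createDB("partition_test_db");
Table table = new TableBuilder()
    .setDbName("partition_test_db").setTableName("partition_test_table")
    .addCol("test_col", "int")
    .addPartCol("part_col", "string")
    .build();
client.createTable(table);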
Use of org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder in project hive by apache.
In class TestListPartitions, method createDB:
private void createDB(String dbName) throws TException {
  Database db = new DatabaseBuilder().setName(dbName).build();
  client.createDatabase(db);
}
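Databases created by this helper are usually removed in teardown; a minimal sketch using the same dropDatabase overload seen in the setUp methods above (the database name is illustrative):

// Hypothetical teardown for a database created with createDB()
// deleteData = true, ignoreUnknownDb = true, cascade = true
client.dropDatabase("partition_test_db", true, true, true);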