Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.
From the class TestTablesList, method otherCatalogs:
@Test
public void otherCatalogs() throws TException {
  String catName = "list_tables_in_other_catalogs";
  Catalog cat = new CatalogBuilder()
      .setName(catName)
      .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
      .build();
  client.createCatalog(cat);
  String dbName = "db_in_other_catalog";
  // For this one don't specify a location to make sure it gets put in the catalog directory
  Database db = new DatabaseBuilder()
      .setName(dbName)
      .setCatalogName(catName)
      .create(client, metaStore.getConf());
  String[] tableNames = new String[4];
  for (int i = 0; i < tableNames.length; i++) {
    tableNames[i] = "table_in_other_catalog_" + i;
    TableBuilder builder = new TableBuilder()
        .inDb(db)
        .setTableName(tableNames[i])
        .addCol("col1_" + i, ColumnType.STRING_TYPE_NAME)
        .addCol("col2_" + i, ColumnType.INT_TYPE_NAME);
    if (i == 0) {
      builder.addTableParam("the_key", "the_value");
    }
    builder.create(client, metaStore.getConf());
  }
  String filter = hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS + "the_key=\"the_value\"";
  List<String> fetchedNames = client.listTableNamesByFilter(catName, dbName, filter, (short) -1);
  Assert.assertEquals(1, fetchedNames.size());
  Assert.assertEquals(tableNames[0], fetchedNames.get(0));
}
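The filter passed to listTableNamesByFilter is a small expression over pseudo-columns defined in hive_metastoreConstants; the test above matches on a table parameter. Below is a minimal sketch of the same catalog-scoped call keyed on the table owner instead. It assumes the HIVE_FILTER_FIELD_OWNER constant is available alongside HIVE_FILTER_FIELD_PARAMS, and the helper name is hypothetical.

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.thrift.TException;

public class TableFilterSketch {
  // Hypothetical helper: list the tables in a database of the given catalog whose owner
  // matches the given name, using the same filter mechanism as the test above.
  static List<String> tablesOwnedBy(IMetaStoreClient client, String catName, String dbName,
      String owner) throws TException {
    String filter = hive_metastoreConstants.HIVE_FILTER_FIELD_OWNER + " = \"" + owner + "\"";
    // -1 means no limit on the number of table names returned
    return client.listTableNamesByFilter(catName, dbName, filter, (short) -1);
  }
}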
Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.
From the class TestTablesList, method setUp:
@Before
public void setUp() throws Exception {
  // Get new client
  client = metaStore.getClient();
  // Clean up the database
  client.dropDatabase(OTHER_DATABASE, true, true, true);
  // Drop every table in the default database
  for (String tableName : client.getAllTables(DEFAULT_DATABASE)) {
    client.dropTable(DEFAULT_DATABASE, tableName, true, true, true);
  }
  // Clean up trash
  metaStore.cleanWarehouseDirs();
  testTables[0] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_0")
      .addCol("test_col", "int").setOwner("Owner1").setLastAccessTime(1000)
      .addTableParam("param1", "value1").create(client, metaStore.getConf());
  testTables[1] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_1")
      .addCol("test_col", "int").setOwner("Owner1").setLastAccessTime(2000)
      .addTableParam("param1", "value2").create(client, metaStore.getConf());
  testTables[2] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_2")
      .addCol("test_col", "int").setOwner("Owner2").setLastAccessTime(1000)
      .addTableParam("param1", "value2").create(client, metaStore.getConf());
  testTables[3] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_3")
      .addCol("test_col", "int").setOwner("Owner3").setLastAccessTime(3000)
      .addTableParam("param1", "value2").create(client, metaStore.getConf());
  testTables[4] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_4")
      .addCol("test_col", "int").setOwner("Tester").setLastAccessTime(2500)
      .addTableParam("param1", "value4").create(client, metaStore.getConf());
  testTables[5] = new TableBuilder().setDbName(DEFAULT_DATABASE).setTableName("filter_test_table_5")
      .addCol("test_col", "int").create(client, metaStore.getConf());
  new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
  testTables[6] = new TableBuilder().setDbName(OTHER_DATABASE).setTableName("filter_test_table_0")
      .addCol("test_col", "int").setOwner("Owner1").setLastAccessTime(1000)
      .addTableParam("param1", "value1").create(client, metaStore.getConf());
  // Reload tables from the MetaStore
  for (int i = 0; i < testTables.length; i++) {
    testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(),
        testTables[i].getTableName());
  }
}
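These seven fixtures differ only in owner, last-access time, and the param1 value, so each filter test in the class can select a distinct subset. As a rough illustration, written as if it were another test in this class (it is not one of its actual tests, and it assumes the HIVE_FILTER_FIELD_LAST_ACCESS constant and that the filter grammar accepts a numeric comparison):

@Test
public void listRecentlyAccessedTables() throws Exception {
  // Hypothetical illustration: select fixtures whose lastAccessTime is above 2000,
  // i.e. filter_test_table_3 (3000) and filter_test_table_4 (2500).
  String filter = hive_metastoreConstants.HIVE_FILTER_FIELD_LAST_ACCESS + " > 2000";
  List<String> names = client.listTableNamesByFilter(DEFAULT_DATABASE, filter, (short) -1);
  Assert.assertEquals(2, names.size());
}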
Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.
From the class TestHiveMetaStore, method testTableDatabase:
@Test
public void testTableDatabase() throws Exception {
  String dbName = "testDb";
  String tblName_1 = "testTbl_1";
  String tblName_2 = "testTbl_2";
  try {
    silentDropDatabase(dbName);
    String extWarehouse = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE_EXTERNAL);
    LOG.info("external warehouse set to:" + extWarehouse);
    if (extWarehouse == null || extWarehouse.trim().isEmpty()) {
      extWarehouse = "/tmp/external";
    }
    String dbLocation = extWarehouse + "/_testDB_table_database_";
    String mgdLocation = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/_testDB_table_database_";
    new DatabaseBuilder()
        .setName(dbName)
        .setLocation(dbLocation)
        .setManagedLocation(mgdLocation)
        .create(client, conf);
    Database db = client.getDatabase(dbName);
    Table tbl = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName_1)
        .setType(TableType.EXTERNAL_TABLE.name())
        .addCol("name", ColumnType.STRING_TYPE_NAME)
        .addCol("income", ColumnType.INT_TYPE_NAME)
        .addTableParam("EXTERNAL", "TRUE")
        .create(client, conf);
    tbl = client.getTable(dbName, tblName_1);
    Path path = new Path(tbl.getSd().getLocation());
    System.err.println("Table's location " + path + ", Database's location " + db.getLocationUri());
    assertEquals("Table type is expected to be EXTERNAL", TableType.EXTERNAL_TABLE.name(), tbl.getTableType());
    assertEquals("Table location is not a subset of the database location",
        db.getLocationUri(), path.getParent().toString());
  } catch (Exception e) {
    System.err.println(StringUtils.stringifyException(e));
    System.err.println("testTableDatabase() failed.");
    throw e;
  } finally {
    silentDropDatabase(dbName);
  }
}
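The dbLocation and mgdLocation strings above are derived from the WAREHOUSE_EXTERNAL and WAREHOUSE settings of the metastore configuration. A minimal configuration sketch follows, assuming one wants to pin both warehouse roots in a fresh test configuration; the class name and the /tmp paths are hypothetical, and setVar mirrors the getVar calls used in the test.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;

public class WarehouseConfSketch {
  // Hypothetical setup: fix both warehouse roots so the database locations computed
  // in the test above are deterministic.
  static Configuration warehouseConf() {
    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setVar(conf, ConfVars.WAREHOUSE, "/tmp/managed");
    MetastoreConf.setVar(conf, ConfVars.WAREHOUSE_EXTERNAL, "/tmp/external");
    return conf;
  }
}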
Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.
From the class TestHiveMetaStore, method testRenamePartition:
@Test
public void testRenamePartition() throws Throwable {
  try {
    String dbName = "compdb1";
    String tblName = "comptbl1";
    List<String> vals = new ArrayList<>(2);
    vals.add("2011-07-11");
    vals.add("8");
    String part_path = "/ds=2011-07-11/hr=8";
    List<String> tmp_vals = new ArrayList<>(2);
    tmp_vals.add("tmp_2011-07-11");
    tmp_vals.add("-8");
    String part2_path = "/ds=tmp_2011-07-11/hr=-8";
    client.dropTable(dbName, tblName);
    silentDropDatabase(dbName);
    new DatabaseBuilder()
        .setName(dbName)
        .setDescription("Rename Partition Test database")
        .create(client, conf);
    Table tbl = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .addCol("name", ColumnType.STRING_TYPE_NAME)
        .addCol("income", ColumnType.INT_TYPE_NAME)
        .addPartCol("ds", ColumnType.STRING_TYPE_NAME)
        .addPartCol("hr", ColumnType.INT_TYPE_NAME)
        .create(client, conf);
    if (isThriftClient) {
      // the createTable() above does not update the location in the 'tbl'
      // object when the client is a thrift client and the code below relies
      // on the location being present in the 'tbl' object - so get the table
      // from the metastore
      tbl = client.getTable(dbName, tblName);
    }
    Partition part = new Partition();
    part.setDbName(dbName);
    part.setTableName(tblName);
    part.setValues(vals);
    part.setParameters(new HashMap<>());
    part.setSd(tbl.getSd().deepCopy());
    part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
    part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
    part.getParameters().put("retention", "10");
    part.getSd().setNumBuckets(12);
    part.getSd().getSerdeInfo().getParameters().put("abc", "1");
    client.add_partition(part);
    part.setValues(tmp_vals);
    client.renamePartition(dbName, tblName, vals, part);
    boolean exceptionThrown = false;
    try {
      Partition p = client.getPartition(dbName, tblName, vals);
    } catch (Exception e) {
      assertEquals("partition should not have existed", NoSuchObjectException.class, e.getClass());
      exceptionThrown = true;
    }
    assertTrue("Expected NoSuchObjectException", exceptionThrown);
    Partition part3 = client.getPartition(dbName, tblName, tmp_vals);
    assertEquals("couldn't rename partition", part3.getParameters().get("retention"), "10");
    assertEquals("couldn't rename partition", part3.getSd().getSerdeInfo().getParameters().get("abc"), "1");
    assertEquals("couldn't rename partition", part3.getSd().getNumBuckets(), 12);
    assertEquals("new partition sd matches", part3.getSd().getLocation(), tbl.getSd().getLocation() + part2_path);
    part.setValues(vals);
    client.renamePartition(dbName, tblName, tmp_vals, part);
    exceptionThrown = false;
    try {
      Partition p = client.getPartition(dbName, tblName, tmp_vals);
    } catch (Exception e) {
      assertEquals("partition should not have existed", NoSuchObjectException.class, e.getClass());
      exceptionThrown = true;
    }
    assertTrue("Expected NoSuchObjectException", exceptionThrown);
    part3 = client.getPartition(dbName, tblName, vals);
    assertEquals("couldn't rename partition", part3.getParameters().get("retention"), "10");
    assertEquals("couldn't rename partition", part3.getSd().getSerdeInfo().getParameters().get("abc"), "1");
    assertEquals("couldn't rename partition", part3.getSd().getNumBuckets(), 12);
    assertEquals("new partition sd matches", part3.getSd().getLocation(), tbl.getSd().getLocation() + part_path);
    client.dropTable(dbName, tblName);
    client.dropDatabase(dbName);
  } catch (Exception e) {
    System.err.println(StringUtils.stringifyException(e));
    System.err.println("testRenamePartition() failed.");
    throw e;
  }
}
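The rename itself is driven by renamePartition, which takes the old value list plus a Partition object already carrying the new values. A compact sketch of the same rename-and-verify cycle the test performs twice, factored into a hypothetical helper (the class and method names are not part of the Hive sources):

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.thrift.TException;

public class RenamePartitionSketch {
  // Hypothetical helper: rename a partition by value list and verify that the old
  // spec no longer resolves, mirroring the checks in the test above.
  static Partition renameAndVerify(IMetaStoreClient client, String dbName, String tblName,
      List<String> oldVals, List<String> newVals) throws TException {
    Partition part = client.getPartition(dbName, tblName, oldVals);
    // renamePartition takes the old value list plus a Partition carrying the new values
    part.setValues(newVals);
    client.renamePartition(dbName, tblName, oldVals, part);
    try {
      client.getPartition(dbName, tblName, oldVals);
      throw new IllegalStateException("old partition spec should no longer exist");
    } catch (NoSuchObjectException expected) {
      // expected: the old spec is gone after the rename
    }
    return client.getPartition(dbName, tblName, newVals);
  }
}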
Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.
From the class TestHiveMetaStore, method testValidateTableCols:
@Test
public void testValidateTableCols() throws Throwable {
  try {
    String dbName = "compdb";
    String tblName = "comptbl";
    client.dropTable(dbName, tblName);
    silentDropDatabase(dbName);
    new DatabaseBuilder()
        .setName(dbName)
        .setDescription("Validate Table Columns test")
        .create(client, conf);
    Table tbl = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .addCol("name", ColumnType.STRING_TYPE_NAME)
        .addCol("income", ColumnType.INT_TYPE_NAME)
        .create(client, conf);
    if (isThriftClient) {
      tbl = client.getTable(dbName, tblName);
    }
    List<String> expectedCols = Lists.newArrayList();
    expectedCols.add("name");
    ObjectStore objStore = new ObjectStore();
    try {
      objStore.validateTableCols(tbl, expectedCols);
    } catch (MetaException ex) {
      throw new RuntimeException(ex);
    }
    expectedCols.add("doesntExist");
    boolean exceptionFound = false;
    try {
      objStore.validateTableCols(tbl, expectedCols);
    } catch (MetaException ex) {
      assertEquals(ex.getMessage(), "Column doesntExist doesn't exist in table comptbl in database compdb");
      exceptionFound = true;
    }
    assertTrue(exceptionFound);
  } catch (Exception e) {
    System.err.println(StringUtils.stringifyException(e));
    System.err.println("testValidateTableCols() failed.");
    throw e;
  }
}
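validateTableCols is a server-side ObjectStore check. Below is a hedged sketch of an equivalent client-side check against the table's storage descriptor, using only the Thrift Table object; the class and method names are hypothetical and the error message simply imitates the one asserted above.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;

public class ColumnCheckSketch {
  // Hypothetical client-side analogue of ObjectStore.validateTableCols:
  // fail if any requested column is missing from the table's storage descriptor.
  static void checkColumnsExist(Table tbl, List<String> requestedCols) throws MetaException {
    Set<String> actual = tbl.getSd().getCols().stream()
        .map(FieldSchema::getName)
        .collect(Collectors.toSet());
    for (String col : requestedCols) {
      if (!actual.contains(col)) {
        throw new MetaException("Column " + col + " doesn't exist in table "
            + tbl.getTableName() + " in database " + tbl.getDbName());
      }
    }
  }
}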