Example usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project, taken from the class TestMetastoreTransformer, method testLocationBlank:
@Test
public void testLocationBlank() throws Exception {
  // A table created with a blank location should be accepted; the metastore
  // assigns a default location whose last path segment is the table name.
  Table tbl = new TableBuilder()
      .setTableName("locationBlank")
      .setCols(new ArrayList<FieldSchema>())
      .setLocation("")
      .build(conf);

  // Expected to execute the operation without any exceptions.
  client.createTable(tbl);

  Table created = client.getTable(tbl.getDbName(), tbl.getTableName().toLowerCase());
  assertEquals("locationblank", new File(created.getSd().getLocation()).getName());
}
Example usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project, taken from the class TestObjectStore, method createPartitionedTable:
/**
 * Creates DB1 database, TABLE1 table with 3 partitions.
 * @param withPrivileges Should we create privileges as well
 * @param withStatistics Should we create statistics as well
 */
private void createPartitionedTable(boolean withPrivileges, boolean withStatistics) throws Exception {
// Each direct ObjectStore call below is wrapped in deadline() so it runs
// under the metastore deadline timer; the timer is released when the
// AutoCloseable is closed by try-with-resources.
Database db1 = new DatabaseBuilder().setName(DB1).setDescription("description").setLocation("locationurl").build(conf);
try (AutoCloseable c = deadline()) {
objectStore.createDatabase(db1);
}
// Table with one int partition column plus several regular columns that
// double as bucket/skew/sort columns for the partitions created below.
Table tbl1 = new TableBuilder().setDbName(DB1).setTableName(TABLE1).addCol("test_col1", "int").addCol("test_col2", "int").addPartCol("test_part_col", "int").addCol("test_bucket_col", "int", "test bucket col comment").addCol("test_skewed_col", "int", "test skewed col comment").addCol("test_sort_col", "int", "test sort col comment").build(conf);
try (AutoCloseable c = deadline()) {
objectStore.createTable(tbl1);
}
// Accumulates per-partition privileges across the loop; they are granted
// in a single batch after all partitions exist.
PrivilegeBag privilegeBag = new PrivilegeBag();
// Create partitions for the partitioned table
for (int i = 0; i < 3; i++) {
// Partition values are "a0", "a1", "a2"; each partition also exercises
// serde params, SD params, bucket, skew and sort column settings.
Partition part = new PartitionBuilder().inTable(tbl1).addValue("a" + i).addSerdeParam("serdeParam", "serdeParamValue").addStorageDescriptorParam("sdParam", "sdParamValue").addBucketCol("test_bucket_col").addSkewedColName("test_skewed_col").addSortCol("test_sort_col", 1).build(conf);
try (AutoCloseable c = deadline()) {
objectStore.addPartition(part);
}
if (withPrivileges) {
// One privilege on the partition itself and one on its partition column,
// both granted to USER "a" with privilege string "a".
HiveObjectRef partitionReference = new HiveObjectRefBuilder().buildPartitionReference(part);
HiveObjectRef partitionColumnReference = new HiveObjectRefBuilder().buildPartitionColumnReference(tbl1, "test_part_col", part.getValues());
PrivilegeGrantInfo privilegeGrantInfo = new PrivilegeGrantInfoBuilder().setPrivilege("a").build();
HiveObjectPrivilege partitionPriv = new HiveObjectPrivilegeBuilder().setHiveObjectRef(partitionReference).setPrincipleName("a").setPrincipalType(PrincipalType.USER).setGrantInfo(privilegeGrantInfo).build();
privilegeBag.addToPrivileges(partitionPriv);
HiveObjectPrivilege partitionColPriv = new HiveObjectPrivilegeBuilder().setHiveObjectRef(partitionColumnReference).setPrincipleName("a").setPrincipalType(PrincipalType.USER).setGrantInfo(privilegeGrantInfo).build();
privilegeBag.addToPrivileges(partitionColPriv);
}
if (withStatistics) {
// Fabricated long-column statistics for the partition column of this
// partition (numNulls=1, numDVs=2, low=3, high=4).
ColumnStatistics stats = new ColumnStatistics();
ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
desc.setCatName(tbl1.getCatName());
desc.setDbName(tbl1.getDbName());
desc.setTableName(tbl1.getTableName());
desc.setPartName("test_part_col=a" + i);
stats.setStatsDesc(desc);
List<ColumnStatisticsObj> statsObjList = new ArrayList<>(1);
stats.setStatsObj(statsObjList);
stats.setEngine(ENGINE);
ColumnStatisticsData data = new ColumnStatisticsData();
LongColumnStatsData longStats = new LongColumnStatsData();
longStats.setNumNulls(1);
longStats.setNumDVs(2);
longStats.setLowValue(3);
longStats.setHighValue(4);
data.setLongStats(longStats);
ColumnStatisticsObj partStats = new ColumnStatisticsObj("test_part_col", "int", data);
statsObjList.add(partStats);
try (AutoCloseable c = deadline()) {
objectStore.updatePartitionColumnStatistics(stats, part.getValues(), null, -1);
}
}
}
if (withPrivileges) {
// Grant all collected partition/column privileges in one call.
try (AutoCloseable c = deadline()) {
objectStore.grantPrivileges(privilegeBag);
}
}
}
Example usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project, taken from the class TestPartitionManagement, method createMetadata:
/**
 * Creates a catalog (if not the default), a database (if not the default), and a table with
 * the given columns; when partition keys are supplied, also creates one partition per entry
 * of {@code partVals}.
 *
 * @param catName      catalog name; created unless it is the default catalog
 * @param dbName       database name; created unless it is the default database
 * @param tableName    table to create
 * @param partKeys     partition key names, or null for an unpartitioned table
 * @param partKeyTypes types for the partition keys, parallel to {@code partKeys}
 * @param partVals     one value-list per partition to create; each inner list is parallel to
 *                     {@code partKeys}
 * @param colMap       regular (non-partition) columns
 * @param isOrc        whether to configure ORC input/output formats
 * @return the names ("k1=v1/k2=v2/...") of the partitions created, empty for an
 *         unpartitioned table
 * @throws TException on any metastore failure
 */
private List<String> createMetadata(String catName, String dbName, String tableName, List<String> partKeys, List<String> partKeyTypes, List<List<String>> partVals, Map<String, Column> colMap, boolean isOrc) throws TException {
  if (!DEFAULT_CATALOG_NAME.equals(catName)) {
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.createCatalog(cat);
  }
  Database db;
  if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
    DatabaseBuilder dbBuilder = new DatabaseBuilder().setName(dbName);
    dbBuilder.setCatalogName(catName);
    db = dbBuilder.create(client, conf);
  } else {
    db = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
  }
  TableBuilder tb = new TableBuilder().inDb(db).setTableName(tableName);
  if (isOrc) {
    tb.setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat").setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
  }
  for (Column col : colMap.values()) {
    tb.addCol(col.colName, col.colType);
  }
  if (partKeys != null) {
    if (partKeyTypes == null) {
      throw new IllegalArgumentException("partKeyTypes cannot be null when partKeys is non-null");
    }
    if (partKeys.size() != partKeyTypes.size()) {
      throw new IllegalArgumentException("partKeys and partKeyTypes size should be same");
    }
    if (partVals.isEmpty()) {
      throw new IllegalArgumentException("partVals cannot be empty for partitioned table");
    }
    for (int i = 0; i < partKeys.size(); i++) {
      tb.addPartCol(partKeys.get(i), partKeyTypes.get(i));
    }
  }
  Table table = tb.create(client, conf);
  if (partKeys != null) {
    for (List<String> partVal : partVals) {
      new PartitionBuilder().inTable(table).setValues(partVal).addToTable(client, conf);
    }
  }
  // Build one name per partition actually created above. The previous code paired
  // partition-key index i with partVals.get(i), but each element of partVals is the
  // value-list of ONE partition (see the creation loop), so the names neither covered
  // all partitions nor combined multiple partition keys.
  List<String> partNames = new ArrayList<>();
  if (partKeys != null) {
    for (List<String> partVal : partVals) {
      StringBuilder partName = new StringBuilder();
      for (int i = 0; i < partKeys.size(); i++) {
        if (i > 0) {
          partName.append('/');
        }
        partName.append(partKeys.get(i)).append('=').append(partVal.get(i));
      }
      partNames.add(partName.toString());
    }
  }
  client.flushCache();
  return partNames;
}
Example usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project, taken from the class TestStats, method createMetadata:
/**
 * Creates the catalog/database/table fixture and attaches column statistics, either to the
 * table itself (unpartitioned) or to one partition per entry of {@code partVals}.
 *
 * @param catName   catalog name; created unless it is the default catalog or NO_CAT
 * @param dbName    database name; created unless it is the default database
 * @param tableName table to create
 * @param partKey   single string partition key, or null for an unpartitioned table
 * @param partVals  values for the partition key, one partition per value
 * @param colMap    regular columns to add to the table
 * @return the partition names ("key=value") created, empty for an unpartitioned table
 * @throws TException on any metastore failure
 */
private List<String> createMetadata(String catName, String dbName, String tableName, String partKey, List<String> partVals, Map<String, Column> colMap) throws TException {
  // Create the catalog unless the default (or the "no catalog" sentinel) was requested.
  if (!DEFAULT_CATALOG_NAME.equals(catName) && !NO_CAT.equals(catName)) {
    client.createCatalog(new CatalogBuilder()
        .setName(catName)
        .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
        .build());
  }

  // Resolve or create the database; NO_CAT means "don't pin a catalog on the builder".
  Database db;
  if (DEFAULT_DATABASE_NAME.equals(dbName)) {
    db = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
  } else {
    DatabaseBuilder dbBuilder = new DatabaseBuilder().setName(dbName);
    if (!NO_CAT.equals(catName)) {
      dbBuilder.setCatalogName(catName);
    }
    db = dbBuilder.create(client, conf);
  }

  // Build the table, optionally partitioned by a single string column.
  TableBuilder tableBuilder = new TableBuilder().inDb(db).setTableName(tableName);
  for (Column column : colMap.values()) {
    tableBuilder.addCol(column.colName, column.colType);
  }
  boolean partitioned = partKey != null;
  if (partitioned) {
    assert partVals != null && !partVals.isEmpty() : "Must provide partition values for partitioned table";
    tableBuilder.addPartCol(partKey, ColumnType.STRING_TYPE_NAME);
  }
  Table table = tableBuilder.create(client, conf);
  if (partitioned) {
    for (String value : partVals) {
      new PartitionBuilder().inTable(table).addValue(value).addToTable(client, conf);
    }
  }

  // Attach column statistics: one entry for the table, or one per partition.
  SetPartitionsStatsRequest request = new SetPartitionsStatsRequest();
  List<String> partNames = new ArrayList<>();
  if (!partitioned) {
    request.addToColStats(buildStatsForOneTableOrPartition(catName, dbName, tableName, null, colMap.values()));
  } else {
    for (String value : partVals) {
      String partName = partKey + "=" + value;
      request.addToColStats(buildStatsForOneTableOrPartition(catName, dbName, tableName, partName, colMap.values()));
      partNames.add(partName);
    }
  }
  request.setEngine(ENGINE);
  client.setPartitionColumnStatistics(request);
  return partNames;
}
Example usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project, taken from the class TestHmsServerAuthorization, method creatEnv:
/**
 * Builds the test fixture: two fresh databases in the default catalog, plus an
 * unpartitioned table and a partitioned table (with two partitions) in the first
 * database. Called from each test case after its configuration has been set.
 * (Method name "creatEnv" is kept as-is — renaming would break subclasses/callers.)
 * @throws Exception if any metastore operation fails
 */
protected void creatEnv(Configuration conf) throws Exception {
  // Start from a clean slate; the boolean flags make the drops best-effort
  // (presumably deleteData / ignoreUnknownDb / cascade — confirm against IMetaStoreClient).
  client.dropDatabase(dbName1, true, true, true);
  client.dropDatabase(dbName2, true, true, true);

  new DatabaseBuilder()
      .setName(dbName1)
      .setCatalogName(Warehouse.DEFAULT_CATALOG_NAME)
      .create(client, conf);
  new DatabaseBuilder()
      .setName(dbName2)
      .setCatalogName(Warehouse.DEFAULT_CATALOG_NAME)
      .create(client, conf);

  // Unpartitioned table TAB1 and partitioned table TAB2 in the first database.
  new TableBuilder()
      .setDbName(dbName1)
      .setTableName(TAB1)
      .addCol("id", "int")
      .addCol("name", "string")
      .create(client, conf);
  Table partitionedTable = new TableBuilder()
      .setDbName(dbName1)
      .setTableName(TAB2)
      .addCol("id", "int")
      .addPartCol("name", "string")
      .create(client, conf);
  new PartitionBuilder().inTable(partitionedTable).addValue("value1").addToTable(client, conf);
  new PartitionBuilder().inTable(partitionedTable).addValue("value2").addToTable(client, conf);
}
Aggregations