Usage example of org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder in the Apache Hive project.
From the class TestAppendPartitions, method createPartition.
/**
 * Registers a single partition for the given table with the metastore.
 *
 * @param table  the partitioned table the new partition belongs to
 * @param values the partition key values, in partition-column order
 * @throws Exception if the metastore rejects the partition
 */
private void createPartition(Table table, List<String> values) throws Exception {
  client.add_partition(
      new PartitionBuilder().fromTable(table).setValues(values).build());
}
Usage example of org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder in the Apache Hive project.
From the class TestSchemaToolCatalogOps, method moveTableWithinCatalog.
/**
 * Verifies that SchemaTool's {@code -moveTable} task relocates a partitioned
 * table — together with its partition metadata — to another database inside
 * the same catalog.
 */
@Test
public void moveTableWithinCatalog() throws TException, HiveMetaException {
  final String targetDb = "moveTableWithinCatalogDb";
  final String tblName = "moveTableWithinCatalogTable";
  final String partitionValue = "moveTableWithinCatalogKey";

  // Set up: destination database, plus a partitioned table (with one
  // partition) in the default database of the default catalog.
  new DatabaseBuilder().setName(targetDb).create(client, conf);
  Table table = new TableBuilder()
      .setTableName(tblName)
      .addCol("a", "int")
      .addPartCol("p", "string")
      .create(client, conf);
  new PartitionBuilder().inTable(table).addValue(partitionValue).addToTable(client, conf);

  // Execute the move; source and target catalog are both the default one.
  String args = String.format(
      "-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s",
      tblName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, targetDb);
  execute(new SchemaToolTaskMoveTable(), args);

  // The table must now be resolvable in the target database.
  Table movedTable = client.getTable(DEFAULT_CATALOG_NAME, targetDb, tblName);
  Assert.assertNotNull(movedTable);
  Assert.assertEquals(DEFAULT_CATALOG_NAME, movedTable.getCatName());
  Assert.assertEquals(targetDb.toLowerCase(), movedTable.getDbName());

  // Its partition must have been carried along with it.
  Partition movedPart = client.getPartition(
      DEFAULT_CATALOG_NAME, targetDb, tblName, Collections.singletonList(partitionValue));
  Assert.assertNotNull(movedPart);
  Assert.assertEquals(DEFAULT_CATALOG_NAME, movedPart.getCatName());
  Assert.assertEquals(targetDb.toLowerCase(), movedPart.getDbName());
  Assert.assertEquals(tblName.toLowerCase(), movedPart.getTableName());
}
Usage example of org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder in the Apache Hive project.
From the class TestSchemaToolCatalogOps, method moveDatabase.
/**
 * Verifies that SchemaTool's {@code -moveDatabase} task relocates a database
 * — and every object it contains (function, table, partition) — from the
 * default catalog to another catalog.
 */
@Test
public void moveDatabase() throws HiveMetaException, TException {
  final String targetCatalog = "moveDbCat";
  final String movedDb = "moveDbDb";
  final String tblName = "moveDbTable";
  final String udfName = "movedbfunc";
  final String partitionValue = "moveDbKey";

  // Set up: destination catalog, and a database in the default catalog that
  // holds a function plus a partitioned table with one partition.
  new CatalogBuilder().setName(targetCatalog).setLocation("file:///tmp").create(client);
  Database db = new DatabaseBuilder()
      .setCatalogName(DEFAULT_CATALOG_NAME)
      .setName(movedDb)
      .create(client, conf);
  new FunctionBuilder()
      .inDb(db)
      .setName(udfName)
      .setClass("org.apache.hive.myudf")
      .create(client, conf);
  Table table = new TableBuilder()
      .inDb(db)
      .setTableName(tblName)
      .addCol("a", "int")
      .addPartCol("p", "string")
      .create(client, conf);
  new PartitionBuilder().inTable(table).addValue(partitionValue).addToTable(client, conf);

  // Execute the cross-catalog move.
  String args = String.format(
      "-moveDatabase %s -fromCatalog %s -toCatalog %s",
      movedDb, DEFAULT_CATALOG_NAME, targetCatalog);
  execute(new SchemaToolTaskMoveDatabase(), args);

  // The database itself must now live in the target catalog.
  Database fetchedDb = client.getDatabase(targetCatalog, movedDb);
  Assert.assertNotNull(fetchedDb);
  Assert.assertEquals(targetCatalog.toLowerCase(), fetchedDb.getCatalogName());

  // The function must have moved with its database.
  Function fetchedFunction = client.getFunction(targetCatalog, movedDb, udfName);
  Assert.assertNotNull(fetchedFunction);
  Assert.assertEquals(targetCatalog.toLowerCase(), fetchedFunction.getCatName());
  Assert.assertEquals(movedDb.toLowerCase(), fetchedFunction.getDbName());

  // So must the table ...
  Table fetchedTable = client.getTable(targetCatalog, movedDb, tblName);
  Assert.assertNotNull(fetchedTable);
  Assert.assertEquals(targetCatalog.toLowerCase(), fetchedTable.getCatName());
  Assert.assertEquals(movedDb.toLowerCase(), fetchedTable.getDbName());

  // ... and the table's partition.
  Partition fetchedPart = client.getPartition(
      targetCatalog, movedDb, tblName, Collections.singletonList(partitionValue));
  Assert.assertNotNull(fetchedPart);
  Assert.assertEquals(targetCatalog.toLowerCase(), fetchedPart.getCatName());
  Assert.assertEquals(movedDb.toLowerCase(), fetchedPart.getDbName());
  Assert.assertEquals(tblName.toLowerCase(), fetchedPart.getTableName());
}
Usage example of org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder in the Apache Hive project.
From the class TestSessionHiveMetastoreClientGetPartitionsTempTable, method addPartition.
/**
 * Adds one partition with the given key values to the table. When the table's
 * parameters map PART_PRIV to "true", the partition is created with an empty
 * user-privilege list for USER_NAME and an empty group-privilege list for each
 * entry of GROUPS.
 *
 * @param client metastore client used to register the partition
 * @param table  the partitioned target table
 * @param values partition key values, in partition-column order
 * @throws TException if the metastore call fails
 */
@Override
protected void addPartition(IMetaStoreClient client, Table table, List<String> values) throws TException {
  PartitionBuilder builder = new PartitionBuilder().inTable(table);
  for (String value : values) {
    builder.addValue(value);
  }
  Partition partition = builder.build(conf);

  Map<String, String> params = table.getParameters();
  if (params.containsKey(PART_PRIV) && params.get(PART_PRIV).equals("true")) {
    // Empty grant lists: the principals are known but hold no privileges.
    Map<String, List<PrivilegeGrantInfo>> userPrivileges = new HashMap<>();
    userPrivileges.put(USER_NAME, new ArrayList<>());
    Map<String, List<PrivilegeGrantInfo>> groupPrivileges = new HashMap<>();
    for (String group : GROUPS) {
      groupPrivileges.put(group, new ArrayList<>());
    }
    PrincipalPrivilegeSet privileges = new PrincipalPrivilegeSet();
    privileges.setUserPrivileges(userPrivileges);
    privileges.setGroupPrivileges(groupPrivileges);
    partition.setPrivileges(privileges);
  }
  client.add_partition(partition);
}
Usage example of org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder in the Apache Hive project.
From the class TestSessionHiveMetastoreClientAlterPartitionsTempTable, method addPartitions.
/**
 * Creates one partition per entry of {@code values}, each with its own
 * warehouse location, and registers them all in a single add_partitions call.
 *
 * @param client metastore client used to register the partitions
 * @param table  the partitioned target table
 * @param values one partition key value per partition to create
 * @throws Exception if the metastore call fails
 */
@Override
protected void addPartitions(IMetaStoreClient client, Table table, List<String> values) throws Exception {
  List<Partition> partitions = new ArrayList<>();
  for (int idx = 0; idx < values.size(); idx++) {
    String value = values.get(idx);
    // Suffix the value with its index so locations stay distinct even when
    // the same value appears more than once in the list.
    String location = MetaStoreTestUtils.getTestWarehouseDir(value + idx);
    partitions.add(new PartitionBuilder()
        .inTable(table)
        .addValue(value)
        .setLocation(location)
        .build(conf));
  }
  client.add_partitions(partitions);
}
Aggregations