use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
the class TestDatabases method testAlterDatabaseNoSuchDatabase.
@Test(expected = NoSuchObjectException.class)
public void testAlterDatabaseNoSuchDatabase() throws Exception {
  Database newDatabase = new DatabaseBuilder().setName("test_database_altered").build();
  client.alterDatabase("no_such_database", newDatabase);
}
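For contrast with the failure case above, a minimal sketch of the successful alter path, assuming the same test client fixture; the database name and description here are hypothetical, and setDescription is assumed to exist on this version of DatabaseBuilder:
Database original = new DatabaseBuilder().setName("test_database").build();
client.createDatabase(original);
// Rebuild under the same name; alterDatabase replaces the stored definition.
Database altered = new DatabaseBuilder()
    .setName("test_database")
    .setDescription("updated description") // hypothetical field, assumed setter
    .build();
client.alterDatabase("test_database", altered);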
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
the class TestGetPartitions method createDB.
private void createDB(String dbName) throws TException {
  Database db = new DatabaseBuilder().setName(dbName).build();
  client.createDatabase(db);
}
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
the class TestListPartitions method createDB.
private void createDB(String dbName) throws TException {
  Database db = new DatabaseBuilder().setName(dbName).build();
  client.createDatabase(db);
}
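Both helpers above build a bare-bones database. The builder also accepts optional metadata; a sketch assuming setDescription and setLocation are available on this version of DatabaseBuilder (verify against your Hive release):
private void createDB(String dbName) throws TException {
  Database db = new DatabaseBuilder()
      .setName(dbName)
      .setDescription("scratch database for partition tests") // hypothetical description
      .setLocation("/tmp/" + dbName + ".db")                  // hypothetical warehouse path
      .build();
  client.createDatabase(db);
}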
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
the class TestPermsGrp method testCustomPerms.
public void testCustomPerms() throws Exception {
  String dbName = Warehouse.DEFAULT_DATABASE_NAME;
  String tblName = "simptbl";
  String typeName = "Person";
  try {
    // Let's first test for default permissions; this is the case when the user specified nothing.
    Table tbl = getTable(dbName, tblName, typeName);
    msc.createTable(tbl);
    Database db = Hive.get(hcatConf).getDatabase(dbName);
    Path dfsPath = clientWH.getDefaultTablePath(db, tblName);
    cleanupTbl(dbName, tblName, typeName);
    // Next, the user did specify perms.
    try {
      callHCatCli(new String[] { "-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx-wx---" });
      fail();
    } catch (Exception e) {
      assertTrue(e instanceof ExitException);
      assertEquals(0, ((ExitException) e).getStatus());
    }
    dfsPath = clientWH.getDefaultTablePath(db, tblName);
    assertEquals(FsPermission.valueOf("drwx-wx---"), dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission());
    cleanupTbl(dbName, tblName, typeName);
    // User specified perms in an invalid format.
    hcatConf.set(HCatConstants.HCAT_PERMS, "rwx");
    // Make sure create table fails.
    try {
      callHCatCli(new String[] { "-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx" });
      fail();
    } catch (Exception me) {
      assertTrue(me instanceof ExitException);
    }
    // No physical dir gets created.
    dfsPath = clientWH.getDefaultTablePath(db, tblName);
    try {
      dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
      fail();
    } catch (Exception fnfe) {
      assertTrue(fnfe instanceof FileNotFoundException);
    }
    // And no metadata gets created.
    try {
      msc.getTable(Warehouse.DEFAULT_DATABASE_NAME, tblName);
      fail();
    } catch (Exception e) {
      assertTrue(e instanceof NoSuchObjectException);
      assertEquals("default.simptbl table not found", e.getMessage());
    }
    // Test for an invalid group name.
    hcatConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
    hcatConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
    try {
      // Create table must fail.
      callHCatCli(new String[] { "-e", "create table simptbl (name string) stored as RCFILE", "-p", "rw-rw-rw-", "-g", "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER" });
      fail();
    } catch (Exception me) {
      assertTrue(me instanceof SecurityException);
    }
    try {
      // No metadata should get created.
      msc.getTable(dbName, tblName);
      fail();
    } catch (Exception e) {
      assertTrue(e instanceof NoSuchObjectException);
      assertEquals("default.simptbl table not found", e.getMessage());
    }
    try {
      // Neither dir should get created.
      dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
      fail();
    } catch (Exception e) {
      assertTrue(e instanceof FileNotFoundException);
    }
  } catch (Exception e) {
    LOG.error("testCustomPerms failed.", e);
    throw e;
  }
}
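The permission assertions above rely on FsPermission.valueOf(), which parses the unix "ls -l" style string: one file-type character followed by nine rwx flags. That is why the three-character "rwx" value is rejected as invalid. A quick illustration of the parsing:
// FsPermission.valueOf() expects "ls -l" format, e.g. "drwx-wx---".
FsPermission perms = FsPermission.valueOf("drwx-wx---");
System.out.println(perms);           // rwx-wx--- (the leading type character is dropped)
System.out.println(perms.toShort()); // 472, i.e. octal 0730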
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
the class HCatSemanticAnalyzer method authorizeDDLWork.
@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
  // DB operations; none of them are enforced by Hive right now.
  ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
  if (showDatabases != null) {
    authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(), HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
  }
  DropDatabaseDesc dropDb = work.getDropDatabaseDesc();
  if (dropDb != null) {
    Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
    if (db != null) {
      // If the above returned null, the db does not exist - probably a
      // "drop database if exists" clause - so don't try to authorize then.
      authorize(db, Privilege.DROP);
    }
  }
  DescDatabaseDesc descDb = work.getDescDatabaseDesc();
  if (descDb != null) {
    Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
    authorize(db, Privilege.SELECT);
  }
  SwitchDatabaseDesc switchDb = work.getSwitchDatabaseDesc();
  if (switchDb != null) {
    Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
    authorize(db, Privilege.SELECT);
  }
  ShowTablesDesc showTables = work.getShowTblsDesc();
  if (showTables != null) {
    String dbName = showTables.getDbName() == null ? SessionState.get().getCurrentDatabase() : showTables.getDbName();
    authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
  }
  ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
  if (showTableStatus != null) {
    String dbName = showTableStatus.getDbName() == null ? SessionState.get().getCurrentDatabase() : showTableStatus.getDbName();
    authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
  }
  // TODO: add alter database support in HCat
  // Table operations.
  DropTableDesc dropTable = work.getDropTblDesc();
  if (dropTable != null) {
    if (dropTable.getPartSpecs() == null) {
      // Drop table is already enforced by Hive. We only check for the table-level location
      // even if the table is partitioned.
    } else {
      // This is actually an ALTER TABLE DROP PARTITION statement.
      for (DropTableDesc.PartSpec partSpec : dropTable.getPartSpecs()) {
        // Partitions are not added as write entries in drop partitions in Hive.
        Table table = hive.getTable(SessionState.get().getCurrentDatabase(), dropTable.getTableName());
        List<Partition> partitions = null;
        try {
          partitions = hive.getPartitionsByFilter(table, partSpec.getPartSpec().getExprString());
        } catch (Exception e) {
          throw new HiveException(e);
        }
        for (Partition part : partitions) {
          authorize(part, Privilege.DROP);
        }
      }
    }
  }
  AlterTableDesc alterTable = work.getAlterTblDesc();
  if (alterTable != null) {
    Table table = hive.getTable(SessionState.get().getCurrentDatabase(), Utilities.getDbTableName(alterTable.getOldName())[1], false);
    Partition part = null;
    if (alterTable.getPartSpec() != null) {
      part = hive.getPartition(table, alterTable.getPartSpec(), false);
    }
    String newLocation = alterTable.getNewLocation();
    /* HCat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements,
     * for both the old table/partition location and the new location.
     */
    if (alterTable.getOp() == AlterTableDesc.AlterTableTypes.ALTERLOCATION) {
      if (part != null) {
        // Authorize for the old location...
        authorize(part, Privilege.ALTER_DATA);
        // ...and for the new location.
        part.setLocation(newLocation);
        authorize(part, Privilege.ALTER_DATA);
      } else {
        // Authorize for the old location...
        authorize(table, Privilege.ALTER_DATA);
        // ...and for the new location.
        table.getTTable().getSd().setLocation(newLocation);
        authorize(table, Privilege.ALTER_DATA);
      }
    }
    // Other alter operations are already supported by Hive.
  }
  // We should be careful when authorizing a table based on just the
  // table name. If columns have a separate authorization domain, it
  // must be honored.
  DescTableDesc descTable = work.getDescTblDesc();
  if (descTable != null) {
    String tableName = extractTableName(descTable.getTableName());
    authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
  }
  ShowPartitionsDesc showParts = work.getShowPartsDesc();
  if (showParts != null) {
    String tableName = extractTableName(showParts.getTabName());
    authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
  }
}
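The authorize and authorizeTable helpers called throughout are inherited from the analyzer's base class. A hedged sketch of what the table-level variant plausibly does, consistent with the calls above; the signature and body are assumptions, not the actual Hive source:
// Hypothetical sketch; the real helper lives in HCatSemanticAnalyzerBase and may differ.
protected void authorizeTable(Hive hive, String tableName, Privilege priv) throws HiveException {
  Table table;
  try {
    table = hive.getTable(tableName);
  } catch (InvalidTableException ite) {
    // The table does not exist (e.g. DROP TABLE IF EXISTS): nothing to authorize.
    return;
  }
  if (table != null) {
    authorize(table, priv);
  }
}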