Example usage of org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc in the Apache Hive project:
the authorizeDDLWork method of the HCatSemanticAnalyzer class.
@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
  // Dispatches on the concrete DDLDesc carried by the DDLWork and performs the
  // HCatalog-specific authorization check for that statement type. Statements
  // not matched by any branch fall through without an extra check here.
  DDLDesc ddlDesc = work.getDDLDesc();
  if (ddlDesc instanceof ShowDatabasesDesc) {
    authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(),
        HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
  } else if (ddlDesc instanceof DropDatabaseDesc) {
    DropDatabaseDesc dropDb = (DropDatabaseDesc) ddlDesc;
    Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
    if (db != null) {
      // if above returned a null, then the db does not exist - probably a
      // "drop database if exists" clause - don't try to authorize then.
      authorize(db, Privilege.DROP);
    }
  } else if (ddlDesc instanceof DescDatabaseDesc) {
    DescDatabaseDesc descDb = (DescDatabaseDesc) ddlDesc;
    Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
    authorize(db, Privilege.SELECT);
  } else if (ddlDesc instanceof SwitchDatabaseDesc) {
    SwitchDatabaseDesc switchDb = (SwitchDatabaseDesc) ddlDesc;
    Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
    authorize(db, Privilege.SELECT);
  } else if (ddlDesc instanceof ShowTablesDesc) {
    // SHOW TABLES with no explicit database targets the session's current one.
    ShowTablesDesc showTables = (ShowTablesDesc) ddlDesc;
    String dbName = showTables.getDbName() == null
        ? SessionState.get().getCurrentDatabase() : showTables.getDbName();
    authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
  } else if (ddlDesc instanceof DescTableDesc) {
    // we should be careful when authorizing table based on just the
    // table name. If columns have separate authorization domain, it
    // must be honored
    DescTableDesc descTable = (DescTableDesc) ddlDesc;
    String tableName = extractTableName(descTable.getDbTableName());
    authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
  } else if (ddlDesc instanceof ShowTableStatusDesc) {
    ShowTableStatusDesc showTableStatus = (ShowTableStatusDesc) ddlDesc;
    String dbName = showTableStatus.getDbName() == null
        ? SessionState.get().getCurrentDatabase() : showTableStatus.getDbName();
    authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
  } else if (ddlDesc instanceof AlterTableDropPartitionDesc) {
    AlterTableDropPartitionDesc dropPartition = (AlterTableDropPartitionDesc) ddlDesc;
    // this is actually a ALTER TABLE DROP PARTITION statement.
    // The table lookup is loop-invariant, so resolve it once up front
    // instead of once per partition spec (each lookup is a metastore call).
    Table table = hive.getTable(SessionState.get().getCurrentDatabase(), dropPartition.getTableName());
    for (AlterTableDropPartitionDesc.PartitionDesc partSpec : dropPartition.getPartSpecs()) {
      // partitions are not added as write entries in drop partitions in Hive,
      // so resolve them from the filter expression and authorize each one.
      List<Partition> partitions = null;
      try {
        partitions = hive.getPartitionsByFilter(table, partSpec.getPartSpec().getExprString());
      } catch (Exception e) {
        throw new HiveException(e);
      }
      for (Partition part : partitions) {
        authorize(part, Privilege.DROP);
      }
    }
  } else if (ddlDesc instanceof ShowPartitionsDesc) {
    ShowPartitionsDesc showParts = (ShowPartitionsDesc) ddlDesc;
    String tableName = extractTableName(showParts.getTabName());
    authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
  } else if (ddlDesc instanceof AlterTableSetLocationDesc) {
    AlterTableSetLocationDesc alterTable = (AlterTableSetLocationDesc) ddlDesc;
    Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
        Utilities.getDbTableName(alterTable.getDbTableName())[1], false);
    Partition part = null;
    if (alterTable.getPartitionSpec() != null) {
      part = hive.getPartition(table, alterTable.getPartitionSpec(), false);
    }
    String newLocation = alterTable.getLocation();
    /* Hcat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
     * for the old table/partition location and the new location.
     */
    if (part != null) {
      // authorize for the old
      authorize(part, Privilege.ALTER_DATA);
      // location, and new location
      part.setLocation(newLocation);
      authorize(part, Privilege.ALTER_DATA);
    } else {
      // authorize for the old
      authorize(table, Privilege.ALTER_DATA);
      // location, and new location
      table.getTTable().getSd().setLocation(newLocation);
      authorize(table, Privilege.ALTER_DATA);
    }
  }
}
Example usage of org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc in the Apache Hive project:
the handle method of the DropDatabaseHandler class.
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  // Build a DROP DATABASE task from the replication event payload.
  DropDatabaseMessage message = deserializer.getDropDatabaseMessage(context.dmd.getPayload());
  // Prefer the target database name from the context; fall back to the
  // name recorded in the event message when none was supplied.
  String targetDb = context.isDbNameEmpty() ? message.getDB() : context.dbName;
  // ifExists = true so replaying the event is idempotent on the replica.
  DropDatabaseDesc dropDesc =
      new DropDatabaseDesc(targetDb, true, context.eventOnlyReplicationSpec());
  DDLWork ddlWork = new DDLWork(new HashSet<>(), new HashSet<>(), dropDesc, true,
      context.getDumpDirectory(), context.getMetricCollector());
  Task<?> dropTask = TaskFactory.get(ddlWork, context.hiveConf);
  context.log.info("Added drop database task : {}:{}", dropTask.getId(), dropDesc.getDatabaseName());
  // Record how far replication has progressed for this database.
  updatedMetadata.set(context.dmd.getEventTo().toString(), targetDb, null, null);
  return Collections.singletonList(dropTask);
}
End of aggregated usage examples for DropDatabaseDesc.