Use of org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider in project hive by apache.
The class Driver, method doAuthorization.
/**
 * Perform authorization using the post-semantic-analysis information in the
 * semantic analyzer. The original command is also passed so that the
 * authorization interface can provide more useful information in logs.
 * @param op the operation being authorized
 * @param sem SemanticAnalyzer used to parse the input query
 * @param command the input query
 * @throws HiveException
 * @throws AuthorizationException
 */
public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command)
    throws HiveException, AuthorizationException {
  SessionState ss = SessionState.get();
  Hive db = sem.getDb();
  Set<ReadEntity> additionalInputs = new HashSet<ReadEntity>();
  for (Entity e : sem.getInputs()) {
    if (e.getType() == Entity.Type.PARTITION) {
      additionalInputs.add(new ReadEntity(e.getTable()));
    }
  }
  Set<WriteEntity> additionalOutputs = new HashSet<WriteEntity>();
  for (WriteEntity e : sem.getOutputs()) {
    if (e.getType() == Entity.Type.PARTITION) {
      additionalOutputs.add(new WriteEntity(e.getTable(), e.getWriteType()));
    }
  }
  // Sets.union returns a lazy view that iterates over the first set in full
  // and then over each element of the second set, in order, that is not
  // contained in the first. It never replaces anything in the first set, so
  // for the outputs list the WriteType of each WriteEntity in the first set
  // is preserved.
  Set<ReadEntity> inputs = Sets.union(sem.getInputs(), additionalInputs);
  Set<WriteEntity> outputs = Sets.union(sem.getOutputs(), additionalOutputs);
  if (ss.isAuthorizationModeV2()) {
    // get mapping of tables to columns used
    ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
    // colAccessInfo is set only in case of SemanticAnalyzer
    Map<String, List<String>> selectTab2Cols = colAccessInfo != null
        ? colAccessInfo.getTableToColumnAccessMap() : null;
    Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null
        ? sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
    doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
    return;
  }
  if (op == null) {
    throw new HiveException("Operation should not be null");
  }
  HiveAuthorizationProvider authorizer = ss.getAuthorizer();
  if (op.equals(HiveOperation.CREATEDATABASE)) {
    authorizer.authorize(op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
  } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
    authorizer.authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null,
        HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
  } else if (op.equals(HiveOperation.IMPORT)) {
    ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
    if (!isa.existsTable()) {
      authorizer.authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null,
          HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
    }
  }
  if (outputs != null && outputs.size() > 0) {
    for (WriteEntity write : outputs) {
      if (write.isDummy() || write.isPathType()) {
        continue;
      }
      if (write.getType() == Entity.Type.DATABASE) {
        if (!op.equals(HiveOperation.IMPORT)) {
          // We skip DB check for import here because we already handle it above
          // as a CTAS check.
          authorizer.authorize(write.getDatabase(), null, op.getOutputRequiredPrivileges());
        }
        continue;
      }
      if (write.getType() == WriteEntity.Type.PARTITION) {
        Partition part = db.getPartition(write.getTable(), write.getPartition().getSpec(), false);
        if (part != null) {
          authorizer.authorize(write.getPartition(), null, op.getOutputRequiredPrivileges());
          continue;
        }
      }
      if (write.getTable() != null) {
        authorizer.authorize(write.getTable(), null, op.getOutputRequiredPrivileges());
      }
    }
  }
  if (inputs != null && inputs.size() > 0) {
    Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
    Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
    // determine if partition level privileges should be checked for input tables
    Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
    for (ReadEntity read : inputs) {
      if (read.isDummy() || read.isPathType() || read.getType() == Entity.Type.DATABASE) {
        continue;
      }
      Table tbl = read.getTable();
      if ((read.getPartition() != null) || (tbl != null && tbl.isPartitioned())) {
        String tblName = tbl.getTableName();
        if (tableUsePartLevelAuth.get(tblName) == null) {
          boolean usePartLevelPriv = (tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null
              && ("TRUE".equalsIgnoreCase(tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))));
          if (usePartLevelPriv) {
            tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
          } else {
            tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
          }
        }
      }
    }
    // column authorization is checked through table scan operators.
    getTablePartitionUsedColumns(op, sem, tab2Cols, part2Cols, tableUsePartLevelAuth);
    // cache the results for table authorization
    Set<String> tableAuthChecked = new HashSet<String>();
    for (ReadEntity read : inputs) {
      // if the read is not direct, we do not need to check its authorization.
      if (read.isDummy() || read.isPathType() || !read.isDirect()) {
        continue;
      }
      if (read.getType() == Entity.Type.DATABASE) {
        authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
        continue;
      }
      Table tbl = read.getTable();
      if (tbl.isView() && sem instanceof SemanticAnalyzer) {
        tab2Cols.put(tbl,
            sem.getColumnAccessInfo().getTableToColumnAccessMap().get(tbl.getCompleteName()));
      }
      if (read.getPartition() != null) {
        Partition partition = read.getPartition();
        tbl = partition.getTable();
        // use partition level authorization
        if (Boolean.TRUE.equals(tableUsePartLevelAuth.get(tbl.getTableName()))) {
          List<String> cols = part2Cols.get(partition);
          if (cols != null && cols.size() > 0) {
            authorizer.authorize(partition.getTable(), partition, cols,
                op.getInputRequiredPrivileges(), null);
          } else {
            authorizer.authorize(partition, op.getInputRequiredPrivileges(), null);
          }
          continue;
        }
      }
      // if we reach here, a table-level authorization check is needed; it may
      // already have been done while processing another partition of the same table
      if (tbl != null && !tableAuthChecked.contains(tbl.getTableName())
          && !(Boolean.TRUE.equals(tableUsePartLevelAuth.get(tbl.getTableName())))) {
        List<String> cols = tab2Cols.get(tbl);
        if (cols != null && cols.size() > 0) {
          authorizer.authorize(tbl, null, cols, op.getInputRequiredPrivileges(), null);
        } else {
          authorizer.authorize(tbl, op.getInputRequiredPrivileges(), null);
        }
        tableAuthChecked.add(tbl.getTableName());
      }
    }
  }
}
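The Sets.union comment in the method above relies on a Guava property that is easy to verify in isolation. Below is a minimal, self-contained sketch; the Entry class is a hypothetical stand-in for WriteEntity (equality ignores the WriteType-like field), not a Hive type.

import com.google.common.collect.Sets;
import java.util.LinkedHashSet;
import java.util.Objects;
import java.util.Set;

public class UnionOrderDemo {

  /** Hypothetical stand-in for WriteEntity: equality ignores 'writeType'. */
  static final class Entry {
    final String name;
    final String writeType;
    Entry(String name, String writeType) {
      this.name = name;
      this.writeType = writeType;
    }
    @Override public boolean equals(Object o) {
      return o instanceof Entry && ((Entry) o).name.equals(name);
    }
    @Override public int hashCode() {
      return Objects.hash(name);
    }
    @Override public String toString() {
      return name + "/" + writeType;
    }
  }

  public static void main(String[] args) {
    Set<Entry> outputs = new LinkedHashSet<>();
    outputs.add(new Entry("tbl1", "INSERT"));
    Set<Entry> additional = new LinkedHashSet<>();
    additional.add(new Entry("tbl1", "DDL")); // equal to the one in 'outputs'
    additional.add(new Entry("tbl2", "DDL"));

    // Sets.union is a lazy view: it iterates the first set in full, then only
    // those elements of the second set not already present in the first.
    // The equal-but-different Entry("tbl1", "DDL") is skipped, so the
    // WriteType-like field from the first set survives.
    Set<Entry> union = Sets.union(outputs, additional);
    System.out.println(union); // [tbl1/INSERT, tbl2/DDL]
  }
}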
Use of org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider in project hive by apache.
The class HiveUtils, method getAuthorizeProviderManager.
/**
 * Create a new instance of HiveAuthorizationProvider.
 * @param conf - configuration used to instantiate the provider
 * @param authzClassName - authorization provider class name
 * @param authenticator - authentication provider to attach to the new instance
 * @param nullIfOtherClass - return null if the configuration
 *     does not point to a HiveAuthorizationProvider subclass
 * @return new instance of HiveAuthorizationProvider
 * @throws HiveException
 */
@SuppressWarnings("unchecked")
public static HiveAuthorizationProvider getAuthorizeProviderManager(Configuration conf,
    String authzClassName, HiveAuthenticationProvider authenticator, boolean nullIfOtherClass)
    throws HiveException {
  HiveAuthorizationProvider ret = null;
  try {
    Class<? extends HiveAuthorizationProvider> cls = null;
    if (authzClassName == null || authzClassName.trim().equals("")) {
      cls = DefaultHiveAuthorizationProvider.class;
    } else {
      Class<?> configClass = Class.forName(authzClassName, true, JavaUtils.getClassLoader());
      if (nullIfOtherClass && !HiveAuthorizationProvider.class.isAssignableFrom(configClass)) {
        return null;
      }
      cls = (Class<? extends HiveAuthorizationProvider>) configClass;
    }
    if (cls != null) {
      ret = ReflectionUtils.newInstance(cls, conf);
    }
  } catch (Exception e) {
    throw new HiveException(e);
  }
  ret.setAuthenticator(authenticator);
  return ret;
}
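A hedged sketch of how this factory is typically driven from configuration. It assumes the standard config keys (hive.security.authorization.manager and hive.security.authenticator.manager, via HiveConf.ConfVars) and the companion HiveUtils.getAuthenticator factory; treat it as an illustration rather than the exact bootstrap path Hive uses.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

public class AuthorizerBootstrap {
  public static void main(String[] args) throws HiveException {
    HiveConf conf = new HiveConf();

    // Resolve the configured provider class name; if the key is unset or
    // blank, the factory falls back to DefaultHiveAuthorizationProvider.
    String authzClassName =
        HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);

    HiveAuthenticationProvider authenticator =
        HiveUtils.getAuthenticator(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);

    // nullIfOtherClass=true: if the same key holds a V2 authorizer factory
    // instead, we get null back rather than a ClassCastException.
    HiveAuthorizationProvider authorizer = HiveUtils.getAuthorizeProviderManager(
        conf, authzClassName, authenticator, true);

    if (authorizer == null) {
      System.out.println("Configured class is not a V1 HiveAuthorizationProvider");
    }
  }
}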
Use of org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider in project hive by apache.
The class CommandAuthorizerV1, method doAuthorization.
static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, SessionState ss,
    Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws HiveException {
  if (op == null) {
    throw new HiveException("Operation should not be null");
  }
  Hive db = sem.getDb();
  HiveAuthorizationProvider authorizer = ss.getAuthorizer();
  authorizeOperation(op, sem, inputs, outputs, db, authorizer);
  authorizeOutputs(op, outputs, db, authorizer);
  authorizeInputs(op, sem, inputs, authorizer);
}
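CommandAuthorizerV1 is the later refactoring of the monolithic Driver logic shown earlier into three focused helpers. The structural sketch below shows how they line up; the helper signatures are inferred from the call sites above, while the class name and the elided bodies are illustrative, not the actual implementation.

import java.util.Set;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

final class CommandAuthorizerV1Sketch {

  static void authorizeOperation(HiveOperation op, BaseSemanticAnalyzer sem,
      Set<ReadEntity> inputs, Set<WriteEntity> outputs, Hive db,
      HiveAuthorizationProvider authorizer) throws HiveException {
    // corresponds to the CREATEDATABASE, CREATETABLE/CTAS, and IMPORT
    // special cases at the top of the older Driver.doAuthorization
  }

  static void authorizeOutputs(HiveOperation op, Set<WriteEntity> outputs, Hive db,
      HiveAuthorizationProvider authorizer) throws HiveException {
    // corresponds to the database-, partition-, and table-level checks
    // performed per WriteEntity
  }

  static void authorizeInputs(HiveOperation op, BaseSemanticAnalyzer sem,
      Set<ReadEntity> inputs, HiveAuthorizationProvider authorizer) throws HiveException {
    // corresponds to the column-aware checks per ReadEntity, switching to
    // partition-level authorization when PARTITION_LEVEL_PRIVILEGE is set
  }
}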