Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class CommandAuthorizerV2, method getHivePrivObjects.
/**
 * Converts the given read/write entities into {@link HivePrivilegeObject}s to be checked
 * by the authorization plugin.
 *
 * <p>Skipped (not authorized): dummy entities, temporary URIs, temporary tables, and
 * indirect ReadEntities (underlying tables/views of a view) unless they are reachable
 * through a deferred-authorization view.
 *
 * @param privObjects    entities gathered for the current command; may be null
 * @param tableName2Cols optional map from complete table name to accessed columns
 * @param hiveOpType     the operation being authorized
 * @return privilege objects for the entities that require authorization (never null)
 * @throws HiveException if building a privilege object fails
 */
private static List<HivePrivilegeObject> getHivePrivObjects(List<? extends Entity> privObjects,
    Map<String, List<String>> tableName2Cols, HiveOperationType hiveOpType) throws HiveException {
  List<HivePrivilegeObject> hivePrivobjs = new ArrayList<>();
  if (privObjects == null) {
    return hivePrivobjs;
  }
  for (Entity privObject : privObjects) {
    if (privObject.isDummy()) {
      // do not authorize dummy readEntity or writeEntity
      continue;
    }
    if (privObject instanceof ReadEntity && !((ReadEntity) privObject).isDirect()) {
      // This ReadEntity represents one of the underlying tables/views of a view; skip it
      // unless it is referenced through a deferred-authorization view.
      ReadEntity reTable = (ReadEntity) privObject;
      // Primitive boolean instead of boxed Boolean: no nullability is needed here.
      boolean isDeferred = false;
      if (reTable.getParents() != null && !reTable.getParents().isEmpty()) {
        for (ReadEntity re : reTable.getParents()) {
          if (re.getTyp() == Type.TABLE && re.getTable() != null
              && isDeferredAuthView(re.getTable())) {
            isDeferred = true;
            // One deferred-auth parent is sufficient; no need to scan the rest.
            break;
          }
        }
      }
      if (!isDeferred) {
        continue;
      }
    }
    if (privObject instanceof WriteEntity && ((WriteEntity) privObject).isTempURI()) {
      // do not authorize temporary uris
      continue;
    }
    if (privObject.getTyp() == Type.TABLE
        && (privObject.getT() == null || privObject.getT().isTemporary())) {
      // skip temporary tables from authorization
      continue;
    }
    addHivePrivObject(privObject, tableName2Cols, hivePrivobjs, hiveOpType);
  }
  return hivePrivobjs;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class CommandAuthorizerV2, method addHivePrivObject.
/**
 * Builds a {@link HivePrivilegeObject} for a single entity and appends it to
 * {@code hivePrivObjs}.
 *
 * <p>For tables backed by a storage handler, an additional STORAGEHANDLER_URI privilege
 * object is appended for create/alter-table operations so the plugin can authorize the
 * underlying storage location. Partitions are currently not handled and are skipped.
 *
 * @param privObject     the entity to convert
 * @param tableName2Cols optional map from complete table name to accessed columns
 * @param hivePrivObjs   output list the new privilege object(s) are added to
 * @param hiveOpType     the operation being authorized
 * @throws HiveException if the storage-handler URI cannot be obtained
 */
private static void addHivePrivObject(Entity privObject, Map<String, List<String>> tableName2Cols,
    List<HivePrivilegeObject> hivePrivObjs, HiveOperationType hiveOpType) throws HiveException {
  HivePrivilegeObjectType privObjType =
      AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
  HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
  HivePrivilegeObject hivePrivObject = null;
  switch(privObject.getType()) {
  case DATABASE:
    Database database = privObject.getDatabase();
    hivePrivObject = new HivePrivilegeObject(privObjType, database.getName(), null, null, null,
        actionType, null, null, database.getOwnerName(), database.getOwnerType());
    break;
  case TABLE:
    Table table = privObject.getTable();
    // Column-level privileges, when the command tracked which columns it touches.
    List<String> columns = tableName2Cols == null ? null
        : tableName2Cols.get(Table.getCompleteName(table.getDbName(), table.getTableName()));
    hivePrivObject = new HivePrivilegeObject(privObjType, table.getDbName(),
        table.getTableName(), null, columns, actionType, null, null, table.getOwner(),
        table.getOwnerType());
    if (table.getStorageHandler() != null) {
      // TODO: add hive privilege object for storage based handlers for create and alter table commands.
      if (hiveOpType == HiveOperationType.CREATETABLE
          || hiveOpType == HiveOperationType.ALTERTABLE_PROPERTIES
          || hiveOpType == HiveOperationType.CREATETABLE_AS_SELECT) {
        try {
          String storageUri =
              table.getStorageHandler().getURIForAuth(table.getTTable()).toString();
          hivePrivObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.STORAGEHANDLER_URI,
              null, storageUri, null, null, actionType, null,
              table.getStorageHandler().getClass().getName(), table.getOwner(),
              table.getOwnerType()));
        } catch (Exception ex) {
          LOG.error("Exception occurred while getting the URI from storage handler: "
              + ex.getMessage(), ex);
          // Preserve the original exception as the cause instead of only its message,
          // so callers keep the full stack trace for diagnosis.
          throw new HiveException("Exception occurred while getting the URI from storage handler: "
              + ex.getMessage(), ex);
        }
      }
    }
    break;
  case DFS_DIR:
  case LOCAL_DIR:
    hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getD().toString(),
        null, null, actionType, null, null, null, null);
    break;
  case FUNCTION:
    String dbName = privObject.getDatabase() != null ? privObject.getDatabase().getName() : null;
    hivePrivObject = new HivePrivilegeObject(privObjType, dbName, privObject.getFunctionName(),
        null, null, actionType, null, privObject.getClassName(), null, null);
    break;
  case DUMMYPARTITION:
  case PARTITION:
    // TODO: not currently handled
    return;
  case SERVICE_NAME:
    hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getServiceName(),
        null, null, actionType, null, null, null, null);
    break;
  case DATACONNECTOR:
    DataConnector connector = privObject.getDataConnector();
    hivePrivObject = new HivePrivilegeObject(privObjType, null, connector.getName(), null, null,
        actionType, null, null, connector.getOwnerName(), connector.getOwnerType());
    break;
  default:
    throw new AssertionError("Unexpected object type");
  }
  hivePrivObjs.add(hivePrivObject);
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class CommandUtil, method authorizeCommandThrowEx.
/**
 * Asks the session's V2 authorizer whether the current user may run {@code command}
 * against the service named {@code serviceObject}.
 *
 * @param ss            the session holding the authorizer and client context
 * @param type          the operation type being authorized
 * @param command       the command tokens being executed
 * @param serviceObject name of the target service
 * @throws HiveAuthzPluginException   on authorizer plugin failure
 * @throws HiveAccessControlException if the user lacks the required privilege
 */
private static void authorizeCommandThrowEx(SessionState ss, HiveOperationType type, List<String> command, String serviceObject) throws HiveAuthzPluginException, HiveAccessControlException {
  // Build the authorization context first: the full command string plus client address info.
  HiveAuthzContext.Builder contextBuilder = new HiveAuthzContext.Builder();
  contextBuilder.setCommandString(Joiner.on(' ').join(command));
  contextBuilder.setUserIpAddress(ss.getUserIpAddress());
  contextBuilder.setForwardedAddresses(ss.getForwardedAddresses());
  // Input privilege object: the command itself.
  HivePrivilegeObject inputObj = HivePrivilegeObject.createHivePrivilegeObject(command);
  // Output privilege object: the service the command targets.
  HivePrivilegeObject outputObj = new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.SERVICE_NAME, null, serviceObject, null, null, null);
  ss.getAuthorizerV2().checkPrivileges(type, Collections.singletonList(inputObj), Collections.singletonList(outputObj), contextBuilder.build());
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class ScheduledQueryAnalyzer, method checkAuthorization.
/**
 * Verifies the current user may perform the given maintenance operation on the
 * scheduled query.
 *
 * <p>When scheduled-query authorization is enabled and an authorizer is configured, the
 * check is delegated to the V2 authorizer; otherwise only the owner of the scheduled
 * query is allowed to modify it. Any failure is surfaced as a {@link SemanticException}.
 *
 * @param type the maintenance request type (create/alter/drop)
 * @param schq the scheduled query being changed
 * @throws SemanticException if authorization fails or the user is not permitted
 */
private void checkAuthorization(ScheduledQueryMaintenanceRequestType type, ScheduledQuery schq) throws SemanticException {
  boolean useAuthorizer = SessionState.get().getAuthorizerV2() != null
      && conf.getBoolVar(ConfVars.HIVE_SECURITY_AUTHORIZATION_SCHEDULED_QUERIES_SUPPORTED);
  try {
    if (useAuthorizer) {
      // Delegate to the configured authorizer: the scheduled query acts as both
      // input and output privilege object.
      List<HivePrivilegeObject> privObjects = new ArrayList<>();
      ScheduledQueryKey scheduleKey = schq.getScheduleKey();
      privObjects.add(HivePrivilegeObject.forScheduledQuery(
          schq.getUser(), scheduleKey.getClusterNamespace(), scheduleKey.getScheduleName()));
      SessionState.get().getAuthorizerV2().checkPrivileges(
          toHiveOpType(type), privObjects, privObjects, new HiveAuthzContext.Builder().build());
    } else {
      // Authorization disabled: fall back to an owner-only check.
      String currentUser = getUserName();
      if (!Objects.equal(currentUser, schq.getUser())) {
        throw new HiveAccessControlException(
            "Authorization of scheduled queries is not enabled - only owners may change scheduled queries (currentUser: "
                + currentUser + ", owner: " + schq.getUser() + ")");
      }
    }
  } catch (Exception e) {
    // Boundary translation: report any authorization failure as a SemanticException.
    throw new SemanticException(e.getMessage(), e);
  }
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class RevokeOperation, method execute.
/**
 * Executes the REVOKE command: converts the descriptor's principals, privileges, and
 * privilege subject into the authorization-plugin types and revokes via the session
 * authorizer.
 *
 * @return 0 on success
 * @throws HiveException if the authorizer rejects or fails the revoke
 */
@Override
public int execute() throws HiveException {
  HiveAuthorizer authorizer = PrivilegeUtils.getSessionAuthorizer(context.getConf());
  // Fetch the translator once instead of re-resolving it for every conversion below.
  HiveAuthorizationTranslator translator = PrivilegeUtils.getAuthorizationTranslator(authorizer);
  // Convert to object types used by the authorization plugin interface
  List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), translator);
  List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(desc.getPrivileges(), translator);
  HivePrivilegeObject hivePrivilegeObject = translator.getHivePrivilegeObject(desc.getPrivilegeSubject());
  // No grantor is specified for a revoke; the authorizer resolves it.
  HivePrincipal grantorPrincipal = new HivePrincipal(null, null);
  authorizer.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivilegeObject, grantorPrincipal, desc.isGrantOption());
  return 0;
}
Aggregations