
Example 16 with RangerAccessResult

use of org.apache.ranger.plugin.policyengine.RangerAccessResult in project ranger by apache.

the class RangerAuthorizer method authorize.

public boolean authorize(String fileName, String accessType, String user, Set<String> userGroups) {
    RangerAccessResourceImpl resource = new RangerAccessResourceImpl();
    // "path" must be a value resource name in servicedef JSON
    resource.setValue("path", fileName);
    RangerAccessRequest request = new RangerAccessRequestImpl(resource, accessType, user, userGroups);
    RangerAccessResult result = plugin.isAccessAllowed(request);
    return result != null && result.getIsAllowed();
}
Also used: RangerAccessRequestImpl (org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl), RangerAccessResourceImpl (org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl), RangerAccessResult (org.apache.ranger.plugin.policyengine.RangerAccessResult), RangerAccessRequest (org.apache.ranger.plugin.policyengine.RangerAccessRequest)
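
The plugin field referenced in authorize() is not shown in this snippet. Below is a minimal sketch of how such a field is typically set up with RangerBasePlugin; the service type and application id used here are illustrative placeholders, not part of the example.

import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler;
import org.apache.ranger.plugin.service.RangerBasePlugin;

public class RangerAuthorizerBootstrap {

    // "sampleServiceType" and "sampleApp" are assumptions for illustration only.
    private static final RangerBasePlugin plugin = new RangerBasePlugin("sampleServiceType", "sampleApp");

    static {
        // Downloads the service definition and policies, and starts the policy refresher.
        plugin.init();
        // Route authorization results to the default audit handler.
        plugin.setResultProcessor(new RangerDefaultAuditHandler());
    }
}

Once the plugin is initialized, authorize() can be called per request; isAccessAllowed() evaluates against the locally cached policies, so the call does not go back to the Ranger admin server.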

Example 17 with RangerAccessResult

use of org.apache.ranger.plugin.policyengine.RangerAccessResult in project ranger by apache.

the class RangerHiveAuthorizer method filterListCmdObjects.

/**
 * Check if user has privileges to do this action on these objects
 * @param objs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
// Commented out to avoid build errors until this interface is stable in Hive Branch
// @Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> objs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> filterListCmdObjects(%s, %s)", objs, context));
    }
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.filterListCmdObjects()");
    }
    List<HivePrivilegeObject> ret = null;
    // bail out early if nothing is there to validate!
    if (objs == null) {
        LOG.debug("filterListCmdObjects: meta objects list was null!");
    } else if (objs.isEmpty()) {
        LOG.debug("filterListCmdObjects: meta objects list was empty!");
        ret = objs;
    } else if (getCurrentUserGroupInfo() == null) {
        /*
         * This is null for metastore and there doesn't seem to be a way to tell if one is running as metastore or hiveserver2!
         */
        LOG.warn("filterListCmdObjects: user information not available");
        ret = objs;
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filterListCmdObjects: number of input objects[" + objs.size() + "]");
        }
        // get user/group info
        // we know this can't be null since we checked it above!
        UserGroupInformation ugi = getCurrentUserGroupInfo();
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("filterListCmdObjects: user[%s], groups%s", user, groups));
        }
        if (ret == null) {
            // if we got any items to filter then we can't return null; we must return a list even if it's empty.
            ret = new ArrayList<HivePrivilegeObject>(objs.size());
        }
        for (HivePrivilegeObject privilegeObject : objs) {
            if (LOG.isDebugEnabled()) {
                HivePrivObjectActionType actionType = privilegeObject.getActionType();
                HivePrivilegeObjectType objectType = privilegeObject.getType();
                String objectName = privilegeObject.getObjectName();
                String dbName = privilegeObject.getDbname();
                List<String> columns = privilegeObject.getColumns();
                List<String> partitionKeys = privilegeObject.getPartKeys();
                String commandString = context == null ? null : context.getCommandString();
                String ipAddress = context == null ? null : context.getIpAddress();
                final String format = "filterListCmdObjects: actionType[%s], objectType[%s], objectName[%s], dbName[%s], columns[%s], partitionKeys[%s]; context: commandString[%s], ipAddress[%s]";
                LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns, partitionKeys, commandString, ipAddress));
            }
            RangerHiveResource resource = createHiveResource(privilegeObject);
            if (resource == null) {
                LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
            } else {
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, context, sessionContext, hivePlugin.getClusterName());
                RangerAccessResult result = hivePlugin.isAccessAllowed(request);
                if (result == null) {
                    LOG.error("filterListCmdObjects: Internal error: null RangerAccessResult object received back from isAccessAllowed()!");
                } else if (!result.getIsAllowed()) {
                    if (LOG.isDebugEnabled()) {
                        String path = resource.getAsString();
                        LOG.debug(String.format("filterListCmdObjects: Permission denied: user [%s] does not have [%s] privilege on [%s]. resource[%s], request[%s], result[%s]", user, request.getHiveAccessType().name(), path, resource, request, result));
                    }
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("filterListCmdObjects: access allowed. resource[%s], request[%s], result[%s]", resource, request, result));
                    }
                    ret.add(privilegeObject);
                }
            }
        }
    }
    RangerPerfTracer.log(perf);
    if (LOG.isDebugEnabled()) {
        int count = ret == null ? 0 : ret.size();
        LOG.debug(String.format("<== filterListCmdObjects: count[%d], ret[%s]", count, ret));
    }
    return ret;
}
Also used: RangerPerfTracer (org.apache.ranger.plugin.util.RangerPerfTracer), HivePrivObjectActionType (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType), RangerAccessResult (org.apache.ranger.plugin.policyengine.RangerAccessResult), HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject), HiveAuthzSessionContext (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext), HivePrivilegeObjectType (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
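
The createHiveResource(privilegeObject) helper called above is defined in the same class but not shown here. A hedged sketch of the kind of mapping it performs, covering only databases and tables/views (the project's actual helper handles more object types and detail):

static RangerHiveResource toHiveResource(HivePrivilegeObject privilegeObject) {
    // Sketch only: map a Hive privilege object onto a Ranger Hive resource.
    RangerHiveResource resource = null;
    HivePrivilegeObjectType objectType = privilegeObject.getType();
    String objectName = privilegeObject.getObjectName();
    String dbName = privilegeObject.getDbname();

    switch (objectType) {
        case DATABASE:
            resource = new RangerHiveResource(HiveObjectType.DATABASE, objectName);
            break;
        case TABLE_OR_VIEW:
            resource = new RangerHiveResource(HiveObjectType.TABLE, dbName, objectName);
            break;
        default:
            // other object types are left unmapped in this sketch
            break;
    }
    return resource;
}

A null return is how filterListCmdObjects() learns that an object kind is not authorized by the plugin, which is why the caller logs an error and skips the object rather than failing the whole filter call.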

Example 18 with RangerAccessResult

use of org.apache.ranger.plugin.policyengine.RangerAccessResult in project ranger by apache.

the class RangerHiveAuthorizer method checkPrivileges.

/**
 * Check if user has privileges to do this action on these objects
 * @param hiveOpType
 * @param inputHObjs
 * @param outputHObjs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
@Override
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();
    if (ugi == null) {
        throw new HiveAccessControlException("Permission denied: user information not available");
    }
    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();
    RangerPerfTracer perf = null;
    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        String clusterName = hivePlugin.getClusterName();
        if (LOG.isDebugEnabled()) {
            LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
        }
        if (hiveOpType == HiveOperationType.DFS) {
            handleDfsCommand(hiveOpType, inputHObjs, user, auditHandler);
            return;
        }
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.checkPrivileges(hiveOpType=" + hiveOpType + ")");
        }
        List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();
        if (!CollectionUtils.isEmpty(inputHObjs)) {
            for (HivePrivilegeObject hiveObj : inputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
                if (resource == null) {
                    // possible if the input object is of a kind that we don't currently authorize
                    continue;
                }
                String path = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                    }
                    continue;
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, true);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext, clusterName);
                    requests.add(request);
                }
            }
        } else {
            // this should happen only for SHOWDATABASES
            if (hiveOpType == HiveOperationType.SHOWDATABASES) {
                RangerHiveResource resource = new RangerHiveResource(HiveObjectType.DATABASE, null);
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType.name(), HiveAccessType.USE, context, sessionContext, clusterName);
                requests.add(request);
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("RangerHiveAuthorizer.checkPrivileges: Unexpected operation type[" + hiveOpType + "] received with empty input objects list!");
                }
            }
        }
        if (!CollectionUtils.isEmpty(outputHObjs)) {
            for (HivePrivilegeObject hiveObj : outputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
                if (resource == null) {
                    // possible if the output object is of a kind that we don't currently authorize
                    continue;
                }
                String path = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                    }
                    continue;
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, false);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext, clusterName);
                    requests.add(request);
                }
            }
        }
        buildRequestContextWithAllAccessedResources(requests);
        for (RangerHiveAccessRequest request : requests) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("request: " + request);
            }
            RangerHiveResource resource = (RangerHiveResource) request.getResource();
            RangerAccessResult result = null;
            if (resource.getObjectType() == HiveObjectType.COLUMN && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
                List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();
                String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);
                // in case of multiple columns, original request is not sent to the plugin; hence service-def will not be set
                resource.setServiceDef(hivePlugin.getServiceDef());
                for (String column : columns) {
                    if (column != null) {
                        column = column.trim();
                    }
                    if (StringUtils.isBlank(column)) {
                        continue;
                    }
                    RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN, resource.getDatabase(), resource.getTable(), column);
                    RangerHiveAccessRequest colRequest = request.copy();
                    colRequest.setResource(colResource);
                    colRequests.add(colRequest);
                }
                Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler);
                if (colResults != null) {
                    for (RangerAccessResult colResult : colResults) {
                        result = colResult;
                        if (result != null && !result.getIsAllowed()) {
                            break;
                        }
                    }
                }
            } else {
                result = hivePlugin.isAccessAllowed(request, auditHandler);
            }
            if ((result == null || result.getIsAllowed()) && isBlockAccessIfRowfilterColumnMaskSpecified(hiveOpType, request)) {
                // check if row-filtering is applicable for the table/view being accessed
                HiveAccessType savedAccessType = request.getHiveAccessType();
                RangerHiveResource tblResource = new RangerHiveResource(HiveObjectType.TABLE, resource.getDatabase(), resource.getTable());
                // filtering/masking policies are defined only for SELECT
                request.setHiveAccessType(HiveAccessType.SELECT);
                request.setResource(tblResource);
                RangerAccessResult rowFilterResult = getRowFilterResult(request);
                if (isRowFilterEnabled(rowFilterResult)) {
                    if (result == null) {
                        result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, rowFilterResult.getServiceName(), rowFilterResult.getServiceDef(), request);
                    }
                    result.setIsAllowed(false);
                    result.setPolicyId(rowFilterResult.getPolicyId());
                    result.setReason("User does not have acces to all rows of the table");
                } else {
                    // check if masking is enabled for any column in the table/view
                    request.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS);
                    RangerAccessResult dataMaskResult = getDataMaskResult(request);
                    if (isDataMaskEnabled(dataMaskResult)) {
                        if (result == null) {
                            result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, dataMaskResult.getServiceName(), dataMaskResult.getServiceDef(), request);
                        }
                        result.setIsAllowed(false);
                        result.setPolicyId(dataMaskResult.getPolicyId());
                        result.setReason("User does not have access to unmasked column values");
                    }
                }
                request.setHiveAccessType(savedAccessType);
                request.setResource(resource);
                if (result != null && !result.getIsAllowed()) {
                    auditHandler.processResult(result);
                }
            }
            if (result == null || !result.getIsAllowed()) {
                String path = resource.getAsString();
                path = (path == null) ? "Unknown resource!!" : buildPathForException(path, hiveOpType);
                throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, request.getHiveAccessType().name(), path));
            }
        }
    } finally {
        auditHandler.flushAudit();
        RangerPerfTracer.log(perf);
    }
}
Also used: RangerPerfTracer (org.apache.ranger.plugin.util.RangerPerfTracer), ArrayList (java.util.ArrayList), RangerAccessResult (org.apache.ranger.plugin.policyengine.RangerAccessResult), HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject), FsAction (org.apache.hadoop.fs.permission.FsAction), HiveAccessControlException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException), HiveAuthzSessionContext (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext), RangerAccessRequest (org.apache.ranger.plugin.policyengine.RangerAccessRequest), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
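
The existsByResourceAndAccessType(requests, resource, accessType) check above de-duplicates requests before they are handed to the policy engine. A minimal sketch of such a check, comparing the flattened resource string and the access type (an assumption about the helper, not its exact implementation):

// StringUtils below is org.apache.commons.lang.StringUtils, as elsewhere in this class.
static boolean existsByResourceAndAccessType(Collection<RangerHiveAccessRequest> requests,
                                             RangerHiveResource resource,
                                             HiveAccessType accessType) {
    if (requests == null || resource == null) {
        return false;
    }
    String resourceStr = resource.getAsString();
    for (RangerHiveAccessRequest request : requests) {
        // Treat two requests as duplicates when they name the same resource and access type.
        if (request.getHiveAccessType() == accessType
                && StringUtils.equals(request.getResource().getAsString(), resourceStr)) {
            return true;
        }
    }
    return false;
}

Skipping duplicates keeps checkPrivileges() from evaluating, and auditing, the same resource/access-type pair twice when it appears in both the input and output object lists.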

Example 19 with RangerAccessResult

use of org.apache.ranger.plugin.policyengine.RangerAccessResult in project ranger by apache.

the class TestPolicyEngine method runTests.

private void runTests(InputStreamReader reader, String testName) {
    try {
        PolicyEngineTestCase testCase = gsonBuilder.fromJson(reader, PolicyEngineTestCase.class);
        assertTrue("invalid input: " + testName, testCase != null && testCase.serviceDef != null && testCase.policies != null && testCase.tests != null);
        ServicePolicies servicePolicies = new ServicePolicies();
        servicePolicies.setServiceName(testCase.serviceName);
        servicePolicies.setServiceDef(testCase.serviceDef);
        servicePolicies.setPolicies(testCase.policies);
        RangerPolicyEngineOptions policyEngineOptions = new RangerPolicyEngineOptions();
        RangerPolicyEngine policyEngine = new RangerPolicyEngineImpl(testName, servicePolicies, policyEngineOptions);
        RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler();
        for (TestData test : testCase.tests) {
            RangerAccessResult expected = test.result;
            RangerAccessRequest request = test.request;
            policyEngine.preProcess(request);
            RangerAccessResult result = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, auditHandler);
            assertNotNull("result was null! - " + test.name, result);
            assertEquals("isAllowed mismatched! - " + test.name, expected.getIsAllowed(), result.getIsAllowed());
            assertEquals("isAudited mismatched! - " + test.name, expected.getIsAudited(), result.getIsAudited());
            assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId());
        }
    } catch (Throwable excp) {
        excp.printStackTrace();
    }
}
Also used: RangerPolicyEngineImpl (org.apache.ranger.plugin.policyengine.RangerPolicyEngineImpl), RangerAccessResultProcessor (org.apache.ranger.plugin.policyengine.RangerAccessResultProcessor), ServicePolicies (org.apache.ranger.plugin.util.ServicePolicies), TestData (org.apache.ranger.authorization.hbase.TestPolicyEngine.PolicyEngineTestCase.TestData), RangerAccessResult (org.apache.ranger.plugin.policyengine.RangerAccessResult), RangerPolicyEngine (org.apache.ranger.plugin.policyengine.RangerPolicyEngine), RangerDefaultAuditHandler (org.apache.ranger.plugin.audit.RangerDefaultAuditHandler), RangerAccessRequest (org.apache.ranger.plugin.policyengine.RangerAccessRequest), RangerPolicyEngineOptions (org.apache.ranger.plugin.policyengine.RangerPolicyEngineOptions)
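
gsonBuilder.fromJson(reader, PolicyEngineTestCase.class) above deserializes a JSON test file into a nested holder class. A sketch of the shape implied by the fields runTests() reads; the field types are inferred from how they are used, so treat them as assumptions rather than the test's exact declaration:

static class PolicyEngineTestCase {
    public String                  serviceName;   // passed to ServicePolicies.setServiceName()
    public RangerServiceDef        serviceDef;    // passed to ServicePolicies.setServiceDef()
    public List<RangerPolicy>      policies;      // passed to ServicePolicies.setPolicies()
    public List<TestData>          tests;         // iterated by runTests()

    static class TestData {
        public String              name;          // used in assertion messages
        public RangerAccessRequest request;       // fed to preProcess()/evaluatePolicies()
        public RangerAccessResult  result;        // expected isAllowed/isAudited/policyId
    }
}

Each JSON test file therefore bundles a service definition, a policy set, and a list of request/expected-result pairs, which is what lets one runTests() driver cover many policy-engine scenarios.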

Example 20 with RangerAccessResult

use of org.apache.ranger.plugin.policyengine.RangerAccessResult in project ranger by apache.

the class RangerStormAuthorizer method permit.

/**
 * permit() method is invoked for each incoming Thrift request.
 * @param aRequestContext request context includes info about
 * @param aOperationName operation name
 * @param aTopologyConfigMap configuration of targeted topology
 * @return true if the request is authorized, false if reject
 */
@Override
public boolean permit(ReqContext aRequestContext, String aOperationName, Map aTopologyConfigMap) {
    boolean accessAllowed = false;
    boolean isAuditEnabled = false;
    String topologyName = null;
    RangerPerfTracer perf = null;
    try {
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_STORMAUTH_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_STORMAUTH_REQUEST_LOG, "RangerStormAuthorizer.permit()");
        }
        topologyName = (aTopologyConfigMap == null ? "" : (String) aTopologyConfigMap.get(Config.TOPOLOGY_NAME));
        if (LOG.isDebugEnabled()) {
            LOG.debug("[req " + aRequestContext.requestID() + "] Access " + " from: [" + aRequestContext.remoteAddress() + "]" + " user: [" + aRequestContext.principal() + "]," + " op:   [" + aOperationName + "]," + "topology: [" + topologyName + "]");
            if (aTopologyConfigMap != null) {
                for (Object keyObj : aTopologyConfigMap.keySet()) {
                    Object valObj = aTopologyConfigMap.get(keyObj);
                    LOG.debug("TOPOLOGY CONFIG MAP [" + keyObj + "] => [" + valObj + "]");
                }
            } else {
                LOG.debug("TOPOLOGY CONFIG MAP is passed as null.");
            }
        }
        if (noAuthzOperations.contains(aOperationName)) {
            accessAllowed = true;
        } else if (plugin == null) {
            LOG.info("Ranger plugin not initialized yet! Skipping authorization;  allowedFlag => [" + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
        } else {
            String userName = null;
            String[] groups = null;
            Principal user = aRequestContext.principal();
            if (user != null) {
                userName = user.getName();
                if (userName != null) {
                    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userName);
                    userName = ugi.getShortUserName();
                    groups = ugi.getGroupNames();
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("User found from principal [" + user.getName() + "] => user:[" + userName + "], groups:[" + StringUtil.toString(groups) + "]");
                    }
                }
            }
            if (userName != null) {
                String clientIp = (aRequestContext.remoteAddress() == null ? null : aRequestContext.remoteAddress().getHostAddress());
                String clusterName = plugin.getClusterName();
                RangerAccessRequest accessRequest = plugin.buildAccessRequest(userName, groups, clientIp, topologyName, aOperationName, clusterName);
                RangerAccessResult result = plugin.isAccessAllowed(accessRequest);
                accessAllowed = result != null && result.getIsAllowed();
                isAuditEnabled = result != null && result.getIsAudited();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("User found from principal [" + userName + "], groups [" + StringUtil.toString(groups) + "]: verifying using [" + plugin.getClass().getName() + "], allowedFlag => [" + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
                }
            } else {
                LOG.info("NULL User found from principal [" + user + "]: Skipping authorization;  allowedFlag => [" + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
            }
        }
    } catch (Throwable t) {
        LOG.error("RangerStormAuthorizer found this exception", t);
    } finally {
        RangerPerfTracer.log(perf);
        if (LOG.isDebugEnabled()) {
            LOG.debug("[req " + aRequestContext.requestID() + "] Access " + " from: [" + aRequestContext.remoteAddress() + "]" + " user: [" + aRequestContext.principal() + "]," + " op:   [" + aOperationName + "]," + "topology: [" + topologyName + "] => returns [" + accessAllowed + "], Audit Enabled:" + isAuditEnabled);
        }
    }
    return accessAllowed;
}
Also used: RangerPerfTracer (org.apache.ranger.plugin.util.RangerPerfTracer), RangerAccessResult (org.apache.ranger.plugin.policyengine.RangerAccessResult), RangerAccessRequest (org.apache.ranger.plugin.policyengine.RangerAccessRequest), Principal (java.security.Principal), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
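
plugin.buildAccessRequest(...) above assembles the RangerAccessRequest for the Storm plugin. A hedged sketch of such a builder using the generic request/resource classes from Example 16; both the method body and the "topology" resource key are assumptions about the Storm service-def, not the project's exact code.

// Assumes java.util.Arrays, java.util.Collections, java.util.HashSet, java.util.Set are imported.
static RangerAccessRequest buildAccessRequest(String userName, String[] userGroups, String clientIp,
                                              String topologyName, String operationName, String clusterName) {
    RangerAccessResourceImpl resource = new RangerAccessResourceImpl();
    // "topology" is assumed to be the resource name defined in the Storm service-def JSON.
    resource.setValue("topology", topologyName);

    Set<String> groups = (userGroups == null) ? Collections.<String>emptySet()
                                              : new HashSet<String>(Arrays.asList(userGroups));

    // The operation name doubles as the access type in this sketch.
    RangerAccessRequestImpl request = new RangerAccessRequestImpl(resource, operationName, userName, groups);
    request.setClientIPAddress(clientIp);
    request.setAction(operationName);
    request.setClusterName(clusterName);
    return request;
}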

Aggregations

RangerAccessResult (org.apache.ranger.plugin.policyengine.RangerAccessResult): 20
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 6
RangerPerfTracer (org.apache.ranger.plugin.util.RangerPerfTracer): 6
RangerAccessRequest (org.apache.ranger.plugin.policyengine.RangerAccessRequest): 5
HiveAuthzSessionContext (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext): 4
RangerAccessRequestImpl (org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl): 4
RangerAccessResourceImpl (org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl): 4
Principal (java.security.Principal): 2
Date (java.util.Date): 2
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 2
HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject): 2
AuthzAuditEvent (org.apache.ranger.audit.model.AuthzAuditEvent): 2
RangerBasePlugin (org.apache.ranger.plugin.service.RangerBasePlugin): 2
ArrayList (java.util.ArrayList): 1
HashSet (java.util.HashSet): 1
Subject (javax.security.auth.Subject): 1
IAE (org.apache.druid.java.util.common.IAE): 1
Access (org.apache.druid.server.security.Access): 1
FsAction (org.apache.hadoop.fs.permission.FsAction): 1
HiveAccessControlException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException): 1