
Example 21 with HivePrivilegeObject

use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project ranger by apache.

the class RangerHiveAuthorizer method filterListCmdObjects.

/**
 * Filter a list of objects (e.g. the results of a SHOW TABLES/SHOW DATABASES command)
 * down to those the current user is authorized to see.
 * @param objs objects to filter; may be null or empty
 * @param context command context (command string, client IP address); may be null
 * @return the authorized subset of objs; null only when objs is null
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
// Commented out to avoid build errors until this interface is stable in Hive Branch
// @Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> objs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> filterListCmdObjects(%s, %s)", objs, context));
    }
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.filterListCmdObjects()");
    }
    List<HivePrivilegeObject> ret = null;
    // bail out early if nothing is there to validate!
    if (objs == null) {
        LOG.debug("filterListCmdObjects: meta objects list was null!");
    } else if (objs.isEmpty()) {
        LOG.debug("filterListCmdObjects: meta objects list was empty!");
        ret = objs;
    } else if (getCurrentUserGroupInfo() == null) {
        /*
         * This is null for metastore and there doesn't seem to be a way to tell if one is running as metastore or hiveserver2!
         */
        LOG.warn("filterListCmdObjects: user information not available");
        ret = objs;
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filterListCmdObjects: number of input objects[" + objs.size() + "]");
        }
        // get user/group info
        // we know this can't be null since we checked it above!
        UserGroupInformation ugi = getCurrentUserGroupInfo();
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("filterListCmdObjects: user[%s], groups%s", user, groups));
        }
        if (ret == null) {
            // if we got any items to filter, we can't return null; we must return a list, even if it's empty.
            ret = new ArrayList<HivePrivilegeObject>(objs.size());
        }
        for (HivePrivilegeObject privilegeObject : objs) {
            if (LOG.isDebugEnabled()) {
                HivePrivObjectActionType actionType = privilegeObject.getActionType();
                HivePrivilegeObjectType objectType = privilegeObject.getType();
                String objectName = privilegeObject.getObjectName();
                String dbName = privilegeObject.getDbname();
                List<String> columns = privilegeObject.getColumns();
                List<String> partitionKeys = privilegeObject.getPartKeys();
                String commandString = context == null ? null : context.getCommandString();
                String ipAddress = context == null ? null : context.getIpAddress();
                final String format = "filterListCmdObjects: actionType[%s], objectType[%s], objectName[%s], dbName[%s], columns[%s], partitionKeys[%s]; context: commandString[%s], ipAddress[%s]";
                LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns, partitionKeys, commandString, ipAddress));
            }
            RangerHiveResource resource = createHiveResource(privilegeObject);
            if (resource == null) {
                LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
            } else {
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, context, sessionContext, hivePlugin.getClusterName());
                RangerAccessResult result = hivePlugin.isAccessAllowed(request);
                if (result == null) {
                    LOG.error("filterListCmdObjects: Internal error: null RangerAccessResult object received back from isAccessAllowed()!");
                } else if (!result.getIsAllowed()) {
                    if (LOG.isDebugEnabled()) {
                        String path = resource.getAsString();
                        LOG.debug(String.format("filterListCmdObjects: Permission denied: user [%s] does not have [%s] privilege on [%s]. resource[%s], request[%s], result[%s]", user, request.getHiveAccessType().name(), path, resource, request, result));
                    }
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("filterListCmdObjects: access allowed. resource[%s], request[%s], result[%s]", resource, request, result));
                    }
                    ret.add(privilegeObject);
                }
            }
        }
    }
    RangerPerfTracer.log(perf);
    if (LOG.isDebugEnabled()) {
        int count = ret == null ? 0 : ret.size();
        LOG.debug(String.format("<== filterListCmdObjects: count[%d], ret[%s]", count, ret));
    }
    return ret;
}
Also used : RangerPerfTracer(org.apache.ranger.plugin.util.RangerPerfTracer) HivePrivObjectActionType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType) RangerAccessResult(org.apache.ranger.plugin.policyengine.RangerAccessResult) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) HiveAuthzSessionContext(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext) HivePrivilegeObjectType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
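
For orientation, here is a minimal caller-side sketch of how this filter might be exercised. The database/table values and the authorizer variable are illustrative, and it assumes your Hive version exposes filterListCmdObjects on the HiveAuthorizer interface (the commented-out @Override above notes the interface was still being stabilized).

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

public class FilterListCmdObjectsSketch {

    // Returns only the tables the current user is authorized to see in a SHOW TABLES style listing.
    static List<HivePrivilegeObject> filterShowTables(HiveAuthorizer authorizer) throws Exception {
        // Candidate objects as HiveServer2 would present them (illustrative values).
        List<HivePrivilegeObject> candidates = Arrays.asList(
                new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "sales_db", "orders"),
                new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "sales_db", "customers"));

        // These two values are what the debug log above reads back via getCommandString()/getIpAddress().
        HiveAuthzContext.Builder ctxBuilder = new HiveAuthzContext.Builder();
        ctxBuilder.setCommandString("SHOW TABLES IN sales_db");
        ctxBuilder.setUserIpAddress("10.1.2.3");
        HiveAuthzContext context = ctxBuilder.build();

        // Unauthorized objects are silently dropped; the returned list is non-null for a non-null input.
        return authorizer.filterListCmdObjects(candidates, context);
    }
}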

Example 22 with HivePrivilegeObject

use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project ranger by apache.

the class RangerHiveAuthorizer method checkPrivileges.

/**
 * Check if user has privileges to do this action on these objects
 * @param hiveOpType
 * @param inputHObjs
 * @param outputHObjs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
@Override
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();
    if (ugi == null) {
        throw new HiveAccessControlException("Permission denied: user information not available");
    }
    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();
    RangerPerfTracer perf = null;
    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        String clusterName = hivePlugin.getClusterName();
        if (LOG.isDebugEnabled()) {
            LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
        }
        if (hiveOpType == HiveOperationType.DFS) {
            handleDfsCommand(hiveOpType, inputHObjs, user, auditHandler);
            return;
        }
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.checkPrivileges(hiveOpType=" + hiveOpType + ")");
        }
        List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();
        if (!CollectionUtils.isEmpty(inputHObjs)) {
            for (HivePrivilegeObject hiveObj : inputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
                if (resource == null) {
                    // possible if the input object is of a kind that we don't currently authorize
                    continue;
                }
                String path = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                    }
                    continue;
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, true);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext, clusterName);
                    requests.add(request);
                }
            }
        } else {
            // this should happen only for SHOWDATABASES
            if (hiveOpType == HiveOperationType.SHOWDATABASES) {
                RangerHiveResource resource = new RangerHiveResource(HiveObjectType.DATABASE, null);
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType.name(), HiveAccessType.USE, context, sessionContext, clusterName);
                requests.add(request);
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("RangerHiveAuthorizer.checkPrivileges: Unexpected operation type[" + hiveOpType + "] received with empty input objects list!");
                }
            }
        }
        if (!CollectionUtils.isEmpty(outputHObjs)) {
            for (HivePrivilegeObject hiveObj : outputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
                if (resource == null) {
                    // possible if the output object is of a kind that we don't currently authorize
                    continue;
                }
                String path = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                    }
                    continue;
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, false);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext, clusterName);
                    requests.add(request);
                }
            }
        }
        buildRequestContextWithAllAccessedResources(requests);
        for (RangerHiveAccessRequest request : requests) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("request: " + request);
            }
            RangerHiveResource resource = (RangerHiveResource) request.getResource();
            RangerAccessResult result = null;
            if (resource.getObjectType() == HiveObjectType.COLUMN && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
                List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();
                String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);
                // in case of multiple columns, original request is not sent to the plugin; hence service-def will not be set
                resource.setServiceDef(hivePlugin.getServiceDef());
                for (String column : columns) {
                    if (column != null) {
                        column = column.trim();
                    }
                    if (StringUtils.isBlank(column)) {
                        continue;
                    }
                    RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN, resource.getDatabase(), resource.getTable(), column);
                    RangerHiveAccessRequest colRequest = request.copy();
                    colRequest.setResource(colResource);
                    colRequests.add(colRequest);
                }
                Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler);
                if (colResults != null) {
                    for (RangerAccessResult colResult : colResults) {
                        result = colResult;
                        if (result != null && !result.getIsAllowed()) {
                            break;
                        }
                    }
                }
            } else {
                result = hivePlugin.isAccessAllowed(request, auditHandler);
            }
            if ((result == null || result.getIsAllowed()) && isBlockAccessIfRowfilterColumnMaskSpecified(hiveOpType, request)) {
                // check if row-filtering is applicable for the table/view being accessed
                HiveAccessType savedAccessType = request.getHiveAccessType();
                RangerHiveResource tblResource = new RangerHiveResource(HiveObjectType.TABLE, resource.getDatabase(), resource.getTable());
                // filtering/masking policies are defined only for SELECT
                request.setHiveAccessType(HiveAccessType.SELECT);
                request.setResource(tblResource);
                RangerAccessResult rowFilterResult = getRowFilterResult(request);
                if (isRowFilterEnabled(rowFilterResult)) {
                    if (result == null) {
                        result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, rowFilterResult.getServiceName(), rowFilterResult.getServiceDef(), request);
                    }
                    result.setIsAllowed(false);
                    result.setPolicyId(rowFilterResult.getPolicyId());
                    result.setReason("User does not have acces to all rows of the table");
                } else {
                    // check if masking is enabled for any column in the table/view
                    request.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS);
                    RangerAccessResult dataMaskResult = getDataMaskResult(request);
                    if (isDataMaskEnabled(dataMaskResult)) {
                        if (result == null) {
                            result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, dataMaskResult.getServiceName(), dataMaskResult.getServiceDef(), request);
                        }
                        result.setIsAllowed(false);
                        result.setPolicyId(dataMaskResult.getPolicyId());
                        result.setReason("User does not have access to unmasked column values");
                    }
                }
                request.setHiveAccessType(savedAccessType);
                request.setResource(resource);
                if (result != null && !result.getIsAllowed()) {
                    auditHandler.processResult(result);
                }
            }
            if (result == null || !result.getIsAllowed()) {
                String path = resource.getAsString();
                path = (path == null) ? "Unknown resource!!" : buildPathForException(path, hiveOpType);
                throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, request.getHiveAccessType().name(), path));
            }
        }
    } finally {
        auditHandler.flushAudit();
        RangerPerfTracer.log(perf);
    }
}
Also used : RangerPerfTracer(org.apache.ranger.plugin.util.RangerPerfTracer) ArrayList(java.util.ArrayList) RangerAccessResult(org.apache.ranger.plugin.policyengine.RangerAccessResult) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) FsAction(org.apache.hadoop.fs.permission.FsAction) HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HiveAuthzSessionContext(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext) RangerAccessRequest(org.apache.ranger.plugin.policyengine.RangerAccessRequest) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
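
A caller-side sketch, with illustrative names, of how a frontend would drive this check; denial surfaces as the HiveAccessControlException thrown above.

import java.util.Collections;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

public class CheckPrivilegesSketch {

    // Returns true if the current user may SELECT from db.table, false if the authorizer denies it.
    static boolean canSelect(HiveAuthorizer authorizer, String db, String table) throws Exception {
        HivePrivilegeObject input = new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, db, table);

        HiveAuthzContext.Builder ctxBuilder = new HiveAuthzContext.Builder();
        ctxBuilder.setCommandString("SELECT * FROM " + db + "." + table);   // illustrative command text
        HiveAuthzContext context = ctxBuilder.build();

        try {
            // QUERY with the table as an input and no outputs models a plain SELECT.
            authorizer.checkPrivileges(HiveOperationType.QUERY,
                    Collections.singletonList(input),
                    Collections.<HivePrivilegeObject>emptyList(),
                    context);
            return true;
        } catch (HiveAccessControlException denied) {
            // Message carries user, privilege and resource, as formatted in the method above.
            return false;
        }
    }
}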

Example 23 with HivePrivilegeObject

use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project ranger by apache.

the class RangerHiveAuthorizer method handleDfsCommand.

private void handleDfsCommand(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, String user, RangerHiveAuditHandler auditHandler) throws HiveAuthzPluginException, HiveAccessControlException {
    String dfsCommandParams = null;
    if (inputHObjs != null) {
        for (HivePrivilegeObject hiveObj : inputHObjs) {
            if (hiveObj.getType() == HivePrivilegeObjectType.COMMAND_PARAMS) {
                dfsCommandParams = StringUtil.toString(hiveObj.getCommandParams());
                if (!StringUtil.isEmpty(dfsCommandParams)) {
                    break;
                }
            }
        }
    }
    int serviceType = -1;
    String serviceName = null;
    if (hivePlugin != null) {
        serviceType = hivePlugin.getServiceDefId();
        serviceName = hivePlugin.getServiceName();
    }
    auditHandler.logAuditEventForDfs(user, dfsCommandParams, false, serviceType, serviceName);
    throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have privilege for [%s] command", user, hiveOpType.name()));
}
Also used : HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)
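
For context, a hedged sketch of how a dfs command reaches this method: the caller wraps the command tokens in a COMMAND_PARAMS object and invokes checkPrivileges with HiveOperationType.DFS, which dispatches to handleDfsCommand as in Example 22. The static factory HivePrivilegeObject.createHivePrivilegeObject(List<String>) is assumed here; if your Hive version does not provide it, the equivalent multi-argument constructor can be used instead.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

public class DfsCommandAuthzSketch {

    static void authorizeDfs(HiveAuthorizer authorizer) throws Exception {
        // Illustrative dfs command tokens; these end up in hiveObj.getCommandParams() above.
        List<String> dfsTokens = Arrays.asList("-ls", "/warehouse");
        HivePrivilegeObject commandObj = HivePrivilegeObject.createHivePrivilegeObject(dfsTokens); // assumed factory; yields a COMMAND_PARAMS object

        HiveAuthzContext.Builder ctxBuilder = new HiveAuthzContext.Builder();
        ctxBuilder.setCommandString("dfs -ls /warehouse");

        // With this Ranger authorizer the call always audits the attempt and then throws
        // HiveAccessControlException, since handleDfsCommand denies dfs commands outright.
        authorizer.checkPrivileges(HiveOperationType.DFS,
                Collections.singletonList(commandObj),
                Collections.<HivePrivilegeObject>emptyList(),
                ctxBuilder.build());
    }
}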

Example 24 with HivePrivilegeObject

use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project drill by axbaretto.

the class HiveAuthorizationHelper method authorizeReadTable.

/**
 * Check authorization for "READ TABLE" for given db.table. A {@link HiveAccessControlException} is thrown
 * for illegal access.
 * @param dbName
 * @param tableName
 */
public void authorizeReadTable(final String dbName, final String tableName) throws HiveAccessControlException {
    if (!authzEnabled) {
        return;
    }
    HivePrivilegeObject toRead = new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
    authorize(HiveOperationType.QUERY, ImmutableList.of(toRead), Collections.<HivePrivilegeObject>emptyList(), "READ TABLE");
}
Also used : HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)
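
The private authorize(...) helper used above is not part of this snippet. A plausible sketch of its shape, assuming it simply builds a context and delegates to the HiveAuthorizer configured for the session; the class name, field name, and exception wrapping below are assumptions, not Drill's actual code.

import java.util.List;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

// Hypothetical shape of the helper the two Drill examples rely on.
class AuthorizeHelperSketch {

    private final HiveAuthorizer authorizerV2;   // assumed field: the authorizer built from the Hive session

    AuthorizeHelperSketch(HiveAuthorizer authorizerV2) {
        this.authorizerV2 = authorizerV2;
    }

    void authorize(final HiveOperationType hiveOpType, final List<HivePrivilegeObject> toRead,
            final List<HivePrivilegeObject> toWrite, final String cmd) throws HiveAccessControlException {
        try {
            final HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
            authzContextBuilder.setCommandString(cmd);
            authorizerV2.checkPrivileges(hiveOpType, toRead, toWrite, authzContextBuilder.build());
        } catch (final HiveAccessControlException e) {
            throw e;   // surface authorization failures to the caller unchanged
        } catch (final Exception e) {
            throw new RuntimeException("Failed to use the Hive authorization components: " + e.getMessage(), e);
        }
    }
}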

Example 25 with HivePrivilegeObject

use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project drill by apache.

the class HiveAuthorizationHelper method authorizeShowTables.

/**
 * Check authorization for "SHOW TABLES" command in given Hive db. A {@link HiveAccessControlException} is thrown
 * for illegal access.
 * @param dbName
 */
void authorizeShowTables(final String dbName) throws HiveAccessControlException {
    if (!authzEnabled) {
        return;
    }
    final HivePrivilegeObject toRead = new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null);
    authorize(HiveOperationType.SHOWTABLES, ImmutableList.of(toRead), Collections.<HivePrivilegeObject>emptyList(), "SHOW TABLES");
}
Also used : HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)
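
Caller-side usage of these helpers is then a matter of invoking them before planning the query and translating a denial into the engine's own error. A short illustrative fragment; 'helper' stands in for an instance of HiveAuthorizationHelper, the names are made up, and authorizeShowTables is package-private, so this code would live in the same package.

try {
    helper.authorizeShowTables("sales_db");            // throws HiveAccessControlException on denial
    helper.authorizeReadTable("sales_db", "orders");   // throws HiveAccessControlException on denial
} catch (final HiveAccessControlException e) {
    // Translate into the engine's own permission error for the end user.
    throw new RuntimeException("Not authorized: " + e.getMessage(), e);
}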

Aggregations

HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject): 26 usages
ArrayList (java.util.ArrayList): 10 usages
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 5 usages
HivePrincipal (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal): 5 usages
HiveAccessControlException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException): 4 usages
HivePrivilege (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege): 4 usages
HivePrivilegeInfo (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo): 4 usages
HiveObjectPrivilege (org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege): 3 usages
HiveObjectRef (org.apache.hadoop.hive.metastore.api.HiveObjectRef): 3 usages
PrivilegeGrantInfo (org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo): 3 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3 usages
HivePrivilegeObjectType (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType): 3 usages
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 3 usages
RangerPerfTracer (org.apache.ranger.plugin.util.RangerPerfTracer): 3 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
LinkedList (java.util.LinkedList): 2 usages
List (java.util.List): 2 usages
RelNode (org.apache.calcite.rel.RelNode): 2 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2 usages
PrincipalType (org.apache.hadoop.hive.metastore.api.PrincipalType): 2 usages