Search in sources:

Example 66 with HivePrivilegeObject

Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the project ranger by apache.

From the class RangerHivePlugin, the method applyRowFilterAndColumnMasking:

/**
 * Returns the subset of the given privilege objects that need a row-filter
 * or column-mask transformation applied. Objects that require no
 * transformation are omitted from the result.
 */
@Override
public List<HivePrivilegeObject> applyRowFilterAndColumnMasking(HiveAuthzContext queryContext, List<HivePrivilegeObject> hiveObjs) throws SemanticException {
    List<HivePrivilegeObject> ret = new ArrayList<>();

    if (LOG.isDebugEnabled()) {
        LOG.debug("==> applyRowFilterAndColumnMasking(" + queryContext + ", objCount=" + hiveObjs.size() + ")");
    }

    RangerPerfTracer perf = null;

    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.applyRowFilterAndColumnMasking()");
    }

    if (CollectionUtils.isNotEmpty(hiveObjs)) {
        for (HivePrivilegeObject privObj : hiveObjs) {
            // A null type is treated as a table/view, the only type that
            // filtering/masking applies to.
            HivePrivilegeObjectType objType = privObj.getType();

            if (objType == null) {
                objType = HivePrivilegeObjectType.TABLE_OR_VIEW;
            }

            if (LOG.isDebugEnabled()) {
                LOG.debug("applyRowFilterAndColumnMasking(hiveObjType=" + objType + ")");
            }

            boolean transformed = false;

            if (objType == HivePrivilegeObjectType.TABLE_OR_VIEW) {
                String dbName  = privObj.getDbname();
                String tblName = privObj.getObjectName();

                // Row-filter: attach the filter expression when one is configured.
                String filterExpr = getRowFilterExpression(queryContext, dbName, tblName);

                if (StringUtils.isNotBlank(filterExpr)) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("rowFilter(database=" + dbName + ", table=" + tblName + "): " + filterExpr);
                    }

                    privObj.setRowFilterExpression(filterExpr);
                    transformed = true;
                }

                // Column-mask: collect one transformer per column; mark the
                // object transformed if any column is masked.
                if (CollectionUtils.isNotEmpty(privObj.getColumns())) {
                    List<String> transformers = new ArrayList<>();

                    for (String column : privObj.getColumns()) {
                        boolean columnTransformed = addCellValueTransformerAndCheckIfTransformed(queryContext, dbName, tblName, column, transformers);

                        if (LOG.isDebugEnabled()) {
                            LOG.debug("addCellValueTransformerAndCheckIfTransformed(database=" + dbName + ", table=" + tblName + ", column=" + column + "): " + columnTransformed);
                        }

                        transformed |= columnTransformed;
                    }

                    privObj.setCellValueTransformers(transformers);
                }
            }

            if (transformed) {
                ret.add(privObj);
            }
        }
    }

    RangerPerfTracer.log(perf);

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== applyRowFilterAndColumnMasking(" + queryContext + ", objCount=" + hiveObjs.size() + "): retCount=" + ret.size());
    }

    return ret;
}
Also used : RangerPerfTracer(org.apache.ranger.plugin.util.RangerPerfTracer) ArrayList(java.util.ArrayList) HivePrivilegeObjectType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)

Example 67 with HivePrivilegeObject

Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the project ranger by apache.

From the class RangerHivePlugin, the method handleDfsCommand:

/**
 * Audits a DFS command attempt and unconditionally denies it. The command
 * parameters (if present among the input objects) are included in the audit
 * event for context.
 */
private void handleDfsCommand(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, String user, RangerHiveAuditHandler auditHandler) throws HiveAuthzPluginException, HiveAccessControlException {
    String dfsCommandParams = null;

    if (inputHObjs != null) {
        // Find the first non-empty COMMAND_PARAMS entry for the audit record.
        for (HivePrivilegeObject obj : inputHObjs) {
            if (obj.getType() != HivePrivilegeObjectType.COMMAND_PARAMS) {
                continue;
            }

            dfsCommandParams = StringUtil.toString(obj.getCommandParams());

            if (!StringUtil.isEmpty(dfsCommandParams)) {
                break;
            }
        }
    }

    int    serviceType = -1;
    String serviceName = null;

    if (hivePlugin != null) {
        serviceType = hivePlugin.getServiceDefId();
        serviceName = hivePlugin.getServiceName();
    }

    auditHandler.logAuditEventForDfs(user, dfsCommandParams, false, serviceType, serviceName);

    // DFS commands are never authorized through Ranger policies: always deny.
    throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have privilege for [%s] command", user, hiveOpType.name()));
}
Also used : HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)

Example 68 with HivePrivilegeObject

Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the project ranger by apache.

From the class RangerHivePlugin, the method checkPrivileges:

/**
 * Check if user has privileges to do this action on these objects
 * @param hiveOpType   the Hive operation being authorized
 * @param inputHObjs   objects read by the operation; may be null or empty
 * @param outputHObjs  objects written by the operation; may be null or empty
 * @param context      the Hive authorization context for the current query
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException if access to any requested object is denied
 */
@Override
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();
    if (ugi == null) {
        throw new HiveAccessControlException("Permission denied: user information not available");
    }
    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler(hivePlugin.getConfig());
    RangerPerfTracer perf = null;
    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        Set<String> roles = getCurrentRolesForUser(user, groups);
        if (LOG.isDebugEnabled()) {
            LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
        }
        // DFS commands are audited and denied separately (handleDfsCommand always throws or returns).
        if (hiveOpType == HiveOperationType.DFS) {
            handleDfsCommand(hiveOpType, inputHObjs, user, auditHandler);
            return;
        }
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.checkPrivileges(hiveOpType=" + hiveOpType + ")");
        }
        // Build one access request per distinct (resource, accessType) pair, first
        // from the input objects, then from the output objects.
        List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();
        if (!CollectionUtils.isEmpty(inputHObjs)) {
            for (HivePrivilegeObject hiveObj : inputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj, inputHObjs, outputHObjs);
                if (resource == null) {
                    // possible if input object/object is of a kind that we don't currently authorize
                    continue;
                }
                String pathStr = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                // URI objects in a supported FS scheme are checked against filesystem
                // permissions (isURIAccessAllowed) instead of Ranger Hive policies.
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(pathStr)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    Path path = new Path(pathStr);
                    FileSystem fs = null;
                    try {
                        fs = FileSystem.get(path.toUri(), getHiveConf());
                    } catch (IOException e) {
                        LOG.error("Error getting permissions for " + path, e);
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path), e);
                    }
                    boolean shouldCheckAccess = true;
                    if (isMountedFs(fs)) {
                        Path resolvedPath = resolvePath(path, fs);
                        if (resolvedPath != null) {
                            // we know the resolved path scheme. Let's check the resolved path
                            // scheme is part of hivePlugin.getFSScheme.
                            shouldCheckAccess = isPathInFSScheme(resolvedPath.toUri().toString());
                        }
                    }
                    if (shouldCheckAccess) {
                        if (!isURIAccessAllowed(user, permission, path, fs)) {
                            throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                        }
                        continue;
                    }
                // This means we got resolved path scheme is not part of
                // hivePlugin.getFSScheme
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, true);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                // De-duplicate: only one request per (resource, accessType).
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, hiveOpType, accessType, context, sessionContext);
                    requests.add(request);
                }
            }
        } else {
            // No input objects: a few operations still need a synthesized request.
            // this should happen only for SHOWDATABASES
            if (hiveOpType == HiveOperationType.SHOWDATABASES) {
                RangerHiveResource resource = new RangerHiveResource(HiveObjectType.DATABASE, null);
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, hiveOpType.name(), HiveAccessType.USE, context, sessionContext);
                requests.add(request);
            } else if (hiveOpType == HiveOperationType.REPLDUMP) {
                // This happens when REPL DUMP command with null inputHObjs is sent in checkPrivileges()
                // following parsing is done for Audit info
                RangerHiveResource resource = null;
                HiveObj hiveObj = new HiveObj(context);
                String dbName = hiveObj.getDatabaseName();
                String tableName = hiveObj.getTableName();
                LOG.debug("Database: " + dbName + " Table: " + tableName);
                if (!StringUtil.isEmpty(tableName)) {
                    resource = new RangerHiveResource(HiveObjectType.TABLE, dbName, tableName);
                } else {
                    resource = new RangerHiveResource(HiveObjectType.DATABASE, dbName, null);
                }
                // 
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, hiveOpType.name(), HiveAccessType.REPLADMIN, context, sessionContext);
                requests.add(request);
            } else if (hiveOpType.equals(HiveOperationType.ALTERTABLE_OWNER)) {
                RangerHiveAccessRequest request = buildRequestForAlterTableSetOwnerFromCommandString(user, groups, roles, hiveOpType.name(), context, sessionContext);
                if (request != null) {
                    requests.add(request);
                } else {
                    throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have privilege for [%s] command", user, hiveOpType.name()));
                }
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("RangerHiveAuthorizer.checkPrivileges: Unexpected operation type[" + hiveOpType + "] received with empty input objects list!");
                }
            }
        }
        // Output objects: same URI/access handling as the input loop, but
        // getAccessType is called with isInput=false (last argument).
        if (!CollectionUtils.isEmpty(outputHObjs)) {
            for (HivePrivilegeObject hiveObj : outputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj, inputHObjs, outputHObjs);
                if (resource == null) {
                    // possible if input object/object is of a kind that we don't currently authorize
                    continue;
                }
                String pathStr = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(pathStr)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    Path path = new Path(pathStr);
                    FileSystem fs = null;
                    try {
                        fs = FileSystem.get(path.toUri(), getHiveConf());
                    } catch (IOException e) {
                        LOG.error("Error getting permissions for " + path, e);
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path), e);
                    }
                    boolean shouldCheckAccess = true;
                    if (isMountedFs(fs)) {
                        Path resolvedPath = resolvePath(path, fs);
                        if (resolvedPath != null) {
                            // we know the resolved path scheme. Let's check the resolved path
                            // scheme is part of hivePlugin.getFSScheme.
                            shouldCheckAccess = isPathInFSScheme(resolvedPath.toUri().toString());
                        }
                    }
                    if (shouldCheckAccess) {
                        if (!isURIAccessAllowed(user, permission, path, fs)) {
                            throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                        }
                        continue;
                    }
                // This means we got resolved path scheme is not part of
                // hivePlugin.getFSScheme
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, false);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, hiveOpType, accessType, context, sessionContext);
                    requests.add(request);
                }
            }
        } else {
            if (hiveOpType == HiveOperationType.REPLLOAD) {
                // This happens when REPL LOAD command with null inputHObjs is sent in checkPrivileges()
                // following parsing is done for Audit info
                RangerHiveResource resource = null;
                HiveObj hiveObj = new HiveObj(context);
                String dbName = hiveObj.getDatabaseName();
                String tableName = hiveObj.getTableName();
                LOG.debug("Database: " + dbName + " Table: " + tableName);
                if (!StringUtil.isEmpty(tableName)) {
                    resource = new RangerHiveResource(HiveObjectType.TABLE, dbName, tableName);
                } else {
                    resource = new RangerHiveResource(HiveObjectType.DATABASE, dbName, null);
                }
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, hiveOpType.name(), HiveAccessType.REPLADMIN, context, sessionContext);
                requests.add(request);
            }
        }
        buildRequestContextWithAllAccessedResources(requests);
        // Evaluate each request against the Ranger plugin; the first denial throws.
        for (RangerHiveAccessRequest request : requests) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("request: " + request);
            }
            RangerHiveResource resource = (RangerHiveResource) request.getResource();
            RangerAccessResult result = null;
            // A COLUMN resource may carry multiple column names joined by COLUMN_SEP;
            // each column is authorized individually and the first denial wins.
            if (resource.getObjectType() == HiveObjectType.COLUMN && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
                List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();
                String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);
                // in case of multiple columns, original request is not sent to the plugin; hence service-def will not be set
                resource.setServiceDef(hivePlugin.getServiceDef());
                for (String column : columns) {
                    if (column != null) {
                        column = column.trim();
                    }
                    if (StringUtils.isBlank(column)) {
                        continue;
                    }
                    RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN, resource.getDatabase(), resource.getTable(), column);
                    colResource.setOwnerUser(resource.getOwnerUser());
                    RangerHiveAccessRequest colRequest = request.copy();
                    colRequest.setResource(colResource);
                    colRequests.add(colRequest);
                }
                Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler);
                if (colResults != null) {
                    for (RangerAccessResult colResult : colResults) {
                        result = colResult;
                        if (result != null && !result.getIsAllowed()) {
                            break;
                        }
                    }
                }
            } else {
                result = hivePlugin.isAccessAllowed(request, auditHandler);
            }
            // Even when access is allowed, a row-filter or column-mask policy on the
            // table may require blocking this operation type.
            if ((result == null || result.getIsAllowed()) && isBlockAccessIfRowfilterColumnMaskSpecified(hiveOpType, request)) {
                // check if row-filtering is applicable for the table/view being accessed
                HiveAccessType savedAccessType = request.getHiveAccessType();
                RangerHiveResource tblResource = new RangerHiveResource(HiveObjectType.TABLE, resource.getDatabase(), resource.getTable());
                // filtering/masking policies are defined only for SELECT
                request.setHiveAccessType(HiveAccessType.SELECT);
                request.setResource(tblResource);
                RangerAccessResult rowFilterResult = getRowFilterResult(request);
                if (isRowFilterEnabled(rowFilterResult)) {
                    if (result == null) {
                        result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, rowFilterResult.getServiceName(), rowFilterResult.getServiceDef(), request);
                    }
                    result.setIsAllowed(false);
                    result.setPolicyId(rowFilterResult.getPolicyId());
                    result.setReason("User does not have access to all rows of the table");
                } else {
                    // check if masking is enabled for any column in the table/view
                    request.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS);
                    RangerAccessResult dataMaskResult = getDataMaskResult(request);
                    if (isDataMaskEnabled(dataMaskResult)) {
                        if (result == null) {
                            result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, dataMaskResult.getServiceName(), dataMaskResult.getServiceDef(), request);
                        }
                        result.setIsAllowed(false);
                        result.setPolicyId(dataMaskResult.getPolicyId());
                        result.setReason("User does not have access to unmasked column values");
                    }
                }
                // Restore the request's original access type and resource before auditing.
                request.setHiveAccessType(savedAccessType);
                request.setResource(resource);
                if (result != null && !result.getIsAllowed()) {
                    auditHandler.processResult(result);
                }
            }
            if (result == null || !result.getIsAllowed()) {
                String path = resource.getAsString();
                path = (path == null) ? "Unknown resource!!" : buildPathForException(path, hiveOpType);
                throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, request.getHiveAccessType().name(), path));
            }
        }
    } finally {
        // Always flush audits and log perf, even when an exception is thrown.
        auditHandler.flushAudit();
        RangerPerfTracer.log(perf);
    }
}
Also used : ArrayList(java.util.ArrayList) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) FsAction(org.apache.hadoop.fs.permission.FsAction) HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) FileSystem(org.apache.hadoop.fs.FileSystem) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Path(org.apache.hadoop.fs.Path) RangerPerfTracer(org.apache.ranger.plugin.util.RangerPerfTracer) RangerAccessResult(org.apache.ranger.plugin.policyengine.RangerAccessResult) IOException(java.io.IOException) HiveAuthzSessionContext(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext) RangerAccessRequest(org.apache.ranger.plugin.policyengine.RangerAccessRequest)

Aggregations

HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)68 ArrayList (java.util.ArrayList)39 Table (org.apache.hadoop.hive.metastore.api.Table)11 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)10 IOException (java.io.IOException)9 HivePrincipal (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal)9 HiveAccessControlException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException)8 HiveAuthzContext (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext)8 HivePrivilegeObjectType (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType)8 HivePrivilege (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege)7 Table (org.apache.hadoop.hive.ql.metadata.Table)6 HiveAuthorizer (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer)6 HivePrivilegeInfo (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo)6 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)5 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)4 Database (org.apache.hadoop.hive.metastore.api.Database)4 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)4 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)4 HiveOperationType (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType)4 HivePrivObjectActionType (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType)4