Use of org.apache.ranger.plugin.util.RangerPerfTracer in project ranger by apache.
Class RangerKafkaAuthorizer, method authorize.
@Override
public boolean authorize(Session session, Operation operation, Resource resource) {
    if (rangerPlugin == null) {
        MiscUtil.logErrorMessageByInterval(logger, "Authorizer is still not initialized");
        return false;
    }
    // TODO: If resource type is consumer group, then allow it by default
    if (resource.resourceType().equals(Group$.MODULE$)) {
        if (logger.isDebugEnabled()) {
            logger.debug("If resource type is consumer group, then we allow it by default! Returning true");
        }
        return true;
    }
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_KAFKAAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_KAFKAAUTH_REQUEST_LOG, "RangerKafkaAuthorizer.authorize(resource=" + resource + ")");
    }
    String userName = null;
    if (session.principal() != null) {
        userName = session.principal().getName();
    }
    java.util.Set<String> userGroups = MiscUtil.getGroupsForRequestUser(userName);
    String ip = session.clientAddress().getHostAddress();
    // skip leading slash
    if (StringUtils.isNotEmpty(ip) && ip.charAt(0) == '/') {
        ip = ip.substring(1);
    }
    Date eventTime = new Date();
    String accessType = mapToRangerAccessType(operation);
    boolean validationFailed = false;
    String validationStr = "";
    if (accessType == null) {
        if (MiscUtil.logErrorMessageByInterval(logger, "Unsupported access type. operation=" + operation)) {
            logger.fatal("Unsupported access type. session=" + session + ", operation=" + operation + ", resource=" + resource);
        }
        validationFailed = true;
        validationStr += "Unsupported access type. operation=" + operation;
    }
    String action = accessType;
    String clusterName = rangerPlugin.getClusterName();
    RangerAccessRequestImpl rangerRequest = new RangerAccessRequestImpl();
    rangerRequest.setUser(userName);
    rangerRequest.setUserGroups(userGroups);
    rangerRequest.setClientIPAddress(ip);
    rangerRequest.setAccessTime(eventTime);
    RangerAccessResourceImpl rangerResource = new RangerAccessResourceImpl();
    rangerRequest.setResource(rangerResource);
    rangerRequest.setAccessType(accessType);
    rangerRequest.setAction(action);
    rangerRequest.setRequestData(resource.name());
    rangerRequest.setClusterName(clusterName);
    if (resource.resourceType().equals(Topic$.MODULE$)) {
        rangerResource.setValue(KEY_TOPIC, resource.name());
    } else if (resource.resourceType().equals(Cluster$.MODULE$)) {
        // NOPMD
        // CLUSTER should go as null
        // rangerResource.setValue(KEY_CLUSTER, resource.name());
    } else if (resource.resourceType().equals(Group$.MODULE$)) {
        rangerResource.setValue(KEY_CONSUMER_GROUP, resource.name());
    } else {
        logger.fatal("Unsupported resourceType=" + resource.resourceType());
        validationFailed = true;
    }
    boolean returnValue = false;
    if (validationFailed) {
        MiscUtil.logErrorMessageByInterval(logger, validationStr + ", request=" + rangerRequest);
    } else {
        try {
            RangerAccessResult result = rangerPlugin.isAccessAllowed(rangerRequest);
            if (result == null) {
                logger.error("Ranger Plugin returned null. Returning false");
            } else {
                returnValue = result.getIsAllowed();
            }
        } catch (Throwable t) {
            logger.error("Error while calling isAccessAllowed(). request=" + rangerRequest, t);
        }
    }
    RangerPerfTracer.log(perf);
    if (logger.isDebugEnabled()) {
        logger.debug("rangerRequest=" + rangerRequest + ", return=" + returnValue);
    }
    return returnValue;
}
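The snippet above shows the RangerPerfTracer usage pattern that repeats across these plugins: a tracer is created only when perf tracing is enabled for the given logger, and it is logged once the request has been processed. Below is a minimal, self-contained sketch of that pattern; the class name, the logger name "myplugin.request", and doAuthorize() are illustrative placeholders rather than Ranger source, and the use of RangerPerfTracer.getPerfLogger() to obtain the perf logger is an assumption about this Ranger version.

import org.apache.commons.logging.Log;
import org.apache.ranger.plugin.util.RangerPerfTracer;

public class PerfTracedAuthorizer {

    // assumption: perf loggers are obtained via RangerPerfTracer.getPerfLogger(), as elsewhere in Ranger
    private static final Log PERF_MYPLUGIN_REQUEST_LOG = RangerPerfTracer.getPerfLogger("myplugin.request");

    public boolean authorize(String resourceName) {
        RangerPerfTracer perf = null;
        // create the tracer only when perf tracing is enabled, so the normal path stays cheap
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_MYPLUGIN_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_MYPLUGIN_REQUEST_LOG, "PerfTracedAuthorizer.authorize(resource=" + resourceName + ")");
        }
        try {
            return doAuthorize(resourceName);
        } finally {
            // RangerPerfTracer.log() tolerates a null tracer, which the unconditional calls above rely on
            RangerPerfTracer.log(perf);
        }
    }

    private boolean doAuthorize(String resourceName) {
        // placeholder for the real policy evaluation
        return false;
    }
}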
Use of org.apache.ranger.plugin.util.RangerPerfTracer in project ranger by apache.
Class RangerYarnAuthorizer, method checkPermission.
@Override
public boolean checkPermission(AccessType accessType, PrivilegedEntity entity, UserGroupInformation ugi) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerYarnAuthorizer.checkPermission(" + accessType + ", " + toString(entity) + ", " + ugi + ")");
    }
    boolean ret = false;
    RangerYarnPlugin plugin = yarnPlugin;
    RangerYarnAuditHandler auditHandler = null;
    RangerAccessResult result = null;
    // guard against an uninitialized plugin (the null check below implies yarnPlugin can be null)
    String clusterName = plugin != null ? plugin.getClusterName() : null;
    RangerPerfTracer perf = null;
    RangerPerfTracer yarnAclPerf = null;
    if (plugin != null) {
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_YARNAUTH_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_YARNAUTH_REQUEST_LOG, "RangerYarnAuthorizer.checkPermission(entity=" + entity + ")");
        }
        RangerYarnAccessRequest request = new RangerYarnAccessRequest(entity, getRangerAccessType(accessType), accessType.name(), ugi, clusterName);
        auditHandler = new RangerYarnAuditHandler();
        result = plugin.isAccessAllowed(request, auditHandler);
    }
    if (RangerYarnAuthorizer.yarnAuthEnabled && (result == null || !result.getIsAccessDetermined())) {
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_YARNAUTH_REQUEST_LOG)) {
            yarnAclPerf = RangerPerfTracer.getPerfTracer(PERF_YARNAUTH_REQUEST_LOG, "RangerYarnNativeAuthorizer.isAllowedByYarnAcl(entity=" + entity + ")");
        }
        ret = isAllowedByYarnAcl(accessType, entity, ugi, auditHandler);
    } else {
        ret = result != null && result.getIsAllowed();
    }
    if (auditHandler != null) {
        auditHandler.flushAudit();
    }
    RangerPerfTracer.log(yarnAclPerf);
    RangerPerfTracer.log(perf);
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerYarnAuthorizer.checkPermission(" + accessType + ", " + toString(entity) + ", " + ugi + "): " + ret);
    }
    return ret;
}
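The interplay between the Ranger result and YARN's own ACLs above reduces to the small decision sketched below. This is a fragment meant to be read alongside the method, not Ranger source: decideAccess() and its boolean parameters are illustrative stand-ins for RangerYarnAuthorizer.yarnAuthEnabled and the result of isAllowedByYarnAcl().

// Ranger's answer is authoritative only when a policy actually determined access.
// Otherwise, and only if native YARN ACL checking is enabled, the decision falls back to YARN ACLs.
boolean decideAccess(RangerAccessResult result, boolean yarnAuthEnabled, boolean allowedByYarnAcl) {
    if (yarnAuthEnabled && (result == null || !result.getIsAccessDetermined())) {
        return allowedByYarnAcl;               // no matching Ranger policy: defer to native YARN ACLs
    }
    return result != null && result.getIsAllowed(); // a Ranger policy decided the outcome
}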
Use of org.apache.ranger.plugin.util.RangerPerfTracer in project ranger by apache.
Class RangerHiveAuthorizer, method applyRowFilterAndColumnMasking.
@Override
public List<HivePrivilegeObject> applyRowFilterAndColumnMasking(HiveAuthzContext queryContext, List<HivePrivilegeObject> hiveObjs) throws SemanticException {
    List<HivePrivilegeObject> ret = new ArrayList<HivePrivilegeObject>();
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> applyRowFilterAndColumnMasking(" + queryContext + ", objCount=" + hiveObjs.size() + ")");
    }
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.applyRowFilterAndColumnMasking()");
    }
    if (CollectionUtils.isNotEmpty(hiveObjs)) {
        for (HivePrivilegeObject hiveObj : hiveObjs) {
            HivePrivilegeObjectType hiveObjType = hiveObj.getType();
            if (hiveObjType == null) {
                hiveObjType = HivePrivilegeObjectType.TABLE_OR_VIEW;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("applyRowFilterAndColumnMasking(hiveObjType=" + hiveObjType + ")");
            }
            boolean needToTransform = false;
            if (hiveObjType == HivePrivilegeObjectType.TABLE_OR_VIEW) {
                String database = hiveObj.getDbname();
                String table = hiveObj.getObjectName();
                String rowFilterExpr = getRowFilterExpression(queryContext, database, table);
                if (StringUtils.isNotBlank(rowFilterExpr)) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("rowFilter(database=" + database + ", table=" + table + "): " + rowFilterExpr);
                    }
                    hiveObj.setRowFilterExpression(rowFilterExpr);
                    needToTransform = true;
                }
                if (CollectionUtils.isNotEmpty(hiveObj.getColumns())) {
                    List<String> columnTransformers = new ArrayList<String>();
                    for (String column : hiveObj.getColumns()) {
                        boolean isColumnTransformed = addCellValueTransformerAndCheckIfTransformed(queryContext, database, table, column, columnTransformers);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("addCellValueTransformerAndCheckIfTransformed(database=" + database + ", table=" + table + ", column=" + column + "): " + isColumnTransformed);
                        }
                        needToTransform = needToTransform || isColumnTransformed;
                    }
                    hiveObj.setCellValueTransformers(columnTransformers);
                }
            }
            if (needToTransform) {
                ret.add(hiveObj);
            }
        }
    }
    RangerPerfTracer.log(perf);
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== applyRowFilterAndColumnMasking(" + queryContext + ", objCount=" + hiveObjs.size() + "): retCount=" + ret.size());
    }
    return ret;
}
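To make the effect concrete, here is an illustrative example of the data set on a returned HivePrivilegeObject. The values are assumptions, not from the source: the transformer list built by addCellValueTransformerAndCheckIfTransformed() is positional with respect to the object's columns, so unmasked columns typically map to themselves while masked ones map to a masking expression determined by the matching policy.

import java.util.Arrays;
import java.util.List;

public class MaskingExample {
    public static void main(String[] args) {
        // Hypothetical values for a table with columns [name, ssn] where a Ranger masking policy
        // applies to ssn only; the exact masking expression depends on the policy's mask type.
        List<String> columns      = Arrays.asList("name", "ssn");
        List<String> transformers = Arrays.asList("name", "mask_hash(ssn)");
        // In applyRowFilterAndColumnMasking() above, hiveObj.setCellValueTransformers(transformers)
        // and hiveObj.setRowFilterExpression(...) let Hive rewrite the query so that the row filter
        // is applied and the masked expression is selected in place of the raw column.
        System.out.println(columns + " -> " + transformers);
    }
}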
Use of org.apache.ranger.plugin.util.RangerPerfTracer in project ranger by apache.
Class RangerHiveAuthorizer, method filterListCmdObjects.
/**
 * Filter the given list of objects, returning only those the user is authorized to access
 * @param objs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
// Commented out to avoid build errors until this interface is stable in Hive Branch
// @Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> objs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> filterListCmdObjects(%s, %s)", objs, context));
    }
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.filterListCmdObjects()");
    }
    List<HivePrivilegeObject> ret = null;
    // bail out early if there is nothing to validate!
    if (objs == null) {
        LOG.debug("filterListCmdObjects: meta objects list was null!");
    } else if (objs.isEmpty()) {
        LOG.debug("filterListCmdObjects: meta objects list was empty!");
        ret = objs;
    } else if (getCurrentUserGroupInfo() == null) {
        /*
         * This is null for the metastore, and there doesn't seem to be a way to tell whether we are running as metastore or hiveserver2!
         */
        LOG.warn("filterListCmdObjects: user information not available");
        ret = objs;
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filterListCmdObjects: number of input objects[" + objs.size() + "]");
        }
        // get user/group info
        // we know this can't be null since we checked it above!
        UserGroupInformation ugi = getCurrentUserGroupInfo();
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("filterListCmdObjects: user[%s], groups%s", user, groups));
        }
        if (ret == null) {
            // if we got any items to filter, then we can't return null; we must return a list, even if it's empty.
            ret = new ArrayList<HivePrivilegeObject>(objs.size());
        }
        for (HivePrivilegeObject privilegeObject : objs) {
            if (LOG.isDebugEnabled()) {
                HivePrivObjectActionType actionType = privilegeObject.getActionType();
                HivePrivilegeObjectType objectType = privilegeObject.getType();
                String objectName = privilegeObject.getObjectName();
                String dbName = privilegeObject.getDbname();
                List<String> columns = privilegeObject.getColumns();
                List<String> partitionKeys = privilegeObject.getPartKeys();
                String commandString = context == null ? null : context.getCommandString();
                String ipAddress = context == null ? null : context.getIpAddress();
                final String format = "filterListCmdObjects: actionType[%s], objectType[%s], objectName[%s], dbName[%s], columns[%s], partitionKeys[%s]; context: commandString[%s], ipAddress[%s]";
                LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns, partitionKeys, commandString, ipAddress));
            }
            RangerHiveResource resource = createHiveResource(privilegeObject);
            if (resource == null) {
                LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
            } else {
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, context, sessionContext, hivePlugin.getClusterName());
                RangerAccessResult result = hivePlugin.isAccessAllowed(request);
                if (result == null) {
                    LOG.error("filterListCmdObjects: Internal error: null RangerAccessResult object received back from isAccessAllowed()!");
                } else if (!result.getIsAllowed()) {
                    if (LOG.isDebugEnabled()) {
                        String path = resource.getAsString();
                        LOG.debug(String.format("filterListCmdObjects: Permission denied: user [%s] does not have [%s] privilege on [%s]. resource[%s], request[%s], result[%s]", user, request.getHiveAccessType().name(), path, resource, request, result));
                    }
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("filterListCmdObjects: access allowed. resource[%s], request[%s], result[%s]", resource, request, result));
                    }
                    ret.add(privilegeObject);
                }
            }
        }
    }
    RangerPerfTracer.log(perf);
    if (LOG.isDebugEnabled()) {
        int count = ret == null ? 0 : ret.size();
        LOG.debug(String.format("<== filterListCmdObjects: count[%d], ret[%s]", count, ret));
    }
    return ret;
}
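Stripped of logging, the filtering decision above reduces to the loop sketched below. This is a condensed fragment meant to live in the same class as the method above: it reuses createHiveResource(), hivePlugin and the RangerHiveAccessRequest constructor exactly as they appear there, while keepAllowed() itself is an illustrative name, not a method in the source.

private List<HivePrivilegeObject> keepAllowed(List<HivePrivilegeObject> objs, String user, Set<String> groups,
                                              HiveAuthzContext context, HiveAuthzSessionContext sessionContext) {
    List<HivePrivilegeObject> allowed = new ArrayList<HivePrivilegeObject>(objs.size());
    for (HivePrivilegeObject obj : objs) {
        RangerHiveResource resource = createHiveResource(obj);
        if (resource == null) {
            continue; // object kind not modeled as a Ranger resource; it is simply not listed
        }
        RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, context, sessionContext, hivePlugin.getClusterName());
        RangerAccessResult result = hivePlugin.isAccessAllowed(request);
        if (result != null && result.getIsAllowed()) {
            allowed.add(obj); // only explicitly allowed objects survive the metadata listing
        }
    }
    return allowed;
}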
Use of org.apache.ranger.plugin.util.RangerPerfTracer in project ranger by apache.
Class RangerHiveAuthorizer, method checkPrivileges.
/**
 * Check if the user has privileges to perform this action on these objects
 * @param hiveOpType
 * @param inputHObjs
 * @param outputHObjs
 * @param context
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
@Override
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    UserGroupInformation ugi = getCurrentUserGroupInfo();
    if (ugi == null) {
        throw new HiveAccessControlException("Permission denied: user information not available");
    }
    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();
    RangerPerfTracer perf = null;
    try {
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        String clusterName = hivePlugin.getClusterName();
        if (LOG.isDebugEnabled()) {
            LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
        }
        if (hiveOpType == HiveOperationType.DFS) {
            handleDfsCommand(hiveOpType, inputHObjs, user, auditHandler);
            return;
        }
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.checkPrivileges(hiveOpType=" + hiveOpType + ")");
        }
        List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();
        if (!CollectionUtils.isEmpty(inputHObjs)) {
            for (HivePrivilegeObject hiveObj : inputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
                if (resource == null) {
                    // possible if the input object is of a kind that we don't currently authorize
                    continue;
                }
                String path = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                    }
                    continue;
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, true);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext, clusterName);
                    requests.add(request);
                }
            }
        } else {
            // this should happen only for SHOWDATABASES
            if (hiveOpType == HiveOperationType.SHOWDATABASES) {
                RangerHiveResource resource = new RangerHiveResource(HiveObjectType.DATABASE, null);
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType.name(), HiveAccessType.USE, context, sessionContext, clusterName);
                requests.add(request);
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("RangerHiveAuthorizer.checkPrivileges: Unexpected operation type[" + hiveOpType + "] received with empty input objects list!");
                }
            }
        }
        if (!CollectionUtils.isEmpty(outputHObjs)) {
            for (HivePrivilegeObject hiveObj : outputHObjs) {
                RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
                if (resource == null) {
                    // possible if the output object is of a kind that we don't currently authorize
                    continue;
                }
                String path = hiveObj.getObjectName();
                HiveObjectType hiveObjType = resource.getObjectType();
                if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
                    FsAction permission = getURIAccessType(hiveOpType);
                    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
                        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
                    }
                    continue;
                }
                HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, hiveObjType, false);
                if (accessType == HiveAccessType.NONE) {
                    continue;
                }
                if (!existsByResourceAndAccessType(requests, resource, accessType)) {
                    RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext, clusterName);
                    requests.add(request);
                }
            }
        }
        buildRequestContextWithAllAccessedResources(requests);
        for (RangerHiveAccessRequest request : requests) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("request: " + request);
            }
            RangerHiveResource resource = (RangerHiveResource) request.getResource();
            RangerAccessResult result = null;
            if (resource.getObjectType() == HiveObjectType.COLUMN && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
                List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();
                String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);
                // in case of multiple columns, the original request is not sent to the plugin; hence the service-def will not be set
                resource.setServiceDef(hivePlugin.getServiceDef());
                for (String column : columns) {
                    if (column != null) {
                        column = column.trim();
                    }
                    if (StringUtils.isBlank(column)) {
                        continue;
                    }
                    RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN, resource.getDatabase(), resource.getTable(), column);
                    RangerHiveAccessRequest colRequest = request.copy();
                    colRequest.setResource(colResource);
                    colRequests.add(colRequest);
                }
                Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler);
                if (colResults != null) {
                    for (RangerAccessResult colResult : colResults) {
                        result = colResult;
                        if (result != null && !result.getIsAllowed()) {
                            break;
                        }
                    }
                }
            } else {
                result = hivePlugin.isAccessAllowed(request, auditHandler);
            }
            if ((result == null || result.getIsAllowed()) && isBlockAccessIfRowfilterColumnMaskSpecified(hiveOpType, request)) {
                // check if row-filtering is applicable to the table/view being accessed
                HiveAccessType savedAccessType = request.getHiveAccessType();
                RangerHiveResource tblResource = new RangerHiveResource(HiveObjectType.TABLE, resource.getDatabase(), resource.getTable());
                // filtering/masking policies are defined only for SELECT
                request.setHiveAccessType(HiveAccessType.SELECT);
                request.setResource(tblResource);
                RangerAccessResult rowFilterResult = getRowFilterResult(request);
                if (isRowFilterEnabled(rowFilterResult)) {
                    if (result == null) {
                        result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, rowFilterResult.getServiceName(), rowFilterResult.getServiceDef(), request);
                    }
                    result.setIsAllowed(false);
                    result.setPolicyId(rowFilterResult.getPolicyId());
                    result.setReason("User does not have access to all rows of the table");
                } else {
                    // check if masking is enabled for any column in the table/view
                    request.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF_OR_DESCENDANTS);
                    RangerAccessResult dataMaskResult = getDataMaskResult(request);
                    if (isDataMaskEnabled(dataMaskResult)) {
                        if (result == null) {
                            result = new RangerAccessResult(RangerPolicy.POLICY_TYPE_ACCESS, dataMaskResult.getServiceName(), dataMaskResult.getServiceDef(), request);
                        }
                        result.setIsAllowed(false);
                        result.setPolicyId(dataMaskResult.getPolicyId());
                        result.setReason("User does not have access to unmasked column values");
                    }
                }
                request.setHiveAccessType(savedAccessType);
                request.setResource(resource);
                if (result != null && !result.getIsAllowed()) {
                    auditHandler.processResult(result);
                }
            }
            if (result == null || !result.getIsAllowed()) {
                String path = resource.getAsString();
                path = (path == null) ? "Unknown resource!!" : buildPathForException(path, hiveOpType);
                throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, request.getHiveAccessType().name(), path));
            }
        }
    } finally {
        auditHandler.flushAudit();
        RangerPerfTracer.log(perf);
    }
}
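The try/finally structure above is the part that uses RangerPerfTracer; stripped to its essentials it looks like the fragment below (a sketch to be read alongside the method, where PERF_LOG and evaluateRequests() are illustrative placeholders for the class's perf logger and the request-evaluation loop). Audit events raised while evaluating the requests are buffered by the audit handler and flushed exactly once, even when a denial is thrown, and the perf tracer is logged on the same path.

public void checkPrivilegesSkeleton() throws HiveAccessControlException {
    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();
    RangerPerfTracer perf = null;
    try {
        if (RangerPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            perf = RangerPerfTracer.getPerfTracer(PERF_LOG, "RangerHiveAuthorizer.checkPrivileges()");
        }
        evaluateRequests(auditHandler); // may throw HiveAccessControlException on the first denial
    } finally {
        auditHandler.flushAudit();  // buffered audit events are flushed exactly once, even on denial
        RangerPerfTracer.log(perf); // null-safe when perf tracing was never enabled
    }
}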