Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project:
class SQLStdHiveAuthorizationValidatorForTest, method applyRowFilterAndColumnMasking.
// Please take a look at the instructions in HiveAuthorizer.java before
// implementing applyRowFilterAndColumnMasking
/**
 * Applies test-only row filters and column masks to privilege objects whose
 * table name matches one of the known "masking_*" test fixtures.
 *
 * @param context  authorization context (not consulted by this test implementation)
 * @param privObjs privilege objects for the tables referenced by the query
 * @return the subset of {@code privObjs} that were given a row filter and/or
 *         cell-value transformers and therefore require query rewrite
 * @throws SemanticException declared for interface compatibility
 */
public List<HivePrivilegeObject> applyRowFilterAndColumnMasking(HiveAuthzContext context, List<HivePrivilegeObject> privObjs) throws SemanticException {
    List<HivePrivilegeObject> needRewritePrivObjs = new ArrayList<>();
    for (HivePrivilegeObject privObj : privObjs) {
        String name = privObj.getObjectName();
        // Order matters: "masking_test" must be checked before its longer-named
        // siblings would never match it, but the startsWith(base + "_n") guard
        // keeps the branches mutually exclusive.
        if (matchesFixture(name, "masking_test")) {
            privObj.setRowFilterExpression("key % 2 = 0 and key < 10");
            privObj.setCellValueTransformers(buildCellValueTransformers(privObj, "value", "reverse(value)"));
            needRewritePrivObjs.add(privObj);
        } else if (matchesFixture(name, "masking_test_view")) {
            privObj.setRowFilterExpression("key > 6");
            privObj.setCellValueTransformers(buildCellValueTransformers(privObj, "key", "key / 2"));
            needRewritePrivObjs.add(privObj);
        } else if (matchesFixture(name, "masking_test_subq")) {
            privObj.setRowFilterExpression("key in (select key from src where src.key = " + name + ".key)");
            needRewritePrivObjs.add(privObj);
        } else if (matchesFixture(name, "masking_acid_no_masking")) {
            // testing acid usage when no masking/filtering is present
            needRewritePrivObjs.add(privObj);
        } else if (matchesFixture(name, "masking_test_druid")) {
            // testing druid queries row filtering is present
            privObj.setRowFilterExpression("key > 10");
            needRewritePrivObjs.add(privObj);
        }
    }
    return needRewritePrivObjs;
}

/**
 * True when {@code name} is exactly the fixture table {@code base} or one of
 * its numbered variants ({@code base + "_n..."}).
 */
private static boolean matchesFixture(String name, String base) {
    return name.equals(base) || name.startsWith(base + "_n");
}

/**
 * Builds one transformer per column of {@code privObj}: {@code expression} for
 * the column named {@code column}, and the identity (the column name itself)
 * for every other column.
 */
private static List<String> buildCellValueTransformers(HivePrivilegeObject privObj, String column, String expression) {
    List<String> cellValueTransformers = new ArrayList<>();
    for (String columnName : privObj.getColumns()) {
        cellValueTransformers.add(columnName.equals(column) ? expression : columnName);
    }
    return cellValueTransformers;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Ranger project:
class RangerHivePlugin, method getHivePrivilegeInfos.
/**
 * Builds the {@link HivePrivilegeInfo} entries (one per granted privilege) for a
 * SHOW GRANTS style request on the resource described by {@code privObj}.
 *
 * <p>When {@code principal} is non-null, only that user's (plus the user's groups')
 * or that role's ACLs are reported; when it is null, ACLs for every user, group,
 * and role on the resource are reported.
 *
 * @param principal requesting principal, or null to list all ACLs on the resource
 * @param privObj   the Hive object whose ACLs are being queried
 * @return privilege descriptors for the matching principals; empty when the
 *         object reference cannot be resolved or Ranger returns no ACLs
 * @throws HiveAuthzPluginException wrapping any failure while resolving ACLs
 */
private List<HivePrivilegeInfo> getHivePrivilegeInfos(HivePrincipal principal, HivePrivilegeObject privObj) throws HiveAuthzPluginException {
    List<HivePrivilegeInfo> ret = new ArrayList<>();
    try {
        HiveObjectRef msObjRef = AuthorizationUtils.getThriftHiveObjectRef(privObj);
        if (msObjRef != null && msObjRef.getDbName() != null) {
            // when resource is specified in the show grants, acl will be for that resource / user / groups
            HivePrivilegeObject.HivePrivilegeObjectType objectType = getPluginPrivilegeObjType(msObjRef.getObjectType());
            String dbName = msObjRef.getDbName();
            String objectName = msObjRef.getObjectName();
            // "" rather than new String(): same value, no needless allocation
            String columnName = (msObjRef.getColumnName() == null) ? "" : msObjRef.getColumnName();
            List<String> partValues = (msObjRef.getPartValues() == null) ? new ArrayList<>() : msObjRef.getPartValues();
            HivePrivilegeObject hivePrivilegeObject = new HivePrivilegeObject(objectType, dbName, objectName);
            RangerResourceACLs rangerResourceACLs = getRangerResourceACLs(hivePrivilegeObject);
            if (rangerResourceACLs != null) {
                Map<String, Map<String, RangerResourceACLs.AccessResult>> userRangerACLs = rangerResourceACLs.getUserACLs();
                Map<String, Map<String, RangerResourceACLs.AccessResult>> groupRangerACLs = rangerResourceACLs.getGroupACLs();
                Map<String, Map<String, RangerResourceACLs.AccessResult>> roleRangerACLs = rangerResourceACLs.getRoleACLs();
                Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> userPermissions = convertRangerACLsToHiveACLs(userRangerACLs);
                Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> groupPermissions = convertRangerACLsToHiveACLs(groupRangerACLs);
                Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> rolePermissions = convertRangerACLsToHiveACLs(roleRangerACLs);
                if (principal != null) {
                    if (principal.getType() == HivePrincipal.HivePrincipalType.USER) {
                        String user = principal.getName();
                        addPrivilegeInfos(ret, principal, userPermissions.get(user), userRangerACLs.get(user), objectType, dbName, objectName, columnName, partValues);
                        // a user's effective ACLs also include those of the groups it belongs to
                        for (String group : getPrincipalGroup(user)) {
                            addPrivilegeInfos(ret, principal, groupPermissions.get(group), groupRangerACLs.get(group), objectType, dbName, objectName, columnName, partValues);
                        }
                    } else if (principal.getType() == HivePrincipal.HivePrincipalType.ROLE) {
                        String role = principal.getName();
                        addPrivilegeInfos(ret, principal, rolePermissions.get(role), roleRangerACLs.get(role), objectType, dbName, objectName, columnName, partValues);
                    }
                } else {
                    // Request is for all the ACLs on a resource
                    for (String user : userRangerACLs.keySet()) {
                        addPrivilegeInfos(ret, new HivePrincipal(user, HivePrincipal.HivePrincipalType.USER), userPermissions.get(user), userRangerACLs.get(user), objectType, dbName, objectName, columnName, partValues);
                    }
                    for (String group : groupRangerACLs.keySet()) {
                        addPrivilegeInfos(ret, new HivePrincipal(group, HivePrincipal.HivePrincipalType.GROUP), groupPermissions.get(group), groupRangerACLs.get(group), objectType, dbName, objectName, columnName, partValues);
                    }
                    for (String role : roleRangerACLs.keySet()) {
                        addPrivilegeInfos(ret, new HivePrincipal(role, HivePrincipal.HivePrincipalType.ROLE), rolePermissions.get(role), roleRangerACLs.get(role), objectType, dbName, objectName, columnName, partValues);
                    }
                }
            }
        }
    } catch (Exception e) {
        throw new HiveAuthzPluginException("hive showPrivileges" + ": " + e.getMessage(), e);
    }
    return ret;
}

/**
 * Appends to {@code ret} one {@link HivePrivilegeInfo} for each privilege in
 * {@code acls} that maps to a concrete Ranger policy. No-op when {@code acls}
 * is null (the principal has no ACLs on the resource).
 *
 * @param ret          accumulator for the resulting privilege descriptors
 * @param principal    principal the privileges are reported for
 * @param acls         Hive-side privilege/access map for this principal, may be null
 * @param accessResult raw Ranger access results keyed by access-type name, used to
 *                     resolve the granting policy; may be null only when {@code acls} is
 */
private void addPrivilegeInfos(List<HivePrivilegeInfo> ret, HivePrincipal principal,
        Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult> acls,
        Map<String, RangerResourceACLs.AccessResult> accessResult,
        HivePrivilegeObject.HivePrivilegeObjectType objectType, String dbName, String objectName,
        String columnName, List<String> partValues) {
    if (acls == null) {
        return;
    }
    for (HiveResourceACLs.Privilege acl : acls.keySet()) {
        RangerPolicy policy = getRangerPolicy(accessResult, acl.name());
        if (policy != null) {
            String aclname = getPermission(acl, accessResult, policy);
            ret.add(createHivePrivilegeInfo(principal, objectType, dbName, objectName, columnName, partValues, aclname, policy));
        }
    }
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Ranger project:
class RangerHivePlugin, method createHivePrivilegeInfo.
/**
 * Assembles a {@link HivePrivilegeInfo} for the given principal/resource/privilege,
 * pulling the grant creation time and delegate-admin flag from the Ranger policy
 * item that actually granted {@code aclName} to the principal.
 *
 * @param hivePrincipal principal (user or group name is matched against policy items)
 * @param objectType    type of the Hive object the privilege applies to
 * @param dbName        database name
 * @param objectName    table/view/function name
 * @param columnName    column name, may be empty
 * @param partValues    partition values, may be empty
 * @param aclName       privilege name (matched case-insensitively against policy access types)
 * @param policy        the Ranger policy that granted the access
 * @return the assembled privilege descriptor; grantor is the default Ranger policy grantor
 */
private HivePrivilegeInfo createHivePrivilegeInfo(HivePrincipal hivePrincipal, HivePrivilegeObject.HivePrivilegeObjectType objectType, String dbName, String objectName, String columnName, List<String> partValues, String aclName, RangerPolicy policy) {
    int creationDate = 0;
    boolean delegateAdmin = false;
    // Loop-invariant: lower-case the ACL name once, not on every policy item.
    String aclNameLower = aclName.toLowerCase();
    for (RangerPolicy.RangerPolicyItem policyItem : policy.getPolicyItems()) {
        List<String> users = policyItem.getUsers();
        List<String> groups = policyItem.getGroups();
        List<String> accessTypes = new ArrayList<>();
        for (RangerPolicy.RangerPolicyItemAccess policyItemAccess : policyItem.getAccesses()) {
            accessTypes.add(policyItemAccess.getType());
        }
        // Every matching item is visited; the last match wins (original semantics).
        if (accessTypes.contains(aclNameLower) && (users.contains(hivePrincipal.getName()) || groups.contains(hivePrincipal.getName()))) {
            creationDate = (policy.getCreateTime() == null) ? creationDate : (int) (policy.getCreateTime().getTime() / 1000);
            delegateAdmin = (policyItem.getDelegateAdmin() == null) ? delegateAdmin : policyItem.getDelegateAdmin().booleanValue();
        }
    }
    HivePrincipal grantorPrincipal = new HivePrincipal(DEFAULT_RANGER_POLICY_GRANTOR, HivePrincipal.HivePrincipalType.USER);
    HivePrivilegeObject privilegeObject = new HivePrivilegeObject(objectType, dbName, objectName, partValues, columnName);
    HivePrivilege privilege = new HivePrivilege(aclName, null);
    return new HivePrivilegeInfo(hivePrincipal, privilege, privilegeObject, grantorPrincipal, delegateAdmin, creationDate);
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Ranger project:
class RangerHivePlugin, method filterListCmdObjects.
/**
 * Check if the user has privileges to perform this action on these objects.
 * @param objs the privilege objects to filter; may be null or empty
 * @param context the authorization context for the current command
 * @throws HiveAuthzPluginException on an internal authorization-plugin failure
 * @throws HiveAccessControlException if access control evaluation itself fails
 */
// Commented out to avoid build errors until this interface is stable in Hive Branch
// @Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> objs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> filterListCmdObjects(%s, %s)", objs, context));
    }

    RangerPerfTracer perfTracer = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perfTracer = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.filterListCmdObjects()");
    }

    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler(hivePlugin.getConfig());
    List<HivePrivilegeObject> ret = null;

    if (objs == null) {
        // nothing to validate; result stays null
        LOG.debug("filterListCmdObjects: meta objects list was null!");
    } else if (objs.isEmpty()) {
        LOG.debug("filterListCmdObjects: meta objects list was empty!");
        ret = objs;
    } else if (getCurrentUserGroupInfo() == null) {
        // Null for metastore: there is no way here to tell whether we are running
        // as metastore or hiveserver2, so pass everything through unfiltered.
        LOG.warn("filterListCmdObjects: user information not available");
        ret = objs;
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filterListCmdObjects: number of input objects[" + objs.size() + "]");
        }

        // non-null here: checked in the branch above
        UserGroupInformation ugi = getCurrentUserGroupInfo();
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        Set<String> roles = getCurrentRolesForUser(user, groups);
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("filterListCmdObjects: user[%s], groups[%s], roles[%s] ", user, groups, roles));
        }

        // With items to filter we must return a list, even an empty one — never null.
        ret = new ArrayList<HivePrivilegeObject>(objs.size());

        for (HivePrivilegeObject hiveObj : objs) {
            if (LOG.isDebugEnabled()) {
                HivePrivObjectActionType actionType = hiveObj.getActionType();
                HivePrivilegeObjectType objectType = hiveObj.getType();
                String objectName = hiveObj.getObjectName();
                String dbName = hiveObj.getDbname();
                List<String> columns = hiveObj.getColumns();
                List<String> partitionKeys = hiveObj.getPartKeys();
                String commandString = context == null ? null : context.getCommandString();
                String ipAddress = context == null ? null : context.getIpAddress();
                final String format = "filterListCmdObjects: actionType[%s], objectType[%s], objectName[%s], dbName[%s], columns[%s], partitionKeys[%s]; context: commandString[%s], ipAddress[%s]";
                LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns, partitionKeys, commandString, ipAddress));
            }

            RangerHiveResource resource = createHiveResourceForFiltering(hiveObj);
            if (resource == null) {
                LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
                continue;
            }

            RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, context, sessionContext);
            RangerAccessResult result = hivePlugin.isAccessAllowed(request, auditHandler);
            if (result == null) {
                LOG.error("filterListCmdObjects: Internal error: null RangerAccessResult object received back from isAccessAllowed()!");
            } else if (result.getIsAllowed()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("filterListCmdObjects: access allowed. resource[%s], request[%s], result[%s]", resource, request, result));
                }
                ret.add(hiveObj);
            } else if (LOG.isDebugEnabled()) {
                String path = resource.getAsString();
                LOG.debug(String.format("filterListCmdObjects: Permission denied: user [%s] does not have [%s] privilege on [%s]. resource[%s], request[%s], result[%s]", user, request.getHiveAccessType().name(), path, resource, request, result));
            }
        }
    }

    auditHandler.flushAudit();
    RangerPerfTracer.log(perfTracer);

    if (LOG.isDebugEnabled()) {
        int count = ret == null ? 0 : ret.size();
        LOG.debug(String.format("<== filterListCmdObjects: count[%d], ret[%s]", count, ret));
    }
    return ret;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Ranger project:
class RangerHivePlugin, method getHiveResource.
/**
 * Maps a Hive privilege object to the RangerHiveResource used for policy
 * evaluation, attaching owner-user information where the object type has one.
 * Returns null for object type NONE.
 */
private RangerHiveResource getHiveResource(HiveOperationType hiveOpType, HivePrivilegeObject hiveObj, List<HivePrivilegeObject> inputs, List<HivePrivilegeObject> outputs) {
    HiveObjectType objectType = getObjectType(hiveObj, hiveOpType);
    RangerHiveResource resource = null;
    switch (objectType) {
        case DATABASE:
            resource = new RangerHiveResource(objectType, hiveObj.getDbname());
            // On create the database does not exist yet, so there is no owner to look up.
            if (!isCreateOperation(hiveOpType)) {
                setOwnerUser(resource, hiveObj, getMetaStoreClient());
            }
            break;
        case TABLE:
        case VIEW:
        case FUNCTION:
            resource = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName());
            // To suppress PMD violations
            if (LOG.isDebugEnabled()) {
                LOG.debug("Size of inputs = [" + (CollectionUtils.isNotEmpty(inputs) ? inputs.size() : 0) + ", Size of outputs = [" + (CollectionUtils.isNotEmpty(outputs) ? outputs.size() : 0) + "]");
            }
            setOwnerUser(resource, hiveObj, getMetaStoreClient());
            // For create operations, fall back to the owning database's owner.
            if (isCreateOperation(hiveOpType)) {
                HivePrivilegeObject dbObject = getDatabaseObject(hiveObj.getDbname(), inputs, outputs);
                if (dbObject != null) {
                    setOwnerUser(resource, dbObject, getMetaStoreClient());
                }
            }
            break;
        case COLUMN:
            resource = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName(), StringUtils.join(hiveObj.getColumns(), COLUMN_SEP));
            setOwnerUser(resource, hiveObj, getMetaStoreClient());
            break;
        case PARTITION:
        case INDEX:
            resource = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName());
            break;
        case URI:
        case SERVICE_NAME:
        case GLOBAL:
            // All three are identified by the object name alone.
            resource = new RangerHiveResource(objectType, hiveObj.getObjectName());
            break;
        case NONE:
        default:
            break;
    }
    if (resource != null) {
        resource.setServiceDef(hivePlugin == null ? null : hivePlugin.getServiceDef());
    }
    return resource;
}
Aggregations