Usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType in the Apache Hive project: class DatabaseFilterContext, method getInputHObjs().
/**
 * Builds the list of {@link HivePrivilegeObject}s representing the databases
 * to be filtered: one DATABASE-type object per name in {@code databases},
 * each with action type OTHER (i.e. a metadata-listing check, not a DML action).
 *
 * @return the privilege objects to present to the authorizer; never null,
 *         empty when there are no databases to filter
 */
private List<HivePrivilegeObject> getInputHObjs() {
    // Fixed: trace messages previously said "getOutputHObjs()", which made
    // debug logs point at the wrong method.
    LOG.debug("==> DatabaseFilterContext.getInputHObjs()");
    List<HivePrivilegeObject> ret = new ArrayList<>();
    for (String database : databases) {
        HivePrivilegeObjectType type = HivePrivilegeObjectType.DATABASE;
        HivePrivObjectActionType objectActionType = HivePrivObjectActionType.OTHER;
        // Only the type, database name and action type are meaningful for a
        // database-listing check; the remaining constructor args are unused.
        HivePrivilegeObject hivePrivilegeObject =
            new HivePrivilegeObject(type, database, null, null, null, objectActionType, null, null);
        ret.add(hivePrivilegeObject);
    }
    LOG.debug("<== DatabaseFilterContext.getInputHObjs(): ret=" + ret);
    return ret;
}
Usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType in the Apache Hive project: class Driver, method getHivePrivObjects().
/**
 * Translates the compiler's read/write {@link Entity} sets into the
 * {@link HivePrivilegeObject} form expected by the authorization plugin API.
 * Dummy entities, indirect reads and temporary-URI writes are skipped, as are
 * partitions (authorization is performed at table level for now).
 *
 * @param privObjects    entities to authorize; may be null (treated as empty)
 * @param tableName2Cols optional map from fully-qualified table name
 *                       ("db@table" form produced by Table.getCompleteName)
 *                       to the columns accessed; used to scope TABLE objects
 *                       to column level, may be null
 * @return privilege objects for the authorizer; never null
 */
private static List<HivePrivilegeObject> getHivePrivObjects(Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
    List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
    if (privObjects == null) {
        return hivePrivobjs;
    }
    for (Entity privObject : privObjects) {
        HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
        if (privObject.isDummy()) {
            // do not authorize dummy readEntity or writeEntity
            continue;
        }
        if (privObject instanceof ReadEntity && !((ReadEntity) privObject).isDirect()) {
            // Indirect reads (e.g. a base table referenced only through a view)
            // are not authorized here; see the description of isDirect in ReadEntity.
            continue;
        }
        if (privObject instanceof WriteEntity && ((WriteEntity) privObject).isTempURI()) {
            // do not authorize temporary uris
            continue;
        }
        // TODO: support for authorization on partitions needs to be added
        String dbname = null;
        String objName = null;
        List<String> partKeys = null; // never populated yet: partition-level auth is unsupported
        List<String> columns = null;
        String className = null;
        // Populate the name fields that are meaningful for each entity type;
        // the rest stay null and are ignored by the HivePrivilegeObject ctor.
        switch(privObject.getType()) {
        case DATABASE:
            dbname = privObject.getDatabase().getName();
            break;
        case TABLE:
            dbname = privObject.getTable().getDbName();
            objName = privObject.getTable().getTableName();
            // Narrow to column-level authorization when column info is available.
            columns = tableName2Cols == null ? null : tableName2Cols.get(Table.getCompleteName(dbname, objName));
            break;
        case DFS_DIR:
        case LOCAL_DIR:
            objName = privObject.getD().toString();
            break;
        case FUNCTION:
            // Built-in/temporary functions may have no database — keep dbname null then.
            if (privObject.getDatabase() != null) {
                dbname = privObject.getDatabase().getName();
            }
            objName = privObject.getFunctionName();
            className = privObject.getClassName();
            break;
        case DUMMYPARTITION:
        case PARTITION:
            // not currently handled — skip the entity entirely
            continue;
        case SERVICE_NAME:
            objName = privObject.getServiceName();
            break;
        default:
            throw new AssertionError("Unexpected object type");
        }
        HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
        HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName, partKeys, columns, actionType, null, className);
        hivePrivobjs.add(hPrivObject);
    }
    return hivePrivobjs;
}
Usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType in the Apache Hive project: class TableFilterContext, method getInputHObjs().
/**
 * Builds the list of TABLE_OR_VIEW {@link HivePrivilegeObject}s for the
 * tables to be filtered. When full {@link Table} metadata is available
 * ({@code tables != null}) the owner name and owner type are propagated;
 * otherwise objects are constructed from plain table names within
 * {@code dbName} and carry no owner information.
 *
 * @return the privilege objects to present to the authorizer; never null
 */
private List<HivePrivilegeObject> getInputHObjs() {
    // Fixed: trace messages previously said "getOutputHObjs()", which made
    // debug logs point at the wrong method.
    LOG.debug("==> TableFilterContext.getInputHObjs()");
    List<HivePrivilegeObject> ret = new ArrayList<>();
    // Loop-invariant: all objects share the same type and action type.
    final HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW;
    final HivePrivObjectActionType objectActionType = HivePrivilegeObject.HivePrivObjectActionType.OTHER;
    if (tables != null) {
        for (Table table : tables) {
            HivePrivilegeObject hivePrivilegeObject = new HivePrivilegeObject(type, table.getDbName(),
                table.getTableName(), null, null, objectActionType, null, null, table.getOwner(), table.getOwnerType());
            ret.add(hivePrivilegeObject);
        }
    } else {
        for (String tableName : tableNames) {
            HivePrivilegeObject hivePrivilegeObject =
                new HivePrivilegeObject(type, dbName, tableName, null, null, objectActionType, null, null);
            ret.add(hivePrivilegeObject);
        }
    }
    LOG.debug("<== TableFilterContext.getInputHObjs(): ret=" + ret);
    return ret;
}
Usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType in the Apache Hive project: class CommandAuthorizerV2, method addHivePrivObject().
/**
 * Converts a single query {@link Entity} into one (or, for tables with a
 * storage handler, two) {@link HivePrivilegeObject}s and appends them to
 * {@code hivePrivObjs}. Partitions are skipped: authorization is currently
 * performed at table level only.
 *
 * @param privObject     the entity to convert
 * @param tableName2Cols optional map from fully-qualified table name to the
 *                       accessed columns, used to scope TABLE objects; may be null
 * @param hivePrivObjs   output list the resulting objects are appended to
 * @param hiveOpType     the operation being authorized; controls whether a
 *                       STORAGEHANDLER_URI object is added for create/alter table
 * @throws HiveException if the storage handler's authorization URI cannot be obtained
 */
private static void addHivePrivObject(Entity privObject, Map<String, List<String>> tableName2Cols, List<HivePrivilegeObject> hivePrivObjs, HiveOperationType hiveOpType) throws HiveException {
    HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
    HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
    HivePrivilegeObject hivePrivObject = null;
    switch(privObject.getType()) {
    case DATABASE:
        Database database = privObject.getDatabase();
        hivePrivObject = new HivePrivilegeObject(privObjType, database.getName(), null, null, null, actionType, null, null, database.getOwnerName(), database.getOwnerType());
        break;
    case TABLE:
        Table table = privObject.getTable();
        // Narrow to column-level authorization when column info is available.
        List<String> columns = tableName2Cols == null ? null : tableName2Cols.get(Table.getCompleteName(table.getDbName(), table.getTableName()));
        hivePrivObject = new HivePrivilegeObject(privObjType, table.getDbName(), table.getTableName(), null, columns, actionType, null, null, table.getOwner(), table.getOwnerType());
        if (table.getStorageHandler() != null) {
            // TODO: add hive privilege object for storage based handlers for create and alter table commands.
            if (hiveOpType == HiveOperationType.CREATETABLE || hiveOpType == HiveOperationType.ALTERTABLE_PROPERTIES || hiveOpType == HiveOperationType.CREATETABLE_AS_SELECT) {
                try {
                    // Additionally authorize the URI backing the storage handler,
                    // so the user must hold privileges on the external resource too.
                    String storageUri = table.getStorageHandler().getURIForAuth(table.getTTable()).toString();
                    hivePrivObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.STORAGEHANDLER_URI, null, storageUri, null, null, actionType, null, table.getStorageHandler().getClass().getName(), table.getOwner(), table.getOwnerType()));
                } catch (Exception ex) {
                    LOG.error("Exception occurred while getting the URI from storage handler: " + ex.getMessage(), ex);
                    throw new HiveException("Exception occurred while getting the URI from storage handler: " + ex.getMessage());
                }
            }
        }
        break;
    case DFS_DIR:
    case LOCAL_DIR:
        hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getD().toString(), null, null, actionType, null, null, null, null);
        break;
    case FUNCTION:
        // Built-in/temporary functions may have no database — dbName stays null then.
        String dbName = privObject.getDatabase() != null ? privObject.getDatabase().getName() : null;
        hivePrivObject = new HivePrivilegeObject(privObjType, dbName, privObject.getFunctionName(), null, null, actionType, null, privObject.getClassName(), null, null);
        break;
    case DUMMYPARTITION:
    case PARTITION:
        // TODO: not currently handled — nothing is added for partitions
        return;
    case SERVICE_NAME:
        hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getServiceName(), null, null, actionType, null, null, null, null);
        break;
    case DATACONNECTOR:
        DataConnector connector = privObject.getDataConnector();
        hivePrivObject = new HivePrivilegeObject(privObjType, null, connector.getName(), null, null, actionType, null, null, connector.getOwnerName(), connector.getOwnerType());
        break;
    default:
        throw new AssertionError("Unexpected object type");
    }
    hivePrivObjs.add(hivePrivObject);
}
Usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType in the Apache Ranger project: class RangerHivePlugin, method filterListCmdObjects().
/**
 * Filters a list of metadata objects (databases/tables returned by SHOW-style
 * commands) down to those the current user is authorized to see, consulting
 * the Ranger policy engine per object and auditing each decision.
 *
 * @param objs    the candidate objects; may be null or empty
 * @param context authorization context (command string, client IP); may be null
 * @return the allowed subset; null only when {@code objs} was null, otherwise
 *         a (possibly empty) list
 * @throws HiveAuthzPluginException
 * @throws HiveAccessControlException
 */
// Commented out to avoid build errors until this interface is stable in Hive Branch
// @Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> objs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> filterListCmdObjects(%s, %s)", objs, context));
    }
    RangerPerfTracer perf = null;
    RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler(hivePlugin.getConfig());
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEAUTH_REQUEST_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_HIVEAUTH_REQUEST_LOG, "RangerHiveAuthorizer.filterListCmdObjects()");
    }
    List<HivePrivilegeObject> ret = null;
    // bail out early if nothing is there to validate!
    if (objs == null) {
        LOG.debug("filterListCmdObjects: meta objects list was null!");
    } else if (objs.isEmpty()) {
        LOG.debug("filterListCmdObjects: meta objects list was empty!");
        ret = objs;
    } else if (getCurrentUserGroupInfo() == null) {
        /*
         * This is null for metastore and there doesn't seem to be a way to tell if one is running as metastore or hiveserver2!
         */
        // Without user info we cannot evaluate policies, so pass everything through.
        LOG.warn("filterListCmdObjects: user information not available");
        ret = objs;
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filterListCmdObjects: number of input objects[" + objs.size() + "]");
        }
        // get user/group info
        // we know this can't be null since we checked it above!
        UserGroupInformation ugi = getCurrentUserGroupInfo();
        HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
        String user = ugi.getShortUserName();
        Set<String> groups = Sets.newHashSet(ugi.getGroupNames());
        Set<String> roles = getCurrentRolesForUser(user, groups);
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("filterListCmdObjects: user[%s], groups[%s], roles[%s] ", user, groups, roles));
        }
        if (ret == null) {
            // if we got any items to filter then we can't return back a null. We must return back a list even if its empty.
            ret = new ArrayList<HivePrivilegeObject>(objs.size());
        }
        for (HivePrivilegeObject privilegeObject : objs) {
            if (LOG.isDebugEnabled()) {
                HivePrivObjectActionType actionType = privilegeObject.getActionType();
                HivePrivilegeObjectType objectType = privilegeObject.getType();
                String objectName = privilegeObject.getObjectName();
                String dbName = privilegeObject.getDbname();
                List<String> columns = privilegeObject.getColumns();
                List<String> partitionKeys = privilegeObject.getPartKeys();
                String commandString = context == null ? null : context.getCommandString();
                String ipAddress = context == null ? null : context.getIpAddress();
                final String format = "filterListCmdObjects: actionType[%s], objectType[%s], objectName[%s], dbName[%s], columns[%s], partitionKeys[%s]; context: commandString[%s], ipAddress[%s]";
                LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns, partitionKeys, commandString, ipAddress));
            }
            RangerHiveResource resource = createHiveResourceForFiltering(privilegeObject);
            if (resource == null) {
                // Object type not mappable to a Ranger resource — drop it from the result.
                LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
            } else {
                RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, roles, context, sessionContext);
                RangerAccessResult result = hivePlugin.isAccessAllowed(request, auditHandler);
                if (result == null) {
                    // Treated as a deny: the object is not added to ret.
                    LOG.error("filterListCmdObjects: Internal error: null RangerAccessResult object received back from isAccessAllowed()!");
                } else if (!result.getIsAllowed()) {
                    if (LOG.isDebugEnabled()) {
                        String path = resource.getAsString();
                        LOG.debug(String.format("filterListCmdObjects: Permission denied: user [%s] does not have [%s] privilege on [%s]. resource[%s], request[%s], result[%s]", user, request.getHiveAccessType().name(), path, resource, request, result));
                    }
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(String.format("filterListCmdObjects: access allowed. resource[%s], request[%s], result[%s]", resource, request, result));
                    }
                    ret.add(privilegeObject);
                }
            }
        }
    }
    // Flush audits and close out the perf tracer regardless of which branch ran.
    auditHandler.flushAudit();
    RangerPerfTracer.log(perf);
    if (LOG.isDebugEnabled()) {
        int count = ret == null ? 0 : ret.size();
        LOG.debug(String.format("<== filterListCmdObjects: count[%d], ret[%s]", count, ret));
    }
    return ret;
}
Aggregations: the snippets above collect usages of HivePrivilegeObject.HivePrivObjectActionType across the Apache Hive and Apache Ranger projects.