Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project hive by apache.
From the class DefaultHiveAuthorizationTranslator, method getHivePrivilegeObject.
@Override
public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc) throws HiveException {
  // A null subject means ALL for show grants and GLOBAL for grant/revoke,
  // which is represented by a null object type and null db/table names.
  HivePrivilegeObjectType privObjType = null;
  List<String> partitionValues = null;
  List<String> columnNames = null;
  String[] dbTableName;
  if (privSubjectDesc == null) {
    dbTableName = new String[] { null, null };
  } else {
    // A table subject may be qualified ("db.table"); a database subject is just the db name.
    dbTableName = privSubjectDesc.getTable()
        ? Utilities.getDbTableName(privSubjectDesc.getObject())
        : new String[] { privSubjectDesc.getObject(), null };
    if (privSubjectDesc.getPartSpec() != null) {
      partitionValues = new ArrayList<String>(privSubjectDesc.getPartSpec().values());
    }
    columnNames = privSubjectDesc.getColumns();
    privObjType = AuthorizationUtils.getPrivObjectType(privSubjectDesc);
  }
  return new HivePrivilegeObject(privObjType, dbTableName[0], dbTableName[1], partitionValues, columnNames, null);
}
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project drill by apache.
From the class HiveAuthorizationHelper, method authorizeReadTable.
/**
 * Authorizes a "READ TABLE" request on the given db.table. A
 * {@link HiveAccessControlException} is thrown when access is not permitted.
 * This is a no-op when authorization is disabled.
 *
 * @param dbName name of the database containing the table
 * @param tableName name of the table being read
 */
public void authorizeReadTable(final String dbName, final String tableName) throws HiveAccessControlException {
  if (authzEnabled) {
    final HivePrivilegeObject tableToRead =
        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
    authorize(HiveOperationType.QUERY, ImmutableList.of(tableToRead),
        Collections.<HivePrivilegeObject>emptyList(), "READ TABLE");
  }
}
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project ranger by apache.
From the class RangerHivePlugin, method showPrivileges.
/**
 * Lists the privileges granted to {@code principal} on {@code privObj} by querying the
 * Hive metastore and converting each supported thrift privilege into a plugin-level
 * {@link HivePrivilegeInfo}.
 *
 * @param principal principal to list grants for; null means the metastore decides (all),
 *                  which requires a user name or role and is rejected below
 * @param privObj   object whose privileges are listed; converted to a thrift object ref
 * @throws HiveAuthzPluginException wrapping any failure, including access-control rejections
 */
@Override
public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj) throws HiveAuthzPluginException {
try {
LOG.debug("RangerHiveAuthorizer.showPrivileges()");
IMetaStoreClient mClient = getMetastoreClientFactory().getHiveMetastoreClient();
List<HivePrivilegeInfo> resPrivInfos = new ArrayList<HivePrivilegeInfo>();
String principalName = null;
PrincipalType principalType = null;
if (principal != null) {
principalName = principal.getName();
principalType = AuthorizationUtils.getThriftPrincipalType(principal.getType());
}
List<HiveObjectPrivilege> msObjPrivs = mClient.list_privileges(principalName, principalType, this.getThriftHiveObjectRef(privObj));
if (msObjPrivs != null) {
for (HiveObjectPrivilege msObjPriv : msObjPrivs) {
HiveObjectRef msObjRef = msObjPriv.getHiveObject();
org.apache.hadoop.hive.metastore.api.HiveObjectType objectType = msObjRef.getObjectType();
// Silently skip object types this plugin does not model (e.g. GLOBAL).
if (!isSupportedObjectType(objectType)) {
continue;
}
HivePrincipal resPrincipal = new HivePrincipal(msObjPriv.getPrincipalName(), AuthorizationUtils.getHivePrincipalType(msObjPriv.getPrincipalType()));
PrivilegeGrantInfo msGrantInfo = msObjPriv.getGrantInfo();
HivePrivilege resPrivilege = new HivePrivilege(msGrantInfo.getPrivilege(), null);
HivePrivilegeObject resPrivObj = new HivePrivilegeObject(getPluginPrivilegeObjType(objectType), msObjRef.getDbName(), msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
HivePrincipal grantorPrincipal = new HivePrincipal(msGrantInfo.getGrantor(), AuthorizationUtils.getHivePrincipalType(msGrantInfo.getGrantorType()));
HivePrivilegeInfo resPrivInfo = new HivePrivilegeInfo(resPrincipal, resPrivilege, resPrivObj, grantorPrincipal, msGrantInfo.isGrantOption(), msGrantInfo.getCreateTime());
resPrivInfos.add(resPrivInfo);
}
} else {
throw new HiveAccessControlException("RangerHiveAuthorizer.showPrivileges():User has to specify" + " a user name or role in the show grant. ");
}
return resPrivInfos;
} catch (Exception e) {
// FIX: the previous message falsely claimed a null result regardless of the actual
// failure, and dropped the exception; log the real cause with its stack trace.
LOG.error("RangerHiveAuthorizer.showPrivileges() failed", e);
throw new HiveAuthzPluginException("hive showPrivileges" + ": " + e.getMessage(), e);
}
}
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project hive by apache.
From the class SemanticAnalyzer, method walkASTMarkTABREF.
/**
 * Walks the query AST breadth-first, collecting every TOK_TABREF node as a
 * {@link HivePrivilegeObject} keyed to its parse context, then asks the table mask
 * which of those tables require row-filter / column-mask rewriting and records the
 * replacement text for each.
 *
 * FIX: removed an unused local ({@code String replacementText = null;}) that shadowed
 * nothing and was never read; the live {@code replacementText} is declared in the
 * rewrite loop below.
 *
 * @throws SemanticException when masking/filtering is required for an ACID
 *         update/delete/merge query, which is not supported
 */
private static void walkASTMarkTABREF(TableMask tableMask, ASTNode ast, Set<String> cteAlias, Context ctx, Hive db, Map<String, Table> tabNameToTabObject, Set<Integer> ignoredTokens) throws SemanticException {
Queue<Node> queue = new LinkedList<>();
queue.add(ast);
// LinkedHashMap keeps table order stable so translations are applied deterministically.
Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
while (!queue.isEmpty()) {
ASTNode astNode = (ASTNode) queue.poll();
if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
int aliasIndex = 0;
StringBuilder additionalTabInfo = new StringBuilder();
for (int index = 1; index < astNode.getChildCount(); index++) {
ASTNode ct = (ASTNode) astNode.getChild(index);
if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
// Sampling / table-properties clauses must be carried verbatim into any rewrite.
additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
} else {
aliasIndex = index;
}
}
ASTNode tableTree = (ASTNode) (astNode.getChild(0));
String tabIdName = getUnescapedName(tableTree);
String alias;
if (aliasIndex != 0) {
alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
} else {
alias = getUnescapedUnqualifiedTableName(tableTree);
}
// select * from TAB2 [no masking]
if (cteAlias.contains(tabIdName)) {
continue;
}
Table table = null;
try {
// Cache table lookups across the walk.
if (!tabNameToTabObject.containsKey(tabIdName)) {
table = db.getTable(tabIdName, true);
tabNameToTabObject.put(tabIdName, table);
} else {
table = tabNameToTabObject.get(tabIdName);
}
} catch (HiveException e) {
// Table may not be found when materialization of CTE is on.
STATIC_LOG.debug("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
continue;
}
List<String> colNames = new ArrayList<>();
List<String> colTypes = new ArrayList<>();
for (FieldSchema col : table.getAllCols()) {
colNames.add(col.getName());
colTypes.add(col.getType());
}
basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames), new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode, table.isView()));
}
if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) {
for (Node child : astNode.getChildren()) {
queue.offer(child);
}
}
}
List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>(basicInfos.keySet());
List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
for (HivePrivilegeObject privObj : needRewritePrivObjs) {
MaskAndFilterInfo info = basicInfos.get(privObj);
String replacementText = tableMask.create(privObj, info);
if (replacementText != null) {
// We don't support masking/filtering against ACID query at the moment
if (ctx.getIsUpdateDeleteMerge()) {
throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED, privObj.getDbname(), privObj.getObjectName());
}
tableMask.setNeedsRewrite(true);
tableMask.addTranslation(info.astNode, replacementText);
}
}
}
}
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project hive by apache.
From the class SQLStdHiveAccessController, method showPrivileges.
/**
 * Lists the privileges granted to {@code principal} on {@code privObj}. Only an admin
 * may list privileges without naming a principal, or for a principal other than those
 * allowed by {@code ensureShowGrantAllowed}. Metastore thrift results are converted to
 * plugin-level {@link HivePrivilegeInfo} objects; unsupported object types are skipped.
 */
@Override
public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj) throws HiveAuthzPluginException {
  try {
    // First authorize the call
    if (principal == null) {
      // only the admin is allowed to list privileges for any user
      if (!isUserAdmin()) {
        throw new HiveAccessControlException("User : " + currentUserName + " has to specify" + " a user name or role in the show grant. " + ADMIN_ONLY_MSG);
      }
    } else if (!isUserAdmin()) {
      // principal is specified, authorize on it
      ensureShowGrantAllowed(principal);
    }
    IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
    String thriftPrincipalName = principal == null ? null : principal.getName();
    PrincipalType thriftPrincipalType = principal == null ? null : AuthorizationUtils.getThriftPrincipalType(principal.getType());
    // get metastore/thrift privilege object using metastore api
    List<HiveObjectPrivilege> thriftPrivs = metastoreClient.list_privileges(thriftPrincipalName, thriftPrincipalType, SQLAuthorizationUtils.getThriftHiveObjectRef(privObj));
    // convert the metastore thrift objects to result objects
    List<HivePrivilegeInfo> results = new ArrayList<HivePrivilegeInfo>();
    for (HiveObjectPrivilege thriftPriv : thriftPrivs) {
      HiveObjectRef objRef = thriftPriv.getHiveObject();
      if (!isSupportedObjectType(objRef.getObjectType())) {
        // ignore object types this controller does not model
        continue;
      }
      PrivilegeGrantInfo grantInfo = thriftPriv.getGrantInfo();
      HivePrincipal grantee = new HivePrincipal(thriftPriv.getPrincipalName(), AuthorizationUtils.getHivePrincipalType(thriftPriv.getPrincipalType()));
      HivePrivilege privilege = new HivePrivilege(grantInfo.getPrivilege(), null);
      HivePrivilegeObject object = new HivePrivilegeObject(getPluginPrivilegeObjType(objRef.getObjectType()), objRef.getDbName(), objRef.getObjectName(), objRef.getPartValues(), objRef.getColumnName());
      HivePrincipal grantor = new HivePrincipal(grantInfo.getGrantor(), AuthorizationUtils.getHivePrincipalType(grantInfo.getGrantorType()));
      results.add(new HivePrivilegeInfo(grantee, privilege, object, grantor, grantInfo.isGrantOption(), grantInfo.getCreateTime()));
    }
    return results;
  } catch (Exception e) {
    throw SQLAuthorizationUtils.getPluginException("Error showing privileges", e);
  }
}
Aggregations