Use of org.apache.hadoop.hive.metastore.api.PrincipalType in project flink by apache: class HiveParserAuthorizationParseUtils, method getPrincipalDesc.
public static PrincipalDesc getPrincipalDesc(HiveParserASTNode principal) {
    PrincipalType type = getPrincipalType(principal);
    if (type != null) {
        String text = principal.getChild(0).getText();
        String principalName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(text);
        return new PrincipalDesc(principalName, type);
    }
    return null;
}
Use of org.apache.hadoop.hive.metastore.api.PrincipalType in project hive by apache: class AuthorizationParseUtils, method getPrincipalDesc.
public static PrincipalDesc getPrincipalDesc(ASTNode principal) {
    PrincipalType type = getPrincipalType(principal);
    if (type != null) {
        String text = principal.getChild(0).getText();
        String principalName = BaseSemanticAnalyzer.unescapeIdentifier(text);
        return new PrincipalDesc(principalName, type);
    }
    return null;
}
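Both getPrincipalDesc variants above delegate to a getPrincipalType helper that is not included in this listing. A minimal sketch of what that helper typically does, assuming the Hive grammar hands the principal node in as a TOK_USER, TOK_GROUP, or TOK_ROLE token (the Flink copy does the same against its own parser types):

// Sketch only: maps the parser token of the principal node to the metastore PrincipalType.
// The token constants are assumed from the Hive grammar; the real helper sits next to
// getPrincipalDesc in AuthorizationParseUtils.
private static PrincipalType getPrincipalType(ASTNode principal) {
    switch (principal.getType()) {
        case HiveParser.TOK_USER:
            return PrincipalType.USER;
        case HiveParser.TOK_GROUP:
            return PrincipalType.GROUP;
        case HiveParser.TOK_ROLE:
            return PrincipalType.ROLE;
        default:
            return null;
    }
}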
Use of org.apache.hadoop.hive.metastore.api.PrincipalType in project hive by apache: class SQLStdHiveAccessController, method showPrivileges.
@Override
public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
        throws HiveAuthzPluginException {
    try {
        // First authorize the call
        if (principal == null) {
            // only the admin is allowed to list privileges for any user
            if (!isUserAdmin()) {
                throw new HiveAccessControlException("User : " + currentUserName + " has to specify"
                        + " a user name or role in the show grant. " + ADMIN_ONLY_MSG);
            }
        } else {
            // principal is specified, authorize on it
            if (!isUserAdmin()) {
                ensureShowGrantAllowed(principal);
            }
        }
        IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
        List<HivePrivilegeInfo> resPrivInfos = new ArrayList<HivePrivilegeInfo>();
        String principalName = principal == null ? null : principal.getName();
        PrincipalType principalType =
                principal == null ? null : AuthorizationUtils.getThriftPrincipalType(principal.getType());
        // get metastore/thrift privilege object using metastore api
        List<HiveObjectPrivilege> msObjPrivs = mClient.list_privileges(principalName, principalType,
                SQLAuthorizationUtils.getThriftHiveObjectRef(privObj));
        // convert the metastore thrift objects to result objects
        for (HiveObjectPrivilege msObjPriv : msObjPrivs) {
            // result principal
            HivePrincipal resPrincipal = new HivePrincipal(msObjPriv.getPrincipalName(),
                    AuthorizationUtils.getHivePrincipalType(msObjPriv.getPrincipalType()));
            // result privilege
            PrivilegeGrantInfo msGrantInfo = msObjPriv.getGrantInfo();
            HivePrivilege resPrivilege = new HivePrivilege(msGrantInfo.getPrivilege(), null);
            // result object
            HiveObjectRef msObjRef = msObjPriv.getHiveObject();
            if (!isSupportedObjectType(msObjRef.getObjectType())) {
                // ignore them
                continue;
            }
            HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
                    getPluginPrivilegeObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
                    msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
            // result grantor principal
            HivePrincipal grantorPrincipal = new HivePrincipal(msGrantInfo.getGrantor(),
                    AuthorizationUtils.getHivePrincipalType(msGrantInfo.getGrantorType()));
            HivePrivilegeInfo resPrivInfo = new HivePrivilegeInfo(resPrincipal, resPrivilege, resPrivObj,
                    grantorPrincipal, msGrantInfo.isGrantOption(), msGrantInfo.getCreateTime());
            resPrivInfos.add(resPrivInfo);
        }
        return resPrivInfos;
    } catch (Exception e) {
        throw SQLAuthorizationUtils.getPluginException("Error showing privileges", e);
    }
}
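showPrivileges converts between the plugin-side principal model (HivePrincipal.HivePrincipalType) and the metastore thrift PrincipalType in both directions via AuthorizationUtils. A minimal sketch of the outbound mapping, assuming a one-to-one correspondence for USER, GROUP, and ROLE (the exact handling of unknown types in AuthorizationUtils may differ):

// Sketch only: illustrates the direction of AuthorizationUtils.getThriftPrincipalType,
// not its exact implementation. toThriftType is a hypothetical name used here.
static PrincipalType toThriftType(HivePrincipal.HivePrincipalType type) {
    if (type == null) {
        return null;
    }
    switch (type) {
        case USER:
            return PrincipalType.USER;
        case GROUP:
            return PrincipalType.GROUP;
        case ROLE:
            return PrincipalType.ROLE;
        default:
            // UNKNOWN has no obvious metastore counterpart in this sketch.
            return null;
    }
}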
Use of org.apache.hadoop.hive.metastore.api.PrincipalType in project hive by apache: class HiveV1Authorizer, method toPrivilegeBag.
private PrivilegeBag toPrivilegeBag(List<HivePrivilege> privileges, HivePrivilegeObject privObject,
        HivePrincipal grantor, boolean grantOption, String authorizer) throws HiveException {
    PrivilegeBag privBag = new PrivilegeBag();
    if (privileges.isEmpty()) {
        return privBag;
    }
    String grantorName = grantor.getName();
    PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
    if (privObject.getType() == null
            || privObject.getType() == HivePrivilegeObject.HivePrivilegeObjectType.GLOBAL) {
        for (HivePrivilege priv : privileges) {
            List<String> columns = priv.getColumns();
            if (columns != null && !columns.isEmpty()) {
                throw new HiveException(
                        "For user-level privileges, column sets should be null. columns=" + columns.toString());
            }
            privBag.addToPrivileges(new HiveObjectPrivilege(
                    new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null), null, null,
                    new PrivilegeGrantInfo(priv.getName(), 0, grantor.getName(), grantorType, grantOption),
                    authorizer));
        }
        return privBag;
    }
    if (privObject.getPartKeys() != null && grantOption) {
        throw new HiveException("Grant does not support partition level.");
    }
    Hive hive = Hive.getWithFastCheck(this.conf);
    Database dbObj = hive.getDatabase(privObject.getDbname());
    if (dbObj == null) {
        throw new HiveException("Database " + privObject.getDbname() + " does not exist");
    }
    Table tableObj = null;
    if (privObject.getObjectName() != null) {
        tableObj = hive.getTable(dbObj.getName(), privObject.getObjectName());
    }
    List<String> partValues = null;
    if (tableObj != null) {
        if ((!tableObj.isPartitioned()) && privObject.getPartKeys() != null) {
            throw new HiveException(
                    "Table is not partitioned, but partition name is present: partSpec=" + privObject.getPartKeys());
        }
        if (privObject.getPartKeys() != null) {
            Map<String, String> partSpec =
                    Warehouse.makeSpecFromValues(tableObj.getPartitionKeys(), privObject.getPartKeys());
            Partition partObj = hive.getPartition(tableObj, partSpec, false).getTPartition();
            partValues = partObj.getValues();
        }
    }
    for (HivePrivilege priv : privileges) {
        List<String> columns = priv.getColumns();
        if (columns != null && !columns.isEmpty()) {
            if (!priv.supportsScope(PrivilegeScope.COLUMN_LEVEL_SCOPE)) {
                throw new HiveException(priv.getName() + " does not support column level privilege.");
            }
            if (tableObj == null) {
                throw new HiveException(
                        "For user-level/database-level privileges, column sets should be null. columns=" + columns);
            }
            for (int i = 0; i < columns.size(); i++) {
                privBag.addToPrivileges(new HiveObjectPrivilege(
                        new HiveObjectRef(HiveObjectType.COLUMN, dbObj.getName(), tableObj.getTableName(),
                                partValues, columns.get(i)),
                        null, null,
                        new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption),
                        authorizer));
            }
        } else if (tableObj == null) {
            privBag.addToPrivileges(new HiveObjectPrivilege(
                    new HiveObjectRef(HiveObjectType.DATABASE, dbObj.getName(), null, null, null), null, null,
                    new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
        } else if (partValues == null) {
            privBag.addToPrivileges(new HiveObjectPrivilege(
                    new HiveObjectRef(HiveObjectType.TABLE, dbObj.getName(), tableObj.getTableName(), null, null),
                    null, null,
                    new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
        } else {
            privBag.addToPrivileges(new HiveObjectPrivilege(
                    new HiveObjectRef(HiveObjectType.PARTITION, dbObj.getName(), tableObj.getTableName(),
                            partValues, null),
                    null, null,
                    new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
        }
    }
    return privBag;
}
Use of org.apache.hadoop.hive.metastore.api.PrincipalType in project hive by apache: class HiveV1Authorizer, method grantOrRevokePrivs.
private void grantOrRevokePrivs(List<HivePrincipal> principals, PrivilegeBag privBag, boolean isGrant,
        boolean grantOption) throws HiveException {
    for (HivePrincipal principal : principals) {
        PrincipalType type = AuthorizationUtils.getThriftPrincipalType(principal.getType());
        for (HiveObjectPrivilege priv : privBag.getPrivileges()) {
            priv.setPrincipalName(principal.getName());
            priv.setPrincipalType(type);
        }
        Hive hive = Hive.getWithFastCheck(this.conf);
        if (isGrant) {
            hive.grantPrivileges(privBag);
        } else {
            hive.revokePrivileges(privBag, grantOption);
        }
    }
}
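toPrivilegeBag and grantOrRevokePrivs are private helpers; a plausible wiring of the two inside HiveV1Authorizer's grant path is sketched below. The signature follows the usual access-controller plugin interface; the error handling and the null authorizer argument are assumptions for this sketch, not the class's actual code.

// Sketch only: how the two helpers shown above could be combined for a grant.
@Override
public void grantPrivileges(List<HivePrincipal> principals, List<HivePrivilege> privileges,
        HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption)
        throws HiveAuthzPluginException, HiveAccessControlException {
    try {
        // null authorizer: assumption made for this sketch
        PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption, null);
        grantOrRevokePrivs(principals, privBag, true, grantOption);
    } catch (HiveException e) {
        throw new HiveAuthzPluginException("Error granting privileges", e);
    }
}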