Use of org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo in project hive by apache.
The class HBaseStore, method grantPrivileges.
@Override
public boolean grantPrivileges(PrivilegeBag privileges)
    throws InvalidObjectException, MetaException, NoSuchObjectException {
  boolean commit = false;
  openTransaction();
  try {
    for (HiveObjectPrivilege priv : privileges.getPrivileges()) {
      // Locate the right object to deal with
      PrivilegeInfo privilegeInfo = findPrivilegeToGrantOrRevoke(priv);
      // Now, let's see if we've already got this privilege
      for (PrivilegeGrantInfo info : privilegeInfo.grants) {
        if (info.getPrivilege().equals(priv.getGrantInfo().getPrivilege())) {
          throw new InvalidObjectException(priv.getPrincipalName() + " already has " +
              priv.getGrantInfo().getPrivilege() + " on " + privilegeInfo.typeErrMsg);
        }
      }
      privilegeInfo.grants.add(priv.getGrantInfo());
      writeBackGrantOrRevoke(priv, privilegeInfo);
    }
    commit = true;
    return true;
  } finally {
    commitOrRoleBack(commit);
  }
}
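For context, a minimal sketch of how a caller might assemble the PrivilegeBag that grantPrivileges consumes. The helper name grantSelectOnDb, the SELECT privilege, and the assumption that an initialized RawStore (such as HBaseStore) is available are illustrative, not part of the Hive source.

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

// Hypothetical helper: grant SELECT on a database to a user through a RawStore implementation.
static void grantSelectOnDb(RawStore store, String dbName, String userName, String grantorName)
    throws MetaException, InvalidObjectException, NoSuchObjectException {
  HiveObjectRef dbRef = new HiveObjectRef(HiveObjectType.DATABASE, dbName, null, null, null);
  // createTime is in seconds; the grantor is recorded on the grant itself.
  PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo(
      "SELECT", (int) (System.currentTimeMillis() / 1000), grantorName, PrincipalType.USER, false);
  HiveObjectPrivilege priv =
      new HiveObjectPrivilege(dbRef, userName, PrincipalType.USER, grantInfo);
  PrivilegeBag bag = new PrivilegeBag();
  bag.addToPrivileges(priv);
  // Throws InvalidObjectException if the user already holds SELECT on this object (see above).
  store.grantPrivileges(bag);
}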
Use of org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo in project hive by apache.
The class BitSetCheckedAuthorizationProvider, method getPrivilegeStringList.
private List<String> getPrivilegeStringList(Collection<List<PrivilegeGrantInfo>> privCollection) {
  List<String> userPrivs = new ArrayList<String>();
  if (privCollection != null && privCollection.size() > 0) {
    for (List<PrivilegeGrantInfo> grantList : privCollection) {
      if (grantList == null) {
        continue;
      }
      for (int i = 0; i < grantList.size(); i++) {
        PrivilegeGrantInfo grant = grantList.get(i);
        userPrivs.add(grant.getPrivilege());
      }
    }
  }
  return userPrivs;
}
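The collection passed to getPrivilegeStringList typically comes from a PrincipalPrivilegeSet, whose user, group, and role maps hold List<PrivilegeGrantInfo> values. Below is a standalone sketch of the same flattening; the method name privilegeNames and the choice of user-level privileges are assumptions for illustration.

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

// Standalone equivalent of the private helper above, fed from a PrincipalPrivilegeSet.
static List<String> privilegeNames(PrincipalPrivilegeSet privSet) {
  List<String> names = new ArrayList<String>();
  if (privSet == null || privSet.getUserPrivileges() == null) {
    return names;
  }
  // values() yields a Collection<List<PrivilegeGrantInfo>>, the same shape the method above accepts.
  Collection<List<PrivilegeGrantInfo>> grants = privSet.getUserPrivileges().values();
  for (List<PrivilegeGrantInfo> grantList : grants) {
    if (grantList == null) {
      continue;
    }
    for (PrivilegeGrantInfo grant : grantList) {
      names.add(grant.getPrivilege());
    }
  }
  return names;
}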
Use of org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo in project hive by apache.
The class HiveV1Authorizer, method toPrivilegeBag.
private PrivilegeBag toPrivilegeBag(List<HivePrivilege> privileges, HivePrivilegeObject privObject,
    HivePrincipal grantor, boolean grantOption) throws HiveException {
  PrivilegeBag privBag = new PrivilegeBag();
  if (privileges.isEmpty()) {
    return privBag;
  }
  String grantorName = grantor.getName();
  PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
  if (privObject.getType() == null ||
      privObject.getType() == HivePrivilegeObject.HivePrivilegeObjectType.GLOBAL) {
    for (HivePrivilege priv : privileges) {
      List<String> columns = priv.getColumns();
      if (columns != null && !columns.isEmpty()) {
        throw new HiveException("For user-level privileges, column sets should be null. columns=" +
            columns.toString());
      }
      privBag.addToPrivileges(new HiveObjectPrivilege(
          new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null), null, null,
          new PrivilegeGrantInfo(priv.getName(), 0, grantor.getName(), grantorType, grantOption)));
    }
    return privBag;
  }
  if (privObject.getPartKeys() != null && grantOption) {
    throw new HiveException("Grant does not support partition level.");
  }
  Hive hive = Hive.getWithFastCheck(this.conf);
  Database dbObj = hive.getDatabase(privObject.getDbname());
  if (dbObj == null) {
    throw new HiveException("Database " + privObject.getDbname() + " does not exists");
  }
  Table tableObj = null;
  if (privObject.getObjectName() != null) {
    tableObj = hive.getTable(dbObj.getName(), privObject.getObjectName());
  }
  List<String> partValues = null;
  if (tableObj != null) {
    if ((!tableObj.isPartitioned()) && privObject.getPartKeys() != null) {
      throw new HiveException("Table is not partitioned, but partition name is present: partSpec=" +
          privObject.getPartKeys());
    }
    if (privObject.getPartKeys() != null) {
      Map<String, String> partSpec =
          Warehouse.makeSpecFromValues(tableObj.getPartitionKeys(), privObject.getPartKeys());
      Partition partObj = hive.getPartition(tableObj, partSpec, false).getTPartition();
      partValues = partObj.getValues();
    }
  }
  for (HivePrivilege priv : privileges) {
    List<String> columns = priv.getColumns();
    if (columns != null && !columns.isEmpty()) {
      if (!priv.supportsScope(PrivilegeScope.COLUMN_LEVEL_SCOPE)) {
        throw new HiveException(priv.getName() + " does not support column level privilege.");
      }
      if (tableObj == null) {
        throw new HiveException(
            "For user-level/database-level privileges, column sets should be null. columns=" + columns);
      }
      for (int i = 0; i < columns.size(); i++) {
        privBag.addToPrivileges(new HiveObjectPrivilege(
            new HiveObjectRef(HiveObjectType.COLUMN, dbObj.getName(), tableObj.getTableName(),
                partValues, columns.get(i)), null, null,
            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
      }
    } else if (tableObj == null) {
      privBag.addToPrivileges(new HiveObjectPrivilege(
          new HiveObjectRef(HiveObjectType.DATABASE, dbObj.getName(), null, null, null), null, null,
          new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
    } else if (partValues == null) {
      privBag.addToPrivileges(new HiveObjectPrivilege(
          new HiveObjectRef(HiveObjectType.TABLE, dbObj.getName(), tableObj.getTableName(), null, null),
          null, null,
          new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
    } else {
      privBag.addToPrivileges(new HiveObjectPrivilege(
          new HiveObjectRef(HiveObjectType.PARTITION, dbObj.getName(), tableObj.getTableName(),
              partValues, null), null, null,
          new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
    }
  }
  return privBag;
}
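The branches above differ only in the HiveObjectRef they build; the PrivilegeGrantInfo is constructed the same way each time. Below is a hand-built sketch mirroring the TABLE branch; the method name tableLevelBag, the UPDATE privilege, and the "admin" grantor are placeholders, not part of HiveV1Authorizer.

import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

// Hand-built equivalent of the TABLE branch above: UPDATE on dbName.tableName, no grant option.
static PrivilegeBag tableLevelBag(String dbName, String tableName) {
  PrivilegeBag privBag = new PrivilegeBag();
  HiveObjectRef tableRef = new HiveObjectRef(HiveObjectType.TABLE, dbName, tableName, null, null);
  PrivilegeGrantInfo grantInfo =
      new PrivilegeGrantInfo("UPDATE", 0, "admin", PrincipalType.USER, false);
  // Principal name and type are left null here, matching the branches above.
  privBag.addToPrivileges(new HiveObjectPrivilege(tableRef, null, null, grantInfo));
  return privBag;
}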
Use of org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo in project hive by apache.
The class SQLAuthorizationUtils, method getThriftPrivilegesBag.
/**
 * Create thrift privileges bag
 *
 * @param hivePrincipals
 * @param hivePrivileges
 * @param hivePrivObject
 * @param grantorPrincipal
 * @param grantOption
 * @return
 * @throws HiveAuthzPluginException
 */
static PrivilegeBag getThriftPrivilegesBag(List<HivePrincipal> hivePrincipals,
    List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
    HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException {
  HiveObjectRef privObj = getThriftHiveObjectRef(hivePrivObject);
  PrivilegeBag privBag = new PrivilegeBag();
  for (HivePrivilege privilege : hivePrivileges) {
    if (privilege.getColumns() != null && privilege.getColumns().size() > 0) {
      throw new HiveAuthzPluginException("Privileges on columns not supported currently" +
          " in sql standard authorization mode");
    }
    if (!SUPPORTED_PRIVS_SET.contains(privilege.getName().toUpperCase(Locale.US))) {
      throw new HiveAuthzPluginException("Privilege: " + privilege.getName() +
          " is not supported in sql standard authorization mode");
    }
    PrivilegeGrantInfo grantInfo =
        getThriftPrivilegeGrantInfo(privilege, grantorPrincipal, grantOption, 0);
    for (HivePrincipal principal : hivePrincipals) {
      HiveObjectPrivilege objPriv = new HiveObjectPrivilege(privObj, principal.getName(),
          AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
      privBag.addToPrivileges(objPriv);
    }
  }
  return privBag;
}
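A hedged sketch of the inputs this method expects follows. The helper name selectOnTableFor, the "admin" grantor, and the placement alongside SQLAuthorizationUtils (the method is package-visible) are assumptions for illustration.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

// Sketch: build the principal, privilege, and object arguments and delegate to the method above.
static PrivilegeBag selectOnTableFor(String userName, String dbName, String tableName)
    throws HiveAuthzPluginException {
  List<HivePrincipal> grantees =
      Arrays.asList(new HivePrincipal(userName, HivePrincipal.HivePrincipalType.USER));
  // No columns: column-level privileges are rejected above.
  List<HivePrivilege> privileges = Arrays.asList(new HivePrivilege("SELECT", null));
  HivePrivilegeObject table =
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
  HivePrincipal grantor = new HivePrincipal("admin", HivePrincipal.HivePrincipalType.USER);
  return SQLAuthorizationUtils.getThriftPrivilegesBag(grantees, privileges, table, grantor, false);
}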