Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
From the class HBaseStore, method listPrincipalTableGrantsAll.
@Override
public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(String principalName,
    PrincipalType principalType) {
  List<HiveObjectPrivilege> privileges = new ArrayList<HiveObjectPrivilege>();
  boolean commit = false;
  openTransaction();
  try {
    List<Table> tables = getHBase().scanTables(null, null);
    for (Table table : tables) {
      List<PrivilegeGrantInfo> grants;
      PrincipalPrivilegeSet pps = table.getPrivileges();
      if (pps == null)
        continue;
      Map<String, List<PrivilegeGrantInfo>> map;
      switch (principalType) {
        case USER:
          map = pps.getUserPrivileges();
          break;
        case ROLE:
          map = pps.getRolePrivileges();
          break;
        default:
          throw new RuntimeException("Unknown or unsupported principal type " +
              principalType.toString());
      }
      if (map == null)
        continue;
      grants = map.get(principalName);
      if (grants == null || grants.size() == 0)
        continue;
      for (PrivilegeGrantInfo pgi : grants) {
        privileges.add(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.TABLE,
            table.getDbName(), table.getTableName(), null, null), principalName, principalType,
            pgi));
      }
    }
    commit = true;
    return privileges;
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    commitOrRoleBack(commit);
  }
}
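To see the USER/ROLE map selection in isolation, here is a minimal standalone sketch of the branch the method applies to each table's PrincipalPrivilegeSet. The class and helper names (TableGrantLookupSketch, grantsFor) are made up for illustration and are not part of Hive.

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class TableGrantLookupSketch {
  /**
   * Picks the per-principal-type grant map out of a PrincipalPrivilegeSet and returns
   * the grants recorded for one principal, or null when nothing is recorded.
   */
  static List<PrivilegeGrantInfo> grantsFor(PrincipalPrivilegeSet pps, String principalName,
      PrincipalType principalType) {
    if (pps == null) return null;
    Map<String, List<PrivilegeGrantInfo>> map;
    switch (principalType) {
      case USER:
        map = pps.getUserPrivileges();
        break;
      case ROLE:
        map = pps.getRolePrivileges();
        break;
      default:
        throw new IllegalArgumentException("Unsupported principal type " + principalType);
    }
    return map == null ? null : map.get(principalName);
  }

  public static void main(String[] args) {
    // Hand-built privilege set such as Table.getPrivileges() might return.
    PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet();
    pps.putToUserPrivileges("alice",
        Arrays.asList(new PrivilegeGrantInfo("SELECT", 0, "admin", PrincipalType.ROLE, false)));

    System.out.println(grantsFor(pps, "alice", PrincipalType.USER)); // one SELECT grant
    System.out.println(grantsFor(pps, "bob", PrincipalType.USER));   // null
  }
}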
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
From the class HBaseStore, method getDBPrivilegeSet.
@Override
public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, String userName,
    List<String> groupNames) throws InvalidObjectException, MetaException {
  boolean commit = false;
  openTransaction();
  try {
    PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet();
    Database db = getHBase().getDb(dbName);
    if (db.getPrivileges() != null) {
      List<PrivilegeGrantInfo> pgi;
      // Find the user privileges for this db
      if (db.getPrivileges().getUserPrivileges() != null) {
        pgi = db.getPrivileges().getUserPrivileges().get(userName);
        if (pgi != null) {
          pps.putToUserPrivileges(userName, pgi);
        }
      }
      if (db.getPrivileges().getRolePrivileges() != null) {
        List<String> roles = getHBase().getUserRoles(userName);
        if (roles != null) {
          for (String role : roles) {
            pgi = db.getPrivileges().getRolePrivileges().get(role);
            if (pgi != null) {
              pps.putToRolePrivileges(role, pgi);
            }
          }
        }
      }
    }
    commit = true;
    return pps;
  } catch (IOException e) {
    LOG.error("Unable to get db privileges for user", e);
    throw new MetaException("Unable to get db privileges for user, " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
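The interesting part of getDBPrivilegeSet is the narrowing of the database's full PrincipalPrivilegeSet down to the entries relevant to one user and the roles that user belongs to. Below is a minimal standalone sketch of that narrowing, with hypothetical class and method names (DbPrivilegeFilterSketch, filterForUser) and without the HBase plumbing; the role list is passed in instead of looked up.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class DbPrivilegeFilterSketch {
  /**
   * Copies into a fresh PrincipalPrivilegeSet only the entries of dbPrivs that apply to
   * userName directly or through one of its roles.
   */
  static PrincipalPrivilegeSet filterForUser(PrincipalPrivilegeSet dbPrivs, String userName,
      List<String> roles) {
    PrincipalPrivilegeSet result = new PrincipalPrivilegeSet();
    if (dbPrivs == null) return result;
    if (dbPrivs.getUserPrivileges() != null) {
      List<PrivilegeGrantInfo> pgi = dbPrivs.getUserPrivileges().get(userName);
      if (pgi != null) result.putToUserPrivileges(userName, pgi);
    }
    if (dbPrivs.getRolePrivileges() != null && roles != null) {
      for (String role : roles) {
        List<PrivilegeGrantInfo> pgi = dbPrivs.getRolePrivileges().get(role);
        if (pgi != null) result.putToRolePrivileges(role, pgi);
      }
    }
    return result;
  }

  public static void main(String[] args) {
    PrincipalPrivilegeSet dbPrivs = new PrincipalPrivilegeSet();
    dbPrivs.putToUserPrivileges("alice",
        Arrays.asList(new PrivilegeGrantInfo("ALL", 0, "admin", PrincipalType.ROLE, true)));
    dbPrivs.putToRolePrivileges("analysts",
        Arrays.asList(new PrivilegeGrantInfo("SELECT", 0, "admin", PrincipalType.ROLE, false)));

    // alice belongs to "analysts", so both a user entry and a role entry survive the filter.
    System.out.println(filterForUser(dbPrivs, "alice", Arrays.asList("analysts")));
  }
}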
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
From the class HBaseReadWrite, method removeRoleGrants.
/**
 * Remove all of the grants for a role. This is not cheap.
 * @param roleName Role to remove from all other roles and grants
 * @throws IOException
 */
void removeRoleGrants(String roleName) throws IOException {
  buildRoleCache();
  List<Put> puts = new ArrayList<>();
  // First, walk the role table and remove any references to this role
  for (Map.Entry<String, HbaseMetastoreProto.RoleGrantInfoList> e : roleCache.entrySet()) {
    boolean madeAChange = false;
    List<HbaseMetastoreProto.RoleGrantInfo> rgil = new ArrayList<>();
    rgil.addAll(e.getValue().getGrantInfoList());
    for (int i = 0; i < rgil.size(); i++) {
      if (HBaseUtils.convertPrincipalTypes(rgil.get(i).getPrincipalType()) == PrincipalType.ROLE &&
          rgil.get(i).getPrincipalName().equals(roleName)) {
        rgil.remove(i);
        madeAChange = true;
        break;
      }
    }
    if (madeAChange) {
      Put put = new Put(HBaseUtils.buildKey(e.getKey()));
      HbaseMetastoreProto.RoleGrantInfoList proto =
          HbaseMetastoreProto.RoleGrantInfoList.newBuilder().addAllGrantInfo(rgil).build();
      put.add(CATALOG_CF, ROLES_COL, proto.toByteArray());
      puts.add(put);
      roleCache.put(e.getKey(), proto);
    }
  }
  if (puts.size() > 0) {
    HTableInterface htab = conn.getHBaseTable(ROLE_TABLE);
    htab.put(puts);
    conn.flush(htab);
  }
  // Remove any global privileges held by this role
  PrincipalPrivilegeSet global = getGlobalPrivs();
  if (global != null && global.getRolePrivileges() != null &&
      global.getRolePrivileges().remove(roleName) != null) {
    putGlobalPrivs(global);
  }
  // Now, walk the db table
  puts.clear();
  List<Database> dbs = scanDatabases(null);
  // rare, but can happen
  if (dbs == null)
    dbs = new ArrayList<>();
  for (Database db : dbs) {
    if (db.getPrivileges() != null && db.getPrivileges().getRolePrivileges() != null &&
        db.getPrivileges().getRolePrivileges().remove(roleName) != null) {
      byte[][] serialized = HBaseUtils.serializeDatabase(db);
      Put put = new Put(serialized[0]);
      put.add(CATALOG_CF, CATALOG_COL, serialized[1]);
      puts.add(put);
    }
  }
  if (puts.size() > 0) {
    HTableInterface htab = conn.getHBaseTable(DB_TABLE);
    htab.put(puts);
    conn.flush(htab);
  }
  // Finally, walk the table table
  puts.clear();
  for (Database db : dbs) {
    List<Table> tables = scanTables(db.getName(), null);
    if (tables != null) {
      for (Table table : tables) {
        if (table.getPrivileges() != null && table.getPrivileges().getRolePrivileges() != null &&
            table.getPrivileges().getRolePrivileges().remove(roleName) != null) {
          byte[][] serialized = HBaseUtils.serializeTable(table,
              HBaseUtils.hashStorageDescriptor(table.getSd(), md));
          Put put = new Put(serialized[0]);
          put.add(CATALOG_CF, CATALOG_COL, serialized[1]);
          puts.add(put);
        }
      }
    }
  }
  if (puts.size() > 0) {
    HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
    htab.put(puts);
    conn.flush(htab);
  }
}
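Each of the three passes above (global, database, table) applies the same test-and-remove step to a PrincipalPrivilegeSet before deciding whether to rewrite the record. A minimal sketch of just that step, with hypothetical class and method names (RoleGrantRemovalSketch, dropRole):

import java.util.ArrayList;

import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;

public class RoleGrantRemovalSketch {
  /**
   * Drops roleName from the role-privilege map of a PrincipalPrivilegeSet. Returns true
   * when an entry was actually removed, i.e. when the owning global/db/table record
   * would need to be written back.
   */
  static boolean dropRole(PrincipalPrivilegeSet pps, String roleName) {
    return pps != null
        && pps.getRolePrivileges() != null
        && pps.getRolePrivileges().remove(roleName) != null;
  }

  public static void main(String[] args) {
    PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet();
    pps.putToRolePrivileges("analysts", new ArrayList<>());
    System.out.println(dropRole(pps, "analysts")); // true: record needs rewriting
    System.out.println(dropRole(pps, "analysts")); // false: nothing left to remove
  }
}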
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
From the class HBaseStore, method listAllTableGrants.
@Override
public List<HiveObjectPrivilege> listAllTableGrants(String principalName,
    PrincipalType principalType, String dbName, String tableName) {
  List<PrivilegeGrantInfo> grants;
  List<HiveObjectPrivilege> privileges = new ArrayList<HiveObjectPrivilege>();
  boolean commit = false;
  openTransaction();
  try {
    Table table = getHBase().getTable(dbName, tableName);
    if (table == null)
      return privileges;
    PrincipalPrivilegeSet pps = table.getPrivileges();
    if (pps == null)
      return privileges;
    Map<String, List<PrivilegeGrantInfo>> map;
    switch (principalType) {
      case USER:
        map = pps.getUserPrivileges();
        break;
      case ROLE:
        map = pps.getRolePrivileges();
        break;
      default:
        throw new RuntimeException("Unknown or unsupported principal type " +
            principalType.toString());
    }
    if (map == null)
      return privileges;
    grants = map.get(principalName);
    if (grants == null || grants.size() == 0)
      return privileges;
    for (PrivilegeGrantInfo pgi : grants) {
      privileges.add(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.TABLE, dbName,
          tableName, null, null), principalName, principalType, pgi));
    }
    commit = true;
    return privileges;
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    commitOrRoleBack(commit);
  }
}
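From the caller's side, each returned HiveObjectPrivilege bundles the object reference, the principal, and the grant info. A small illustrative caller against the RawStore interface (HBaseStore being one implementation); the class and method names here (ListTableGrantsCaller, printTableGrants) are hypothetical.

import java.util.List;

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.PrincipalType;

public class ListTableGrantsCaller {
  /**
   * Prints the privileges a principal holds on one table, using any RawStore implementation.
   */
  static void printTableGrants(RawStore store, String principal, PrincipalType type,
      String db, String table) {
    List<HiveObjectPrivilege> privs = store.listAllTableGrants(principal, type, db, table);
    for (HiveObjectPrivilege p : privs) {
      System.out.println(p.getPrincipalName() + " has " + p.getGrantInfo().getPrivilege()
          + " on " + p.getHiveObject().getDbName() + "." + p.getHiveObject().getObjectName());
    }
  }
}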
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
From the class HBaseStore, method listGlobalGrantsAll.
@Override
public List<HiveObjectPrivilege> listGlobalGrantsAll() {
  List<HiveObjectPrivilege> privileges = new ArrayList<HiveObjectPrivilege>();
  boolean commit = false;
  openTransaction();
  try {
    PrincipalPrivilegeSet pps = getHBase().getGlobalPrivs();
    if (pps != null) {
      for (Map.Entry<String, List<PrivilegeGrantInfo>> e : pps.getUserPrivileges().entrySet()) {
        for (PrivilegeGrantInfo pgi : e.getValue()) {
          privileges.add(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.GLOBAL, null,
              null, null, null), e.getKey(), PrincipalType.USER, pgi));
        }
      }
      for (Map.Entry<String, List<PrivilegeGrantInfo>> e : pps.getRolePrivileges().entrySet()) {
        for (PrivilegeGrantInfo pgi : e.getValue()) {
          privileges.add(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.GLOBAL, null,
              null, null, null), e.getKey(), PrincipalType.ROLE, pgi));
        }
      }
    }
    commit = true;
    return privileges;
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    commitOrRoleBack(commit);
  }
}
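The flattening pattern above generalizes to any object reference, not just GLOBAL. A standalone sketch with hypothetical names (FlattenPrivilegeSetSketch, flatten); unlike the snippet above it also null-checks the user and role maps before iterating.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class FlattenPrivilegeSetSketch {
  /**
   * Turns the user and role maps of a PrincipalPrivilegeSet into a flat list of
   * HiveObjectPrivilege entries against a single object reference.
   */
  static List<HiveObjectPrivilege> flatten(PrincipalPrivilegeSet pps, HiveObjectRef ref) {
    List<HiveObjectPrivilege> result = new ArrayList<>();
    if (pps == null) return result;
    if (pps.getUserPrivileges() != null) {
      for (Map.Entry<String, List<PrivilegeGrantInfo>> e : pps.getUserPrivileges().entrySet()) {
        for (PrivilegeGrantInfo pgi : e.getValue()) {
          result.add(new HiveObjectPrivilege(ref, e.getKey(), PrincipalType.USER, pgi));
        }
      }
    }
    if (pps.getRolePrivileges() != null) {
      for (Map.Entry<String, List<PrivilegeGrantInfo>> e : pps.getRolePrivileges().entrySet()) {
        for (PrivilegeGrantInfo pgi : e.getValue()) {
          result.add(new HiveObjectPrivilege(ref, e.getKey(), PrincipalType.ROLE, pgi));
        }
      }
    }
    return result;
  }

  public static void main(String[] args) {
    PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet();
    pps.putToRolePrivileges("admins",
        Arrays.asList(new PrivilegeGrantInfo("ALL", 0, "admin", PrincipalType.USER, true)));
    HiveObjectRef global = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null);
    System.out.println(flatten(pps, global)); // one ROLE-scoped ALL grant at GLOBAL scope
  }
}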