Usage of org.apache.hadoop.hive.metastore.api.Role in the Apache Hive project: class HBaseUtils, method deserializeRole.
/**
 * Deserialize a role whose name is already known, avoiding the cost of
 * re-deserializing the name out of the stored value.
 * @param roleName name of the role
 * @param value serialized role record fetched from HBase
 * @return the reconstructed Role
 * @throws InvalidProtocolBufferException if {@code value} is not a valid serialized Role
 */
static Role deserializeRole(String roleName, byte[] value) throws InvalidProtocolBufferException {
  Role role = new Role();
  role.setRoleName(roleName);
  HbaseMetastoreProto.Role protoRole = HbaseMetastoreProto.Role.parseFrom(value);
  // The thrift Role stores createTime as an int while the proto stores a long,
  // so the value is deliberately narrowed here.
  role.setCreateTime((int) protoRole.getCreateTime());
  if (protoRole.hasOwnerName()) {
    role.setOwnerName(protoRole.getOwnerName());
  }
  return role;
}
Usage of org.apache.hadoop.hive.metastore.api.Role in the Apache Hive project: class HBaseStore, method getRole.
/**
 * Fetch a single role by name.
 * @param roleName name of the role to look up
 * @return the role
 * @throws NoSuchObjectException if the role does not exist or the read fails
 */
@Override
public Role getRole(String roleName) throws NoSuchObjectException {
  boolean commit = false;
  openTransaction();
  try {
    Role role = getHBase().getRole(roleName);
    if (role == null) {
      throw new NoSuchObjectException("Unable to find role " + roleName);
    }
    commit = true;
    return role;
  } catch (IOException e) {
    LOG.error("Unable to get role", e);
    // Name the role (not "table") so the caller can tell which lookup failed.
    throw new NoSuchObjectException("Error reading role " + roleName + ": " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
Usage of org.apache.hadoop.hive.metastore.api.Role in the Apache Hive project: class HBaseStore, method listRoleNames.
/**
 * List the names of all roles known to the metastore.
 * @return names of every stored role
 */
@Override
public List<String> listRoleNames() {
  boolean committed = false;
  openTransaction();
  try {
    List<Role> allRoles = getHBase().scanRoles();
    // Presize the result; every scanned role contributes exactly one name.
    List<String> names = new ArrayList<String>(allRoles.size());
    for (Role r : allRoles) {
      names.add(r.getRoleName());
    }
    committed = true;
    return names;
  } catch (IOException ioe) {
    throw new RuntimeException(ioe);
  } finally {
    commitOrRoleBack(committed);
  }
}
Usage of org.apache.hadoop.hive.metastore.api.Role in the Apache Hive project: class HBaseReadWrite, method getPrincipalDirectRoles.
/**
 * Find all roles directly participated in by a given principal.  This builds the role cache
 * because it assumes that subsequent calls may be made to find roles participated in indirectly.
 * @param name user name or role name
 * @param type whether the principal is a user or a role
 * @return list of all roles the principal directly participates in
 * @throws IOException if the underlying HBase fetch fails
 */
List<Role> getPrincipalDirectRoles(String name, PrincipalType type) throws IOException {
  buildRoleCache();

  // Scan the cached grant lists for entries naming this principal.
  Set<String> rolesFound = new HashSet<>();
  for (Map.Entry<String, HbaseMetastoreProto.RoleGrantInfoList> e : roleCache.entrySet()) {
    for (HbaseMetastoreProto.RoleGrantInfo giw : e.getValue().getGrantInfoList()) {
      if (HBaseUtils.convertPrincipalTypes(giw.getPrincipalType()) == type
          && giw.getPrincipalName().equals(name)) {
        rolesFound.add(e.getKey());
        break;
      }
    }
  }

  List<Role> directRoles = new ArrayList<>(rolesFound.size());
  if (rolesFound.isEmpty()) {
    // Nothing matched; skip the HBase round trip entirely.
    return directRoles;
  }

  // Batch-fetch the full role records for every match.
  List<Get> gets = new ArrayList<>();
  HTableInterface htab = conn.getHBaseTable(ROLE_TABLE);
  for (String roleFound : rolesFound) {
    byte[] key = HBaseUtils.buildKey(roleFound);
    Get g = new Get(key);
    g.addColumn(CATALOG_CF, CATALOG_COL);
    gets.add(g);
  }
  Result[] results = htab.get(gets);
  for (int i = 0; i < results.length; i++) {
    byte[] serialized = results[i].getValue(CATALOG_CF, CATALOG_COL);
    // A null value means the role disappeared between the cache read and this
    // fetch; silently skip it rather than fail the whole listing.
    if (serialized != null) {
      directRoles.add(HBaseUtils.deserializeRole(results[i].getRow(), serialized));
    }
  }
  return directRoles;
}
Usage of org.apache.hadoop.hive.metastore.api.Role in the Apache Hive project: class HBaseStore, method listRoles.
/**
 * List all roles directly granted to the given principal.  Users additionally
 * receive the implicit public role.
 * @param principalName user name or role name
 * @param principalType whether the principal is a user or a role
 * @return roles directly held by the principal
 */
@Override
public List<Role> listRoles(String principalName, PrincipalType principalType) {
  List<Role> roles = new ArrayList<Role>();
  boolean committed = false;
  openTransaction();
  try {
    try {
      roles.addAll(getHBase().getPrincipalDirectRoles(principalName, principalType));
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
    // Every user implicitly belongs to the public role, so append it last.
    if (principalType == PrincipalType.USER) {
      roles.add(new Role(HiveMetaStore.PUBLIC, 0, null));
    }
    committed = true;
    return roles;
  } finally {
    commitOrRoleBack(committed);
  }
}
Aggregations