Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.
In class SQLAuthorizationUtils, method getPrivilegesFromMetaStore.
/**
 * Get the privileges this user (userName argument) has on the object
 * (hivePrivObject argument). If isAdmin is true, adds an admin privilege as
 * well.
 *
 * @param metastoreClient metastore client used to look up the privilege set
 * @param userName user whose privileges are being resolved
 * @param hivePrivObject the object the privileges apply to
 * @param curRoles
 * current active roles for user
 * @param isAdmin
 * if user can run as admin user
 * @return the privileges the user holds on the object
 * @throws HiveAuthzPluginException if the metastore lookup fails
 */
static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreClient, String userName, HivePrivilegeObject hivePrivObject, List<String> curRoles, boolean isAdmin) throws HiveAuthzPluginException {
// get privileges for this user and its role on this object
PrincipalPrivilegeSet thrifPrivs = null;
try {
HiveObjectRef objectRef = AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
if (objectRef.getObjectType() == null) {
// no object type implies a global (server-level) privilege check
objectRef.setObjectType(HiveObjectType.GLOBAL);
}
thrifPrivs = metastoreClient.get_privilege_set(objectRef, userName, null);
} catch (HiveException | TException e) {
// TException also covers MetaException (its thrift-generated subclass);
// all three original catch clauses wrapped the error identically
throwGetPrivErr(e, hivePrivObject, userName);
}
// drop privileges that were granted through roles not currently active
filterPrivsByCurrentRoles(thrifPrivs, curRoles);
// convert to RequiredPrivileges
RequiredPrivileges privs = getRequiredPrivsFromThrift(thrifPrivs);
// add owner privilege if user is owner of the object
if (isOwner(metastoreClient, userName, curRoles, hivePrivObject)) {
privs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
}
if (isAdmin) {
privs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
}
return privs;
}
Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.
In class SQLStdHiveAccessController, method grantRole.
/**
 * Grants each of the given roles to each of the given principals.
 * Only an admin user, or a user holding the admin option on every listed
 * role, is allowed to perform the grant.
 *
 * @param hivePrincipals principals receiving the roles
 * @param roleNames roles to grant
 * @param grantOption whether the grantees may in turn grant the role
 * @param grantorPrinc principal recorded as the grantor
 * @throws HiveAuthzPluginException on metastore failure
 * @throws HiveAccessControlException if the current user may not grant roles
 */
@Override
public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roleNames, boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException, HiveAccessControlException {
// guard: reject callers without admin rights or the admin option on all roles
if (!(isUserAdmin() || doesUserHasAdminOption(roleNames))) {
throw new HiveAccessControlException("Current user : " + currentUserName + " is not" + " allowed to grant role. " + ADMIN_ONLY_MSG + " Otherwise, " + HAS_ADMIN_PRIV_MSG);
}
for (HivePrincipal grantee : hivePrincipals) {
for (String role : roleNames) {
try {
IMetaStoreClient client = metastoreClientFactory.getHiveMetastoreClient();
client.grant_role(role, grantee.getName(), AuthorizationUtils.getThriftPrincipalType(grantee.getType()), grantorPrinc.getName(), AuthorizationUtils.getThriftPrincipalType(grantorPrinc.getType()), grantOption);
} catch (MetaException e) {
throw SQLAuthorizationUtils.getPluginException("Error granting role", e);
} catch (Exception e) {
throw SQLAuthorizationUtils.getPluginException("Error granting roles for " + grantee.getName() + " to role " + role, e);
}
}
}
}
Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.
In class StorageBasedAuthorizationProvider, method authorize (table overload).
/**
 * Authorizes the requested read/write privileges on a table by checking
 * filesystem permissions on the table's storage location (and, for create or
 * drop style operations, on the owning database's location).
 *
 * @param table the table being accessed
 * @param readRequiredPriv privileges required for reading
 * @param writeRequiredPriv privileges required for writing
 * @throws HiveException on metastore/warehouse access failure
 * @throws AuthorizationException if the caller lacks the needed permissions
 */
@Override
public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, AuthorizationException {
// ensure the warehouse helper is initialized before any path lookups
try {
initWh();
} catch (MetaException ex) {
throw hiveException(ex);
}
// extract any drop privileges out of required privileges
DropPrivilegeExtractor privExtractor = new DropPrivilegeExtractor(readRequiredPriv, writeRequiredPriv);
readRequiredPriv = privExtractor.getReadReqPriv();
writeRequiredPriv = privExtractor.getWriteReqPriv();
// the database directory
// NOTE(review): line below reads hasDropPrivilege as a field while the check
// further down calls hasDropPrivilege() as a method — confirm both exist and agree
if (privExtractor.hasDropPrivilege || requireCreatePrivilege(readRequiredPriv) || requireCreatePrivilege(writeRequiredPriv)) {
authorize(hive_db.getDatabase(table.getDbName()), new Privilege[] {}, new Privilege[] { Privilege.ALTER_DATA });
}
Path path = table.getDataLocation();
// set to true
// dropping a managed table (or an external one when the config flag is on)
// additionally requires delete permission on the table directory
if (privExtractor.hasDropPrivilege() && (table.getTableType() != TableType.EXTERNAL_TABLE || getConf().getBoolean(HiveConf.ConfVars.METASTORE_AUTHORIZATION_EXTERNALTABLE_DROP_CHECK.varname, HiveConf.ConfVars.METASTORE_AUTHORIZATION_EXTERNALTABLE_DROP_CHECK.defaultBoolVal))) {
checkDeletePermission(path, getConf(), authenticator.getUserName());
}
// has the permissions on the table dir
if (path != null) {
authorize(path, readRequiredPriv, writeRequiredPriv);
}
}
Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.
In class StorageBasedAuthorizationProvider, method authorize (user-level overload).
/**
 * Authorizes user-level (non-object) privileges. There is no storage object
 * to check directly, so — following the approach of the now-deprecated
 * HdfsAuthorizationProvider in hcatalog — the requested privileges are
 * checked against the hive root warehouse directory. This path is exercised
 * e.g. by "show databases" on the hcat command line, which webhcat uses, so
 * allowing by warehouse-root permission is a reasonable user-level default.
 *
 * @param readRequiredPriv privileges required for reading
 * @param writeRequiredPriv privileges required for writing
 * @throws HiveException on warehouse initialization/access failure
 * @throws AuthorizationException if the caller lacks the needed permissions
 */
@Override
public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, AuthorizationException {
try {
initWh();
// check the requested privileges against the warehouse root directory
authorize(wh.getWhRoot(), readRequiredPriv, writeRequiredPriv);
} catch (MetaException ex) {
throw hiveException(ex);
}
}
Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.
In class RevokePrivAuthUtils, method authorizeAndGetRevokePrivileges.
/**
 * Verifies that the revoking user (userName) is the direct grantor of each of
 * the requested privileges for every principal, and returns the matching
 * metastore privilege objects to revoke. Privileges obtained indirectly via
 * roles are not considered.
 *
 * @param principals principals whose privileges are being revoked
 * @param hivePrivileges privilege types to revoke
 * @param hivePrivObject object the privileges apply to
 * @param grantOption grant-option flag from the revoke statement (currently unused here)
 * @param mClient metastore client used to list existing privileges
 * @param userName user performing the revoke; must be the recorded grantor
 * @return the metastore privilege objects matching the revoke request
 * @throws HiveAuthzPluginException on metastore failure
 * @throws HiveAccessControlException if any requested privilege was not
 *         granted by userName to the given principal on the object
 */
public static List<HiveObjectPrivilege> authorizeAndGetRevokePrivileges(List<HivePrincipal> principals, List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject, boolean grantOption, IMetaStoreClient mClient, String userName) throws HiveAuthzPluginException, HiveAccessControlException {
List<HiveObjectPrivilege> matchingPrivs = new ArrayList<>();
StringBuilder errMsg = new StringBuilder();
for (HivePrincipal principal : principals) {
// get metastore/thrift privilege object for this principal and object, not looking at
// privileges obtained indirectly via roles
List<HiveObjectPrivilege> msObjPrivs;
try {
msObjPrivs = mClient.list_privileges(principal.getName(), AuthorizationUtils.getThriftPrincipalType(principal.getType()), SQLAuthorizationUtils.getThriftHiveObjectRef(hivePrivObject));
} catch (TException e) {
// TException also covers MetaException (its thrift-generated subclass);
// both original catch clauses wrapped the error identically
throw new HiveAuthzPluginException(e);
}
// map privilege-type name -> privilege object, keeping only privileges
// that were granted directly by the current user
Map<String, HiveObjectPrivilege> priv2privObj = new HashMap<>();
for (HiveObjectPrivilege msObjPriv : msObjPrivs) {
PrivilegeGrantInfo grantInfo = msObjPriv.getGrantInfo();
// check if the grantor matches current user
if (grantInfo.getGrantor() != null && grantInfo.getGrantor().equals(userName) && grantInfo.getGrantorType() == PrincipalType.USER) {
// add to the map
priv2privObj.put(grantInfo.getPrivilege(), msObjPriv);
}
// else skip this one
}
// find the privileges that we are looking for
for (HivePrivilege hivePrivilege : hivePrivileges) {
HiveObjectPrivilege matchedPriv = priv2privObj.get(hivePrivilege.getName());
if (matchedPriv != null) {
matchingPrivs.add(matchedPriv);
} else {
errMsg.append("Cannot find privilege ").append(hivePrivilege).append(" for ").append(principal).append(" on ").append(hivePrivObject).append(" granted by ").append(userName).append(System.lineSeparator());
}
}
}
if (errMsg.length() != 0) {
throw new HiveAccessControlException(errMsg.toString());
}
return matchingPrivs;
}
Aggregations