Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException in project hive by apache.
The class MetadataOperation, method authorizeMetaGets:
protected void authorizeMetaGets(HiveOperationType opType, List<HivePrivilegeObject> inpObjs, String cmdString) throws HiveSQLException {
  SessionState ss = SessionState.get();
  // Build the authorization context from the current session: client IP address,
  // any proxy-forwarded addresses, and the command string being authorized.
  HiveAuthzContext.Builder ctxBuilder = new HiveAuthzContext.Builder();
  ctxBuilder.setUserIpAddress(ss.getUserIpAddress());
  ctxBuilder.setForwardedAddresses(ss.getForwardedAddresses());
  ctxBuilder.setCommandString(cmdString);
  try {
    // Metadata reads have no output objects, so outputs are passed as null.
    ss.getAuthorizerV2().checkPrivileges(opType, inpObjs, null, ctxBuilder.build());
  } catch (HiveAuthzPluginException | HiveAccessControlException e) {
    // Surface both plugin failures and access denials as a HiveSQLException.
    throw new HiveSQLException(e.getMessage(), e);
  }
}
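For context, a hypothetical caller of authorizeMetaGets is sketched below, modeled on how a HiveServer2 metadata operation (such as a "get tables" request) might build its HivePrivilegeObject inputs. The method name authorizeGetTables, the variable names, and the use of HiveOperationType.GET_TABLES and HivePrivilegeObjectType.DATABASE are assumptions for illustration, not taken from the snippet above; the sketch assumes the same class and imports as that snippet.

private void authorizeGetTables(List<String> matchingDbNames, String cmdStr) throws HiveSQLException {
  // Hypothetical sketch: one DATABASE-level privilege object per database
  // whose tables would be listed by this metadata request.
  List<HivePrivilegeObject> privObjs = new ArrayList<>();
  for (String dbName : matchingDbNames) {
    privObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null));
  }
  // Delegates to authorizeMetaGets above; an access denial from the authorizer
  // plugin surfaces here as a HiveSQLException.
  authorizeMetaGets(HiveOperationType.GET_TABLES, privObjs, cmdStr);
}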
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException in project drill by apache.
The class HiveAuthorizationHelper, method authorize:
/* Helper method to check privileges */
private void authorize(final HiveOperationType hiveOpType, final List<HivePrivilegeObject> toRead, final List<HivePrivilegeObject> toWrite, final String cmd) throws HiveAccessControlException {
  try {
    HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
    authzContextBuilder.setUserIpAddress("Not available");
    authzContextBuilder.setCommandString(cmd);
    authorizerV2.checkPrivileges(hiveOpType, toRead, toWrite, authzContextBuilder.build());
  } catch (final HiveAccessControlException e) {
    throw e;
  } catch (final Exception e) {
    throw new DrillRuntimeException("Failed to use the Hive authorization components: " + e.getMessage(), e);
  }
}
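As a usage sketch, the hypothetical wrapper below shows how the private authorize helper might be exposed for a single-table read check. The method name authorizeReadTable, the choice of HiveOperationType.QUERY and HivePrivilegeObjectType.TABLE_OR_VIEW, and the command label are assumptions for illustration, not taken from the snippet above; the sketch assumes the same class and imports as that snippet.

// Hypothetical wrapper around the private authorize helper above.
public void authorizeReadTable(final String dbName, final String tableName) throws HiveAccessControlException {
  HivePrivilegeObject tableToRead =
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
  // A read has no output objects, so toWrite is passed as null (the authorizer
  // is given a null list in the Hive example above as well).
  authorize(HiveOperationType.QUERY, Collections.singletonList(tableToRead), null, "READ TABLE " + dbName + "." + tableName);
}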