Use of org.apache.hadoop.hive.ql.metadata.HiveException in the Apache Hive project.
From class ResourceMaps, method setAuthorizerV2Config:
/**
 * Applies the V2 authorizer's configuration policy to the session conf,
 * forcing the metastore filter hook to the authorization-aware
 * implementation, and eagerly initializes the metastore client.
 * Idempotent: a marker stored in the conf prevents re-processing.
 *
 * @throws HiveException if initializing the Hive metastore client fails
 */
private void setAuthorizerV2Config() throws HiveException {
// avoid processing the same config multiple times, check marker
if (sessionConf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) {
return;
}
// warn if the user configured a custom filter hook, since it is about to be overridden below
String metastoreHook = sessionConf.get(ConfVars.METASTORE_FILTER_HOOK.name());
if (!ConfVars.METASTORE_FILTER_HOOK.getDefaultValue().equals(metastoreHook) && !AuthorizationMetaStoreFilterHook.class.getName().equals(metastoreHook)) {
LOG.warn(ConfVars.METASTORE_FILTER_HOOK.name() + " will be ignored, since hive.security.authorization.manager" + " is set to instance of HiveAuthorizerFactory.");
}
sessionConf.setVar(ConfVars.METASTORE_FILTER_HOOK, AuthorizationMetaStoreFilterHook.class.getName());
authorizerV2.applyAuthorizationConfigPolicy(sessionConf);
// update config in Hive thread local as well and init the metastore client
try {
Hive.get(sessionConf).getMSC();
} catch (Exception e) {
// wrap and rethrow eagerly; a lazily-surfaced failure here could otherwise
// manifest later as a ClassNotFoundException
throw new HiveException(e.getMessage(), e);
}
// set a marker that this conf has been processed.
sessionConf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
}
Use of org.apache.hadoop.hive.ql.metadata.HiveException in the Apache Hive project.
From class HiveV1Authorizer, method getCurrentRoleNames:
/**
 * Returns the names of the roles granted to the current session user.
 * The user is resolved from the SessionState, falling back to the
 * authenticator when the session itself carries no user name.
 *
 * @return names of all roles the resolved user belongs to
 * @throws HiveAuthzPluginException if no user can be resolved or the
 *         underlying metastore call fails
 */
@Override
public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
String user = SessionState.get().getUserName();
if (user == null) {
user = SessionState.getUserFromAuthenticator();
}
if (user == null) {
throw new HiveAuthzPluginException("Cannot resolve current user name");
}
try {
List<String> names = new ArrayList<>();
for (Role role : Hive.getWithFastCheck(this.conf).listRoles(user, PrincipalType.USER)) {
names.add(role.getRoleName());
}
return names;
} catch (HiveException e) {
throw new HiveAuthzPluginException(e);
}
}
Use of org.apache.hadoop.hive.ql.metadata.HiveException in the Apache Hive project.
From class HiveV1Authorizer, method dropRole:
/**
 * Drops the given role via the metastore.
 *
 * @param roleName name of the role to drop
 * @throws HiveAuthzPluginException if the metastore operation fails
 */
@Override
public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
try {
Hive.getWithFastCheck(this.conf).dropRole(roleName);
} catch (HiveException e) {
throw new HiveAuthzPluginException(e);
}
}
Use of org.apache.hadoop.hive.ql.metadata.HiveException in the Apache Hive project.
From class HiveV1Authorizer, method createRole:
/**
 * Creates a new role via the metastore.
 *
 * @param roleName     name of the role to create
 * @param adminGrantor principal granted admin on the role; may be null
 * @throws HiveAuthzPluginException if the metastore operation fails
 */
@Override
public void createRole(String roleName, HivePrincipal adminGrantor) throws HiveAuthzPluginException, HiveAccessControlException {
String grantorName = null;
if (adminGrantor != null) {
grantorName = adminGrantor.getName();
}
try {
Hive.getWithFastCheck(this.conf).createRole(roleName, grantorName);
} catch (HiveException e) {
throw new HiveAuthzPluginException(e);
}
}
Use of org.apache.hadoop.hive.ql.metadata.HiveException in the Apache Hive project.
From class DDLTask, method showLocksNewFormat:
/**
 * Dumps the locks known to the DB (ACID) lock manager, in the new format,
 * into the result file of the SHOW LOCKS descriptor.
 *
 * @param showLocks descriptor carrying the db/table/partition filter and
 *                  the result-file path
 * @param lm        the session lock manager; must be a DbLockManager
 * @return 0 on success, 1 if writing the result file failed
 * @throws HiveException on unexpected errors while dumping the lock info
 */
private int showLocksNewFormat(ShowLocksDesc showLocks, HiveLockManager lm) throws HiveException {
if (!(lm instanceof DbLockManager)) {
throw new RuntimeException("New lock format only supported with db lock manager.");
}
DbLockManager lockMgr = (DbLockManager) lm;
String dbName = showLocks.getDbName();
String tblName = showLocks.getTableName();
Map<String, String> partSpec = showLocks.getPartSpec();
// a table without an explicit db defaults to the session's current database
if (dbName == null && tblName != null) {
dbName = SessionState.get().getCurrentDatabase();
}
ShowLocksRequest rqst = new ShowLocksRequest();
rqst.setDbname(dbName);
rqst.setTablename(tblName);
if (partSpec != null) {
// Iterate over entries rather than calling partSpec.remove() inside a
// keySet() loop: removing during iteration risks a
// ConcurrentModificationException and destructively emptied the caller's map.
List<String> keyList = new ArrayList<String>(partSpec.size());
List<String> valList = new ArrayList<String>(partSpec.size());
for (Map.Entry<String, String> part : partSpec.entrySet()) {
keyList.add(part.getKey());
valList.add(part.getValue());
}
rqst.setPartname(FileUtils.makePartName(keyList, valList));
}
ShowLocksResponse rsp = lockMgr.getLocks(rqst);
// write the results in the file
DataOutputStream os = getOutputStream(showLocks.getResFile());
try {
dumpLockInfo(os, rsp);
} catch (IOException e) {
// FileNotFoundException is an IOException; both were handled identically
LOG.warn("show function: " + stringifyException(e));
return 1;
} catch (Exception e) {
// keep the original cause so the full stack trace survives
throw new HiveException(e.toString(), e);
} finally {
IOUtils.closeStream(os);
}
return 0;
}
Aggregations