Example usage of org.apache.hadoop.security.UserGroupInformation in the Apache Hive project: class FileUtils, method checkFileAccessWithImpersonation.
/**
 * Perform a check to determine if the user is able to access the file passed in.
 * If the user name passed in is different from the current user, this method will
 * attempt to impersonate that user to do the check; the current user should be
 * able to create proxy users in this case.
 * @param fs FileSystem of the path to check
 * @param stat FileStatus representing the file
 * @param action FsAction that will be checked
 * @param user User name of the user that will be checked for access. If the user name
 * is null or the same as the current user, no user impersonation will be done
 * and the check will be done as the current user. Otherwise the file access
 * check will be performed within a doAs() block to use the access privileges
 * of this user. In this case the user must be configured to impersonate other
 * users, otherwise this check will fail with error.
 * @throws IOException
 * @throws AccessControlException
 * @throws InterruptedException
 * @throws Exception
 */
public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat, final FsAction action, final String user) throws IOException, AccessControlException, InterruptedException, Exception {
    UserGroupInformation ugi = Utils.getUGI();
    String currentUser = ugi.getShortUserName();
    if (user == null || currentUser.equals(user)) {
        // No need to impersonate user, do the checks as the currently configured user.
        ShimLoader.getHadoopShims().checkFileAccess(fs, stat, action);
        return;
    }
    // Otherwise, try user impersonation. Current user must be configured to do user impersonation.
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
    try {
        proxyUser.doAs(new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
                // Re-open the FileSystem inside the doAs() block so the access check
                // runs against an FS instance bound to the proxy user's credentials
                // (FileSystem instances are cached per-UGI).
                FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf());
                ShimLoader.getHadoopShims().checkFileAccess(fsAsUser, stat, action);
                return null;
            }
        });
    } finally {
        // Release the FileSystem instances cached for the proxy UGI so they do not leak.
        FileSystem.closeAllForUGI(proxyUser);
    }
}
Example usage of org.apache.hadoop.security.UserGroupInformation in the Apache Hive project: class PostExecutePrinter, method run.
/**
 * Post-execution hook entry point. Pulls the query inputs, outputs, lineage
 * information, and caller UGI out of the hook context and delegates to the
 * detailed {@code run} overload.
 *
 * @param hookContext context describing the just-executed query
 * @throws Exception propagated from the delegate
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
    final Set<ReadEntity> readEntities = hookContext.getInputs();
    final Set<WriteEntity> writeEntities = hookContext.getOutputs();
    final LineageInfo lineage = hookContext.getLinfo();
    final UserGroupInformation callerUgi = hookContext.getUgi();
    run(hookContext.getQueryState(), readEntities, writeEntities, lineage, callerUgi);
}
Example usage of org.apache.hadoop.security.UserGroupInformation in the Apache Hive project: class PreExecutePrinter, method run.
/**
 * Pre-execution hook entry point. Notifies an active {@link FetchConverter}
 * whether the upcoming statement is a real (non-EXPLAIN) query, then delegates
 * to the detailed {@code run} overload with data pulled from the hook context.
 *
 * @param hookContext context describing the query about to execute
 * @throws Exception propagated from the delegate
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);
    final SessionState sessionState = SessionState.get();
    final QueryState queryState = hookContext.getQueryState();
    if (sessionState != null && sessionState.out instanceof FetchConverter) {
        // Tell the converter whether real query output will follow (EXPLAIN does not count).
        final boolean isRealQuery = queryState.getHiveOperation() == HiveOperation.QUERY
            && !hookContext.getQueryPlan().isForExplain();
        ((FetchConverter) sessionState.out).foundQuery(isRealQuery);
    }
    final Set<ReadEntity> readEntities = hookContext.getInputs();
    final Set<WriteEntity> writeEntities = hookContext.getOutputs();
    final UserGroupInformation callerUgi = hookContext.getUgi();
    run(queryState, readEntities, writeEntities, callerUgi);
}
Example usage of org.apache.hadoop.security.UserGroupInformation in the Apache Hive project: class ATSHook, method createOrGetDomain.
/**
 * Returns the ATS timeline domain id to tag entities with, creating the domain
 * asynchronously if it does not exist yet.
 *
 * With an active session the domain id is derived from the session id and its
 * reader/writer ACLs are computed from the Tez AM view/modify ACL settings;
 * the id is cached on the SessionState so the domain is only created once per
 * session. Without a session (unlikely) a shared default domain restricted to
 * the current user is created once.
 *
 * Fix over the original: the inner {@code getATSDomainId() == null} re-check was
 * redundant (the enclosing branch already established it) and has been removed;
 * {@code SessionState.get()} is now read once instead of on every access.
 *
 * @param hookContext context supplying session id, user name, UGI and conf
 * @return the domain id to use for ATS entities
 * @throws Exception on failure to resolve the login/current user
 */
private String createOrGetDomain(final HookContext hookContext) throws Exception {
    final String domainId;
    String domainReaders = null;
    String domainWriters = null;
    boolean create = false;
    final SessionState ss = SessionState.get();
    if (ss != null) {
        if (ss.getATSDomainId() == null) {
            // First query of this session: derive a session-scoped domain id
            // and compute its ACLs.
            domainId = ATS_DOMAIN_PREFIX + hookContext.getSessionId();
            String requestuser = hookContext.getUserName();
            if (requestuser == null) {
                requestuser = hookContext.getUgi().getShortUserName();
            }
            boolean addHs2User = HiveConf.getBoolVar(hookContext.getConf(), ConfVars.HIVETEZHS2USERACCESS);
            UserGroupInformation loginUserUgi = UserGroupInformation.getLoginUser();
            String loginUser = loginUserUgi == null ? null : loginUserUgi.getShortUserName();
            // In Tez, TEZ_AM_VIEW_ACLS/TEZ_AM_MODIFY_ACLS is used as the base for Tez ATS ACLS,
            // so if exists, honor it. So we get the same ACLS for Tez ATS entries and
            // Hive entries
            domainReaders = Utilities.getAclStringWithHiveModification(hookContext.getConf(), TezConfiguration.TEZ_AM_VIEW_ACLS, addHs2User, requestuser, loginUser);
            domainWriters = Utilities.getAclStringWithHiveModification(hookContext.getConf(), TezConfiguration.TEZ_AM_MODIFY_ACLS, addHs2User, requestuser, loginUser);
            ss.setATSDomainId(domainId);
            create = true;
        } else {
            // Session already has a domain; reuse it.
            domainId = ss.getATSDomainId();
        }
    } else {
        // SessionState is null, this is unlikely to happen, just in case:
        // fall back to a shared default domain visible only to the current user.
        if (!defaultATSDomainCreated) {
            domainReaders = domainWriters = UserGroupInformation.getCurrentUser().getShortUserName();
            defaultATSDomainCreated = true;
            create = true;
        }
        domainId = DEFAULT_ATS_DOMAIN;
    }
    if (create) {
        final String readers = domainReaders;
        final String writers = domainWriters;
        // executor is single thread, so we can guarantee
        // domain created before any ATS entries
        executor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    createTimelineDomain(domainId, readers, writers);
                } catch (Exception e) {
                    LOG.warn("Failed to create ATS domain " + domainId, e);
                }
            }
        });
    }
    return domainId;
}
Example usage of org.apache.hadoop.security.UserGroupInformation in the Apache Hive project: class TezTask, method setAccessControlsForCurrentUser.
/**
 * Applies per-DAG view/modify access controls for the current session user.
 *
 * The ACL strings are derived from the Tez AM view/modify ACL configuration
 * (honored as the base so Tez ATS entries and Hive entries share the same
 * ACLs), optionally widened to include the HS2 service user.
 *
 * @param dag     the Tez DAG to set access controls on
 * @param queryId query id, used only for debug logging
 * @param conf    configuration supplying the base ACLs and HS2-access flag
 * @throws IOException if the login user cannot be resolved
 */
private static void setAccessControlsForCurrentUser(DAG dag, String queryId, Configuration conf) throws IOException {
    final String sessionUser = SessionState.getUserFromAuthenticator();
    final UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
    final String loginUserName = (loginUgi == null) ? null : loginUgi.getShortUserName();
    final boolean includeHs2User = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVETEZHS2USERACCESS);
    // Temporarily re-using the TEZ AM View ACLs property for individual dag access control.
    // Hive may want to setup it's own parameters if it wants to control per dag access.
    // Setting the tez-property per dag should work for now.
    final String viewAcl = Utilities.getAclStringWithHiveModification(conf, TezConfiguration.TEZ_AM_VIEW_ACLS, includeHs2User, sessionUser, loginUserName);
    final String modifyAcl = Utilities.getAclStringWithHiveModification(conf, TezConfiguration.TEZ_AM_MODIFY_ACLS, includeHs2User, sessionUser, loginUserName);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Setting Tez DAG access for queryId={} with viewAclString={}, modifyStr={}", queryId, viewAcl, modifyAcl);
    }
    // Grant the computed permissions to the current user on this DAG.
    dag.setAccessControls(new DAGAccessControls(viewAcl, modifyAcl));
}
Aggregations