Example 96 with UserGroupInformation

Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

From class TestWebHdfsUrl, method testSecureProxyAuthParamsInUrl.

@Test(timeout = 60000)
public void testSecureProxyAuthParamsInUrl() throws IOException {
    Configuration conf = new Configuration();
    // fake turning on security so the API thinks it should use tokens
    SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    ugi.setAuthenticationMethod(KERBEROS);
    ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
    UserGroupInformation.setLoginUser(ugi);
    WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");
    String tokenString = webhdfs.getDelegationToken().encodeToUrlString();
    // send real+effective
    URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
    checkQueryParams(new String[] {
        GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
        new UserParam(ugi.getRealUser().getShortUserName()).toString(),
        new DoAsParam(ugi.getShortUserName()).toString()
    }, getTokenUrl);
    // send real+effective
    URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(new String[] {
        PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
        new UserParam(ugi.getRealUser().getShortUserName()).toString(),
        new DoAsParam(ugi.getShortUserName()).toString(),
        new TokenArgumentParam(tokenString).toString()
    }, renewTokenUrl);
    // send token
    URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(new String[] {
        PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
        new UserParam(ugi.getRealUser().getShortUserName()).toString(),
        new DoAsParam(ugi.getShortUserName()).toString(),
        new TokenArgumentParam(tokenString).toString()
    }, cancelTokenUrl);
    // send token
    URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(new String[] {
        GetOpParam.Op.GETFILESTATUS.toQueryString(),
        new DelegationParam(tokenString).toString()
    }, fileStatusUrl);
    // wipe out internal token to simulate auth always required
    webhdfs.setDelegationToken(null);
    // send real+effective
    cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath, new TokenArgumentParam(tokenString));
    checkQueryParams(new String[] {
        PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
        new UserParam(ugi.getRealUser().getShortUserName()).toString(),
        new DoAsParam(ugi.getShortUserName()).toString(),
        new TokenArgumentParam(tokenString).toString()
    }, cancelTokenUrl);
    // send real+effective
    fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(new String[] {
        GetOpParam.Op.GETFILESTATUS.toQueryString(),
        new DelegationParam(tokenString).toString()
    }, fileStatusUrl);
}
Also used: Path (org.apache.hadoop.fs.Path), UserParam (org.apache.hadoop.hdfs.web.resources.UserParam), Configuration (org.apache.hadoop.conf.Configuration), DoAsParam (org.apache.hadoop.hdfs.web.resources.DoAsParam), TokenArgumentParam (org.apache.hadoop.hdfs.web.resources.TokenArgumentParam), DelegationParam (org.apache.hadoop.hdfs.web.resources.DelegationParam), URL (java.net.URL), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
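The proxy-user flow this test verifies is worth seeing in isolation: a Kerberos-authenticated "real" user wraps an "effective" user, and calls made inside doAs carry both identities (user.name for the real user, doas for the effective one). A minimal sketch, with illustrative user names not taken from the test:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
    public static void main(String[] args) throws Exception {
        // The authenticated principal is the "real" user (illustrative name)...
        UserGroupInformation realUser = UserGroupInformation.createRemoteUser("service-user");
        // ...and it impersonates the "effective" user.
        UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser("end-user", realUser);
        proxyUgi.doAs((PrivilegedExceptionAction<Void>) () -> {
            // WebHDFS/RPC calls issued here identify as end-user proxied by
            // service-user, matching the user.name/doas pair the test checks.
            System.out.println("Current user: " + UserGroupInformation.getCurrentUser());
            return null;
        });
    }
}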

Example 97 with UserGroupInformation

Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

From class TestWebHdfsUrl, method testBatchedListingUrl.

@Test(timeout = 60000)
public void testBatchedListingUrl() throws Exception {
    Configuration conf = new Configuration();
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    UserGroupInformation.setLoginUser(ugi);
    WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/p1");
    final StartAfterParam startAfter = new StartAfterParam("last");
    URL url = webhdfs.toUrl(GetOpParam.Op.LISTSTATUS_BATCH, fsPath, startAfter);
    checkQueryParams(new String[] {
        GetOpParam.Op.LISTSTATUS_BATCH.toQueryString(),
        new UserParam(ugi.getShortUserName()).toString(),
        StartAfterParam.NAME + "=" + "last"
    }, url);
}
Also used: Path (org.apache.hadoop.fs.Path), UserParam (org.apache.hadoop.hdfs.web.resources.UserParam), Configuration (org.apache.hadoop.conf.Configuration), StartAfterParam (org.apache.hadoop.hdfs.web.resources.StartAfterParam), URL (java.net.URL), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
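In practice a client rarely builds these URLs by hand: batched listing is normally driven through the public FileSystem iterator API, with WebHdfsFileSystem issuing one LISTSTATUS_BATCH request per page and passing the last name of the previous page as startAfter. A rough sketch under that assumption, with a placeholder endpoint:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class BatchedListingSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder endpoint; point this at a real namenode HTTP port to run.
        FileSystem fs = FileSystem.get(URI.create("webhdfs://namenode:9870"), conf);
        RemoteIterator<FileStatus> it = fs.listStatusIterator(new Path("/p1"));
        while (it.hasNext()) {
            // Each new page behind the iterator is one LISTSTATUS_BATCH round trip.
            System.out.println(it.next().getPath());
        }
    }
}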

Example 98 with UserGroupInformation

Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

From class TestWebHdfsUrl, method testSimpleAuthParamsInUrl.

@Test(timeout = 60000)
public void testSimpleAuthParamsInUrl() throws IOException {
    Configuration conf = new Configuration();
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    UserGroupInformation.setLoginUser(ugi);
    WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");
    // send user only; no token under simple auth
    URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
    checkQueryParams(new String[] {
        GetOpParam.Op.GETFILESTATUS.toQueryString(),
        new UserParam(ugi.getShortUserName()).toString()
    }, fileStatusUrl);
}
Also used: Path (org.apache.hadoop.fs.Path), UserParam (org.apache.hadoop.hdfs.web.resources.UserParam), Configuration (org.apache.hadoop.conf.Configuration), URL (java.net.URL), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
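For contrast with the secure proxy case in Example 96: under simple auth the only identity material in the URL is the user.name parameter; there is no delegation token and no doas. The parameter string comes straight from UserParam, whose toString renders the query fragment the test compares against, as this small sketch shows:

import org.apache.hadoop.hdfs.web.resources.UserParam;

public class UserParamSketch {
    public static void main(String[] args) {
        // A WebHDFS Param renders itself as "name=value"; for UserParam the
        // name is "user.name", so this prints: user.name=test-user
        System.out.println(new UserParam("test-user"));
    }
}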

Example 99 with UserGroupInformation

Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

From class YarnChild, method main.

public static void main(String[] args) throws Throwable {
    Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
    LOG.debug("Child starting");
    final JobConf job = new JobConf(MRJobConfig.JOB_CONF_FILE);
    // Initializing with our JobConf avoids loading the configuration twice
    Limits.init(job);
    UserGroupInformation.setConfiguration(job);
    // MAPREDUCE-6565: need to set configuration for SecurityUtil.
    SecurityUtil.setConfiguration(job);
    String host = args[0];
    int port = Integer.parseInt(args[1]);
    final InetSocketAddress address = NetUtils.createSocketAddrForHost(host, port);
    final TaskAttemptID firstTaskid = TaskAttemptID.forName(args[2]);
    long jvmIdLong = Long.parseLong(args[3]);
    JVMId jvmId = new JVMId(firstTaskid.getJobID(), firstTaskid.getTaskType() == TaskType.MAP, jvmIdLong);
    CallerContext.setCurrent(new CallerContext.Builder("mr_" + firstTaskid.toString()).build());
    // initialize metrics
    DefaultMetricsSystem.initialize(StringUtils.camelize(firstTaskid.getTaskType().name()) + "Task");
    // The security framework has already loaded the tokens into the current UGI
    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
    LOG.info("Executing with tokens:");
    for (Token<?> token : credentials.getAllTokens()) {
        LOG.info(token);
    }
    // Create TaskUmbilicalProtocol as actual task owner.
    UserGroupInformation taskOwner = UserGroupInformation.createRemoteUser(firstTaskid.getJobID().toString());
    Token<JobTokenIdentifier> jt = TokenCache.getJobToken(credentials);
    SecurityUtil.setTokenService(jt, address);
    taskOwner.addToken(jt);
    final TaskUmbilicalProtocol umbilical = taskOwner.doAs(new PrivilegedExceptionAction<TaskUmbilicalProtocol>() {

        @Override
        public TaskUmbilicalProtocol run() throws Exception {
            return (TaskUmbilicalProtocol) RPC.getProxy(TaskUmbilicalProtocol.class, TaskUmbilicalProtocol.versionID, address, job);
        }
    });
    // report non-pid to application master
    JvmContext context = new JvmContext(jvmId, "-1000");
    LOG.debug("PID: " + System.getenv().get("JVM_PID"));
    Task task = null;
    UserGroupInformation childUGI = null;
    ScheduledExecutorService logSyncer = null;
    try {
        int idleLoopCount = 0;
        JvmTask myTask = null;
        // poll for new task
        for (int idle = 0; null == myTask; ++idle) {
            long sleepTimeMilliSecs = Math.min(idle * 500, 1500);
            LOG.info("Sleeping for " + sleepTimeMilliSecs + "ms before retrying again. Got null now.");
            MILLISECONDS.sleep(sleepTimeMilliSecs);
            myTask = umbilical.getTask(context);
        }
        if (myTask.shouldDie()) {
            return;
        }
        task = myTask.getTask();
        YarnChild.taskid = task.getTaskID();
        // Create the job-conf and set credentials
        configureTask(job, task, credentials, jt);
        // log the system properties
        String systemPropsToLog = MRApps.getSystemPropertiesToLog(job);
        if (systemPropsToLog != null) {
            LOG.info(systemPropsToLog);
        }
        // Initiate Java VM metrics
        JvmMetrics.initSingleton(jvmId.toString(), job.getSessionId());
        childUGI = UserGroupInformation.createRemoteUser(System.getenv(ApplicationConstants.Environment.USER.toString()));
        // Add tokens to new user so that it may execute its task correctly.
        childUGI.addCredentials(credentials);
        // set job classloader if configured before invoking the task
        MRApps.setJobClassLoader(job);
        logSyncer = TaskLog.createLogSyncer();
        // Create a final reference to the task for the doAs block
        final Task taskFinal = task;
        childUGI.doAs(new PrivilegedExceptionAction<Object>() {

            @Override
            public Object run() throws Exception {
                // use job-specified working directory
                setEncryptedSpillKeyIfRequired(taskFinal);
                FileSystem.get(job).setWorkingDirectory(job.getWorkingDirectory());
                // run the task
                taskFinal.run(job, umbilical);
                return null;
            }
        });
    } catch (FSError e) {
        LOG.fatal("FSError from child", e);
        if (!ShutdownHookManager.get().isShutdownInProgress()) {
            umbilical.fsError(taskid, e.getMessage());
        }
    } catch (Exception exception) {
        LOG.warn("Exception running child : " + StringUtils.stringifyException(exception));
        try {
            if (task != null) {
                // do cleanup for the task
                if (childUGI == null) {
                    // no need to jump into the doAs block
                    task.taskCleanup(umbilical);
                } else {
                    final Task taskFinal = task;
                    childUGI.doAs(new PrivilegedExceptionAction<Object>() {

                        @Override
                        public Object run() throws Exception {
                            taskFinal.taskCleanup(umbilical);
                            return null;
                        }
                    });
                }
            }
        } catch (Exception e) {
            LOG.info("Exception cleaning up: " + StringUtils.stringifyException(e));
        }
        // Report back any failures, for diagnostic purposes
        if (taskid != null) {
            if (!ShutdownHookManager.get().isShutdownInProgress()) {
                umbilical.fatalError(taskid, StringUtils.stringifyException(exception));
            }
        }
    } catch (Throwable throwable) {
        LOG.fatal("Error running child : " + StringUtils.stringifyException(throwable));
        if (taskid != null) {
            if (!ShutdownHookManager.get().isShutdownInProgress()) {
                Throwable tCause = throwable.getCause();
                String cause = tCause == null ? throwable.getMessage() : StringUtils.stringifyException(tCause);
                umbilical.fatalError(taskid, cause);
            }
        }
    } finally {
        RPC.stopProxy(umbilical);
        DefaultMetricsSystem.shutdown();
        TaskLog.syncLogsShutdown(logSyncer);
    }
}
Also used: YarnUncaughtExceptionHandler (org.apache.hadoop.yarn.YarnUncaughtExceptionHandler), InetSocketAddress (java.net.InetSocketAddress), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService), FSError (org.apache.hadoop.fs.FSError), JobTokenIdentifier (org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier), PrivilegedExceptionAction (java.security.PrivilegedExceptionAction), IOException (java.io.IOException), DiskErrorException (org.apache.hadoop.util.DiskChecker.DiskErrorException), Credentials (org.apache.hadoop.security.Credentials)
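Two UGI idioms carry this whole method: the job token is bound to a service address and added to a UGI before the RPC proxy is created inside doAs, and the task later runs under a second doAs as the container user. A stripped-down sketch of the first idiom; the protocol class and user name are stand-ins, not the real TaskUmbilicalProtocol wiring:

import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

public class TokenRpcSketch {
    // "protocol" is a stand-in for a real protocol interface such as
    // TaskUmbilicalProtocol; RPC.getProxy authenticates with the UGI's tokens.
    static <T> T connect(Class<T> protocol, long version, InetSocketAddress addr,
            Token<?> token, Configuration conf) throws Exception {
        UserGroupInformation owner = UserGroupInformation.createRemoteUser("job-id");
        SecurityUtil.setTokenService(token, addr); // the token must name its service
        owner.addToken(token); // attach before doAs so the RPC layer can find it
        return owner.doAs((PrivilegedExceptionAction<T>) () ->
            RPC.getProxy(protocol, version, addr, conf));
    }
}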

Example 100 with UserGroupInformation

Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

From class ViewFileSystemBaseTest, method testCheckOwnerWithFileStatus.

@Test
public void testCheckOwnerWithFileStatus() throws IOException, InterruptedException {
    final UserGroupInformation userUgi = UserGroupInformation.createUserForTesting("user@HADOOP.COM", new String[] { "hadoop" });
    userUgi.doAs(new PrivilegedExceptionAction<Object>() {

        @Override
        public Object run() throws IOException {
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            String doAsUserName = ugi.getUserName();
            assertEquals("user@HADOOP.COM", doAsUserName);
            FileSystem vfs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
            FileStatus stat = vfs.getFileStatus(new Path("/internalDir"));
            assertEquals(userUgi.getShortUserName(), stat.getOwner());
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus), FileSystem (org.apache.hadoop.fs.FileSystem), IOException (java.io.IOException), CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
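createUserForTesting is the standard way to fabricate an identity in unit tests without a real Kerberos login: it builds a UGI with the given user and group names, and anything run under its doAs sees that identity as the current user, which is what the assertEquals on doAsUserName above relies on. A minimal sketch with illustrative names:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class TestUserSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative principal and group names.
        UserGroupInformation alice = UserGroupInformation.createUserForTesting(
                "alice@EXAMPLE.COM", new String[] { "testers" });
        alice.doAs((PrivilegedExceptionAction<Void>) () -> {
            // Inside doAs, the fabricated user is the current user.
            System.out.println(UserGroupInformation.getCurrentUser().getUserName());
            return null;
        });
    }
}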

Aggregations

Types most frequently used together with UserGroupInformation across these examples (usage counts):

UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 621
IOException (java.io.IOException): 274
Test (org.junit.Test): 220
Configuration (org.apache.hadoop.conf.Configuration): 138
Path (org.apache.hadoop.fs.Path): 91
FileSystem (org.apache.hadoop.fs.FileSystem): 59
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 57
AccessControlException (org.apache.hadoop.security.AccessControlException): 54
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 50
Path (javax.ws.rs.Path): 47
Produces (javax.ws.rs.Produces): 45
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 45
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 43
AuthorizationException (org.apache.hadoop.security.authorize.AuthorizationException): 39
Token (org.apache.hadoop.security.token.Token): 39
ArrayList (java.util.ArrayList): 38
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 36
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 36
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 35
Text (org.apache.hadoop.io.Text): 34