
Example 66 with UserGroupInformation

use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

the class RoundRobinUserResolver method parseUserList.

/**
   * The users file is expected to contain one user per line.
   * Each line is of the form <username>[,group]*
   * <br> Group names are ignored (they are not parsed at all).
   */
private List<UserGroupInformation> parseUserList(URI userUri, Configuration conf) throws IOException {
    if (null == userUri) {
        return Collections.emptyList();
    }
    final Path userloc = new Path(userUri.toString());
    final Text rawUgi = new Text();
    final FileSystem fs = userloc.getFileSystem(conf);
    final ArrayList<UserGroupInformation> ugiList = new ArrayList<UserGroupInformation>();
    LineReader in = null;
    try {
        in = new LineReader(fs.open(userloc));
        while (in.readLine(rawUgi) > 0) {
            //line is of the form username[,group]*
            if (rawUgi.toString().trim().equals("")) {
                //Continue on empty line
                continue;
            }
            // e is end position of user name in this line
            int e = rawUgi.find(",");
            if (e == 0) {
                throw new IOException("Missing username: " + rawUgi);
            }
            if (e == -1) {
                e = rawUgi.getLength();
            }
            final String username = Text.decode(rawUgi.getBytes(), 0, e).trim();
            UserGroupInformation ugi = null;
            try {
                ugi = UserGroupInformation.createProxyUser(username, UserGroupInformation.getLoginUser());
            } catch (IOException ioe) {
                LOG.error("Error while creating a proxy user ", ioe);
            }
            if (ugi != null) {
                ugiList.add(ugi);
            }
        // No need to parse groups, even if they exist. Go to next line
        }
    } finally {
        if (in != null) {
            in.close();
        }
    }
    return ugiList;
}
Also used: Path (org.apache.hadoop.fs.Path), FileSystem (org.apache.hadoop.fs.FileSystem), LineReader (org.apache.hadoop.util.LineReader), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), IOException (java.io.IOException), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
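
For context, a minimal standalone sketch of what the parser does per line: trim the line at the first comma to get the username, then wrap it in a proxy UGI backed by the login user's credentials. The file contents and class name below are illustrative, not part of Gridmix:

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical users file, one user per line; groups after the comma are ignored:
//   alice,groupA,groupB
//   bob
public class ProxyUserSketch {
    public static void main(String[] args) throws IOException {
        String line = "alice,groupA,groupB";
        int comma = line.indexOf(',');
        String username = (comma == -1) ? line.trim() : line.substring(0, comma).trim();
        // Create a proxy UGI for the parsed username, authenticated as the login user.
        UserGroupInformation ugi =
            UserGroupInformation.createProxyUser(username, UserGroupInformation.getLoginUser());
        System.out.println("Proxy UGI: " + ugi.getUserName());
    }
}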

Example 67 with UserGroupInformation

use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

the class RoundRobinUserResolver method getTargetUgi.

@Override
public synchronized UserGroupInformation getTargetUgi(UserGroupInformation ugi) {
    // UGI of proxy user
    UserGroupInformation targetUGI = usercache.get(ugi.getUserName());
    if (targetUGI == null) {
        targetUGI = users.get(uidx++ % users.size());
        usercache.put(ugi.getUserName(), targetUGI);
    }
    return targetUGI;
}
Also used: UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
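
The method's contract is easier to see in a small standalone sketch: the first lookup for a caller advances a round-robin index over the parsed user list, and later lookups for the same caller hit the cache. The class below uses plain Strings instead of UGIs and is purely illustrative:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class RoundRobinCacheSketch {
    private final List<String> targets;
    private final Map<String, String> cache = new HashMap<>();
    private int idx = 0;

    RoundRobinCacheSketch(List<String> targets) { this.targets = targets; }

    synchronized String resolve(String caller) {
        String target = cache.get(caller);
        if (target == null) {
            // New caller: take the next target, wrapping around the list.
            target = targets.get(idx++ % targets.size());
            cache.put(caller, target);
        }
        return target;
    }

    public static void main(String[] args) {
        RoundRobinCacheSketch r = new RoundRobinCacheSketch(Arrays.asList("u1", "u2"));
        System.out.println(r.resolve("alice")); // u1
        System.out.println(r.resolve("alice")); // u1 again (cached)
        System.out.println(r.resolve("bob"));   // u2 (round-robin advanced)
    }
}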

Example 68 with UserGroupInformation

use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

the class TestCopyMapper method testSkipCopyNoPerms.

@Test(timeout = 40000)
public void testSkipCopyNoPerms() {
    try {
        deleteState();
        createSourceData();
        UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest");
        final CopyMapper copyMapper = new CopyMapper();
        final StubContext stubContext = tmpUser.doAs(new PrivilegedAction<StubContext>() {

            @Override
            public StubContext run() {
                try {
                    return new StubContext(getConfiguration(), null, 0);
                } catch (Exception e) {
                    LOG.error("Exception encountered ", e);
                    throw new RuntimeException(e);
                }
            }
        });
        final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context = stubContext.getContext();
        EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
        preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
        preserveStatus.remove(DistCpOptions.FileAttribute.XATTR);
        preserveStatus.remove(DistCpOptions.FileAttribute.TIMES);
        context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS, DistCpUtils.packAttributes(preserveStatus));
        touchFile(SOURCE_PATH + "/src/file");
        touchFile(TARGET_PATH + "/src/file");
        // Make both source and target files read-only for all users (r--r--r--).
        cluster.getFileSystem().setPermission(new Path(SOURCE_PATH + "/src/file"), new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        cluster.getFileSystem().setPermission(new Path(TARGET_PATH + "/src/file"), new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        final FileSystem tmpFS = tmpUser.doAs(new PrivilegedAction<FileSystem>() {

            @Override
            public FileSystem run() {
                try {
                    return FileSystem.get(configuration);
                } catch (IOException e) {
                    LOG.error("Exception encountered ", e);
                    Assert.fail("Test failed: " + e.getMessage());
                    throw new RuntimeException("Test ought to fail here");
                }
            }
        });
        tmpUser.doAs(new PrivilegedAction<Integer>() {

            @Override
            public Integer run() {
                try {
                    copyMapper.setup(context);
                    copyMapper.map(new Text("/src/file"), new CopyListingFileStatus(tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file"))), context);
                    Assert.assertEquals(stubContext.getWriter().values().size(), 1);
                    Assert.assertTrue(stubContext.getWriter().values().get(0).toString().startsWith("SKIP"));
                    Assert.assertTrue(stubContext.getWriter().values().get(0).toString().contains(SOURCE_PATH + "/src/file"));
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                return null;
            }
        });
    } catch (Exception e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test failed: " + e.getMessage());
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Text (org.apache.hadoop.io.Text), IOException (java.io.IOException), AccessControlException (org.apache.hadoop.security.AccessControlException), Mapper (org.apache.hadoop.mapreduce.Mapper), DistCpOptions (org.apache.hadoop.tools.DistCpOptions), CopyListingFileStatus (org.apache.hadoop.tools.CopyListingFileStatus), StubContext (org.apache.hadoop.tools.StubContext), FileSystem (org.apache.hadoop.fs.FileSystem), FsPermission (org.apache.hadoop.fs.permission.FsPermission), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
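
The pattern that drives this test is UserGroupInformation.doAs: work executed inside run() is attributed to the wrapped user, so filesystem permission checks see "guest" rather than the JVM's login user. A minimal self-contained sketch of that pattern (against the default local filesystem; names are illustrative):

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsSketch {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // createRemoteUser fabricates a UGI without authenticating; "guest" is arbitrary.
        UserGroupInformation guest = UserGroupInformation.createRemoteUser("guest");
        // Everything inside run() executes with "guest" as the calling user.
        FileSystem fs = guest.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(conf);
            }
        });
        System.out.println("Got " + fs.getUri() + " as user " + guest.getUserName());
    }
}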

Example 69 with UserGroupInformation

use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

the class TestCopyMapper method testPreserve.

@Test(timeout = 40000)
public void testPreserve() {
    try {
        deleteState();
        createSourceData();
        UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest");
        final CopyMapper copyMapper = new CopyMapper();
        final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context = tmpUser.doAs(new PrivilegedAction<Mapper<Text, CopyListingFileStatus, Text, Text>.Context>() {

            @Override
            public Mapper<Text, CopyListingFileStatus, Text, Text>.Context run() {
                try {
                    StubContext stubContext = new StubContext(getConfiguration(), null, 0);
                    return stubContext.getContext();
                } catch (Exception e) {
                    LOG.error("Exception encountered ", e);
                    throw new RuntimeException(e);
                }
            }
        });
        EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
        preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
        preserveStatus.remove(DistCpOptions.FileAttribute.XATTR);
        context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS, DistCpUtils.packAttributes(preserveStatus));
        touchFile(SOURCE_PATH + "/src/file");
        mkdirs(TARGET_PATH);
        // 511 decimal == 0777 octal: make the target directory world-writable.
        cluster.getFileSystem().setPermission(new Path(TARGET_PATH), new FsPermission((short) 511));
        final FileSystem tmpFS = tmpUser.doAs(new PrivilegedAction<FileSystem>() {

            @Override
            public FileSystem run() {
                try {
                    return FileSystem.get(configuration);
                } catch (IOException e) {
                    LOG.error("Exception encountered ", e);
                    Assert.fail("Test failed: " + e.getMessage());
                    throw new RuntimeException("Test ought to fail here");
                }
            }
        });
        tmpUser.doAs(new PrivilegedAction<Integer>() {

            @Override
            public Integer run() {
                try {
                    copyMapper.setup(context);
                    copyMapper.map(new Text("/src/file"), new CopyListingFileStatus(tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file"))), context);
                    Assert.fail("Expected copy to fail");
                } catch (AccessControlException e) {
                    // Expected: copying as a user without target permissions must fail.
                    Assert.assertTrue("Got exception: " + e.getMessage(), true);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                return null;
            }
        });
    } catch (Exception e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test failed: " + e.getMessage());
    }
}
Also used: StubContext (org.apache.hadoop.tools.StubContext), Path (org.apache.hadoop.fs.Path), AccessControlException (org.apache.hadoop.security.AccessControlException), Text (org.apache.hadoop.io.Text), IOException (java.io.IOException), Mapper (org.apache.hadoop.mapreduce.Mapper), DistCpOptions (org.apache.hadoop.tools.DistCpOptions), CopyListingFileStatus (org.apache.hadoop.tools.CopyListingFileStatus), FileSystem (org.apache.hadoop.fs.FileSystem), FsPermission (org.apache.hadoop.fs.permission.FsPermission), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
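
One detail worth noting above: new FsPermission((short) 511) passes 511 decimal, which is 0777 octal, i.e. rwxrwxrwx. A short sketch of equivalent spellings using standard FsPermission constructors:

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class PermSketch {
    public static void main(String[] args) {
        // Three equivalent spellings of rwxrwxrwx (0777):
        FsPermission p1 = new FsPermission((short) 511);   // 511 decimal == 0777 octal
        FsPermission p2 = new FsPermission((short) 0777);  // octal literal
        FsPermission p3 = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
        System.out.println(p1 + " " + p2 + " " + p3);      // each prints rwxrwxrwx
    }
}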

Example 70 with UserGroupInformation

use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.

the class TestAggregatedLogFormat method testContainerLogsFileAccess.

@Test(timeout = 10000)
public void testContainerLogsFileAccess() throws IOException {
    // This test will run only if NativeIO is available, since SecureIOUtils
    // requires it to be enabled.
    Assume.assumeTrue(NativeIO.isAvailable());
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    File workDir = new File(testWorkDir, "testContainerLogsFileAccess1");
    Path remoteAppLogFile = new Path(workDir.getAbsolutePath(), "aggregatedLogFile");
    Path srcFileRoot = new Path(workDir.getAbsolutePath(), "srcFiles");
    String data = "Log File content for container : ";
    // Create log files for container1. The log aggregator will then try to
    // read them back as an invalid (non-owning) user.
    ApplicationId applicationId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(applicationId, 1);
    ContainerId testContainerId1 = ContainerId.newContainerId(applicationAttemptId, 1);
    Path appDir = new Path(srcFileRoot, testContainerId1.getApplicationAttemptId().getApplicationId().toString());
    Path srcFilePath1 = new Path(appDir, testContainerId1.toString());
    String stdout = "stdout";
    String stderr = "stderr";
    writeSrcFile(srcFilePath1, stdout, data + testContainerId1.toString() + stdout);
    writeSrcFile(srcFilePath1, stderr, data + testContainerId1.toString() + stderr);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    LogWriter logWriter = new LogWriter(conf, remoteAppLogFile, ugi);
    LogKey logKey = new LogKey(testContainerId1);
    String randomUser = "randomUser";
    LogValue logValue = spy(new LogValue(Collections.singletonList(srcFileRoot.toString()), testContainerId1, randomUser));
    // This simulates a situation where the first log file is owned by a
    // different user (e.g. via a symlink) and the second by the user itself.
    // The first file should not be aggregated because its owner does not
    // match the expected user.
    when(logValue.getUser()).thenReturn(randomUser).thenReturn(ugi.getShortUserName());
    logWriter.append(logKey, logValue);
    logWriter.close();
    BufferedReader in = new BufferedReader(new FileReader(new File(remoteAppLogFile.toUri().getRawPath())));
    String line;
    StringBuilder sb = new StringBuilder();
    while ((line = in.readLine()) != null) {
        LOG.info(line);
        sb.append(line);
    }
    in.close();
    line = sb.toString();
    String expectedOwner = ugi.getShortUserName();
    if (Path.WINDOWS) {
        final String adminsGroupString = "Administrators";
        if (Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString)) {
            expectedOwner = adminsGroupString;
        }
    }
    // The stderr file should not be aggregated, so its log content must not
    // appear in the aggregated output.
    String stdoutFile1 = StringUtils.join(File.separator, Arrays.asList(new String[] { workDir.getAbsolutePath(), "srcFiles", testContainerId1.getApplicationAttemptId().getApplicationId().toString(), testContainerId1.toString(), stderr }));
    // The file: stdout is expected to be aggregated.
    String stdoutFile2 = StringUtils.join(File.separator, Arrays.asList(new String[] { workDir.getAbsolutePath(), "srcFiles", testContainerId1.getApplicationAttemptId().getApplicationId().toString(), testContainerId1.toString(), stdout }));
    String message2 = "Owner '" + expectedOwner + "' for path " + stdoutFile2 + " did not match expected owner '" + ugi.getShortUserName() + "'";
    Assert.assertFalse(line.contains(message2));
    Assert.assertFalse(line.contains(data + testContainerId1.toString() + stderr));
    Assert.assertTrue(line.contains(data + testContainerId1.toString() + stdout));
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), LogKey (org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey), ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId), LogValue (org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue), TestContainerId (org.apache.hadoop.yarn.api.TestContainerId), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), LogWriter (org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter), BufferedReader (java.io.BufferedReader), FileReader (java.io.FileReader), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), File (java.io.File), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
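
The trick that makes the test work is Mockito's consecutive stubbing on the spied LogValue: the first getUser() call returns the bogus owner and later calls return the real one. A minimal illustration of that stubbing style (requires Mockito on the classpath; the Greeter class is invented for this example):

import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

public class ConsecutiveStubSketch {
    static class Greeter {
        String user() { return "real"; }
    }

    public static void main(String[] args) {
        Greeter g = spy(new Greeter());
        // Consecutive stubbing: successive calls walk through the thenReturn
        // chain, then the last value repeats for every later call.
        when(g.user()).thenReturn("randomUser").thenReturn("real");
        System.out.println(g.user()); // "randomUser"
        System.out.println(g.user()); // "real" (and on every later call)
    }
}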

Aggregations

UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 664
IOException (java.io.IOException): 281
Test (org.junit.Test): 242
Configuration (org.apache.hadoop.conf.Configuration): 142
Path (org.apache.hadoop.fs.Path): 105
FileSystem (org.apache.hadoop.fs.FileSystem): 73
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 57
AccessControlException (org.apache.hadoop.security.AccessControlException): 54
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 51
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 49
Path (javax.ws.rs.Path): 47
Token (org.apache.hadoop.security.token.Token): 46
Produces (javax.ws.rs.Produces): 45
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 45
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 43
AuthorizationException (org.apache.hadoop.security.authorize.AuthorizationException): 40
ArrayList (java.util.ArrayList): 38
Text (org.apache.hadoop.io.Text): 38
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 36
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 35