Search in sources :

Example 1 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

The class ShellBasedUnixGroupsMapping, method resolvePartialGroupNames.

/**
 * Attempt to partially resolve group names.
 *
 * @param userName the user's name
 * @param errMessage error message from the shell command
 * @param groupNames the incomplete list of group names
 * @return a list of resolved group names
 * @throws PartialGroupNameException if the resolution fails or times out
 */
private List<String> resolvePartialGroupNames(String userName, String errMessage, String groupNames) throws PartialGroupNameException {
    // Partial resolution shells out to a Unix group-ID lookup; there is no
    // Windows equivalent, so bail out immediately there.
    if (Shell.WINDOWS) {
        throw new PartialGroupNameException("Does not support partial group" + " name resolution on Windows. " + errMessage);
    }
    // An empty group list means the user itself was not found.
    if (groupNames.isEmpty()) {
        throw new PartialGroupNameException("The user name '" + userName + "' is not found. " + errMessage);
    }
    LOG.warn("Some group names for '{}' are not resolvable. {}", userName, errMessage);
    // Run the group-ID command and merge its output with the partial names.
    ShellCommandExecutor idResolver = createGroupIDExecutor(userName);
    try {
        idResolver.execute();
        return parsePartialGroupNames(groupNames, idResolver.getOutput());
    } catch (ExitCodeException ece) {
        // Non-zero exit here means even the ID lookup failed; give up.
        throw new PartialGroupNameException("failed to get group id list for user '" + userName + "'", ece);
    } catch (IOException ioe) {
        String message = "Can't execute the shell command to " + "get the list of group id for user '" + userName + "'";
        if (idResolver.isTimedOut()) {
            message += " because of the command taking longer than " + "the configured timeout: " + timeout + " seconds";
        }
        throw new PartialGroupNameException(message, ioe);
    }
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException)

Example 2 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

The class ShellBasedUnixGroupsMapping, method getUnixGroups.

/**
 * Look up the current group list of a user on Unix by running the
 * 'groups' command. A non-existing user yields an EMPTY list.
 *
 * @param user get groups for this user
 * @return the groups the <code>user</code> belongs to; the primary
 *         group is returned first
 * @throws IOException if encounter any error when running the command
 */
private List<String> getUnixGroups(String user) throws IOException {
    ShellCommandExecutor shexec = createGroupExecutor(user);
    List<String> groupList;
    try {
        shexec.execute();
        groupList = resolveFullGroupNames(shexec.getOutput());
    } catch (ExitCodeException ece) {
        // The command exited non-zero: fall back to partial resolution.
        try {
            groupList = resolvePartialGroupNames(user, ece.getMessage(), shexec.getOutput());
        } catch (PartialGroupNameException pgne) {
            LOG.warn("unable to return groups for user {}", user, pgne);
            return EMPTY_GROUPS;
        }
    } catch (IOException ioe) {
        if (!shexec.isTimedOut()) {
            // Not a timeout: let the caller deal with the I/O failure.
            throw ioe;
        }
        // A timed-out lookup is handled like the partial-resolution failure
        // above: warn and report no groups.
        LOG.warn("Unable to return groups for user '{}' as shell group lookup " + "command '{}' ran longer than the configured timeout limit of " + "{} seconds.", user, Joiner.on(' ').join(shexec.getExecString()), timeout);
        return EMPTY_GROUPS;
    }
    // On Unix the primary group may also appear among the secondary groups;
    // drop the first later occurrence so it is listed only once.
    if (!Shell.WINDOWS) {
        for (int idx = 1; idx < groupList.size(); idx++) {
            if (groupList.get(idx).equals(groupList.get(0))) {
                groupList.remove(idx);
                break;
            }
        }
    }
    return groupList;
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException)

Example 3 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

The class SysInfoWindows, method getSystemInfoInfoFromShell.

/**
 * Runs 'winutils systeminfo' and returns its raw output, or {@code null}
 * if the command cannot be located or executed.
 */
String getSystemInfoInfoFromShell() {
    try {
        String[] cmd = new String[] { Shell.getWinUtilsFile().getCanonicalPath(), "systeminfo" };
        ShellCommandExecutor sysInfoExec = new ShellCommandExecutor(cmd);
        sysInfoExec.execute();
        return sysInfoExec.getOutput();
    } catch (IOException e) {
        // Best-effort: log the failure and report no data.
        LOG.error(StringUtils.stringifyException(e));
        return null;
    }
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException)

Example 4 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

The class FileUtil, method unTarUsingTar.

/**
 * Untars the given archive into {@code untarDir} by shelling out to
 * tar(1) via {@code bash -c}; gzipped archives are first piped through
 * {@code gzip -dc}.
 *
 * @param inFile the archive to extract
 * @param untarDir the directory to extract into
 * @param gzipped whether the archive is gzip-compressed
 * @throws IOException if the command cannot be run or tar exits non-zero
 */
private static void unTarUsingTar(File inFile, File untarDir, boolean gzipped) throws IOException {
    // StringBuilder: this builder is method-local, so the synchronized
    // StringBuffer buys nothing.
    StringBuilder untarCommand = new StringBuilder();
    if (gzipped) {
        untarCommand.append(" gzip -dc '");
        untarCommand.append(FileUtil.makeShellPath(inFile));
        untarCommand.append("' | (");
    }
    untarCommand.append("cd '");
    untarCommand.append(FileUtil.makeShellPath(untarDir));
    untarCommand.append("' ; ");
    untarCommand.append("tar -xf ");
    if (gzipped) {
        untarCommand.append(" -)");
    } else {
        // Single-quote the archive path, consistent with the gzipped branch
        // above; unquoted, a path containing spaces or shell metacharacters
        // would break the bash -c command line.
        untarCommand.append("'");
        untarCommand.append(FileUtil.makeShellPath(inFile));
        untarCommand.append("'");
    }
    String[] shellCmd = { "bash", "-c", untarCommand.toString() };
    ShellCommandExecutor shexec = new ShellCommandExecutor(shellCmd);
    shexec.execute();
    int exitcode = shexec.getExitCode();
    if (exitcode != 0) {
        throw new IOException("Error untarring file " + inFile + ". Tar process exited with exit code " + exitcode);
    }
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException)

Example 5 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

The class TestShellBasedUnixGroupsMapping, method testFiniteGroupResolutionTime.

@Test(timeout = 4000)
public void testFiniteGroupResolutionTime() throws Exception {
    Configuration config = new Configuration();
    String user = "foobarnonexistinguser";
    String timeoutLogFragment = "ran longer than the configured timeout limit";
    long shortTimeout = 1L;
    // Phase 1: with a 1-second max-runtime limit, both executors must carry
    // the configured timeout and the slow lookup must yield no groups.
    config.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_SECS, shortTimeout);
    TestDelayedGroupCommand groupMapping = ReflectionUtils.newInstance(TestDelayedGroupCommand.class, config);
    ShellCommandExecutor exec = groupMapping.createGroupExecutor(user);
    assertEquals("Expected the group names executor to carry the configured timeout", shortTimeout, exec.getTimeoutInterval());
    exec = groupMapping.createGroupIDExecutor(user);
    assertEquals("Expected the group ID executor to carry the configured timeout", shortTimeout, exec.getTimeoutInterval());
    assertEquals("Expected no groups to be returned given a shell command timeout", 0, groupMapping.getGroups(user).size());
    assertTrue("Expected the logs to carry " + "a message about command timeout but was: " + shellMappingLog.getOutput(), shellMappingLog.getOutput().contains(timeoutLogFragment));
    shellMappingLog.clearOutput();
    // Phase 2: the parent Groups framework should surface the timeout as an
    // IOException while still logging the timeout message.
    config.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, TestDelayedGroupCommand.class, GroupMappingServiceProvider.class);
    Groups groups = new Groups(config);
    try {
        groups.getGroups(user);
        fail("The groups framework call should " + "have failed with a command timeout");
    } catch (IOException e) {
        assertTrue("Expected the logs to carry " + "a message about command timeout but was: " + shellMappingLog.getOutput(), shellMappingLog.getOutput().contains(timeoutLogFragment));
    }
    shellMappingLog.clearOutput();
    // Phase 3: with the default (no-timeout) configuration the delayed
    // command must be allowed to finish without a timeout log entry.
    config = new Configuration();
    long defaultTimeout = CommonConfigurationKeys.HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_SECS_DEFAULT;
    groupMapping = ReflectionUtils.newInstance(TestDelayedGroupCommand.class, config);
    exec = groupMapping.createGroupExecutor(user);
    assertEquals("Expected the group names executor to carry the default timeout", defaultTimeout, exec.getTimeoutInterval());
    exec = groupMapping.createGroupIDExecutor(user);
    assertEquals("Expected the group ID executor to carry the default timeout", defaultTimeout, exec.getTimeoutInterval());
    groupMapping.getGroups(user);
    assertFalse("Didn't expect a timeout of command in execution but logs carry it: " + shellMappingLog.getOutput(), shellMappingLog.getOutput().contains(timeoutLogFragment));
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException) Test(org.junit.Test)

Aggregations

ShellCommandExecutor (org.apache.hadoop.util.Shell.ShellCommandExecutor)21 IOException (java.io.IOException)19 ExitCodeException (org.apache.hadoop.util.Shell.ExitCodeException)6 Test (org.junit.Test)3 File (java.io.File)2 ZipFile (java.util.zip.ZipFile)2 BufferedReader (java.io.BufferedReader)1 FileNotFoundException (java.io.FileNotFoundException)1 StringReader (java.io.StringReader)1 ArrayList (java.util.ArrayList)1 Configuration (org.apache.hadoop.conf.Configuration)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)1 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)1 TestContext (org.apache.hadoop.test.MultithreadedTestUtil.TestContext)1 Shell (org.apache.hadoop.util.Shell)1 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)1 YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException)1 Matchers.anyBoolean (org.mockito.Matchers.anyBoolean)1