
Example 6 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

the class ShellBasedUnixGroupsMapping method getUnixGroups.

/**
   * Get the given user's group list from Unix by running the 'groups' command.
   * NOTE: for a non-existing user this returns an EMPTY list.
   *
   * @param user get groups for this user
   * @return the groups list that the <code>user</code> belongs to. The primary
   *         group is returned first.
   * @throws IOException if any error is encountered while running the command
   */
private List<String> getUnixGroups(String user) throws IOException {
    ShellCommandExecutor executor = createGroupExecutor(user);
    List<String> groups;
    try {
        executor.execute();
        groups = resolveFullGroupNames(executor.getOutput());
    } catch (ExitCodeException e) {
        try {
            groups = resolvePartialGroupNames(user, e.getMessage(), executor.getOutput());
        } catch (PartialGroupNameException pge) {
            LOG.warn("unable to return groups for user {}", user, pge);
            return EMPTY_GROUPS;
        }
    } catch (IOException ioe) {
        // similar to how partial resolution failures are handled above
        if (executor.isTimedOut()) {
            LOG.warn("Unable to return groups for user '{}' as shell group lookup " + "command '{}' ran longer than the configured timeout limit of " + "{} seconds.", user, Joiner.on(' ').join(executor.getExecString()), timeout);
            return EMPTY_GROUPS;
        } else {
            // If it's not an executor timeout, we should let the caller handle it
            throw ioe;
        }
    }
    // remove duplicated primary group
    if (!Shell.WINDOWS) {
        for (int i = 1; i < groups.size(); i++) {
            if (groups.get(i).equals(groups.get(0))) {
                groups.remove(i);
                break;
            }
        }
    }
    return groups;
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException)
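
As a usage sketch, the same lookup can be driven directly with ShellCommandExecutor. This assumes Shell.getGroupsForUserCommand is available to build the platform-specific command line (it is the helper this mapping class wraps via createGroupExecutor; treat the exact method as an assumption here):

import java.io.IOException;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class GroupsLookupSketch {
    public static void main(String[] args) throws IOException {
        String user = args.length > 0 ? args[0] : System.getProperty("user.name");
        // Build and run the group-lookup command for the user; the exact
        // command line is platform-dependent and chosen by Shell.
        ShellCommandExecutor executor =
            new ShellCommandExecutor(Shell.getGroupsForUserCommand(user));
        executor.execute();
        // Raw, whitespace-separated group names; the mapping class above splits
        // and de-duplicates this output before returning it.
        System.out.println(executor.getOutput().trim());
    }
}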

Example 7 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

the class TestShellBasedUnixGroupsMapping method testFiniteGroupResolutionTime.

@Test(timeout = 4000)
public void testFiniteGroupResolutionTime() throws Exception {
    Configuration conf = new Configuration();
    String userName = "foobarnonexistinguser";
    String commandTimeoutMessage = "ran longer than the configured timeout limit";
    long testTimeout = 1L;
    // Test a 1 second max-runtime timeout
    conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_SECS, testTimeout);
    TestDelayedGroupCommand mapping = ReflectionUtils.newInstance(TestDelayedGroupCommand.class, conf);
    ShellCommandExecutor executor = mapping.createGroupExecutor(userName);
    assertEquals("Expected the group names executor to carry the configured timeout", testTimeout, executor.getTimeoutInterval());
    executor = mapping.createGroupIDExecutor(userName);
    assertEquals("Expected the group ID executor to carry the configured timeout", testTimeout, executor.getTimeoutInterval());
    assertEquals("Expected no groups to be returned given a shell command timeout", 0, mapping.getGroups(userName).size());
    assertTrue("Expected the logs to carry " + "a message about command timeout but was: " + shellMappingLog.getOutput(), shellMappingLog.getOutput().contains(commandTimeoutMessage));
    shellMappingLog.clearOutput();
    // Test also the parent Groups framework for expected behaviour
    conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, TestDelayedGroupCommand.class, GroupMappingServiceProvider.class);
    Groups groups = new Groups(conf);
    try {
        groups.getGroups(userName);
        fail("The groups framework call should " + "have failed with a command timeout");
    } catch (IOException e) {
        assertTrue("Expected the logs to carry " + "a message about command timeout but was: " + shellMappingLog.getOutput(), shellMappingLog.getOutput().contains(commandTimeoutMessage));
    }
    shellMappingLog.clearOutput();
    // Test the no-timeout (default) configuration
    conf = new Configuration();
    long defaultTimeout = CommonConfigurationKeys.HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_SECS_DEFAULT;
    mapping = ReflectionUtils.newInstance(TestDelayedGroupCommand.class, conf);
    executor = mapping.createGroupExecutor(userName);
    assertEquals("Expected the group names executor to carry the default timeout", defaultTimeout, executor.getTimeoutInterval());
    executor = mapping.createGroupIDExecutor(userName);
    assertEquals("Expected the group ID executor to carry the default timeout", defaultTimeout, executor.getTimeoutInterval());
    mapping.getGroups(userName);
    assertFalse("Didn't expect a timeout of command in execution but logs carry it: " + shellMappingLog.getOutput(), shellMappingLog.getOutput().contains(commandTimeoutMessage));
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException) Test(org.junit.Test)
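
For reference, the timeout the test exercises is set with the same configuration key in ordinary client code. A minimal sketch with an illustrative 5-second limit (the key name and the timeout-logging behaviour come from the test above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.Groups;

public class GroupLookupTimeoutSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Cap shell-based group resolution at 5 seconds; a lookup that runs
        // longer is aborted and logged, and the shell mapping returns an empty
        // group list (the Groups framework then reports the failure, as the
        // test above demonstrates).
        conf.setLong(
            CommonConfigurationKeys.HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_SECS, 5L);
        Groups groups = new Groups(conf);
        System.out.println(groups.getGroups(System.getProperty("user.name")));
    }
}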

Example 8 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

the class ProcessTree method isAlive.

/**
   * Is the process with PID pid still alive?
   * This method assumes that isAlive is called on a pid that was alive not
   * too long ago, and hence assumes no chance of pid-wrapping-around.
   * 
   * @param pid pid of the process to check.
   * @return true if process is alive.
   */
public static boolean isAlive(String pid) {
    ShellCommandExecutor shexec = null;
    try {
        String[] args = { "kill", "-0", pid };
        shexec = new ShellCommandExecutor(args);
        shexec.execute();
    } catch (ExitCodeException ee) {
        return false;
    } catch (IOException ioe) {
        LOG.warn("Error executing shell command " + shexec.toString() + ioe);
        return false;
    }
    return shexec.getExitCode() == 0;
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException)
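
A minimal stand-alone sketch of the same probe: 'kill -0' sends no signal and only checks that the pid exists, so a clean execute() means the process is alive and an ExitCodeException means it is not (the default pid below is illustrative):

import java.io.IOException;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class PidProbeSketch {
    public static void main(String[] args) throws IOException {
        String pid = args.length > 0 ? args[0] : "1";
        // "kill -0" performs the existence/permission check without delivering
        // a signal; a zero exit code means the process is alive.
        ShellCommandExecutor shexec =
            new ShellCommandExecutor(new String[] { "kill", "-0", pid });
        try {
            shexec.execute();
            System.out.println("Process " + pid + " is alive");
        } catch (ExitCodeException e) {
            System.out.println("Process " + pid + " is not alive (exit " + e.getExitCode() + ")");
        }
    }
}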

Example 9 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hadoop by apache.

the class ProcessTree method sendSignal.

/**
   * Send a specified signal to the specified pid
   *
   * @param pid the pid of the process [group] to signal.
   * @param signalNum the signal to send.
   * @param signalName the human-readable description of the signal
   * (for logging).
   */
private static void sendSignal(String pid, int signalNum, String signalName) {
    ShellCommandExecutor shexec = null;
    try {
        String[] args = { "kill", "-" + signalNum, pid };
        shexec = new ShellCommandExecutor(args);
        shexec.execute();
    } catch (IOException ioe) {
        LOG.warn("Error executing shell command " + ioe);
    } finally {
        if (pid.startsWith("-")) {
            LOG.info("Sending signal to all members of process group " + pid + ": " + signalName + ". Exit code " + shexec.getExitCode());
        } else {
            LOG.info("Signaling process " + pid + " with " + signalName + ". Exit code " + shexec.getExitCode());
        }
    }
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) IOException(java.io.IOException)
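
Note the convention the finally block relies on: a pid that starts with '-' is a negated process-group id, so kill signals every member of that group rather than a single process. A minimal sketch of both forms, with hypothetical pids:

import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class SendSignalSketch {
    public static void main(String[] args) throws Exception {
        try {
            // Signal a single (hypothetical) pid with SIGTERM (15).
            new ShellCommandExecutor(new String[] { "kill", "-15", "12345" }).execute();
            // Signal the whole (hypothetical) process group 12345; the extra
            // '-' in front of the pid is what selects the group.
            new ShellCommandExecutor(new String[] { "kill", "-15", "-12345" }).execute();
        } catch (ExitCodeException e) {
            // Expected if the illustrative pids do not exist on this machine.
            System.out.println("kill exited with code " + e.getExitCode());
        }
    }
}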

Example 10 with ShellCommandExecutor

use of org.apache.hadoop.util.Shell.ShellCommandExecutor in project hbase by apache.

the class HealthChecker method init.

/**
   * Initialize the health checker.
   *
   * @param location path to the health check script
   * @param timeout timeout, in milliseconds, for running the script
   */
public void init(String location, long timeout) {
    this.healthCheckScript = location;
    this.scriptTimeout = timeout;
    ArrayList<String> execScript = new ArrayList<>();
    execScript.add(healthCheckScript);
    this.shexec = new ShellCommandExecutor(execScript.toArray(new String[execScript.size()]), null, null, scriptTimeout);
    LOG.info("HealthChecker initialized with script at " + this.healthCheckScript + ", timeout=" + timeout);
}
Also used : ShellCommandExecutor(org.apache.hadoop.util.Shell.ShellCommandExecutor) ArrayList(java.util.ArrayList)
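
A minimal sketch of the same pattern in isolation: the four-argument ShellCommandExecutor constructor used above takes the command, a working directory, an environment map (both null here, as in init), and a timeout in milliseconds; a timed-out run then surfaces through isTimedOut(), as in the getUnixGroups example earlier. The script path below is illustrative:

import java.io.IOException;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class HealthScriptSketch {
    public static void main(String[] args) {
        String script = args.length > 0 ? args[0] : "/tmp/health_check.sh"; // hypothetical script
        long timeoutMs = 10000L;
        // dir and env are null, as in HealthChecker.init(); the last argument
        // is the per-execution timeout in milliseconds.
        ShellCommandExecutor shexec =
            new ShellCommandExecutor(new String[] { script }, null, null, timeoutMs);
        try {
            shexec.execute();
            System.out.println("Health script output:\n" + shexec.getOutput());
        } catch (ExitCodeException e) {
            System.out.println("Health script failed with exit code " + e.getExitCode());
        } catch (IOException e) {
            // Mirrors getUnixGroups above: a timeout surfaces as an IOException
            // with isTimedOut() set on the executor.
            if (shexec.isTimedOut()) {
                System.out.println("Health script exceeded " + timeoutMs + " ms");
            } else {
                e.printStackTrace();
            }
        }
    }
}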

Aggregations

ShellCommandExecutor (org.apache.hadoop.util.Shell.ShellCommandExecutor) 21
IOException (java.io.IOException) 19
ExitCodeException (org.apache.hadoop.util.Shell.ExitCodeException) 6
Test (org.junit.Test) 3
File (java.io.File) 2
ZipFile (java.util.zip.ZipFile) 2
BufferedReader (java.io.BufferedReader) 1
FileNotFoundException (java.io.FileNotFoundException) 1
StringReader (java.io.StringReader) 1
ArrayList (java.util.ArrayList) 1
Configuration (org.apache.hadoop.conf.Configuration) 1
FileSystem (org.apache.hadoop.fs.FileSystem) 1
Path (org.apache.hadoop.fs.Path) 1
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem) 1
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster) 1
TestContext (org.apache.hadoop.test.MultithreadedTestUtil.TestContext) 1
Shell (org.apache.hadoop.util.Shell) 1
YarnException (org.apache.hadoop.yarn.exceptions.YarnException) 1
YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException) 1
Matchers.anyBoolean (org.mockito.Matchers.anyBoolean) 1