Example 1 with Shell

Use of org.apache.hadoop.util.Shell in project hadoop by apache.

From the class DefaultContainerExecutor, the method launchContainer:

@Override
public int launchContainer(ContainerStartContext ctx) throws IOException {
    Container container = ctx.getContainer();
    Path nmPrivateContainerScriptPath = ctx.getNmPrivateContainerScriptPath();
    Path nmPrivateTokensPath = ctx.getNmPrivateTokensPath();
    String user = ctx.getUser();
    Path containerWorkDir = ctx.getContainerWorkDir();
    List<String> localDirs = ctx.getLocalDirs();
    List<String> logDirs = ctx.getLogDirs();
    FsPermission dirPerm = new FsPermission(APPDIR_PERM);
    ContainerId containerId = container.getContainerId();
    // create container dirs on all disks
    String containerIdStr = containerId.toString();
    String appIdStr = containerId.getApplicationAttemptId().getApplicationId().toString();
    for (String sLocalDir : localDirs) {
        Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE);
        Path userdir = new Path(usersdir, user);
        Path appCacheDir = new Path(userdir, ContainerLocalizer.APPCACHE);
        Path appDir = new Path(appCacheDir, appIdStr);
        Path containerDir = new Path(appDir, containerIdStr);
        createDir(containerDir, dirPerm, true, user);
    }
    // Create the container log-dirs on all disks
    createContainerLogDirs(appIdStr, containerIdStr, logDirs, user);
    Path tmpDir = new Path(containerWorkDir, YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR);
    createDir(tmpDir, dirPerm, false, user);
    // copy container tokens to work dir
    Path tokenDst = new Path(containerWorkDir, ContainerLaunch.FINAL_CONTAINER_TOKENS_FILE);
    copyFile(nmPrivateTokensPath, tokenDst, user);
    // copy launch script to work dir
    Path launchDst = new Path(containerWorkDir, ContainerLaunch.CONTAINER_SCRIPT);
    copyFile(nmPrivateContainerScriptPath, launchDst, user);
    // Create new local launch wrapper script
    LocalWrapperScriptBuilder sb = getLocalWrapperScriptBuilder(containerIdStr, containerWorkDir);
    // Windows path length limitation.
    if (Shell.WINDOWS && sb.getWrapperScriptPath().toString().length() > WIN_MAX_PATH) {
        throw new IOException(String.format("Cannot launch container using script at path %s, because it exceeds " + "the maximum supported path length of %d characters.  Consider " + "configuring shorter directories in %s.", sb.getWrapperScriptPath(), WIN_MAX_PATH, YarnConfiguration.NM_LOCAL_DIRS));
    }
    Path pidFile = getPidFilePath(containerId);
    if (pidFile != null) {
        sb.writeLocalWrapperScript(launchDst, pidFile);
    } else {
        LOG.info("Container " + containerIdStr + " was marked as inactive. Returning terminated error");
        return ExitCode.TERMINATED.getExitCode();
    }
    // fork script
    Shell.CommandExecutor shExec = null;
    try {
        setScriptExecutable(launchDst, user);
        setScriptExecutable(sb.getWrapperScriptPath(), user);
        shExec = buildCommandExecutor(sb.getWrapperScriptPath().toString(), containerIdStr, user, pidFile, container.getResource(), new File(containerWorkDir.toUri().getPath()), container.getLaunchContext().getEnvironment());
        if (isContainerActive(containerId)) {
            shExec.execute();
        } else {
            LOG.info("Container " + containerIdStr + " was marked as inactive. Returning terminated error");
            return ExitCode.TERMINATED.getExitCode();
        }
    } catch (IOException e) {
        if (null == shExec) {
            return -1;
        }
        int exitCode = shExec.getExitCode();
        LOG.warn("Exit code from container " + containerId + " is : " + exitCode);
        // container-executor's output
        if (exitCode != ExitCode.FORCE_KILLED.getExitCode() && exitCode != ExitCode.TERMINATED.getExitCode()) {
            LOG.warn("Exception from container-launch with container ID: " + containerId + " and exit code: " + exitCode, e);
            StringBuilder builder = new StringBuilder();
            builder.append("Exception from container-launch.\n");
            builder.append("Container id: ").append(containerId).append("\n");
            builder.append("Exit code: ").append(exitCode).append("\n");
            if (!Optional.fromNullable(e.getMessage()).or("").isEmpty()) {
                builder.append("Exception message: ");
                builder.append(e.getMessage()).append("\n");
            }
            builder.append("Stack trace: ");
            builder.append(StringUtils.stringifyException(e)).append("\n");
            if (!shExec.getOutput().isEmpty()) {
                builder.append("Shell output: ");
                builder.append(shExec.getOutput()).append("\n");
            }
            String diagnostics = builder.toString();
            logOutput(diagnostics);
            container.handle(new ContainerDiagnosticsUpdateEvent(containerId, diagnostics));
        } else {
            container.handle(new ContainerDiagnosticsUpdateEvent(containerId, "Container killed on request. Exit code is " + exitCode));
        }
        return exitCode;
    } finally {
        if (shExec != null)
            shExec.close();
    }
    return 0;
}
Also used: Path (org.apache.hadoop.fs.Path), ContainerDiagnosticsUpdateEvent (org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerDiagnosticsUpdateEvent), IOException (java.io.IOException), CommandExecutor (org.apache.hadoop.util.Shell.CommandExecutor), Container (org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container), Shell (org.apache.hadoop.util.Shell), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), FsPermission (org.apache.hadoop.fs.permission.FsPermission), File (java.io.File)
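
The core pattern in launchContainer is a Shell.WINDOWS platform check plus a Shell.CommandExecutor that runs the wrapper script in the container work directory. A minimal standalone sketch of that pattern (not taken from the Hadoop sources; the command and working directory are illustrative):

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class ShellExecutorSketch {
    public static void main(String[] args) throws IOException {
        // Choose a platform-appropriate command, mirroring the
        // Shell.WINDOWS checks in launchContainer above.
        String[] cmd = Shell.WINDOWS
            ? new String[] { "cmd", "/c", "echo hello" }
            : new String[] { "echo", "hello" };
        // The second argument is the working directory; launchContainer
        // passes the container work dir here.
        ShellCommandExecutor shExec = new ShellCommandExecutor(cmd, new File("."));
        try {
            shExec.execute();
            System.out.println("exit code: " + shExec.getExitCode());
            System.out.println("output: " + shExec.getOutput());
        } finally {
            shExec.close();
        }
    }
}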

Example 2 with Shell

Use of org.apache.hadoop.util.Shell in project hadoop by apache.

From the class TestContainerLaunch, the method testShellScriptBuilderNonZeroExitCode:

/**
   * Test that the script exits with a non-zero exit code when the command
   * fails.
   * @throws IOException
   */
@Test(timeout = 10000)
public void testShellScriptBuilderNonZeroExitCode() throws IOException {
    ShellScriptBuilder builder = ShellScriptBuilder.create();
    builder.command(Arrays.asList(new String[] { "unknownCommand" }));
    File shellFile = Shell.appendScriptExtension(tmpDir, "testShellScriptBuilderError");
    PrintStream writer = new PrintStream(new FileOutputStream(shellFile));
    builder.write(writer);
    writer.close();
    try {
        FileUtil.setExecutable(shellFile, true);
        Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[] { shellFile.getAbsolutePath() }, tmpDir);
        try {
            shexc.execute();
            fail("builder shell command was expected to throw");
        } catch (IOException e) {
            // expected
            System.out.println("Received an expected exception: " + e.getMessage());
        }
    } finally {
        FileUtil.fullyDelete(shellFile);
    }
}
Also used: PrintStream (java.io.PrintStream), Shell (org.apache.hadoop.util.Shell), ShellScriptBuilder (org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.ShellScriptBuilder), FileOutputStream (java.io.FileOutputStream), IOException (java.io.IOException), JarFile (java.util.jar.JarFile), File (java.io.File), BaseContainerManagerTest (org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest), Test (org.junit.Test)
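
The helper Shell.appendScriptExtension used in the test picks the platform script suffix (.cmd on Windows, .sh elsewhere), and a non-zero exit status from ShellCommandExecutor.execute() surfaces as Shell.ExitCodeException. A minimal sketch of the same execute-and-expect-failure pattern, using an explicit exit code instead of an unknown command (the script name and contents are illustrative, not from the test class):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import org.apache.hadoop.util.Shell;

public class ExitCodeSketch {
    public static void main(String[] args) throws IOException {
        File tmpDir = new File(System.getProperty("java.io.tmpdir"));
        // Produces "exitCodeDemo.cmd" on Windows, "exitCodeDemo.sh" elsewhere.
        File script = Shell.appendScriptExtension(tmpDir, "exitCodeDemo");
        try (PrintStream out = new PrintStream(new FileOutputStream(script))) {
            if (!Shell.WINDOWS) {
                out.println("#!/bin/sh");
            }
            out.println("exit 7");
        }
        script.setExecutable(true);
        Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
            new String[] { script.getAbsolutePath() }, tmpDir);
        try {
            shexc.execute();
        } catch (Shell.ExitCodeException e) {
            // A non-zero exit status is reported through ExitCodeException.
            System.out.println("script exited with " + e.getExitCode());
        } finally {
            script.delete();
        }
    }
}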

Example 3 with Shell

Use of org.apache.hadoop.util.Shell in project hadoop by apache.

From the class FileUtil, the method symLink:

/**
   * Create a soft link between a source and a destination; this works
   * only on a local disk. HDFS does not support symlinks.
   * On Windows, when symlink creation fails due to security
   * settings, we log a warning and return 2.
   *
   * @param target the target of the symlink
   * @param linkname the symlink to create
   * @return 0 on success
   */
public static int symLink(String target, String linkname) throws IOException {
    // Run the input paths through Java's File so that they are converted to the
    // native OS form
    File targetFile = new File(Path.getPathWithoutSchemeAndAuthority(new Path(target)).toString());
    File linkFile = new File(Path.getPathWithoutSchemeAndAuthority(new Path(linkname)).toString());
    String[] cmd = Shell.getSymlinkCommand(targetFile.toString(), linkFile.toString());
    ShellCommandExecutor shExec;
    try {
        if (Shell.WINDOWS && linkFile.getParentFile() != null && !new Path(target).isAbsolute()) {
            // Relative links on Windows must be resolvable at the time of
            // creation. To ensure this we run the shell command in the directory
            // of the link.
            //
            shExec = new ShellCommandExecutor(cmd, linkFile.getParentFile());
        } else {
            shExec = new ShellCommandExecutor(cmd);
        }
        shExec.execute();
    } catch (Shell.ExitCodeException ec) {
        int returnVal = ec.getExitCode();
        if (Shell.WINDOWS && returnVal == SYMLINK_NO_PRIVILEGE) {
            LOG.warn("Fail to create symbolic links on Windows. " + "The default security settings in Windows disallow non-elevated " + "administrators and all non-administrators from creating symbolic links. " + "This behavior can be changed in the Local Security Policy management console");
        } else if (returnVal != 0) {
            LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed " + returnVal + " with: " + ec.getMessage());
        }
        return returnVal;
    } catch (IOException e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Error while create symlink " + linkname + " to " + target + "." + " Exception: " + StringUtils.stringifyException(e));
        }
        throw e;
    }
    return shExec.getExitCode();
}
Also used: Shell (org.apache.hadoop.util.Shell), ShellCommandExecutor (org.apache.hadoop.util.Shell.ShellCommandExecutor), IOException (java.io.IOException), ZipFile (java.util.zip.ZipFile), File (java.io.File)
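
A minimal caller of FileUtil.symLink that checks the documented return codes: 0 on success, and on Windows 2 (the SYMLINK_NO_PRIVILEGE case described in the javadoc above). This sketch is not from the Hadoop sources; the file names are illustrative:

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.FileUtil;

public class SymLinkSketch {
    public static void main(String[] args) throws IOException {
        File target = File.createTempFile("symlink-target", ".txt");
        File link = new File(target.getParentFile(), "symlink-demo");
        // 0 means success; on Windows, 2 indicates symlink creation was
        // blocked by the local security policy, per the javadoc above.
        int rc = FileUtil.symLink(target.getAbsolutePath(), link.getAbsolutePath());
        System.out.println("symLink returned " + rc);
        link.delete();
        target.delete();
    }
}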

Aggregations

File (java.io.File): 3
IOException (java.io.IOException): 3
Shell (org.apache.hadoop.util.Shell): 3
FileOutputStream (java.io.FileOutputStream): 1
PrintStream (java.io.PrintStream): 1
JarFile (java.util.jar.JarFile): 1
ZipFile (java.util.zip.ZipFile): 1
Path (org.apache.hadoop.fs.Path): 1
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 1
CommandExecutor (org.apache.hadoop.util.Shell.CommandExecutor): 1
ShellCommandExecutor (org.apache.hadoop.util.Shell.ShellCommandExecutor): 1
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 1
BaseContainerManagerTest (org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest): 1
Container (org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container): 1
ContainerDiagnosticsUpdateEvent (org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerDiagnosticsUpdateEvent): 1
ShellScriptBuilder (org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.ShellScriptBuilder): 1
Test (org.junit.Test): 1