Search in sources :

Example 1 with DefaultContainerExecutor

Use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in the Apache Hadoop project.

The class TestContainerSchedulerQueuing defines the method createContainerExecutor.

/**
 * Creates a {@link DefaultContainerExecutor} whose container launch can be
 * artificially delayed, so queuing behavior can be observed while a
 * container is still "launching". The executor is wrapped in a Mockito spy
 * so tests can verify or stub its calls.
 */
@Override
protected ContainerExecutor createContainerExecutor() {
    DefaultContainerExecutor exec = new DefaultContainerExecutor() {

        @Override
        public int launchContainer(ContainerStartContext ctx) throws IOException {
            if (delayContainers) {
                try {
                    // Hold the launch for 10s so queued containers pile up.
                    Thread.sleep(10000);
                } catch (InterruptedException e) {
                    // Restore the interrupt status instead of swallowing it,
                    // so callers can still observe the interruption.
                    Thread.currentThread().interrupt();
                }
            }
            return super.launchContainer(ctx);
        }
    };
    exec.setConf(conf);
    // Spy so tests can verify()/stub executor interactions.
    return spy(exec);
}
Also used : DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) ContainerStartContext(org.apache.hadoop.yarn.server.nodemanager.executor.ContainerStartContext)

Example 2 with DefaultContainerExecutor

Use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in the Apache Hadoop project.

The class TestContainerLaunch defines the method testWriteEnvExport.

/**
 * Verifies how writeLaunchEnv exports environment variables into the launch
 * script depending on whether each variable appears in the env map and/or
 * the NM whitelist.
 */
@Test(timeout = 20000)
public void testWriteEnvExport() throws Exception {
    // Valid only for unix
    assumeNotWindows();
    File shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    Map<String, String> env = new HashMap<String, String>();
    env.put("HADOOP_COMMON_HOME", "/opt/hadoopcommon");
    env.put("HADOOP_MAPRED_HOME", "/opt/hadoopbuild");
    Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
    List<String> commands = new ArrayList<String>();
    DefaultContainerExecutor defaultContainerExecutor = new DefaultContainerExecutor();
    YarnConfiguration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.NM_ENV_WHITELIST, "HADOOP_MAPRED_HOME,HADOOP_YARN_HOME");
    defaultContainerExecutor.setConf(conf);
    // Close the stream (try-with-resources) BEFORE reading the file back, so
    // the script contents are fully flushed to disk; the original closed the
    // stream only after the assertions, risking a partial read and a leak on
    // assertion failure.
    try (FileOutputStream fos = new FileOutputStream(shellFile)) {
        defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands, new Path(localLogDir.getAbsolutePath()), "user");
    }
    String shellContent = new String(Files.readAllBytes(Paths.get(shellFile.getAbsolutePath())), StandardCharsets.UTF_8);
    // In env but not in whitelist: exported verbatim.
    Assert.assertTrue(shellContent.contains("export HADOOP_COMMON_HOME=\"/opt/hadoopcommon\""));
    // In both env and whitelist: exported with an inherited-value fallback.
    Assert.assertTrue(shellContent.contains("export HADOOP_MAPRED_HOME=" + "${HADOOP_MAPRED_HOME:-\"/opt/hadoopbuild\"}"));
    // In whitelist but not in env: not exported at all.
    Assert.assertFalse(shellContent.contains("HADOOP_YARN_HOME"));
}
Also used : Path(org.apache.hadoop.fs.Path) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) HashMap(java.util.HashMap) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) FileOutputStream(java.io.FileOutputStream) ArrayList(java.util.ArrayList) List(java.util.List) ArrayList(java.util.ArrayList) JarFile(java.util.jar.JarFile) File(java.io.File) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Example 3 with DefaultContainerExecutor

Use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in the Apache Hadoop project.

The class TestContainerLaunch defines the method testInvalidSymlinkDiagnostics.

/**
 * Verifies that a launch script referencing a resource path that does not
 * exist fails with a non-zero exit code and produces a non-null diagnostics
 * message (the "No such file" error surfaced via ExitCodeException).
 */
// test the diagnostics are generated
@Test(timeout = 20000)
public void testInvalidSymlinkDiagnostics() throws IOException {
    File shellFile = null;
    File tempFile = null;
    String symLink = Shell.WINDOWS ? "test.cmd" : "test";
    File symLinkFile = null;
    try {
        shellFile = Shell.appendScriptExtension(tmpDir, "hello");
        tempFile = Shell.appendScriptExtension(tmpDir, "temp");
        String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" : "echo \"hello\"";
        // try-with-resources guarantees the writer is closed even if an
        // exception fires before the (previously explicit) close call.
        try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
            FileUtil.setExecutable(shellFile, true);
            writer.println(timeoutCommand);
        }
        Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
        //This is an invalid path and should throw exception because of No such file.
        Path invalidPath = new Path(shellFile.getAbsolutePath() + "randomPath");
        resources.put(invalidPath, Arrays.asList(symLink));
        Map<String, String> env = new HashMap<String, String>();
        List<String> commands = new ArrayList<String>();
        if (Shell.WINDOWS) {
            commands.add("cmd");
            commands.add("/c");
            commands.add("\"" + symLink + "\"");
        } else {
            commands.add("/bin/sh ./\\\"" + symLink + "\\\"");
        }
        DefaultContainerExecutor defaultContainerExecutor = new DefaultContainerExecutor();
        defaultContainerExecutor.setConf(new YarnConfiguration());
        // Scope the stream so it is closed before the script is executed,
        // instead of leaking on any failure between write and close.
        try (FileOutputStream fos = new FileOutputStream(tempFile)) {
            defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands, new Path(localLogDir.getAbsolutePath()), "user");
        }
        FileUtil.setExecutable(tempFile, true);
        Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[] { tempFile.getAbsolutePath() }, tmpDir);
        String diagnostics = null;
        try {
            shexc.execute();
            Assert.fail("Should catch exception");
        } catch (ExitCodeException e) {
            // Expected: the broken symlink target makes the script fail.
            diagnostics = e.getMessage();
        }
        Assert.assertNotNull(diagnostics);
        Assert.assertTrue(shexc.getExitCode() != 0);
        symLinkFile = new File(tmpDir, symLink);
    } finally {
        // cleanup
        if (shellFile != null && shellFile.exists()) {
            shellFile.delete();
        }
        if (tempFile != null && tempFile.exists()) {
            tempFile.delete();
        }
        if (symLinkFile != null && symLinkFile.exists()) {
            symLinkFile.delete();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) Shell(org.apache.hadoop.util.Shell) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) FileOutputStream(java.io.FileOutputStream) List(java.util.List) ArrayList(java.util.ArrayList) JarFile(java.util.jar.JarFile) File(java.io.File) PrintWriter(java.io.PrintWriter) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Example 4 with DefaultContainerExecutor

Use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in the Apache Hadoop project.

The class TestContainerLaunch defines the method testSpecialCharSymlinks.

/**
 * Verifies that a resource symlink whose name contains shell special
 * characters (spaces, '%', '#', '*', '&amp;', parentheses, ...) is correctly
 * escaped in the launch script, so the script runs and prints "hello".
 */
@Test
public void testSpecialCharSymlinks() throws IOException {
    File shellFile = null;
    File tempFile = null;
    String badSymlink = Shell.WINDOWS ? "foo@zz_#!-+bar.cmd" : "foo@zz%_#*&!-+= bar()";
    File symLinkFile = null;
    try {
        shellFile = Shell.appendScriptExtension(tmpDir, "hello");
        tempFile = Shell.appendScriptExtension(tmpDir, "temp");
        String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" : "echo \"hello\"";
        // try-with-resources guarantees the writer is closed even if an
        // exception fires before the (previously explicit) close call.
        try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
            FileUtil.setExecutable(shellFile, true);
            writer.println(timeoutCommand);
        }
        Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
        Path path = new Path(shellFile.getAbsolutePath());
        resources.put(path, Arrays.asList(badSymlink));
        Map<String, String> env = new HashMap<String, String>();
        List<String> commands = new ArrayList<String>();
        if (Shell.WINDOWS) {
            commands.add("cmd");
            commands.add("/c");
            commands.add("\"" + badSymlink + "\"");
        } else {
            commands.add("/bin/sh ./\\\"" + badSymlink + "\\\"");
        }
        DefaultContainerExecutor defaultContainerExecutor = new DefaultContainerExecutor();
        defaultContainerExecutor.setConf(new YarnConfiguration());
        // Scope the stream so it is closed before the script is executed,
        // instead of leaking on any failure between write and close.
        try (FileOutputStream fos = new FileOutputStream(tempFile)) {
            defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands, new Path(localLogDir.getAbsolutePath()), "user", tempFile.getName());
        }
        FileUtil.setExecutable(tempFile, true);
        Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[] { tempFile.getAbsolutePath() }, tmpDir);
        shexc.execute();
        assertEquals(shexc.getExitCode(), 0);
        assert (shexc.getOutput().contains("hello"));
        symLinkFile = new File(tmpDir, badSymlink);
    } finally {
        // cleanup
        if (shellFile != null && shellFile.exists()) {
            shellFile.delete();
        }
        if (tempFile != null && tempFile.exists()) {
            tempFile.delete();
        }
        if (symLinkFile != null && symLinkFile.exists()) {
            symLinkFile.delete();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) Shell(org.apache.hadoop.util.Shell) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) FileOutputStream(java.io.FileOutputStream) List(java.util.List) ArrayList(java.util.ArrayList) JarFile(java.util.jar.JarFile) File(java.io.File) PrintWriter(java.io.PrintWriter) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Example 5 with DefaultContainerExecutor

Use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in the Apache Hadoop project.

The class BaseContainerManagerTest defines the method createContainerExecutor.

/**
 * Builds the container executor used by the base container-manager tests:
 * a DefaultContainerExecutor configured with the test configuration and
 * wrapped in a Mockito spy so subclasses can verify or stub its calls.
 */
protected ContainerExecutor createContainerExecutor() {
    final DefaultContainerExecutor executor = new DefaultContainerExecutor();
    executor.setConf(conf);
    return spy(executor);
}
Also used : DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor)

Aggregations

DefaultContainerExecutor (org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor)8 File (java.io.File)6 FileOutputStream (java.io.FileOutputStream)6 ArrayList (java.util.ArrayList)6 HashMap (java.util.HashMap)6 List (java.util.List)6 JarFile (java.util.jar.JarFile)6 Path (org.apache.hadoop.fs.Path)6 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)6 BaseContainerManagerTest (org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest)6 Test (org.junit.Test)6 Shell (org.apache.hadoop.util.Shell)5 PrintWriter (java.io.PrintWriter)4 ExitCodeException (org.apache.hadoop.util.Shell.ExitCodeException)3 ContainerExecutor (org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor)2 Configuration (org.apache.hadoop.conf.Configuration)1 ContainerStartContext (org.apache.hadoop.yarn.server.nodemanager.executor.ContainerStartContext)1