Search in sources :

Example 6 with DefaultContainerExecutor

Use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in the Apache Hadoop project.

In the class TestContainerLaunch, the method testContainerLaunchStdoutAndStderrDiagnostics:

@Test(timeout = 20000)
public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException {
    File shellFile = null;
    try {
        shellFile = Shell.appendScriptExtension(tmpDir, "hello");
        // Script echoes "hello" to stdout, "error" to stderr, then exits with code 2.
        String command = Shell.WINDOWS ? "@echo \"hello\" & @echo \"error\" 1>&2 & exit /b 2" : "echo \"hello\"; echo \"error\" 1>&2; exit 2;";
        // try-with-resources: the original leaked the writer if println threw
        try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
            FileUtil.setExecutable(shellFile, true);
            writer.println(command);
        }
        Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
        Map<String, String> env = new HashMap<String, String>();
        List<String> commands = new ArrayList<String>();
        commands.add(command);
        ContainerExecutor exec = new DefaultContainerExecutor();
        exec.setConf(new YarnConfiguration());
        // Append the generated launch environment to the script; close even on failure.
        try (FileOutputStream fos = new FileOutputStream(shellFile, true)) {
            exec.writeLaunchEnv(fos, env, resources, commands, new Path(localLogDir.getAbsolutePath()), "user");
            fos.flush();
        }
        Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[] { shellFile.getAbsolutePath() }, tmpDir);
        String diagnostics = null;
        try {
            shexc.execute();
            Assert.fail("Should catch exception");
        } catch (ExitCodeException e) {
            diagnostics = e.getMessage();
        }
        // stderr output must surface in the exception diagnostics
        Assert.assertTrue(diagnostics.contains("error"));
        // stdout output is captured separately by the executor
        Assert.assertTrue(shexc.getOutput().contains("hello"));
        // assertEquals gives an expected-vs-actual failure message, unlike assertTrue(... == 2)
        Assert.assertEquals(2, shexc.getExitCode());
    } finally {
        // cleanup
        if (shellFile != null && shellFile.exists()) {
            shellFile.delete();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) ContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) Shell(org.apache.hadoop.util.Shell) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) FileOutputStream(java.io.FileOutputStream) List(java.util.List) ArrayList(java.util.ArrayList) JarFile(java.util.jar.JarFile) File(java.io.File) PrintWriter(java.io.PrintWriter) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Example 7 with DefaultContainerExecutor

use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in project hadoop by apache.

In the class TestContainerLaunch, the method testDebuggingInformation:

@Test
public void testDebuggingInformation() throws IOException {
    File shellFile = null;
    File tempFile = null;
    Configuration conf = new YarnConfiguration();
    try {
        shellFile = Shell.appendScriptExtension(tmpDir, "hello");
        tempFile = Shell.appendScriptExtension(tmpDir, "temp");
        String testCommand = Shell.WINDOWS ? "@echo \"hello\"" : "echo \"hello\"";
        // try-with-resources: the original leaked the writer if println threw
        try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
            FileUtil.setExecutable(shellFile, true);
            writer.println(testCommand);
        }
        Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
        Map<String, String> env = new HashMap<String, String>();
        List<String> commands = new ArrayList<String>();
        if (Shell.WINDOWS) {
            commands.add("cmd");
            commands.add("/c");
            commands.add("\"" + shellFile.getAbsolutePath() + "\"");
        } else {
            commands.add("/bin/sh \\\"" + shellFile.getAbsolutePath() + "\\\"");
        }
        // Exercise both settings of the debug-info flag.
        boolean[] debugLogsExistArray = { false, true };
        for (boolean debugLogsExist : debugLogsExistArray) {
            conf.setBoolean(YarnConfiguration.NM_LOG_CONTAINER_DEBUG_INFO, debugLogsExist);
            ContainerExecutor exec = new DefaultContainerExecutor();
            exec.setConf(conf);
            // Close the stream even if writeLaunchEnv throws (original leaked it).
            try (FileOutputStream fos = new FileOutputStream(tempFile)) {
                exec.writeLaunchEnv(fos, env, resources, commands, new Path(localLogDir.getAbsolutePath()), "user", tempFile.getName());
                fos.flush();
            }
            FileUtil.setExecutable(tempFile, true);
            Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[] { tempFile.getAbsolutePath() }, tmpDir);
            shexc.execute();
            // JUnit convention: expected value first, actual second (original was swapped).
            assertEquals(0, shexc.getExitCode());
            File directorInfo = new File(localLogDir, ContainerExecutor.DIRECTORY_CONTENTS);
            File scriptCopy = new File(localLogDir, tempFile.getName());
            // Debug artifacts must exist iff the debug-info flag was enabled.
            Assert.assertEquals("Directory info file missing", debugLogsExist, directorInfo.exists());
            Assert.assertEquals("Copy of launch script missing", debugLogsExist, scriptCopy.exists());
            if (debugLogsExist) {
                Assert.assertTrue("Directory info file size is 0", directorInfo.length() > 0);
                Assert.assertTrue("Size of copy of launch script is 0", scriptCopy.length() > 0);
            }
        }
    } finally {
        // cleanup
        if (shellFile != null && shellFile.exists()) {
            shellFile.delete();
        }
        if (tempFile != null && tempFile.exists()) {
            tempFile.delete();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) ContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) Shell(org.apache.hadoop.util.Shell) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) FileOutputStream(java.io.FileOutputStream) List(java.util.List) ArrayList(java.util.ArrayList) JarFile(java.util.jar.JarFile) File(java.io.File) PrintWriter(java.io.PrintWriter) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Example 8 with DefaultContainerExecutor

use of org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor in project hadoop by apache.

In the class TestContainerLaunch, the method testInvalidEnvSyntaxDiagnostics:

@Test(timeout = 20000)
public void testInvalidEnvSyntaxDiagnostics() throws IOException {
    File shellFile = null;
    try {
        shellFile = Shell.appendScriptExtension(tmpDir, "hello");
        Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
        Map<String, String> env = new HashMap<String, String>();
        // Deliberately malformed env value (embedded newlines) to trigger a script error.
        env.put("APPLICATION_WORKFLOW_CONTEXT", "{\"workflowId\":\"609f91c5cd83\"," + "\"workflowName\":\"\n\ninsert table " + "\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, ");
        List<String> commands = new ArrayList<String>();
        DefaultContainerExecutor defaultContainerExecutor = new DefaultContainerExecutor();
        defaultContainerExecutor.setConf(new YarnConfiguration());
        // try-with-resources: the original leaked the stream if writeLaunchEnv threw
        try (FileOutputStream fos = new FileOutputStream(shellFile)) {
            FileUtil.setExecutable(shellFile, true);
            defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands, new Path(localLogDir.getAbsolutePath()), "user");
            fos.flush();
        }
        // Pin LANG=C so the shell's error message text is locale-independent.
        Map<String, String> cmdEnv = new HashMap<String, String>();
        cmdEnv.put("LANG", "C");
        Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(new String[] { shellFile.getAbsolutePath() }, tmpDir, cmdEnv);
        String diagnostics = null;
        try {
            shexc.execute();
            Assert.fail("Should catch exception");
        } catch (ExitCodeException e) {
            diagnostics = e.getMessage();
        }
        // The broken env line must produce the platform's "command not found" diagnostic.
        Assert.assertTrue(diagnostics.contains(Shell.WINDOWS ? "is not recognized as an internal or external command" : "command not found"));
        Assert.assertTrue(shexc.getExitCode() != 0);
    } finally {
        // cleanup
        if (shellFile != null && shellFile.exists()) {
            shellFile.delete();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ExitCodeException(org.apache.hadoop.util.Shell.ExitCodeException) DefaultContainerExecutor(org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor) Shell(org.apache.hadoop.util.Shell) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) FileOutputStream(java.io.FileOutputStream) List(java.util.List) ArrayList(java.util.ArrayList) JarFile(java.util.jar.JarFile) File(java.io.File) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Aggregations

DefaultContainerExecutor (org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor)8 File (java.io.File)6 FileOutputStream (java.io.FileOutputStream)6 ArrayList (java.util.ArrayList)6 HashMap (java.util.HashMap)6 List (java.util.List)6 JarFile (java.util.jar.JarFile)6 Path (org.apache.hadoop.fs.Path)6 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)6 BaseContainerManagerTest (org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest)6 Test (org.junit.Test)6 Shell (org.apache.hadoop.util.Shell)5 PrintWriter (java.io.PrintWriter)4 ExitCodeException (org.apache.hadoop.util.Shell.ExitCodeException)3 ContainerExecutor (org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor)2 Configuration (org.apache.hadoop.conf.Configuration)1 ContainerStartContext (org.apache.hadoop.yarn.server.nodemanager.executor.ContainerStartContext)1