Search in sources:

Example 66 with ApplicationSubmissionContext

use of org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext in project hadoop by apache.

In class TestYARNRunner: method testAMAdminCommandOpts.

/**
 * Verifies that both the admin AM command opts and the user AM command opts
 * make it into the AM launch command, that the admin opts precede the user
 * opts (so user opts can override them), and that a java.io.tmpdir setting
 * is present. Also asserts profiling is off by default.
 */
@Test(timeout = 20000)
public void testAMAdminCommandOpts() throws Exception {
    JobConf jobConf = new JobConf();
    jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true");
    jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m");
    YARNRunner yarnRunner = new YARNRunner(jobConf);
    ApplicationSubmissionContext submissionContext = buildSubmitContext(yarnRunner, jobConf);
    ContainerLaunchContext containerSpec = submissionContext.getAMContainerSpec();
    List<String> commands = containerSpec.getCommands();
    int index = 0;
    int adminIndex = 0;
    int adminPos = -1;
    int userIndex = 0;
    int userPos = -1;
    int tmpDirPos = -1;
    for (String command : commands) {
        if (command != null) {
            assertFalse("Profiler should be disabled by default", command.contains(PROFILE_PARAMS));
            // Latch each position only when the option is actually found in
            // this command; the previous code overwrote the positions on
            // every iteration, so a later command without the option would
            // erase a match found in an earlier command.
            int pos = command.indexOf("-Djava.net.preferIPv4Stack=true");
            if (pos >= 0) {
                adminPos = pos;
                adminIndex = index;
            }
            pos = command.indexOf("-Xmx1024m");
            if (pos >= 0) {
                userPos = pos;
                userIndex = index;
            }
            pos = command.indexOf("-Djava.io.tmpdir=");
            if (pos >= 0) {
                tmpDirPos = pos;
            }
        }
        index++;
    }
    // Check java.io.tmpdir opts are set in the commands.
    // (> 0 is safe: the launch command always starts with the java binary,
    // so a real match can never sit at offset 0.)
    assertTrue("java.io.tmpdir is not set for AM", tmpDirPos > 0);
    // Check both admin java opts and user java opts are in the commands
    assertTrue("AM admin command opts not in the commands.", adminPos > 0);
    assertTrue("AM user command opts not in the commands.", userPos > 0);
    // Check the admin java opts appear before user java opts: same command
    // string -> compare offsets, different commands -> compare command index.
    if (adminIndex == userIndex) {
        assertTrue("AM admin command opts is after user command opts.", adminPos < userPos);
    } else {
        assertTrue("AM admin command opts is after user command opts.", adminIndex < userIndex);
    }
}
Also used : ApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext) Test(org.junit.Test)

Example 67 with ApplicationSubmissionContext

use of org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext in project hadoop by apache.

In class TestYARNRunner: method testNodeLabelExp.

/**
 * Verifies that the job-level and AM-level node label expressions set in the
 * job configuration are propagated into the application submission context.
 */
@Test
public void testNodeLabelExp() throws Exception {
    JobConf jobConf = new JobConf();
    jobConf.set(MRJobConfig.JOB_NODE_LABEL_EXP, "GPU");
    jobConf.set(MRJobConfig.AM_NODE_LABEL_EXP, "highMem");
    YARNRunner yarnRunner = new YARNRunner(jobConf);
    ApplicationSubmissionContext appSubCtx = buildSubmitContext(yarnRunner, jobConf);
    // assertEquals(expected, actual): the original call had the arguments
    // reversed, which produces misleading "expected X but was Y" messages.
    assertEquals("GPU", appSubCtx.getNodeLabelExpression());
    assertEquals("highMem", appSubCtx.getAMContainerResourceRequest().getNodeLabelExpression());
}
Also used : ApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) Test(org.junit.Test)

Example 68 with ApplicationSubmissionContext

use of org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext in project hadoop by apache.

In class TestYARNRunner: method testSendJobConf.

// Test configs that match regex expression should be set in
// containerLaunchContext
@Test
public void testSendJobConf() throws IOException {
    JobConf jobConf = new JobConf();
    jobConf.set("dfs.nameservices", "mycluster1,mycluster2");
    jobConf.set("dfs.namenode.rpc-address.mycluster2.nn1", "123.0.0.1");
    jobConf.set("dfs.namenode.rpc-address.mycluster2.nn2", "123.0.0.2");
    jobConf.set("dfs.ha.namenodes.mycluster2", "nn1,nn2");
    jobConf.set("dfs.client.failover.proxy.provider.mycluster2", "provider");
    jobConf.set("hadoop.tmp.dir", "testconfdir");
    jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    jobConf.set("mapreduce.job.send-token-conf", "dfs.nameservices|^dfs.namenode.rpc-address.*$|^dfs.ha.namenodes.*$" + "|^dfs.client.failover.proxy.provider.*$" + "|dfs.namenode.kerberos.principal");
    UserGroupInformation.setConfiguration(jobConf);
    YARNRunner yarnRunner = new YARNRunner(jobConf);
    ApplicationSubmissionContext submissionContext = buildSubmitContext(yarnRunner, jobConf);
    Configuration confSent = BuilderUtils.parseTokensConf(submissionContext);
    // configs that match regex should be included
    Assert.assertTrue(confSent.get("dfs.namenode.rpc-address.mycluster2.nn1").equals("123.0.0.1"));
    Assert.assertTrue(confSent.get("dfs.namenode.rpc-address.mycluster2.nn2").equals("123.0.0.2"));
    // configs that aren't matching regex should not be included
    Assert.assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent.get("hadoop.tmp.dir").equals("testconfdir"));
    UserGroupInformation.reset();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) ApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) Test(org.junit.Test)

Example 69 with ApplicationSubmissionContext

use of org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext in project hadoop by apache.

In class TestYARNRunner: method testAMStandardEnv.

/**
 * Shared driver that checks the AM container's standard environment:
 * LD_LIBRARY_PATH must start with the container PWD, followed by the admin
 * and then user library paths (or the default admin env when no custom
 * paths are configured), and SHELL must reflect the admin user shell.
 *
 * @param customLibPath whether to configure explicit admin/user lib paths
 */
private void testAMStandardEnv(boolean customLibPath) throws Exception {
    // The Windows behavior is different and this test currently doesn't
    // really apply; MAPREDUCE-6588 should revisit this test.
    assumeNotWindows();
    final String adminLibPath = "foo";
    final String userLibPath = "bar";
    final String adminShell = "shell";
    final String pathKey = Environment.LD_LIBRARY_PATH.name();
    JobConf jobConf = new JobConf();
    if (customLibPath) {
        jobConf.set(MRJobConfig.MR_AM_ADMIN_USER_ENV, pathKey + "=" + adminLibPath);
        jobConf.set(MRJobConfig.MR_AM_ENV, pathKey + "=" + userLibPath);
    }
    jobConf.set(MRJobConfig.MAPRED_ADMIN_USER_SHELL, adminShell);
    YARNRunner runner = new YARNRunner(jobConf);
    ApplicationSubmissionContext submissionContext = buildSubmitContext(runner, jobConf);
    // make sure PWD is first in the lib path
    ContainerLaunchContext launchContext = submissionContext.getAMContainerSpec();
    Map<String, String> env = launchContext.getEnvironment();
    String actualLibPath = env.get(pathKey);
    assertNotNull(pathKey + " not set", actualLibPath);
    // Separator depends on whether cross-platform submission is enabled.
    String separator;
    if (jobConf.getBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM)) {
        separator = ApplicationConstants.CLASS_PATH_SEPARATOR;
    } else {
        separator = File.pathSeparator;
    }
    StringBuilder expectedLibPath = new StringBuilder(MRApps.crossPlatformifyMREnv(conf, Environment.PWD));
    if (customLibPath) {
        // append admin libpath and user libpath
        expectedLibPath.append(separator).append(adminLibPath).append(separator).append(userLibPath);
    } else {
        // No custom paths: the default admin env value (minus the "KEY=")
        // is appended after PWD.
        expectedLibPath.append(separator).append(MRJobConfig.DEFAULT_MR_AM_ADMIN_USER_ENV.substring(pathKey.length() + 1));
    }
    assertEquals("Bad AM " + pathKey + " setting", expectedLibPath.toString(), actualLibPath);
    // make sure SHELL is set
    String shell = env.get(Environment.SHELL.name());
    assertNotNull("SHELL not set", shell);
    assertEquals("Bad SHELL setting", adminShell, shell);
}
Also used : ApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)

Example 70 with ApplicationSubmissionContext

use of org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext in project hadoop by apache.

In class TestYARNRunner: method testWarnCommandOpts.

/**
 * Verifies that configuring -Djava.library.path in either the admin or user
 * AM command opts produces the corresponding WARN log messages advising use
 * of the LD_LIBRARY_PATH env settings instead.
 */
@Test(timeout = 20000)
public void testWarnCommandOpts() throws Exception {
    Logger logger = Logger.getLogger(YARNRunner.class);
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    Layout layout = new SimpleLayout();
    Appender appender = new WriterAppender(layout, bout);
    logger.addAppender(appender);
    try {
        JobConf jobConf = new JobConf();
        jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true -Djava.library.path=foo");
        jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m -Djava.library.path=bar");
        YARNRunner yarnRunner = new YARNRunner(jobConf);
        @SuppressWarnings("unused") ApplicationSubmissionContext submissionContext = buildSubmitContext(yarnRunner, jobConf);
        String logMsg = bout.toString();
        assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + "yarn.app.mapreduce.am.admin-command-opts can cause programs to no " + "longer function if hadoop native libraries are used. These values " + "should be set as part of the LD_LIBRARY_PATH in the app master JVM " + "env using yarn.app.mapreduce.am.admin.user.env config settings."));
        assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + "yarn.app.mapreduce.am.command-opts can cause programs to no longer " + "function if hadoop native libraries are used. These values should " + "be set as part of the LD_LIBRARY_PATH in the app master JVM env " + "using yarn.app.mapreduce.am.env config settings."));
    } finally {
        // Detach the capturing appender; the YARNRunner logger is shared
        // global state, and leaving the appender attached would leak log
        // capture (and the stream) into every subsequent test.
        logger.removeAppender(appender);
    }
}
Also used : WriterAppender(org.apache.log4j.WriterAppender) Appender(org.apache.log4j.Appender) SimpleLayout(org.apache.log4j.SimpleLayout) SimpleLayout(org.apache.log4j.SimpleLayout) Layout(org.apache.log4j.Layout) ApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) ByteArrayOutputStream(java.io.ByteArrayOutputStream) WriterAppender(org.apache.log4j.WriterAppender) Logger(org.apache.log4j.Logger) Test(org.junit.Test)

Aggregations

ApplicationSubmissionContext (org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)86 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)46 ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext)42 Test (org.junit.Test)29 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)22 Resource (org.apache.hadoop.yarn.api.records.Resource)21 IOException (java.io.IOException)18 ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId)18 Configuration (org.apache.hadoop.conf.Configuration)15 ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport)15 Priority (org.apache.hadoop.yarn.api.records.Priority)15 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)15 ByteBuffer (java.nio.ByteBuffer)14 RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp)14 SubmitApplicationRequest (org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest)13 YarnClientApplication (org.apache.hadoop.yarn.client.api.YarnClientApplication)13 RMAppAttemptMetrics (org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics)13 LocalResource (org.apache.hadoop.yarn.api.records.LocalResource)12 RMAppAttemptImpl (org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl)11 ArrayList (java.util.ArrayList)10