Example 41 with AMRMTokenIdentifier

Use of org.apache.hadoop.yarn.security.AMRMTokenIdentifier in project hadoop by apache.

From the class TestRMContainerAllocator, method testAMRMTokenUpdate.

@Test(timeout = 60000)
public void testAMRMTokenUpdate() throws Exception {
    LOG.info("Running testAMRMTokenUpdate");
    final String rmAddr = "somermaddress:1234";
    final Configuration conf = new YarnConfiguration();
    conf.setLong(YarnConfiguration.RM_AMRM_TOKEN_MASTER_KEY_ROLLING_INTERVAL_SECS, 8);
    conf.setLong(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 2000);
    conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, rmAddr);
    final MyResourceManager rm = new MyResourceManager(conf);
    rm.start();
    AMRMTokenSecretManager secretMgr = rm.getRMContext().getAMRMTokenSecretManager();
    DrainDispatcher dispatcher = (DrainDispatcher) rm.getRMContext().getDispatcher();
    // Submit the application
    RMApp app = rm.submitApp(1024);
    dispatcher.await();
    MockNM amNodeManager = rm.registerNode("amNM:1234", 2048);
    amNodeManager.nodeHeartbeat(true);
    dispatcher.await();
    final ApplicationAttemptId appAttemptId = app.getCurrentAppAttempt().getAppAttemptId();
    final ApplicationId appId = app.getApplicationId();
    rm.sendAMLaunched(appAttemptId);
    dispatcher.await();
    JobId jobId = MRBuilderUtils.newJobId(appAttemptId.getApplicationId(), 0);
    final Job mockJob = mock(Job.class);
    when(mockJob.getReport()).thenReturn(MRBuilderUtils.newJobReport(jobId, "job", "user", JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "jobfile", null, false, ""));
    final Token<AMRMTokenIdentifier> oldToken = rm.getRMContext().getRMApps().get(appId).getRMAppAttempt(appAttemptId).getAMRMToken();
    Assert.assertNotNull("app should have a token", oldToken);
    UserGroupInformation testUgi = UserGroupInformation.createUserForTesting("someuser", new String[0]);
    Token<AMRMTokenIdentifier> newToken = testUgi.doAs(new PrivilegedExceptionAction<Token<AMRMTokenIdentifier>>() {

        @Override
        public Token<AMRMTokenIdentifier> run() throws Exception {
            MyContainerAllocator allocator = new MyContainerAllocator(rm, conf, appAttemptId, mockJob);
            // Keep heartbeating until RM thinks the token has been updated
            Token<AMRMTokenIdentifier> currentToken = oldToken;
            long startTime = Time.monotonicNow();
            while (currentToken == oldToken) {
                if (Time.monotonicNow() - startTime > 20000) {
                    Assert.fail("Took too long to see AMRM token change");
                }
                Thread.sleep(100);
                allocator.schedule();
                currentToken = rm.getRMContext().getRMApps().get(appId).getRMAppAttempt(appAttemptId).getAMRMToken();
            }
            return currentToken;
        }
    });
    // verify there is only one AMRM token in the UGI and it matches the
    // updated token from the RM
    int tokenCount = 0;
    Token<? extends TokenIdentifier> ugiToken = null;
    for (Token<? extends TokenIdentifier> token : testUgi.getTokens()) {
        if (AMRMTokenIdentifier.KIND_NAME.equals(token.getKind())) {
            ugiToken = token;
            ++tokenCount;
        }
    }
    Assert.assertEquals("too many AMRM tokens", 1, tokenCount);
    Assert.assertArrayEquals("token identifier not updated", newToken.getIdentifier(), ugiToken.getIdentifier());
    Assert.assertArrayEquals("token password not updated", newToken.getPassword(), ugiToken.getPassword());
    Assert.assertEquals("AMRM token service not updated", new Text(rmAddr), ugiToken.getService());
}
Also used : DrainDispatcher(org.apache.hadoop.yarn.event.DrainDispatcher) RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) MockNM(org.apache.hadoop.yarn.server.resourcemanager.MockNM) NMToken(org.apache.hadoop.yarn.api.records.NMToken) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) AMRMTokenSecretManager(org.apache.hadoop.yarn.server.resourcemanager.security.AMRMTokenSecretManager) IOException(java.io.IOException) YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Test(org.junit.Test)
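
The verification loop above identifies the AMRM token in the UGI by its kind. The same lookup can be factored into a small standalone helper; the sketch below is hypothetical (the class and method names are not part of the test) and only mirrors the kind-based check the test performs.

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;

// Hypothetical helper: mirrors the kind-based lookup the test performs on the UGI's tokens.
public final class AmrmTokenLookup {

    private AmrmTokenLookup() {
    }

    // Returns the AMRM token held by the UGI, or null if none is present.
    public static Token<? extends TokenIdentifier> findAmrmToken(UserGroupInformation ugi) {
        for (Token<? extends TokenIdentifier> token : ugi.getTokens()) {
            if (AMRMTokenIdentifier.KIND_NAME.equals(token.getKind())) {
                return token;
            }
        }
        return null;
    }
}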

Example 42 with AMRMTokenIdentifier

Use of org.apache.hadoop.yarn.security.AMRMTokenIdentifier in project hadoop by apache.

From the class TestPipeApplication, method testPipesReduser.

/**
   * Test org.apache.hadoop.mapred.pipes.PipesReducer:
   * verify the transfer of data (key and values).
   *
   * @throws Exception
   */
@Test
public void testPipesReduser() throws Exception {
    File[] psw = cleanTokenPasswordFile();
    JobConf conf = new JobConf();
    try {
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.configure(conf);
        BooleanWritable bw = new BooleanWritable(true);
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
        initStdOut(conf);
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(new Counters.Counter(), new Progress());
        Reporter reporter = new TestTaskReporter();
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));
        reducer.reduce(bw, texts.iterator(), output, reporter);
        reducer.close();
        String stdOut = readStdOut(conf);
        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
Also used : Reporter(org.apache.hadoop.mapred.Reporter) ArrayList(java.util.ArrayList) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) Counter(org.apache.hadoop.mapred.Counters.Counter) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Counters(org.apache.hadoop.mapred.Counters) File(java.io.File) JobConf(org.apache.hadoop.mapred.JobConf) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
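
Examples 43 and 44 below repeat the same dummy-token setup used at the top of this test. As a standalone sketch it can be isolated as follows; the class name is hypothetical, and the identifier, password, kind, and service strings are the placeholders taken from the tests.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;

// Hypothetical helper: builds a JobConf whose credentials carry a placeholder job token,
// which is all the pipes tests need to satisfy TokenCache lookups.
public final class DummyJobTokenSetup {

    private DummyJobTokenSetup() {
    }

    public static JobConf newConfWithDummyJobToken() {
        JobConf conf = new JobConf();
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
                "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        return conf;
    }
}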

Example 43 with AMRMTokenIdentifier

Use of org.apache.hadoop.yarn.security.AMRMTokenIdentifier in project hadoop by apache.

From the class TestPipeApplication, method testApplication.

/**
   * Test org.apache.hadoop.mapred.pipes.Application:
   * exercise the internal message types MessageType.REGISTER_COUNTER, INCREMENT_COUNTER, STATUS, PROGRESS...
   *
   * @throws Throwable
   */
@Test
public void testApplication() throws Throwable {
    JobConf conf = new JobConf();
    RecordReader<FloatWritable, NullWritable> rReader = new Reader();
    // client for test
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationStub");
    TestTaskReporter reporter = new TestTaskReporter();
    File[] psw = cleanTokenPasswordFile();
    try {
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        FakeCollector output = new FakeCollector(new Counters.Counter(), new Progress());
        FileSystem fs = new RawLocalFileSystem();
        fs.initialize(FsConstants.LOCAL_FS_URI, conf);
        Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(new Path(workSpace.getAbsolutePath() + File.separator + "outfile")), IntWritable.class, Text.class, null, null, true);
        output.setWriter(wr);
        conf.set(Submitter.PRESERVE_COMMANDFILE, "true");
        initStdOut(conf);
        Application<WritableComparable<IntWritable>, Writable, IntWritable, Text> application = new Application<WritableComparable<IntWritable>, Writable, IntWritable, Text>(conf, rReader, output, reporter, IntWritable.class, Text.class);
        application.getDownlink().flush();
        application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));
        application.getDownlink().flush();
        application.waitForFinish();
        wr.close();
        // test getDownlink().mapItem();
        String stdOut = readStdOut(conf);
        assertTrue(stdOut.contains("key:3"));
        assertTrue(stdOut.contains("value:txt"));
        // the reporter's counter and status should have been sent
        // test MessageType.REGISTER_COUNTER and INCREMENT_COUNTER
        assertEquals(1.0, reporter.getProgress(), 0.01);
        assertNotNull(reporter.getCounter("group", "name"));
        // test status MessageType.STATUS
        assertEquals(reporter.getStatus(), "PROGRESS");
        stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator + "outfile"));
        // check MessageType.PROGRESS
        assertEquals(0.55f, rReader.getProgress(), 0.001);
        application.getDownlink().close();
        // test MessageType.OUTPUT
        Entry<IntWritable, Text> entry = output.getCollect().entrySet().iterator().next();
        assertEquals(123, entry.getKey().get());
        assertEquals("value", entry.getValue().toString());
        try {
            // try to abort
            application.abort(new Throwable());
            fail();
        } catch (IOException e) {
            // abort is expected to fail with this message
            assertEquals("pipe child exception", e.getMessage());
        }
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
Also used : RawLocalFileSystem(org.apache.hadoop.fs.RawLocalFileSystem) RecordReader(org.apache.hadoop.mapred.RecordReader) NullWritable(org.apache.hadoop.io.NullWritable) Writable(org.apache.hadoop.io.Writable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Token(org.apache.hadoop.security.token.Token) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) FileSystem(org.apache.hadoop.fs.FileSystem) RawLocalFileSystem(org.apache.hadoop.fs.RawLocalFileSystem) JobConf(org.apache.hadoop.mapred.JobConf) IntWritable(org.apache.hadoop.io.IntWritable) Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) NullWritable(org.apache.hadoop.io.NullWritable) Counter(org.apache.hadoop.mapred.Counters.Counter) FloatWritable(org.apache.hadoop.io.FloatWritable) WritableComparable(org.apache.hadoop.io.WritableComparable) Counters(org.apache.hadoop.mapred.Counters) File(java.io.File) Writer(org.apache.hadoop.mapred.IFile.Writer) Test(org.junit.Test)

Example 44 with AMRMTokenIdentifier

Use of org.apache.hadoop.yarn.security.AMRMTokenIdentifier in project hadoop by apache.

From the class TestPipeApplication, method testRunner.

/**
   * Test PipesMapRunner: verify the transfer of data from the reader.
   *
   * @throws Exception
   */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
        RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
        JobConf conf = new JobConf();
        conf.set(Submitter.IS_JAVA_RR, "true");
        // for stdout and stderr
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
        CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(new Counters.Counter(), new Progress());
        FileSystem fs = new RawLocalFileSystem();
        fs.initialize(FsConstants.LOCAL_FS_URI, conf);
        Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(new Path(workSpace + File.separator + "outfile")), IntWritable.class, Text.class, null, null, true);
        output.setWriter(wr);
        // stub for client
        File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        TestTaskReporter reporter = new TestTaskReporter();
        PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();
        initStdOut(conf);
        runner.configure(conf);
        runner.run(rReader, output, reporter);
        String stdOut = readStdOut(conf);
        // test part of the translated data; the client and the test share the client's stdOut as a common file
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // verify that all data from the reader has been sent
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
Also used : RawLocalFileSystem(org.apache.hadoop.fs.RawLocalFileSystem) Token(org.apache.hadoop.security.token.Token) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) FileSystem(org.apache.hadoop.fs.FileSystem) RawLocalFileSystem(org.apache.hadoop.fs.RawLocalFileSystem) JobConf(org.apache.hadoop.mapred.JobConf) IntWritable(org.apache.hadoop.io.IntWritable) Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) NullWritable(org.apache.hadoop.io.NullWritable) Counter(org.apache.hadoop.mapred.Counters.Counter) FloatWritable(org.apache.hadoop.io.FloatWritable) Counters(org.apache.hadoop.mapred.Counters) File(java.io.File) Writer(org.apache.hadoop.mapred.IFile.Writer) Test(org.junit.Test)
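
Examples 43 and 44 above also share the step of opening an IFile.Writer on the raw local filesystem before driving the application or map runner. The sketch below isolates that setup with the same constructor arguments the tests use; the class name and the output-path parameter are hypothetical.

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.IFile.Writer;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical helper: opens an IFile.Writer on the local filesystem at a caller-chosen path.
public final class LocalIFileWriterFactory {

    private LocalIFileWriterFactory() {
    }

    public static Writer<IntWritable, Text> newLocalWriter(JobConf conf, String outFile)
            throws IOException {
        FileSystem fs = new RawLocalFileSystem();
        fs.initialize(FsConstants.LOCAL_FS_URI, conf);
        // Same argument shape as the tests: no codec, no spill counter, writer owns the stream.
        return new Writer<IntWritable, Text>(conf, fs.create(new Path(outFile)),
                IntWritable.class, Text.class, null, null, true);
    }
}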

Example 45 with AMRMTokenIdentifier

Use of org.apache.hadoop.yarn.security.AMRMTokenIdentifier in project hadoop by apache.

From the class AMSimulator, method registerAM.

private void registerAM() throws YarnException, IOException, InterruptedException {
    // register application master
    final RegisterApplicationMasterRequest amRegisterRequest = Records.newRecord(RegisterApplicationMasterRequest.class);
    amRegisterRequest.setHost("localhost");
    amRegisterRequest.setRpcPort(1000);
    amRegisterRequest.setTrackingUrl("localhost:1000");
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(appAttemptId.toString());
    Token<AMRMTokenIdentifier> token = rm.getRMContext().getRMApps().get(appId).getRMAppAttempt(appAttemptId).getAMRMToken();
    ugi.addTokenIdentifier(token.decodeIdentifier());
    ugi.doAs(new PrivilegedExceptionAction<RegisterApplicationMasterResponse>() {

        @Override
        public RegisterApplicationMasterResponse run() throws Exception {
            return rm.getApplicationMasterService().registerApplicationMaster(amRegisterRequest);
        }
    });
    LOG.info(MessageFormat.format("Register the application master for application {0}", appId));
}
Also used : AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) RegisterApplicationMasterResponse(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse) RegisterApplicationMasterRequest(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
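
registerAM() above builds a remote-user UGI named after the application attempt and attaches the decoded AMRM token identifier before the doAs call. That step can be sketched on its own as follows; the class and method names are hypothetical and not part of AMSimulator.

import java.io.IOException;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;

// Hypothetical helper: builds the UGI that AMSimulator.registerAM() uses for its doAs call.
public final class AmUgiFactory {

    private AmUgiFactory() {
    }

    public static UserGroupInformation newAmUgi(ApplicationAttemptId appAttemptId,
            Token<AMRMTokenIdentifier> amrmToken) throws IOException {
        // The UGI is named after the attempt id and carries the decoded AMRM token identifier.
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(appAttemptId.toString());
        ugi.addTokenIdentifier(amrmToken.decodeIdentifier());
        return ugi;
    }
}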

Aggregations

AMRMTokenIdentifier (org.apache.hadoop.yarn.security.AMRMTokenIdentifier): 48 usages
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 21 usages
Text (org.apache.hadoop.io.Text): 17 usages
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 17 usages
Test (org.junit.Test): 13 usages
IOException (java.io.IOException): 12 usages
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 11 usages
Token (org.apache.hadoop.security.token.Token): 9 usages
AllocateResponse (org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse): 9 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 7 usages
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 7 usages
Credentials (org.apache.hadoop.security.Credentials): 6 usages
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 6 usages
MockNM (org.apache.hadoop.yarn.server.resourcemanager.MockNM): 6 usages
File (java.io.File): 5 usages
ArrayList (java.util.ArrayList): 5 usages
Configuration (org.apache.hadoop.conf.Configuration): 5 usages
AllocateRequest (org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest): 5 usages
NMToken (org.apache.hadoop.yarn.api.records.NMToken): 5 usages
Token (org.apache.hadoop.yarn.api.records.Token): 5 usages