Search in sources:

Example 11 with MRClientProtocol

use of org.apache.hadoop.mapreduce.v2.api.MRClientProtocol in project hadoop by apache.

the class TestJHSSecurity method getMRClientProtocol.

private MRClientProtocol getMRClientProtocol(Token token, final InetSocketAddress hsAddress, String user, final Configuration conf) {
    // Build a remote-user UGI and attach the history-server delegation token
    // (converted from the YARN record form) so RPC authenticates as 'user'.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
    ugi.addToken(ConverterUtils.convertFromYarn(token, hsAddress));
    final YarnRPC rpc = YarnRPC.create(conf);
    // Open the proxy inside doAs so the connection is established under the
    // remote user's credentials (and thus with the attached token).
    return ugi.doAs(new PrivilegedAction<MRClientProtocol>() {

        @Override
        public MRClientProtocol run() {
            // HSClientProtocol proxy is returned through the MRClientProtocol view.
            return (MRClientProtocol) rpc.getProxy(HSClientProtocol.class, hsAddress, conf);
        }
    });
}
Also used : YarnRPC(org.apache.hadoop.yarn.ipc.YarnRPC) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol)

Example 12 with MRClientProtocol

use of org.apache.hadoop.mapreduce.v2.api.MRClientProtocol in project hadoop by apache.

the class TestRPCFactories method testPbClientFactory.

/**
 * Starts a protobuf RPC server for {@code MRClientProtocol} and verifies that
 * the matching protobuf client factory can build a client proxy against it.
 */
private void testPbClientFactory() {
    // Port 0: let the OS pick an ephemeral port; the real bound address is
    // obtained from the server after start().
    InetSocketAddress addr = new InetSocketAddress(0);
    System.err.println(addr.getHostName() + addr.getPort());
    Configuration conf = new Configuration();
    MRClientProtocol instance = new MRClientProtocolTestImpl();
    Server server = null;
    try {
        server = RpcServerFactoryPBImpl.get().getServer(MRClientProtocol.class, instance, addr, conf, null, 1);
        server.start();
        System.err.println(server.getListenerAddress());
        System.err.println(NetUtils.getConnectAddress(server));
        MRClientProtocol client = null;
        try {
            client = (MRClientProtocol) RpcClientFactoryPBImpl.get().getClient(MRClientProtocol.class, 1, NetUtils.getConnectAddress(server), conf);
        } catch (YarnRuntimeException e) {
            e.printStackTrace();
            Assert.fail("Failed to create client");
        }
    } catch (YarnRuntimeException e) {
        e.printStackTrace();
        Assert.fail("Failed to create server");
    } finally {
        // getServer() may have thrown before 'server' was assigned; guard so an
        // NPE here does not mask the original failure.
        if (server != null) {
            server.stop();
        }
    }
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) Configuration(org.apache.hadoop.conf.Configuration) Server(org.apache.hadoop.ipc.Server) InetSocketAddress(java.net.InetSocketAddress) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol)

Example 13 with MRClientProtocol

use of org.apache.hadoop.mapreduce.v2.api.MRClientProtocol in project hadoop by apache.

the class MRDelegationTokenRenewer method renew.

@Override
public long renew(Token<?> token, Configuration conf) throws IOException, InterruptedException {
    org.apache.hadoop.yarn.api.records.Token dToken = org.apache.hadoop.yarn.api.records.Token.newInstance(token.getIdentifier(), token.getKind().toString(), token.getPassword(), token.getService().toString());
    MRClientProtocol histProxy = instantiateHistoryProxy(conf, SecurityUtil.getTokenServiceAddr(token));
    try {
        RenewDelegationTokenRequest request = Records.newRecord(RenewDelegationTokenRequest.class);
        request.setDelegationToken(dToken);
        return histProxy.renewDelegationToken(request).getNextExpirationTime();
    } finally {
        stopHistoryProxy(histProxy);
    }
}
Also used : RenewDelegationTokenRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol)

Example 14 with MRClientProtocol

use of org.apache.hadoop.mapreduce.v2.api.MRClientProtocol in project hadoop by apache.

the class MRDelegationTokenRenewer method instantiateHistoryProxy.

/**
 * Builds an {@code MRClientProtocol} proxy to the MR history server at the
 * given address, connecting as the current user.
 *
 * @param conf      configuration for the RPC layer
 * @param hsAddress history-server address to connect to
 * @return a client proxy for the history server
 * @throws IOException if the current user cannot be determined
 */
protected MRClientProtocol instantiateHistoryProxy(final Configuration conf, final InetSocketAddress hsAddress) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Connecting to MRHistoryServer at: " + hsAddress);
    }
    final YarnRPC rpc = YarnRPC.create(conf);
    UserGroupInformation caller = UserGroupInformation.getCurrentUser();
    // Create the proxy under the caller's credentials so the connection
    // carries that user's tokens.
    return caller.doAs(new PrivilegedAction<MRClientProtocol>() {

        @Override
        public MRClientProtocol run() {
            // The HS client protocol proxy is exposed via MRClientProtocol.
            return (MRClientProtocol) rpc.getProxy(HSClientProtocol.class, hsAddress, conf);
        }
    });
}
Also used : YarnRPC(org.apache.hadoop.yarn.ipc.YarnRPC) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol)

Example 15 with MRClientProtocol

use of org.apache.hadoop.mapreduce.v2.api.MRClientProtocol in project hadoop by apache.

the class TestMRClientService method testViewAclOnlyCannotModify.

/**
 * Verifies that a user granted only the VIEW_JOB ACL can see the job but is
 * denied every modify operation (kill job/task/attempt, fail attempt) with an
 * AccessControlException.
 */
@Test
public void testViewAclOnlyCannotModify() throws Exception {
    final MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
    final Configuration conf = new Configuration();
    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
    conf.set(MRJobConfig.JOB_ACL_VIEW_JOB, "viewonlyuser");
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task task = it.next();
    app.waitForState(task, TaskState.RUNNING);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    app.waitForState(attempt, TaskAttemptState.RUNNING);
    // Sanity-check the ACLs directly before exercising them over RPC.
    UserGroupInformation viewOnlyUser = UserGroupInformation.createUserForTesting("viewonlyuser", new String[] {});
    Assert.assertTrue("viewonlyuser cannot view job", job.checkAccess(viewOnlyUser, JobACL.VIEW_JOB));
    Assert.assertFalse("viewonlyuser can modify job", job.checkAccess(viewOnlyUser, JobACL.MODIFY_JOB));
    // Connect to the AM's client service as the view-only user so the server
    // enforces ACLs against that identity.
    MRClientProtocol client = viewOnlyUser.doAs(new PrivilegedExceptionAction<MRClientProtocol>() {

        @Override
        public MRClientProtocol run() throws Exception {
            YarnRPC rpc = YarnRPC.create(conf);
            return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class, app.clientService.getBindAddress(), conf);
        }
    });
    KillJobRequest killJobRequest = recordFactory.newRecordInstance(KillJobRequest.class);
    killJobRequest.setJobId(app.getJobId());
    try {
        client.killJob(killJobRequest);
        fail("viewonlyuser killed job");
    } catch (AccessControlException e) {
    // pass: modify operation correctly rejected
    }
    KillTaskRequest killTaskRequest = recordFactory.newRecordInstance(KillTaskRequest.class);
    killTaskRequest.setTaskId(task.getID());
    try {
        client.killTask(killTaskRequest);
        fail("viewonlyuser killed task");
    } catch (AccessControlException e) {
    // pass: modify operation correctly rejected
    }
    KillTaskAttemptRequest killTaskAttemptRequest = recordFactory.newRecordInstance(KillTaskAttemptRequest.class);
    killTaskAttemptRequest.setTaskAttemptId(attempt.getID());
    try {
        client.killTaskAttempt(killTaskAttemptRequest);
        fail("viewonlyuser killed task attempt");
    } catch (AccessControlException e) {
    // pass: modify operation correctly rejected
    }
    FailTaskAttemptRequest failTaskAttemptRequest = recordFactory.newRecordInstance(FailTaskAttemptRequest.class);
    failTaskAttemptRequest.setTaskAttemptId(attempt.getID());
    try {
        client.failTaskAttempt(failTaskAttemptRequest);
        // Fixed copy-paste: this branch exercises failTaskAttempt, not kill.
        fail("viewonlyuser failed task attempt");
    } catch (AccessControlException e) {
    // pass: modify operation correctly rejected
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Configuration(org.apache.hadoop.conf.Configuration) FailTaskAttemptRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest) AccessControlException(org.apache.hadoop.security.AccessControlException) YarnRPC(org.apache.hadoop.yarn.ipc.YarnRPC) IOException(java.io.IOException) AccessControlException(org.apache.hadoop.security.AccessControlException) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol) KillTaskAttemptRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest) KillJobRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest) KillTaskRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Test(org.junit.Test)

Aggregations

MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol)28 Test (org.junit.Test)16 IOException (java.io.IOException)13 Configuration (org.apache.hadoop.conf.Configuration)10 InetSocketAddress (java.net.InetSocketAddress)8 JobStatus (org.apache.hadoop.mapreduce.JobStatus)7 GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest)7 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)6 YarnRPC (org.apache.hadoop.yarn.ipc.YarnRPC)6 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)5 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)5 GetDelegationTokenRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest)4 Text (org.apache.hadoop.io.Text)3 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)3 Task (org.apache.hadoop.mapreduce.v2.app.job.Task)3 TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)3 VisibleForTesting (com.google.common.annotations.VisibleForTesting)2 Server (org.apache.hadoop.ipc.Server)2 GetDelegationTokenResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse)2 GetDiagnosticsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest)2