Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.
The class TestMiniMRProxyUser, method testValidProxyUser:
@Test
public void testValidProxyUser() throws Exception {
  UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser());
  ugi.doAs(new PrivilegedExceptionAction<Void>() {
    public Void run() throws Exception {
      mrRun();
      return null;
    }
  });
}
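The doAs call above only succeeds if the real (login) user is authorized to impersonate "u1". A minimal sketch of granting that permission in code, using the standard hadoop.proxyuser.<user>.hosts/.groups keys; the superuser name "u0", the wildcard values, and the class name are illustrative, not taken from the Hadoop test:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ProxyUserConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Allow the (placeholder) superuser "u0" to impersonate from any host and for any group.
    conf.set("hadoop.proxyuser.u0.hosts", "*");
    conf.set("hadoop.proxyuser.u0.groups", "*");
    // Reload the proxy-user ACLs in this process so the new settings take effect.
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
  }
}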
Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.
The class TestMRJobs, method testSleepJobWithSecurityOn:
//@Test (timeout = 60000)
public void testSleepJobWithSecurityOn() throws IOException, InterruptedException, ClassNotFoundException {
  LOG.info("\n\n\nStarting testSleepJobWithSecurityOn().");
  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    return;
  }
  mrCluster.getConfig().set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  mrCluster.getConfig().set(YarnConfiguration.RM_KEYTAB, "/etc/krb5.keytab");
  mrCluster.getConfig().set(YarnConfiguration.NM_KEYTAB, "/etc/krb5.keytab");
  mrCluster.getConfig().set(YarnConfiguration.RM_PRINCIPAL, "rm/sightbusy-lx@LOCALHOST");
  mrCluster.getConfig().set(YarnConfiguration.NM_PRINCIPAL, "nm/sightbusy-lx@LOCALHOST");
  UserGroupInformation.setConfiguration(mrCluster.getConfig());
  // Keep it in here instead of after RM/NM as multiple user logins happen in
  // the same JVM.
  UserGroupInformation user = UserGroupInformation.getCurrentUser();
  LOG.info("User name is " + user.getUserName());
  for (Token<? extends TokenIdentifier> str : user.getTokens()) {
    LOG.info("Token is " + str.encodeToUrlString());
  }
  user.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      SleepJob sleepJob = new SleepJob();
      sleepJob.setConf(mrCluster.getConfig());
      Job job = sleepJob.createJob(3, 0, 10000, 1, 0, 0);
      // Job with reduces:
      // Job job = sleepJob.createJob(3, 2, 10000, 1, 10000, 1);
      // The AppMaster jar itself.
      job.addFileToClassPath(APP_JAR);
      job.submit();
      String trackingUrl = job.getTrackingURL();
      String jobId = job.getJobID().toString();
      job.waitForCompletion(true);
      Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
      Assert.assertTrue("Tracking URL was " + trackingUrl + " but didn't Match Job ID " + jobId,
          trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
      return null;
    }
  });
  // TODO later: add explicit "isUber()" checks of some sort
}
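The test above reuses whatever Kerberos credentials the JVM already has; outside a test, a client normally logs in from a keytab before touching a secured cluster. A minimal sketch of that setup, where the principal "user@EXAMPLE.COM", the keytab path, and the class name are placeholders:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabLoginSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    // UGI reads the authentication method from the configuration it is given.
    UserGroupInformation.setConfiguration(conf);
    // Log in with a keytab; principal and keytab path are placeholders.
    UserGroupInformation.loginUserFromKeytab("user@EXAMPLE.COM", "/etc/security/user.keytab");
    System.out.println("Logged in as: " + UserGroupInformation.getLoginUser());
  }
}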
Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.
The class TestJHSSecurity, method getMRClientProtocol:
private MRClientProtocol getMRClientProtocol(Token token, final InetSocketAddress hsAddress,
    String user, final Configuration conf) {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
  ugi.addToken(ConverterUtils.convertFromYarn(token, hsAddress));
  final YarnRPC rpc = YarnRPC.create(conf);
  MRClientProtocol hsWithDT = ugi.doAs(new PrivilegedAction<MRClientProtocol>() {
    @Override
    public MRClientProtocol run() {
      return (MRClientProtocol) rpc.getProxy(HSClientProtocol.class, hsAddress, conf);
    }
  });
  return hsWithDT;
}
Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.
The class TestUmbilicalProtocolWithJobToken, method testJobTokenRpc:
@Test
public void testJobTokenRpc() throws Exception {
  TaskUmbilicalProtocol mockTT = mock(TaskUmbilicalProtocol.class);
  doReturn(TaskUmbilicalProtocol.versionID).when(mockTT).getProtocolVersion(anyString(), anyLong());
  doReturn(ProtocolSignature.getProtocolSignature(mockTT, TaskUmbilicalProtocol.class.getName(),
      TaskUmbilicalProtocol.versionID, 0)).when(mockTT).getProtocolSignature(anyString(), anyLong(), anyInt());
  JobTokenSecretManager sm = new JobTokenSecretManager();
  final Server server = new RPC.Builder(conf).setProtocol(TaskUmbilicalProtocol.class)
      .setInstance(mockTT).setBindAddress(ADDRESS).setPort(0).setNumHandlers(5)
      .setVerbose(true).setSecretManager(sm).build();
  server.start();
  final UserGroupInformation current = UserGroupInformation.getCurrentUser();
  final InetSocketAddress addr = NetUtils.getConnectAddress(server);
  String jobId = current.getUserName();
  JobTokenIdentifier tokenId = new JobTokenIdentifier(new Text(jobId));
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(tokenId, sm);
  sm.addTokenForJob(jobId, token);
  SecurityUtil.setTokenService(token, addr);
  LOG.info("Service address for token is " + token.getService());
  current.addToken(token);
  current.doAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws Exception {
      TaskUmbilicalProtocol proxy = null;
      try {
        proxy = (TaskUmbilicalProtocol) RPC.getProxy(TaskUmbilicalProtocol.class,
            TaskUmbilicalProtocol.versionID, addr, conf);
        proxy.statusUpdate(null, null);
      } finally {
        server.stop();
        if (proxy != null) {
          RPC.stopProxy(proxy);
        }
      }
      return null;
    }
  });
}
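Two pieces make this round trip work: the JobTokenSecretManager handed to the server is what validates the token the client presents, and SecurityUtil.setTokenService stamps the token with the server's address so the client-side UGI can pick the matching token for that connection. A small sketch of just the token setup, where the class name, method name, and job id argument are illustrative:

import java.net.InetSocketAddress;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.token.Token;

public class JobTokenSetupSketch {
  static Token<JobTokenIdentifier> tokenFor(String jobId, InetSocketAddress serverAddr,
      JobTokenSecretManager secretManager) {
    JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId));
    // The secret manager signs the new token now and validates it on the server later.
    Token<JobTokenIdentifier> token = new Token<>(identifier, secretManager);
    secretManager.addTokenForJob(jobId, token);
    // Bind the token to the server's address so UGI can select it by service name.
    SecurityUtil.setTokenService(token, serverAddr);
    return token;
  }
}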
Use of org.apache.hadoop.security.UserGroupInformation in project hadoop by apache.
The class Gridmix, method run:
public int run(final String[] argv) throws IOException, InterruptedException {
  int val = -1;
  final Configuration conf = getConf();
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation ugi = UserGroupInformation.getLoginUser();
  val = ugi.doAs(new PrivilegedExceptionAction<Integer>() {
    public Integer run() throws Exception {
      return runJob(conf, argv);
    }
  });
  // print the gridmix summary if the run was successful
  if (val == 0) {
    // print the run summary
    System.out.print("\n\n");
    System.out.println(summarizer.toString());
  }
  return val;
}
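Gridmix simply wraps the whole run in the login user's context after pointing UGI at its configuration. When a caller wants to confirm what identity and security mode it is actually running under before doing this, UserGroupInformation exposes read-only checks; a minimal sketch, with an illustrative class name and log output:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class SecurityCheckSketch {
  public static void main(String[] args) throws IOException {
    UserGroupInformation.setConfiguration(new Configuration());
    // True when hadoop.security.authentication is something other than "simple".
    System.out.println("Security enabled: " + UserGroupInformation.isSecurityEnabled());
    UserGroupInformation login = UserGroupInformation.getLoginUser();
    System.out.println("Login user: " + login.getUserName()
        + " (auth: " + login.getAuthenticationMethod() + ")");
  }
}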