Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
From the class TestCredentials, method mergeAll: verifies that mergeAll adds new tokens and secret keys without overwriting entries that already exist.
@Test
public void mergeAll() {
  Credentials creds = new Credentials();
  creds.addToken(service[0], token[0]);
  creds.addToken(service[1], token[1]);
  creds.addSecretKey(secret[0], secret[0].getBytes());
  creds.addSecretKey(secret[1], secret[1].getBytes());
  Credentials credsToAdd = new Credentials();
  // one duplicate with different value, one new
  credsToAdd.addToken(service[0], token[3]);
  credsToAdd.addToken(service[2], token[2]);
  credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
  credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
  creds.mergeAll(credsToAdd);
  assertEquals(3, creds.numberOfTokens());
  assertEquals(3, creds.numberOfSecretKeys());
  // existing token & secret should not be overwritten
  assertEquals(token[0], creds.getToken(service[0]));
  assertEquals(secret[0], new Text(creds.getSecretKey(secret[0])));
  // non-duplicate token & secret should be present
  assertEquals(token[1], creds.getToken(service[1]));
  assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
  // new token & secret should be added
  assertEquals(token[2], creds.getToken(service[2]));
  assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
}
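The behavior exercised above is worth contrasting with Credentials.addAll: mergeAll keeps the existing entry when an alias collides, while addAll overwrites it. A minimal standalone sketch (the class name MergeVsAdd is hypothetical; it is not part of the Hadoop test suite):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class MergeVsAdd {
  public static void main(String[] args) {
    Credentials base = new Credentials();
    base.addSecretKey(new Text("alias"), "original".getBytes());

    Credentials incoming = new Credentials();
    incoming.addSecretKey(new Text("alias"), "replacement".getBytes());

    // mergeAll: on a duplicate alias, the existing value wins
    base.mergeAll(incoming);
    System.out.println(new String(base.getSecretKey(new Text("alias")))); // original

    // addAll: on a duplicate alias, the incoming value wins
    base.addAll(incoming);
    System.out.println(new String(base.getSecretKey(new Text("alias")))); // replacement
  }
}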
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
From the class MRAppMasterTestLaunchTime, method testMRAppMasterCredentials: verifies that credentials written to a token storage file are picked up by the MRAppMaster and propagated to the job configuration and to the AM's UGI.
@Test
public void testMRAppMasterCredentials() throws Exception {
  Logger rootLogger = LogManager.getRootLogger();
  rootLogger.setLevel(Level.DEBUG);
  // Simulate credentials passed to the AM via client -> RM -> NM
  Credentials credentials = new Credentials();
  byte[] identifier = "MyIdentifier".getBytes();
  byte[] password = "MyPassword".getBytes();
  Text kind = new Text("MyTokenKind");
  Text service = new Text("host:port");
  Token<? extends TokenIdentifier> myToken =
      new Token<TokenIdentifier>(identifier, password, kind, service);
  Text tokenAlias = new Text("myToken");
  credentials.addToken(tokenAlias, myToken);
  Text appTokenService = new Text("localhost:0");
  Token<AMRMTokenIdentifier> appToken =
      new Token<AMRMTokenIdentifier>(identifier, password,
          AMRMTokenIdentifier.KIND_NAME, appTokenService);
  credentials.addToken(appTokenService, appToken);
  Text keyAlias = new Text("mySecretKeyAlias");
  credentials.addSecretKey(keyAlias, "mySecretKey".getBytes());
  Token<? extends TokenIdentifier> storedToken = credentials.getToken(tokenAlias);
  JobConf conf = new JobConf();
  Path tokenFilePath = new Path(testDir, "tokens-file");
  Map<String, String> newEnv = new HashMap<String, String>();
  newEnv.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION,
      tokenFilePath.toUri().getPath());
  setNewEnvironmentHack(newEnv);
  credentials.writeTokenStorageFile(tokenFilePath, conf);
  ApplicationId appId = ApplicationId.newInstance(12345, 56);
  ApplicationAttemptId applicationAttemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newContainerId(applicationAttemptId, 546);
  String userName = UserGroupInformation.getCurrentUser().getShortUserName();
  // Create the staging dir, so MRAppMaster doesn't barf.
  File stagingDir = new File(MRApps.getStagingAreaDir(conf, userName).toString());
  stagingDir.mkdirs();
  // Set the login user to null, as that is how a real-world MRApp starts.
  // Its being null is the reason the token file is read by the UGI.
  UserGroupInformation.setLoginUser(null);
  MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId, containerId,
      "host", -1, -1, System.currentTimeMillis(), false, true);
  MRAppMaster.initAndStartAppMaster(appMaster, conf, userName);
  // Now validate the task credentials
  Credentials appMasterCreds = appMaster.getCredentials();
  Assert.assertNotNull(appMasterCreds);
  Assert.assertEquals(1, appMasterCreds.numberOfSecretKeys());
  Assert.assertEquals(1, appMasterCreds.numberOfTokens());
  // Validate the tokens - the app token should not be present
  Token<? extends TokenIdentifier> usedToken = appMasterCreds.getToken(tokenAlias);
  Assert.assertNotNull(usedToken);
  Assert.assertEquals(storedToken, usedToken);
  // Validate the keys
  byte[] usedKey = appMasterCreds.getSecretKey(keyAlias);
  Assert.assertNotNull(usedKey);
  Assert.assertEquals("mySecretKey", new String(usedKey));
  // The credentials should also be added to conf so that the OutputCommitter
  // can access them - the app token should not be present
  Credentials confCredentials = conf.getCredentials();
  Assert.assertEquals(1, confCredentials.numberOfSecretKeys());
  Assert.assertEquals(1, confCredentials.numberOfTokens());
  Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias));
  Assert.assertEquals("mySecretKey", new String(confCredentials.getSecretKey(keyAlias)));
  // Verify the AM's UGI - the app token should be present
  Credentials ugiCredentials = appMaster.getUgi().getCredentials();
  Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys());
  Assert.assertEquals(2, ugiCredentials.numberOfTokens());
  Assert.assertEquals(storedToken, ugiCredentials.getToken(tokenAlias));
  Assert.assertEquals(appToken, ugiCredentials.getToken(appTokenService));
  Assert.assertEquals("mySecretKey", new String(ugiCredentials.getSecretKey(keyAlias)));
}
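The handoff in this test depends on the Credentials token-file format: writeTokenStorageFile persists the tokens and secret keys, and the UGI reads them back from the path named by HADOOP_TOKEN_FILE_LOCATION. A minimal sketch of the same round trip in isolation (the class name TokenFileRoundTrip and the /tmp path are illustrative; writeTokenStorageFile and readTokenStorageFile are the actual Credentials APIs):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class TokenFileRoundTrip {
  public static void main(String[] args) throws Exception {
    Credentials creds = new Credentials();
    creds.addSecretKey(new Text("mySecretKeyAlias"), "mySecretKey".getBytes());
    Configuration conf = new Configuration();
    // Illustrative location; any Hadoop-visible path works.
    Path tokenFile = new Path("/tmp/tokens-file");
    creds.writeTokenStorageFile(tokenFile, conf);
    // Reading the file back recovers the same entries; a process pointed at
    // it via HADOOP_TOKEN_FILE_LOCATION gets the equivalent inside UGI.
    Credentials readBack = Credentials.readTokenStorageFile(tokenFile, conf);
    System.out.println(new String(readBack.getSecretKey(new Text("mySecretKeyAlias"))));
  }
}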
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
From the class TestJobImpl, method testMetaInfoSizeOverMax: verifies that a YarnRuntimeException thrown while creating splits leaves the job in the NEW state and is recorded in the job diagnostics.
@Test
public void testMetaInfoSizeOverMax() throws Exception {
  Configuration conf = new Configuration();
  JobID jobID = JobID.forName("job_1234567890000_0001");
  JobId jobId = TypeConverter.toYarn(jobID);
  MRAppMetrics mrAppMetrics = MRAppMetrics.create();
  JobImpl job = new JobImpl(jobId,
      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0),
      conf, mock(EventHandler.class), null, new JobTokenSecretManager(),
      new Credentials(), null, null, mrAppMetrics, null, true, null, 0,
      null, null, null, null);
  InitTransition initTransition = new InitTransition() {

    @Override
    protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
      throw new YarnRuntimeException(EXCEPTIONMSG);
    }
  };
  JobEvent mockJobEvent = mock(JobEvent.class);
  JobStateInternal jobSI = initTransition.transition(job, mockJobEvent);
  Assert.assertTrue("When init fails, return value from InitTransition.transition should equal NEW.",
      jobSI.equals(JobStateInternal.NEW));
  Assert.assertTrue("Job diagnostics should contain YarnRuntimeException",
      job.getDiagnostics().toString().contains("YarnRuntimeException"));
  Assert.assertTrue("Job diagnostics should contain " + EXCEPTIONMSG,
      job.getDiagnostics().toString().contains(EXCEPTIONMSG));
}
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
From the class TestJobImpl, method testUberDecision: a helper that runs the init transition on a freshly constructed JobImpl and reports whether the job was chosen to run in uber mode.
private boolean testUberDecision(Configuration conf) {
  JobID jobID = JobID.forName("job_1234567890000_0001");
  JobId jobId = TypeConverter.toYarn(jobID);
  MRAppMetrics mrAppMetrics = MRAppMetrics.create();
  JobImpl job = new JobImpl(jobId,
      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0),
      conf, mock(EventHandler.class), null, new JobTokenSecretManager(),
      new Credentials(), null, null, mrAppMetrics, null, true, null, 0,
      null, null, null, null);
  InitTransition initTransition = getInitTransition(2);
  JobEvent mockJobEvent = mock(JobEvent.class);
  initTransition.transition(job, mockJobEvent);
  boolean isUber = job.isUber();
  return isUber;
}
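A hypothetical call site for this helper, from within the same test class (MRJobConfig.JOB_UBERTASK_ENABLE and MRJobConfig.JOB_UBERTASK_MAXMAPS are the real configuration constants; getInitTransition(2) above creates two splits, so maxmaps must be at least 2 for the job to uberize):

// Hypothetical usage sketch; whether the job actually uberizes also
// depends on the other uber thresholds (reduces, memory, CPU).
Configuration conf = new Configuration();
conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);
conf.setInt(MRJobConfig.JOB_UBERTASK_MAXMAPS, 2);
boolean uberized = testUberDecision(conf);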
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
From the class TestTaskAttempt, method testAppDiognosticEventOnNewTask: verifies that a diagnostics-update event delivered to a new task attempt does not cause an internal error.
@Test
public void testAppDiognosticEventOnNewTask() throws Exception {
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] { "127.0.0.1" });
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  Resource resource = mock(Resource.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  when(resource.getMemorySize()).thenReturn(1024L);
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, splits, jobConf, taListener, new Token(), new Credentials(),
      SystemClock.getInstance(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptDiagnosticsUpdateEvent(attemptId, "Task got killed"));
  assertFalse("InternalError occurred trying to handle TA_DIAGNOSTICS_UPDATE on assigned task",
      eventHandler.internalError);
}
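All of these tests assert on counts and specific aliases; when debugging, it can help to enumerate what a Credentials object actually holds. A minimal sketch (the class and helper names are hypothetical; getAllTokens, getKind, and getService are real APIs):

import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class CredentialsDump {
  // Print the kind and service of every token held by the Credentials.
  static void dumpTokens(Credentials creds) {
    for (Token<? extends TokenIdentifier> t : creds.getAllTokens()) {
      System.out.println(t.getKind() + " for service " + t.getService());
    }
  }
}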