use of org.apache.hadoop.yarn.client.api.TimelineClient in project hadoop by apache.
the class TestYarnClient method testBestEffortTimelineDelegationToken.
@Test
public void testBestEffortTimelineDelegationToken() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  YarnClientImpl client = spy(new YarnClientImpl() {

    @Override
    TimelineClient createTimelineClient() throws IOException, YarnException {
      // Stub the timeline client so that fetching a delegation token
      // always fails.
      timelineClient = mock(TimelineClient.class);
      when(timelineClient.getDelegationToken(any(String.class)))
          .thenThrow(new IOException("Best effort test exception"));
      return timelineClient;
    }
  });
  client.init(conf);
  // With best-effort enabled, the failure must be swallowed.
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT, true);
  client.serviceInit(conf);
  client.getTimelineDelegationToken();
  try {
    // With best-effort disabled, the same failure must propagate.
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT,
        false);
    client.serviceInit(conf);
    client.getTimelineDelegationToken();
    Assert.fail("Get delegation token should have thrown an exception");
  } catch (Exception e) {
    // Success
  }
}
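For reference outside the test harness, the two switches the test toggles are plain client-side configuration. A minimal sketch, assuming the public YarnClient factory rather than the package-private YarnClientImpl used above:

  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  // Tolerate timeline-service failures instead of failing the whole client.
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT, true);
  YarnClient yarnClient = YarnClient.createYarnClient();
  yarnClient.init(conf);
  yarnClient.start();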
use of org.apache.hadoop.yarn.client.api.TimelineClient in project hadoop by apache.
the class TestYarnClient method testAutomaticTimelineDelegationTokenLoading.
@Test
public void testAutomaticTimelineDelegationTokenLoading() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  TimelineDelegationTokenIdentifier timelineDT =
      new TimelineDelegationTokenIdentifier();
  final Token<TimelineDelegationTokenIdentifier> dToken =
      new Token<TimelineDelegationTokenIdentifier>(
          timelineDT.getBytes(), new byte[0], timelineDT.getKind(), new Text());
  // create a mock client
  YarnClientImpl client = spy(new YarnClientImpl() {

    @Override
    TimelineClient createTimelineClient() throws IOException, YarnException {
      timelineClient = mock(TimelineClient.class);
      when(timelineClient.getDelegationToken(any(String.class)))
          .thenReturn(dToken);
      return timelineClient;
    }

    @Override
    protected void serviceStart() throws Exception {
      rmClient = mock(ApplicationClientProtocol.class);
    }

    @Override
    protected void serviceStop() throws Exception {
    }

    @Override
    public ApplicationReport getApplicationReport(ApplicationId appId) {
      ApplicationReport report = mock(ApplicationReport.class);
      when(report.getYarnApplicationState())
          .thenReturn(YarnApplicationState.RUNNING);
      return report;
    }

    @Override
    public boolean isSecurityEnabled() {
      return true;
    }
  });
  client.init(conf);
  client.start();
  try {
    // On the second iteration (i == 1) the credentials carry no timeline
    // DT, so the client has to fetch one itself.
    for (int i = 0; i < 2; ++i) {
      ApplicationSubmissionContext context =
          mock(ApplicationSubmissionContext.class);
      ApplicationId applicationId = ApplicationId.newInstance(0, i + 1);
      when(context.getApplicationId()).thenReturn(applicationId);
      DataOutputBuffer dob = new DataOutputBuffer();
      Credentials credentials = new Credentials();
      if (i == 0) {
        credentials.addToken(client.timelineService, dToken);
      }
      credentials.writeTokenStorageToStream(dob);
      ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
      ContainerLaunchContext clc = ContainerLaunchContext.newInstance(
          null, null, null, null, tokens, null);
      when(context.getAMContainerSpec()).thenReturn(clc);
      client.submitApplication(context);
      if (i == 0) {
        // getTimelineDelegationToken shouldn't be called
        verify(client, never()).getTimelineDelegationToken();
      }
      // Either way, the token should be there.
      credentials = new Credentials();
      DataInputByteBuffer dibb = new DataInputByteBuffer();
      tokens = clc.getTokens();
      if (tokens != null) {
        dibb.reset(tokens);
        credentials.readTokenStorageStream(dibb);
        tokens.rewind();
      }
      Collection<Token<? extends TokenIdentifier>> dTokens =
          credentials.getAllTokens();
      Assert.assertEquals(1, dTokens.size());
      Assert.assertEquals(dToken, dTokens.iterator().next());
    }
  } finally {
    client.stop();
  }
}
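The token plumbing this test exercises is the standard Hadoop Credentials round trip. Isolated from the mocks, the write/read cycle looks like this (the "timeline-service" alias is illustrative only):

  // Serialize a Credentials object carrying the token into a ByteBuffer.
  Credentials credentials = new Credentials();
  credentials.addToken(new Text("timeline-service"), dToken);
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

  // ...and read the tokens back out of the buffer.
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  dibb.reset(tokens);
  Credentials restored = new Credentials();
  restored.readTokenStorageStream(dibb);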
use of org.apache.hadoop.yarn.client.api.TimelineClient in project hadoop by apache.
the class JobHistoryFileReplayMapperV1 method map.
public void map(IntWritable key, IntWritable val, Context context)
    throws IOException {
  // collect the apps it needs to process
  TimelineClient tlc = new TimelineClientImpl();
  TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
  JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
  int replayMode = helper.getReplayMode();
  Collection<JobFiles> jobs = helper.getJobFiles();
  JobHistoryFileParser parser = helper.getParser();
  if (jobs.isEmpty()) {
    LOG.info(context.getTaskAttemptID().getTaskID() +
        " will process no jobs");
  } else {
    LOG.info(context.getTaskAttemptID().getTaskID() + " will process " +
        jobs.size() + " jobs");
  }
  for (JobFiles job : jobs) {
    // process each job
    String jobIdStr = job.getJobId();
    LOG.info("processing " + jobIdStr + "...");
    JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
    ApplicationId appId = jobId.getAppId();
    try {
      // parse the job info and configuration
      Path historyFilePath = job.getJobHistoryFilePath();
      Path confFilePath = job.getJobConfFilePath();
      if ((historyFilePath == null) || (confFilePath == null)) {
        continue;
      }
      JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
      Configuration jobConf = parser.parseConfiguration(confFilePath);
      LOG.info("parsed the job history file and the configuration file " +
          "for job " + jobIdStr);
      // create entities from job history and write them
      long totalTime = 0;
      Set<TimelineEntity> entitySet =
          converter.createTimelineEntities(jobInfo, jobConf);
      LOG.info("converted them into timeline entities for job " + jobIdStr);
      // use the current user for this purpose
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      long startWrite = System.nanoTime();
      try {
        switch (replayMode) {
          case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
            writeAllEntities(tlc, entitySet, ugi);
            break;
          case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
            writePerEntity(tlc, entitySet, ugi);
            break;
          default:
            break;
        }
      } catch (Exception e) {
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES)
            .increment(1);
        LOG.error("writing to the timeline service failed", e);
      }
      long endWrite = System.nanoTime();
      totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
      int numEntities = entitySet.size();
      LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME)
          .increment(totalTime);
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER)
          .increment(numEntities);
    } finally {
      // move it along
      context.progress();
    }
  }
}
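The writeAllEntities and writePerEntity helpers are not shown in this snippet. A hedged sketch of the distinction they presumably encode, assuming only TimelineClient's varargs putEntities API and ignoring whatever UGI handling the real helpers do:

  // Hedged sketch only; these are not the project's actual helper bodies.
  private void writeAllEntities(TimelineClient tlc,
      Set<TimelineEntity> entitySet, UserGroupInformation ugi)
      throws IOException, YarnException {
    // One put carrying the whole entity set.
    tlc.putEntities(entitySet.toArray(new TimelineEntity[entitySet.size()]));
  }

  private void writePerEntity(TimelineClient tlc,
      Set<TimelineEntity> entitySet, UserGroupInformation ugi)
      throws IOException, YarnException {
    // One put per entity.
    for (TimelineEntity entity : entitySet) {
      tlc.putEntities(entity);
    }
  }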
use of org.apache.hadoop.yarn.client.api.TimelineClient in project hadoop by apache.
the class Client method prepareTimelineDomain.
private void prepareTimelineDomain() {
  TimelineClient timelineClient = null;
  if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
    timelineClient = TimelineClient.createTimelineClient();
    timelineClient.init(conf);
    timelineClient.start();
  } else {
    LOG.warn("Cannot put the domain " + domainId +
        " because the timeline service is not enabled");
    return;
  }
  try {
    // TODO: we need to check and combine the existing timeline domain ACLs,
    // but let's do it once we have client java library to query domains.
    TimelineDomain domain = new TimelineDomain();
    domain.setId(domainId);
    domain.setReaders(
        viewACLs != null && viewACLs.length() > 0 ? viewACLs : " ");
    domain.setWriters(
        modifyACLs != null && modifyACLs.length() > 0 ? modifyACLs : " ");
    timelineClient.putDomain(domain);
    LOG.info("Put the timeline domain: " +
        TimelineUtils.dumpTimelineRecordtoJSON(domain));
  } catch (Exception e) {
    LOG.error("Error when putting the timeline domain", e);
  } finally {
    timelineClient.stop();
  }
}
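The reader and writer strings follow YARN's usual "user1,user2 group1,group2" ACL format, which is why the code above falls back to a single space (effectively owner-only access) when no ACLs were supplied. An illustrative sketch with hypothetical names:

  // All ids and names below are made up for illustration.
  TimelineDomain domain = new TimelineDomain();
  domain.setId("interactive_analytics");
  domain.setReaders("alice,bob analysts");  // users alice,bob plus group analysts
  domain.setWriters("alice");               // writes restricted to alice
  timelineClient.putDomain(domain);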
use of org.apache.hadoop.yarn.client.api.TimelineClient in project hadoop by apache.
the class TimelineClientImpl method putTimelineDataInJSONFile.
/**
 * Put timeline data in a JSON file via command line.
 *
 * @param path
 *          path to the timeline data JSON file
 * @param type
 *          the type of the timeline data in the JSON file
 */
private static void putTimelineDataInJSONFile(String path, String type) {
  File jsonFile = new File(path);
  if (!jsonFile.exists()) {
    LOG.error("File [" + jsonFile.getAbsolutePath() + "] doesn't exist");
    return;
  }
  YarnJacksonJaxbJsonProvider.configObjectMapper(MAPPER);
  TimelineEntities entities = null;
  TimelineDomains domains = null;
  try {
    if (type.equals(ENTITY_DATA_TYPE)) {
      entities = MAPPER.readValue(jsonFile, TimelineEntities.class);
    } else if (type.equals(DOMAIN_DATA_TYPE)) {
      domains = MAPPER.readValue(jsonFile, TimelineDomains.class);
    }
  } catch (Exception e) {
    LOG.error("Error when reading " + e.getMessage());
    e.printStackTrace(System.err);
    return;
  }
  Configuration conf = new YarnConfiguration();
  TimelineClient client = TimelineClient.createTimelineClient();
  client.init(conf);
  client.start();
  try {
    if (UserGroupInformation.isSecurityEnabled() &&
        conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
      Token<TimelineDelegationTokenIdentifier> token =
          client.getDelegationToken(
              UserGroupInformation.getCurrentUser().getUserName());
      UserGroupInformation.getCurrentUser().addToken(token);
    }
    if (type.equals(ENTITY_DATA_TYPE)) {
      TimelinePutResponse response = client.putEntities(
          entities.getEntities().toArray(
              new TimelineEntity[entities.getEntities().size()]));
      if (response.getErrors().size() == 0) {
        LOG.info("Timeline entities are successfully put");
      } else {
        for (TimelinePutResponse.TimelinePutError error :
            response.getErrors()) {
          LOG.error("TimelineEntity [" + error.getEntityType() + ":" +
              error.getEntityId() + "] is not successfully put. Error code: " +
              error.getErrorCode());
        }
      }
    } else if (type.equals(DOMAIN_DATA_TYPE) && domains != null) {
      boolean hasError = false;
      for (TimelineDomain domain : domains.getDomains()) {
        try {
          client.putDomain(domain);
        } catch (Exception e) {
          LOG.error("Error when putting domain " + domain.getId(), e);
          hasError = true;
        }
      }
      if (!hasError) {
        LOG.info("Timeline domains are successfully put");
      }
    }
  } catch (RuntimeException e) {
    LOG.error("Error when putting the timeline data", e);
  } catch (Exception e) {
    LOG.error("Error when putting the timeline data", e);
  } finally {
    client.stop();
  }
}
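The same started client accepts entities built in code just as well as ones parsed from a JSON file. A minimal sketch, with a made-up entity id and type:

  // Hypothetical id and type, for illustration only.
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("job_1234");
  entity.setEntityType("HYPOTHETICAL_JOB");
  entity.setStartTime(System.currentTimeMillis());
  TimelinePutResponse response = client.putEntities(entity);
  if (!response.getErrors().isEmpty()) {
    LOG.error("put failed for " + response.getErrors().size() + " entities");
  }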