Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class ApplicationHistoryManagerOnTimelineStore, method getContainers.
@Override
public Map<ContainerId, ContainerReport> getContainers(ApplicationAttemptId appAttemptId)
    throws YarnException, IOException {
  ApplicationReportExt app = getApplication(
      appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  // query the timeline store for all container entities whose parent is this attempt
  TimelineEntities entities = timelineDataManager.getEntities(
      ContainerMetricsConstants.ENTITY_TYPE,
      new NameValuePair(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
          appAttemptId.toString()),
      null, null, null, null, null, Long.MAX_VALUE,
      EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  // convert each timeline entity into a ContainerReport, keyed by container id
  Map<ContainerId, ContainerReport> containers =
      new LinkedHashMap<ContainerId, ContainerReport>();
  if (entities != null && entities.getEntities() != null) {
    for (TimelineEntity entity : entities.getEntities()) {
      ContainerReport container = convertToContainerReport(
          entity, serverHttpAddress, app.appReport.getUser());
      containers.put(container.getContainerId(), container);
    }
  }
  return containers;
}
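For context, a caller only needs the returned map. A minimal usage sketch, assuming an already constructed ApplicationHistoryManagerOnTimelineStore instance named historyManager and a valid attempt id (both hypothetical here, not part of the snippet above):

// Hypothetical caller: list the containers reported for one application attempt.
void listFinishedContainers(ApplicationHistoryManagerOnTimelineStore historyManager,
    ApplicationAttemptId appAttemptId) throws YarnException, IOException {
  Map<ContainerId, ContainerReport> reports = historyManager.getContainers(appAttemptId);
  for (Map.Entry<ContainerId, ContainerReport> entry : reports.entrySet()) {
    // each report carries the container's final state and exit status
    System.out.println(entry.getKey() + " finished with exit status "
        + entry.getValue().getContainerExitStatus());
  }
}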
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class JobHistoryFileReplayMapperV1, method map.
public void map(IntWritable key, IntWritable val, Context context) throws IOException {
  // collect the apps it needs to process
  TimelineClient tlc = new TimelineClientImpl();
  TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
  JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
  int replayMode = helper.getReplayMode();
  Collection<JobFiles> jobs = helper.getJobFiles();
  JobHistoryFileParser parser = helper.getParser();
  if (jobs.isEmpty()) {
    LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
  } else {
    LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
  }
  for (JobFiles job : jobs) {
    // process each job
    String jobIdStr = job.getJobId();
    LOG.info("processing " + jobIdStr + "...");
    JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
    ApplicationId appId = jobId.getAppId();
    try {
      // parse the job info and configuration
      Path historyFilePath = job.getJobHistoryFilePath();
      Path confFilePath = job.getJobConfFilePath();
      if ((historyFilePath == null) || (confFilePath == null)) {
        continue;
      }
      JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
      Configuration jobConf = parser.parseConfiguration(confFilePath);
      LOG.info("parsed the job history file and the configuration file for job " + jobIdStr);
      // create entities from job history and write them
      long totalTime = 0;
      Set<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
      LOG.info("converted them into timeline entities for job " + jobIdStr);
      // use the current user for this purpose
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      long startWrite = System.nanoTime();
      try {
        switch (replayMode) {
          case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
            writeAllEntities(tlc, entitySet, ugi);
            break;
          case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
            writePerEntity(tlc, entitySet, ugi);
            break;
          default:
            break;
        }
      } catch (Exception e) {
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
        LOG.error("writing to the timeline service failed", e);
      }
      long endWrite = System.nanoTime();
      totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
      int numEntities = entitySet.size();
      LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
    } finally {
      // move it along
      context.progress();
    }
  }
}
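The two write paths selected by replayMode are not shown above. A minimal sketch of what they could look like, assuming only the public TimelineClient.putEntities(TimelineEntity...) API; the method names mirror the calls in the snippet, but the bodies here are illustrative rather than the project's actual implementation:

// Illustrative only: push every entity in a single putEntities call.
private void writeAllEntities(TimelineClient tlc, Set<TimelineEntity> entitySet,
    UserGroupInformation ugi) throws IOException, YarnException {
  tlc.putEntities(entitySet.toArray(new TimelineEntity[entitySet.size()]));
}

// Illustrative only: issue one putEntities call per entity.
private void writePerEntity(TimelineClient tlc, Set<TimelineEntity> entitySet,
    UserGroupInformation ugi) throws IOException, YarnException {
  for (TimelineEntity entity : entitySet) {
    tlc.putEntities(entity);
  }
}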
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class ApplicationMaster, method publishContainerEndEvent.
@VisibleForTesting
void publishContainerEndEvent(final TimelineClient timelineClient, ContainerStatus container,
    String domainId, UserGroupInformation ugi) {
  final TimelineEntity entity = new TimelineEntity();
  entity.setEntityId(container.getContainerId().toString());
  entity.setEntityType(DSEntity.DS_CONTAINER.toString());
  entity.setDomainId(domainId);
  entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName());
  entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME,
      container.getContainerId().getApplicationAttemptId().getApplicationId().toString());
  TimelineEvent event = new TimelineEvent();
  event.setTimestamp(System.currentTimeMillis());
  event.setEventType(DSEvent.DS_CONTAINER_END.toString());
  event.addEventInfo("State", container.getState().name());
  event.addEventInfo("Exit Status", container.getExitStatus());
  entity.addEvent(event);
  try {
    processTimelineResponseErrors(putContainerEntity(timelineClient,
        container.getContainerId().getApplicationAttemptId(), entity));
  } catch (YarnException | IOException | ClientHandlerException e) {
    LOG.error("Container end event could not be published for "
        + container.getContainerId().toString(), e);
  }
}
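The helper processTimelineResponseErrors is referenced but not shown. One plausible form, assuming it simply logs any per-entity errors carried in the TimelinePutResponse (illustrative, not the project's exact code):

// Illustrative only: log any per-entity errors returned by the timeline server.
private TimelinePutResponse processTimelineResponseErrors(TimelinePutResponse response) {
  for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
    LOG.error("Failed to put entity [" + error.getEntityType() + ":" + error.getEntityId()
        + "], error code: " + error.getErrorCode());
  }
  return response;
}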
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class FileSystemTimelineWriter, method putEntities.
@Override
public TimelinePutResponse putEntities(ApplicationAttemptId appAttemptId,
    TimelineEntityGroupId groupId, TimelineEntity... entities)
    throws IOException, YarnException {
  if (appAttemptId == null) {
    return putEntities(entities);
  }
  // route each entity to the summary log, the entity-group log, or the backing store
  List<TimelineEntity> entitiesToDBStore = new ArrayList<TimelineEntity>();
  List<TimelineEntity> entitiesToSummaryCache = new ArrayList<TimelineEntity>();
  List<TimelineEntity> entitiesToEntityCache = new ArrayList<TimelineEntity>();
  Path attemptDir = attemptDirCache.getAppAttemptDir(appAttemptId);
  for (TimelineEntity entity : entities) {
    if (summaryEntityTypes.contains(entity.getEntityType())) {
      entitiesToSummaryCache.add(entity);
    } else {
      if (groupId != null) {
        entitiesToEntityCache.add(entity);
      } else {
        entitiesToDBStore.add(entity);
      }
    }
  }
  if (!entitiesToSummaryCache.isEmpty()) {
    Path summaryLogPath = new Path(attemptDir, SUMMARY_LOG_PREFIX + appAttemptId.toString());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Writing summary log for " + appAttemptId.toString() + " to " + summaryLogPath);
    }
    this.logFDsCache.writeSummaryEntityLogs(fs, summaryLogPath, objMapper, appAttemptId,
        entitiesToSummaryCache, isAppendSupported);
  }
  if (!entitiesToEntityCache.isEmpty()) {
    Path entityLogPath = new Path(attemptDir, ENTITY_LOG_PREFIX + groupId.toString());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Writing entity log for " + groupId.toString() + " to " + entityLogPath);
    }
    this.logFDsCache.writeEntityLogs(fs, entityLogPath, objMapper, appAttemptId, groupId,
        entitiesToEntityCache, isAppendSupported);
  }
  if (!entitiesToDBStore.isEmpty()) {
    putEntities(entitiesToDBStore.toArray(new TimelineEntity[entitiesToDBStore.size()]));
  }
  return new TimelinePutResponse();
}
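Whether an entity lands in the entity-group log or falls through to the plain putEntities(...) path depends on the group id argument. A minimal caller sketch, assuming an already initialized FileSystemTimelineWriter named writer; the ids and type below are made-up values for illustration:

// Hypothetical caller: an entity with a group id is routed to the entity-group log.
ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance(appId, "group_1");

TimelineEntity entity = new TimelineEntity();
entity.setEntityId("entity_1");
entity.setEntityType("SOME_ENTITY_TYPE");

writer.putEntities(attemptId, groupId, entity);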
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class TimelineClientImpl, method putTimelineDataInJSONFile.
/**
 * Put timeline data in a JSON file via command line.
 *
 * @param path
 *          path to the timeline data JSON file
 * @param type
 *          the type of the timeline data in the JSON file
 */
private static void putTimelineDataInJSONFile(String path, String type) {
  File jsonFile = new File(path);
  if (!jsonFile.exists()) {
    LOG.error("File [" + jsonFile.getAbsolutePath() + "] doesn't exist");
    return;
  }
  YarnJacksonJaxbJsonProvider.configObjectMapper(MAPPER);
  TimelineEntities entities = null;
  TimelineDomains domains = null;
  try {
    if (type.equals(ENTITY_DATA_TYPE)) {
      entities = MAPPER.readValue(jsonFile, TimelineEntities.class);
    } else if (type.equals(DOMAIN_DATA_TYPE)) {
      domains = MAPPER.readValue(jsonFile, TimelineDomains.class);
    }
  } catch (Exception e) {
    LOG.error("Error when reading " + e.getMessage());
    e.printStackTrace(System.err);
    return;
  }
  Configuration conf = new YarnConfiguration();
  TimelineClient client = TimelineClient.createTimelineClient();
  client.init(conf);
  client.start();
  try {
    if (UserGroupInformation.isSecurityEnabled()
        && conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
      Token<TimelineDelegationTokenIdentifier> token =
          client.getDelegationToken(UserGroupInformation.getCurrentUser().getUserName());
      UserGroupInformation.getCurrentUser().addToken(token);
    }
    if (type.equals(ENTITY_DATA_TYPE)) {
      TimelinePutResponse response = client.putEntities(
          entities.getEntities().toArray(new TimelineEntity[entities.getEntities().size()]));
      if (response.getErrors().size() == 0) {
        LOG.info("Timeline entities are successfully put");
      } else {
        for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
          LOG.error("TimelineEntity [" + error.getEntityType() + ":" + error.getEntityId()
              + "] is not successfully put. Error code: " + error.getErrorCode());
        }
      }
    } else if (type.equals(DOMAIN_DATA_TYPE) && domains != null) {
      boolean hasError = false;
      for (TimelineDomain domain : domains.getDomains()) {
        try {
          client.putDomain(domain);
        } catch (Exception e) {
          LOG.error("Error when putting domain " + domain.getId(), e);
          hasError = true;
        }
      }
      if (!hasError) {
        LOG.info("Timeline domains are successfully put");
      }
    }
  } catch (RuntimeException e) {
    LOG.error("Error when putting the timeline data", e);
  } catch (Exception e) {
    LOG.error("Error when putting the timeline data", e);
  } finally {
    client.stop();
  }
}
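The same put-and-check-errors pattern works outside the CLI path. A minimal programmatic sketch using only the TimelineClient calls that appear above; the entity id and type are made-up values for illustration:

// Illustrative only: publish a single entity and report per-entity errors.
Configuration conf = new YarnConfiguration();
TimelineClient client = TimelineClient.createTimelineClient();
client.init(conf);
client.start();
try {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("example_entity_1");
  entity.setEntityType("EXAMPLE_TYPE");
  TimelinePutResponse response = client.putEntities(entity);
  for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
    LOG.error("Put failed for [" + error.getEntityType() + ":" + error.getEntityId()
        + "], error code: " + error.getErrorCode());
  }
} catch (YarnException | IOException e) {
  LOG.error("Error when putting the timeline data", e);
} finally {
  client.stop();
}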