Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
The class TaskUpdatedEvent, method toTimelineEvent.
@Override
public TimelineEvent toTimelineEvent() {
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setId(StringUtils.toUpperCase(getEventType().name()));
  tEvent.addInfo("FINISH_TIME", getFinishTime());
  return tEvent;
}
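A minimal sketch, not taken from the Hadoop source, of how an event built this way might be attached to a TimelineEntity before publishing; the entity id and type values are illustrative assumptions.
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
public class TimelineEventSketch {
  // Wraps an already-built event into an entity; the id/type strings are
  // hypothetical and only serve to show the TimelineEntity calls used.
  public static TimelineEntity wrapInEntity(TimelineEvent event) {
    TimelineEntity entity = new TimelineEntity();
    entity.setId("task_1234567890123_0001_m_000000"); // illustrative id
    entity.setType("MAPREDUCE_TASK");                 // illustrative type
    event.setTimestamp(System.currentTimeMillis());   // stamp the event
    entity.addEvent(event);
    return entity;
  }
}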
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
The class JobSubmittedEvent, method toTimelineEvent.
@Override
public TimelineEvent toTimelineEvent() {
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setId(StringUtils.toUpperCase(getEventType().name()));
  tEvent.addInfo("SUBMIT_TIME", getSubmitTime());
  tEvent.addInfo("QUEUE_NAME", getJobQueueName());
  tEvent.addInfo("JOB_NAME", getJobName());
  tEvent.addInfo("USER_NAME", getUserName());
  tEvent.addInfo("JOB_CONF_PATH", getJobConfPath());
  tEvent.addInfo("ACLS", getJobAcls());
  tEvent.addInfo("JOB_QUEUE_NAME", getJobQueueName());
  tEvent.addInfo("WORKLFOW_ID", getWorkflowId());
  tEvent.addInfo("WORKFLOW_NAME", getWorkflowName());
  tEvent.addInfo("WORKFLOW_NODE_NAME", getWorkflowNodeName());
  tEvent.addInfo("WORKFLOW_ADJACENCIES", getWorkflowAdjacencies());
  tEvent.addInfo("WORKFLOW_TAGS", getWorkflowTags());
  return tEvent;
}
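A hedged sketch of how a consumer could read a few of those keys back out of the event's info map; the reader class and method names are assumptions, not part of the project.
import java.util.Map;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
public class JobSubmittedInfoReader {
  // addInfo stores values as Object, so readers cast to the expected type.
  public static void printSubmissionInfo(TimelineEvent tEvent) {
    Map<String, Object> info = tEvent.getInfo();
    Long submitTime = (Long) info.get("SUBMIT_TIME");
    String jobName = (String) info.get("JOB_NAME");
    String userName = (String) info.get("USER_NAME");
    System.out.println(jobName + " submitted by " + userName + " at " + submitTime);
  }
}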
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
The class NormalizedResourceEvent, method toTimelineEvent.
@Override
public TimelineEvent toTimelineEvent() {
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setId(StringUtils.toUpperCase(getEventType().name()));
  tEvent.addInfo("MEMORY", "" + getMemory());
  tEvent.addInfo("TASK_TYPE", getTaskType());
  return tEvent;
}
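Because the memory value is added as "" + getMemory(), it lands in the info map as a String. A small sketch, not from the Hadoop source, of parsing it back; the class and method names are hypothetical.
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
public class NormalizedResourceInfoReader {
  // The MEMORY entry was written as a String, so it must be parsed back
  // into a number by readers; returns 0 if the key is absent.
  public static long readMemory(TimelineEvent tEvent) {
    Object raw = tEvent.getInfo().get("MEMORY");
    return raw == null ? 0L : Long.parseLong(raw.toString());
  }
}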
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
The class ReduceAttemptFinishedEvent, method toTimelineEvent.
@Override
public TimelineEvent toTimelineEvent() {
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setId(StringUtils.toUpperCase(getEventType().name()));
  tEvent.addInfo("TASK_TYPE", getTaskType().toString());
  tEvent.addInfo("ATTEMPT_ID", getAttemptId() == null ?
      "" : getAttemptId().toString());
  tEvent.addInfo("FINISH_TIME", getFinishTime());
  tEvent.addInfo("STATUS", getTaskStatus());
  tEvent.addInfo("STATE", getState());
  tEvent.addInfo("SHUFFLE_FINISH_TIME", getShuffleFinishTime());
  tEvent.addInfo("SORT_FINISH_TIME", getSortFinishTime());
  tEvent.addInfo("HOSTNAME", getHostname());
  tEvent.addInfo("PORT", getPort());
  tEvent.addInfo("RACK_NAME", getRackName());
  return tEvent;
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
The class TestMRTimelineEventHandling, method verifyEntity.
/**
 * Verifies an entity by reading the entity file written via the FS impl.
 * @param entityFile file to be read.
 * @param eventId event to be checked for; may be null.
 * @param chkMetrics if eventId is not null, whether metrics must exist when
 *        the event is encountered; if eventId is null, whether the entity
 *        file must contain metrics at all.
 * @param chkCfg if eventId is not null, whether configs must exist when the
 *        event is encountered; if eventId is null, whether the entity file
 *        must contain configs at all.
 * @param cfgsToVerify a set of config keys which should exist in the entity
 *        file.
 * @throws IOException if the entity file cannot be read.
 */
private void verifyEntity(File entityFile, String eventId, boolean chkMetrics,
    boolean chkCfg, Set<String> cfgsToVerify) throws IOException {
  BufferedReader reader = null;
  String strLine;
  try {
    reader = new BufferedReader(new FileReader(entityFile));
    while ((strLine = reader.readLine()) != null) {
      if (strLine.trim().length() > 0) {
        org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
            FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(
                strLine.trim(),
                org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.class);
        if (eventId == null) {
          // No event to match: keep scanning entities until one carrying
          // metrics is found. The same applies to configs below.
          if (chkMetrics && entity.getMetrics().size() > 0) {
            return;
          }
          if (chkCfg && entity.getConfigs().size() > 0) {
            if (cfgsToVerify == null) {
              return;
            } else {
              // Remove every expected config key that this entity carries
              // (the keys may be spread across several entities, or not).
              for (Iterator<String> itr = cfgsToVerify.iterator(); itr.hasNext(); ) {
                String config = itr.next();
                if (entity.getConfigs().containsKey(config)) {
                  itr.remove();
                }
              }
              // All the required configs have been verified, so return.
              if (cfgsToVerify.isEmpty()) {
                return;
              }
            }
          }
        } else {
          for (TimelineEvent event : entity.getEvents()) {
            if (event.getId().equals(eventId)) {
              if (chkMetrics) {
                assertTrue(entity.getMetrics().size() > 0);
              }
              if (chkCfg) {
                assertTrue(entity.getConfigs().size() > 0);
                if (cfgsToVerify != null) {
                  for (String cfg : cfgsToVerify) {
                    assertTrue(entity.getConfigs().containsKey(cfg));
                  }
                }
              }
              return;
            }
          }
        }
      }
    }
    if (cfgsToVerify != null) {
      assertTrue(cfgsToVerify.isEmpty());
      return;
    }
    fail("Expected event : " + eventId + " not found in the file " + entityFile);
  } finally {
    reader.close();
  }
}
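A hedged usage sketch of how a test inside this class might invoke verifyEntity; the entity file path, event id and config keys below are placeholders chosen for illustration, not values from the actual test.
// Hypothetical call site within TestMRTimelineEventHandling.
Set<String> cfgsToCheck = new HashSet<>(
    Arrays.asList("mapreduce.map.memory.mb", "mapreduce.reduce.memory.mb"));
File entityFile = new File("/tmp/timeline/job_1234567890123_0001.thist"); // placeholder path
verifyEntity(entityFile, "JOB_SUBMITTED", true, true, cfgsToCheck);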