Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class TimelineServiceV1Publisher, method containerFinished:
@SuppressWarnings("unchecked")
@Override
public void containerFinished(RMContainer container, long finishedTime) {
  TimelineEntity entity = createContainerEntity(container.getContainerId());
  // Build the FINISHED event carrying diagnostics, exit status, and final state.
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ContainerMetricsConstants.FINISHED_EVENT_TYPE);
  tEvent.setTimestamp(finishedTime);
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put(ContainerMetricsConstants.DIAGNOSTICS_INFO,
      container.getDiagnosticsInfo());
  eventInfo.put(ContainerMetricsConstants.EXIT_STATUS_INFO,
      container.getContainerExitStatus());
  eventInfo.put(ContainerMetricsConstants.STATE_INFO,
      container.getContainerState().toString());
  // The allocated host and port are attached to the entity, not the event.
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
      container.getAllocatedNode().getHost());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
      container.getAllocatedNode().getPort());
  entity.setOtherInfo(entityInfo);
  tEvent.setEventInfo(eventInfo);
  entity.addEvent(tEvent);
  // Publish asynchronously through the dispatcher, keyed by application id.
  getDispatcher().getEventHandler().handle(
      new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity,
          container.getContainerId().getApplicationAttemptId().getApplicationId()));
}
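Both containerFinished and containerCreated delegate entity creation to a createContainerEntity helper that is not shown in this listing. A minimal sketch of what such a helper plausibly looks like, assuming it only derives the entity type and id from the ContainerId (the actual Hadoop implementation may also attach primary filters; treat this as illustrative, not authoritative):

  // Hypothetical sketch of the helper used above; not the exact Hadoop code.
  private static TimelineEntity createContainerEntity(ContainerId containerId) {
    TimelineEntity entity = new TimelineEntity();
    // Entity type constant assumed to come from ContainerMetricsConstants.
    entity.setEntityType(ContainerMetricsConstants.ENTITY_TYPE);
    entity.setEntityId(containerId.toString());
    return entity;
  }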
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class TimelineServiceV1Publisher, method containerCreated:
@SuppressWarnings("unchecked")
@Override
public void containerCreated(RMContainer container, long createdTime) {
  TimelineEntity entity = createContainerEntity(container.getContainerId());
  // Static allocation details (resources, node, priority) live on the entity.
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO,
      container.getAllocatedResource().getMemorySize());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO,
      container.getAllocatedResource().getVirtualCores());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
      container.getAllocatedNode().getHost());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
      container.getAllocatedNode().getPort());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO,
      container.getAllocatedPriority().getPriority());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
      container.getNodeHttpAddress());
  entity.setOtherInfo(entityInfo);
  // The CREATED event itself carries only its type and timestamp.
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ContainerMetricsConstants.CREATED_EVENT_TYPE);
  tEvent.setTimestamp(createdTime);
  entity.addEvent(tEvent);
  getDispatcher().getEventHandler().handle(
      new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity,
          container.getContainerId().getApplicationAttemptId().getApplicationId()));
}
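In both methods the entity is not written synchronously; it is wrapped in a TimelineV1PublishEvent and handed to a dispatcher. A hedged sketch of how such an event might eventually be serviced, assuming the event exposes its entity via a getEntity() accessor (hypothetical wiring, not the exact Hadoop handler):

  // Hypothetical handler end of the dispatch: push the entity to the
  // timeline server via the v1 TimelineClient API.
  void handlePublishEvent(TimelineV1PublishEvent event, TimelineClient client)
      throws IOException, YarnException {
    // putEntities performs the actual call to the timeline server.
    client.putEntities(event.getEntity());
  }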
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class JHEventHandlerForSigtermTest, method testTimelineEventHandling:
// Have JobHistoryEventHandler handle some events and make sure they get
// stored to the Timeline store.
@Test(timeout = 50000)
public void testTimelineEventHandling() throws Exception {
  TestParams t = new TestParams(RunningAppContext.class, false);
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  long currentTime = System.currentTimeMillis();
  try (MiniYARNCluster yarnCluster = new MiniYARNCluster(
      TestJobHistoryEventHandler.class.getSimpleName(), 1, 1, 1, 1)) {
    yarnCluster.init(conf);
    yarnCluster.start();
    Configuration confJHEH = new YarnConfiguration(conf);
    confJHEH.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    confJHEH.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + yarnCluster.getApplicationHistoryServer().getPort());
    JHEvenHandlerForTest jheh = new JHEvenHandlerForTest(t.mockAppContext, 0);
    jheh.init(confJHEH);
    jheh.start();
    TimelineStore ts =
        yarnCluster.getApplicationHistoryServer().getTimelineStore();

    // AM_STARTED at currentTime - 10: the job entity receives its first event.
    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new AMStartedEvent(t.appAttemptId, 200, t.containerId,
            "nmhost", 3000, 4000, -1),
        currentTime - 10));
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
    Assert.assertEquals(1, tEntity.getEvents().size());
    Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(0).getEventType());
    Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(0).getTimestamp());

    // JOB_SUBMITTED at currentTime + 10: events are returned newest-first.
    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new JobSubmittedEvent(TypeConverter.fromYarn(t.jobId), "name", "user",
            200, "/foo/job.xml", new HashMap<JobACL, AccessControlList>(),
            "default"),
        currentTime + 10));
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
    Assert.assertEquals(2, tEntity.getEvents().size());
    Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(0).getEventType());
    Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(1).getEventType());
    Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(0).getTimestamp());
    Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(1).getTimestamp());

    // JOB_QUEUE_CHANGED at currentTime - 20 sorts behind the older events.
    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new JobQueueChangeEvent(TypeConverter.fromYarn(t.jobId), "q2"),
        currentTime - 20));
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
    Assert.assertEquals(3, tEntity.getEvents().size());
    Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(0).getEventType());
    Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(1).getEventType());
    Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(), tEntity.getEvents().get(2).getEventType());
    Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(0).getTimestamp());
    Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(1).getTimestamp());
    Assert.assertEquals(currentTime - 20, tEntity.getEvents().get(2).getTimestamp());

    // JOB_FINISHED at currentTime lands between JOB_SUBMITTED and AM_STARTED.
    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new JobFinishedEvent(TypeConverter.fromYarn(t.jobId), 0, 0, 0, 0, 0,
            new Counters(), new Counters(), new Counters()),
        currentTime));
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
    Assert.assertEquals(4, tEntity.getEvents().size());
    Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(0).getEventType());
    Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(1).getEventType());
    Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(2).getEventType());
    Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(), tEntity.getEvents().get(3).getEventType());
    Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(0).getTimestamp());
    Assert.assertEquals(currentTime, tEntity.getEvents().get(1).getTimestamp());
    Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(2).getTimestamp());
    Assert.assertEquals(currentTime - 20, tEntity.getEvents().get(3).getTimestamp());

    // JOB_KILLED at currentTime + 20 becomes the newest event.
    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new JobUnsuccessfulCompletionEvent(TypeConverter.fromYarn(t.jobId),
            0, 0, 0, JobStateInternal.KILLED.toString()),
        currentTime + 20));
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
    Assert.assertEquals(5, tEntity.getEvents().size());
    Assert.assertEquals(EventType.JOB_KILLED.toString(), tEntity.getEvents().get(0).getEventType());
    Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(1).getEventType());
    Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(2).getEventType());
    Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(3).getEventType());
    Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(), tEntity.getEvents().get(4).getEventType());
    Assert.assertEquals(currentTime + 20, tEntity.getEvents().get(0).getTimestamp());
    Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(1).getTimestamp());
    Assert.assertEquals(currentTime, tEntity.getEvents().get(2).getTimestamp());
    Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(3).getTimestamp());
    Assert.assertEquals(currentTime - 20, tEntity.getEvents().get(4).getTimestamp());

    // Task events are stored under a separate MAPREDUCE_TASK entity.
    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new TaskStartedEvent(t.taskID, 0, TaskType.MAP, "")));
    entities = ts.getEntities("MAPREDUCE_TASK", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.taskID.toString(), tEntity.getEntityId());
    Assert.assertEquals(1, tEntity.getEvents().size());
    Assert.assertEquals(EventType.TASK_STARTED.toString(), tEntity.getEvents().get(0).getEventType());
    Assert.assertEquals(TaskType.MAP.toString(), tEntity.getEvents().get(0).getEventInfo().get("TASK_TYPE"));

    handleEvent(jheh, new JobHistoryEvent(t.jobId,
        new TaskStartedEvent(t.taskID, 0, TaskType.REDUCE, "")));
    entities = ts.getEntities("MAPREDUCE_TASK", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(t.taskID.toString(), tEntity.getEntityId());
    Assert.assertEquals(2, tEntity.getEvents().size());
    Assert.assertEquals(EventType.TASK_STARTED.toString(), tEntity.getEvents().get(1).getEventType());
    Assert.assertEquals(TaskType.REDUCE.toString(), tEntity.getEvents().get(0).getEventInfo().get("TASK_TYPE"));
    Assert.assertEquals(TaskType.MAP.toString(), tEntity.getEvents().get(1).getEventInfo().get("TASK_TYPE"));
  }
}
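The assertion pattern above encodes the store's ordering contract: a job's events come back sorted by timestamp, newest first, which is why JOB_KILLED (currentTime + 20) appears at index 0 in the final job block and JOB_QUEUE_CHANGED (currentTime - 20) at the end. A one-line sketch of a comparator that mirrors this assumed ordering (illustration only, not code from the test):

  // Newest-first ordering that the getEntities() assertions rely on.
  Comparator<TimelineEvent> newestFirst =
      Comparator.comparingLong(TimelineEvent::getTimestamp).reversed();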
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class TestMRTimelineEventHandling, method testMapreduceJobTimelineServiceEnabled:
@Test
public void testMapreduceJobTimelineServiceEnabled() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
  MiniMRYarnCluster cluster = null;
  FileSystem fs = null;
  Path inDir = new Path(GenericTestUtils.getTempPath("input"));
  Path outDir = new Path(GenericTestUtils.getTempPath("output"));
  try {
    fs = FileSystem.get(conf);
    cluster = new MiniMRYarnCluster(
        TestMRTimelineEventHandling.class.getSimpleName(), 1);
    cluster.init(conf);
    cluster.start();
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + cluster.getApplicationHistoryServer().getPort());
    TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
    // With per-job emission disabled, a successful job writes no entities.
    RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(0, entities.getEntities().size());
    // Turning per-job emission on makes the job entity appear.
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
  } finally {
    if (cluster != null) {
      cluster.stop();
    }
    deletePaths(fs, inDir, outDir);
  }
  // Second pass: start the cluster with emission enabled by default, then
  // toggle the per-job flag the other way around.
  conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
  cluster = null;
  try {
    cluster = new MiniMRYarnCluster(
        TestJobHistoryEventHandler.class.getSimpleName(), 1);
    cluster.init(conf);
    cluster.start();
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + cluster.getApplicationHistoryServer().getPort());
    TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
    // The per-job setting still wins: disabling it suppresses emission.
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
    RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(0, entities.getEntities().size());
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
  } finally {
    if (cluster != null) {
      cluster.stop();
    }
    deletePaths(fs, inDir, outDir);
  }
}
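The deletePaths helper used in both finally blocks is defined elsewhere in the test class. A plausible sketch, assuming it simply removes the given paths recursively and tolerates failures during cleanup (hypothetical body inferred from the call sites):

  // Hypothetical cleanup helper matching the deletePaths(fs, inDir, outDir)
  // call sites above; best-effort, so cleanup failures don't fail the test.
  private static void deletePaths(FileSystem fs, Path... paths) {
    if (fs == null) {
      return;
    }
    for (Path path : paths) {
      try {
        fs.delete(path, true); // recursive delete
      } catch (IOException e) {
        // Ignore: cleanup only.
      }
    }
  }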
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class JobHistoryFileReplayMapperV1, method writePerEntity:
private void writePerEntity(TimelineClient tlc, Set<TimelineEntity> entitySet,
    UserGroupInformation ugi) throws IOException, YarnException {
  for (TimelineEntity entity : entitySet) {
    tlc.putEntities(entity);
    LOG.info("wrote entity " + entity.getEntityId());
  }
}
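Since TimelineClient.putEntities is a varargs method, the loop above issues one call per entity. A hedged alternative that pushes the whole set in a single call (a sketch, assuming the set fits comfortably in one request; the ugi parameter is retained to mirror the original signature and is unused here, as in the source):

  // Batch variant: one putEntities call for the whole set.
  private void writePerEntityBatched(TimelineClient tlc,
      Set<TimelineEntity> entitySet, UserGroupInformation ugi)
      throws IOException, YarnException {
    tlc.putEntities(entitySet.toArray(new TimelineEntity[0]));
    LOG.info("wrote " + entitySet.size() + " entities in one batch");
  }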