Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestMRTimelineEventHandling, method testMapreduceJobTimelineServiceEnabled.
@Test
public void testMapreduceJobTimelineServiceEnabled() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
  MiniMRYarnCluster cluster = null;
  FileSystem fs = null;
  Path inDir = new Path(GenericTestUtils.getTempPath("input"));
  Path outDir = new Path(GenericTestUtils.getTempPath("output"));
  try {
    fs = FileSystem.get(conf);
    cluster = new MiniMRYarnCluster(
        TestMRTimelineEventHandling.class.getSimpleName(), 1);
    cluster.init(conf);
    cluster.start();
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + cluster.getApplicationHistoryServer().getPort());
    TimelineStore ts = cluster.getApplicationHistoryServer()
        .getTimelineStore();

    // Timeline service is enabled but per-job emission is off: a
    // successful job must leave no MAPREDUCE_JOB entities in the store.
    RunningJob job =
        UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED,
        job.getJobStatus().getState().getValue());
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(0, entities.getEntities().size());

    // Turn per-job emission on: exactly one entity, keyed by the job id,
    // should now be stored.
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED,
        job.getJobStatus().getState().getValue());
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null,
        null, null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
  } finally {
    if (cluster != null) {
      cluster.stop();
    }
    deletePaths(fs, inDir, outDir);
  }

  // Repeat on a fresh cluster with the opposite starting value (emission
  // enabled up front) to verify the per-job flag works in both directions.
  conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
  cluster = null;
  try {
    cluster = new MiniMRYarnCluster(
        TestJobHistoryEventHandler.class.getSimpleName(), 1);
    cluster.init(conf);
    cluster.start();
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + cluster.getApplicationHistoryServer().getPort());
    TimelineStore ts = cluster.getApplicationHistoryServer()
        .getTimelineStore();

    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
    RunningJob job =
        UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED,
        job.getJobStatus().getState().getValue());
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(0, entities.getEntities().size());

    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED,
        job.getJobStatus().getState().getValue());
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null,
        null, null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
  } finally {
    if (cluster != null) {
      cluster.stop();
    }
    deletePaths(fs, inDir, outDir);
  }
}
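Every lookup above goes through the same ten-argument getEntities call with most filters left null. As a reading aid, here is the identical call with each position annotated; the parameter names follow the v1 TimelineReader#getEntities signature and are an assumption about this Hadoop version, not authoritative documentation.

// Annotated version of the getEntities call used throughout this test.
// Parameter names are an assumption based on the v1 TimelineReader API.
TimelineEntities jobEntities = ts.getEntities(
    "MAPREDUCE_JOB", // entityType: which kind of entity to fetch
    null,            // limit: max entities to return (null = store default)
    null,            // windowStart: earliest start time to include
    null,            // windowEnd: latest start time to include
    null,            // fromId: entity id to paginate from
    null,            // fromTs: insert-time cutoff for pagination
    null,            // primaryFilter: single NameValuePair to match
    null,            // secondaryFilters: additional NameValuePair matches
    null,            // fieldsToRetrieve: EnumSet of fields (null = all)
    null);           // checkAcl: access-control callback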
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestDistributedShell, method checkTimelineV1.
private void checkTimelineV1(boolean haveDomain) throws Exception {
  TimelineDomain domain = null;
  if (haveDomain) {
    domain = yarnCluster.getApplicationHistoryServer().getTimelineStore()
        .getDomain("TEST_DOMAIN");
    Assert.assertNotNull(domain);
    Assert.assertEquals("reader_user reader_group", domain.getReaders());
    Assert.assertEquals("writer_user writer_group", domain.getWriters());
  }
  // Exactly one AM attempt is expected, carrying two events
  // (start and finish).
  TimelineEntities entitiesAttempts = yarnCluster
      .getApplicationHistoryServer().getTimelineStore()
      .getEntities(ApplicationMaster.DSEntity.DS_APP_ATTEMPT.toString(),
          null, null, null, null, null, null, null, null, null);
  Assert.assertNotNull(entitiesAttempts);
  Assert.assertEquals(1, entitiesAttempts.getEntities().size());
  Assert.assertEquals(2,
      entitiesAttempts.getEntities().get(0).getEvents().size());
  Assert.assertEquals(
      entitiesAttempts.getEntities().get(0).getEntityType().toString(),
      ApplicationMaster.DSEntity.DS_APP_ATTEMPT.toString());
  if (haveDomain) {
    Assert.assertEquals(domain.getId(),
        entitiesAttempts.getEntities().get(0).getDomainId());
  } else {
    Assert.assertEquals("DEFAULT",
        entitiesAttempts.getEntities().get(0).getDomainId());
  }
  // Look up the containers of that attempt's application via the
  // application-id primary filter.
  String currAttemptEntityId =
      entitiesAttempts.getEntities().get(0).getEntityId();
  ApplicationAttemptId attemptId =
      ApplicationAttemptId.fromString(currAttemptEntityId);
  NameValuePair primaryFilter = new NameValuePair(
      ApplicationMaster.APPID_TIMELINE_FILTER_NAME,
      attemptId.getApplicationId().toString());
  TimelineEntities entities = yarnCluster.getApplicationHistoryServer()
      .getTimelineStore()
      .getEntities(ApplicationMaster.DSEntity.DS_CONTAINER.toString(),
          null, null, null, null, null, primaryFilter, null, null, null);
  Assert.assertNotNull(entities);
  Assert.assertEquals(2, entities.getEntities().size());
  Assert.assertEquals(
      entities.getEntities().get(0).getEntityType().toString(),
      ApplicationMaster.DSEntity.DS_CONTAINER.toString());
  if (haveDomain) {
    Assert.assertEquals(domain.getId(),
        entities.getEntities().get(0).getDomainId());
  } else {
    Assert.assertEquals("DEFAULT",
        entities.getEntities().get(0).getDomainId());
  }
}
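The TEST_DOMAIN asserted above is published by the client side before this check runs. A minimal sketch of how such a domain can be written through the public org.apache.hadoop.yarn.client.api.TimelineClient API; the helper name and the passed-in conf are illustrative assumptions, and the timeline service is assumed to be running and enabled in that conf.

// Hypothetical helper: publish the access-control domain that
// checkTimelineV1 asserts on when haveDomain is true.
private void publishTestDomain(Configuration conf)
    throws IOException, YarnException {
  TimelineClient client = TimelineClient.createTimelineClient();
  client.init(conf);
  client.start();
  try {
    TimelineDomain domain = new TimelineDomain();
    domain.setId("TEST_DOMAIN");
    // Readers/writers are "users groups" strings, matching the values
    // the test asserts.
    domain.setReaders("reader_user reader_group");
    domain.setWriters("writer_user writer_group");
    client.putDomain(domain);
  } finally {
    client.stop();
  }
}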
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TimelineWriter, method putEntities.
public TimelinePutResponse putEntities(TimelineEntity... entities)
    throws IOException, YarnException {
  TimelineEntities entitiesContainer = new TimelineEntities();
  for (TimelineEntity entity : entities) {
    // Reject entities that cannot be stored: id and type are mandatory.
    if (entity.getEntityId() == null || entity.getEntityType() == null) {
      throw new YarnException("Incomplete entity without entity id/type");
    }
    entitiesContainer.addEntity(entity);
  }
  // Wrap all entities in a single TimelineEntities container and POST
  // them to the timeline web service in one request.
  ClientResponse resp = doPosting(entitiesContainer, null);
  return resp.getEntity(TimelinePutResponse.class);
}
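Callers typically reach this writer indirectly through the public org.apache.hadoop.yarn.client.api.TimelineClient rather than invoking it themselves. A minimal caller-side sketch of how the mandatory id/type check above comes into play; the entity id, entity type, and conf below are illustrative assumptions.

// Build an entity with the mandatory id and type, submit it, and inspect
// the per-entity errors in the response. Values are illustrative only.
TimelineEntity entity = new TimelineEntity();
entity.setEntityId("job_1234567890123_0001"); // hypothetical job id
entity.setEntityType("MAPREDUCE_JOB");
entity.setStartTime(System.currentTimeMillis());

TimelineClient client = TimelineClient.createTimelineClient();
client.init(conf); // conf assumed to point at a running timeline service
client.start();
TimelinePutResponse response = client.putEntities(entity);
// Each TimelinePutError identifies an entity the store rejected.
if (!response.getErrors().isEmpty()) {
  throw new IOException("Failed to put entity: " + response.getErrors());
}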