Example 16 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class TestTimelineClientForATS1_5, the method generateEntity:

private static TimelineEntity generateEntity(String type) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity id");
    entity.setEntityType(type);
    entity.setStartTime(System.currentTimeMillis());
    return entity;
}
Also used : TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
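Entities built this way are normally handed to the v1 timeline client for publishing. Below is a minimal sketch of that step, assuming a running timeline server and using org.apache.hadoop.yarn.client.api.TimelineClient; the class name TimelinePublishSketch and the entity values are illustrative, not part of the test above.

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelinePublishSketch {
    public static void main(String[] args) throws Exception {
        YarnConfiguration conf = new YarnConfiguration();
        conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
        // Illustrative client lifecycle; a reachable timeline service is assumed.
        TimelineClient client = TimelineClient.createTimelineClient();
        client.init(conf);
        client.start();
        try {
            TimelineEntity entity = new TimelineEntity();
            entity.setEntityId("entity id");
            entity.setEntityType("TEST_TYPE");
            entity.setStartTime(System.currentTimeMillis());
            // putEntities reports per-entity problems in the response rather than throwing
            TimelinePutResponse response = client.putEntities(entity);
            System.out.println("put errors: " + response.getErrors().size());
        } finally {
            client.stop();
        }
    }
}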

Example 17 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class TestMRTimelineEventHandling, the method testMRTimelineEventHandling:

@Test
public void testMRTimelineEventHandling() throws Exception {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    MiniMRYarnCluster cluster = null;
    try {
        cluster = new MiniMRYarnCluster(TestMRTimelineEventHandling.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, MiniYARNCluster.getHostname() + ":" + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
        String localPathRoot = System.getProperty("test.build.data", "build/test/data");
        Path inDir = new Path(localPathRoot, "input");
        Path outDir = new Path(localPathRoot, "output");
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
        Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(0).getEventType());
        job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(2, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
        Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1).getEventType());
        Assert.assertEquals(EventType.JOB_FAILED.toString(), tEntity.getEvents().get(0).getEventType());
    } finally {
        if (cluster != null) {
            cluster.stop();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) MiniMRYarnCluster(org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) TimelineStore(org.apache.hadoop.yarn.server.timeline.TimelineStore) Test(org.junit.Test)
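The assertions above rely on the store returning an entity's events newest-first: index 0 is JOB_FINISHED (or JOB_FAILED) and the last element is AM_STARTED. A small sketch that walks the list in that order; the helper class and method names are hypothetical and not part of the test.

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;

public class EventOrderSketch {
    // Prints events in the order the store returns them: most recent first.
    static void printNewestFirst(TimelineEntity entity) {
        for (TimelineEvent event : entity.getEvents()) {
            System.out.println(event.getTimestamp() + " " + event.getEventType());
        }
    }
}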

Example 18 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class TimelineDataManager, the method doGetEvents:

private TimelineEvents doGetEvents(String entityType, SortedSet<String> entityIds, SortedSet<String> eventTypes, Long windowStart, Long windowEnd, Long limit, UserGroupInformation callerUGI) throws YarnException, IOException {
    TimelineEvents events = null;
    events = store.getEntityTimelines(entityType, entityIds, limit, windowStart, windowEnd, eventTypes);
    if (events != null) {
        Iterator<TimelineEvents.EventsOfOneEntity> eventsItr = events.getAllEvents().iterator();
        while (eventsItr.hasNext()) {
            TimelineEvents.EventsOfOneEntity eventsOfOneEntity = eventsItr.next();
            try {
                TimelineEntity entity = store.getEntity(eventsOfOneEntity.getEntityId(), eventsOfOneEntity.getEntityType(), EnumSet.of(Field.PRIMARY_FILTERS));
                addDefaultDomainIdIfAbsent(entity);
                // check ACLs
                if (!timelineACLsManager.checkAccess(callerUGI, ApplicationAccessType.VIEW_APP, entity)) {
                    eventsItr.remove();
                }
            } catch (Exception e) {
                LOG.warn("Error when verifying access for user " + callerUGI + " on the events of the timeline entity " + new EntityIdentifier(eventsOfOneEntity.getEntityId(), eventsOfOneEntity.getEntityType()), e);
                eventsItr.remove();
            }
        }
    }
    if (events == null) {
        return new TimelineEvents();
    }
    return events;
}
Also used : TimelineEvents(org.apache.hadoop.yarn.api.records.timeline.TimelineEvents) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException) BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException)
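The core of doGetEvents is an iterate-and-remove pattern: walk the mutable event collections with an Iterator and drop any entry whose owning entity the caller may not view, also dropping entries whose ACL check fails with an exception. A self-contained sketch of just that pattern; the AccessChecker interface is a hypothetical stand-in for TimelineACLsManager.checkAccess.

import java.util.Iterator;
import java.util.List;

public class AccessFilterSketch {
    interface AccessChecker {
        boolean canView(String entityId, String entityType) throws Exception;
    }

    // Removes entries the caller is not allowed to see; on any failure during the
    // check, the entry is removed as well (fail closed), mirroring doGetEvents.
    static void filterInPlace(List<String[]> idTypePairs, AccessChecker checker) {
        Iterator<String[]> it = idTypePairs.iterator();
        while (it.hasNext()) {
            String[] pair = it.next();
            try {
                if (!checker.canView(pair[0], pair[1])) {
                    it.remove();
                }
            } catch (Exception e) {
                it.remove();
            }
        }
    }
}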

Example 19 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class TestApplicationHistoryManagerOnTimelineStore, the method createAppAttemptTimelineEntity:

private static TimelineEntity createAppAttemptTimelineEntity(ApplicationAttemptId appAttemptId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType(AppAttemptMetricsConstants.ENTITY_TYPE);
    entity.setEntityId(appAttemptId.toString());
    entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
    entity.addPrimaryFilter(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appAttemptId.getApplicationId().toString());
    entity.addPrimaryFilter(TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn");
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 1L);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO, "test tracking url");
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO, "test original tracking url");
    eventInfo.put(AppAttemptMetricsConstants.HOST_INFO, "test host");
    eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_INFO, 100);
    eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO, ContainerId.newContainerId(appAttemptId, 1));
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 2L);
    eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO, "test tracking url");
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO, "test original tracking url");
    eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO, "test diagnostics info");
    eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_INFO, FinalApplicationStatus.UNDEFINED.toString());
    eventInfo.put(AppAttemptMetricsConstants.STATE_INFO, YarnApplicationAttemptState.FINISHED.toString());
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    return entity;
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timeline.TimelineEvent) HashMap(java.util.HashMap) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
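The timestamps above are written as Integer.MAX_VALUE + 1L (and + 2L) so the addition happens in long arithmetic; without the long literal, the int expression would overflow before being widened. A minimal sketch of the difference; the class name is illustrative.

public class TimestampPromotionSketch {
    public static void main(String[] args) {
        long overflowed = Integer.MAX_VALUE + 1;  // int overflow first: -2147483648
        long promoted = Integer.MAX_VALUE + 1L;   // long arithmetic: 2147483648
        System.out.println(overflowed + " vs " + promoted);
    }
}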

Example 20 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class TestApplicationHistoryManagerOnTimelineStore, the method createApplicationTimelineEntity:

private static TimelineEntity createApplicationTimelineEntity(ApplicationId appId, boolean emptyACLs, boolean noAttemptId, boolean wrongAppId, boolean enableUpdateEvent, YarnApplicationState state) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType(ApplicationMetricsConstants.ENTITY_TYPE);
    if (wrongAppId) {
        entity.setEntityId("wrong_app_id");
    } else {
        entity.setEntityId(appId.toString());
    }
    entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
    entity.addPrimaryFilter(TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn");
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ApplicationMetricsConstants.NAME_ENTITY_INFO, "test app");
    entityInfo.put(ApplicationMetricsConstants.TYPE_ENTITY_INFO, "test app type");
    entityInfo.put(ApplicationMetricsConstants.USER_ENTITY_INFO, "user1");
    entityInfo.put(ApplicationMetricsConstants.QUEUE_ENTITY_INFO, "test queue");
    entityInfo.put(ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO, "false");
    entityInfo.put(ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO, Priority.newInstance(0));
    entityInfo.put(ApplicationMetricsConstants.SUBMITTED_TIME_ENTITY_INFO, Integer.MAX_VALUE + 1L);
    entityInfo.put(ApplicationMetricsConstants.APP_MEM_METRICS, 123);
    entityInfo.put(ApplicationMetricsConstants.APP_CPU_METRICS, 345);
    entityInfo.put(ApplicationMetricsConstants.APP_MEM_PREEMPT_METRICS, 456);
    entityInfo.put(ApplicationMetricsConstants.APP_CPU_PREEMPT_METRICS, 789);
    if (emptyACLs) {
        entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, "");
    } else {
        entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, "user2");
    }
    Set<String> appTags = new HashSet<String>();
    appTags.add("Test_APP_TAGS_1");
    appTags.add("Test_APP_TAGS_2");
    entityInfo.put(ApplicationMetricsConstants.APP_TAGS_INFO, appTags);
    entity.setOtherInfo(entityInfo);
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 2L + appId.getId());
    entity.addEvent(tEvent);
    tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 3L + appId.getId());
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, "test diagnostics info");
    eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO, FinalApplicationStatus.UNDEFINED.toString());
    eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO, state.toString());
    if (!noAttemptId) {
        eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO, ApplicationAttemptId.newInstance(appId, 1));
    }
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    // send a YARN_APPLICATION_STATE_UPDATED event
    // after YARN_APPLICATION_FINISHED
    // The final YarnApplicationState should not be changed
    tEvent = new TimelineEvent();
    tEvent.setEventType(ApplicationMetricsConstants.STATE_UPDATED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 4L + appId.getId());
    eventInfo = new HashMap<String, Object>();
    eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO, YarnApplicationState.KILLED);
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    if (enableUpdateEvent) {
        tEvent = new TimelineEvent();
        long updatedTimeIndex = 4L;
        createAppModifiedEvent(appId, tEvent, updatedTimeIndex++, "changed queue", 5);
        entity.addEvent(tEvent);
        // Change priority alone
        tEvent = new TimelineEvent();
        createAppModifiedEvent(appId, tEvent, updatedTimeIndex++, "changed queue", 6);
        // Now change queue
        tEvent = new TimelineEvent();
        createAppModifiedEvent(appId, tEvent, updatedTimeIndex++, "changed queue1", 6);
        entity.addEvent(tEvent);
    }
    return entity;
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timeline.TimelineEvent) HashMap(java.util.HashMap) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) HashSet(java.util.HashSet)
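Helpers like this one are used to seed a timeline store that the history manager then reads back. A minimal sketch of that seeding step, assuming an in-memory store such as org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore is available on the classpath; the entity here is a trimmed-down, illustrative stand-in rather than the full helper above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;

public class StoreSeedSketch {
    public static void main(String[] args) throws Exception {
        MemoryTimelineStore store = new MemoryTimelineStore();
        store.init(new Configuration());
        store.start();
        try {
            TimelineEntity entity = new TimelineEntity();
            entity.setEntityType("YARN_APPLICATION");  // illustrative type string
            entity.setEntityId("application_0_0001");  // illustrative id
            entity.setStartTime(System.currentTimeMillis());
            TimelineEntities entities = new TimelineEntities();
            entities.addEntity(entity);
            // put(...) returns a TimelinePutResponse listing any per-entity errors
            store.put(entities);
        } finally {
            store.stop();
        }
    }
}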

Aggregations

TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 148 usages
TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 66 usages
Test (org.junit.Test): 61 usages
TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 30 usages
HashMap (java.util.HashMap): 23 usages
TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse): 21 usages
IOException (java.io.IOException): 17 usages
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 16 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 13 usages
WebResource (com.sun.jersey.api.client.WebResource): 12 usages
Set (java.util.Set): 12 usages
Configuration (org.apache.hadoop.conf.Configuration): 11 usages
HashSet (java.util.HashSet): 8 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 8 usages
Map (java.util.Map): 7 usages
Path (org.apache.hadoop.fs.Path): 7 usages
Field (org.apache.hadoop.yarn.server.timeline.TimelineReader.Field): 6 usages
DAGHistoryEvent (org.apache.tez.dag.history.DAGHistoryEvent): 6 usages
TezDAGID (org.apache.tez.dag.records.TezDAGID): 6 usages
ArrayList (java.util.ArrayList): 5 usages