
Example 1 with ApplicationAttemptEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity in the Apache Hadoop project (apache/hadoop).

From the class TimelineServiceV2Publisher, the method appAttemptFinished:

@SuppressWarnings("unchecked")
@Override
public void appAttemptFinished(RMAppAttempt appAttempt, RMAppAttemptState appAttemptState, RMApp app, long finishedTime) {
    ApplicationAttemptEntity entity = createAppAttemptEntity(appAttempt.getAppAttemptId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setId(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(finishedTime);
    entity.addEvent(tEvent);
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO, appAttempt.getDiagnostics());
    // app will get the final status from app attempt, or create one
    // based on app state if it doesn't exist
    entityInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_INFO, app.getFinalApplicationStatus().toString());
    entityInfo.put(AppAttemptMetricsConstants.STATE_INFO, RMServerUtils.createApplicationAttemptState(appAttemptState).toString());
    entity.setInfo(entityInfo);
    getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity, appAttempt.getAppAttemptId().getApplicationId()));
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) ApplicationAttemptEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity) HashMap(java.util.HashMap)
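
For orientation, here is a minimal, hedged companion sketch of the pattern the publisher method above follows: build the attempt entity, parent it under its application, attach a finished event, and set the info map. It uses only hadoop-yarn-api classes; the event id and info key strings are illustrative placeholders, not the actual AppAttemptMetricsConstants values.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;

public class AppAttemptFinishedSketch {

    // Builds an attempt entity carrying a "finished" event and a small info map,
    // mirroring the shape produced by appAttemptFinished above.
    public static ApplicationAttemptEntity buildFinishedAttempt(
            ApplicationAttemptId attemptId, long finishedTime, String diagnostics) {
        ApplicationAttemptEntity entity = new ApplicationAttemptEntity();
        entity.setId(attemptId.toString());
        // Parent the attempt under its application, as createAppAttemptEntity does.
        entity.setParent(TimelineEntityType.YARN_APPLICATION.name(),
                attemptId.getApplicationId().toString());

        TimelineEvent finished = new TimelineEvent();
        finished.setId("APP_ATTEMPT_FINISHED"); // illustrative event id, not the real constant
        finished.setTimestamp(finishedTime);
        entity.addEvent(finished);

        Map<String, Object> info = new HashMap<>();
        info.put("DIAGNOSTICS_INFO", diagnostics); // illustrative info key, not the real constant
        entity.setInfo(info);
        return entity;
    }

    public static void main(String[] args) {
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
                ApplicationId.newInstance(System.currentTimeMillis(), 1), 1);
        System.out.println(buildFinishedAttempt(attemptId, System.currentTimeMillis(), "done"));
    }
}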

Example 2 with ApplicationAttemptEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity in the Apache Hadoop project (apache/hadoop).

From the test class TestTimelineServiceClientIntegration, the method testPutExtendedEntities:

@Test
public void testPutExtendedEntities() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    TimelineV2Client client = TimelineV2Client.createTimelineClient(appId);
    try {
        // set the timeline service address manually
        client.setTimelineServiceAddress(collectorManager.getRestServerBindAddress());
        client.init(conf);
        client.start();
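        // Build a cluster -> flow run -> application hierarchy, then attempt, container,
        // user and queue entities to publish alongside it.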
        ClusterEntity cluster = new ClusterEntity();
        cluster.setId(YarnConfiguration.DEFAULT_RM_CLUSTER_ID);
        FlowRunEntity flow = new FlowRunEntity();
        flow.setUser(UserGroupInformation.getCurrentUser().getShortUserName());
        flow.setName("test_flow_name");
        flow.setVersion("test_flow_version");
        flow.setRunId(1L);
        flow.setParent(cluster.getType(), cluster.getId());
        ApplicationEntity app = new ApplicationEntity();
        app.setId(appId.toString());
        flow.addChild(app.getType(), app.getId());
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
        ApplicationAttemptEntity appAttempt = new ApplicationAttemptEntity();
        appAttempt.setId(attemptId.toString());
        ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
        ContainerEntity container = new ContainerEntity();
        container.setId(containerId.toString());
        UserEntity user = new UserEntity();
        user.setId(UserGroupInformation.getCurrentUser().getShortUserName());
        QueueEntity queue = new QueueEntity();
        queue.setId("default_queue");
        client.putEntities(cluster, flow, app, appAttempt, container, user, queue);
        client.putEntitiesAsync(cluster, flow, app, appAttempt, container, user, queue);
    } finally {
        client.stop();
    }
}
Also used : ApplicationAttemptEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity) TimelineV2Client(org.apache.hadoop.yarn.client.api.TimelineV2Client) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) ContainerEntity(org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity) ApplicationEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity) FlowRunEntity(org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity) QueueEntity(org.apache.hadoop.yarn.api.records.timelineservice.QueueEntity) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) UserEntity(org.apache.hadoop.yarn.api.records.timelineservice.UserEntity) ClusterEntity(org.apache.hadoop.yarn.api.records.timelineservice.ClusterEntity) Test(org.junit.Test)
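
The test above also illustrates the client lifecycle. Below is a trimmed, hedged sketch of that lifecycle for publishing a single ApplicationAttemptEntity: create the client for an application, init and start it, publish synchronously and asynchronously, then stop. In a real application master the collector address normally arrives over the AM-RM protocol; setting it by hand, as the test does, is shown here only as an assumption so the example stays self-contained.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity;
import org.apache.hadoop.yarn.client.api.TimelineV2Client;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class PublishAttemptSketch {

    // Publishes one attempt entity through a TimelineV2Client, both blocking and async.
    public static void publish(ApplicationId appId, String collectorAddress) throws Exception {
        TimelineV2Client client = TimelineV2Client.createTimelineClient(appId);
        try {
            // In a real AM the collector address is supplied via the AM-RM protocol;
            // setting it manually here is an assumption borrowed from the test above.
            client.setTimelineServiceAddress(collectorAddress);
            client.init(new YarnConfiguration());
            client.start();

            ApplicationAttemptEntity attempt = new ApplicationAttemptEntity();
            attempt.setId(ApplicationAttemptId.newInstance(appId, 1).toString());

            client.putEntities(attempt);      // blocking publish
            client.putEntitiesAsync(attempt); // fire-and-forget publish
        } finally {
            client.stop();
        }
    }
}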

Example 3 with ApplicationAttemptEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity in the Apache Hadoop project (apache/hadoop).

From the test class TestFileSystemTimelineReaderImpl, the helper loadEntityData:

private static void loadEntityData(String rootDir) throws Exception {
    File appDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1", "app1", "app");
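    // Generic "app"-type entities for app1 (cluster1, user1, flow1, run 1) are written here.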
    TimelineEntity entity11 = new TimelineEntity();
    entity11.setId("id_1");
    entity11.setType("app");
    entity11.setCreatedTime(1425016502000L);
    Map<String, Object> info1 = new HashMap<String, Object>();
    info1.put("info1", "val1");
    info1.put("info2", "val5");
    entity11.addInfo(info1);
    TimelineEvent event = new TimelineEvent();
    event.setId("event_1");
    event.setTimestamp(1425016502003L);
    entity11.addEvent(event);
    Set<TimelineMetric> metrics = new HashSet<TimelineMetric>();
    TimelineMetric metric1 = new TimelineMetric();
    metric1.setId("metric1");
    metric1.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric1.addValue(1425016502006L, 113);
    metrics.add(metric1);
    TimelineMetric metric2 = new TimelineMetric();
    metric2.setId("metric2");
    metric2.setType(TimelineMetric.Type.TIME_SERIES);
    metric2.addValue(1425016502016L, 34);
    metrics.add(metric2);
    entity11.setMetrics(metrics);
    Map<String, String> configs = new HashMap<String, String>();
    configs.put("config_1", "127");
    entity11.setConfigs(configs);
    entity11.addRelatesToEntity("flow", "flow1");
    entity11.addIsRelatedToEntity("type1", "tid1_1");
    writeEntityFile(entity11, appDir);
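    // A second fragment with the same id ("id_1") and type ("app"): extra configs,
    // metrics and an event, written to the same app directory.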
    TimelineEntity entity12 = new TimelineEntity();
    entity12.setId("id_1");
    entity12.setType("app");
    configs.clear();
    configs.put("config_2", "23");
    configs.put("config_3", "abc");
    entity12.addConfigs(configs);
    metrics.clear();
    TimelineMetric metric12 = new TimelineMetric();
    metric12.setId("metric2");
    metric12.setType(TimelineMetric.Type.TIME_SERIES);
    metric12.addValue(1425016502032L, 48);
    metric12.addValue(1425016502054L, 51);
    metrics.add(metric12);
    TimelineMetric metric3 = new TimelineMetric();
    metric3.setId("metric3");
    metric3.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric3.addValue(1425016502060L, 23L);
    metrics.add(metric3);
    entity12.setMetrics(metrics);
    entity12.addIsRelatedToEntity("type1", "tid1_2");
    entity12.addIsRelatedToEntity("type2", "tid2_1`");
    TimelineEvent event15 = new TimelineEvent();
    event15.setId("event_5");
    event15.setTimestamp(1425016502017L);
    entity12.addEvent(event15);
    writeEntityFile(entity12, appDir);
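    // entity2 ("id_2"): its own info, configs, metrics, an event and a relatesTo relation.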
    TimelineEntity entity2 = new TimelineEntity();
    entity2.setId("id_2");
    entity2.setType("app");
    entity2.setCreatedTime(1425016501050L);
    Map<String, Object> info2 = new HashMap<String, Object>();
    info2.put("info2", 4);
    entity2.addInfo(info2);
    Map<String, String> configs2 = new HashMap<String, String>();
    configs2.put("config_1", "129");
    configs2.put("config_3", "def");
    entity2.setConfigs(configs2);
    TimelineEvent event2 = new TimelineEvent();
    event2.setId("event_2");
    event2.setTimestamp(1425016501003L);
    entity2.addEvent(event2);
    Set<TimelineMetric> metrics2 = new HashSet<TimelineMetric>();
    TimelineMetric metric21 = new TimelineMetric();
    metric21.setId("metric1");
    metric21.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric21.addValue(1425016501006L, 300);
    metrics2.add(metric21);
    TimelineMetric metric22 = new TimelineMetric();
    metric22.setId("metric2");
    metric22.setType(TimelineMetric.Type.TIME_SERIES);
    metric22.addValue(1425016501056L, 31);
    metric22.addValue(1425016501084L, 70);
    metrics2.add(metric22);
    TimelineMetric metric23 = new TimelineMetric();
    metric23.setId("metric3");
    metric23.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric23.addValue(1425016502060L, 23L);
    metrics2.add(metric23);
    entity2.setMetrics(metrics2);
    entity2.addRelatesToEntity("flow", "flow2");
    writeEntityFile(entity2, appDir);
    TimelineEntity entity3 = new TimelineEntity();
    entity3.setId("id_3");
    entity3.setType("app");
    entity3.setCreatedTime(1425016501050L);
    Map<String, Object> info3 = new HashMap<String, Object>();
    info3.put("info2", 3.5);
    info3.put("info4", 20);
    entity3.addInfo(info3);
    Map<String, String> configs3 = new HashMap<String, String>();
    configs3.put("config_1", "123");
    configs3.put("config_3", "abc");
    entity3.setConfigs(configs3);
    TimelineEvent event3 = new TimelineEvent();
    event3.setId("event_2");
    event3.setTimestamp(1425016501003L);
    entity3.addEvent(event3);
    TimelineEvent event4 = new TimelineEvent();
    event4.setId("event_4");
    event4.setTimestamp(1425016502006L);
    entity3.addEvent(event4);
    Set<TimelineMetric> metrics3 = new HashSet<TimelineMetric>();
    TimelineMetric metric31 = new TimelineMetric();
    metric31.setId("metric1");
    metric31.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric31.addValue(1425016501006L, 124);
    metrics3.add(metric31);
    TimelineMetric metric32 = new TimelineMetric();
    metric32.setId("metric2");
    metric32.setType(TimelineMetric.Type.TIME_SERIES);
    metric32.addValue(1425016501056L, 31);
    metric32.addValue(1425016501084L, 74);
    metrics3.add(metric32);
    entity3.setMetrics(metrics3);
    entity3.addIsRelatedToEntity("type1", "tid1_2");
    writeEntityFile(entity3, appDir);
    TimelineEntity entity4 = new TimelineEntity();
    entity4.setId("id_4");
    entity4.setType("app");
    entity4.setCreatedTime(1425016502050L);
    TimelineEvent event44 = new TimelineEvent();
    event44.setId("event_4");
    event44.setTimestamp(1425016502003L);
    entity4.addEvent(event44);
    writeEntityFile(entity4, appDir);
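    // Application attempt entities go into a separate directory keyed by
    // TimelineEntityType.YARN_APPLICATION_ATTEMPT.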
    File attemptDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1", "app1", TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString());
    ApplicationAttemptEntity attempt1 = new ApplicationAttemptEntity();
    attempt1.setId("app-attempt-1");
    attempt1.setCreatedTime(1425017502003L);
    writeEntityFile(attempt1, attemptDir);
    ApplicationAttemptEntity attempt2 = new ApplicationAttemptEntity();
    attempt2.setId("app-attempt-2");
    attempt2.setCreatedTime(1425017502004L);
    writeEntityFile(attempt2, attemptDir);
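    // Container entities, each parented to one of the attempts above via its Identifier.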
    File entityDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1", "app1", TimelineEntityType.YARN_CONTAINER.toString());
    ContainerEntity containerEntity1 = new ContainerEntity();
    containerEntity1.setId("container_1_1");
    containerEntity1.setParent(attempt1.getIdentifier());
    containerEntity1.setCreatedTime(1425017502003L);
    writeEntityFile(containerEntity1, entityDir);
    ContainerEntity containerEntity2 = new ContainerEntity();
    containerEntity2.setId("container_2_1");
    containerEntity2.setParent(attempt2.getIdentifier());
    containerEntity2.setCreatedTime(1425018502003L);
    writeEntityFile(containerEntity2, entityDir);
    ContainerEntity containerEntity3 = new ContainerEntity();
    containerEntity3.setId("container_2_2");
    containerEntity3.setParent(attempt2.getIdentifier());
    containerEntity3.setCreatedTime(1425018502003L);
    writeEntityFile(containerEntity3, entityDir);
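    // A second application directory (app2) holding a single generic entity.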
    File appDir2 = getAppDir(rootDir, "cluster1", "user1", "flow1,flow", "1", "app2", "app");
    TimelineEntity entity5 = new TimelineEntity();
    entity5.setId("id_5");
    entity5.setType("app");
    entity5.setCreatedTime(1425016502050L);
    writeEntityFile(entity5, appDir2);
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) ApplicationAttemptEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity) HashMap(java.util.HashMap) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) ContainerEntity(org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity) File(java.io.File) HashSet(java.util.HashSet)
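
The generated data above mixes the two metric kinds. The hedged sketch below isolates that distinction: a SINGLE_VALUE metric carries one latest value, while a TIME_SERIES metric accumulates timestamped points. Metric ids and values are illustrative.

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;

public class MetricKindsSketch {

    // Returns an entity holding one SINGLE_VALUE metric and one TIME_SERIES metric.
    public static TimelineEntity entityWithMetrics(String id) {
        TimelineMetric gauge = new TimelineMetric();
        gauge.setId("metric_single"); // illustrative id
        gauge.setType(TimelineMetric.Type.SINGLE_VALUE);
        gauge.addValue(System.currentTimeMillis(), 42);

        TimelineMetric series = new TimelineMetric();
        series.setId("metric_series"); // illustrative id
        series.setType(TimelineMetric.Type.TIME_SERIES);
        series.addValue(System.currentTimeMillis() - 1000L, 10);
        series.addValue(System.currentTimeMillis(), 15);

        Set<TimelineMetric> metrics = new HashSet<>();
        metrics.add(gauge);
        metrics.add(series);

        TimelineEntity entity = new TimelineEntity();
        entity.setId(id);
        entity.setType("app"); // matches the generic type used by the test data above
        entity.setMetrics(metrics);
        return entity;
    }
}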

Example 4 with ApplicationAttemptEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity in the Apache Hadoop project (apache/hadoop).

From the class TimelineServiceV2Publisher, the helper createAppAttemptEntity:

private static ApplicationAttemptEntity createAppAttemptEntity(ApplicationAttemptId appAttemptId) {
    ApplicationAttemptEntity entity = new ApplicationAttemptEntity();
    entity.setId(appAttemptId.toString());
    entity.setParent(new Identifier(TimelineEntityType.YARN_APPLICATION.name(), appAttemptId.getApplicationId().toString()));
    return entity;
}
Also used : Identifier(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier) ApplicationAttemptEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity)
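
Putting the helper above together with the parenting seen in the earlier examples, the hedged sketch below shows one plausible way to wire the hierarchy: the attempt points at its application through an explicit Identifier, and a container points back at the attempt via getIdentifier(). The class and method names in the sketch are assumptions introduced for illustration.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;

public class EntityHierarchySketch {

    // Links attempt -> application with an explicit Identifier, and container -> attempt
    // through getIdentifier(), the same two styles used in the examples above.
    public static ContainerEntity linkContainerToAttempt(
            ApplicationAttemptId attemptId, ContainerId containerId) {
        ApplicationAttemptEntity attempt = new ApplicationAttemptEntity();
        attempt.setId(attemptId.toString());
        attempt.setParent(new Identifier(TimelineEntityType.YARN_APPLICATION.name(),
                attemptId.getApplicationId().toString()));

        ContainerEntity container = new ContainerEntity();
        container.setId(containerId.toString());
        container.setParent(attempt.getIdentifier());
        return container;
    }
}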

Aggregations

ApplicationAttemptEntity (org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity) 4
HashMap (java.util.HashMap) 2
ContainerEntity (org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity) 2
TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) 2
File (java.io.File) 1
HashSet (java.util.HashSet) 1
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId) 1
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId) 1
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId) 1
ApplicationEntity (org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity) 1
ClusterEntity (org.apache.hadoop.yarn.api.records.timelineservice.ClusterEntity) 1
FlowRunEntity (org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity) 1
QueueEntity (org.apache.hadoop.yarn.api.records.timelineservice.QueueEntity) 1
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) 1
Identifier (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier) 1
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) 1
UserEntity (org.apache.hadoop.yarn.api.records.timelineservice.UserEntity) 1
TimelineV2Client (org.apache.hadoop.yarn.client.api.TimelineV2Client) 1
Test (org.junit.Test) 1