Example 41 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

Class TestTimelineReaderWebServicesHBaseStorage, method testGetEntitiesEventFilters.

@Test
public void testGetEntitiesEventFilters() throws Exception {
    Client client = createClient();
    try {
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?eventfilters=event1,event3");
        ClientResponse resp = getResponse(client, uri);
        Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?eventfilters=!(event1,event3)");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
        // eventfilters=!(event1,event3) OR event5,event6
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?eventfilters=!(event1,event3)%20OR%20event5,event6");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity2"));
        }
        //  eventfilters=(!(event1,event3) OR event5,event6) OR
        // (event1,event2 AND (event3,event4))
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?eventfilters=(!(event1,event3)%20OR%20event5," + "event6)%20OR%20(event1,event2%20AND%20(event3,event4))");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
    } finally {
        client.destroy();
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse) GenericType(com.sun.jersey.api.client.GenericType) Set(java.util.Set) HashSet(java.util.HashSet) Client(com.sun.jersey.api.client.Client) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) URI(java.net.URI) Test(org.junit.Test)
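
The test above exercises the reader's eventfilters query parameter, whose small expression grammar is visible in the URIs: a comma between event ids is a conjunction (the entity must contain all of them), explicit AND/OR keywords combine parenthesized subexpressions, a leading ! negates a bracketed list, and spaces must be percent-encoded as %20. A minimal sketch of assembling such a URI; the helper name buildEventFilterUri is illustrative, not part of Hadoop:

import java.net.URI;

// Illustrative helper: builds a v2 timeline reader URI for an event-filter
// expression such as "!(event1,event3) OR event5,event6".
private static URI buildEventFilterUri(int serverPort, String clusterId,
        String appId, String entityType, String filterExpr) {
    // The reader parses the expression server-side; only the spaces need
    // encoding for the query string.
    String encoded = filterExpr.replace(" ", "%20");
    return URI.create("http://localhost:" + serverPort
        + "/ws/v2/timeline/clusters/" + clusterId + "/apps/" + appId
        + "/entities/" + entityType + "?eventfilters=" + encoded);
}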

Example 42 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

Class TimelineServiceV2Publisher, method containerCreated.

@SuppressWarnings("unchecked")
@Override
public void containerCreated(RMContainer container, long createdTime) {
    if (publishContainerEvents) {
        TimelineEntity entity = createContainerEntity(container.getContainerId());
        entity.setCreatedTime(createdTime);
        TimelineEvent tEvent = new TimelineEvent();
        tEvent.setId(ContainerMetricsConstants.CREATED_IN_RM_EVENT_TYPE);
        tEvent.setTimestamp(createdTime);
        entity.addEvent(tEvent);
        // updated as event info instead of entity info, as entity info is updated
        // by NM
        Map<String, Object> entityInfo = new HashMap<String, Object>();
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO, container.getAllocatedResource().getMemorySize());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO, container.getAllocatedResource().getVirtualCores());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO, container.getAllocatedNode().getHost());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO, container.getAllocatedNode().getPort());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO, container.getAllocatedPriority().getPriority());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO, container.getNodeHttpAddress());
        entity.setInfo(entityInfo);
        getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity, container.getContainerId().getApplicationAttemptId().getApplicationId()));
    }
}
Also used: TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) HashMap(java.util.HashMap) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
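
The createContainerEntity factory is elided from this snippet. A plausible minimal version (a hedged sketch, not necessarily the exact Hadoop implementation) builds a container-typed entity keyed by the string form of the ContainerId:

import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity;

// Sketch of the elided factory: ContainerEntity is the container-typed
// subclass of TimelineEntity, so containerCreated above can treat the
// result as a plain TimelineEntity.
private static ContainerEntity createContainerEntity(ContainerId containerId) {
    ContainerEntity entity = new ContainerEntity();
    entity.setId(containerId.toString());
    return entity;
}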

Example 43 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

Class TimelineEntityConverterV2, method createTaskAttemptEntities.

private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
    Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
    Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap = taskInfo.getAllTaskAttempts();
    LOG.info("task " + taskInfo.getTaskId() + " has " + taskAttemptInfoMap.size() + " task attempts");
    for (TaskAttemptInfo taskAttemptInfo : taskAttemptInfoMap.values()) {
        TimelineEntity taskAttempt = createTaskAttemptEntity(taskAttemptInfo);
        taskAttempts.add(taskAttempt);
    }
    return taskAttempts;
}
Also used: TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) HashSet(java.util.HashSet)
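
createTaskAttemptEntity is not shown here. A hedged sketch of what such a mapper could look like; the field choices and type string are illustrative, and the real converter copies considerably more detail out of TaskAttemptInfo:

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

// Illustrative mapping from one history-parser record to a timeline entity.
private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
    TimelineEntity taskAttempt = new TimelineEntity();
    // The type name is assumed for illustration only.
    taskAttempt.setType("MAPREDUCE_TASK_ATTEMPT");
    taskAttempt.setId(taskAttemptInfo.getAttemptId().toString());
    taskAttempt.setCreatedTime(taskAttemptInfo.getStartTime());
    return taskAttempt;
}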

Example 44 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

Class TimelineEntityConverterV2, method createTaskAndTaskAttemptEntities.

private List<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
    List<TimelineEntity> entities = new ArrayList<>();
    Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
    LOG.info("job " + jobInfo.getJobId() + " has " + taskInfoMap.size() + " tasks");
    for (TaskInfo taskInfo : taskInfoMap.values()) {
        TimelineEntity task = createTaskEntity(taskInfo);
        entities.add(task);
        // add the task attempts from this task
        Set<TimelineEntity> taskAttempts = createTaskAttemptEntities(taskInfo);
        entities.addAll(taskAttempts);
    }
    return entities;
}
Also used: TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) ArrayList(java.util.ArrayList) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
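
For context, a hedged sketch of how this private method might be driven from inside the converter. JobHistoryParser and its JobInfo are the real MapReduce history-parsing types; the wrapper method itself is illustrative:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

// Illustrative driver: parse a .jhist file, then convert every task and
// task attempt it contains into timeline entities.
private List<TimelineEntity> convertHistoryFile(FileSystem fs, Path historyFile)
        throws IOException {
    JobInfo jobInfo = new JobHistoryParser(fs, historyFile).parse();
    return createTaskAndTaskAttemptEntities(jobInfo);
}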

Example 45 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

Class HBaseTimelineWriterImpl, method write.

/**
   * Stores all of the information contained in the given TimelineEntities
   * object in the timeline store.
   */
@Override
public TimelineWriteResponse write(String clusterId, String userId, String flowName, String flowVersion, long flowRunId, String appId, TimelineEntities data) throws IOException {
    TimelineWriteResponse putStatus = new TimelineWriteResponse();
    // defensive coding to avoid NPE during row key construction
    if ((flowName == null) || (appId == null) || (clusterId == null) || (userId == null)) {
        LOG.warn("Found null for one of: flowName=" + flowName + " appId=" + appId + " userId=" + userId + " clusterId=" + clusterId + " . Not proceeding with writing to hbase");
        return putStatus;
    }
    for (TimelineEntity te : data.getEntities()) {
        // a set can have at most 1 null
        if (te == null) {
            continue;
        }
        // if the entity is the application, the destination is the application
        // table
        boolean isApplication = isApplicationEntity(te);
        byte[] rowKey;
        if (isApplication) {
            ApplicationRowKey applicationRowKey = new ApplicationRowKey(clusterId, userId, flowName, flowRunId, appId);
            rowKey = applicationRowKey.getRowKey();
        } else {
            EntityRowKey entityRowKey = new EntityRowKey(clusterId, userId, flowName, flowRunId, appId, te.getType(), te.getId());
            rowKey = entityRowKey.getRowKey();
        }
        storeInfo(rowKey, te, flowVersion, isApplication);
        storeEvents(rowKey, te.getEvents(), isApplication);
        storeConfig(rowKey, te.getConfigs(), isApplication);
        storeMetrics(rowKey, te.getMetrics(), isApplication);
        storeRelations(rowKey, te, isApplication);
        if (isApplication) {
            TimelineEvent event = getApplicationEvent(te, ApplicationMetricsConstants.CREATED_EVENT_TYPE);
            FlowRunRowKey flowRunRowKey = new FlowRunRowKey(clusterId, userId, flowName, flowRunId);
            if (event != null) {
                AppToFlowRowKey appToFlowRowKey = new AppToFlowRowKey(clusterId, appId);
                onApplicationCreated(flowRunRowKey, appToFlowRowKey, appId, userId, flowVersion, te, event.getTimestamp());
            }
            // if it's an application entity, store metrics
            storeFlowMetricsAppRunning(flowRunRowKey, appId, te);
            // if application has finished, store its finish time and write final
            // values of all metrics
            event = getApplicationEvent(te, ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
            if (event != null) {
                onApplicationFinished(flowRunRowKey, flowVersion, appId, te, event.getTimestamp());
            }
        }
    }
    return putStatus;
}
Also used: TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) EntityRowKey(org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey) TimelineWriteResponse(org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse) ApplicationRowKey(org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey) FlowRunRowKey(org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) AppToFlowRowKey(org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey)
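
The isApplicationEntity check above decides whether a row lands in the application table or in the generic entity table. A sketch of that check (hedged; the real helper may live in a utility class and differ in detail), using the well-known YARN_APPLICATION entity type:

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;

// An entity routes to the application table when its type matches the
// YARN_APPLICATION entity type constant.
private static boolean isApplicationEntity(TimelineEntity te) {
    return TimelineEntityType.YARN_APPLICATION.name().equals(te.getType());
}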

Aggregations

TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 155
Test (org.junit.Test): 98
TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve): 54
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 54
TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters): 46
HashSet (java.util.HashSet): 37
Client (com.sun.jersey.api.client.Client): 36
ClientResponse (com.sun.jersey.api.client.ClientResponse): 36
URI (java.net.URI): 36
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 33
Set (java.util.Set): 32
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 27
TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent): 26
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 26
HashMap (java.util.HashMap): 23
Configuration (org.apache.hadoop.conf.Configuration): 21
GenericType (com.sun.jersey.api.client.GenericType): 14
HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl): 12
TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter): 10
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 9