Example 11 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

From the class TimelineServiceV1Publisher, method appAttemptFinished.

@SuppressWarnings("unchecked")
@Override
public void appAttemptFinished(RMAppAttempt appAttempt, RMAppAttemptState appAttemptState, RMApp app, long finishedTime) {
    TimelineEntity entity = createAppAttemptEntity(appAttempt.getAppAttemptId());
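    // a single FINISHED event carries the attempt's terminal state and metadata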
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(finishedTime);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO, appAttempt.getTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO, appAttempt.getOriginalTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO, appAttempt.getDiagnostics());
    eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_INFO, app.getFinalApplicationStatus().toString());
    eventInfo.put(AppAttemptMetricsConstants.STATE_INFO, RMServerUtils.createApplicationAttemptState(appAttemptState).toString());
    if (appAttempt.getMasterContainer() != null) {
        eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO, appAttempt.getMasterContainer().getId().toString());
    }
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity, appAttempt.getAppAttemptId().getApplicationId()));
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), HashMap (java.util.HashMap), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
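
For comparison, here is a minimal standalone sketch of the same v1 pattern: build a TimelineEvent, attach it to a TimelineEntity, and return it for publishing. The entity type and info-key strings are illustrative stand-ins for the AppAttemptMetricsConstants values used above, not the real constants.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;

public class AttemptFinishedSketch {

    // Builds an attempt entity carrying a single FINISHED-style event.
    public static TimelineEntity buildFinishedEntity(String attemptId, long finishedTime) {
        TimelineEntity entity = new TimelineEntity();
        entity.setEntityId(attemptId);
        entity.setEntityType("YARN_APPLICATION_ATTEMPT");
        TimelineEvent event = new TimelineEvent();
        event.setEventType("YARN_APPLICATION_ATTEMPT_FINISHED");
        event.setTimestamp(finishedTime);
        Map<String, Object> info = new HashMap<String, Object>();
        // illustrative key; the publisher above uses AppAttemptMetricsConstants.DIAGNOSTICS_INFO
        info.put("YARN_APPLICATION_ATTEMPT_DIAGNOSTICS_INFO", "finished normally");
        event.setEventInfo(info);
        entity.addEvent(event);
        return entity;
    }
}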

Example 12 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

From the class ApplicationHistoryManagerOnTimelineStore, method convertToApplicationAttemptReport.

private static ApplicationAttemptReport convertToApplicationAttemptReport(TimelineEntity entity) {
    String host = null;
    int rpcPort = -1;
    ContainerId amContainerId = null;
    String trackingUrl = null;
    String originalTrackingUrl = null;
    String diagnosticsInfo = null;
    YarnApplicationAttemptState state = null;
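    // a REGISTERED event supplies host, RPC port and AM container; a FINISHED event supplies the rest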
    List<TimelineEvent> events = entity.getEvents();
    if (events != null) {
        for (TimelineEvent event : events) {
            if (event.getEventType().equals(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE)) {
                Map<String, Object> eventInfo = event.getEventInfo();
                if (eventInfo == null) {
                    continue;
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.HOST_INFO)) {
                    host = eventInfo.get(AppAttemptMetricsConstants.HOST_INFO).toString();
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.RPC_PORT_INFO)) {
                    rpcPort = (Integer) eventInfo.get(AppAttemptMetricsConstants.RPC_PORT_INFO);
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO)) {
                    amContainerId = ContainerId.fromString(eventInfo.get(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO).toString());
                }
            } else if (event.getEventType().equals(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE)) {
                Map<String, Object> eventInfo = event.getEventInfo();
                if (eventInfo == null) {
                    continue;
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.TRACKING_URL_INFO)) {
                    trackingUrl = eventInfo.get(AppAttemptMetricsConstants.TRACKING_URL_INFO).toString();
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO)) {
                    originalTrackingUrl = eventInfo.get(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO).toString();
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.DIAGNOSTICS_INFO)) {
                    diagnosticsInfo = eventInfo.get(AppAttemptMetricsConstants.DIAGNOSTICS_INFO).toString();
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.STATE_INFO)) {
                    state = YarnApplicationAttemptState.valueOf(eventInfo.get(AppAttemptMetricsConstants.STATE_INFO).toString());
                }
                if (eventInfo.containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO)) {
                    amContainerId = ContainerId.fromString(eventInfo.get(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO).toString());
                }
            }
        }
    }
    return ApplicationAttemptReport.newInstance(ApplicationAttemptId.fromString(entity.getEntityId()), host, rpcPort, trackingUrl, originalTrackingUrl, diagnosticsInfo, state, amContainerId);
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), Map (java.util.Map), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), YarnApplicationAttemptState (org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState)
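
The containsKey/get/toString pattern repeats for every field above; a hypothetical helper (not part of the Hadoop source) makes the shape of that defensive read explicit:

import java.util.Map;

public final class EventInfoReads {

    // Returns the stringified value for key, or the supplied default when the
    // event info map is absent or does not contain the key.
    public static String stringInfo(Map<String, Object> eventInfo, String key, String dflt) {
        if (eventInfo != null && eventInfo.containsKey(key)) {
            return eventInfo.get(key).toString();
        }
        return dflt;
    }
}

With such a helper, each branch collapses to calls like stringInfo(eventInfo, AppAttemptMetricsConstants.TRACKING_URL_INFO, null).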

Example 13 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

From the class ApplicationHistoryManagerOnTimelineStore, method convertToContainerReport.

private static ContainerReport convertToContainerReport(TimelineEntity entity, String serverHttpAddress, String user) {
    int allocatedMem = 0;
    int allocatedVcore = 0;
    String allocatedHost = null;
    int allocatedPort = -1;
    int allocatedPriority = 0;
    long createdTime = 0;
    long finishedTime = 0;
    String diagnosticsInfo = null;
    int exitStatus = ContainerExitStatus.INVALID;
    ContainerState state = null;
    String nodeHttpAddress = null;
    Map<String, Object> entityInfo = entity.getOtherInfo();
    if (entityInfo != null) {
        if (entityInfo.containsKey(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO)) {
            allocatedMem = (Integer) entityInfo.get(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO);
        }
        if (entityInfo.containsKey(ContainerMetricsConstants.ALLOCATED_VCORE_INFO)) {
            allocatedVcore = (Integer) entityInfo.get(ContainerMetricsConstants.ALLOCATED_VCORE_INFO);
        }
        if (entityInfo.containsKey(ContainerMetricsConstants.ALLOCATED_HOST_INFO)) {
            allocatedHost = entityInfo.get(ContainerMetricsConstants.ALLOCATED_HOST_INFO).toString();
        }
        if (entityInfo.containsKey(ContainerMetricsConstants.ALLOCATED_PORT_INFO)) {
            allocatedPort = (Integer) entityInfo.get(ContainerMetricsConstants.ALLOCATED_PORT_INFO);
        }
        if (entityInfo.containsKey(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO)) {
            allocatedPriority = (Integer) entityInfo.get(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO);
        }
        if (entityInfo.containsKey(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO)) {
            nodeHttpAddress = (String) entityInfo.get(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO);
        }
    }
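    // created/finished timestamps, diagnostics, exit status and state come from the events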
    List<TimelineEvent> events = entity.getEvents();
    if (events != null) {
        for (TimelineEvent event : events) {
            if (event.getEventType().equals(ContainerMetricsConstants.CREATED_EVENT_TYPE)) {
                createdTime = event.getTimestamp();
            } else if (event.getEventType().equals(ContainerMetricsConstants.FINISHED_EVENT_TYPE)) {
                finishedTime = event.getTimestamp();
                Map<String, Object> eventInfo = event.getEventInfo();
                if (eventInfo == null) {
                    continue;
                }
                if (eventInfo.containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO)) {
                    diagnosticsInfo = eventInfo.get(ContainerMetricsConstants.DIAGNOSTICS_INFO).toString();
                }
                if (eventInfo.containsKey(ContainerMetricsConstants.EXIT_STATUS_INFO)) {
                    exitStatus = (Integer) eventInfo.get(ContainerMetricsConstants.EXIT_STATUS_INFO);
                }
                if (eventInfo.containsKey(ContainerMetricsConstants.STATE_INFO)) {
                    state = ContainerState.valueOf(eventInfo.get(ContainerMetricsConstants.STATE_INFO).toString());
                }
            }
        }
    }
    ContainerId containerId = ContainerId.fromString(entity.getEntityId());
    String logUrl = null;
    NodeId allocatedNode = null;
    if (allocatedHost != null) {
        allocatedNode = NodeId.newInstance(allocatedHost, allocatedPort);
        logUrl = WebAppUtils.getAggregatedLogURL(serverHttpAddress, allocatedNode.toString(), containerId.toString(), containerId.toString(), user);
    }
    return ContainerReport.newInstance(ContainerId.fromString(entity.getEntityId()), Resource.newInstance(allocatedMem, allocatedVcore), allocatedNode, Priority.newInstance(allocatedPriority), createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state, nodeHttpAddress);
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), NodeId (org.apache.hadoop.yarn.api.records.NodeId), ContainerState (org.apache.hadoop.yarn.api.records.ContainerState), Map (java.util.Map), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap)
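
As a quick reference, the identifier APIs the method leans on can be exercised on their own. The container id string and host below are made-up values in the standard YARN formats:

import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;

public class IdParsingSketch {
    public static void main(String[] args) {
        // ContainerId.fromString parses container_<clusterTimestamp>_<appId>_<attemptId>_<containerId>
        ContainerId cid = ContainerId.fromString("container_1511248544955_0001_01_000001");
        // NodeId pairs the allocation host with its port, as done before building the log URL
        NodeId node = NodeId.newInstance("worker-1.example.com", 8041);
        System.out.println(cid + " ran on " + node);
    }
}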

Example 14 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

From the class KeyValueBasedTimelineStore, method put.

@Override
public synchronized TimelinePutResponse put(TimelineEntities data) {
    TimelinePutResponse response = new TimelinePutResponse();
    if (getServiceStopped()) {
        LOG.info("Service stopped, return null for the storage");
        TimelinePutError error = new TimelinePutError();
        error.setErrorCode(TimelinePutError.IO_EXCEPTION);
        response.addError(error);
        return response;
    }
    for (TimelineEntity entity : data.getEntities()) {
        EntityIdentifier entityId = new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
        // store entity info in memory
        TimelineEntity existingEntity = entities.get(entityId);
        boolean needsPut = false;
        if (existingEntity == null) {
            existingEntity = new TimelineEntity();
            existingEntity.setEntityId(entity.getEntityId());
            existingEntity.setEntityType(entity.getEntityType());
            existingEntity.setStartTime(entity.getStartTime());
            if (entity.getDomainId() == null || entity.getDomainId().length() == 0) {
                TimelinePutError error = new TimelinePutError();
                error.setEntityId(entityId.getId());
                error.setEntityType(entityId.getType());
                error.setErrorCode(TimelinePutError.NO_DOMAIN);
                response.addError(error);
                continue;
            }
            existingEntity.setDomainId(entity.getDomainId());
            // insert a new entity to the storage, update insert time map
            entityInsertTimes.put(entityId, System.currentTimeMillis());
            needsPut = true;
        }
        if (entity.getEvents() != null) {
            if (existingEntity.getEvents() == null) {
                existingEntity.setEvents(entity.getEvents());
            } else {
                existingEntity.addEvents(entity.getEvents());
            }
            Collections.sort(existingEntity.getEvents());
            needsPut = true;
        }
        // check startTime
        if (existingEntity.getStartTime() == null) {
            if (existingEntity.getEvents() == null || existingEntity.getEvents().isEmpty()) {
                TimelinePutError error = new TimelinePutError();
                error.setEntityId(entityId.getId());
                error.setEntityType(entityId.getType());
                error.setErrorCode(TimelinePutError.NO_START_TIME);
                response.addError(error);
                entities.remove(entityId);
                entityInsertTimes.remove(entityId);
                continue;
            } else {
                Long min = Long.MAX_VALUE;
                for (TimelineEvent e : entity.getEvents()) {
                    if (min > e.getTimestamp()) {
                        min = e.getTimestamp();
                    }
                }
                existingEntity.setStartTime(min);
                needsPut = true;
            }
        }
        if (entity.getPrimaryFilters() != null) {
            if (existingEntity.getPrimaryFilters() == null) {
                existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
            }
            for (Entry<String, Set<Object>> pf : entity.getPrimaryFilters().entrySet()) {
                for (Object pfo : pf.getValue()) {
                    existingEntity.addPrimaryFilter(pf.getKey(), KeyValueBasedTimelineStoreUtils.compactNumber(pfo));
                    needsPut = true;
                }
            }
        }
        if (entity.getOtherInfo() != null) {
            if (existingEntity.getOtherInfo() == null) {
                existingEntity.setOtherInfo(new HashMap<String, Object>());
            }
            for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
                existingEntity.addOtherInfo(info.getKey(), KeyValueBasedTimelineStoreUtils.compactNumber(info.getValue()));
                needsPut = true;
            }
        }
        if (needsPut) {
            entities.put(entityId, existingEntity);
        }
        // relate it to other entities
        if (entity.getRelatedEntities() == null) {
            continue;
        }
        for (Entry<String, Set<String>> partRelatedEntities : entity.getRelatedEntities().entrySet()) {
            if (partRelatedEntities == null) {
                continue;
            }
            for (String idStr : partRelatedEntities.getValue()) {
                EntityIdentifier relatedEntityId = new EntityIdentifier(idStr, partRelatedEntities.getKey());
                TimelineEntity relatedEntity = entities.get(relatedEntityId);
                if (relatedEntity != null) {
                    if (relatedEntity.getDomainId().equals(existingEntity.getDomainId())) {
                        relatedEntity.addRelatedEntity(existingEntity.getEntityType(), existingEntity.getEntityId());
                        entities.put(relatedEntityId, relatedEntity);
                    } else {
                        // in this case the entity will be put, but the relation will be
                        // ignored
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityType(existingEntity.getEntityType());
                        error.setEntityId(existingEntity.getEntityId());
                        error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
                        response.addError(error);
                    }
                } else {
                    relatedEntity = new TimelineEntity();
                    relatedEntity.setEntityId(relatedEntityId.getId());
                    relatedEntity.setEntityType(relatedEntityId.getType());
                    relatedEntity.setStartTime(existingEntity.getStartTime());
                    relatedEntity.addRelatedEntity(existingEntity.getEntityType(), existingEntity.getEntityId());
                    relatedEntity.setDomainId(existingEntity.getDomainId());
                    entities.put(relatedEntityId, relatedEntity);
                    entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
                }
            }
        }
    }
    return response;
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), SortedSet (java.util.SortedSet), HashSet (java.util.HashSet), EnumSet (java.util.EnumSet), Set (java.util.Set), TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), TimelinePutError (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError)
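
A minimal sketch of driving this put() path, assuming an already-started store instance named store (the variable and values are illustrative). As the code above shows, an entity needs a domain id and a start time, either explicit or derivable from its earliest event, otherwise the response carries NO_DOMAIN or NO_START_TIME errors:

TimelineEntities batch = new TimelineEntities();
TimelineEntity e = new TimelineEntity();
e.setEntityId("entity_1");
e.setEntityType("TEST_TYPE");
// omit the domain id and put() records a NO_DOMAIN error instead
e.setDomainId("DEFAULT");
// or add an event whose timestamp becomes the derived start time
e.setStartTime(System.currentTimeMillis());
batch.addEntity(e);
TimelinePutResponse resp = store.put(batch);
for (TimelinePutResponse.TimelinePutError err : resp.getErrors()) {
    System.err.println("put failed: code=" + err.getErrorCode() + " id=" + err.getEntityId());
}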

Example 15 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

From the class RollingLevelDBTimelineStore, method getEntity.

/**
 * Read entity from a db iterator. If no information is found in the specified
 * fields for this entity, return null.
 */
private static TimelineEntity getEntity(String entityId, String entityType, Long startTime, EnumSet<Field> fields, DBIterator iterator, byte[] prefix, int prefixlen) throws IOException {
    if (fields == null) {
        fields = EnumSet.allOf(Field.class);
    }
    TimelineEntity entity = new TimelineEntity();
    boolean events = false;
    boolean lastEvent = false;
    if (fields.contains(Field.EVENTS)) {
        events = true;
    } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
        lastEvent = true;
    } else {
        entity.setEvents(null);
    }
    boolean relatedEntities = false;
    if (fields.contains(Field.RELATED_ENTITIES)) {
        relatedEntities = true;
    } else {
        entity.setRelatedEntities(null);
    }
    boolean primaryFilters = false;
    if (fields.contains(Field.PRIMARY_FILTERS)) {
        primaryFilters = true;
    } else {
        entity.setPrimaryFilters(null);
    }
    boolean otherInfo = false;
    if (fields.contains(Field.OTHER_INFO)) {
        otherInfo = true;
    } else {
        entity.setOtherInfo(null);
    }
    // iterate through the entity's entries, parsing information if it is part
    // of a requested field
    for (; iterator.hasNext(); iterator.next()) {
        byte[] key = iterator.peekNext().getKey();
        if (!prefixMatches(prefix, prefixlen, key)) {
            break;
        }
        if (key.length == prefixlen) {
            continue;
        }
        if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
            if (primaryFilters) {
                addPrimaryFilter(entity, key, prefixlen + PRIMARY_FILTERS_COLUMN.length);
            }
        } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
            if (otherInfo) {
                entity.addOtherInfo(parseRemainingKey(key, prefixlen + OTHER_INFO_COLUMN.length), fstConf.asObject(iterator.peekNext().getValue()));
            }
        } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
            if (relatedEntities) {
                addRelatedEntity(entity, key, prefixlen + RELATED_ENTITIES_COLUMN.length);
            }
        } else if (key[prefixlen] == EVENTS_COLUMN[0]) {
            if (events || (lastEvent && entity.getEvents().size() == 0)) {
                TimelineEvent event = getEntityEvent(null, key, prefixlen + EVENTS_COLUMN.length, iterator.peekNext().getValue());
                if (event != null) {
                    entity.addEvent(event);
                }
            }
        } else if (key[prefixlen] == DOMAIN_ID_COLUMN[0]) {
            byte[] v = iterator.peekNext().getValue();
            String domainId = new String(v, UTF_8);
            entity.setDomainId(domainId);
        } else {
            LOG.warn(String.format("Found unexpected column for entity %s of " + "type %s (0x%02x)", entityId, entityType, key[prefixlen]));
        }
    }
    entity.setEntityId(entityId);
    entity.setEntityType(entityType);
    entity.setStartTime(startTime);
    return entity;
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timeline.TimelineEvent) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
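
The fields mask decides which columns are parsed. A short sketch of constructing one, assuming Field is the enum nested in the v1 store's TimelineReader interface (org.apache.hadoop.yarn.server.timeline.TimelineReader.Field):

import java.util.EnumSet;
import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;

// passing null selects everything (the method substitutes EnumSet.allOf);
// a narrower mask nulls out the sections that were not requested
EnumSet<Field> eventsOnly = EnumSet.of(Field.EVENTS);
EnumSet<Field> lastEventOnly = EnumSet.of(Field.LAST_EVENT_ONLY);
EnumSet<Field> everything = EnumSet.allOf(Field.class);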

Aggregations

TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 44
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 32
HashMap (java.util.HashMap): 19
IOException (java.io.IOException): 10
TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse): 7
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 7
Map (java.util.Map): 6
Test (org.junit.Test): 6
LinkedHashMap (java.util.LinkedHashMap): 5
ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException): 4
ArrayList (java.util.ArrayList): 4
EnumSet (java.util.EnumSet): 4
HashSet (java.util.HashSet): 4
Set (java.util.Set): 4
TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 4
TimelineEvents (org.apache.hadoop.yarn.api.records.timeline.TimelineEvents): 4
SortedSet (java.util.SortedSet): 3
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 3
TimelinePutError (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError): 3
ClientResponse (com.sun.jersey.api.client.ClientResponse): 2