
Example 6 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

The class TimelineServiceV1Publisher, method appACLsUpdated.

@SuppressWarnings("unchecked")
@Override
public void appACLsUpdated(RMApp app, String appViewACLs, long updatedTime) {
    TimelineEntity entity = createApplicationEntity(app.getApplicationId());
    TimelineEvent tEvent = new TimelineEvent();
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, (appViewACLs == null) ? "" : appViewACLs);
    entity.setOtherInfo(entityInfo);
    tEvent.setEventType(ApplicationMetricsConstants.ACLS_UPDATED_EVENT_TYPE);
    tEvent.setTimestamp(updatedTime);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
}
Also used : TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), HashMap (java.util.HashMap), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
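
For readers new to the v1 timeline records, the pattern in Example 6 reduces to a few API calls. Below is a minimal, self-contained sketch of that pattern; createApplicationEntity is private to the publisher, so the helper here is a hypothetical stand-in, and the literal type and key strings are assumptions in place of the ApplicationMetricsConstants values.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;

public class AclsUpdatedSketch {

    // Hypothetical stand-in for the publisher's private createApplicationEntity.
    static TimelineEntity createApplicationEntity(ApplicationId appId) {
        TimelineEntity entity = new TimelineEntity();
        // Assumed literal in place of ApplicationMetricsConstants.ENTITY_TYPE.
        entity.setEntityType("YARN_APPLICATION");
        entity.setEntityId(appId.toString());
        return entity;
    }

    public static void main(String[] args) {
        TimelineEntity entity =
            createApplicationEntity(ApplicationId.newInstance(System.currentTimeMillis(), 1));

        // Entity-level metadata goes into otherInfo, exactly as in Example 6.
        Map<String, Object> entityInfo = new HashMap<String, Object>();
        entityInfo.put("YARN_APPLICATION_VIEW_ACLS", "user1,user2");
        entity.setOtherInfo(entityInfo);

        // The event itself carries only a type and a timestamp here.
        TimelineEvent tEvent = new TimelineEvent();
        tEvent.setEventType("YARN_APPLICATION_ACLS_UPDATED");
        tEvent.setTimestamp(System.currentTimeMillis());
        entity.addEvent(tEvent);

        System.out.println(entity.getEvents().size()); // prints 1
    }
}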

Example 7 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

The class TimelineServiceV1Publisher, method appAttemptFinished.

@SuppressWarnings("unchecked")
@Override
public void appAttemptFinished(RMAppAttempt appAttempt, RMAppAttemptState appAttemptState, RMApp app, long finishedTime) {
    TimelineEntity entity = createAppAttemptEntity(appAttempt.getAppAttemptId());
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(finishedTime);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO, appAttempt.getTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO, appAttempt.getOriginalTrackingUrl());
    eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO, appAttempt.getDiagnostics());
    eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_INFO, app.getFinalApplicationStatus().toString());
    eventInfo.put(AppAttemptMetricsConstants.STATE_INFO, RMServerUtils.createApplicationAttemptState(appAttemptState).toString());
    if (appAttempt.getMasterContainer() != null) {
        eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO, appAttempt.getMasterContainer().getId().toString());
    }
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity, appAttempt.getAppAttemptId().getApplicationId()));
}
Also used : TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), HashMap (java.util.HashMap), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
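
Example 7 attaches a map of event-level info rather than entity-level otherInfo. As a rough illustration of how a consumer might pull those fields back out, here is a read-side sketch; the literal event-type and key strings are assumptions standing in for the AppAttemptMetricsConstants values.

import java.util.Collections;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;

public class ReadEventInfoSketch {

    // Scan the entity's events for the finished event and pull one info field.
    static String trackingUrlOf(TimelineEntity entity) {
        for (TimelineEvent event : entity.getEvents()) {
            // Assumed literal for AppAttemptMetricsConstants.FINISHED_EVENT_TYPE.
            if ("YARN_APPLICATION_ATTEMPT_FINISHED".equals(event.getEventType())) {
                Map<String, Object> info = event.getEventInfo();
                // Assumed literal for AppAttemptMetricsConstants.TRACKING_URL_INFO.
                Object url = info == null ? null : info.get("YARN_APPLICATION_ATTEMPT_TRACKING_URL");
                return url == null ? null : url.toString();
            }
        }
        return null;
    }

    public static void main(String[] args) {
        TimelineEntity entity = new TimelineEntity();
        TimelineEvent event = new TimelineEvent();
        event.setEventType("YARN_APPLICATION_ATTEMPT_FINISHED");
        event.setTimestamp(System.currentTimeMillis());
        event.setEventInfo(Collections.<String, Object>singletonMap(
            "YARN_APPLICATION_ATTEMPT_TRACKING_URL", "http://example.test/proxy"));
        entity.addEvent(event);
        System.out.println(trackingUrlOf(entity)); // http://example.test/proxy
    }
}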

Example 8 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

The class RollingLevelDBTimelineStore, method putEntities.

/**
   * Put a single entity. If there is an error, add a TimelinePutError to the
   * given response.
   *
   * @param entityUpdates
   *          a map containing all the scheduled writes for this put to the
   *          entity db
   * @param indexUpdates
   *          a map containing all the scheduled writes for this put to the
   *          index db
   * @param entity
   *          the timeline entity to put
   * @param response
   *          the response to which any put errors are added
   * @return the number of writes scheduled by this put
   */
private long putEntities(TreeMap<Long, RollingWriteBatch> entityUpdates, TreeMap<Long, RollingWriteBatch> indexUpdates, TimelineEntity entity, TimelinePutResponse response) {
    long putCount = 0;
    List<EntityIdentifier> relatedEntitiesWithoutStartTimes = new ArrayList<EntityIdentifier>();
    byte[] revStartTime = null;
    Map<String, Set<Object>> primaryFilters = null;
    try {
        List<TimelineEvent> events = entity.getEvents();
        // look up the start time for the entity
        Long startTime = getAndSetStartTime(entity.getEntityId(), entity.getEntityType(), entity.getStartTime(), events);
        if (startTime == null) {
            // if no start time is found, add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.NO_START_TIME);
            response.addError(error);
            return putCount;
        }
        // Must have a domain
        if (StringUtils.isEmpty(entity.getDomainId())) {
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.NO_DOMAIN);
            response.addError(error);
            return putCount;
        }
        revStartTime = writeReverseOrderedLong(startTime);
        long roundedStartTime = entitydb.computeCurrentCheckMillis(startTime);
        RollingWriteBatch rollingWriteBatch = entityUpdates.get(roundedStartTime);
        if (rollingWriteBatch == null) {
            DB db = entitydb.getDBForStartTime(startTime);
            if (db != null) {
                WriteBatch writeBatch = db.createWriteBatch();
                rollingWriteBatch = new RollingWriteBatch(db, writeBatch);
                entityUpdates.put(roundedStartTime, rollingWriteBatch);
            }
        }
        if (rollingWriteBatch == null) {
            // no rolling DB covers this start time, so the entity has expired; add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
            response.addError(error);
            return putCount;
        }
        WriteBatch writeBatch = rollingWriteBatch.getWriteBatch();
        // Save off the getBytes conversion to avoid unnecessary cost
        byte[] entityIdBytes = entity.getEntityId().getBytes(UTF_8);
        byte[] entityTypeBytes = entity.getEntityType().getBytes(UTF_8);
        byte[] domainIdBytes = entity.getDomainId().getBytes(UTF_8);
        // write entity marker
        byte[] markerKey = KeyBuilder.newInstance(3).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).getBytesForLookup();
        writeBatch.put(markerKey, EMPTY_BYTES);
        ++putCount;
        // write domain id entry
        byte[] domainkey = KeyBuilder.newInstance(4).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(DOMAIN_ID_COLUMN).getBytes();
        writeBatch.put(domainkey, domainIdBytes);
        ++putCount;
        // write event entries
        if (events != null) {
            for (TimelineEvent event : events) {
                byte[] revts = writeReverseOrderedLong(event.getTimestamp());
                byte[] key = KeyBuilder.newInstance().add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(EVENTS_COLUMN).add(revts).add(event.getEventType().getBytes(UTF_8)).getBytes();
                byte[] value = fstConf.asByteArray(event.getEventInfo());
                writeBatch.put(key, value);
                ++putCount;
            }
        }
        // write primary filter entries
        primaryFilters = entity.getPrimaryFilters();
        if (primaryFilters != null) {
            for (Entry<String, Set<Object>> primaryFilter : primaryFilters.entrySet()) {
                for (Object primaryFilterValue : primaryFilter.getValue()) {
                    byte[] key = KeyBuilder.newInstance(6).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(PRIMARY_FILTERS_COLUMN).add(primaryFilter.getKey()).add(fstConf.asByteArray(primaryFilterValue)).getBytes();
                    writeBatch.put(key, EMPTY_BYTES);
                    ++putCount;
                }
            }
        }
        // write other info entries
        Map<String, Object> otherInfo = entity.getOtherInfo();
        if (otherInfo != null) {
            for (Entry<String, Object> info : otherInfo.entrySet()) {
                byte[] key = KeyBuilder.newInstance(5).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(OTHER_INFO_COLUMN).add(info.getKey()).getBytes();
                byte[] value = fstConf.asByteArray(info.getValue());
                writeBatch.put(key, value);
                ++putCount;
            }
        }
        // write related entity entries
        Map<String, Set<String>> relatedEntities = entity.getRelatedEntities();
        if (relatedEntities != null) {
            for (Entry<String, Set<String>> relatedEntityList : relatedEntities.entrySet()) {
                String relatedEntityType = relatedEntityList.getKey();
                for (String relatedEntityId : relatedEntityList.getValue()) {
                    // look up start time of related entity
                    Long relatedStartTimeLong = getStartTimeLong(relatedEntityId, relatedEntityType);
                    // delay writing the related entity if no start time is found
                    if (relatedStartTimeLong == null) {
                        relatedEntitiesWithoutStartTimes.add(new EntityIdentifier(relatedEntityId, relatedEntityType));
                        continue;
                    }
                    byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedStartTimeLong);
                    long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
                    RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
                    if (relatedRollingWriteBatch == null) {
                        DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
                        if (db != null) {
                            WriteBatch relatedWriteBatch = db.createWriteBatch();
                            relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
                            entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
                        }
                    }
                    if (relatedRollingWriteBatch == null) {
                        // no rolling DB covers the related entity's start time, so it has expired; add an error and skip this relation
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityId(entity.getEntityId());
                        error.setEntityType(entity.getEntityType());
                        error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
                        response.addError(error);
                        continue;
                    }
                    // The related entity already exists; look up its stored domain id
                    byte[] relatedDomainIdBytes = relatedRollingWriteBatch.getDB().get(createDomainIdKey(relatedEntityId, relatedEntityType, relatedEntityStartTime));
                    // The timeline data created by the server before 2.6 won't have
                    // the domain field. We assume this timeline data is in the
                    // default timeline domain.
                    String domainId = null;
                    if (relatedDomainIdBytes == null) {
                        domainId = TimelineDataManager.DEFAULT_DOMAIN_ID;
                    } else {
                        domainId = new String(relatedDomainIdBytes, UTF_8);
                    }
                    if (!domainId.equals(entity.getDomainId())) {
                        // in this case the entity will be put, but the relation will be
                        // ignored
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityId(entity.getEntityId());
                        error.setEntityType(entity.getEntityType());
                        error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
                        response.addError(error);
                        continue;
                    }
                    // write "forward" entry (related entity -> entity)
                    byte[] key = createRelatedEntityKey(relatedEntityId, relatedEntityType, relatedEntityStartTime, entity.getEntityId(), entity.getEntityType());
                    WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
                    relatedWriteBatch.put(key, EMPTY_BYTES);
                    ++putCount;
                }
            }
        }
        // write index entities
        RollingWriteBatch indexRollingWriteBatch = indexUpdates.get(roundedStartTime);
        if (indexRollingWriteBatch == null) {
            DB db = indexdb.getDBForStartTime(startTime);
            if (db != null) {
                WriteBatch indexWriteBatch = db.createWriteBatch();
                indexRollingWriteBatch = new RollingWriteBatch(db, indexWriteBatch);
                indexUpdates.put(roundedStartTime, indexRollingWriteBatch);
            }
        }
        if (indexRollingWriteBatch == null) {
            // no rolling index DB covers this start time, so the entity has expired; add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
            response.addError(error);
            return putCount;
        }
        WriteBatch indexWriteBatch = indexRollingWriteBatch.getWriteBatch();
        putCount += writePrimaryFilterEntries(indexWriteBatch, primaryFilters, markerKey, EMPTY_BYTES);
    } catch (IOException e) {
        LOG.error("Error putting entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entity.getEntityId());
        error.setEntityType(entity.getEntityType());
        error.setErrorCode(TimelinePutError.IO_EXCEPTION);
        response.addError(error);
    }
    for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) {
        try {
            Long relatedEntityStartAndInsertTime = getAndSetStartTime(relatedEntity.getId(), relatedEntity.getType(), readReverseOrderedLong(revStartTime, 0), null);
            if (relatedEntityStartAndInsertTime == null) {
                throw new IOException("Error setting start time for related entity");
            }
            long relatedStartTimeLong = relatedEntityStartAndInsertTime;
            long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
            RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
            if (relatedRollingWriteBatch == null) {
                DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
                if (db != null) {
                    WriteBatch relatedWriteBatch = db.createWriteBatch();
                    relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
                    entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
                }
            }
            if (relatedRollingWriteBatch == null) {
                // no rolling DB covers the related entity's start time, so it has expired; add an error and skip it
                TimelinePutError error = new TimelinePutError();
                error.setEntityId(entity.getEntityId());
                error.setEntityType(entity.getEntityType());
                error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
                response.addError(error);
                continue;
            }
            WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
            byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedEntityStartAndInsertTime);
            // This is the new entity, the domain should be the same
            byte[] key = createDomainIdKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime);
            relatedWriteBatch.put(key, entity.getDomainId().getBytes(UTF_8));
            ++putCount;
            relatedWriteBatch.put(createRelatedEntityKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime, entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES);
            ++putCount;
            relatedWriteBatch.put(createEntityMarkerKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime), EMPTY_BYTES);
            ++putCount;
        } catch (IOException e) {
            LOG.error("Error putting related entity " + relatedEntity.getId() + " of type " + relatedEntity.getType() + " for entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.IO_EXCEPTION);
            response.addError(error);
        }
    }
    return putCount;
}
Also used : TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), SortedSet (java.util.SortedSet), EnumSet (java.util.EnumSet), Set (java.util.Set), RollingWriteBatch (org.apache.hadoop.yarn.server.timeline.RollingLevelDB.RollingWriteBatch), ArrayList (java.util.ArrayList), IOException (java.io.IOException), TimelinePutError (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError), GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong), GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong), WriteBatch (org.iq80.leveldb.WriteBatch), DB (org.iq80.leveldb.DB)
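
The core of putEntities is its batching scheme: each write is routed to the rolling database whose period covers the entity's start time, a WriteBatch per database is created lazily and cached in the TreeMap, and a missing database means the data has aged out (EXPIRED_ENTITY). The sketch below illustrates that scheme in isolation; the period, retention window, and the StringBuilder standing in for a WriteBatch are all assumptions, not the Hadoop implementation.

import java.util.TreeMap;

public class RollingBatchSketch {

    static final long PERIOD_MS = 60L * 60 * 1000;             // assumed hourly rolling DBs
    static final long RETENTION_MS = 7L * 24 * 60 * 60 * 1000; // assumed 7-day retention

    // Plays the role of entitydb.computeCurrentCheckMillis: bucket a timestamp.
    static long roundToPeriod(long millis) {
        return millis - (millis % PERIOD_MS);
    }

    // Plays the role of entitydb.getDBForStartTime: null signals expiry.
    static StringBuilder openBatchFor(long startTime, long now) {
        return (now - startTime > RETENTION_MS) ? null : new StringBuilder();
    }

    public static void main(String[] args) {
        // One pending batch per rolling period, as in the entityUpdates TreeMap.
        TreeMap<Long, StringBuilder> updates = new TreeMap<Long, StringBuilder>();
        long now = System.currentTimeMillis();
        long[] startTimes = {now, now - 1000L, now - 30L * 24 * 60 * 60 * 1000};

        for (long startTime : startTimes) {
            long bucket = roundToPeriod(startTime);
            StringBuilder batch = updates.get(bucket);
            if (batch == null) {
                batch = openBatchFor(startTime, now);
                if (batch != null) {
                    updates.put(bucket, batch);
                }
            }
            if (batch == null) {
                // The rolling DB for this period is gone: EXPIRED_ENTITY.
                System.out.println(startTime + ": expired");
                continue;
            }
            batch.append("put@").append(startTime).append('\n'); // stand-in for writeBatch.put
        }
        System.out.println(updates.size() + " batch(es) scheduled");
    }
}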

Example 9 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

The class RollingLevelDBTimelineStore, method getEntityTimelines.

@Override
public TimelineEvents getEntityTimelines(String entityType, SortedSet<String> entityIds, Long limit, Long windowStart, Long windowEnd, Set<String> eventType) throws IOException {
    TimelineEvents events = new TimelineEvents();
    if (entityIds == null || entityIds.isEmpty()) {
        return events;
    }
    // create a lexicographically-ordered map from start time to entities
    Map<byte[], List<EntityIdentifier>> startTimeMap = new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {

        @Override
        public int compare(byte[] o1, byte[] o2) {
            return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, o2.length);
        }
    });
    DBIterator iterator = null;
    try {
        // skip entities with no start time
        for (String entityId : entityIds) {
            byte[] startTime = getStartTime(entityId, entityType);
            if (startTime != null) {
                List<EntityIdentifier> entities = startTimeMap.get(startTime);
                if (entities == null) {
                    entities = new ArrayList<EntityIdentifier>();
                    startTimeMap.put(startTime, entities);
                }
                entities.add(new EntityIdentifier(entityId, entityType));
            }
        }
        for (Entry<byte[], List<EntityIdentifier>> entry : startTimeMap.entrySet()) {
            // look up the events matching the given parameters (limit,
            // start time, end time, event types) for entities whose start times
            // were found and add the entities to the return list
            byte[] revStartTime = entry.getKey();
            for (EntityIdentifier entityIdentifier : entry.getValue()) {
                EventsOfOneEntity entity = new EventsOfOneEntity();
                entity.setEntityId(entityIdentifier.getId());
                entity.setEntityType(entityType);
                events.addEvent(entity);
                KeyBuilder kb = KeyBuilder.newInstance().add(entityType).add(revStartTime).add(entityIdentifier.getId()).add(EVENTS_COLUMN);
                byte[] prefix = kb.getBytesForLookup();
                if (windowEnd == null) {
                    windowEnd = Long.MAX_VALUE;
                }
                byte[] revts = writeReverseOrderedLong(windowEnd);
                kb.add(revts);
                byte[] first = kb.getBytesForLookup();
                byte[] last = null;
                if (windowStart != null) {
                    last = KeyBuilder.newInstance().add(prefix).add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
                }
                if (limit == null) {
                    limit = DEFAULT_LIMIT;
                }
                DB db = entitydb.getDBForStartTime(readReverseOrderedLong(revStartTime, 0));
                if (db == null) {
                    continue;
                }
                iterator = db.iterator();
                for (iterator.seek(first); entity.getEvents().size() < limit && iterator.hasNext(); iterator.next()) {
                    byte[] key = iterator.peekNext().getKey();
                    if (!prefixMatches(prefix, prefix.length, key) || (last != null && WritableComparator.compareBytes(key, 0, key.length, last, 0, last.length) > 0)) {
                        break;
                    }
                    TimelineEvent event = getEntityEvent(eventType, key, prefix.length, iterator.peekNext().getValue());
                    if (event != null) {
                        entity.addEvent(event);
                    }
                }
            }
        }
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
    return events;
}
Also used : TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), EventsOfOneEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity), TimelineEvents (org.apache.hadoop.yarn.api.records.timeline.TimelineEvents), TreeMap (java.util.TreeMap), DBIterator (org.iq80.leveldb.DBIterator), KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder), List (java.util.List), ArrayList (java.util.ArrayList), DB (org.iq80.leveldb.DB)
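
getEntityTimelines leans on writeReverseOrderedLong: timestamps are encoded so that unsigned lexicographic byte order is the reverse of numeric order, which lets a forward LevelDB scan starting at the window end return events newest first. The sketch below demonstrates the property with one possible encoding; it is an illustration of the idea, not the exact GenericObjectMapper byte layout.

public class ReverseOrderedLongSketch {

    static byte[] writeReverseOrdered(long l) {
        // Flip the sign bit so signed order matches unsigned order, then
        // invert all bits so larger values compare lexicographically smaller.
        long flipped = ~(l ^ Long.MIN_VALUE);
        byte[] b = new byte[8];
        for (int i = 0; i < 8; i++) {
            b[i] = (byte) (flipped >>> (8 * (7 - i)));
        }
        return b;
    }

    static int compareUnsigned(byte[] a, byte[] b) {
        for (int i = 0; i < 8; i++) {
            int cmp = Integer.compare(a[i] & 0xff, b[i] & 0xff);
            if (cmp != 0) {
                return cmp;
            }
        }
        return 0;
    }

    public static void main(String[] args) {
        byte[] older = writeReverseOrdered(1_000L);
        byte[] newer = writeReverseOrdered(2_000L);
        // The newer timestamp sorts first, so seeking to the window end and
        // scanning forward yields events newest-to-oldest.
        System.out.println(compareUnsigned(newer, older) < 0); // true
    }
}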

Example 10 with TimelineEvent

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.

The class TestApplicationHistoryManagerOnTimelineStore, method createAppAttemptTimelineEntity.

private static TimelineEntity createAppAttemptTimelineEntity(ApplicationAttemptId appAttemptId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType(AppAttemptMetricsConstants.ENTITY_TYPE);
    entity.setEntityId(appAttemptId.toString());
    entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
    entity.addPrimaryFilter(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appAttemptId.getApplicationId().toString());
    entity.addPrimaryFilter(TimelineStore.SystemFilter.ENTITY_OWNER.toString(), "yarn");
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 1L);
    Map<String, Object> eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO, "test tracking url");
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO, "test original tracking url");
    eventInfo.put(AppAttemptMetricsConstants.HOST_INFO, "test host");
    eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_INFO, 100);
    eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO, ContainerId.newContainerId(appAttemptId, 1));
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    tEvent = new TimelineEvent();
    tEvent.setEventType(AppAttemptMetricsConstants.FINISHED_EVENT_TYPE);
    tEvent.setTimestamp(Integer.MAX_VALUE + 2L);
    eventInfo = new HashMap<String, Object>();
    eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO, "test tracking url");
    eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO, "test original tracking url");
    eventInfo.put(AppAttemptMetricsConstants.DIAGNOSTICS_INFO, "test diagnostics info");
    eventInfo.put(AppAttemptMetricsConstants.FINAL_STATUS_INFO, FinalApplicationStatus.UNDEFINED.toString());
    eventInfo.put(AppAttemptMetricsConstants.STATE_INFO, YarnApplicationAttemptState.FINISHED.toString());
    tEvent.setEventInfo(eventInfo);
    entity.addEvent(tEvent);
    return entity;
}
Also used : TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), HashMap (java.util.HashMap), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
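
One detail of this fixture worth calling out: the timestamps are written as Integer.MAX_VALUE + 1L and + 2L, where the long literal forces the addition to happen in 64-bit arithmetic. A two-line demonstration of why the L matters:

public class TimestampWideningSketch {
    public static void main(String[] args) {
        long wrong = Integer.MAX_VALUE + 1;  // int addition overflows first: -2147483648
        long right = Integer.MAX_VALUE + 1L; // widened to long before adding: 2147483648
        System.out.println(wrong + " vs " + right);
    }
}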

Aggregations

TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 44 usages
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 32 usages
HashMap (java.util.HashMap): 19 usages
IOException (java.io.IOException): 10 usages
TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse): 7 usages
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 7 usages
Map (java.util.Map): 6 usages
Test (org.junit.Test): 6 usages
LinkedHashMap (java.util.LinkedHashMap): 5 usages
ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException): 4 usages
ArrayList (java.util.ArrayList): 4 usages
EnumSet (java.util.EnumSet): 4 usages
HashSet (java.util.HashSet): 4 usages
Set (java.util.Set): 4 usages
TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 4 usages
TimelineEvents (org.apache.hadoop.yarn.api.records.timeline.TimelineEvents): 4 usages
SortedSet (java.util.SortedSet): 3 usages
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 3 usages
TimelinePutError (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError): 3 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 2 usages