
Example 11 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

the class TestRollingLevelDBTimelineStore method testRelatingToOldEntityWithoutDomainId.

@Test
public void testRelatingToOldEntityWithoutDomainId() throws IOException {
    // New entity is put in the default domain
    TimelineEntity entityToStore = new TimelineEntity();
    entityToStore.setEntityType("NEW_ENTITY_TYPE_1");
    entityToStore.setEntityId("NEW_ENTITY_ID_1");
    entityToStore.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
    entityToStore.addRelatedEntity("OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1");
    TimelineEntities entities = new TimelineEntities();
    entities.addEntity(entityToStore);
    store.put(entities);
    TimelineEntity entityToGet = store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
    Assert.assertNotNull(entityToGet);
    Assert.assertEquals("DEFAULT", entityToGet.getDomainId());
    Assert.assertEquals("NEW_ENTITY_TYPE_1", entityToGet.getRelatedEntities().keySet().iterator().next());
    Assert.assertEquals("NEW_ENTITY_ID_1", entityToGet.getRelatedEntities().values().iterator().next().iterator().next());
    // New entity is not put in the default domain
    entityToStore = new TimelineEntity();
    entityToStore.setEntityType("NEW_ENTITY_TYPE_2");
    entityToStore.setEntityId("NEW_ENTITY_ID_2");
    entityToStore.setDomainId("NON_DEFAULT");
    entityToStore.addRelatedEntity("OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1");
    entities = new TimelineEntities();
    entities.addEntity(entityToStore);
    TimelinePutResponse response = store.put(entities);
    Assert.assertEquals(1, response.getErrors().size());
    Assert.assertEquals(TimelinePutError.FORBIDDEN_RELATION, response.getErrors().get(0).getErrorCode());
    entityToGet = store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
    Assert.assertNotNull(entityToGet);
    Assert.assertEquals("DEFAULT", entityToGet.getDomainId());
    // Still have one related entity
    Assert.assertEquals(1, entityToGet.getRelatedEntities().keySet().size());
    Assert.assertEquals(1, entityToGet.getRelatedEntities().values().iterator().next().size());
}
Also used : TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) TimelinePutResponse(org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) Test(org.junit.Test)
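
The put-and-verify pattern in this test is the same one client code follows: build a TimelineEntity, wrap it in TimelineEntities, and inspect the TimelinePutResponse for per-entity errors. A minimal sketch, assuming an already-started TimelineStore named store (as in the test fixture); the type and id strings are placeholders:

TimelineEntity entity = new TimelineEntity();
entity.setEntityType("MY_ENTITY_TYPE");
entity.setEntityId("MY_ENTITY_ID");
entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
TimelineEntities batch = new TimelineEntities();
batch.addEntity(entity);
TimelinePutResponse response = store.put(batch);
for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
    // FORBIDDEN_RELATION, as asserted above, means a relation crossed domain boundaries.
    System.err.println("Put failed for " + error.getEntityId()
        + ", error code " + error.getErrorCode());
}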

Example 12 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

the class KeyValueBasedTimelineStore method getEntities.

@Override
public synchronized TimelineEntities getEntities(String entityType, Long limit,
        Long windowStart, Long windowEnd, String fromId, Long fromTs,
        NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
        EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
    if (getServiceStopped()) {
        LOG.info("Service stopped, return null for the storage");
        return null;
    }
    if (limit == null) {
        limit = DEFAULT_LIMIT;
    }
    if (windowStart == null) {
        windowStart = Long.MIN_VALUE;
    }
    if (windowEnd == null) {
        windowEnd = Long.MAX_VALUE;
    }
    if (fields == null) {
        fields = EnumSet.allOf(Field.class);
    }
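    // Position the iterator: start from the entity identified by fromId when present,
    // otherwise scan the whole entity set.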
    Iterator<TimelineEntity> entityIterator = null;
    if (fromId != null) {
        TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId, entityType));
        if (firstEntity == null) {
            return new TimelineEntities();
        } else {
            entityIterator = entities.valueSetIterator(firstEntity);
        }
    }
    if (entityIterator == null) {
        entityIterator = entities.valueSetIterator();
    }
    List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
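    // Scan entities in order, applying the limit, entity type, time window, insert-time,
    // and filter checks; only entities passing the ACL check are selected.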
    while (entityIterator.hasNext()) {
        TimelineEntity entity = entityIterator.next();
        if (entitiesSelected.size() >= limit) {
            break;
        }
        if (!entity.getEntityType().equals(entityType)) {
            continue;
        }
        if (entity.getStartTime() <= windowStart) {
            continue;
        }
        if (entity.getStartTime() > windowEnd) {
            continue;
        }
        if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(entity.getEntityId(), entity.getEntityType())) > fromTs) {
            continue;
        }
        if (primaryFilter != null && !KeyValueBasedTimelineStoreUtils.matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
            continue;
        }
        if (secondaryFilters != null) {
            // AND logic
            boolean flag = true;
            for (NameValuePair secondaryFilter : secondaryFilters) {
                if (secondaryFilter != null && !KeyValueBasedTimelineStoreUtils.matchPrimaryFilter(entity.getPrimaryFilters(), secondaryFilter) && !KeyValueBasedTimelineStoreUtils.matchFilter(entity.getOtherInfo(), secondaryFilter)) {
                    flag = false;
                    break;
                }
            }
            if (!flag) {
                continue;
            }
        }
        if (entity.getDomainId() == null) {
            entity.setDomainId(DEFAULT_DOMAIN_ID);
        }
        if (checkAcl == null || checkAcl.check(entity)) {
            entitiesSelected.add(entity);
        }
    }
    List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
    for (TimelineEntity entitySelected : entitiesSelected) {
        entitiesToReturn.add(KeyValueBasedTimelineStoreUtils.maskFields(entitySelected, fields));
    }
    Collections.sort(entitiesToReturn);
    TimelineEntities entitiesWrapper = new TimelineEntities();
    entitiesWrapper.setEntities(entitiesToReturn);
    return entitiesWrapper;
}
Also used : TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) ArrayList(java.util.ArrayList) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
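
For context, a hypothetical caller of the getEntities method above might look like the sketch below. The store instance, entity type, and filter values are assumptions for illustration, not part of the original; the imports follow the types listed in the example.

NameValuePair primaryFilter = new NameValuePair("user", "alice");
TimelineEntities result = store.getEntities(
    "MY_ENTITY_TYPE",   // entityType
    10L,                // limit
    null, null,         // windowStart / windowEnd default inside the method
    null, null,         // fromId / fromTs
    primaryFilter,
    null,               // no secondary filters
    EnumSet.of(Field.PRIMARY_FILTERS, Field.OTHER_INFO),
    null);              // null checkAcl skips the ACL check
for (TimelineEntity entity : result.getEntities()) {
    System.out.println(entity.getEntityId() + " started at " + entity.getStartTime());
}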

Example 13 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

the class ApplicationHistoryManagerOnTimelineStore method getApplicationAttempts.

@Override
public Map<ApplicationAttemptId, ApplicationAttemptReport> getApplicationAttempts(ApplicationId appId) throws YarnException, IOException {
    ApplicationReportExt app = getApplication(appId, ApplicationReportField.USER_AND_ACLS);
    checkAccess(app);
    TimelineEntities entities = timelineDataManager.getEntities(
        AppAttemptMetricsConstants.ENTITY_TYPE,
        new NameValuePair(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER,
            appId.toString()),
        null, null, null, null, null, Long.MAX_VALUE,
        EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
    Map<ApplicationAttemptId, ApplicationAttemptReport> appAttempts = new LinkedHashMap<ApplicationAttemptId, ApplicationAttemptReport>();
    for (TimelineEntity entity : entities.getEntities()) {
        ApplicationAttemptReport appAttempt = convertToApplicationAttemptReport(entity);
        appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt);
    }
    return appAttempts;
}
Also used : NameValuePair(org.apache.hadoop.yarn.server.timeline.NameValuePair) Field(org.apache.hadoop.yarn.server.timeline.TimelineReader.Field) ApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptReport) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) LinkedHashMap(java.util.LinkedHashMap)
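
A hedged sketch of how this lookup might be driven from caller code; the history manager instance and the application id are assumptions for illustration:

ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
Map<ApplicationAttemptId, ApplicationAttemptReport> attempts =
    historyManager.getApplicationAttempts(appId);
for (Map.Entry<ApplicationAttemptId, ApplicationAttemptReport> entry : attempts.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue().getYarnApplicationAttemptState());
}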

Example 14 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

the class ApplicationHistoryManagerOnTimelineStore method getContainers.

@Override
public Map<ContainerId, ContainerReport> getContainers(ApplicationAttemptId appAttemptId) throws YarnException, IOException {
    ApplicationReportExt app = getApplication(appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
    checkAccess(app);
    TimelineEntities entities = timelineDataManager.getEntities(
        ContainerMetricsConstants.ENTITY_TYPE,
        new NameValuePair(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
            appAttemptId.toString()),
        null, null, null, null, null, Long.MAX_VALUE,
        EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
    Map<ContainerId, ContainerReport> containers = new LinkedHashMap<ContainerId, ContainerReport>();
    if (entities != null && entities.getEntities() != null) {
        for (TimelineEntity entity : entities.getEntities()) {
            ContainerReport container = convertToContainerReport(entity, serverHttpAddress, app.appReport.getUser());
            containers.put(container.getContainerId(), container);
        }
    }
    return containers;
}
Also used : NameValuePair(org.apache.hadoop.yarn.server.timeline.NameValuePair) Field(org.apache.hadoop.yarn.server.timeline.TimelineReader.Field) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) ContainerReport(org.apache.hadoop.yarn.api.records.ContainerReport) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) LinkedHashMap(java.util.LinkedHashMap)
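
The container lookup follows the same pattern; a hypothetical caller, with the attempt id assumed for illustration:

ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
Map<ContainerId, ContainerReport> containers = historyManager.getContainers(attemptId);
for (ContainerReport report : containers.values()) {
    System.out.println(report.getContainerId() + " exit status " + report.getContainerExitStatus());
}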

Example 15 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

the class TimelineClientImpl method putTimelineDataInJSONFile.

/**
   * Put timeline data in a JSON file via command line.
   * 
   * @param path
   *          path to the timeline data JSON file
   * @param type
   *          the type of the timeline data in the JSON file
   */
private static void putTimelineDataInJSONFile(String path, String type) {
    File jsonFile = new File(path);
    if (!jsonFile.exists()) {
        LOG.error("File [" + jsonFile.getAbsolutePath() + "] doesn't exist");
        return;
    }
    YarnJacksonJaxbJsonProvider.configObjectMapper(MAPPER);
    TimelineEntities entities = null;
    TimelineDomains domains = null;
    try {
        if (type.equals(ENTITY_DATA_TYPE)) {
            entities = MAPPER.readValue(jsonFile, TimelineEntities.class);
        } else if (type.equals(DOMAIN_DATA_TYPE)) {
            domains = MAPPER.readValue(jsonFile, TimelineDomains.class);
        }
    } catch (Exception e) {
        LOG.error("Error when reading  " + e.getMessage());
        e.printStackTrace(System.err);
        return;
    }
    Configuration conf = new YarnConfiguration();
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(conf);
    client.start();
    try {
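        // In secure mode, obtain a timeline delegation token for the current user before putting data.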
        if (UserGroupInformation.isSecurityEnabled() && conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
            Token<TimelineDelegationTokenIdentifier> token = client.getDelegationToken(UserGroupInformation.getCurrentUser().getUserName());
            UserGroupInformation.getCurrentUser().addToken(token);
        }
        if (type.equals(ENTITY_DATA_TYPE)) {
            TimelinePutResponse response = client.putEntities(entities.getEntities().toArray(new TimelineEntity[entities.getEntities().size()]));
            if (response.getErrors().size() == 0) {
                LOG.info("Timeline entities are successfully put");
            } else {
                for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
                    LOG.error("TimelineEntity [" + error.getEntityType() + ":" + error.getEntityId() + "] is not successfully put. Error code: " + error.getErrorCode());
                }
            }
        } else if (type.equals(DOMAIN_DATA_TYPE) && domains != null) {
            boolean hasError = false;
            for (TimelineDomain domain : domains.getDomains()) {
                try {
                    client.putDomain(domain);
                } catch (Exception e) {
                    LOG.error("Error when putting domain " + domain.getId(), e);
                    hasError = true;
                }
            }
            if (!hasError) {
                LOG.info("Timeline domains are successfully put");
            }
        }
    } catch (RuntimeException e) {
        LOG.error("Error when putting the timeline data", e);
    } catch (Exception e) {
        LOG.error("Error when putting the timeline data", e);
    } finally {
        client.stop();
    }
}
Also used : TimelineDomains(org.apache.hadoop.yarn.api.records.timeline.TimelineDomains) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Configuration(org.apache.hadoop.conf.Configuration) TimelineDelegationTokenIdentifier(org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier) TimelinePutResponse(org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException) TimelineClient(org.apache.hadoop.yarn.client.api.TimelineClient) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) TimelineDomain(org.apache.hadoop.yarn.api.records.timeline.TimelineDomain) File(java.io.File)
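
The same put can also be done programmatically without a JSON file; a minimal hedged sketch using the TimelineClient API shown above (the entity type, id, and configuration are placeholders, and checked exceptions from putEntities are assumed to propagate to the enclosing method):

TimelineEntity entity = new TimelineEntity();
entity.setEntityType("MY_ENTITY_TYPE");
entity.setEntityId("MY_ENTITY_ID");
entity.setStartTime(System.currentTimeMillis());
TimelineClient client = TimelineClient.createTimelineClient();
client.init(new YarnConfiguration());
client.start();
try {
    // putEntities takes a varargs array of TimelineEntity, as in the JSON path above.
    TimelinePutResponse response = client.putEntities(entity);
    if (!response.getErrors().isEmpty()) {
        System.err.println("Put failed with " + response.getErrors().size() + " errors");
    }
} finally {
    client.stop();
}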

Aggregations

TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 43 uses
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 30 uses
Test (org.junit.Test): 23 uses
TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse): 14 uses
ClientResponse (com.sun.jersey.api.client.ClientResponse): 9 uses
WebResource (com.sun.jersey.api.client.WebResource): 8 uses
AdminACLsManager (org.apache.hadoop.yarn.security.AdminACLsManager): 6 uses
ArrayList (java.util.ArrayList): 5 uses
Set (java.util.Set): 5 uses
Path (org.apache.hadoop.fs.Path): 5 uses
IOException (java.io.IOException): 4 uses
HashMap (java.util.HashMap): 4 uses
HashSet (java.util.HashSet): 4 uses
Configuration (org.apache.hadoop.conf.Configuration): 4 uses
TimelineDomain (org.apache.hadoop.yarn.api.records.timeline.TimelineDomain): 4 uses
TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 4 uses
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 4 uses
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 4 uses
EnumSet (java.util.EnumSet): 3 uses
LinkedHashMap (java.util.LinkedHashMap): 3 uses