Example 21 with TimelineEntityFilters

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.

The class ApplicationEntityReader, method constructFilterListBasedOnFilters:

/**
   * This method is called only for multiple entity reads.
   */
@Override
protected FilterList constructFilterListBasedOnFilters() throws IOException {
    // Filters here cannot be null for multiple entity reads as they are set in
    // augmentParams if null.
    TimelineEntityFilters filters = getFilters();
    FilterList listBasedOnFilters = new FilterList();
    // Create filter list based on created time range and add it to
    // listBasedOnFilters.
    long createdTimeBegin = filters.getCreatedTimeBegin();
    long createdTimeEnd = filters.getCreatedTimeEnd();
    if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createSingleColValueFiltersByRange(ApplicationColumn.CREATED_TIME, createdTimeBegin, createdTimeEnd));
    }
    // Create filter list based on metric filters and add it to
    // listBasedOnFilters.
    TimelineFilterList metricFilters = filters.getMetricFilters();
    if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(ApplicationColumnPrefix.METRIC, metricFilters));
    }
    // Create filter list based on config filters and add it to
    // listBasedOnFilters.
    TimelineFilterList configFilters = filters.getConfigFilters();
    if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(ApplicationColumnPrefix.CONFIG, configFilters));
    }
    // Create filter list based on info filters and add it to listBasedOnFilters
    TimelineFilterList infoFilters = filters.getInfoFilters();
    if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(ApplicationColumnPrefix.INFO, infoFilters));
    }
    return listBasedOnFilters;
}
Also used : TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList)
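
For reference, a minimal sketch of how a caller's TimelineEntityFilters feeds the branches above. It assumes the nine-argument constructor used in the test example further below takes (limit, createdTimeBegin, createdTimeEnd, relatesTo, isRelatedTo, infoFilters, configFilters, metricFilters, eventFilters); the class name and timestamps here are illustrative only. With only a created-time window set, constructFilterListBasedOnFilters() adds just the single-column range filter and skips the metric, config and info branches.

import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;

public class CreatedTimeFilterExample {
    // Assumed argument order: (limit, createdTimeBegin, createdTimeEnd, relatesTo,
    // isRelatedTo, infoFilters, configFilters, metricFilters, eventFilters).
    static TimelineEntityFilters createdTimeWindow(long begin, long end) {
        return new TimelineEntityFilters(
            null,        // limit: irrelevant to HBase filter-list construction
            begin,       // createdTimeBegin -> lower bound on CREATED_TIME
            end,         // createdTimeEnd   -> upper bound on CREATED_TIME
            null, null,  // relatesTo, isRelatedTo: matched locally, not via HBase filters
            null,        // infoFilters:   a null/empty list adds no HBase filter
            null,        // configFilters: a null/empty list adds no HBase filter
            null,        // metricFilters: a null/empty list adds no HBase filter
            null);       // eventFilters:  matched locally when parsing entities
    }
}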

Example 22 with TimelineEntityFilters

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.

The class GenericEntityReader, method parseEntity:

@Override
protected TimelineEntity parseEntity(Result result) throws IOException {
    if (result == null || result.isEmpty()) {
        return null;
    }
    TimelineEntity entity = new TimelineEntity();
    String entityType = EntityColumn.TYPE.readResult(result).toString();
    entity.setType(entityType);
    String entityId = EntityColumn.ID.readResult(result).toString();
    entity.setId(entityId);
    TimelineEntityFilters filters = getFilters();
    // fetch created time
    Long createdTime = (Long) EntityColumn.CREATED_TIME.readResult(result);
    entity.setCreatedTime(createdTime);
    EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
    // fetch is related to entities and match isRelatedTo filter. If isRelatedTo
    // filters do not match, entity would be dropped. We have to match filters
    // locally as relevant HBase filters to filter out rows on the basis of
    // isRelatedTo are not set in HBase scan.
    boolean checkIsRelatedTo = !isSingleEntityRead() && filters.getIsRelatedTo() != null && filters.getIsRelatedTo().getFilterList().size() > 0;
    if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO) || checkIsRelatedTo) {
        readRelationship(entity, result, EntityColumnPrefix.IS_RELATED_TO, true);
        if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity, filters.getIsRelatedTo())) {
            return null;
        }
        if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
            entity.getIsRelatedToEntities().clear();
        }
    }
    // fetch relates to entities and match relatesTo filter. If relatesTo
    // filters do not match, entity would be dropped. We have to match filters
    // locally as relevant HBase filters to filter out rows on the basis of
    // relatesTo are not set in HBase scan.
    boolean checkRelatesTo = !isSingleEntityRead() && filters.getRelatesTo() != null && filters.getRelatesTo().getFilterList().size() > 0;
    if (hasField(fieldsToRetrieve, Field.RELATES_TO) || checkRelatesTo) {
        readRelationship(entity, result, EntityColumnPrefix.RELATES_TO, false);
        if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity, filters.getRelatesTo())) {
            return null;
        }
        if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
            entity.getRelatesToEntities().clear();
        }
    }
    // fetch info if fieldsToRetrieve contains INFO or ALL.
    if (hasField(fieldsToRetrieve, Field.INFO)) {
        readKeyValuePairs(entity, result, EntityColumnPrefix.INFO, false);
    }
    // fetch configs if fieldsToRetrieve contains CONFIGS or ALL.
    if (hasField(fieldsToRetrieve, Field.CONFIGS)) {
        readKeyValuePairs(entity, result, EntityColumnPrefix.CONFIG, true);
    }
    // fetch events and match event filters if they exist. If event filters do
    // not match, entity would be dropped. We have to match filters locally
    // as relevant HBase filters to filter out rows on the basis of events
    // are not set in HBase scan.
    boolean checkEvents = !isSingleEntityRead() && filters.getEventFilters() != null && filters.getEventFilters().getFilterList().size() > 0;
    if (hasField(fieldsToRetrieve, Field.EVENTS) || checkEvents) {
        readEvents(entity, result, EntityColumnPrefix.EVENT);
        if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity, filters.getEventFilters())) {
            return null;
        }
        if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
            entity.getEvents().clear();
        }
    }
    // fetch metrics if fieldsToRetrieve contains METRICS or ALL.
    if (hasField(fieldsToRetrieve, Field.METRICS)) {
        readMetrics(entity, result, EntityColumnPrefix.METRIC);
    }
    return entity;
}
Also used : Field(org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field) TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
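
The method above applies the same pattern three times, for IS_RELATED_TO, RELATES_TO and EVENTS: read the data when the field was requested or a local filter exists, drop the entity on a filter mismatch, and clear the data again if it was fetched only for matching. A self-contained sketch of the match-then-clear half of that pattern, with purely illustrative names (this helper is not part of Hadoop), may make the control flow easier to follow.

import java.util.Collection;
import java.util.function.Predicate;

// Illustrative helper only: mirrors the local-filter handling used above.
final class FilterMatchPattern {
    /**
     * @param requested true if the field is in fieldsToRetrieve (or ALL was asked for)
     * @param mustMatch true if a local, non-HBase filter has to be evaluated
     * @param data      the values already read from the row for this field
     * @param matcher   the local filter predicate
     * @return true to keep the entity, false to drop it
     */
    static <T> boolean matchAndMaybeClear(boolean requested, boolean mustMatch,
            Collection<T> data, Predicate<Collection<T>> matcher) {
        if (mustMatch && !matcher.test(data)) {
            return false;       // filter mismatch: the whole entity is dropped
        }
        if (!requested) {
            data.clear();       // fetched only for matching: do not return it
        }
        return true;
    }
}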

Example 23 with TimelineEntityFilters

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.

The class GenericEntityReader, method constructFilterListBasedOnFilters:

@Override
protected FilterList constructFilterListBasedOnFilters() throws IOException {
    // Filters here cannot be null for multiple entity reads as they are set in
    // augmentParams if null.
    FilterList listBasedOnFilters = new FilterList();
    TimelineEntityFilters filters = getFilters();
    // Create filter list based on created time range and add it to
    // listBasedOnFilters.
    long createdTimeBegin = filters.getCreatedTimeBegin();
    long createdTimeEnd = filters.getCreatedTimeEnd();
    if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createSingleColValueFiltersByRange(EntityColumn.CREATED_TIME, createdTimeBegin, createdTimeEnd));
    }
    // Create filter list based on metric filters and add it to
    // listBasedOnFilters.
    TimelineFilterList metricFilters = filters.getMetricFilters();
    if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(EntityColumnPrefix.METRIC, metricFilters));
    }
    // Create filter list based on config filters and add it to
    // listBasedOnFilters.
    TimelineFilterList configFilters = filters.getConfigFilters();
    if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(EntityColumnPrefix.CONFIG, configFilters));
    }
    // Create filter list based on info filters and add it to listBasedOnFilters
    TimelineFilterList infoFilters = filters.getInfoFilters();
    if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(EntityColumnPrefix.INFO, infoFilters));
    }
    return listBasedOnFilters;
}
Also used : TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList)
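
The TimelineFilterUtils helpers are not shown in this listing. As a rough, hedged sketch of what createSingleColValueFiltersByRange presumably produces for the created-time branch, the plain-HBase equivalent is a MUST_PASS_ALL list of two SingleColumnValueFilters bounding one column; the family and qualifier arguments below are placeholders, not the real entity-table schema.

import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch only: approximates a column-value range filter with raw HBase filters.
final class CreatedTimeRangeSketch {
    static FilterList createdTimeRange(byte[] family, byte[] qualifier,
            long begin, long end) {
        FilterList list = new FilterList(Operator.MUST_PASS_ALL);
        list.addFilter(new SingleColumnValueFilter(
            family, qualifier, CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(begin)));
        list.addFilter(new SingleColumnValueFilter(
            family, qualifier, CompareOp.LESS_OR_EQUAL, Bytes.toBytes(end)));
        return list;
    }
}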

Example 24 with TimelineEntityFilters

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.

The class GenericEntityReader, method fetchPartialColsFromInfoFamily:

/**
   * Check if we need to fetch only some of the columns based on event filters,
   * relatesto and isrelatedto from info family.
   *
   * @return true, if we need to fetch only some of the columns, false if we
   *         need to fetch all the columns under info column family.
   */
protected boolean fetchPartialColsFromInfoFamily() {
    EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
    TimelineEntityFilters filters = getFilters();
    return fetchPartialEventCols(filters.getEventFilters(), fieldsToRetrieve) || fetchPartialRelatesToCols(filters.getRelatesTo(), fieldsToRetrieve) || fetchPartialIsRelatedToCols(filters.getIsRelatedTo(), fieldsToRetrieve);
}
Also used : Field(org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field) TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters)
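
The fetchPartialEventCols, fetchPartialRelatesToCols and fetchPartialIsRelatedToCols helpers are not reproduced here. A hedged guess at the shape of one of them follows, named with a Sketch suffix to make clear it is not the actual implementation: partial fetching only makes sense when the filter list is non-empty and the corresponding field is not being returned anyway.

import java.util.EnumSet;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;

// Sketch of the assumed logic behind fetchPartialEventCols; not the real code.
final class PartialColsSketch {
    static boolean fetchPartialEventColsSketch(TimelineFilterList eventFilters,
            EnumSet<Field> fieldsToRetrieve) {
        return eventFilters != null
            && !eventFilters.getFilterList().isEmpty()
            && !fieldsToRetrieve.contains(Field.EVENTS)
            && !fieldsToRetrieve.contains(Field.ALL);
    }
}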

Example 25 with TimelineEntityFilters

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.

The class TestFileSystemTimelineReaderImpl, method testGetEntitiesWithLimit:

@Test
public void testGetEntitiesWithLimit() throws Exception {
    Set<TimelineEntity> result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(2L, null, null, null, null, null, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    // based on created time, descending.
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_4")) {
            Assert.fail("Entity not sorted by created time");
        }
    }
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(3L, null, null, null, null, null, null, null, null), new TimelineDataToRetrieve());
    // Even though 2 entities out of 4 have same created time, one entity
    // is left out due to limit
    Assert.assertEquals(3, result.size());
}
Also used : TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) Test(org.junit.Test)
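
A hedged extension of the same test (not in the original source): the comment above implies the data set holds four entities under app1, so a limit of 10 should return all of them, using the same nine-argument TimelineEntityFilters constructor.

    // Assumes the backing test data really contains exactly four entities for app1.
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null),
        new TimelineEntityFilters(10L, null, null, null, null, null, null, null, null),
        new TimelineDataToRetrieve());
    Assert.assertEquals(4, result.size());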

Aggregations

TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) 50
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) 46
TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) 45
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) 44
Test (org.junit.Test) 44
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) 28
TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter) 10
Configuration (org.apache.hadoop.conf.Configuration) 9
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities) 9
HashSet (java.util.HashSet) 8
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) 7
TimelineKeyValueFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter) 7
TimelineKeyValuesFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter) 7
HBaseTimelineReaderImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl) 7
HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl) 7
TimelineCompareFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter) 6
TimelineExistsFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter) 5
Connection (org.apache.hadoop.hbase.client.Connection) 3
Result (org.apache.hadoop.hbase.client.Result) 3
FlowActivityEntity (org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity) 3