Search in sources :

Example 11 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

The following is the ApplicationEntityReader class's method createFilterListForColsOfInfoFamily.

/**
   * Creates a filter list which indicates that only some of the column
   * qualifiers in the info column family will be returned in result.
   *
   * @return filter list.
   * @throws IOException if any problem occurs while creating filter list.
   */
private FilterList createFilterListForColsOfInfoFamily() throws IOException {
    // MUST_PASS_ONE: a column is kept if ANY of the added filters accepts it.
    FilterList infoFamilyColsFilter = new FilterList(Operator.MUST_PASS_ONE);
    // Add filters for each fixed column in entity table.
    updateFixedColumns(infoFamilyColsFilter);
    EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
    // If INFO field has to be retrieved, add a filter for fetching columns
    // with INFO column prefix.
    if (hasField(fieldsToRetrieve, Field.INFO)) {
        infoFamilyColsFilter.addFilter(TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.EQUAL, ApplicationColumnPrefix.INFO));
    }
    // The RELATES_TO, IS_RELATED_TO and EVENT column groups all follow the
    // same retrieve-or-filter pattern; delegate to a shared helper.
    addQualifierFiltersForField(infoFamilyColsFilter, fieldsToRetrieve,
        Field.RELATES_TO, ApplicationColumnPrefix.RELATES_TO,
        getFilters().getRelatesTo());
    addQualifierFiltersForField(infoFamilyColsFilter, fieldsToRetrieve,
        Field.IS_RELATED_TO, ApplicationColumnPrefix.IS_RELATED_TO,
        getFilters().getIsRelatedTo());
    addQualifierFiltersForField(infoFamilyColsFilter, fieldsToRetrieve,
        Field.EVENTS, ApplicationColumnPrefix.EVENT,
        getFilters().getEventFilters());
    return infoFamilyColsFilter;
}

/**
   * Adds the qualifier filters needed for one prefixed column group in the
   * info column family (RELATES_TO, IS_RELATED_TO or EVENT columns).
   *
   * @param infoFamilyColsFilter filter list to add the filters to.
   * @param fieldsToRetrieve fields which have to be retrieved.
   * @param field field corresponding to the column prefix.
   * @param prefix column prefix for this group of columns.
   * @param filters timeline filters specified for this field, may be null.
   * @throws IOException if any problem occurs while creating the filters.
   */
private void addQualifierFiltersForField(FilterList infoFamilyColsFilter,
        EnumSet<Field> fieldsToRetrieve, Field field,
        ApplicationColumnPrefix prefix, TimelineFilterList filters)
        throws IOException {
    if (hasField(fieldsToRetrieve, field)) {
        // If the field has to be retrieved, add a filter for fetching all
        // columns with this column prefix.
        infoFamilyColsFilter.addFilter(TimelineFilterUtils.createHBaseQualifierFilter(CompareOp.EQUAL, prefix));
    } else if (filters != null && !filters.getFilterList().isEmpty()) {
        // Even if fields to retrieve does not contain the field, we still
        // need to fetch the column qualifiers its filters refer to; the
        // filters will then be matched after fetching rows from HBase.
        Set<String> cols = TimelineFilterUtils.fetchColumnsFromFilterList(filters);
        infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(prefix, cols));
    }
}
Also used : Field(org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field) EnumSet(java.util.EnumSet) Set(java.util.Set) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)

Example 12 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

the class ApplicationEntityReader method constructFilterListBasedOnFilters.

/**
   * This method is called only for multiple entity reads.
   */
@Override
protected FilterList constructFilterListBasedOnFilters() throws IOException {
    // For multiple entity reads the filters are never null here: augmentParams
    // sets them when absent.
    TimelineEntityFilters entityFilters = getFilters();
    FilterList listBasedOnFilters = new FilterList();
    // Restrict by created-time range, but only when it differs from the
    // default unbounded span (0, Long.MAX_VALUE).
    long timeBegin = entityFilters.getCreatedTimeBegin();
    long timeEnd = entityFilters.getCreatedTimeEnd();
    if (timeBegin != 0 || timeEnd != Long.MAX_VALUE) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createSingleColValueFiltersByRange(ApplicationColumn.CREATED_TIME, timeBegin, timeEnd));
    }
    // Restrict by metric filters, if any were supplied.
    TimelineFilterList metricFilters = entityFilters.getMetricFilters();
    if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(ApplicationColumnPrefix.METRIC, metricFilters));
    }
    // Restrict by config filters, if any were supplied.
    TimelineFilterList configFilters = entityFilters.getConfigFilters();
    if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(ApplicationColumnPrefix.CONFIG, configFilters));
    }
    // Restrict by info filters, if any were supplied.
    TimelineFilterList infoFilters = entityFilters.getInfoFilters();
    if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(ApplicationColumnPrefix.INFO, infoFilters));
    }
    return listBasedOnFilters;
}
Also used : TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)

Example 13 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

the class ApplicationEntityReader method getResults.

@Override
protected ResultScanner getResults(Configuration hbaseConf, Connection conn, FilterList filterList) throws IOException {
    TimelineReaderContext context = getContext();
    // flowRunId may or may not be null; ApplicationRowKeyPrefix copes with
    // both cases and builds the appropriate row-key prefix.
    RowKeyPrefix<ApplicationRowKey> rowKeyPrefix = new ApplicationRowKeyPrefix(context.getClusterId(), context.getUserId(), context.getFlowName(), context.getFlowRunId());
    Scan scan = new Scan();
    scan.setRowPrefixFilter(rowKeyPrefix.getRowKeyPrefix());
    // Cap the number of rows returned, then apply any caller-supplied filters.
    FilterList combinedFilters = new FilterList();
    combinedFilters.addFilter(new PageFilter(getFilters().getLimit()));
    if (filterList != null && !filterList.getFilters().isEmpty()) {
        combinedFilters.addFilter(filterList);
    }
    scan.setFilter(combinedFilters);
    // Metrics limit bounds how many versions of each metric cell we read.
    scan.setMaxVersions(getDataToRetrieve().getMetricsLimit());
    return getTable().getResultScanner(hbaseConf, conn, scan);
}
Also used : ApplicationRowKeyPrefix(org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKeyPrefix) TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) ApplicationRowKey(org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey) Scan(org.apache.hadoop.hbase.client.Scan) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) PageFilter(org.apache.hadoop.hbase.filter.PageFilter)

Example 14 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

the class ApplicationEntityReader method constructFilterListBasedOnFields.

@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
    if (!needCreateFilterListBasedOnFields()) {
        // No restriction required; fetch all the columns (null means no filter).
        return null;
    }
    FilterList listBasedOnFields = new FilterList(Operator.MUST_PASS_ONE);
    // Start from "everything in the INFO column family" ...
    FilterList infoFamilyFilters = new FilterList();
    FamilyFilter familyFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(ApplicationColumnFamily.INFO.getBytes()));
    infoFamilyFilters.addFilter(familyFilter);
    // ... then narrow it down.
    if (!isSingleEntityRead() && fetchPartialColsFromInfoFamily()) {
        // Only a subset of the info family columns is needed.
        infoFamilyFilters.addFilter(createFilterListForColsOfInfoFamily());
    } else {
        // Drop the column prefixes that the requested fields do not cover.
        excludeFieldsFromInfoColFamily(infoFamilyFilters);
    }
    listBasedOnFields.addFilter(infoFamilyFilters);
    // Add filters for the configs/metrics the caller asked to retrieve.
    updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
    return listBasedOnFields;
}
Also used : FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)

Example 15 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

The following is the FlowRunEntityReader class's method constructFilterListBasedOnFilters.

/**
   * Creates a filter list based on the flow run filters, i.e. the created
   * time range and the metric filters.
   *
   * @return filter list based on the specified filters.
   * @throws IOException if any problem occurs while creating filter list.
   */
@Override
protected FilterList constructFilterListBasedOnFilters() throws IOException {
    FilterList listBasedOnFilters = new FilterList();
    // Filter based on created time range. Use primitive longs like the other
    // entity readers do: the boxed Longs used previously were unboxed by the
    // comparisons below anyway, so this only removes needless boxing.
    long createdTimeBegin = getFilters().getCreatedTimeBegin();
    long createdTimeEnd = getFilters().getCreatedTimeEnd();
    if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createSingleColValueFiltersByRange(FlowRunColumn.MIN_START_TIME, createdTimeBegin, createdTimeEnd));
    }
    // Filter based on metric filters, if any were supplied.
    TimelineFilterList metricFilters = getFilters().getMetricFilters();
    if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(FlowRunColumnPrefix.METRIC, metricFilters));
    }
    return listBasedOnFilters;
}
Also used : TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)

Aggregations

FilterList (org.apache.hadoop.hbase.filter.FilterList)68 Filter (org.apache.hadoop.hbase.filter.Filter)36 Scan (org.apache.hadoop.hbase.client.Scan)16 QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter)10 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)10 TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)10 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)9 Test (org.junit.Test)8 ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig)7 ArrayList (java.util.ArrayList)7 FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter)7 Transaction (org.apache.tephra.Transaction)7 IOException (java.io.IOException)6 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)6 Result (org.apache.hadoop.hbase.client.Result)5 PageFilter (org.apache.hadoop.hbase.filter.PageFilter)5 Cell (org.apache.hadoop.hbase.Cell)4 TableName (org.apache.hadoop.hbase.TableName)4 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)4 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)4