Search in sources:

Example 31 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in the project cdap by caskdata.

From the class Filters, method combine.

/**
 * Combines {@code overrideFilter} with {@code baseFilter} under a
 * MUST_PASS_ALL (logical AND) {@link FilterList} when a base filter is
 * present; otherwise the override filter is returned on its own.
 */
public static Filter combine(Filter overrideFilter, Filter baseFilter) {
    if (baseFilter == null) {
        // No base filter to merge with — the override replaces it outright.
        return overrideFilter;
    }
    // Varargs constructor adds the filters in order: base first, then override.
    return new FilterList(FilterList.Operator.MUST_PASS_ALL, baseFilter, overrideFilter);
}
Also used : FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 32 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in the project cdap by caskdata.

From the class Filters, method combine.

/**
 * Merges {@code overrideFilter} into {@code baseFilter} when one exists,
 * requiring rows to pass both (MUST_PASS_ALL); with no base filter the
 * override filter is returned unchanged.
 */
public static Filter combine(Filter overrideFilter, Filter baseFilter) {
    if (baseFilter == null) {
        // Nothing to merge with; the override stands alone.
        return overrideFilter;
    }
    FilterList merged = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    merged.addFilter(baseFilter);
    merged.addFilter(overrideFilter);
    return merged;
}
Also used : FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 33 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in the project hive by apache.

From the class HBaseScanRange, method setup.

/**
 * Applies this range to {@code scan}: the start/stop row boundaries (skipped
 * when {@code filterOnly} is set) and any configured filter descriptors,
 * wrapping multiple filters in a single {@link FilterList}.
 */
public void setup(Scan scan, Configuration conf, boolean filterOnly) throws Exception {
    if (!filterOnly) {
        // Row boundaries are applied only when the caller asked for them.
        if (startRow != null) {
            scan.setStartRow(startRow);
        }
        if (stopRow != null) {
            scan.setStopRow(stopRow);
        }
    }
    int filterCount = filterDescs.size();
    if (filterCount == 0) {
        return;
    }
    if (filterCount == 1) {
        // A lone filter needs no FilterList wrapper.
        scan.setFilter(filterDescs.get(0).toFilter(conf));
        return;
    }
    List<Filter> built = new ArrayList<Filter>(filterCount);
    for (FilterDesc desc : filterDescs) {
        built.add(desc.toFilter(conf));
    }
    scan.setFilter(new FilterList(built));
}
Also used : Filter(org.apache.hadoop.hbase.filter.Filter) ArrayList(java.util.ArrayList) FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 34 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in the project cxf by apache.

From the class HBaseQueryVisitor, method createCompositeQuery.

/**
 * Builds a composite filter from the given sub-queries: MUST_PASS_ONE
 * (logical OR) when {@code orCondition} is set, MUST_PASS_ALL (logical AND)
 * otherwise.
 */
private Filter createCompositeQuery(List<Filter> queries, boolean orCondition) {
    final FilterList.Operator op;
    if (orCondition) {
        op = FilterList.Operator.MUST_PASS_ONE;
    } else {
        op = FilterList.Operator.MUST_PASS_ALL;
    }
    // FilterList accepts the sub-filters directly, preserving their order.
    return new FilterList(op, queries);
}
Also used : Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 35 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in the project janusgraph by JanusGraph.

From the class HBaseKeyColumnValueStore, method getFilter.

/**
 * Translates a {@code SliceQuery} into an HBase column filter: a column
 * range with inclusive lower and exclusive upper bound, additionally
 * paginated when the query carries a limit. Empty slice boundaries map to
 * {@code null}, i.e. an unbounded end of the range.
 */
public static Filter getFilter(SliceQuery query) {
    byte[] lowerBound = null;
    if (query.getSliceStart().length() > 0) {
        lowerBound = query.getSliceStart().as(StaticBuffer.ARRAY_FACTORY);
    }
    byte[] upperBound = null;
    if (query.getSliceEnd().length() > 0) {
        upperBound = query.getSliceEnd().as(StaticBuffer.ARRAY_FACTORY);
    }
    // Half-open interval: [sliceStart, sliceEnd).
    Filter filter = new ColumnRangeFilter(lowerBound, true, upperBound, false);
    if (query.hasLimit()) {
        // Cap the number of columns returned per row, starting at offset 0.
        filter = new FilterList(FilterList.Operator.MUST_PASS_ALL, filter, new ColumnPaginationFilter(query.getLimit(), 0));
    }
    logger.debug("Generated HBase Filter {}", filter);
    return filter;
}
Also used : ColumnPaginationFilter(org.apache.hadoop.hbase.filter.ColumnPaginationFilter) Filter(org.apache.hadoop.hbase.filter.Filter) ColumnRangeFilter(org.apache.hadoop.hbase.filter.ColumnRangeFilter) ColumnRangeFilter(org.apache.hadoop.hbase.filter.ColumnRangeFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) ColumnPaginationFilter(org.apache.hadoop.hbase.filter.ColumnPaginationFilter)

Aggregations

FilterList (org.apache.hadoop.hbase.filter.FilterList)68 Filter (org.apache.hadoop.hbase.filter.Filter)36 Scan (org.apache.hadoop.hbase.client.Scan)16 QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter)10 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)10 TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)10 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)9 Test (org.junit.Test)8 ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig)7 ArrayList (java.util.ArrayList)7 FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter)7 Transaction (org.apache.tephra.Transaction)7 IOException (java.io.IOException)6 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)6 Result (org.apache.hadoop.hbase.client.Result)5 PageFilter (org.apache.hadoop.hbase.filter.PageFilter)5 Cell (org.apache.hadoop.hbase.Cell)4 TableName (org.apache.hadoop.hbase.TableName)4 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)4 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)4