Search in sources :

Example 61 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata.

The class DequeueScanObserver defines the method preScannerOpen, shown below.

@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan, RegionScanner s) throws IOException {
    // Dequeue scans carry the consumer configuration and transaction as scan attributes.
    ConsumerConfig consumerConfig = DequeueScanAttributes.getConsumerConfig(scan);
    Transaction tx = DequeueScanAttributes.getTx(scan);
    // Only decorate the scan when both attributes are present; otherwise it is
    // not a dequeue scan and must pass through untouched.
    if (consumerConfig != null && tx != null) {
        Filter dequeueFilter = new DequeueFilter(consumerConfig, tx);
        Filter existing = scan.getFilter();
        // Preserve any filter the client already set by AND-ing it with the
        // dequeue filter; both must pass for a cell to be returned.
        scan.setFilter(existing == null
                           ? dequeueFilter
                           : new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, dequeueFilter));
    }
    return super.preScannerOpen(e, scan, s);
}
Also used : Transaction(org.apache.tephra.Transaction) Filter(org.apache.hadoop.hbase.filter.Filter) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 62 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata.

The class Filters defines the method combine, shown below.

/**
 * Combines {@code overrideFilter} with {@code baseFilter}. When a base filter is
 * present, the two are joined so that both must pass ({@code MUST_PASS_ALL});
 * when there is no base filter, {@code overrideFilter} is returned on its own.
 *
 * @param overrideFilter the filter to apply in addition to (or instead of) the base filter
 * @param baseFilter the pre-existing filter, may be {@code null}
 * @return the combined filter, never {@code null} if {@code overrideFilter} is non-null
 */
public static Filter combine(Filter overrideFilter, Filter baseFilter) {
    if (baseFilter == null) {
        return overrideFilter;
    }
    // Both filters must accept a cell for it to be included in the result.
    FilterList combined = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    combined.addFilter(baseFilter);
    combined.addFilter(overrideFilter);
    return combined;
}
Also used : FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 63 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata.

The class DequeueScanObserver defines the method preScannerOpen, shown below.

@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan, RegionScanner s) throws IOException {
    // Consumer config and transaction are attached to the scan as attributes
    // by the client issuing a dequeue.
    ConsumerConfig consumerConfig = DequeueScanAttributes.getConsumerConfig(scan);
    Transaction tx = DequeueScanAttributes.getTx(scan);
    if (consumerConfig == null || tx == null) {
        // Not a dequeue scan; pass through without modifying the filter chain.
        return super.preScannerOpen(e, scan, s);
    }
    Filter dequeueFilter = new DequeueFilter(consumerConfig, tx);
    Filter existing = scan.getFilter();
    if (existing != null) {
        // AND the dequeue filter with the client's filter so both must pass.
        Filter combined = new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, dequeueFilter);
        scan.setFilter(combined);
    } else {
        scan.setFilter(dequeueFilter);
    }
    return super.preScannerOpen(e, scan, s);
}
Also used : Transaction(org.apache.tephra.Transaction) Filter(org.apache.hadoop.hbase.filter.Filter) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) FilterList(org.apache.hadoop.hbase.filter.FilterList)

Example 64 with FilterList

use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata.

The class DequeueScanObserver defines the method preScannerOpen, shown below.

@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan, RegionScanner s) throws IOException {
    // A dequeue scan is identified by a consumer config and a transaction
    // attached to the scan as attributes.
    ConsumerConfig config = DequeueScanAttributes.getConsumerConfig(scan);
    Transaction transaction = DequeueScanAttributes.getTx(scan);
    if (config == null || transaction == null) {
        // Not a queue dequeue; defer to default scanner behavior.
        return super.preScannerOpen(e, scan, s);
    }
    Filter dequeueFilter = new DequeueFilter(config, transaction);
    Filter current = scan.getFilter();
    Filter effective = dequeueFilter;
    if (current != null) {
        // Keep the client's filter in effect: a cell must pass both filters.
        effective = new FilterList(FilterList.Operator.MUST_PASS_ALL, current, dequeueFilter);
    }
    scan.setFilter(effective);
    return super.preScannerOpen(e, scan, s);
}
Also used : Transaction(org.apache.tephra.Transaction) Filter(org.apache.hadoop.hbase.filter.Filter) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) FilterList(org.apache.hadoop.hbase.filter.FilterList)

Aggregations

FilterList (org.apache.hadoop.hbase.filter.FilterList)64 Filter (org.apache.hadoop.hbase.filter.Filter)32 Scan (org.apache.hadoop.hbase.client.Scan)16 QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter)10 TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)10 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)9 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)8 Test (org.junit.Test)8 ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig)7 FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter)7 Transaction (org.apache.tephra.Transaction)7 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)6 IOException (java.io.IOException)5 ArrayList (java.util.ArrayList)5 Result (org.apache.hadoop.hbase.client.Result)5 PageFilter (org.apache.hadoop.hbase.filter.PageFilter)5 Cell (org.apache.hadoop.hbase.Cell)4 TableName (org.apache.hadoop.hbase.TableName)4 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)4 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)4