Example 36 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata.

The class DequeueScanObserver, method preScannerOpen:

@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan, RegionScanner s) throws IOException {
    ConsumerConfig consumerConfig = DequeueScanAttributes.getConsumerConfig(scan);
    Transaction tx = DequeueScanAttributes.getTx(scan);
    if (consumerConfig == null || tx == null) {
        // Not a dequeue scan; leave the scan untouched.
        return super.preScannerOpen(e, scan, s);
    }
    Filter dequeueFilter = new DequeueFilter(consumerConfig, tx);
    Filter existing = scan.getFilter();
    if (existing != null) {
        // Preserve the caller's filter by AND-ing it with the dequeue filter.
        Filter combined = new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, dequeueFilter);
        scan.setFilter(combined);
    } else {
        scan.setFilter(dequeueFilter);
    }
    return super.preScannerOpen(e, scan, s);
}
Also used: Transaction (org.apache.tephra.Transaction), Filter (org.apache.hadoop.hbase.filter.Filter), ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig), FilterList (org.apache.hadoop.hbase.filter.FilterList)
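
The combine-with-MUST_PASS_ALL pattern above is reusable anywhere a filter must be attached without clobbering one that is already set. A minimal client-side sketch of the same idea; the PrefixFilter and KeyOnlyFilter are illustrative stand-ins, not part of the CDAP code:

// Attach an extra filter to a Scan, preserving any filter already present.
static void andFilter(Scan scan, Filter extra) {
    Filter existing = scan.getFilter();
    if (existing != null) {
        // MUST_PASS_ALL is a logical AND: a cell survives only if every filter accepts it.
        scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, extra));
    } else {
        scan.setFilter(extra);
    }
}

// Usage: start with a prefix filter, then AND in a key-only filter.
Scan scan = new Scan();
scan.setFilter(new PrefixFilter(Bytes.toBytes("user")));
andFilter(scan, new KeyOnlyFilter());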

Example 37 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata.

The class Filters, method combine:

/**
   * Combines {@code overrideFilter} with {@code baseFilter} under MUST_PASS_ALL when the
   * base filter exists; otherwise returns {@code overrideFilter} alone.
   */
public static Filter combine(Filter overrideFilter, Filter baseFilter) {
    if (baseFilter != null) {
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(baseFilter);
        filterList.addFilter(overrideFilter);
        return filterList;
    }
    return overrideFilter;
}
Also used: FilterList (org.apache.hadoop.hbase.filter.FilterList)
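
A hypothetical call site for combine; the PageFilter and QualifierFilter here are illustrative choices, not taken from the CDAP code:

// Layer a paging limit over whatever filter the query already carries.
Scan scan = new Scan();
Filter base = new QualifierFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("status")));
scan.setFilter(Filters.combine(new PageFilter(100), base));
// combine returned a MUST_PASS_ALL FilterList holding both filters;
// had base been null, it would have returned the PageFilter unchanged.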

Example 38 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache.

The class TimelineFilterUtils, method createSingleColValueFiltersByRange:

/**
   * Creates two HBase {@link SingleColumnValueFilter} filters for the value
   * range represented by the start and end values and wraps them in a
   * filter list. The start and end values must not be null.
   *
   * @param <T> Describes the type of column prefix.
   * @param column Column for which single column value filter is to be created.
   * @param startValue Start value.
   * @param endValue End value.
   * @return Two single column value filters wrapped in a filter list.
   * @throws IOException if any problem is encountered while encoding value.
   */
public static <T> FilterList createSingleColValueFiltersByRange(Column<T> column, Object startValue, Object endValue) throws IOException {
    FilterList list = new FilterList();
    // Lower bound: column value >= startValue.
    Filter singleColValFilterStart = createHBaseSingleColValueFilter(
        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
        column.getValueConverter().encodeValue(startValue),
        CompareOp.GREATER_OR_EQUAL, true);
    list.addFilter(singleColValFilterStart);
    // Upper bound: column value <= endValue.
    Filter singleColValFilterEnd = createHBaseSingleColValueFilter(
        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
        column.getValueConverter().encodeValue(endValue),
        CompareOp.LESS_OR_EQUAL, true);
    list.addFilter(singleColValFilterEnd);
    return list;
}
Also used: FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter), QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter), Filter (org.apache.hadoop.hbase.filter.Filter), SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter), FilterList (org.apache.hadoop.hbase.filter.FilterList)
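
For comparison, a client-side sketch of the same range built directly with SingleColumnValueFilter; the family, qualifier, and bounds are illustrative, and treating the trailing boolean of createHBaseSingleColValueFilter as setFilterIfMissing is an assumption:

Scan scan = new Scan();
FilterList range = new FilterList(); // the no-arg constructor defaults to MUST_PASS_ALL
SingleColumnValueFilter lower = new SingleColumnValueFilter(
    Bytes.toBytes("f"), Bytes.toBytes("created"),
    CompareFilter.CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(1000L));
// Drop rows that lack the column entirely instead of letting them pass.
lower.setFilterIfMissing(true);
SingleColumnValueFilter upper = new SingleColumnValueFilter(
    Bytes.toBytes("f"), Bytes.toBytes("created"),
    CompareFilter.CompareOp.LESS_OR_EQUAL, Bytes.toBytes(2000L));
upper.setFilterIfMissing(true);
range.addFilter(lower);
range.addFilter(upper);
scan.setFilter(range);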

Example 39 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

The class HMobStore, method createScanner:

/**
   * Gets the MobStoreScanner or ReversedMobStoreScanner. In these scanners, additional seeks
   * in the mob files are performed after the seek in HBase is done.
   */
@Override
protected KeyValueScanner createScanner(Scan scan, final NavigableSet<byte[]> targetCols, long readPt, KeyValueScanner scanner) throws IOException {
    if (scanner == null) {
        if (MobUtils.isRefOnlyScan(scan)) {
            // Combine the ref-only filter with any filter the scan already carries.
            Filter refOnlyFilter = new MobReferenceOnlyFilter();
            Filter filter = scan.getFilter();
            if (filter != null) {
                scan.setFilter(new FilterList(filter, refOnlyFilter));
            } else {
                scan.setFilter(refOnlyFilter);
            }
        }
        scanner = scan.isReversed()
            ? new ReversedMobStoreScanner(this, getScanInfo(), scan, targetCols, readPt)
            : new MobStoreScanner(this, getScanInfo(), scan, targetCols, readPt);
    }
    return scanner;
}
Also used: Filter (org.apache.hadoop.hbase.filter.Filter), FilterList (org.apache.hadoop.hbase.filter.FilterList)
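
Note that new FilterList(filter, refOnlyFilter) relies on the varargs constructor, whose operator defaults to MUST_PASS_ALL. A minimal sketch of the equivalence, with illustrative filters:

Filter a = new FirstKeyOnlyFilter();
Filter b = new KeyOnlyFilter();
// These two lists evaluate identically; the varargs form implicitly ANDs its filters.
FilterList implicitAnd = new FilterList(a, b);
FilterList explicitAnd = new FilterList(FilterList.Operator.MUST_PASS_ALL, a, b);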

Example 40 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

The class TestFromClientSide, method testEmptyFilterList:

@Test
public void testEmptyFilterList() throws Exception {
    // Test Initialization.
    final TableName tableName = TableName.valueOf(name.getMethodName());
    Table table = TEST_UTIL.createTable(tableName, FAMILY);
    // Insert a single row.
    Put put = new Put(Bytes.toBytes("row"));
    put.addColumn(FAMILY, QUALIFIER, VALUE);
    table.put(put);
    List<Result> scanResults = new LinkedList<>();
    Scan scan = new Scan();
    scan.setFilter(new FilterList());
    try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
            scanResults.add(r);
        }
    }
    assertEquals(1, scanResults.size());
    Get g = new Get(Bytes.toBytes("row"));
    g.setFilter(new FilterList());
    Result getResult = table.get(g);
    Result scanResult = scanResults.get(0);
    assertEquals(scanResult.rawCells().length, getResult.rawCells().length);
    for (int i = 0; i != scanResult.rawCells().length; ++i) {
        Cell scanCell = scanResult.rawCells()[i];
        Cell getCell = getResult.rawCells()[i];
        assertEquals(0, Bytes.compareTo(CellUtil.cloneRow(scanCell), CellUtil.cloneRow(getCell)));
        assertEquals(0, Bytes.compareTo(CellUtil.cloneFamily(scanCell), CellUtil.cloneFamily(getCell)));
        assertEquals(0, Bytes.compareTo(CellUtil.cloneQualifier(scanCell), CellUtil.cloneQualifier(getCell)));
        assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(scanCell), CellUtil.cloneValue(getCell)));
    }
}
Also used: TableName (org.apache.hadoop.hbase.TableName), FilterList (org.apache.hadoop.hbase.filter.FilterList), Cell (org.apache.hadoop.hbase.Cell), LinkedList (java.util.LinkedList), MultiRowMutationEndpoint (org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint), Test (org.junit.Test)
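
The contract this test pins down is that an empty FilterList filters nothing: with no sub-filters to consult, MUST_PASS_ALL is vacuously satisfied and every cell passes. A minimal sketch of that expectation:

// A Scan carrying an empty FilterList should behave exactly like an unfiltered Scan.
Scan plain = new Scan();
Scan withEmptyList = new Scan();
withEmptyList.setFilter(new FilterList());
// The test above asserts that both the Scan and an analogous Get return the same cells.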

Aggregations

FilterList (org.apache.hadoop.hbase.filter.FilterList): 68
Filter (org.apache.hadoop.hbase.filter.Filter): 36
Scan (org.apache.hadoop.hbase.client.Scan): 16
QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter): 10
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 10
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 10
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 9
Test (org.junit.Test): 8
ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig): 7
ArrayList (java.util.ArrayList): 7
FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter): 7
Transaction (org.apache.tephra.Transaction): 7
IOException (java.io.IOException): 6
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 6
Result (org.apache.hadoop.hbase.client.Result): 5
PageFilter (org.apache.hadoop.hbase.filter.PageFilter): 5
Cell (org.apache.hadoop.hbase.Cell): 4
TableName (org.apache.hadoop.hbase.TableName): 4
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 4
FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter): 4