Use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata:
in the class DequeueScanObserver, the method preScannerOpen.
/**
 * Attaches a {@link DequeueFilter} to the scan when both dequeue attributes
 * (consumer config and transaction) are present, AND-ing it with any filter
 * the caller already set so existing filtering is preserved.
 */
@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan,
    RegionScanner s) throws IOException {
  ConsumerConfig consumerConfig = DequeueScanAttributes.getConsumerConfig(scan);
  Transaction tx = DequeueScanAttributes.getTx(scan);
  if (consumerConfig != null && tx != null) {
    Filter dequeueFilter = new DequeueFilter(consumerConfig, tx);
    Filter current = scan.getFilter();
    // Both filters must pass when the caller supplied one of its own.
    scan.setFilter(current == null
        ? dequeueFilter
        : new FilterList(FilterList.Operator.MUST_PASS_ALL, current, dequeueFilter));
  }
  return super.preScannerOpen(e, scan, s);
}
Use of org.apache.hadoop.hbase.filter.FilterList in project cdap by caskdata:
in the class Filters, the method combine.
/**
 * Combines {@code overrideFilter} with {@code baseFilter} so that both must
 * pass ({@code MUST_PASS_ALL}). When {@code baseFilter} is {@code null},
 * {@code overrideFilter} is returned unchanged.
 */
public static Filter combine(Filter overrideFilter, Filter baseFilter) {
  if (baseFilter == null) {
    return overrideFilter;
  }
  FilterList combined = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  combined.addFilter(baseFilter);
  combined.addFilter(overrideFilter);
  return combined;
}
Use of org.apache.hadoop.hbase.filter.FilterList in project hadoop by apache:
in the class TimelineFilterUtils, the method createSingleColValueFiltersByRange.
/**
 * Builds a {@link FilterList} holding two {@link SingleColumnValueFilter}s
 * that bound the given column's value to the inclusive range
 * [startValue, endValue]. Start and end value must not be null.
 *
 * @param <T> type of the column prefix.
 * @param column column the range filter applies to.
 * @param startValue inclusive lower bound.
 * @param endValue inclusive upper bound.
 * @return a filter list wrapping the two single-column value filters.
 * @throws IOException if encoding either bound fails.
 */
public static <T> FilterList createSingleColValueFiltersByRange(Column<T> column,
    Object startValue, Object endValue) throws IOException {
  // Both filters target the same column; look the coordinates up once.
  byte[] family = column.getColumnFamilyBytes();
  byte[] qualifier = column.getColumnQualifierBytes();
  FilterList rangeFilters = new FilterList();
  rangeFilters.addFilter(createHBaseSingleColValueFilter(family, qualifier,
      column.getValueConverter().encodeValue(startValue), CompareOp.GREATER_OR_EQUAL, true));
  rangeFilters.addFilter(createHBaseSingleColValueFilter(family, qualifier,
      column.getValueConverter().encodeValue(endValue), CompareOp.LESS_OR_EQUAL, true));
  return rangeFilters;
}
Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache:
in the class HMobStore, the method createScanner.
/**
 * Creates the scanner for this mob store: a MobStoreScanner (or a
 * ReversedMobStoreScanner for reversed scans) that performs additional seeks
 * into the mob files after the regular HBase seek. For ref-only scans, a
 * {@code MobReferenceOnlyFilter} is AND-ed onto any existing scan filter.
 */
@Override
protected KeyValueScanner createScanner(Scan scan, final NavigableSet<byte[]> targetCols,
    long readPt, KeyValueScanner scanner) throws IOException {
  if (scanner != null) {
    // A scanner was already supplied; reuse it untouched.
    return scanner;
  }
  if (MobUtils.isRefOnlyScan(scan)) {
    Filter refOnly = new MobReferenceOnlyFilter();
    Filter existing = scan.getFilter();
    scan.setFilter(existing == null ? refOnly : new FilterList(existing, refOnly));
  }
  if (scan.isReversed()) {
    return new ReversedMobStoreScanner(this, getScanInfo(), scan, targetCols, readPt);
  }
  return new MobStoreScanner(this, getScanInfo(), scan, targetCols, readPt);
}
Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache:
in the class TestFromClientSide, the method testEmptyFilterList.
/**
 * Verifies that an empty {@link FilterList} acts as a pass-through: both a
 * scan and a get using it return the inserted row, and the cells they return
 * match field-for-field.
 */
@Test
public void testEmptyFilterList() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  Table table = TEST_UTIL.createTable(tableName, FAMILY);
  // A single row is enough; the empty filter list must not exclude it.
  Put put = new Put(Bytes.toBytes("row"));
  put.addColumn(FAMILY, QUALIFIER, VALUE);
  table.put(put);

  Scan scan = new Scan();
  scan.setFilter(new FilterList());
  List<Result> scanResults = new LinkedList<>();
  try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result result : scanner) {
      scanResults.add(result);
    }
  }
  assertEquals(1, scanResults.size());

  Get get = new Get(Bytes.toBytes("row"));
  get.setFilter(new FilterList());
  Result getResult = table.get(get);
  Result scanResult = scanResults.get(0);
  assertEquals(scanResult.rawCells().length, getResult.rawCells().length);
  // Every cell component returned by the scan must match the get's.
  for (int idx = 0; idx < scanResult.rawCells().length; idx++) {
    Cell fromScan = scanResult.rawCells()[idx];
    Cell fromGet = getResult.rawCells()[idx];
    assertEquals(0, Bytes.compareTo(CellUtil.cloneRow(fromScan), CellUtil.cloneRow(fromGet)));
    assertEquals(0, Bytes.compareTo(CellUtil.cloneFamily(fromScan), CellUtil.cloneFamily(fromGet)));
    assertEquals(0,
        Bytes.compareTo(CellUtil.cloneQualifier(fromScan), CellUtil.cloneQualifier(fromGet)));
    assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(fromScan), CellUtil.cloneValue(fromGet)));
  }
}
Aggregations