Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.
Class TestScannersWithFilters, method testFilterList:
@Test
public void testFilterList() throws Exception {
  // Test getting a single row, single key using Row, Qualifier, and Value
  // regular expression and substring filters
  // Use must pass all
  List<Filter> filters = new ArrayList<>(3);
  filters.add(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".+-2")));
  filters.add(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(".+-2")));
  filters.add(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("One")));
  Filter f = new FilterList(Operator.MUST_PASS_ALL, filters);
  Scan s = new Scan();
  s.addFamily(FAMILIES[0]);
  s.setFilter(f);
  KeyValue[] kvs = { new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]) };
  verifyScanFull(s, kvs);

  // Test getting everything with a MUST_PASS_ONE filter including row, qf,
  // val, regular expression and substring filters
  filters.clear();
  filters.add(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".+Two.+")));
  filters.add(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(".+-2")));
  filters.add(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("One")));
  f = new FilterList(Operator.MUST_PASS_ONE, filters);
  s = new Scan();
  s.setFilter(f);
  verifyScanNoEarlyOut(s, numRows, colsPerRow);
}
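Newer HBase client releases (2.x) deprecate CompareFilter.CompareOp in favor of org.apache.hadoop.hbase.CompareOperator. A minimal sketch of the same MUST_PASS_ALL combination against that API, assuming the usual org.apache.hadoop.hbase.filter imports and the FAMILIES constant from the test above:

// Sketch only: the same MUST_PASS_ALL list built with the non-deprecated CompareOperator enum.
List<Filter> filters = new ArrayList<>(3);
filters.add(new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(".+-2")));
filters.add(new QualifierFilter(CompareOperator.EQUAL, new RegexStringComparator(".+-2")));
filters.add(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("One")));
Scan scan = new Scan();
scan.addFamily(FAMILIES[0]);
scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, filters));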
Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.
Class TestFromClientSide, method buildScanner:
private ResultScanner buildScanner(String keyPrefix, String value, Table ht) throws IOException {
  // OurFilterList allFilters = new OurFilterList();
  FilterList allFilters = new FilterList();
  allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
  SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
    Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
  filter.setFilterIfMissing(true);
  allFilters.addFilter(filter);
  // allFilters.addFilter(new
  // RowExcludingSingleColumnValueFilter(Bytes.toBytes("trans-tags"),
  // Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)));
  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("trans-blob"));
  scan.addFamily(Bytes.toBytes("trans-type"));
  scan.addFamily(Bytes.toBytes("trans-date"));
  scan.addFamily(Bytes.toBytes("trans-tags"));
  scan.addFamily(Bytes.toBytes("trans-group"));
  scan.setFilter(allFilters);
  return ht.getScanner(scan);
}
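The returned ResultScanner should be drained and closed by the caller. A minimal usage sketch (the keyPrefix and value arguments here are illustrative, not from the original test):

int rowCount = 0;
// ResultScanner is Iterable<Result> and Closeable, so try-with-resources works here.
try (ResultScanner scanner = buildScanner("mykey", "value-1", ht)) {
  for (Result result : scanner) {
    rowCount++;  // each Result holds the cells of one matching row across the requested families
  }
}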
Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.
Class TestFromClientSide, method testFilterAllRecords:
@Test
public void testFilterAllRecords() throws IOException {
  Scan scan = new Scan();
  scan.setBatch(1);
  scan.setCaching(1);
  // Filter out all records: with an empty stop row, every row key is past the stop,
  // so the InclusiveStopFilter ends the scan immediately and nothing is returned.
  scan.setFilter(new FilterList(new FirstKeyOnlyFilter(), new InclusiveStopFilter(new byte[0])));
  try (Table table = TEST_UTIL.getConnection().getTable(TableName.NAMESPACE_TABLE_NAME)) {
    try (ResultScanner s = table.getScanner(scan)) {
      assertNull(s.next());
    }
  }
}
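The FilterList(Filter...) constructor used above defaults to Operator.MUST_PASS_ALL; spelling the operator out is equivalent and makes the intent of the test explicit. A minimal sketch:

// Sketch: same filter combination with the default MUST_PASS_ALL operator written explicitly.
scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL,
  new FirstKeyOnlyFilter(), new InclusiveStopFilter(new byte[0])));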
Use of org.apache.hadoop.hbase.filter.FilterList in project hive by apache.
Class HBaseScanRange, method setup:
public void setup(Scan scan, Configuration conf) throws Exception {
  if (startRow != null) {
    scan.setStartRow(startRow);
  }
  if (stopRow != null) {
    scan.setStopRow(stopRow);
  }
  if (filterDescs.isEmpty()) {
    return;
  }
  if (filterDescs.size() == 1) {
    scan.setFilter(filterDescs.get(0).toFilter(conf));
    return;
  }
  List<Filter> filters = new ArrayList<Filter>();
  for (FilterDesc filter : filterDescs) {
    filters.add(filter.toFilter(conf));
  }
  scan.setFilter(new FilterList(filters));
}
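On HBase 2.x clients, Scan.setStartRow and Scan.setStopRow are deprecated in favor of withStartRow and withStopRow, and FilterList(List<Filter>) keeps the default MUST_PASS_ALL operator. A hedged sketch of the same setup written against the newer API, reusing the field and type names from the method above:

// Sketch only: equivalent range/filter setup on an HBase 2.x client.
if (startRow != null) {
  scan.withStartRow(startRow);  // inclusive start, replaces the deprecated setStartRow
}
if (stopRow != null) {
  scan.withStopRow(stopRow);    // exclusive stop, replaces the deprecated setStopRow
}
if (!filterDescs.isEmpty()) {
  List<Filter> filters = new ArrayList<>();
  for (FilterDesc desc : filterDescs) {
    filters.add(desc.toFilter(conf));
  }
  // A single filter is set directly; multiple filters are ANDed via the default MUST_PASS_ALL.
  scan.setFilter(filters.size() == 1 ? filters.get(0) : new FilterList(filters));
}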
Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.
Class TestHRegion, method buildScanner:
private InternalScanner buildScanner(String keyPrefix, String value, HRegion r) throws IOException {
  // Defaults FilterList.Operator.MUST_PASS_ALL.
  FilterList allFilters = new FilterList();
  allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
  // Only return rows where this column value exists in the row.
  SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
    Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
  filter.setFilterIfMissing(true);
  allFilters.addFilter(filter);
  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("trans-blob"));
  scan.addFamily(Bytes.toBytes("trans-type"));
  scan.addFamily(Bytes.toBytes("trans-date"));
  scan.addFamily(Bytes.toBytes("trans-tags"));
  scan.addFamily(Bytes.toBytes("trans-group"));
  scan.setFilter(allFilters);
  return r.getScanner(scan);
}
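Unlike the client-side ResultScanner, an InternalScanner is drained by passing a cell list to next(List<Cell>), which returns whether more rows remain. A minimal consumption sketch (the argument values and the HRegion reference r are illustrative):

// Sketch: drain an InternalScanner row by row on the region-server side.
InternalScanner scanner = buildScanner("row-prefix", "value-x", r);
try {
  List<Cell> cells = new ArrayList<>();
  boolean moreRows;
  do {
    moreRows = scanner.next(cells);  // fills 'cells' with the cells of the next matching row
    // ... inspect cells here ...
    cells.clear();
  } while (moreRows);
} finally {
  scanner.close();
}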