Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
The class TestFilter, method testNestedFilterListWithSCVF.
// TODO: intentionally disabled?
public void testNestedFilterListWithSCVF() throws IOException {
  byte[] columnStatus = Bytes.toBytes("S");
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  Region testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(),
    TEST_UTIL.getConfiguration(), htd);
  for (int i = 0; i < 10; i++) {
    Put p = new Put(Bytes.toBytes("row" + i));
    p.setDurability(Durability.SKIP_WAL);
    p.addColumn(FAMILIES[0], columnStatus, Bytes.toBytes(i % 2));
    testRegion.put(p);
  }
  testRegion.flush(true);
  // 1. got rows > "row4"
  Filter rowFilter = new RowFilter(CompareOp.GREATER, new BinaryComparator(Bytes.toBytes("row4")));
  Scan s1 = new Scan();
  s1.setFilter(rowFilter);
  InternalScanner scanner = testRegion.getScanner(s1);
  List<Cell> results = new ArrayList<>();
  int i = 5;
  for (boolean done = true; done; i++) {
    done = scanner.next(results);
    assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
    assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
    results.clear();
  }
  // 2. got rows <= "row4" and S=0
  FilterList subFilterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  Filter subFilter1 =
    new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes("row4")));
  subFilterList.addFilter(subFilter1);
  Filter subFilter2 =
    new SingleColumnValueFilter(FAMILIES[0], columnStatus, CompareOp.EQUAL, Bytes.toBytes(0));
  subFilterList.addFilter(subFilter2);
  s1 = new Scan();
  s1.setFilter(subFilterList);
  scanner = testRegion.getScanner(s1);
  results = new ArrayList<>();
  for (i = 0; i <= 4; i += 2) {
    scanner.next(results);
    assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
    assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
    results.clear();
  }
  assertFalse(scanner.next(results));
  // 3. let's begin to verify nested filter list
  // 3.1 add rowFilter, then add subFilterList
  FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  filterList.addFilter(rowFilter);
  filterList.addFilter(subFilterList);
  s1 = new Scan();
  s1.setFilter(filterList);
  scanner = testRegion.getScanner(s1);
  results = new ArrayList<>();
  for (i = 0; i <= 4; i += 2) {
    scanner.next(results);
    assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
    assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
    results.clear();
  }
  for (i = 5; i <= 9; i++) {
    scanner.next(results);
    assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
    assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
    results.clear();
  }
  assertFalse(scanner.next(results));
  // 3.2 MAGIC here! add subFilterList first, then add rowFilter
  filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  filterList.addFilter(subFilterList);
  filterList.addFilter(rowFilter);
  s1 = new Scan();
  s1.setFilter(filterList);
  scanner = testRegion.getScanner(s1);
  results = new ArrayList<>();
  for (i = 0; i <= 4; i += 2) {
    scanner.next(results);
    assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
    assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
    results.clear();
  }
  for (i = 5; i <= 9; i++) {
    scanner.next(results);
    assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
    assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
    results.clear();
  }
  assertFalse(scanner.next(results));
  WAL wal = ((HRegion) testRegion).getWAL();
  ((HRegion) testRegion).close();
  wal.close();
}
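The "MAGIC" comment marks the point of the test: under MUST_PASS_ONE (logical OR) semantics, the order in which the nested subFilterList and the rowFilter are added must not change the result, so both orderings are asserted to return all ten rows. For orientation, here is a minimal client-side sketch of the same nested FilterList driven through the public Table API instead of a raw region scanner; the connection setup, table name "t1", and family name "f1" are illustrative assumptions, not part of the test above.

// Sketch only: assumes a running cluster and an existing table "t1" with family "f1".
Configuration conf = HBaseConfiguration.create();
try (Connection conn = ConnectionFactory.createConnection(conf);
     Table table = conn.getTable(TableName.valueOf("t1"))) {
  FilterList subList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  subList.addFilter(new RowFilter(CompareOp.LESS_OR_EQUAL,
    new BinaryComparator(Bytes.toBytes("row4"))));
  subList.addFilter(new SingleColumnValueFilter(Bytes.toBytes("f1"), Bytes.toBytes("S"),
    CompareOp.EQUAL, Bytes.toBytes(0)));
  FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  filterList.addFilter(subList);
  filterList.addFilter(new RowFilter(CompareOp.GREATER,
    new BinaryComparator(Bytes.toBytes("row4"))));
  Scan scan = new Scan();
  scan.setFilter(filterList);
  try (ResultScanner rs = table.getScanner(scan)) {
    for (Result r : rs) {
      // Expect rows <= "row4" with S=0, plus every row > "row4".
      System.out.println(Bytes.toString(r.getRow()));
    }
  }
}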
Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
The class TestFilter, method testNoFilter.
@Test
public void testNoFilter() throws Exception {
  // No filter
  long expectedRows = this.numRows;
  long expectedKeys = this.colsPerRow;
  // Both families
  Scan s = new Scan();
  verifyScan(s, expectedRows, expectedKeys);
  // One family
  s = new Scan();
  s.addFamily(FAMILIES[0]);
  verifyScan(s, expectedRows, expectedKeys / 2);
}
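The second scan expects half the keys because the test table spreads its colsPerRow columns evenly across two families; addFamily restricts the scan to one of them. A minimal sketch of that restriction, with an illustrative family name:

Scan s = new Scan();
s.addFamily(Bytes.toBytes("f1")); // only return cells from family "f1"
// s.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("q1")) would narrow to a single column instead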
Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
The class TestFilter, method testKeyOnlyFilter.
@Test
public void testKeyOnlyFilter() throws Exception {
  // KVs in first 6 rows
  KeyValue[] expectedKVs = {
    // testRowOne-0
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
    // testRowOne-2
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
    // testRowOne-3
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
    // testRowTwo-0
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    // testRowTwo-2
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    // testRowTwo-3
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) };
  // Grab all 6 rows
  long expectedRows = 6;
  long expectedKeys = this.colsPerRow;
  for (boolean useLen : new boolean[] { false, true }) {
    Scan s = new Scan();
    s.setFilter(new KeyOnlyFilter(useLen));
    verifyScan(s, expectedRows, expectedKeys);
    verifyScanFullNoValues(s, expectedKVs, useLen);
  }
}
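KeyOnlyFilter strips cell values on the server side, so only row keys, column names, and timestamps come back; with lenAsVal=true the value is instead replaced by a 4-byte integer holding the original value's length, which is what verifyScanFullNoValues checks against expectedKVs for both settings. A minimal client-side sketch of reading that length back, reusing the hypothetical table handle from the earlier sketch:

Scan s = new Scan();
s.setFilter(new KeyOnlyFilter(true)); // true: value becomes the original value's length
try (ResultScanner rs = table.getScanner(s)) {
  for (Result r : rs) {
    for (Cell c : r.rawCells()) {
      // With lenAsVal=true each value is a 4-byte int; with false it is empty.
      int originalValueLength = Bytes.toInt(CellUtil.cloneValue(c));
    }
  }
}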
Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
The class TestFilter, method testWhileMatchFilterWithFilterKeyValue.
/**
 * Tests that the {@link WhileMatchFilter} works in combination with a
 * {@link Filter} that uses the {@link Filter#filterKeyValue(Cell)} method.
 *
 * See HBASE-2258.
 *
 * @throws Exception
 */
@Test
public void testWhileMatchFilterWithFilterKeyValue() throws Exception {
  Scan s = new Scan();
  WhileMatchFilter filter = new WhileMatchFilter(new SingleColumnValueFilter(FAMILIES[0],
    QUALIFIERS_ONE[0], CompareOp.EQUAL, Bytes.toBytes("foo")));
  s.setFilter(filter);
  InternalScanner scanner = this.region.getScanner(s);
  while (true) {
    ArrayList<Cell> values = new ArrayList<>();
    boolean isMoreResults = scanner.next(values);
    assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
    if (!isMoreResults) {
      break;
    }
  }
}
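The loop above relies on WhileMatchFilter's defining property: the first time its wrapped filter rejects anything, filterAllRemaining() turns true and the whole scan ends, rather than merely skipping that row. Since no stored value equals "foo", the wrapped SingleColumnValueFilter rejects immediately and the assertion holds from the very first next() call. A minimal sketch of the typical early-out use, stopping a scan at a row-key boundary (the table layout and boundary "row-99" are illustrative):

// End the entire scan once row keys compare greater than "row-99".
Filter earlyOut = new WhileMatchFilter(new RowFilter(CompareOp.LESS_OR_EQUAL,
  new BinaryComparator(Bytes.toBytes("row-99"))));
Scan s = new Scan();
s.setFilter(earlyOut);
// Without the wrapper, a plain RowFilter would keep scanning to the end of the
// table and merely exclude non-matching rows; WhileMatchFilter stops instead.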
Use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.
The class TestFilter, method testFilterListWithSingleColumnValueFilter.
@Test
public void testFilterListWithSingleColumnValueFilter() throws IOException {
  // Test for HBASE-3191
  // Scan using SingleColumnValueFilter
  SingleColumnValueFilter f1 =
    new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS_ONE[0], CompareOp.EQUAL, VALUES[0]);
  f1.setFilterIfMissing(true);
  Scan s1 = new Scan();
  s1.addFamily(FAMILIES[0]);
  s1.setFilter(f1);
  KeyValue[] kvs1 = {
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]) };
  verifyScanNoEarlyOut(s1, 3, 3);
  verifyScanFull(s1, kvs1);
  // Scan using another SingleColumnValueFilter, expect disjoint result
  SingleColumnValueFilter f2 =
    new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS_TWO[0], CompareOp.EQUAL, VALUES[1]);
  f2.setFilterIfMissing(true);
  Scan s2 = new Scan();
  s2.addFamily(FAMILIES[0]);
  s2.setFilter(f2);
  KeyValue[] kvs2 = {
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]) };
  verifyScanNoEarlyOut(s2, 3, 3);
  verifyScanFull(s2, kvs2);
  // Scan, ORing the two previous filters, expect unified result
  FilterList f = new FilterList(Operator.MUST_PASS_ONE);
  f.addFilter(f1);
  f.addFilter(f2);
  Scan s = new Scan();
  s.addFamily(FAMILIES[0]);
  s.setFilter(f);
  KeyValue[] kvs = {
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]) };
  verifyScanNoEarlyOut(s, 6, 3);
  verifyScanFull(s, kvs);
}
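Two details make this test work: setFilterIfMissing(true) tells each SingleColumnValueFilter to drop rows that lack the tested column entirely (by default such rows pass through), which is why the two individual scans produce disjoint row sets; and MUST_PASS_ONE then ORs the filters back together into the union verified at the end. A minimal sketch of the same pattern, with illustrative family, qualifier, and value names:

SingleColumnValueFilter a = new SingleColumnValueFilter(Bytes.toBytes("f1"),
  Bytes.toBytes("qa"), CompareOp.EQUAL, Bytes.toBytes("v1"));
a.setFilterIfMissing(true); // rows without f1:qa are excluded, not passed through
SingleColumnValueFilter b = new SingleColumnValueFilter(Bytes.toBytes("f1"),
  Bytes.toBytes("qb"), CompareOp.EQUAL, Bytes.toBytes("v2"));
b.setFilterIfMissing(true);
FilterList union = new FilterList(FilterList.Operator.MUST_PASS_ONE); // logical OR
union.addFilter(a);
union.addFilter(b);
Scan s = new Scan();
s.setFilter(union); // rows matching either condition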