Use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.
In the class TestFromClientSide, the method testKeyOnlyFilterWithReverseScan:
@Test
public void testKeyOnlyFilterWithReverseScan() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  Table ht = TEST_UTIL.createTable(tableName, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS =
      { Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
        Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
        Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
        Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
        Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  // Load ten rows, one column per row.
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.setReversed(true);
  scan.addFamily(FAMILY);
  // lenAsVal=true: each cell's value is replaced by a 4-byte int holding its original length.
  Filter filter = new KeyOnlyFilter(true);
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int count = 0;
  for (Result result : scanner) {
    assertEquals(1, result.size());
    assertEquals(Bytes.SIZEOF_INT, result.rawCells()[0].getValueLength());
    assertEquals(VALUE.length, Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])));
    count++;
  }
  assertEquals(10, count);
  scanner.close();
  ht.close();
}
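KeyOnlyFilter strips values server-side before results cross the wire, which is what makes the length assertions above work. A minimal sketch of the two constructor variants; the helper name keysOnlyScan is hypothetical, not part of the test:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

// Hypothetical helper: build a scan that returns keys without real values.
static Scan keysOnlyScan(boolean lenAsVal) {
  Scan scan = new Scan();
  // lenAsVal=true replaces each value with a 4-byte int holding the
  // original value's length (hence the Bytes.SIZEOF_INT assertion above);
  // lenAsVal=false (the no-arg constructor) returns empty values instead.
  scan.setFilter(new KeyOnlyFilter(lenAsVal));
  return scan;
}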
Use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.
In the class TestFromClientSide, the method testFilters:
@Test
public void testFilters() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  Table ht = TEST_UTIL.createTable(tableName, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS =
      { Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
        Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
        Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
        Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
        Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    // Skip the write-ahead log; durability is irrelevant for this test data.
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.addFamily(FAMILY);
  // Keep only cells whose qualifier matches the regex, i.e. col1 through col5.
  Filter filter = new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int expectedIndex = 1;
  for (Result result : scanner) {
    assertEquals(1, result.size());
    assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[expectedIndex]));
    assertTrue(Bytes.equals(CellUtil.cloneQualifier(result.rawCells()[0]), QUALIFIERS[expectedIndex]));
    expectedIndex++;
  }
  // Rows 1 through 5 matched, so the index advanced to 6.
  assertEquals(6, expectedIndex);
  scanner.close();
  ht.close();
}
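ResultScanner holds server-side resources and implements Closeable, so closing it deterministically matters. A sketch of the same QualifierFilter scan written with try-with-resources; the helper name countMatchingQualifiers is hypothetical:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;

// Hypothetical helper: count results whose qualifier matches a regex.
static int countMatchingQualifiers(Table table, String regex) throws IOException {
  Scan scan = new Scan();
  scan.setFilter(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(regex)));
  int count = 0;
  // try-with-resources closes the scanner even if iteration throws.
  try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result ignored : scanner) {
      count++;
    }
  }
  return count;
}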
Use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.
In the class TestFromClientSide, the method testFiltersWithReverseScan:
@Test
public void testFiltersWithReverseScan() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  Table ht = TEST_UTIL.createTable(tableName, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS =
      { Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
        Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
        Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
        Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
        Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.setReversed(true);
  scan.addFamily(FAMILY);
  Filter filter = new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  // The reverse scan returns col5 first, so walk the expected index downward.
  int expectedIndex = 5;
  for (Result result : scanner) {
    assertEquals(1, result.size());
    Cell c = result.rawCells()[0];
    assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
        ROWS[expectedIndex], 0, ROWS[expectedIndex].length));
    assertTrue(Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(),
        QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length));
    expectedIndex--;
  }
  assertEquals(0, expectedIndex);
  scanner.close();
  ht.close();
}
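For reverse scans, row-key bounds are interpreted in descending order. A minimal sketch under that assumption, using the same 1.x-era Scan API as the tests above; the helper name reverseScan and its parameters are hypothetical:

import org.apache.hadoop.hbase.client.Scan;

// Sketch: with setReversed(true) the scan walks rows in descending order,
// so the start row must sort after the stop row.
static Scan reverseScan(byte[] highRow, byte[] lowRow) {
  Scan scan = new Scan();
  scan.setReversed(true);
  scan.setStartRow(highRow); // inclusive upper bound; scanning starts here
  scan.setStopRow(lowRow);   // exclusive lower bound
  return scan;
}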
Use of org.apache.hadoop.hbase.filter.Filter in project hive by apache.
In the class HBaseScanRange, the method setup:
public void setup(Scan scan, Configuration conf) throws Exception {
  if (startRow != null) {
    scan.setStartRow(startRow);
  }
  if (stopRow != null) {
    scan.setStopRow(stopRow);
  }
  if (filterDescs.isEmpty()) {
    return;
  }
  // A single filter can be set directly; no FilterList wrapper is needed.
  if (filterDescs.size() == 1) {
    scan.setFilter(filterDescs.get(0).toFilter(conf));
    return;
  }
  // Multiple filters are combined with a FilterList (defaults to MUST_PASS_ALL).
  List<Filter> filters = new ArrayList<>();
  for (FilterDesc filter : filterDescs) {
    filters.add(filter.toFilter(conf));
  }
  scan.setFilter(new FilterList(filters));
}
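The single-argument FilterList constructor used above defaults to Operator.MUST_PASS_ALL (logical AND across the filters). A sketch of choosing the operator explicitly; the helper name combineFilters is hypothetical:

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;

// Hypothetical helper: AND the filters by default, OR them when anyMatch is true.
static Filter combineFilters(boolean anyMatch, Filter... filters) {
  FilterList.Operator op =
      anyMatch ? FilterList.Operator.MUST_PASS_ONE : FilterList.Operator.MUST_PASS_ALL;
  return new FilterList(op, filters);
}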
Use of org.apache.hadoop.hbase.filter.Filter in project hive by apache.
In the class HBaseReadWrite, the method scanRoles:
private List<Role> scanRoles(String regex) throws IOException {
  Filter filter = null;
  if (regex != null) {
    // Restrict the scan to role rows whose key matches the regex.
    filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
  }
  Iterator<Result> iter = scan(ROLE_TABLE, null, null, CATALOG_CF, CATALOG_COL, filter);
  List<Role> roles = new ArrayList<>();
  while (iter.hasNext()) {
    Result result = iter.next();
    roles.add(HBaseUtils.deserializeRole(result.getRow(), result.getValue(CATALOG_CF, CATALOG_COL)));
  }
  return roles;
}
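The scan() call above is a private helper of HBaseReadWrite. A rough sketch of how such a RowFilter would be attached to a plain client Scan; the helper name rowRegexScan is hypothetical:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;

// Hypothetical helper: restrict a scan to rows whose key matches a regex.
static Scan rowRegexScan(String regex) {
  Scan scan = new Scan();
  if (regex != null) { // no filter set means every row matches
    scan.setFilter(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(regex)));
  }
  return scan;
}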