Use of org.apache.hadoop.hbase.filter.SubstringComparator in project hbase by apache.
The class TestScannersWithFilters, method testFilterList.
@Test
public void testFilterList() throws Exception {
// Test getting a single row, single key using Row, Qualifier, and Value
// regular expression and substring filters
// Use must pass all
List<Filter> filters = new ArrayList<>(3);
filters.add(new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(".+-2")));
filters.add(new QualifierFilter(CompareOperator.EQUAL, new RegexStringComparator(".+-2")));
filters.add(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("One")));
Filter f = new FilterList(Operator.MUST_PASS_ALL, filters);
Scan s = new Scan();
s.addFamily(FAMILIES[0]);
s.setFilter(f);
KeyValue[] kvs = { new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]) };
verifyScanFull(s, kvs);
// Test getting everything with a MUST_PASS_ONE filter including row, qf,
// val, regular expression and substring filters
filters.clear();
filters.add(new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(".+Two.+")));
filters.add(new QualifierFilter(CompareOperator.EQUAL, new RegexStringComparator(".+-2")));
filters.add(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("One")));
f = new FilterList(Operator.MUST_PASS_ONE, filters);
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, numRows, colsPerRow);
}
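The test above composes regex and substring comparators inside a FilterList, first with MUST_PASS_ALL (every filter must accept a cell) and then with MUST_PASS_ONE (a single match is enough). The standalone sketch below is not taken from the project: it assumes an open Connection named conn and a hypothetical table "t1" with column family "f", and drives the same MUST_PASS_ALL composition from the client API.
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.*;
import org.apache.hadoop.hbase.util.Bytes;
// Sketch only: "t1" and "f" are hypothetical names, conn is an open Connection.
static void scanMustPassAll(Connection conn) throws IOException {
    // A cell is returned only when both the row regex and the value substring match.
    List<Filter> filters = new ArrayList<>();
    filters.add(new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(".+-2")));
    filters.add(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("One")));
    Filter f = new FilterList(FilterList.Operator.MUST_PASS_ALL, filters);
    Scan scan = new Scan().addFamily(Bytes.toBytes("f")).setFilter(f);
    try (Table table = conn.getTable(TableName.valueOf("t1"));
         ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
            System.out.println(Bytes.toString(r.getRow()));
        }
    }
}
Swapping FilterList.Operator.MUST_PASS_ALL for MUST_PASS_ONE turns the conjunction into a disjunction, which is exactly the difference between the two halves of the test above.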
Use of org.apache.hadoop.hbase.filter.SubstringComparator in project hbase by apache.
The class TestHRegion, method testGetWithFilter.
@Test
public void testGetWithFilter() throws IOException, InterruptedException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] col1 = Bytes.toBytes("col1");
byte[] value1 = Bytes.toBytes("value1");
byte[] value2 = Bytes.toBytes("value2");
final int maxVersions = 3;
TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf("testFilterAndColumnTracker"))
  .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam1).setMaxVersions(maxVersions).build())
  .build();
ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null, MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
RegionInfo info = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + ".log");
final WAL wal = HBaseTestingUtil.createWal(TEST_UTIL.getConfiguration(), logDir, info);
this.region = TEST_UTIL.createLocalHRegion(info, CONF, tableDescriptor, wal);
// Put 4 versions to the memstore
long ts = 0;
Put put = new Put(row1, ts);
put.addColumn(fam1, col1, value1);
region.put(put);
put = new Put(row1, ts + 1);
put.addColumn(fam1, col1, Bytes.toBytes("filter1"));
region.put(put);
put = new Put(row1, ts + 2);
put.addColumn(fam1, col1, Bytes.toBytes("filter2"));
region.put(put);
put = new Put(row1, ts + 3);
put.addColumn(fam1, col1, value2);
region.put(put);
Get get = new Get(row1);
get.readAllVersions();
Result res = region.get(get);
// Get 3 versions; the oldest version has gone from the user view
assertEquals(maxVersions, res.size());
get.setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value")));
res = region.get(get);
// When using a value filter, the oldest version should still be gone from the user view and
// it should only return one key value
assertEquals(1, res.size());
assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
assertEquals(ts + 3, res.rawCells()[0].getTimestamp());
region.flush(true);
region.compact(true);
Thread.sleep(1000);
res = region.get(get);
// After flush and compact, the result should be consistent with the previous result
assertEquals(1, res.size());
assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
}
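The test above shows that a ValueFilter is applied only to the versions that survive the family's max-versions pruning: an older cell also contains "value", but only the newest retained match comes back. The sketch below is not from the project; it assumes an open Connection named conn and a hypothetical table "t1" with family "fam1", and performs the same kind of filtered Get from the client side.
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
// Sketch only: "t1", "fam1" and "row1" are hypothetical names, conn is an open Connection.
static void getFilteredVersions(Connection conn) throws IOException {
    Get get = new Get(Bytes.toBytes("row1"));
    get.readAllVersions(); // request every retained version, not just the newest one
    // Keep only cells whose value contains the substring "value"
    get.setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value")));
    try (Table table = conn.getTable(TableName.valueOf("t1"))) {
        Result res = table.get(get);
        for (Cell cell : res.rawCells()) {
            System.out.println(cell.getTimestamp() + " -> " + Bytes.toString(CellUtil.cloneValue(cell)));
        }
    }
}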
Use of org.apache.hadoop.hbase.filter.SubstringComparator in project hbase by apache.
The class MetaTableAccessor, method scanByRegionEncodedName.
/**
* Scans META table for a row whose key contains the specified <B>regionEncodedName</B>, returning
* a single related <code>Result</code> instance if any row is found, null otherwise.
* @param connection the connection to query META table.
* @param regionEncodedName the region encoded name to look for at META.
* @return <code>Result</code> instance with the row related info in META, null otherwise.
* @throws IOException if any errors occur while querying META.
*/
public static Result scanByRegionEncodedName(Connection connection, String regionEncodedName) throws IOException {
RowFilter rowFilter = new RowFilter(CompareOperator.EQUAL, new SubstringComparator(regionEncodedName));
Scan scan = getMetaScan(connection.getConfiguration(), 1);
scan.setFilter(rowFilter);
try (Table table = getMetaHTable(connection);
ResultScanner resultScanner = table.getScanner(scan)) {
return resultScanner.next();
}
}
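Because a region's encoded name is embedded in its hbase:meta row key, a SubstringComparator-based RowFilter is enough to locate the row without knowing the full key. A possible call site, sketched under the assumption that connection is an open Connection, encodedName holds a region's encoded name (e.g. from RegionInfo.getEncodedName()), and the surrounding class already has the usual org.apache.hadoop.hbase imports:
// Usage sketch only; connection and encodedName are assumed to exist in scope.
Result r = MetaTableAccessor.scanByRegionEncodedName(connection, encodedName);
if (r == null) {
    System.out.println("No hbase:meta row contains " + encodedName);
} else {
    System.out.println("Matched meta row: " + Bytes.toString(r.getRow()));
}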
Use of org.apache.hadoop.hbase.filter.SubstringComparator in project hbase by apache.
The class TestFromClientSide5, method testReadWithFilter.
/**
* Test for HBASE-17125
*/
@Test
public void testReadWithFilter() throws Exception {
final TableName tableName = name.getTableName();
try (Table table = TEST_UTIL.createTable(tableName, FAMILY, 3)) {
byte[] VALUEA = Bytes.toBytes("value-a");
byte[] VALUEB = Bytes.toBytes("value-b");
long[] ts = { 1000, 2000, 3000, 4000 };
Put put = new Put(ROW);
// Put versions 1000, 2000, 3000, 4000 of column FAMILY:QUALIFIER
for (int t = 0; t <= 3; t++) {
if (t <= 1) {
put.addColumn(FAMILY, QUALIFIER, ts[t], VALUEA);
} else {
put.addColumn(FAMILY, QUALIFIER, ts[t], VALUEB);
}
}
table.put(put);
Scan scan = new Scan().setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a"))).readVersions(3);
ResultScanner scanner = table.getScanner(scan);
Result result = scanner.next();
// ts[0] has gone from the user view (the family keeps only 3 versions); only ts[1] still matches the filter
assertNResult(result, ROW, FAMILY, QUALIFIER, new long[] { ts[1] }, new byte[][] { VALUEA }, 0, 0);
Get get = new Get(ROW).setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a"))).readVersions(3);
result = table.get(get);
// ts[0] has gone from the user view (the family keeps only 3 versions); only ts[1] still matches the filter
assertNResult(result, ROW, FAMILY, QUALIFIER, new long[] { ts[1] }, new byte[][] { VALUEA }, 0, 0);
// Test with max versions 1, it should still read ts[1]
scan = new Scan().setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a"))).readVersions(1);
scanner = table.getScanner(scan);
result = scanner.next();
// ts[0] has gone from the user view (the family keeps only 3 versions); only ts[1] still matches the filter
assertNResult(result, ROW, FAMILY, QUALIFIER, new long[] { ts[1] }, new byte[][] { VALUEA }, 0, 0);
// Test with max versions 1, it should still read ts[1]
get = new Get(ROW).setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a"))).readVersions(1);
result = table.get(get);
// ts[0] has gone from the user view (the family keeps only 3 versions); only ts[1] still matches the filter
assertNResult(result, ROW, FAMILY, QUALIFIER, new long[] { ts[1] }, new byte[][] { VALUEA }, 0, 0);
// Test with max versions 5, it should still read ts[1]
scan = new Scan().setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a"))).readVersions(5);
scanner = table.getScanner(scan);
result = scanner.next();
// ts[0] has gone from the user view (the family keeps only 3 versions); only ts[1] still matches the filter
assertNResult(result, ROW, FAMILY, QUALIFIER, new long[] { ts[1] }, new byte[][] { VALUEA }, 0, 0);
// Test with max versions 5, it should still read ts[1]
get = new Get(ROW).setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a"))).readVersions(5);
result = table.get(get);
// ts[0] has gone from the user view (the family keeps only 3 versions); only ts[1] still matches the filter
assertNResult(result, ROW, FAMILY, QUALIFIER, new long[] { ts[1] }, new byte[][] { VALUEA }, 0, 0);
}
}
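The repeated assertions above all make the same point: whether the request asks for 1, 3 or 5 versions, the ValueFilter only ever leaves ts[1] visible. A compact client-side sketch of that read pattern, not from the project, assuming an open Connection named conn and a hypothetical table "t1" whose column family retains 3 versions:
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
// Sketch only: "t1" is a hypothetical name, conn is an open Connection.
static void scanMatchingVersions(Connection conn, int versions) throws IOException {
    Scan scan = new Scan()
        .setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value-a")))
        .readVersions(versions); // maximum number of versions to return per column
    try (Table table = conn.getTable(TableName.valueOf("t1"));
         ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
            for (Cell cell : result.rawCells()) {
                System.out.println(cell.getTimestamp() + " -> " + Bytes.toString(CellUtil.cloneValue(cell)));
            }
        }
    }
}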
Use of org.apache.hadoop.hbase.filter.SubstringComparator in project uavstack by uavorg.
The class HBaseDataStore, method query.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
protected List query(DataStoreMsg msg) {
log.info(this, "DataStore Query Log data start");
msg = (DataStoreMsg) adaptor.prepareQueryObj(msg, datasource.getDataStoreConnection());
// Get the table instance by table name
String tableName = (String) msg.get(DataStoreProtocol.HBASE_TABLE_NAME);
// Get the family name used to build the scanner
String cfName = (String) msg.get(DataStoreProtocol.HBASE_FAMILY_NAME);
Scan scan = null;
List<byte[]> kv = null;
try (Table table = datasource.getSourceConnect().getTable(TableName.valueOf(tableName))) {
scan = new Scan();
DataStoreConnection con = datasource.getDataStoreConnection();
scan.setCaching(Integer.parseInt((String) con.getContext(DataStoreProtocol.HBASE_QUERY_CACHING)));
scan.setMaxResultSize(Long.parseLong((String) con.getContext(DataStoreProtocol.HBASE_QUERY_MAXRESULTSIZE)));
scan.setReversed((boolean) msg.get(DataStoreProtocol.HBASE_QUERY_REVERSE));
scan.addFamily(cfName.getBytes("UTF-8"));
// Build filters from the query info; currently rowkey filtering is supported
FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ALL);
if (msg.containsKey(DataStoreProtocol.HBASE_QUERY_STARTROW)) {
scan.setStartRow((byte[]) msg.get(DataStoreProtocol.HBASE_QUERY_STARTROW));
}
if (msg.containsKey(DataStoreProtocol.HBASE_QUERY_ENDROW)) {
scan.setStopRow((byte[]) msg.get(DataStoreProtocol.HBASE_QUERY_ENDROW));
}
if (msg.containsKey(DataStoreProtocol.HBASE_QUERY_ROW_KEYVALUE)) {
kv = (List<byte[]>) msg.get(DataStoreProtocol.HBASE_QUERY_ROW_KEYVALUE);
for (byte[] b : kv) {
flist.addFilter(new RowFilter(CompareOp.EQUAL, new SubstringComparator(new String(b))));
}
}
flist.addFilter(new PageFilter((long) msg.get(DataStoreProtocol.HBASE_QUERY_PAGESIZE)));
scan.setFilter(flist);
log.info(this, "DataStore Query Log data: getFilter String:" + scan.getFilter().toString());
try (ResultScanner result = table.getScanner(scan)) {
List<NavigableMap<byte[], byte[]>> resultList = Lists.newArrayList();
for (Result r : result) {
NavigableMap<byte[], byte[]> map = r.getFamilyMap(cfName.getBytes());
map.put("_timestamp".getBytes(), String.valueOf(r.rawCells()[0].getTimestamp()).getBytes());
resultList.add(map);
}
return adaptor.handleQueryResult(resultList, msg, datasource.getDataStoreConnection());
}
} catch (IOException e) {
log.err(this, "QUERY HBASE TABLE[" + tableName + "] FAMILY[" + cfName + "] FAIL:" + msg.toJSONString(), e);
return null;
}
}
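The query method above combines a rowkey SubstringComparator filter with a PageFilter under MUST_PASS_ALL. The sketch below isolates that composition using CompareOperator, the newer replacement for the deprecated CompareFilter.CompareOp used above; it is not part of uavstack and assumes an open Connection named conn plus hypothetical table and family names.
import java.io.IOException;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.*;
import org.apache.hadoop.hbase.util.Bytes;
// Sketch only: "logs" and "cf" are hypothetical names, conn is an open Connection.
static void pagedRowkeySubstringScan(Connection conn, String keyword, long pageSize) throws IOException {
    FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    // Keep rows whose key contains the keyword
    flist.addFilter(new RowFilter(CompareOperator.EQUAL, new SubstringComparator(keyword)));
    // PageFilter limits rows per region server, so the total returned may exceed pageSize
    flist.addFilter(new PageFilter(pageSize));
    Scan scan = new Scan().addFamily(Bytes.toBytes("cf")).setFilter(flist);
    try (Table table = conn.getTable(TableName.valueOf("logs"));
         ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
            System.out.println(Bytes.toString(r.getRow()));
        }
    }
}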