Example usage of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in the project CodeDemo by bingoxubin, taken from the class FilterListTest, method filterList.
// Combined multi-filter query using FilterList.
// Requirement: use SingleColumnValueFilter to find rows in column family f1
// where the "name" column equals "刘备", AND whose rowkey starts with the
// prefix "00" (PrefixFilter). Both conditions must hold.
/**
 * Queries rows whose f1:name value is "刘备"
 * and whose rowkey starts with the prefix "00".
 */
@Test
public void filterList() throws IOException {
    Scan scan = new Scan();
    // Use an explicit charset: String.getBytes() with no argument uses the
    // platform default encoding and silently corrupts non-ASCII values such as "刘备".
    SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter(
            "f1".getBytes(java.nio.charset.StandardCharsets.UTF_8),
            "name".getBytes(java.nio.charset.StandardCharsets.UTF_8),
            CompareFilter.CompareOp.EQUAL,
            "刘备".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    PrefixFilter prefixFilter = new PrefixFilter("00".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    // FilterList defaults to MUST_PASS_ALL, i.e. a logical AND of both filters.
    FilterList filterList = new FilterList();
    filterList.addFilter(singleColumnValueFilter);
    filterList.addFilter(prefixFilter);
    scan.setFilter(filterList);
    // ResultScanner holds server-side resources — close it via try-with-resources.
    try (ResultScanner scanner = table.getScanner(scan)) {
        // Print each matching row; printing the scanner itself would only
        // show the scanner object's toString(), not the query results.
        scanner.forEach(System.out::println);
    }
}
Example usage of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in the project CodeDemo by bingoxubin, taken from the class SingleColumnValueFilterTest, method singleColumnValueFilter.
// Single column value filter: SingleColumnValueFilter.
// SingleColumnValueFilter returns every cell of each row whose tested column
// satisfies the condition — i.e. the whole matching row, not just the matched cell.
// Query the rows whose name is "刘备".
/**
 * Equivalent SQL: select * from myuser where name = '刘备'
 * Returns all fields of every row that satisfies the condition.
 * <p>
 * Related: SingleColumnValueExcludeFilter behaves the same way except that
 * the tested column itself is omitted from the returned row.
 */
@Test
public void singleColumnValueFilter() throws IOException {
    // Query rows where column family f1, column "name" has the value "刘备".
    Scan scan = new Scan();
    // Use an explicit charset: String.getBytes() with no argument uses the
    // platform default encoding and silently corrupts non-ASCII values such as "刘备".
    SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter(
            "f1".getBytes(java.nio.charset.StandardCharsets.UTF_8),
            "name".getBytes(java.nio.charset.StandardCharsets.UTF_8),
            CompareFilter.CompareOp.EQUAL,
            "刘备".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    scan.setFilter(singleColumnValueFilter);
    // ResultScanner holds server-side resources — close it via try-with-resources.
    try (ResultScanner scanner = table.getScanner(scan)) {
        // Print each matching row; printing the scanner itself would only
        // show the scanner object's toString(), not the query results.
        scanner.forEach(System.out::println);
    }
}
Example usage of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in the project gora by apache, taken from the class DefaultFactory, method createFilter.
@Override
public org.apache.hadoop.hbase.filter.Filter createFilter(Filter<K, T> filter, HBaseStore<K, T> store) {
    // Translate a Gora filter into its HBase remote-filter equivalent.
    // Returns null when (part of) the filter has no remote implementation yet,
    // so the caller falls back to client-side filtering.
    if (filter instanceof FilterList) {
        FilterList<K, T> list = (FilterList<K, T>) filter;
        // Mirror the Gora operator (MUST_PASS_ALL / MUST_PASS_ONE) by name.
        org.apache.hadoop.hbase.filter.FilterList translated =
                new org.apache.hadoop.hbase.filter.FilterList(Operator.valueOf(list.getOperator().name()));
        for (Filter<K, T> child : list.getFilters()) {
            FilterFactory<K, T> childFactory = getHbaseFitlerUtil().getFactory(child);
            if (childFactory == null) {
                // One untranslatable child invalidates the whole list.
                LOG.warn("HBase remote filter factory not yet implemented for " + child.getClass().getCanonicalName());
                return null;
            }
            org.apache.hadoop.hbase.filter.Filter childFilter = childFactory.createFilter(child, store);
            if (childFilter != null) {
                translated.addFilter(childFilter);
            }
        }
        return translated;
    } else if (filter instanceof SingleFieldValueFilter) {
        SingleFieldValueFilter<K, T> fieldFilter = (SingleFieldValueFilter<K, T>) filter;
        // Map the Gora field onto its HBase family/qualifier pair.
        HBaseColumn mappedColumn = store.getMapping().getColumn(fieldFilter.getFieldName());
        SingleColumnValueFilter translated = new SingleColumnValueFilter(
                mappedColumn.getFamily(),
                mappedColumn.getQualifier(),
                getCompareOp(fieldFilter.getFilterOp()),
                HBaseByteInterface.toBytes(fieldFilter.getOperands().get(0)));
        translated.setFilterIfMissing(fieldFilter.isFilterIfMissing());
        return translated;
    } else if (filter instanceof MapFieldValueFilter) {
        MapFieldValueFilter<K, T> mapFilter = (MapFieldValueFilter<K, T>) filter;
        // For map fields the qualifier is derived from the map key, not the mapping.
        HBaseColumn mappedColumn = store.getMapping().getColumn(mapFilter.getFieldName());
        SingleColumnValueFilter translated = new SingleColumnValueFilter(
                mappedColumn.getFamily(),
                HBaseByteInterface.toBytes(mapFilter.getMapKey()),
                getCompareOp(mapFilter.getFilterOp()),
                HBaseByteInterface.toBytes(mapFilter.getOperands().get(0)));
        translated.setFilterIfMissing(mapFilter.isFilterIfMissing());
        return translated;
    } else {
        LOG.warn("HBase remote filter not yet implemented for " + filter.getClass().getCanonicalName());
        return null;
    }
}
Example usage of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in the project kylin by apache, taken from the class HBaseResourceStore, method generateTimeFilterList.
private FilterList generateTimeFilterList(VisitFilter visitFilter) {
    // Build an AND-combined list of last-modified-timestamp constraints;
    // returns null when the visit filter imposes no time bound at all.
    FilterList timeFilters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    if (visitFilter.getLastModStart() >= 0) {
        // NOTE: Negative value does not work in its binary form
        timeFilters.addFilter(new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS,
                CompareFilter.CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(visitFilter.getLastModStart())));
    }
    if (visitFilter.getLastModEndExclusive() != Long.MAX_VALUE) {
        // Exclusive upper bound on the modification timestamp.
        timeFilters.addFilter(new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS,
                CompareFilter.CompareOp.LESS, Bytes.toBytes(visitFilter.getLastModEndExclusive())));
    }
    return timeFilters.getFilters().isEmpty() ? null : timeFilters;
}
Example usage of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in the project pxf by greenplum-db, taken from the class HBaseFilterBuilderTest, method parseIsNullExpression.
@Test
public void parseIsNullExpression() throws Exception {
    // Parse the serialized predicate "a1o8" against the test tuple description
    // (presumably an IS NULL check on column index 1 — see the test name).
    Filter filter = helper("a1o8", tupleDescription);
    assertTrue(filter instanceof SingleColumnValueFilter);
    SingleColumnValueFilter scvFilter = (SingleColumnValueFilter) filter;
    assertNotNull(scvFilter);
    // The builder should target the mapped family/qualifier of column 1 and
    // express IS NULL as an EQUAL comparison against a NullComparator.
    assertSame(families[1], scvFilter.getFamily());
    assertSame(qualifiers[1], scvFilter.getQualifier());
    assertEquals(CompareFilter.CompareOp.EQUAL, scvFilter.getOperator());
    assertTrue(scvFilter.getComparator() instanceof NullComparator);
}
Aggregations