Use of org.apache.hadoop.hbase.filter.Filter in project cxf by apache: class HBaseQueryVisitor, method visit.
public void visit(SearchCondition<T> sc) {
    PrimitiveStatement statement = sc.getStatement();
    if (statement != null) {
        if (statement.getProperty() != null) {
            queryStack.peek().add(buildSimpleQuery(sc.getConditionType(),
                                                   statement.getProperty(),
                                                   statement.getValue()));
        }
    } else {
        queryStack.push(new ArrayList<>());
        for (SearchCondition<T> condition : sc.getSearchConditions()) {
            condition.accept(this);
        }
        boolean orCondition = sc.getConditionType() == ConditionType.OR;
        List<Filter> queries = queryStack.pop();
        queryStack.peek().add(createCompositeQuery(queries, orCondition));
    }
}
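The else branch handles composite conditions: it pushes a fresh list, visits each child condition, then pops the collected filters and folds them into a single composite Filter. A minimal usage sketch, assuming CXF's FiqlParser, a hypothetical Book bean, a "books" column family, and that the visitor exposes the built filter via getQuery():

// Hedged usage sketch: turning a FIQL expression into an HBase Filter via the visitor.
// The Book bean, the "books" family, and the constructor argument are assumptions, not taken from the snippet above.
FiqlParser<Book> parser = new FiqlParser<>(Book.class);
SearchCondition<Book> sc = parser.parse("name==CXF;id=gt=100"); // ';' is AND in FIQL
HBaseQueryVisitor<Book> visitor = new HBaseQueryVisitor<>("books");
sc.accept(visitor);
Filter hbaseFilter = visitor.getQuery(); // composite FilterList built by createCompositeQuery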
Use of org.apache.hadoop.hbase.filter.Filter in project cxf by apache: class HBaseQueryVisitor, method createCompositeQuery.
private Filter createCompositeQuery(List<Filter> queries, boolean orCondition) {
    FilterList.Operator oper = orCondition
        ? FilterList.Operator.MUST_PASS_ONE
        : FilterList.Operator.MUST_PASS_ALL;
    FilterList list = new FilterList(oper);
    for (Filter query : queries) {
        list.addFilter(query);
    }
    return list;
}
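createCompositeQuery maps OR to FilterList.Operator.MUST_PASS_ONE and AND to MUST_PASS_ALL. A standalone sketch of the same composition with stock HBase filters; the column family "f", the qualifiers, and the values are hypothetical:

// Sketch only: two single-column filters combined with OR semantics.
Filter byName = new SingleColumnValueFilter(Bytes.toBytes("f"), Bytes.toBytes("name"),
        CompareFilter.CompareOp.EQUAL, Bytes.toBytes("CXF"));
Filter byTitle = new SingleColumnValueFilter(Bytes.toBytes("f"), Bytes.toBytes("title"),
        CompareFilter.CompareOp.EQUAL, Bytes.toBytes("HBase"));
FilterList either = new FilterList(FilterList.Operator.MUST_PASS_ONE); // OR: a row passes if any filter matches
either.addFilter(byName);
either.addFilter(byTitle);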
Use of org.apache.hadoop.hbase.filter.Filter in project janusgraph by JanusGraph: class HBaseKeyColumnValueStore, method getFilter.
public static Filter getFilter(SliceQuery query) {
    byte[] colStartBytes = query.getSliceStart().length() > 0
        ? query.getSliceStart().as(StaticBuffer.ARRAY_FACTORY) : null;
    byte[] colEndBytes = query.getSliceEnd().length() > 0
        ? query.getSliceEnd().as(StaticBuffer.ARRAY_FACTORY) : null;
    Filter filter = new ColumnRangeFilter(colStartBytes, true, colEndBytes, false);
    if (query.hasLimit()) {
        filter = new FilterList(FilterList.Operator.MUST_PASS_ALL,
                filter,
                new ColumnPaginationFilter(query.getLimit(), 0));
    }
    logger.debug("Generated HBase Filter {}", filter);
    return filter;
}
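ColumnRangeFilter(colStartBytes, true, colEndBytes, false) selects qualifiers in the half-open range [sliceStart, sliceEnd), and ColumnPaginationFilter(limit, 0) caps how many columns each row returns. A sketch of attaching the same filter shape to a plain client-side Scan; the qualifier bounds and the limit of 10 are hypothetical:

// Sketch, assuming the plain HBase client API.
Filter range = new ColumnRangeFilter(Bytes.toBytes("c-a"), true, Bytes.toBytes("c-z"), false); // inclusive start, exclusive end
Filter sliced = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        range, new ColumnPaginationFilter(10, 0)); // at most 10 columns per row, starting at offset 0
Scan scan = new Scan();
scan.setFilter(sliced);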
Use of org.apache.hadoop.hbase.filter.Filter in project cdap by caskdata: class DequeueScanObserver, method preScannerOpen.
@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan,
                                    RegionScanner s) throws IOException {
    ConsumerConfig consumerConfig = DequeueScanAttributes.getConsumerConfig(scan);
    Transaction tx = DequeueScanAttributes.getTx(scan);
    if (consumerConfig == null || tx == null) {
        return super.preScannerOpen(e, scan, s);
    }
    Filter dequeueFilter = new DequeueFilter(consumerConfig, tx);
    Filter existing = scan.getFilter();
    if (existing != null) {
        Filter combined = new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, dequeueFilter);
        scan.setFilter(combined);
    } else {
        scan.setFilter(dequeueFilter);
    }
    return super.preScannerOpen(e, scan, s);
}
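The observer takes care not to discard a filter the client already set on the Scan: the existing filter and the DequeueFilter are combined under MUST_PASS_ALL so both must accept a cell. The same pattern as a small reusable sketch; addAndFilter is a hypothetical helper name, not part of HBase or CDAP:

// Hypothetical helper: AND an extra filter onto a Scan without clobbering one already set.
static void addAndFilter(Scan scan, Filter extra) {
    Filter existing = scan.getFilter();
    if (existing == null) {
        scan.setFilter(extra);
    } else {
        scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, extra));
    }
}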
Use of org.apache.hadoop.hbase.filter.Filter in project hive by apache: class HBaseReadWrite, method getPartitionCount.
int getPartitionCount() throws IOException {
    Filter fil = new FirstKeyOnlyFilter();
    Iterator<Result> iter = scan(PART_TABLE, fil);
    return Iterators.size(iter);
}
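FirstKeyOnlyFilter keeps the count cheap because each row contributes only its first cell, so counting partitions reduces to counting Results. A client-side sketch of the same idea with the plain HBase API; the open connection and the table name "PARTITIONS" are assumptions for illustration:

// Hedged sketch with the plain HBase client; "connection" and the table name are assumed.
Scan scan = new Scan();
scan.setFilter(new FirstKeyOnlyFilter()); // only the first cell of each row is returned
int count = 0;
try (Table table = connection.getTable(TableName.valueOf("PARTITIONS"));
     ResultScanner scanner = table.getScanner(scan)) {
    for (Result ignored : scanner) {
        count++;
    }
}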