Use of org.apache.hadoop.hbase.filter.Filter in the Apache Drill project.
Class HBaseFilterBuilder, method createRowKeyPrefixScanSpec.
/**
 * Builds a scan specification for a row-key prefix predicate.
 *
 * @param call      the function call being pushed down (not consulted here; the
 *                  processor has already digested it)
 * @param processor supplies the start/stop rows and optional filter derived from the prefix
 * @return a new {@link HBaseScanSpec} when the prefix narrows the scan or attaches
 *         a filter; {@code null} when there is nothing to push down
 */
private HBaseScanSpec createRowKeyPrefixScanSpec(FunctionCall call, CompareFunctionsProcessor processor) {
  byte[] startRow = processor.getRowKeyPrefixStartRow();
  byte[] stopRow = processor.getRowKeyPrefixStopRow();
  Filter filter = processor.getRowKeyPrefixFilter();
  // BUG FIX: the original compared the byte arrays to the HConstants sentinels
  // with '!=', i.e. by reference. That only recognizes the shared constant
  // instances; a freshly allocated empty array would be treated as a real bound.
  // Compare by content instead (the sentinels are empty arrays).
  boolean hasStartRow = startRow != null && startRow.length > 0;
  boolean hasStopRow = stopRow != null && stopRow.length > 0;
  if (hasStartRow || hasStopRow || filter != null) {
    return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
  }
  // Nothing usable came out of the prefix analysis.
  return null;
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Drill project.
Class MapRDBFilterBuilder, method parseTree.
/**
 * Parses the pushed-down logical expression into an HBase scan specification.
 *
 * @return the merged scan spec, or {@code null} when the expression could not
 *         be converted
 */
public HBaseScanSpec parseTree() {
  HBaseScanSpec spec = le.accept(this, null);
  if (spec == null) {
    return null;
  }
  spec = mergeScanSpecs("booleanAnd", this.groupScan.getHBaseScanSpec(), spec);
  /*
   * If a lone RowFilter ended up attached to the merged spec, its effect is
   * already captured by the startRow/stopRow bounds, so it can be dropped.
   */
  Filter merged = spec.getFilter();
  boolean redundantRowFilter = merged instanceof RowFilter
      && ((RowFilter) merged).getOperator() != CompareOp.NOT_EQUAL
      && ((RowFilter) merged).getComparator() instanceof BinaryComparator;
  if (redundantRowFilter) {
    spec = new HBaseScanSpec(spec.getTableName(), spec.getStartRow(), spec.getStopRow(), null);
  }
  return spec;
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Drill project.
Class MapRDBFilterBuilder, method mergeScanSpecs.
/**
 * Combines two scan specifications under a boolean operator.
 *
 * <p>For AND the filters are conjoined and the row-key range is tightened
 * (max of starts, min of stops); for OR the filters are disjoined and the
 * range is widened (min of starts, max of stops).
 *
 * @param functionName  either {@code "booleanAnd"} or {@code "booleanOr"}
 * @param leftScanSpec  left operand
 * @param rightScanSpec right operand
 * @return the merged scan specification
 * @throws IllegalArgumentException if {@code functionName} is not a supported operator
 */
private HBaseScanSpec mergeScanSpecs(String functionName, HBaseScanSpec leftScanSpec, HBaseScanSpec rightScanSpec) {
  Filter newFilter;
  byte[] startRow;
  byte[] stopRow;
  switch (functionName) {
    case "booleanAnd":
      // -1 == HBaseUtils.LAST_FILTER: append at the end of the filter list.
      newFilter = HBaseUtils.andFilterAtIndex(leftScanSpec.getFilter(), -1, rightScanSpec.getFilter());
      startRow = HBaseUtils.maxOfStartRows(leftScanSpec.getStartRow(), rightScanSpec.getStartRow());
      stopRow = HBaseUtils.minOfStopRows(leftScanSpec.getStopRow(), rightScanSpec.getStopRow());
      break;
    case "booleanOr":
      // -1 == HBaseUtils.LAST_FILTER: append at the end of the filter list.
      newFilter = HBaseUtils.orFilterAtIndex(leftScanSpec.getFilter(), -1, rightScanSpec.getFilter());
      startRow = HBaseUtils.minOfStartRows(leftScanSpec.getStartRow(), rightScanSpec.getStartRow());
      stopRow = HBaseUtils.maxOfStopRows(leftScanSpec.getStopRow(), rightScanSpec.getStopRow());
      break;
    default:
      // BUG FIX: previously an unrecognized operator fell through silently and
      // produced a full-range, filterless scan spec — i.e. over-inclusive
      // results instead of an error. Fail fast instead.
      throw new IllegalArgumentException("Unsupported boolean operator: " + functionName);
  }
  return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, newFilter);
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Metron project.
Class MockHTable, method get.
/**
 * In-memory emulation of {@code HTable.get(Get)}: collects the newest version
 * of each requested cell for the row, then applies the Get's filter, if any.
 *
 * <p>NOTE(review): this assumes {@code data} is keyed by a byte[]-content-aware
 * map (e.g. a TreeMap with a bytes comparator) — a plain HashMap would compare
 * byte[] keys by identity. Confirm against the field declaration.
 *
 * @param get the Get request describing row, families, and qualifiers
 * @return a Result with the matching cells; empty when the row is absent
 * @throws IOException declared for interface compatibility
 */
@Override
public Result get(Get get) throws IOException {
  byte[] row = get.getRow();
  if (!data.containsKey(row)) {
    return new Result();
  }
  List<KeyValue> kvs = new ArrayList<KeyValue>();
  if (!get.hasFamilies()) {
    // No family restriction: return every cell of the row.
    kvs = toKeyValue(row, data.get(row), get.getMaxVersions());
  } else {
    for (byte[] family : get.getFamilyMap().keySet()) {
      if (data.get(row).get(family) == null) {
        continue;
      }
      NavigableSet<byte[]> qualifiers = get.getFamilyMap().get(family);
      if (qualifiers == null || qualifiers.isEmpty()) {
        // Family requested without explicit qualifiers: take all of them.
        qualifiers = data.get(row).get(family).navigableKeySet();
      }
      for (byte[] qualifier : qualifiers) {
        if (qualifier == null) {
          // An empty qualifier is just an empty array; avoids the
          // platform-charset dependence of "".getBytes().
          qualifier = new byte[0];
        }
        if (!data.get(row).containsKey(family)
            || !data.get(row).get(family).containsKey(qualifier)
            || data.get(row).get(family).get(qualifier).isEmpty()) {
          continue;
        }
        // Only the newest version (highest timestamp) is returned.
        Map.Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
        kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
      }
    }
  }
  Filter filter = get.getFilter();
  if (filter != null) {
    filter.reset();
    List<KeyValue> nkvs = new ArrayList<KeyValue>(kvs.size());
    for (KeyValue kv : kvs) {
      if (filter.filterAllRemaining()) {
        break;
      }
      if (filter.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength())) {
        continue;
      }
      if (filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE) {
        nkvs.add(kv);
      }
      // Ignoring the next-key hint, which is only an optimization to reduce file system IO.
    }
    // BUG FIX: Filter.filterRow() returns true when the whole row must be
    // excluded; the original called it but discarded the result, leaking
    // filtered rows to the caller.
    if (filter.hasFilterRow() && filter.filterRow()) {
      nkvs.clear();
    }
    kvs = nkvs;
  }
  return new Result(kvs);
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Hive project.
Class HBaseScanRange, method setup.
/**
 * Applies this range's row bounds and filter descriptors to the given scan.
 *
 * @param scan       the scan to configure
 * @param conf       configuration used to materialize each filter descriptor
 * @param filterOnly when {@code true}, only filters are applied and the
 *                   start/stop rows are left untouched
 * @throws Exception if a filter descriptor cannot be converted to a Filter
 */
public void setup(Scan scan, Configuration conf, boolean filterOnly) throws Exception {
  if (!filterOnly) {
    // Row bounds are applied only when the caller asked for them.
    if (startRow != null) {
      scan.setStartRow(startRow);
    }
    if (stopRow != null) {
      scan.setStopRow(stopRow);
    }
  }
  int count = filterDescs.size();
  if (count == 0) {
    return;
  }
  if (count == 1) {
    // A single filter is attached directly, without a wrapping FilterList.
    scan.setFilter(filterDescs.get(0).toFilter(conf));
    return;
  }
  List<Filter> materialized = new ArrayList<Filter>(count);
  for (FilterDesc desc : filterDescs) {
    materialized.add(desc.toFilter(conf));
  }
  scan.setFilter(new FilterList(materialized));
}
Aggregations