Search in sources:

Example 86 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project drill by apache.

The class HBaseFilterBuilder, method createRowKeyPrefixScanSpec.

// Builds a scan spec for a row-key-prefix predicate extracted by the
// processor, or returns null when the predicate imposes no restriction
// (no start/stop bound and no filter), meaning the push-down is a no-op.
private HBaseScanSpec createRowKeyPrefixScanSpec(FunctionCall call, CompareFunctionsProcessor processor) {
    byte[] startRow = processor.getRowKeyPrefixStartRow();
    byte[] stopRow = processor.getRowKeyPrefixStopRow();
    Filter filter = processor.getRowKeyPrefixFilter();
    // NOTE(review): these are reference (==/!=) comparisons against the
    // HConstants sentinel arrays, not value comparisons — this assumes the
    // processor returns those exact array instances to mean "unbounded".
    // Confirm against CompareFunctionsProcessor; Arrays.equals would be
    // needed if it ever returns an equal-but-distinct empty array.
    if (startRow != HConstants.EMPTY_START_ROW || stopRow != HConstants.EMPTY_END_ROW || filter != null) {
        return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
    }
    // else
    return null;
}
Also used : RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter)

Example 87 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project drill by apache.

The class MapRDBFilterBuilder, method parseTree.

/**
 * Parses the pushed-down filter expression into a scan specification and
 * merges it (logical AND) with the group scan's existing specification.
 * If the merged spec's only attached filter is a non-NOT_EQUAL RowFilter
 * over a BinaryComparator, that filter is dropped: its effect is already
 * achieved through the startRow/stopRow range.
 *
 * @return the merged scan spec, or null if the expression yields none
 */
public HBaseScanSpec parseTree() {
    HBaseScanSpec spec = le.accept(this, null);
    if (spec == null) {
        return null;
    }
    spec = mergeScanSpecs("booleanAnd", this.groupScan.getHBaseScanSpec(), spec);
    Filter attached = spec.getFilter();
    boolean redundantRowFilter = attached instanceof RowFilter
            && ((RowFilter) attached).getOperator() != CompareOp.NOT_EQUAL
            && ((RowFilter) attached).getComparator() instanceof BinaryComparator;
    if (redundantRowFilter) {
        // The row-key range already enforces this predicate; strip the filter.
        spec = new HBaseScanSpec(spec.getTableName(), spec.getStartRow(), spec.getStopRow(), null);
    }
    return spec;
}
Also used : HBaseScanSpec(org.apache.drill.exec.store.hbase.HBaseScanSpec) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)

Example 88 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project drill by apache.

The class MapRDBFilterBuilder, method mergeScanSpecs.

/**
 * Combines two scan specifications under a boolean operator.
 *
 * <p>For "booleanAnd" the filters are AND-combined and the row-key range is
 * intersected (max of starts, min of stops); for "booleanOr" the filters are
 * OR-combined and the range is unioned (min of starts, max of stops).
 *
 * @param functionName  either "booleanAnd" or "booleanOr"
 * @param leftScanSpec  left operand spec
 * @param rightScanSpec right operand spec
 * @return a new merged spec over this group scan's table
 * @throws IllegalArgumentException if functionName is not a supported operator
 */
private HBaseScanSpec mergeScanSpecs(String functionName, HBaseScanSpec leftScanSpec, HBaseScanSpec rightScanSpec) {
    Filter newFilter;
    byte[] startRow;
    byte[] stopRow;
    switch(functionName) {
        case "booleanAnd":
            // -1 == HBaseUtils.LAST_FILTER: append at the end of the filter list.
            newFilter = HBaseUtils.andFilterAtIndex(leftScanSpec.getFilter(), -1, rightScanSpec.getFilter());
            startRow = HBaseUtils.maxOfStartRows(leftScanSpec.getStartRow(), rightScanSpec.getStartRow());
            stopRow = HBaseUtils.minOfStopRows(leftScanSpec.getStopRow(), rightScanSpec.getStopRow());
            break;
        case "booleanOr":
            // -1 == HBaseUtils.LAST_FILTER: append at the end of the filter list.
            newFilter = HBaseUtils.orFilterAtIndex(leftScanSpec.getFilter(), -1, rightScanSpec.getFilter());
            startRow = HBaseUtils.minOfStartRows(leftScanSpec.getStartRow(), rightScanSpec.getStartRow());
            stopRow = HBaseUtils.maxOfStopRows(leftScanSpec.getStopRow(), rightScanSpec.getStopRow());
            break;
        default:
            // Previously an unknown operator silently fell through and produced
            // a full-table, unfiltered scan spec — fail fast instead.
            throw new IllegalArgumentException("Unsupported boolean operator: " + functionName);
    }
    return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, newFilter);
}
Also used : HBaseScanSpec(org.apache.drill.exec.store.hbase.HBaseScanSpec) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter)

Example 89 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project metron by apache.

The class MockHTable, method get.

// In-memory mock of HTable#get: collects the newest version of each matching
// cell for the requested row, then applies the Get's filter (if any).
@Override
public Result get(Get get) throws IOException {
    // NOTE(review): containsKey on a byte[] key works only if `data` is a
    // NavigableMap with a byte-array comparator — confirm against the field
    // declaration; a plain HashMap would compare array identity.
    if (!data.containsKey(get.getRow()))
        return new Result();
    byte[] row = get.getRow();
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    if (!get.hasFamilies()) {
        // No family restriction: return every family/qualifier for the row.
        kvs = toKeyValue(row, data.get(row), get.getMaxVersions());
    } else {
        for (byte[] family : get.getFamilyMap().keySet()) {
            if (data.get(row).get(family) == null)
                continue;
            NavigableSet<byte[]> qualifiers = get.getFamilyMap().get(family);
            // Null/empty qualifier set means "all qualifiers in this family".
            if (qualifiers == null || qualifiers.isEmpty())
                qualifiers = data.get(row).get(family).navigableKeySet();
            for (byte[] qualifier : qualifiers) {
                if (qualifier == null)
                    qualifier = "".getBytes();
                if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier) || data.get(row).get(family).get(qualifier).isEmpty())
                    continue;
                // lastEntry() = highest timestamp, i.e. the newest version only.
                Map.Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
                kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
            }
        }
    }
    Filter filter = get.getFilter();
    if (filter != null) {
        filter.reset();
        List<KeyValue> nkvs = new ArrayList<KeyValue>(kvs.size());
        for (KeyValue kv : kvs) {
            if (filter.filterAllRemaining()) {
                break;
            }
            if (filter.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength())) {
                continue;
            }
            if (filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE) {
                nkvs.add(kv);
            }
        // ignoring next key hint which is a optimization to reduce file system IO
        }
        if (filter.hasFilterRow()) {
            // NOTE(review): the boolean result of filterRow() is discarded here;
            // in real HBase a true return excludes the whole row. This mock calls
            // it only for its side effects — confirm whether tests rely on that.
            filter.filterRow();
        }
        kvs = nkvs;
    }
    return new Result(kvs);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) Filter(org.apache.hadoop.hbase.filter.Filter) ArrayList(java.util.ArrayList) HashMap(java.util.HashMap) Map(java.util.Map) NavigableMap(java.util.NavigableMap) TreeMap(java.util.TreeMap) Result(org.apache.hadoop.hbase.client.Result)

Example 90 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project hive by apache.

The class HBaseScanRange, method setup.

/**
 * Applies this range to the given Scan: optionally sets the start/stop rows,
 * then attaches the configured filters — a single filter directly, or several
 * wrapped in a {@link FilterList}.
 *
 * @param scan       the scan to configure
 * @param conf       configuration used to materialize each filter
 * @param filterOnly when true, skip setting the row-key bounds
 * @throws Exception if a filter description cannot be converted to a Filter
 */
public void setup(Scan scan, Configuration conf, boolean filterOnly) throws Exception {
    if (!filterOnly) {
        // Only bound the scan's row-key range when asked to.
        if (startRow != null) {
            scan.setStartRow(startRow);
        }
        if (stopRow != null) {
            scan.setStopRow(stopRow);
        }
    }
    int filterCount = filterDescs.size();
    if (filterCount == 0) {
        return;
    }
    if (filterCount == 1) {
        // A lone filter goes on the scan directly, without a FilterList wrapper.
        scan.setFilter(filterDescs.get(0).toFilter(conf));
        return;
    }
    List<Filter> materialized = new ArrayList<Filter>(filterCount);
    for (FilterDesc desc : filterDescs) {
        materialized.add(desc.toFilter(conf));
    }
    scan.setFilter(new FilterList(materialized));
}
Also used : Filter(org.apache.hadoop.hbase.filter.Filter) ArrayList(java.util.ArrayList) FilterList(org.apache.hadoop.hbase.filter.FilterList)

Aggregations

Filter (org.apache.hadoop.hbase.filter.Filter)179 Test (org.junit.Test)97 Scan (org.apache.hadoop.hbase.client.Scan)95 BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest)77 SkipScanFilter (org.apache.phoenix.filter.SkipScanFilter)76 RowKeyComparisonFilter (org.apache.phoenix.filter.RowKeyComparisonFilter)74 SingleKeyValueComparisonFilter (org.apache.phoenix.filter.SingleKeyValueComparisonFilter)45 TestUtil.rowKeyFilter (org.apache.phoenix.util.TestUtil.rowKeyFilter)45 FilterList (org.apache.hadoop.hbase.filter.FilterList)43 RowFilter (org.apache.hadoop.hbase.filter.RowFilter)40 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)37 TestUtil.multiEncodedKVFilter (org.apache.phoenix.util.TestUtil.multiEncodedKVFilter)33 TestUtil.singleKVFilter (org.apache.phoenix.util.TestUtil.singleKVFilter)33 PhoenixPreparedStatement (org.apache.phoenix.jdbc.PhoenixPreparedStatement)31 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)27 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)25 CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter)24 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)24 ArrayList (java.util.ArrayList)22 RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator)18