Example 6 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project hadoop by apache.

the class TimelineFilterUtils method createFilterForConfsOrMetricsToRetrieve.

/**
   * Create filters for confs or metrics to retrieve. This list includes a
   * configs/metrics family filter and relevant filters for confs/metrics to
   * retrieve, if present.
   *
   * @param <T> Describes the type of column prefix.
   * @param confsOrMetricToRetrieve configs/metrics to retrieve.
   * @param columnFamily config or metric column family.
   * @param columnPrefix config or metric column prefix.
   * @return a filter list.
   * @throws IOException if any problem occurs while creating the filters.
   */
public static <T> Filter createFilterForConfsOrMetricsToRetrieve(TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily, ColumnPrefix<T> columnPrefix) throws IOException {
    Filter familyFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(columnFamily.getBytes()));
    if (confsOrMetricToRetrieve != null && !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
        // If confsOrMetricsToRetrieve is specified, create a filter list based
        // on it and the family filter.
        FilterList filter = new FilterList(familyFilter);
        filter.addFilter(createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
        return filter;
    } else {
        // Only the family filter needs to be added.
        return familyFilter;
    }
}
Also used : FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)
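
Note that the single-argument FilterList constructor above defaults to Operator.MUST_PASS_ALL, so both the family filter and the confs/metrics filters must match. A caller would typically attach the returned filter to a Scan. The sketch below is a minimal, hypothetical usage with standard org.apache.hadoop.hbase imports assumed; the EntityColumnFamily/EntityColumnPrefix constants and the contents of the TimelineFilterList are assumptions based on the timeline service schema, not part of the example above.

// Hypothetical usage sketch: build the combined filter and attach it to a Scan.
// The column family/prefix constants below are assumed names from the schema.
TimelineFilterList confsToRetrieve = new TimelineFilterList(); // populated from the reader's request in practice
Filter confsFilter = TimelineFilterUtils.createFilterForConfsOrMetricsToRetrieve(
    confsToRetrieve, EntityColumnFamily.CONFIGS, EntityColumnPrefix.CONFIG);
Scan scan = new Scan();
scan.setFilter(confsFilter);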

Example 7 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.

the class ProtobufUtil method toFilter.

/**
   * Convert a protocol buffer Filter to a client Filter
   *
   * @param proto the protocol buffer Filter to convert
   * @return the converted Filter
   * @throws IOException if the protocol buffer filter cannot be converted
   */
@SuppressWarnings("unchecked")
public static Filter toFilter(FilterProtos.Filter proto) throws IOException {
    String type = proto.getName();
    final byte[] value = proto.getSerializedFilter().toByteArray();
    String funcName = "parseFrom";
    try {
        Class<? extends Filter> c = (Class<? extends Filter>) Class.forName(type, true, CLASS_LOADER);
        Method parseFrom = c.getMethod(funcName, byte[].class);
        if (parseFrom == null) {
            throw new IOException("Unable to locate function: " + funcName + " in type: " + type);
        }
        return (Filter) parseFrom.invoke(c, value);
    } catch (Exception e) {
        // Either we couldn't instantiate the method object, or "parseFrom" failed.
        // In either case, let's not retry.
        throw new DoNotRetryIOException(e);
    }
}
Also used : Filter(org.apache.hadoop.hbase.filter.Filter) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ByteString(com.google.protobuf.ByteString) Method(java.lang.reflect.Method) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) IOException(java.io.IOException) ServiceException(com.google.protobuf.ServiceException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) RemoteException(org.apache.hadoop.ipc.RemoteException)
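
The companion conversion in the other direction simply wraps the filter's class name and its own protobuf serialization into a FilterProtos.Filter, which is exactly what the reflective parseFrom lookup above consumes. The round-trip sketch below is illustrative; it uses the proto field names visible in the code above (name, serializedFilter) and would live in a method that declares throws IOException.

// Round-trip sketch: build the protobuf envelope that toFilter(...) expects,
// then convert it back into a client Filter.
Filter original = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("r1")));
FilterProtos.Filter proto = FilterProtos.Filter.newBuilder()
    // fully qualified class name, used by toFilter for the reflective lookup
    .setName(original.getClass().getName())
    // the filter's own protobuf bytes, handed to the static parseFrom method
    .setSerializedFilter(ByteString.copyFrom(original.toByteArray()))
    .build();
Filter restored = ProtobufUtil.toFilter(proto); // reflectively calls RowFilter.parseFrom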

Example 8 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.

the class StoreScanner method next.

/**
   * Get the next row of values from this Store.
   * @param outResult output list to which matching cells are added
   * @param scannerContext context carrying the batch/size/time limits for this invocation
   * @return true if there are more rows, false if scanner is done
   * @throws IOException if an error occurs while reading from the store
   */
@Override
public boolean next(List<Cell> outResult, ScannerContext scannerContext) throws IOException {
    if (scannerContext == null) {
        throw new IllegalArgumentException("Scanner context cannot be null");
    }
    boolean flushed = checkFlushed();
    if (checkReseek(flushed)) {
        return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
    }
    // If the heap was left null, the scanners had previously run out; close and
    // return.
    if (this.heap == null) {
        // By this time the partial close should have happened because the heap is already null
        // Do all cleanup except heap.close()
        close(false);
        return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
    }
    Cell cell = this.heap.peek();
    if (cell == null) {
        // Do all cleanup except heap.close()
        close(false);
        return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
    }
    // If no limit exists in scope LimitScope.BETWEEN_CELLS, we are sure we are changing
    // rows; otherwise we may still be traversing the same row, so we must perform the row
    // comparison.
    if (!scannerContext.hasAnyLimit(LimitScope.BETWEEN_CELLS) || matcher.currentRow() == null) {
        this.countPerRow = 0;
        matcher.setToNewRow(cell);
    }
    // Clear progress away unless invoker has indicated it should be kept.
    if (!scannerContext.getKeepProgress())
        scannerContext.clearProgress();
    // Only do a sanity-check if store and comparator are available.
    CellComparator comparator = store != null ? store.getComparator() : null;
    int count = 0;
    long totalBytesRead = 0;
    LOOP: do {
        // Update and check the time limit based on the configured value of cellsPerTimeoutCheck
        if ((kvsScanned % cellsPerHeartbeatCheck == 0)) {
            scannerContext.updateTimeProgress();
            if (scannerContext.checkTimeLimit(LimitScope.BETWEEN_CELLS)) {
                return scannerContext.setScannerState(NextState.TIME_LIMIT_REACHED).hasMoreValues();
            }
        }
        // Do object compare - we set prevKV from the same heap.
        if (prevCell != cell)
            ++kvsScanned;
        checkScanOrder(prevCell, cell, comparator);
        prevCell = cell;
        ScanQueryMatcher.MatchCode qcode = matcher.match(cell);
        qcode = optimize(qcode, cell);
        switch(qcode) {
            case INCLUDE:
            case INCLUDE_AND_SEEK_NEXT_ROW:
            case INCLUDE_AND_SEEK_NEXT_COL:
                Filter f = matcher.getFilter();
                if (f != null) {
                    cell = f.transformCell(cell);
                }
                this.countPerRow++;
                if (storeLimit > -1 && this.countPerRow > (storeLimit + storeOffset)) {
                    // do what SEEK_NEXT_ROW does.
                    if (!matcher.moreRowsMayExistAfter(cell)) {
                        // Do all cleanup except heap.close()
                        close(false);
                        return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
                    }
                    matcher.clearCurrentRow();
                    seekToNextRow(cell);
                    break LOOP;
                }
                // Add the cell to results only once storeOffset cells have been skipped;
                // also update the metric accordingly
                if (this.countPerRow > storeOffset) {
                    outResult.add(cell);
                    // Update local tracking information
                    count++;
                    int cellSize = CellUtil.estimatedSerializedSizeOf(cell);
                    totalBytesRead += cellSize;
                    // Update the progress of the scanner context
                    scannerContext.incrementSizeProgress(cellSize, CellUtil.estimatedHeapSizeOf(cell));
                    scannerContext.incrementBatchProgress(1);
                    if (matcher.isUserScan() && totalBytesRead > maxRowSize) {
                        throw new RowTooBigException("Max row size allowed: " + maxRowSize + ", but the row is bigger than that.");
                    }
                }
                if (qcode == ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW) {
                    if (!matcher.moreRowsMayExistAfter(cell)) {
                        // Do all cleanup except heap.close()
                        close(false);
                        return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
                    }
                    matcher.clearCurrentRow();
                    seekToNextRow(cell);
                } else if (qcode == ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL) {
                    seekAsDirection(matcher.getKeyForNextColumn(cell));
                } else {
                    this.heap.next();
                }
                if (scannerContext.checkBatchLimit(LimitScope.BETWEEN_CELLS)) {
                    break LOOP;
                }
                if (scannerContext.checkSizeLimit(LimitScope.BETWEEN_CELLS)) {
                    break LOOP;
                }
                continue;
            case DONE:
                // Optimization for Gets! If DONE, no more to get on this row, early exit!
                if (this.scan.isGetScan()) {
                    // Then no more to this row... exit.
                    // Do all cleanup except heap.close()
                    close(false);
                    return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
                }
                matcher.clearCurrentRow();
                return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
            case DONE_SCAN:
                // Do all cleanup except heap.close()
                close(false);
                return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
            case SEEK_NEXT_ROW:
                // This is just a relatively simple end-of-scan fix, to short-cut
                // us if there is an endKey in the scan.
                if (!matcher.moreRowsMayExistAfter(cell)) {
                    // Do all cleanup except heap.close()
                    close(false);
                    return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
                }
                matcher.clearCurrentRow();
                seekToNextRow(cell);
                break;
            case SEEK_NEXT_COL:
                seekAsDirection(matcher.getKeyForNextColumn(cell));
                break;
            case SKIP:
                this.heap.next();
                break;
            case SEEK_NEXT_USING_HINT:
                Cell nextKV = matcher.getNextKeyHint(cell);
                if (nextKV != null) {
                    seekAsDirection(nextKV);
                } else {
                    heap.next();
                }
                break;
            default:
                throw new RuntimeException("UNEXPECTED");
        }
    } while ((cell = this.heap.peek()) != null);
    if (count > 0) {
        return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
    }
    // No more keys
    // Do all cleanup except heap.close()
    close(false);
    return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
Also used : MatchCode(org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode) Filter(org.apache.hadoop.hbase.filter.Filter) CellComparator(org.apache.hadoop.hbase.CellComparator) Cell(org.apache.hadoop.hbase.Cell)
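
From the client side, the transformCell(...) hook invoked in the INCLUDE branches above is what lets a filter such as KeyOnlyFilter rewrite cells before they are added to outResult. A minimal client sketch follows, with standard org.apache.hadoop.hbase client imports assumed; the Configuration object conf and the table name "t1" are assumptions for illustration.

// Client-side sketch: KeyOnlyFilter relies on Filter#transformCell, which
// StoreScanner applies in its INCLUDE branches, to strip cell values so that
// only row/column keys come back. Table name "t1" is hypothetical.
Scan scan = new Scan();
scan.setFilter(new KeyOnlyFilter());
try (Connection conn = ConnectionFactory.createConnection(conf);
     Table table = conn.getTable(TableName.valueOf("t1"));
     ResultScanner scanner = table.getScanner(scan)) {
    for (Result result : scanner) {
        for (Cell cell : result.rawCells()) {
            // Values were stripped server-side by transformCell; only keys remain.
            System.out.println(Bytes.toStringBinary(CellUtil.cloneRow(cell)));
        }
    }
}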

Example 9 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.

the class TestSerialization method testCompareFilter.

@Test
public void testCompareFilter() throws Exception {
    Filter f = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    byte[] bytes = f.toByteArray();
    Filter ff = RowFilter.parseFrom(bytes);
    assertNotNull(ff);
}
Also used : RowFilter(org.apache.hadoop.hbase.filter.RowFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Test(org.junit.Test)
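
The toByteArray()/parseFrom(byte[]) contract the test exercises holds for every Filter implementation: each concrete filter exposes its own static parseFrom. A short sketch repeating the pattern with PrefixFilter (the prefix value "testRowOne" is illustrative, mirroring the test fixture above):

// Sketch: the same serialize/deserialize pattern with another filter type.
Filter pf = new PrefixFilter(Bytes.toBytes("testRowOne"));
byte[] pfBytes = pf.toByteArray();
Filter restored = PrefixFilter.parseFrom(pfBytes); // throws DeserializationException on bad input
assertNotNull(restored);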

Example 10 with Filter

use of org.apache.hadoop.hbase.filter.Filter in project hbase by apache.

the class TestServerSideScanMetricsFromClientSide method testRowsFilteredMetric.

public void testRowsFilteredMetric(Scan baseScan) throws Exception {
    testRowsFilteredMetric(baseScan, null, 0);
    // Row filter doesn't match any row key. All rows should be filtered
    Filter filter = new RowFilter(CompareOp.EQUAL, new BinaryComparator("xyz".getBytes()));
    testRowsFilteredMetric(baseScan, filter, ROWS.length);
    // Filter will return results containing only the first key. Number of entire rows filtered
    // should be 0.
    filter = new FirstKeyOnlyFilter();
    testRowsFilteredMetric(baseScan, filter, 0);
    // Column prefix will find some matching qualifier on each row. Number of entire rows filtered
    // should be 0
    filter = new ColumnPrefixFilter(QUALIFIERS[0]);
    testRowsFilteredMetric(baseScan, filter, 0);
    // Column prefix will NOT find any matching qualifier on any row. All rows should be filtered
    filter = new ColumnPrefixFilter("xyz".getBytes());
    testRowsFilteredMetric(baseScan, filter, ROWS.length);
    // Matching column value should exist in each row. No rows should be filtered.
    filter = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOp.EQUAL, VALUE);
    testRowsFilteredMetric(baseScan, filter, 0);
    // No matching column value should exist in any row. Filter all rows
    filter = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOp.NOT_EQUAL, VALUE);
    testRowsFilteredMetric(baseScan, filter, ROWS.length);
    List<Filter> filters = new ArrayList<>();
    filters.add(new RowFilter(CompareOp.EQUAL, new BinaryComparator(ROWS[0])));
    filters.add(new RowFilter(CompareOp.EQUAL, new BinaryComparator(ROWS[3])));
    int numberOfMatchingRowFilters = filters.size();
    filter = new FilterList(Operator.MUST_PASS_ONE, filters);
    testRowsFilteredMetric(baseScan, filter, ROWS.length - numberOfMatchingRowFilters);
    filters.clear();
    // Each of these exclude filters removes the cell it matches, leaving an empty cell
    // array in RegionScanner#nextInternal which should be interpreted as a row being filtered.
    for (int family = 0; family < FAMILIES.length; family++) {
        for (int qualifier = 0; qualifier < QUALIFIERS.length; qualifier++) {
            filters.add(new SingleColumnValueExcludeFilter(FAMILIES[family], QUALIFIERS[qualifier], CompareOp.EQUAL, VALUE));
        }
    }
    filter = new FilterList(Operator.MUST_PASS_ONE, filters);
    testRowsFilteredMetric(baseScan, filter, ROWS.length);
}
Also used : ColumnPrefixFilter(org.apache.hadoop.hbase.filter.ColumnPrefixFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) SingleColumnValueExcludeFilter(org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) ArrayList(java.util.ArrayList) FilterList(org.apache.hadoop.hbase.filter.FilterList) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)
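
The filtered-row counts this test asserts on surface client-side through ScanMetrics once metrics collection is enabled on the Scan. The sketch below is hedged: it assumes a client version where ResultScanner exposes getScanMetrics() (older clients read the serialized metrics back from a Scan attribute instead), and table is an already-open Table handle.

// Sketch: enable scan metrics and read back the rows-filtered counter.
// Assumes ResultScanner#getScanMetrics() is available in this client version.
Scan scan = new Scan();
scan.setScanMetricsEnabled(true);
scan.setFilter(new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("xyz"))));
try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result r : scanner) {
        // drain the scanner so every region reports its metrics
    }
    ScanMetrics metrics = scanner.getScanMetrics();
    System.out.println("rows filtered: " + metrics.countOfRowsFiltered.get());
}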

Aggregations

Filter (org.apache.hadoop.hbase.filter.Filter)172 Test (org.junit.Test)96 Scan (org.apache.hadoop.hbase.client.Scan)94 BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest)77 SkipScanFilter (org.apache.phoenix.filter.SkipScanFilter)76 RowKeyComparisonFilter (org.apache.phoenix.filter.RowKeyComparisonFilter)74 SingleKeyValueComparisonFilter (org.apache.phoenix.filter.SingleKeyValueComparisonFilter)45 TestUtil.rowKeyFilter (org.apache.phoenix.util.TestUtil.rowKeyFilter)45 RowFilter (org.apache.hadoop.hbase.filter.RowFilter)40 FilterList (org.apache.hadoop.hbase.filter.FilterList)39 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)37 TestUtil.multiEncodedKVFilter (org.apache.phoenix.util.TestUtil.multiEncodedKVFilter)33 TestUtil.singleKVFilter (org.apache.phoenix.util.TestUtil.singleKVFilter)33 PhoenixPreparedStatement (org.apache.phoenix.jdbc.PhoenixPreparedStatement)31 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)27 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)24 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)23 CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter)22 ArrayList (java.util.ArrayList)19 RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator)18