Search in sources:

Example 31 with RowFilter

use of org.apache.hadoop.hbase.filter.RowFilter in project hbase by apache.

the class TestServerSideScanMetricsFromClientSide method testRowsSeenMetric.

/**
 * Exercises the ROWS_SCANNED metric against the supplied base scan: a
 * full-table scan, a sweep of growing row ranges, a sweep of shrinking row
 * ranges, and several filters that accept or reject every row. In every case
 * the server must report each row it examined, whether or not a filter
 * ultimately excluded it.
 */
private void testRowsSeenMetric(Scan baseScan) throws Exception {
    // Full-table scan: every row should be reported as scanned.
    Scan scan = new Scan(baseScan);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, NUM_ROWS);
    // Anchor the start row and grow the exclusive stop row one position at a time.
    for (int stop = 1; stop < ROWS.length; stop++) {
        scan = new Scan(baseScan);
        scan.withStartRow(ROWS[0]);
        scan.withStopRow(ROWS[stop]);
        testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, stop);
    }
    // Anchor the stop row at the last row and walk the start row backwards.
    for (int start = ROWS.length - 2; start >= 0; start--) {
        scan = new Scan(baseScan);
        scan.withStartRow(ROWS[start]);
        scan.withStopRow(ROWS[ROWS.length - 1]);
        testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length - 1 - start);
    }
    // A row filter that matches nothing: the server still scans every row.
    Filter rowFilter = new RowFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("xyz")));
    scan = new Scan(baseScan);
    scan.setFilter(rowFilter);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
    // A value filter that accepts every row: all rows scanned.
    SingleColumnValueFilter scvf = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOperator.EQUAL, VALUE);
    scan = new Scan(baseScan);
    scan.setFilter(scvf);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
    // A value filter that rejects every row: all rows are still scanned.
    scvf = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOperator.NOT_EQUAL, VALUE);
    scan = new Scan(baseScan);
    scan.setFilter(scvf);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
}
Also used : RowFilter(org.apache.hadoop.hbase.filter.RowFilter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) ColumnPrefixFilter(org.apache.hadoop.hbase.filter.ColumnPrefixFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) SingleColumnValueExcludeFilter(org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Scan(org.apache.hadoop.hbase.client.Scan) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)

Example 32 with RowFilter

use of org.apache.hadoop.hbase.filter.RowFilter in project hbase by apache.

the class QuotaTableUtil method makeFilter.

/**
 * Converts a {@code QuotaFilter} into a serializable {@code FilterList}
 * suitable for scanning the quota table.
 *
 * @param filter the user-supplied quota filter criteria
 * @return a MUST_PASS_ALL filter list (empty when no criteria were given)
 */
public static Filter makeFilter(final QuotaFilter filter) {
    final FilterList combined = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    if (StringUtils.isNotEmpty(filter.getUserFilter())) {
        // Any of the user-scoped alternatives may match.
        final FilterList anyUserMatch = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        boolean scoped = false;
        if (StringUtils.isNotEmpty(filter.getNamespaceFilter())) {
            // User row key AND namespace-settings qualifier must both match.
            final FilterList userAndNamespace = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            userAndNamespace.addFilter(new RowFilter(CompareOperator.EQUAL,
                new RegexStringComparator(getUserRowKeyRegex(filter.getUserFilter()), 0)));
            userAndNamespace.addFilter(new QualifierFilter(CompareOperator.EQUAL,
                new RegexStringComparator(getSettingsQualifierRegexForUserNamespace(filter.getNamespaceFilter()), 0)));
            anyUserMatch.addFilter(userAndNamespace);
            scoped = true;
        }
        if (StringUtils.isNotEmpty(filter.getTableFilter())) {
            // User row key AND table-settings qualifier must both match.
            final FilterList userAndTable = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            userAndTable.addFilter(new RowFilter(CompareOperator.EQUAL,
                new RegexStringComparator(getUserRowKeyRegex(filter.getUserFilter()), 0)));
            userAndTable.addFilter(new QualifierFilter(CompareOperator.EQUAL,
                new RegexStringComparator(getSettingsQualifierRegexForUserTable(filter.getTableFilter()), 0)));
            anyUserMatch.addFilter(userAndTable);
            scoped = true;
        }
        if (!scoped) {
            // No namespace/table scoping: match the user row key alone.
            anyUserMatch.addFilter(new RowFilter(CompareOperator.EQUAL,
                new RegexStringComparator(getUserRowKeyRegex(filter.getUserFilter()), 0)));
        }
        combined.addFilter(anyUserMatch);
    } else if (StringUtils.isNotEmpty(filter.getTableFilter())) {
        combined.addFilter(new RowFilter(CompareOperator.EQUAL,
            new RegexStringComparator(getTableRowKeyRegex(filter.getTableFilter()), 0)));
    } else if (StringUtils.isNotEmpty(filter.getNamespaceFilter())) {
        combined.addFilter(new RowFilter(CompareOperator.EQUAL,
            new RegexStringComparator(getNamespaceRowKeyRegex(filter.getNamespaceFilter()), 0)));
    } else if (StringUtils.isNotEmpty(filter.getRegionServerFilter())) {
        combined.addFilter(new RowFilter(CompareOperator.EQUAL,
            new RegexStringComparator(getRegionServerRowKeyRegex(filter.getRegionServerFilter()), 0)));
    }
    return combined;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Example 33 with RowFilter

use of org.apache.hadoop.hbase.filter.RowFilter in project hbase by apache.

the class ExportUtils method getScanFromCommandLine.

/**
 * Builds the export {@code Scan} from positional command-line arguments and
 * configuration properties.
 *
 * @param conf configuration carrying row range, column families, visibility
 *             labels, raw-scan flag, and batching/caching overrides
 * @param args positional arguments: args[2] = max versions (default 1),
 *             args[3] = start time, args[4] = end time
 * @return the fully configured scan
 * @throws IOException if the time range is invalid
 */
static Scan getScanFromCommandLine(Configuration conf, String[] args) throws IOException {
    final Scan scan = new Scan();
    // args[2]: number of versions to read; default to the latest only.
    final int maxVersions = args.length > 2 ? Integer.parseInt(args[2]) : 1;
    scan.readVersions(maxVersions);
    // args[3]/args[4]: time range bounds; default to all of time.
    final long startTime = args.length > 3 ? Long.parseLong(args[3]) : 0L;
    final long endTime = args.length > 4 ? Long.parseLong(args[4]) : Long.MAX_VALUE;
    scan.setTimeRange(startTime, endTime);
    // Exports stream each block once; caching them would only pollute the cache.
    scan.setCacheBlocks(false);
    // Optional start/stop rows from the TableInputFormat configuration.
    final String startRow = conf.get(TableInputFormat.SCAN_ROW_START);
    if (startRow != null) {
        scan.withStartRow(Bytes.toBytesBinary(startRow));
    }
    final String stopRow = conf.get(TableInputFormat.SCAN_ROW_STOP);
    if (stopRow != null) {
        scan.withStopRow(Bytes.toBytesBinary(stopRow));
    }
    // Raw scans include delete markers and uncollected deleted cells.
    final boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
    if (raw) {
        scan.setRaw(true);
    }
    for (String family : conf.getTrimmedStrings(TableInputFormat.SCAN_COLUMN_FAMILY)) {
        scan.addFamily(Bytes.toBytes(family));
    }
    // Optional RowFilter / PrefixFilter parsed from the arguments.
    final Filter exportFilter = getExportFilter(args);
    if (exportFilter != null) {
        LOG.info("Setting Scan Filter for Export.");
        scan.setFilter(exportFilter);
    }
    List<String> labels = null;
    if (conf.get(EXPORT_VISIBILITY_LABELS) != null) {
        labels = Arrays.asList(conf.getStrings(EXPORT_VISIBILITY_LABELS));
        if (!labels.isEmpty()) {
            scan.setAuthorizations(new Authorizations(labels));
        }
    }
    // Batching may conflict with some filters, so tolerate the failure.
    final int batching = conf.getInt(EXPORT_BATCHING, -1);
    if (batching != -1) {
        try {
            scan.setBatch(batching);
        } catch (IncompatibleFilterException e) {
            LOG.error("Batching could not be set", e);
        }
    }
    final int caching = conf.getInt(EXPORT_CACHING, 100);
    if (caching != -1) {
        try {
            scan.setCaching(caching);
        } catch (IncompatibleFilterException e) {
            LOG.error("Caching could not be set", e);
        }
    }
    LOG.info("versions=" + maxVersions + ", starttime=" + startTime + ", endtime=" + endTime + ", keepDeletedCells=" + raw + ", visibility labels=" + labels);
    return scan;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) IncompatibleFilterException(org.apache.hadoop.hbase.filter.IncompatibleFilterException)

Example 34 with RowFilter

use of org.apache.hadoop.hbase.filter.RowFilter in project hbase by apache.

the class CellCounter method getConfiguredScanForJob.

/**
 * Creates the Scan used by the CellCounter job, layering the command-line
 * row filter and time range on top of whatever TableInputFormat configured.
 *
 * @param conf job configuration, consulted for TableInputFormat scan settings
 * @param args command-line arguments carrying the optional filter/time range
 * @return the configured scan
 * @throws IOException if the time range cannot be applied
 */
private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
    // Start from any scan properties already set via TableInputFormat.
    final Scan scan = TableInputFormat.createScanFromConfiguration(conf);
    if (conf.get(TableInputFormat.SCAN_MAXVERSIONS) == null) {
        // No explicit version limit: count cells across all versions.
        scan.readVersions(Integer.MAX_VALUE);
    }
    scan.setCacheBlocks(false);
    // Optional RowFilter / PrefixFilter parsed from the arguments.
    final Filter rowFilter = getRowFilter(args);
    if (rowFilter != null) {
        LOG.info("Setting Row Filter for counter.");
        scan.setFilter(rowFilter);
    }
    // Optional [min, max) time range parsed from the arguments.
    final long[] timeRange = getTimeRange(args);
    if (timeRange != null) {
        LOG.info("Setting TimeRange for counter.");
        scan.setTimeRange(timeRange[0], timeRange[1]);
    }
    return scan;
}
Also used : RowFilter(org.apache.hadoop.hbase.filter.RowFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan)

Example 35 with RowFilter

use of org.apache.hadoop.hbase.filter.RowFilter in project hbase by apache.

the class CellCounter method getRowFilter.

/**
 * Parses the optional row-filter argument into a {@code Filter}.
 * <p>
 * A criteria string beginning with {@code ^} is treated as a regular
 * expression applied to the row key (the {@code ^} marker itself is
 * stripped); any other string is treated as a binary row-key prefix.
 *
 * @param args command-line arguments; args[3], if present, is the criteria
 * @return the parsed filter, or {@code null} when no criteria was supplied
 */
private static Filter getRowFilter(String[] args) {
    final String filterCriteria = (args.length > 3) ? args[3] : null;
    if (filterCriteria == null) {
        return null;
    }
    if (filterCriteria.startsWith("^")) {
        // Drop the leading '^' marker; the remainder is the regex itself.
        final String regexPattern = filterCriteria.substring(1);
        return new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(regexPattern));
    }
    return new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter)

Aggregations

RowFilter (org.apache.hadoop.hbase.filter.RowFilter)39 Filter (org.apache.hadoop.hbase.filter.Filter)31 RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator)20 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)17 ArrayList (java.util.ArrayList)15 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)13 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)12 CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter)11 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)11 Scan (org.apache.hadoop.hbase.client.Scan)10 Test (org.junit.Test)10 Result (org.apache.hadoop.hbase.client.Result)9 BloomFilter (org.apache.hive.common.util.BloomFilter)8 SchemaPath (org.apache.drill.common.expression.SchemaPath)5 HBaseScanSpec (org.apache.drill.exec.store.hbase.HBaseScanSpec)5 ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable)5 CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)5 FilterList (org.apache.hadoop.hbase.filter.FilterList)5 NullComparator (org.apache.hadoop.hbase.filter.NullComparator)5 QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter)4