Example 11 with PageFilter

Usage of org.apache.hadoop.hbase.filter.PageFilter in project phoenix by apache.

The explain method of the ExplainTable class builds the textual EXPLAIN plan for a Phoenix query; a PageFilter attached to the underlying HBase Scan is reported as a server-side row limit step:

protected void explain(String prefix, List<String> planSteps) {
    StringBuilder buf = new StringBuilder(prefix);
    ScanRanges scanRanges = context.getScanRanges();
    Scan scan = context.getScan();
    if (scan.getConsistency() != Consistency.STRONG) {
        buf.append("TIMELINE-CONSISTENCY ");
    }
    if (hint.hasHint(Hint.SMALL)) {
        buf.append(Hint.SMALL).append(" ");
    }
    if (OrderBy.REV_ROW_KEY_ORDER_BY.equals(orderBy)) {
        buf.append("REVERSE ");
    }
    if (scanRanges.isEverything()) {
        buf.append("FULL SCAN ");
    } else {
        explainSkipScan(buf);
    }
    buf.append("OVER ").append(tableRef.getTable().getPhysicalName().getString());
    if (!scanRanges.isPointLookup()) {
        appendKeyRanges(buf);
    }
    planSteps.add(buf.toString());
    if (context.getScan() != null && tableRef.getTable().getRowTimestampColPos() != -1) {
        TimeRange range = context.getScan().getTimeRange();
        planSteps.add("    ROW TIMESTAMP FILTER [" + range.getMin() + ", " + range.getMax() + ")");
    }
    // Walk the scan's filter chain once, remembering each recognized
    // filter type so it can be reported in the plan steps below.
    PageFilter pageFilter = null;
    FirstKeyOnlyFilter firstKeyOnlyFilter = null;
    BooleanExpressionFilter whereFilter = null;
    DistinctPrefixFilter distinctFilter = null;
    Iterator<Filter> filterIterator = ScanUtil.getFilterIterator(scan);
    if (filterIterator.hasNext()) {
        do {
            Filter filter = filterIterator.next();
            if (filter instanceof FirstKeyOnlyFilter) {
                firstKeyOnlyFilter = (FirstKeyOnlyFilter) filter;
            } else if (filter instanceof PageFilter) {
                pageFilter = (PageFilter) filter;
            } else if (filter instanceof BooleanExpressionFilter) {
                whereFilter = (BooleanExpressionFilter) filter;
            } else if (filter instanceof DistinctPrefixFilter) {
                distinctFilter = (DistinctPrefixFilter) filter;
            }
        } while (filterIterator.hasNext());
    }
    // Report server-side WHERE and first-key-only filtering.
    if (whereFilter != null) {
        planSteps.add("    SERVER FILTER BY " + (firstKeyOnlyFilter == null ? "" : "FIRST KEY ONLY AND ") + whereFilter.toString());
    } else if (firstKeyOnlyFilter != null) {
        planSteps.add("    SERVER FILTER BY FIRST KEY ONLY");
    }
    if (distinctFilter != null) {
        planSteps.add("    SERVER DISTINCT PREFIX FILTER OVER " + groupBy.getExpressions().toString());
    }
    if (!orderBy.getOrderByExpressions().isEmpty() && groupBy.isEmpty()) {
        // with GROUP BY, sort happens client-side
        planSteps.add("    SERVER" + (limit == null ? "" : " TOP " + limit + " ROW" + (limit == 1 ? "" : "S")) + " SORTED BY " + orderBy.getOrderByExpressions().toString());
    } else {
        if (offset != null) {
            planSteps.add("    SERVER OFFSET " + offset);
        }
        // A PageFilter on the scan surfaces as a server-side row limit.
        if (pageFilter != null) {
            planSteps.add("    SERVER " + pageFilter.getPageSize() + " ROW LIMIT");
        }
    }
    // A limit pushed into the server-side GROUP BY travels as a scan attribute.
    Integer groupByLimit = null;
    byte[] groupByLimitBytes = scan.getAttribute(BaseScannerRegionObserver.GROUP_BY_LIMIT);
    if (groupByLimitBytes != null) {
        groupByLimit = (Integer) PInteger.INSTANCE.toObject(groupByLimitBytes);
    }
    groupBy.explain(planSteps, groupByLimit);
    if (scan.getAttribute(BaseScannerRegionObserver.SPECIFIC_ARRAY_INDEX) != null) {
        planSteps.add("    SERVER ARRAY ELEMENT PROJECTION");
    }
}
Also used : Scan (org.apache.hadoop.hbase.client.Scan), Filter (org.apache.hadoop.hbase.filter.Filter), FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter), PageFilter (org.apache.hadoop.hbase.filter.PageFilter), TimeRange (org.apache.hadoop.hbase.io.TimeRange), BooleanExpressionFilter (org.apache.phoenix.filter.BooleanExpressionFilter), DistinctPrefixFilter (org.apache.phoenix.filter.DistinctPrefixFilter), ScanRanges (org.apache.phoenix.compile.ScanRanges), PInteger (org.apache.phoenix.schema.types.PInteger)
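
Outside Phoenix, a PageFilter is usually set directly on a Scan to cap how many rows each region server returns. Below is a minimal standalone sketch (the table name "t1" and the page size of 10 are hypothetical, not taken from the Phoenix code above). Note that PageFilter applies its limit per region server, so the client can still receive more rows than the page size and has to enforce the cap itself:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.PageFilter;

public class PageFilterSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             // "t1" is a hypothetical table name
             Table table = conn.getTable(TableName.valueOf("t1"))) {
            Scan scan = new Scan();
            // Each region server stops after returning 10 rows; the total
            // across servers can exceed 10, so the client re-checks below.
            scan.setFilter(new PageFilter(10L));
            int count = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    if (++count > 10) {
                        break; // client-side enforcement of the page size
                    }
                    System.out.println(result);
                }
            }
        }
    }
}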

Aggregations

PageFilter (org.apache.hadoop.hbase.filter.PageFilter): 11
Scan (org.apache.hadoop.hbase.client.Scan): 9
FilterList (org.apache.hadoop.hbase.filter.FilterList): 4
IOException (java.io.IOException): 3
Result (org.apache.hadoop.hbase.client.Result): 3
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 3
Filter (org.apache.hadoop.hbase.filter.Filter): 3
ByteArrayByteIterator (com.yahoo.ycsb.ByteArrayByteIterator): 2
ByteIterator (com.yahoo.ycsb.ByteIterator): 2
Cell (org.apache.hadoop.hbase.Cell): 2
KeyValue (org.apache.hadoop.hbase.KeyValue): 2
Table (org.apache.hadoop.hbase.client.Table): 2
FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter): 2
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 2
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 2
DistinctPrefixFilter (org.apache.phoenix.filter.DistinctPrefixFilter): 2
Test (org.junit.Test): 2
ArrayList (java.util.ArrayList): 1
HashMap (java.util.HashMap): 1
LinkedList (java.util.LinkedList): 1
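
The counts above show PageFilter frequently appearing alongside FilterList and FirstKeyOnlyFilter. A hedged sketch of that combination follows; the helper method and page size are assumptions for illustration, not code from any of the counted projects:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PageFilter;

public class PagedKeyOnlyScanSketch {
    // Build a Scan that returns at most pageSize rows per region server,
    // carrying only the first cell of each row.
    static Scan pagedKeyOnlyScan(long pageSize) {
        // MUST_PASS_ALL returns a row only if every filter accepts it:
        // FirstKeyOnlyFilter keeps just the first cell per row (cheap for
        // row counting or existence checks), while PageFilter caps the
        // number of rows each region server returns.
        FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL,
                new FirstKeyOnlyFilter(),
                new PageFilter(pageSize));
        Scan scan = new Scan();
        scan.setFilter(filters);
        return scan;
    }
}

This mirrors the count-style scans suggested by the aggregation: first-key-only projection keeps the scan light, and the paging cap bounds the work done per server.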