Search in sources:

Example 26 with FirstKeyOnlyFilter

use of org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter in project phoenix by apache.

In the class MetaDataEndpointImpl, the method buildDeletedTable:

/**
 * Checks whether the metadata row for {@code key} carries a family-delete marker
 * visible at {@code clientTimeStamp}. A raw scan is used so delete markers are
 * returned, and a FirstKeyOnlyFilter limits the scan to the first cell of the row.
 * When a DeleteFamily marker is found, a deleted-table marker stamped with the
 * deletion time is cached under {@code cacheKey} and returned; otherwise null.
 *
 * @param key             row key of the table's metadata row
 * @param cacheKey        cache key under which to store the deleted-table marker
 * @param region          region hosting the metadata row
 * @param clientTimeStamp client timestamp bounding the scan; LATEST_TIMESTAMP
 *                        short-circuits to null (no historical lookup needed)
 * @return the cached deleted-table marker, or null if the row was not deleted
 * @throws IOException if the region scan fails
 */
private PTable buildDeletedTable(byte[] key, ImmutableBytesPtr cacheKey, Region region, long clientTimeStamp) throws IOException {
    // At LATEST_TIMESTAMP there is no point looking for an earlier deletion.
    if (clientTimeStamp == HConstants.LATEST_TIMESTAMP) {
        return null;
    }
    Scan rowScan = MetaDataUtil.newTableRowsScan(key, clientTimeStamp, HConstants.LATEST_TIMESTAMP);
    rowScan.setFilter(new FirstKeyOnlyFilter());
    // Raw mode makes delete markers visible; a normal scan would hide them.
    rowScan.setRaw(true);
    List<Cell> firstCells = Lists.<Cell>newArrayList();
    try (RegionScanner regionScanner = region.getScanner(rowScan)) {
        regionScanner.next(firstCells);
    }
    for (Cell cell : firstCells) {
        if (Type.codeToType(cell.getTypeByte()) == Type.DeleteFamily) {
            // Row was deleted
            Cache<ImmutableBytesPtr, PMetaDataEntity> metaDataCache = GlobalCache.getInstance(this.env).getMetaDataCache();
            PTable deletedMarker = newDeletedTableMarker(cell.getTimestamp());
            metaDataCache.put(cacheKey, deletedMarker);
            return deletedMarker;
        }
    }
    return null;
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) PMetaDataEntity(org.apache.phoenix.schema.PMetaDataEntity) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) Scan(org.apache.hadoop.hbase.client.Scan) Type(org.apache.hadoop.hbase.KeyValue.Type) Cell(org.apache.hadoop.hbase.Cell) PTable(org.apache.phoenix.schema.PTable)

Example 27 with FirstKeyOnlyFilter

use of org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter in project phoenix by apache.

In the class ExplainTable, the method explain:

/**
 * Appends a human-readable description of this table scan to {@code planSteps}.
 * The first step summarizes the scan shape (consistency, hints, direction,
 * full/skip scan, table, key ranges); subsequent steps describe server-side
 * filters, ordering, limits/offsets, grouping, and projections.
 *
 * @param prefix    text prepended to the first plan step (e.g. "CLIENT ")
 * @param planSteps mutable list the explain output lines are appended to
 */
protected void explain(String prefix, List<String> planSteps) {
    StringBuilder buf = new StringBuilder(prefix);
    ScanRanges scanRanges = context.getScanRanges();
    Scan scan = context.getScan();
    if (scan.getConsistency() != Consistency.STRONG) {
        buf.append("TIMELINE-CONSISTENCY ");
    }
    if (hint.hasHint(Hint.SMALL)) {
        buf.append(Hint.SMALL).append(" ");
    }
    if (OrderBy.REV_ROW_KEY_ORDER_BY.equals(orderBy)) {
        buf.append("REVERSE ");
    }
    if (scanRanges.isEverything()) {
        buf.append("FULL SCAN ");
    } else {
        explainSkipScan(buf);
    }
    buf.append("OVER ").append(tableRef.getTable().getPhysicalName().getString());
    if (!scanRanges.isPointLookup()) {
        appendKeyRanges(buf);
    }
    planSteps.add(buf.toString());
    // Note: scan was already dereferenced above, so it cannot be null here;
    // the previous redundant null re-check of context.getScan() was dropped.
    if (tableRef.getTable().getRowTimestampColPos() != -1) {
        TimeRange range = scan.getTimeRange();
        planSteps.add("    ROW TIMESTAMP FILTER [" + range.getMin() + ", " + range.getMax() + ")");
    }
    // Walk the scan's filters once, remembering the last instance of each
    // filter type we know how to describe.
    PageFilter pageFilter = null;
    FirstKeyOnlyFilter firstKeyOnlyFilter = null;
    BooleanExpressionFilter whereFilter = null;
    DistinctPrefixFilter distinctFilter = null;
    Iterator<Filter> filterIterator = ScanUtil.getFilterIterator(scan);
    while (filterIterator.hasNext()) {
        Filter filter = filterIterator.next();
        if (filter instanceof FirstKeyOnlyFilter) {
            firstKeyOnlyFilter = (FirstKeyOnlyFilter) filter;
        } else if (filter instanceof PageFilter) {
            pageFilter = (PageFilter) filter;
        } else if (filter instanceof BooleanExpressionFilter) {
            whereFilter = (BooleanExpressionFilter) filter;
        } else if (filter instanceof DistinctPrefixFilter) {
            distinctFilter = (DistinctPrefixFilter) filter;
        }
    }
    if (whereFilter != null) {
        planSteps.add("    SERVER FILTER BY " + (firstKeyOnlyFilter == null ? "" : "FIRST KEY ONLY AND ") + whereFilter.toString());
    } else if (firstKeyOnlyFilter != null) {
        planSteps.add("    SERVER FILTER BY FIRST KEY ONLY");
    }
    if (distinctFilter != null) {
        planSteps.add("    SERVER DISTINCT PREFIX FILTER OVER " + groupBy.getExpressions().toString());
    }
    if (!orderBy.getOrderByExpressions().isEmpty() && groupBy.isEmpty()) {
        // with GROUP BY, sort happens client-side
        planSteps.add("    SERVER" + (limit == null ? "" : " TOP " + limit + " ROW" + (limit == 1 ? "" : "S")) + " SORTED BY " + orderBy.getOrderByExpressions().toString());
    } else {
        if (offset != null) {
            planSteps.add("    SERVER OFFSET " + offset);
        }
        if (pageFilter != null) {
            planSteps.add("    SERVER " + pageFilter.getPageSize() + " ROW LIMIT");
        }
    }
    Integer groupByLimit = null;
    byte[] groupByLimitBytes = scan.getAttribute(BaseScannerRegionObserver.GROUP_BY_LIMIT);
    if (groupByLimitBytes != null) {
        groupByLimit = (Integer) PInteger.INSTANCE.toObject(groupByLimitBytes);
    }
    groupBy.explain(planSteps, groupByLimit);
    if (scan.getAttribute(BaseScannerRegionObserver.SPECIFIC_ARRAY_INDEX) != null) {
        planSteps.add("    SERVER ARRAY ELEMENT PROJECTION");
    }
}
Also used : PInteger(org.apache.phoenix.schema.types.PInteger) TimeRange(org.apache.hadoop.hbase.io.TimeRange) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) BooleanExpressionFilter(org.apache.phoenix.filter.BooleanExpressionFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) DistinctPrefixFilter(org.apache.phoenix.filter.DistinctPrefixFilter) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) DistinctPrefixFilter(org.apache.phoenix.filter.DistinctPrefixFilter) ScanRanges(org.apache.phoenix.compile.ScanRanges) BooleanExpressionFilter(org.apache.phoenix.filter.BooleanExpressionFilter)

Example 28 with FirstKeyOnlyFilter

use of org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter in project cdap by caskdata.

In the class HBaseMetadataTable, the method scanTopics:

/**
   * Scans the HBase table to get a list of {@link TopicId}.
   */
/**
 * Scans the HBase table to get a list of {@link TopicId}.
 *
 * @param scanBuilder builder for the scan; a FirstKeyOnlyFilter and caching
 *                    size are applied before building
 * @return topic ids whose stored metadata reports that the topic exists
 * @throws IOException translated through the exception handler on scan failure
 */
private List<TopicId> scanTopics(ScanBuilder scanBuilder) throws IOException {
    // Only row keys and the metadata column are needed; FirstKeyOnlyFilter
    // keeps the scan cheap, and caching batches rows per RPC.
    Scan scan = scanBuilder.setFilter(new FirstKeyOnlyFilter()).setCaching(scanCacheRows).build();
    List<TopicId> topics = new ArrayList<>();
    try (ResultScanner scanner = hTable.getScanner(scan)) {
        for (Result row : scanner) {
            TopicId topicId = MessagingUtils.toTopicId(row.getRow());
            byte[] rawProperties = row.getValue(columnFamily, COL);
            Map<String, String> properties = GSON.fromJson(Bytes.toString(rawProperties), MAP_TYPE);
            // Skip tombstoned topics: only report those whose metadata exists.
            if (new TopicMetadata(topicId, properties).exists()) {
                topics.add(topicId);
            }
        }
        return topics;
    } catch (IOException e) {
        throw exceptionHandler.handle(e);
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) TopicId(co.cask.cdap.proto.id.TopicId) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result) TopicMetadata(co.cask.cdap.messaging.TopicMetadata)

Aggregations

FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)28 Scan (org.apache.hadoop.hbase.client.Scan)17 Cell (org.apache.hadoop.hbase.Cell)9 ArrayList (java.util.ArrayList)8 Test (org.junit.Test)8 IOException (java.io.IOException)7 Result (org.apache.hadoop.hbase.client.Result)7 RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner)6 Filter (org.apache.hadoop.hbase.filter.Filter)5 RowFilter (org.apache.hadoop.hbase.filter.RowFilter)4 Connection (java.sql.Connection)3 Put (org.apache.hadoop.hbase.client.Put)3 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)3 CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter)3 FilterList (org.apache.hadoop.hbase.filter.FilterList)3 BloomFilter (org.apache.hive.common.util.BloomFilter)3 ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr)3 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)3 PMetaDataEntity (org.apache.phoenix.schema.PMetaDataEntity)3 List (java.util.List)2