Example 6 with ScanBuilder

use of io.cdap.cdap.data2.util.hbase.ScanBuilder in project cdap by caskdata.

the class ShardedHBaseQueueStrategy method createHBaseScanner.

private ResultScanner createHBaseScanner(ConsumerConfig consumerConfig, HTable hTable, Scan scan, int numRows) throws IOException {
    // Modify the scan with sharded key prefix
    ScanBuilder shardedScan = tableUtil.buildScan(scan);
    // roughly divide by the number of buckets, but over-provision a little so an inexact estimate does not cost another RPC
    int caching = (int) (1.1 * numRows / distributorBuckets);
    shardedScan.setCaching(caching);
    if (scan.getStartRow().length > 0) {
        byte[] rowKey = getShardedKey(consumerConfig, consumerConfig.getInstanceId(), scan.getStartRow());
        shardedScan.setStartRow(rowKey);
    }
    if (scan.getStopRow().length > 0) {
        byte[] rowKey = getShardedKey(consumerConfig, consumerConfig.getInstanceId(), scan.getStopRow());
        shardedScan.setStopRow(rowKey);
    }
    return DistributedScanner.create(hTable, shardedScan.build(), rowKeyDistributor, scansExecutor);
}
Also used : ScanBuilder(co.cask.cdap.data2.util.hbase.ScanBuilder)
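
The ResultScanner returned by DistributedScanner.create merges the per-bucket scans back into one stream, so a caller inside this class can consume it like any plain HBase scanner. A minimal consumption sketch (the numRows value and the column family/qualifier are hypothetical placeholders, not from the CDAP queue code):

try (ResultScanner scanner = createHBaseScanner(consumerConfig, hTable, scan, 100)) {
    // iterate the merged, sharded results; each Result is one queue row
    for (Result result : scanner) {
        byte[] value = result.getValue(Bytes.toBytes("q"), Bytes.toBytes("d"));  // placeholder family/qualifier
        // process the queue entry here
    }
}

The caching heuristic divides numRows across distributorBuckets because each bucket scan caches independently; the extra 10% appears intended to avoid an additional round trip when the rows are not spread evenly across the buckets.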

Example 7 with ScanBuilder

use of io.cdap.cdap.data2.util.hbase.ScanBuilder in project cdap by caskdata.

the class HBaseMetadataTable method listTopics.

@Override
public List<TopicId> listTopics(NamespaceId namespaceId) throws IOException {
    byte[] startRow = MessagingUtils.topicScanKey(namespaceId);
    ScanBuilder scanBuilder = tableUtil.buildScan().setStartRow(startRow).setStopRow(Bytes.stopKeyForPrefix(startRow));
    return scanTopics(scanBuilder);
}
Also used : ScanBuilder(co.cask.cdap.data2.util.hbase.ScanBuilder)
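
Bytes.stopKeyForPrefix derives an exclusive upper bound from the namespace prefix, so the start/stop pair scans exactly the rows that begin with that prefix. An illustrative sketch of what such a computation typically does (not the actual CDAP implementation):

import java.util.Arrays;

// Increment the rightmost byte that is not 0xFF and truncate the key there; a prefix of
// all 0xFF bytes has no finite upper bound, so return null and scan to the end of the table.
private static byte[] stopKeyForPrefixSketch(byte[] prefix) {
    for (int i = prefix.length - 1; i >= 0; i--) {
        if ((prefix[i] & 0xff) != 0xff) {
            byte[] stopKey = Arrays.copyOf(prefix, i + 1);
            stopKey[i]++;
            return stopKey;
        }
    }
    return null;
}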

Example 8 with ScanBuilder

use of io.cdap.cdap.data2.util.hbase.ScanBuilder in project cdap by caskdata.

the class HBaseMetricsTable method scan.

@Override
public Scanner scan(@Nullable byte[] startRow, @Nullable byte[] stopRow, @Nullable FuzzyRowFilter filter) {
    ScanBuilder scanBuilder = tableUtil.buildScan();
    configureRangeScan(scanBuilder, startRow, stopRow, filter);
    try {
        ResultScanner resultScanner = getScanner(scanBuilder);
        return new HBaseScanner(resultScanner, columnFamily, rowKeyDistributor);
    } catch (IOException e) {
        throw new DataSetException("Scan failed on table " + tableId, e);
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) DataSetException(co.cask.cdap.api.dataset.DataSetException) ScanBuilder(co.cask.cdap.data2.util.hbase.ScanBuilder) IOException(java.io.IOException)
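
configureRangeScan and getScanner are not shown in this snippet. A hypothetical sketch of the range-configuration step, assuming ScanBuilder mirrors the usual Scan setters and that a createFuzzyRowFilter translation helper exists (both are assumptions, not the actual CDAP code):

// Hypothetical helper: apply optional range bounds and a fuzzy-row filter to the builder.
private void configureRangeScanSketch(ScanBuilder scanBuilder, @Nullable byte[] startRow,
                                      @Nullable byte[] stopRow, @Nullable FuzzyRowFilter filter) {
    if (startRow != null) {
        scanBuilder.setStartRow(startRow);
    }
    if (stopRow != null) {
        scanBuilder.setStopRow(stopRow);
    }
    if (filter != null) {
        // translate the dataset-level FuzzyRowFilter into an HBase filter (assumed helper)
        scanBuilder.setFilter(createFuzzyRowFilter(filter));
    }
}

The real implementation likely also has to account for the rowKeyDistributor, since the HBaseScanner constructed above is given the distributor to unwrap salted row keys.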

Example 9 with ScanBuilder

use of io.cdap.cdap.data2.util.hbase.ScanBuilder in project cdap by caskdata.

the class HBaseTable method scanPersisted.

@ReadOnly
@Override
protected Scanner scanPersisted(co.cask.cdap.api.dataset.table.Scan scan) throws Exception {
    ScanBuilder hScan = tableUtil.buildScan();
    hScan.addFamily(columnFamily);
    // TODO (CDAP-11954): use common utility method to extract these configs
    if (scan.getProperties().containsKey(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)) {
        hScan.setCacheBlocks(Boolean.valueOf(scan.getProperties().get(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)));
    } else if (arguments.containsKey(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)) {
        hScan.setCacheBlocks(Boolean.valueOf(arguments.get(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)));
    } else if (properties.containsKey(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)) {
        hScan.setCacheBlocks(Boolean.valueOf(properties.get(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)));
    } else {
        // NOTE: by default we assume the scanner is used in a MapReduce job, hence block caching is disabled
        hScan.setCacheBlocks(false);
    }
    if (scan.getProperties().containsKey(CONFIG_HBASE_CLIENT_SCANNER_CACHING)) {
        hScan.setCaching(Integer.valueOf(scan.getProperties().get(CONFIG_HBASE_CLIENT_SCANNER_CACHING)));
    } else if (arguments.containsKey(CONFIG_HBASE_CLIENT_SCANNER_CACHING)) {
        hScan.setCaching(Integer.valueOf(arguments.get(CONFIG_HBASE_CLIENT_SCANNER_CACHING)));
    } else if (properties.containsKey(CONFIG_HBASE_CLIENT_SCANNER_CACHING)) {
        hScan.setCaching(Integer.valueOf(properties.get(CONFIG_HBASE_CLIENT_SCANNER_CACHING)));
    } else {
        // NOTE: by default we use this hard-coded value, for backwards-compatibility with CDAP<4.1.2|4.2.1|4.3
        hScan.setCaching(1000);
    }
    byte[] startRow = scan.getStartRow();
    byte[] stopRow = scan.getStopRow();
    if (startRow != null) {
        hScan.setStartRow(startRow);
    }
    if (stopRow != null) {
        hScan.setStopRow(stopRow);
    }
    setFilterIfNeeded(hScan, scan.getFilter());
    hScan.setAttribute(TxConstants.TX_OPERATION_ATTRIBUTE_KEY, getEncodedTx());
    ResultScanner resultScanner = wrapResultScanner(hTable.getScanner(hScan.build()));
    return new HBaseScanner(resultScanner, columnFamily);
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ScanBuilder(co.cask.cdap.data2.util.hbase.ScanBuilder) ReadOnly(co.cask.cdap.api.annotation.ReadOnly)
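
The repeated containsKey/get chains encode a precedence rule: scan properties override runtime arguments, which override dataset properties, with a hard-coded default as the last resort. The TODO above asks for a common utility; a hypothetical sketch of such a helper (not existing CDAP code):

// Hypothetical helper implementing the precedence: scan properties > arguments > properties > default.
private String resolveScanConfig(co.cask.cdap.api.dataset.table.Scan scan, String key, String defaultValue) {
    if (scan.getProperties().containsKey(key)) {
        return scan.getProperties().get(key);
    }
    if (arguments.containsKey(key)) {
        return arguments.get(key);
    }
    if (properties.containsKey(key)) {
        return properties.get(key);
    }
    return defaultValue;
}

With it, the two branches above would collapse to hScan.setCacheBlocks(Boolean.valueOf(resolveScanConfig(scan, CONFIG_HBASE_CLIENT_CACHE_BLOCKS, "false"))) and hScan.setCaching(Integer.valueOf(resolveScanConfig(scan, CONFIG_HBASE_CLIENT_SCANNER_CACHING, "1000"))).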

Example 10 with ScanBuilder

use of io.cdap.cdap.data2.util.hbase.ScanBuilder in project cdap by caskdata.

the class ReplicationStatusTool method dumpReplicationStateTable.

private static void dumpReplicationStateTable() throws Exception {
    System.out.println("\nThis is all the HBase regions on the Cluster:");
    HBaseTableUtil tableUtil = new HBaseTableUtilFactory(cConf).get();
    HTable hTable = tableUtil.createHTable(hConf, getReplicationStateTableId(tableUtil));
    ScanBuilder scan = tableUtil.buildScan();
    scan.addColumn(Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.TIME_FAMILY), Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.WRITE_TIME_ROW_TYPE));
    scan.addColumn(Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.TIME_FAMILY), Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.REPLICATE_TIME_ROW_TYPE));
    Result result;
    try (ResultScanner resultScanner = hTable.getScanner(scan.build())) {
        while ((result = resultScanner.next()) != null) {
            ReplicationStatusKey key = new ReplicationStatusKey(result.getRow());
            String rowType = key.getRowType();
            String region = key.getRegionName();
            UUID rsID = key.getRsID();
            Long writeTime = getTimeFromResult(result, ReplicationConstants.ReplicationStatusTool.WRITE_TIME_ROW_TYPE);
            Long replicateTime = getTimeFromResult(result, ReplicationConstants.ReplicationStatusTool.REPLICATE_TIME_ROW_TYPE);
            System.out.println("Key=>rowType:" + rowType + ":region:" + region + ":RSID:" + rsID + " writeTime:" + writeTime + ":replicateTime:" + replicateTime);
        }
    } finally {
        hTable.close();
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ReplicationStatusKey(co.cask.cdap.replication.ReplicationStatusKey) ScanBuilder(co.cask.cdap.data2.util.hbase.ScanBuilder) HBaseTableUtilFactory(co.cask.cdap.data2.util.hbase.HBaseTableUtilFactory) HTable(org.apache.hadoop.hbase.client.HTable) UUID(java.util.UUID) HBaseTableUtil(co.cask.cdap.data2.util.hbase.HBaseTableUtil) Result(org.apache.hadoop.hbase.client.Result)
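
getTimeFromResult is not shown; given that the scan adds one qualifier per row type under TIME_FAMILY, a plausible sketch reads the matching cell and decodes it as a long (hypothetical, not the actual CDAP helper):

// Hypothetical sketch: read the timestamp cell for the given row type, or null if it is absent.
private static Long getTimeFromResultSketch(Result result, String rowType) {
    byte[] value = result.getValue(
        Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.TIME_FAMILY),
        Bytes.toBytes(rowType));
    return value == null ? null : Bytes.toLong(value);
}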

Aggregations

ScanBuilder (co.cask.cdap.data2.util.hbase.ScanBuilder): 9 usages
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 8 usages
ScanBuilder (io.cdap.cdap.data2.util.hbase.ScanBuilder): 5 usages
Result (org.apache.hadoop.hbase.client.Result): 4 usages
IOException (java.io.IOException): 3 usages
HTable (org.apache.hadoop.hbase.client.HTable): 3 usages
HBaseTableUtil (co.cask.cdap.data2.util.hbase.HBaseTableUtil): 2 usages
HBaseTableUtilFactory (co.cask.cdap.data2.util.hbase.HBaseTableUtilFactory): 2 usages
ReplicationStatusKey (co.cask.cdap.replication.ReplicationStatusKey): 2 usages
HBaseTableUtil (io.cdap.cdap.data2.util.hbase.HBaseTableUtil): 2 usages
HBaseTableUtilFactory (io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory): 2 usages
ReplicationStatusKey (io.cdap.cdap.replication.ReplicationStatusKey): 2 usages
FileNotFoundException (java.io.FileNotFoundException): 2 usages
HashMap (java.util.HashMap): 2 usages
UUID (java.util.UUID): 2 usages
ParseException (org.apache.commons.cli.ParseException): 2 usages
Table (org.apache.hadoop.hbase.client.Table): 2 usages
ReadOnly (co.cask.cdap.api.annotation.ReadOnly): 1 usage
DataSetException (co.cask.cdap.api.dataset.DataSetException): 1 usage
NotFoundException (co.cask.cdap.common.NotFoundException): 1 usage