Example 1 with Scan

use of org.apache.hadoop.hbase.client.Scan in project hive by apache.

In class HBaseReadWrite, method printTables:

/**
   * Print tables.
   * @param regex regular expression used to find the tables.  Remember that the
   *              dbname is part of each table name.
   * @return tables as strings
   * @throws IOException
   * @throws TException
   */
List<String> printTables(String regex) throws IOException, TException {
    Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
    Scan scan = new Scan();
    scan.addColumn(CATALOG_CF, CATALOG_COL);
    scan.addFamily(STATS_CF);
    scan.setFilter(filter);
    Iterator<Result> iter = htab.getScanner(scan).iterator();
    if (!iter.hasNext())
        return noMatch(regex, "table");
    List<String> lines = new ArrayList<>();
    while (iter.hasNext()) {
        lines.add(printOneTable(iter.next()));
    }
    return lines;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) BloomFilter(org.apache.hive.common.util.BloomFilter) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)
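
The RowFilter plus RegexStringComparator pairing above is evaluated on the region servers, so non-matching rows never reach the client. A minimal sketch of the same pattern as a standalone helper, assuming only the HBase 1.x client API shown above (the class name, method name, and table handle are illustrative, not from the Hive source):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RegexScanSketch {
    // Returns the row keys of all rows whose key matches the given regex.
    // The filter runs server-side, so only matching rows cross the wire.
    static List<String> rowKeysMatching(HTableInterface htab, String regex) throws IOException {
        Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
        Scan scan = new Scan();
        scan.setFilter(filter);
        List<String> keys = new ArrayList<>();
        // try-with-resources closes the ResultScanner when iteration is done
        try (ResultScanner scanner = htab.getScanner(scan)) {
            for (Result r : scanner) {
                keys.add(Bytes.toString(r.getRow()));
            }
        }
        return keys;
    }
}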

Example 2 with Scan

use of org.apache.hadoop.hbase.client.Scan in project hive by apache.

In class HBaseReadWrite, method printPartitions:

/**
   * Print partitions
   * @param partKey a partial partition key.  This must match the beginning of the
   *                partition key.  It can be just dbname.tablename, or dbname.table.pval...
   *                where the pvals are the partition values in order.  They must be in the
   *                correct order and they must be literal values (no regular expressions).
   * @return partitions as strings
   * @throws IOException
   * @throws TException
   */
List<String> printPartitions(String partKey) throws IOException, TException {
    // First figure out the table and fetch it
    // Split on dot here rather than the standard separator because this will be passed in as a
    // regex, even though we aren't fully supporting regexes.
    String[] partKeyParts = partKey.split("\\.");
    if (partKeyParts.length < 2)
        return noMatch(partKey, "partition");
    List<String> partVals = partKeyParts.length == 2 ? Arrays.asList("*") : Arrays.asList(Arrays.copyOfRange(partKeyParts, 2, partKeyParts.length));
    PartitionScanInfo psi;
    try {
        psi = scanPartitionsInternal(partKeyParts[0], partKeyParts[1], partVals, -1);
    } catch (NoSuchObjectException e) {
        return noMatch(partKey, "partition");
    }
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(PART_TABLE);
    Scan scan = new Scan();
    scan.addColumn(CATALOG_CF, CATALOG_COL);
    scan.addFamily(STATS_CF);
    scan.setStartRow(psi.keyPrefix);
    scan.setStopRow(psi.endKeyPrefix);
    scan.setFilter(psi.filter);
    Iterator<Result> iter = htab.getScanner(scan).iterator();
    if (!iter.hasNext())
        return noMatch(partKey, "partition");
    List<String> lines = new ArrayList<>();
    while (iter.hasNext()) {
        lines.add(printOnePartition(iter.next()));
    }
    return lines;
}
Also used : ArrayList(java.util.ArrayList) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Scan(org.apache.hadoop.hbase.client.Scan) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)
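
Here the start and stop rows arrive precomputed in PartitionScanInfo. When all you have is a shared key prefix, Scan.setRowPrefixFilter (present in HBase 1.x clients) can derive both bounds for you. A hedged sketch, with the helper name and table handle as assumptions rather than anything from the Hive source:

import java.io.IOException;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

public class PrefixScanSketch {
    // Counts the rows whose keys start with the given prefix.  setRowPrefixFilter
    // sets the start row to the prefix and computes the matching stop row, which
    // is equivalent to the explicit setStartRow/setStopRow pair above.
    static int countRowsWithPrefix(HTableInterface htab, byte[] prefix) throws IOException {
        Scan scan = new Scan();
        scan.setRowPrefixFilter(prefix);
        int count = 0;
        try (ResultScanner scanner = htab.getScanner(scan)) {
            for (Result ignored : scanner) {
                count++;
            }
        }
        return count;
    }
}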

Example 3 with Scan

use of org.apache.hadoop.hbase.client.Scan in project hive by apache.

In class HBaseReadWrite, method scan:

private Iterator<Result> scan(String table, byte[] keyStart, byte[] keyEnd, byte[] colFam, byte[] colName, Filter filter) throws IOException {
    HTableInterface htab = conn.getHBaseTable(table);
    Scan s = new Scan();
    if (keyStart != null) {
        s.setStartRow(keyStart);
    }
    if (keyEnd != null) {
        s.setStopRow(keyEnd);
    }
    if (colFam != null && colName != null) {
        s.addColumn(colFam, colName);
    }
    if (filter != null) {
        s.setFilter(filter);
    }
    ResultScanner scanner = htab.getScanner(s);
    return scanner.iterator();
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface)
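
One caveat with this helper: it returns scanner.iterator() and drops the ResultScanner reference, so the caller has nothing to close. A sketch of a variant that hands back the scanner itself so the caller can manage its lifetime with try-with-resources (the names here are illustrative, not from the Hive source):

import java.io.IOException;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;

public class ScanHelperSketch {
    // Same optional-parameter handling as the private scan() method above,
    // but the caller receives the ResultScanner and is responsible for closing it.
    static ResultScanner openScanner(HTableInterface htab, byte[] keyStart, byte[] keyEnd,
                                     byte[] colFam, byte[] colName, Filter filter) throws IOException {
        Scan s = new Scan();
        if (keyStart != null) {
            s.setStartRow(keyStart);
        }
        if (keyEnd != null) {
            s.setStopRow(keyEnd);
        }
        if (colFam != null && colName != null) {
            s.addColumn(colFam, colName);
        }
        if (filter != null) {
            s.setFilter(filter);
        }
        return htab.getScanner(s);
    }

    // Caller controls the scanner's lifetime:
    static void example(HTableInterface htab) throws IOException {
        try (ResultScanner scanner = openScanner(htab, null, null, null, null, null)) {
            for (Result r : scanner) {
                // process r
            }
        }
    }
}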

Example 4 with Scan

use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

In class MetaTableAccessor, method scanMeta:

/**
   * Performs a scan of the META table.
   * @param connection connection we're using
   * @param startRow Where to start the scan. Pass null to begin the scan at
   *                 the first row.
   * @param stopRow Where to stop the scan. Pass null to scan all rows from
   *                the start row.
   * @param type scanned part of meta
   * @param maxRows maximum rows to return
   * @param visitor Visitor invoked against each row.
   * @throws IOException
   */
public static void scanMeta(Connection connection, @Nullable final byte[] startRow, @Nullable final byte[] stopRow, QueryType type, int maxRows, final Visitor visitor) throws IOException {
    int rowUpperLimit = maxRows > 0 ? maxRows : Integer.MAX_VALUE;
    Scan scan = getMetaScan(connection, rowUpperLimit);
    for (byte[] family : type.getFamilies()) {
        scan.addFamily(family);
    }
    if (startRow != null)
        scan.setStartRow(startRow);
    if (stopRow != null)
        scan.setStopRow(stopRow);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Scanning META" + " starting at row=" + Bytes.toStringBinary(startRow) + " stopping at row=" + Bytes.toStringBinary(stopRow) + " for max=" + rowUpperLimit + " with caching=" + scan.getCaching());
    }
    int currentRow = 0;
    try (Table metaTable = getMetaHTable(connection)) {
        try (ResultScanner scanner = metaTable.getScanner(scan)) {
            Result data;
            while ((data = scanner.next()) != null) {
                if (data.isEmpty())
                    continue;
                // Break if visit returns false.
                if (!visitor.visit(data))
                    break;
                if (++currentRow >= rowUpperLimit)
                    break;
            }
        }
    }
    if (visitor != null && visitor instanceof Closeable) {
        try {
            ((Closeable) visitor).close();
        } catch (Throwable t) {
            ExceptionUtil.rethrowIfInterrupt(t);
            LOG.debug("Got exception in closing the meta scanner visitor", t);
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Closeable(java.io.Closeable) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)
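
scanMeta drives a caller-supplied Visitor and stops early once visit returns false or the row limit is hit. As a minimal sketch, assuming the nested MetaTableAccessor.Visitor interface with the boolean visit(Result) contract the loop above relies on, a visitor that simply collects rows could look like this:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.client.Result;

public class CollectingVisitor implements MetaTableAccessor.Visitor {
    private final List<Result> results = new ArrayList<>();

    // Called once per non-empty meta row; returning true keeps the scan going,
    // returning false would make scanMeta break out of its loop early.
    @Override
    public boolean visit(Result r) throws IOException {
        results.add(r);
        return true;
    }

    public List<Result> getResults() {
        return results;
    }
}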

Example 5 with Scan

use of org.apache.hadoop.hbase.client.Scan in project hbase by apache.

In class MetaTableAccessor, method getScanForTableName:

/**
   * This method creates a Scan object that will only scan catalog rows that
   * belong to the specified table. It doesn't specify any columns.
   * This is a better alternative than just using a start row and scanning until
   * the scan hits a new table, since that approach requires parsing the HRI to
   * get the table name.
   * @param tableName bytes of table's name
   * @return configured Scan object
   */
@Deprecated
public static Scan getScanForTableName(Connection connection, TableName tableName) {
    // Start key is just the table name with delimiters
    byte[] startKey = getTableStartRowForMeta(tableName, QueryType.REGION);
    // Stop key appends the smallest possible char to the table name
    byte[] stopKey = getTableStopRowForMeta(tableName, QueryType.REGION);
    Scan scan = getMetaScan(connection, -1);
    scan.setStartRow(startKey);
    scan.setStopRow(stopKey);
    return scan;
}
Also used : Scan(org.apache.hadoop.hbase.client.Scan)
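
Note that setStartRow and setStopRow are themselves deprecated in HBase 2.x clients in favor of the chainable withStartRow and withStopRow. A sketch of the same key bounding in the newer style, assuming a 2.x client (the wrapper class and method are illustrative):

import org.apache.hadoop.hbase.client.Scan;

public class BoundedScanSketch {
    // startKey and stopKey would come from getTableStartRowForMeta and
    // getTableStopRowForMeta, as in the method above.
    static Scan boundedScan(byte[] startKey, byte[] stopKey) {
        // withStartRow is inclusive and withStopRow exclusive by default,
        // matching the semantics of the deprecated setters.
        return new Scan().withStartRow(startKey).withStopRow(stopKey);
    }
}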

Aggregations

Scan (org.apache.hadoop.hbase.client.Scan): 1286
Test (org.junit.Test): 619
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 434
Result (org.apache.hadoop.hbase.client.Result): 421
Cell (org.apache.hadoop.hbase.Cell): 346
ArrayList (java.util.ArrayList): 337
Table (org.apache.hadoop.hbase.client.Table): 289
Put (org.apache.hadoop.hbase.client.Put): 216
IOException (java.io.IOException): 208
TableName (org.apache.hadoop.hbase.TableName): 179
BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest): 160
Delete (org.apache.hadoop.hbase.client.Delete): 131
Connection (org.apache.hadoop.hbase.client.Connection): 117
Filter (org.apache.hadoop.hbase.filter.Filter): 104
KeyValue (org.apache.hadoop.hbase.KeyValue): 97
Configuration (org.apache.hadoop.conf.Configuration): 95
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 89
SkipScanFilter (org.apache.phoenix.filter.SkipScanFilter): 80
List (java.util.List): 79
Path (org.apache.hadoop.fs.Path): 76