Example 11 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From class HBaseReadWrite, method multiRead.

private void multiRead(String table, byte[] colFam, byte[] colName, byte[][] keys, ByteBuffer[] resultDest) throws IOException {
    assert keys.length == resultDest.length;
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(table);
    List<Get> gets = new ArrayList<>(keys.length);
    for (byte[] key : keys) {
        Get g = new Get(key);
        g.addColumn(colFam, colName);
        gets.add(g);
    }
    Result[] results = htab.get(gets);
    for (int i = 0; i < results.length; ++i) {
        Result r = results[i];
        if (r.isEmpty()) {
            resultDest[i] = null;
        } else {
            Cell cell = r.getColumnLatestCell(colFam, colName);
            resultDest[i] = ByteBuffer.wrap(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
        }
    }
}
Also used: Get (org.apache.hadoop.hbase.client.Get), ArrayList (java.util.ArrayList), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Cell (org.apache.hadoop.hbase.Cell), Result (org.apache.hadoop.hbase.client.Result)
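HTableInterface was deprecated in HBase 1.0 and removed in 2.0, which is why the call above carries @SuppressWarnings("deprecation"). For comparison, here is a minimal sketch of the same batched-Get pattern on the replacement Table API; the connection setup, table name, and column coordinates are illustrative assumptions, not taken from the Hive code.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class MultiGetSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        byte[] colFam = Bytes.toBytes("cf");     // hypothetical column family
        byte[] colName = Bytes.toBytes("data");  // hypothetical qualifier
        byte[][] keys = { Bytes.toBytes("row1"), Bytes.toBytes("row2") };
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table htab = conn.getTable(TableName.valueOf("example"))) {
            // Build one Get per key, exactly as multiRead does above.
            List<Get> gets = new ArrayList<>(keys.length);
            for (byte[] key : keys) {
                Get g = new Get(key);
                g.addColumn(colFam, colName);
                gets.add(g);
            }
            // One batched read; results align with the input key order.
            Result[] results = htab.get(gets);
            for (Result r : results) {
                if (!r.isEmpty()) {
                    Cell cell = r.getColumnLatestCell(colFam, colName);
                    // Wrap rather than copy the value bytes, as multiRead does.
                    ByteBuffer value = ByteBuffer.wrap(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength());
                }
            }
        }
    }
}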

Example 12 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From class HBaseReadWrite, method printTables.

/**
   * Print tables
   * @param regex to use to find the tables.  Remember that dbname is in each
   *              table name.
   * @return tables as strings
   * @throws IOException
   * @throws TException
   */
List<String> printTables(String regex) throws IOException, TException {
    Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
    Scan scan = new Scan();
    scan.addColumn(CATALOG_CF, CATALOG_COL);
    scan.addFamily(STATS_CF);
    scan.setFilter(filter);
    Iterator<Result> iter = htab.getScanner(scan).iterator();
    if (!iter.hasNext())
        return noMatch(regex, "table");
    List<String> lines = new ArrayList<>();
    while (iter.hasNext()) {
        lines.add(printOneTable(iter.next()));
    }
    return lines;
}
Also used: RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator), RowFilter (org.apache.hadoop.hbase.filter.RowFilter), FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter), Filter (org.apache.hadoop.hbase.filter.Filter), CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter), BloomFilter (org.apache.hive.common.util.BloomFilter), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Result (org.apache.hadoop.hbase.client.Result)
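The pattern here, a RowFilter wrapping a RegexStringComparator, pushes the regex match out to the region servers rather than filtering rows client-side. One detail worth noting: printTables takes an Iterator straight off getScanner() and never closes the underlying ResultScanner. A hedged sketch of the same server-side regex scan that does close it; the table handle, column coordinates, and regex are assumptions for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public final class RegexScanSketch {
    // Counts rows whose key matches the given regex; the caller supplies an
    // open Table handle.
    static int countMatchingRows(Table htab, String regex) throws IOException {
        // EQUAL + RegexStringComparator keeps only rows whose key matches.
        Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL,
                new RegexStringComparator(regex));
        Scan scan = new Scan();
        scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("c")); // placeholder column
        scan.setFilter(filter);
        int matches = 0;
        // try-with-resources releases the scanner's server-side resources,
        // which printTables above leaves to the table's own lifecycle.
        try (ResultScanner scanner = htab.getScanner(scan)) {
            for (Result r : scanner) {
                matches++;
            }
        }
        return matches;
    }
}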

Example 13 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From class HBaseReadWrite, method printPartitions.

/**
   * Print partitions
   * @param partKey a partial partition key.  This must match the beginning of the partition key.
   *                It can be just dbname.tablename, or dbname.table.pval... where pval are the
   *                partition values in order.  They must be in the correct order and they must
   *                be literal values (no regular expressions)
   * @return partitions as strings
   * @throws IOException
   * @throws TException
   */
List<String> printPartitions(String partKey) throws IOException, TException {
    // First figure out the table and fetch it
    // Split on dot here rather than the standard separator because this will be passed in as a
    // regex, even though we aren't fully supporting regexes.
    String[] partKeyParts = partKey.split("\\.");
    if (partKeyParts.length < 2)
        return noMatch(partKey, "partition");
    List<String> partVals = partKeyParts.length == 2 ? Arrays.asList("*") : Arrays.asList(Arrays.copyOfRange(partKeyParts, 2, partKeyParts.length));
    PartitionScanInfo psi;
    try {
        psi = scanPartitionsInternal(partKeyParts[0], partKeyParts[1], partVals, -1);
    } catch (NoSuchObjectException e) {
        return noMatch(partKey, "partition");
    }
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(PART_TABLE);
    Scan scan = new Scan();
    scan.addColumn(CATALOG_CF, CATALOG_COL);
    scan.addFamily(STATS_CF);
    scan.setStartRow(psi.keyPrefix);
    scan.setStopRow(psi.endKeyPrefix);
    scan.setFilter(psi.filter);
    Iterator<Result> iter = htab.getScanner(scan).iterator();
    if (!iter.hasNext())
        return noMatch(partKey, "partition");
    List<String> lines = new ArrayList<>();
    while (iter.hasNext()) {
        lines.add(printOnePartition(iter.next()));
    }
    return lines;
}
Also used: ArrayList (java.util.ArrayList), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Scan (org.apache.hadoop.hbase.client.Scan), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Result (org.apache.hadoop.hbase.client.Result)
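Unlike printTables, this scan is bounded by a start and stop row derived from the partition-key prefix, so HBase only reads the relevant key range before applying the filter. A minimal sketch of that range-scan shape; the key bounds and output are made up for illustration, and setStartRow/setStopRow are the HBase 1.x calls, superseded by withStartRow/withStopRow in 2.0.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class RangeScanSketch {
    static void scanKeyRange(Table htab) throws IOException {
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("db.tbl."));  // inclusive lower bound (placeholder)
        scan.setStopRow(Bytes.toBytes("db.tbl/"));   // exclusive upper bound: '/' is '.' + 1
        try (ResultScanner scanner = htab.getScanner(scan)) {
            for (Result r : scanner) {
                System.out.println(Bytes.toString(r.getRow()));
            }
        }
    }
}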

Example 14 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From class HBaseReadWrite, method scan.

private Iterator<Result> scan(String table, byte[] keyStart, byte[] keyEnd, byte[] colFam, byte[] colName, Filter filter) throws IOException {
    HTableInterface htab = conn.getHBaseTable(table);
    Scan s = new Scan();
    if (keyStart != null) {
        s.setStartRow(keyStart);
    }
    if (keyEnd != null) {
        s.setStopRow(keyEnd);
    }
    if (colFam != null && colName != null) {
        s.addColumn(colFam, colName);
    }
    if (filter != null) {
        s.setFilter(filter);
    }
    ResultScanner scanner = htab.getScanner(s);
    return scanner.iterator();
}
Also used: ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Scan (org.apache.hadoop.hbase.client.Scan), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)
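This private helper treats every argument except the table as optional, setting each Scan option only when the argument is non-null, so one method serves full-table, bounded, single-column, and filtered scans. A self-contained sketch of the same null-tolerant shape on the non-deprecated Table API, plus a hypothetical call; the table handle, column coordinates, and arguments are assumptions.

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.util.Bytes;

public final class ScanHelperSketch {
    // Mirrors scan(...) above: any null argument simply leaves the
    // corresponding Scan option unset.
    static Iterator<Result> scan(Table htab, byte[] keyStart, byte[] keyEnd,
            byte[] colFam, byte[] colName, Filter filter) throws IOException {
        Scan s = new Scan();
        if (keyStart != null) s.setStartRow(keyStart);
        if (keyEnd != null) s.setStopRow(keyEnd);
        if (colFam != null && colName != null) s.addColumn(colFam, colName);
        if (filter != null) s.setFilter(filter);
        ResultScanner scanner = htab.getScanner(s);
        // As in the original, the caller owns the scanner's lifetime.
        return scanner.iterator();
    }

    // Hypothetical call: unbounded keys, one column, no filter.
    static Iterator<Result> allCatalogRows(Table htab) throws IOException {
        return scan(htab, null, null, Bytes.toBytes("cf"), Bytes.toBytes("c"), null);
    }
}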

Example 15 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From class HBaseReadWrite, method storeFileMetadata.

@Override
public void storeFileMetadata(long fileId, ByteBuffer metadata, ByteBuffer[] addedCols, ByteBuffer[] addedVals) throws IOException, InterruptedException {
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(FILE_METADATA_TABLE);
    Put p = new Put(HBaseUtils.makeLongKey(fileId));
    p.addColumn(CATALOG_CF, ByteBuffer.wrap(CATALOG_COL), HConstants.LATEST_TIMESTAMP, metadata);
    assert (addedCols == null && addedVals == null) || (addedCols.length == addedVals.length);
    if (addedCols != null) {
        for (int i = 0; i < addedCols.length; ++i) {
            p.addColumn(STATS_CF, addedCols[i], HConstants.LATEST_TIMESTAMP, addedVals[i]);
        }
    }
    htab.put(p);
    conn.flush(htab);
}
Also used: HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Put (org.apache.hadoop.hbase.client.Put)
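The explicit conn.flush(htab) at the end is there because the deprecated HTableInterface could buffer writes client-side (it exposed flushCommits()); on the replacement Table API, put() is applied synchronously and client-side buffering moved to BufferedMutator, so no flush step exists. A minimal sketch of the equivalent write; the key encoding and column coordinates are assumptions, since HBaseUtils.makeLongKey's exact byte layout is not shown in the excerpt.

import java.io.IOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class PutSketch {
    static void storeBlob(Table htab, long fileId, byte[] metadata) throws IOException {
        // Big-endian long row key; whether this matches HBaseUtils.makeLongKey
        // is an assumption, as that helper's encoding is not shown above.
        Put p = new Put(Bytes.toBytes(fileId));
        p.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("data"),  // placeholder coordinates
                HConstants.LATEST_TIMESTAMP, metadata);
        htab.put(p);  // synchronous on the Table API; no separate flush call
    }
}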

Aggregations

HTableInterface (org.apache.hadoop.hbase.client.HTableInterface): 117
Result (org.apache.hadoop.hbase.client.Result): 43
Put (org.apache.hadoop.hbase.client.Put): 41
IOException (java.io.IOException): 36
ArrayList (java.util.ArrayList): 26
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 23
Get (org.apache.hadoop.hbase.client.Get): 21
Scan (org.apache.hadoop.hbase.client.Scan): 21
Test (org.junit.Test): 20
SQLException (java.sql.SQLException): 19
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 17
Connection (java.sql.Connection): 15
HashMap (java.util.HashMap): 15
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 13
Delete (org.apache.hadoop.hbase.client.Delete): 12
Mutation (org.apache.hadoop.hbase.client.Mutation): 12
PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException): 11
ResultSet (java.sql.ResultSet): 10
Configuration (org.apache.hadoop.conf.Configuration): 9
ConnectionQueryServices (org.apache.phoenix.query.ConnectionQueryServices): 9