
Example 21 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class AccessControlLists, method loadAll.

/**
   * Load all permissions from the region server holding {@code _acl_},
   * primarily intended for testing purposes.
   */
static Map<byte[], ListMultimap<String, TablePermission>> loadAll(Configuration conf) throws IOException {
    Map<byte[], ListMultimap<String, TablePermission>> allPerms = new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
    // do a full scan of _acl_, filtering on only first table region rows
    Scan scan = new Scan();
    scan.addFamily(ACL_LIST_FAMILY);
    ResultScanner scanner = null;
    // TODO: Pass in a Connection rather than create one each time.
    try (Connection connection = ConnectionFactory.createConnection(conf)) {
        try (Table table = connection.getTable(ACL_TABLE_NAME)) {
            scanner = table.getScanner(scan);
            try {
                for (Result row : scanner) {
                    ListMultimap<String, TablePermission> resultPerms = parsePermissions(row.getRow(), row);
                    allPerms.put(row.getRow(), resultPerms);
                }
            } finally {
                if (scanner != null)
                    scanner.close();
            }
        }
    }
    return allPerms;
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Table(org.apache.hadoop.hbase.client.Table) Connection(org.apache.hadoop.hbase.client.Connection) Scan(org.apache.hadoop.hbase.client.Scan) TreeMap(java.util.TreeMap) ArrayListMultimap(com.google.common.collect.ArrayListMultimap) ListMultimap(com.google.common.collect.ListMultimap) Result(org.apache.hadoop.hbase.client.Result)
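
The pattern above is the standard client-side scan: open a Connection, get the Table, restrict the Scan to the family of interest, and iterate the ResultScanner. A minimal standalone sketch of the same pattern (the table name "my_table" and family "f" are illustrative assumptions, not taken from the HBase source) could look like this:

static void scanAllRows(Configuration conf) throws IOException {
    // Connection, Table and ResultScanner are all Closeable, so try-with-resources
    // releases the client-side and server-side scan resources even on failure.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("my_table"));
         ResultScanner scanner = table.getScanner(new Scan().addFamily(Bytes.toBytes("f")))) {
        for (Result row : scanner) {
            // Each Result carries all cells returned for a single row key.
            System.out.println(Bytes.toStringBinary(row.getRow()));
        }
    }
}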

Example 22 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class HBaseTestingUtility, method deleteTableData.

/**
   * Truncate an existing table: scans the table and issues a delete for each row read.
   * @param tableName name of an existing table
   * @return the Table handle for the given table name
   * @throws IOException if the scan or the deletes fail
   */
public Table deleteTableData(TableName tableName) throws IOException {
    Table table = getConnection().getTable(tableName);
    Scan scan = new Scan();
    // Close the scanner when finished so its server-side resources are released.
    try (ResultScanner resScan = table.getScanner(scan)) {
        for (Result res : resScan) {
            Delete del = new Delete(res.getRow());
            table.delete(del);
        }
    }
    return table;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) HTable(org.apache.hadoop.hbase.client.HTable) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)
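
A hypothetical usage of this helper inside a test method that declares throws Exception (the utility instance, table name and column family below are illustrative assumptions, and the mini cluster is assumed to be running):

HBaseTestingUtility util = new HBaseTestingUtility();
util.startMiniCluster();
TableName name = TableName.valueOf("test_table");
util.createTable(name, Bytes.toBytes("f"));
// ... write rows and run assertions ...
// Wipe all rows so the next test case starts from an empty table,
// which is cheaper than dropping and recreating it.
util.deleteTableData(name);
util.shutdownMiniCluster();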

Example 23 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class HBaseTestingUtility, method findLastTableState.

@Nullable
public TableState findLastTableState(final TableName table) throws IOException {
    final AtomicReference<TableState> lastTableState = new AtomicReference<>(null);
    MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {

        @Override
        public boolean visit(Result r) throws IOException {
            if (!Arrays.equals(r.getRow(), table.getName()))
                return false;
            TableState state = MetaTableAccessor.getTableState(r);
            if (state != null)
                lastTableState.set(state);
            return true;
        }
    };
    MetaTableAccessor.scanMeta(connection, null, null, MetaTableAccessor.QueryType.TABLE, Integer.MAX_VALUE, visitor);
    return lastTableState.get();
}
Also used : AtomicReference(java.util.concurrent.atomic.AtomicReference) TableState(org.apache.hadoop.hbase.client.TableState) Result(org.apache.hadoop.hbase.client.Result) Nullable(edu.umd.cs.findbugs.annotations.Nullable)
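
The visitor stops the hbase:meta scan at the first row whose key does not match the table name, and otherwise records the table state it finds. A sketch of a hypothetical caller (the utility instance and table name are illustrative, not from the surrounding source):

TableName name = TableName.valueOf("my_table");
TableState state = util.findLastTableState(name);
// state is null when hbase:meta holds no entry for the table.
if (state != null && state.getState() == TableState.State.ENABLED) {
    // the most recently recorded state for this table is ENABLED
}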

Example 24 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class HBaseTestingUtility, method verifyNumericRows.

public void verifyNumericRows(Table table, final byte[] f, int startRow, int endRow, int replicaId) throws IOException {
    for (int i = startRow; i < endRow; i++) {
        String failMsg = "Failed verification of row :" + i;
        byte[] data = Bytes.toBytes(String.valueOf(i));
        Get get = new Get(data);
        get.setReplicaId(replicaId);
        get.setConsistency(Consistency.TIMELINE);
        Result result = table.get(get);
        assertTrue(failMsg, result.containsColumn(f, null));
        assertEquals(failMsg, 1, result.getColumnCells(f, null).size());
        Cell cell = result.getColumnLatestCell(f, null);
        assertTrue(failMsg, Bytes.equals(data, 0, data.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
    }
}
Also used : Get(org.apache.hadoop.hbase.client.Get) Result(org.apache.hadoop.hbase.client.Result)
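
The Get above is pinned to a specific replica id and uses timeline consistency, which is what allows the read to be served by a secondary region replica. A minimal sketch of a timeline-consistent read that lets HBase pick any replica (the table handle, row key and family are illustrative assumptions):

Get get = new Get(Bytes.toBytes("row-42"));
// TIMELINE allows the read to be answered by a secondary replica that may lag the primary.
get.setConsistency(Consistency.TIMELINE);
Result result = table.get(get);
if (result.isStale()) {
    // the response came from a secondary replica rather than the primary
}
Cell cell = result.getColumnLatestCell(Bytes.toBytes("f"), null);
byte[] value = cell == null ? null : CellUtil.cloneValue(cell);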

Example 25 with Result

Use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

From the class HBaseTestCase, method assertResultEquals.

protected void assertResultEquals(final HRegion region, final byte[] row, final byte[] family, final byte[] qualifier, final long timestamp, final byte[] value) throws IOException {
    Get get = new Get(row);
    get.setTimeStamp(timestamp);
    Result res = region.get(get);
    NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = res.getMap();
    byte[] res_value = map.get(family).get(qualifier).get(timestamp);
    if (value == null) {
        assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) + " at timestamp " + timestamp, null, res_value);
    } else {
        if (res_value == null) {
            fail(Bytes.toString(family) + " " + Bytes.toString(qualifier) + " at timestamp " + timestamp + " was expected to be \"" + Bytes.toStringBinary(value) + "\" but was null");
        }
        assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) + " at timestamp " + timestamp, Bytes.toString(value), Bytes.toString(res_value));
    }
}
Also used : NavigableMap(java.util.NavigableMap) Get(org.apache.hadoop.hbase.client.Get) Result(org.apache.hadoop.hbase.client.Result)
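
When only a single column is being checked, navigating the nested map from Result.getMap() can be replaced by Result.getValue, which returns the latest value for one family/qualifier pair. A hedged sketch reusing the parameter names of the helper above (not part of the original class):

Get get = new Get(row);
get.setTimeStamp(timestamp);
Result res = region.get(get);
// getValue returns null when the column is absent; Bytes.equals handles null arrays.
byte[] actual = res.getValue(family, qualifier);
assertTrue("unexpected value for " + Bytes.toString(family) + ":" + Bytes.toString(qualifier), Bytes.equals(value, actual));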

Aggregations

Result (org.apache.hadoop.hbase.client.Result): 715
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 286
Test (org.junit.Test): 280
Scan (org.apache.hadoop.hbase.client.Scan): 279
Get (org.apache.hadoop.hbase.client.Get): 269
Table (org.apache.hadoop.hbase.client.Table): 224
Put (org.apache.hadoop.hbase.client.Put): 183
Cell (org.apache.hadoop.hbase.Cell): 177
IOException (java.io.IOException): 164
ArrayList (java.util.ArrayList): 143
TableName (org.apache.hadoop.hbase.TableName): 124
Connection (org.apache.hadoop.hbase.client.Connection): 101
Delete (org.apache.hadoop.hbase.client.Delete): 101
Configuration (org.apache.hadoop.conf.Configuration): 70
KeyValue (org.apache.hadoop.hbase.KeyValue): 70
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 62
InterruptedIOException (java.io.InterruptedIOException): 58
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 50
CellScanner (org.apache.hadoop.hbase.CellScanner): 47
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 45