Search in sources :

Example 36 with Result

use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

The method verifyNumericRows of class HBaseTestingUtility.

/**
 * Verifies that every row in [startRow, endRow) is present in {@code table},
 * reading with TIMELINE consistency from the given replica, and that the
 * single cell under family {@code f} carries the row key itself as its value.
 *
 * @param table     the table to read from
 * @param f         the column family to check (null qualifier)
 * @param startRow  first numeric row key, inclusive
 * @param endRow    last numeric row key, exclusive
 * @param replicaId replica to direct the reads to
 * @throws IOException if any Get fails
 */
public void verifyNumericRows(Table table, final byte[] f, int startRow, int endRow, int replicaId) throws IOException {
    for (int row = startRow; row < endRow; row++) {
        final String failMsg = "Failed verification of row :" + row;
        final byte[] rowKey = Bytes.toBytes(String.valueOf(row));
        // Timeline-consistent read against the requested replica.
        Get request = new Get(rowKey);
        request.setReplicaId(replicaId);
        request.setConsistency(Consistency.TIMELINE);
        Result fetched = table.get(request);
        // Exactly one cell must exist for family f (null qualifier) ...
        assertTrue(failMsg, fetched.containsColumn(f, null));
        assertEquals(failMsg, fetched.getColumnCells(f, null).size(), 1);
        // ... and its value must equal the row key bytes.
        Cell latest = fetched.getColumnLatestCell(f, null);
        boolean valueMatches = Bytes.equals(rowKey, 0, rowKey.length, latest.getValueArray(), latest.getValueOffset(), latest.getValueLength());
        assertTrue(failMsg, valueMatches);
    }
}
Also used : Get(org.apache.hadoop.hbase.client.Get) Result(org.apache.hadoop.hbase.client.Result)

Example 37 with Result

use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

The method assertResultEquals of class HBaseTestCase.

/**
 * Asserts that the cell at {@code row}/{@code family}:{@code qualifier} with the exact
 * {@code timestamp} holds {@code value}; a null {@code value} asserts the cell is absent.
 *
 * @param region    region to issue the Get against
 * @param row       row key to read
 * @param family    column family of the cell
 * @param qualifier column qualifier of the cell
 * @param timestamp exact timestamp the Get is pinned to
 * @param value     expected cell value, or null to expect no cell
 * @throws IOException if the Get against the region fails
 */
protected void assertResultEquals(final HRegion region, final byte[] row, final byte[] family, final byte[] qualifier, final long timestamp, final byte[] value) throws IOException {
    Get get = new Get(row);
    get.setTimeStamp(timestamp);
    Result res = region.get(get);
    NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = res.getMap();
    byte[] res_value = map.get(family).get(qualifier).get(timestamp);
    String context = Bytes.toString(family) + " " + Bytes.toString(qualifier) + " at timestamp " + timestamp;
    if (value == null) {
        assertEquals(context, null, res_value);
    } else {
        if (res_value == null) {
            fail(context + "\" was expected to be \"" + Bytes.toStringBinary(value) + " but was null");
        }
        // BUG FIX: the original compared the raw byte[] `value` against
        // `new String(res_value)` — a byte[] is never equals() to a String, so
        // the assertion could never pass. Compare both sides decoded with
        // Bytes.toString (UTF-8) instead, which also avoids the
        // platform-default charset of new String(byte[]).
        // (No null re-check needed here: fail() above does not return.)
        assertEquals(context, Bytes.toString(value), Bytes.toString(res_value));
    }
}
Also used : NavigableMap(java.util.NavigableMap) Get(org.apache.hadoop.hbase.client.Get) Result(org.apache.hadoop.hbase.client.Result)

Example 38 with Result

use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

The method getMetaTableRows(TableName) of class HBaseTestingUtility.

/**
   * Returns all rows from the hbase:meta table for a given user table
   *
   * @throws IOException When reading the rows fails.
   */
/**
 * Returns all rows from the hbase:meta table that belong to the given user table.
 *
 * @param tableName user table whose meta rows to collect
 * @return row keys of hbase:meta entries for {@code tableName}
 * @throws IOException When reading the rows fails.
 */
public List<byte[]> getMetaTableRows(TableName tableName) throws IOException {
    // TODO: Redo using MetaTableAccessor.
    List<byte[]> rows = new ArrayList<>();
    // FIX: try-with-resources guarantees the table and scanner are closed even
    // when iteration throws; the original leaked both on any exception.
    try (Table t = getConnection().getTable(TableName.META_TABLE_NAME);
         ResultScanner s = t.getScanner(new Scan())) {
        for (Result result : s) {
            HRegionInfo info = MetaTableAccessor.getHRegionInfo(result);
            if (info == null) {
                LOG.error("No region info for row " + Bytes.toString(result.getRow()));
                // TODO figure out what to do for this new hosed case.
                continue;
            }
            // Keep only rows whose region belongs to the requested table.
            if (info.getTable().equals(tableName)) {
                LOG.info("getMetaTableRows: row -> " + Bytes.toStringBinary(result.getRow()) + info);
                rows.add(result.getRow());
            }
        }
    }
    return rows;
}
Also used : HTable(org.apache.hadoop.hbase.client.HTable) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)

Example 39 with Result

use of org.apache.hadoop.hbase.client.Result in project hbase by apache.

The method getMetaTableRows() of class HBaseTestingUtility.

/**
   * Returns all rows from the hbase:meta table.
   *
   * @throws IOException When reading the rows fails.
   */
/**
 * Returns all rows from the hbase:meta table.
 *
 * @return every row key currently present in hbase:meta
 * @throws IOException When reading the rows fails.
 */
public List<byte[]> getMetaTableRows() throws IOException {
    // TODO: Redo using MetaTableAccessor class
    List<byte[]> rows = new ArrayList<>();
    // FIX: try-with-resources guarantees the table and scanner are closed even
    // when iteration throws; the original leaked both on any exception.
    try (Table t = getConnection().getTable(TableName.META_TABLE_NAME);
         ResultScanner s = t.getScanner(new Scan())) {
        for (Result result : s) {
            LOG.info("getMetaTableRows: row -> " + Bytes.toStringBinary(result.getRow()));
            rows.add(result.getRow());
        }
    }
    return rows;
}
Also used : HTable(org.apache.hadoop.hbase.client.HTable) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)

Example 40 with Result

use of org.apache.hadoop.hbase.client.Result in project hadoop by apache.

The method checkFlowActivityTableSeveralRuns of class TestHBaseStorageFlowActivity.

/**
 * Scans the flow activity table for the single row written for {@code flow} on the day of
 * {@code appCreatedTime} and asserts its key fields plus the presence of all three run-id
 * columns (one per flow version).
 *
 * @throws IOException if the scan fails
 */
private void checkFlowActivityTableSeveralRuns(String cluster, String user, String flow, Configuration c1, String flowVersion1, long runid1, String flowVersion2, long runid2, String flowVersion3, long runid3, long appCreatedTime) throws IOException {
    Scan s = new Scan();
    s.addFamily(FlowActivityColumnFamily.INFO.getBytes());
    // Bound the scan to exactly this (cluster, day, user, flow) row range;
    // "cluster + 1" makes a stop key just past every key for this cluster.
    byte[] startRow = new FlowActivityRowKey(cluster, appCreatedTime, user, flow).getRowKey();
    s.setStartRow(startRow);
    String clusterStop = cluster + "1";
    byte[] stopRow = new FlowActivityRowKey(clusterStop, appCreatedTime, user, flow).getRowKey();
    s.setStopRow(stopRow);
    int rowCount = 0;
    // FIX: the original never closed the Connection, Table, or ResultScanner,
    // leaking them on every call; try-with-resources closes all three.
    try (Connection conn = ConnectionFactory.createConnection(c1);
         Table table1 = conn.getTable(TableName.valueOf(FlowActivityTable.DEFAULT_TABLE_NAME));
         ResultScanner scanner = table1.getScanner(s)) {
        for (Result result : scanner) {
            assertNotNull(result);
            assertTrue(!result.isEmpty());
            byte[] row = result.getRow();
            FlowActivityRowKey flowActivityRowKey = FlowActivityRowKey.parseRowKey(row);
            assertNotNull(flowActivityRowKey);
            assertEquals(cluster, flowActivityRowKey.getClusterId());
            assertEquals(user, flowActivityRowKey.getUserId());
            assertEquals(flow, flowActivityRowKey.getFlowName());
            // The row key's day timestamp must be the top-of-day of appCreatedTime.
            Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime);
            assertEquals(dayTs, flowActivityRowKey.getDayTimestamp());
            Map<byte[], byte[]> values = result.getFamilyMap(FlowActivityColumnFamily.INFO.getBytes());
            rowCount++;
            // One column per run: three runs were written, so three values.
            assertEquals(3, values.size());
            checkFlowActivityRunId(runid1, flowVersion1, values);
            checkFlowActivityRunId(runid2, flowVersion2, values);
            checkFlowActivityRunId(runid3, flowVersion3, values);
        }
    }
    // the flow activity table is such that it will insert
    // into current day's record
    // hence, if this test runs across the midnight boundary,
    // it may fail since it would insert into two records
    // one for each day
    assertEquals(1, rowCount);
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Connection(org.apache.hadoop.hbase.client.Connection) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)

Aggregations

Result (org.apache.hadoop.hbase.client.Result)753 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)302 Test (org.junit.Test)295 Scan (org.apache.hadoop.hbase.client.Scan)287 Get (org.apache.hadoop.hbase.client.Get)282 Table (org.apache.hadoop.hbase.client.Table)228 Cell (org.apache.hadoop.hbase.Cell)203 Put (org.apache.hadoop.hbase.client.Put)183 IOException (java.io.IOException)172 ArrayList (java.util.ArrayList)160 TableName (org.apache.hadoop.hbase.TableName)125 Delete (org.apache.hadoop.hbase.client.Delete)111 Connection (org.apache.hadoop.hbase.client.Connection)102 KeyValue (org.apache.hadoop.hbase.KeyValue)76 Configuration (org.apache.hadoop.conf.Configuration)72 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)62 InterruptedIOException (java.io.InterruptedIOException)59 PrivilegedExceptionAction (java.security.PrivilegedExceptionAction)50 CellScanner (org.apache.hadoop.hbase.CellScanner)47 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)45