Search in sources:

Example 91 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestScannersFromClientSide, method testGetRowOffset.

/**
 * Verifies client-side Get behavior when a per-column-family row offset is
 * configured: alone, combined with a per-CF result cap, combined with a
 * ColumnRangeFilter, and across multiple column families.
 *
 * @throws Exception on any test failure
 */
@Test
public void testGetRowOffset() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
    byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20);
    Table ht = TEST_UTIL.createTable(tableName, FAMILIES);
    boolean toLog = true;

    // Load one CF with ten columns; with an offset of 2 the first two
    // columns are expected to be skipped, so they are excluded here.
    List<Cell> expected = new ArrayList<>();
    Put put = new Put(ROW);
    for (int col = 0; col < 10; col++) {
        KeyValue kv = new KeyValue(ROW, FAMILIES[0], QUALIFIERS[col], 1, VALUE);
        put.add(kv);
        if (col >= 2) {
            expected.add(kv);
        }
    }
    ht.put(put);

    // Offset of 2 drops exactly the first two columns.
    Get get = new Get(ROW);
    get.setRowOffsetPerColumnFamily(2);
    Result result = ht.get(get);
    verifyResult(result, expected, toLog, "Testing basic setRowOffset");

    // An offset beyond the number of stored columns yields an empty result.
    get = new Get(ROW);
    get.setRowOffsetPerColumnFamily(20);
    result = ht.get(get);
    expected = new ArrayList<>();
    verifyResult(result, expected, toLog, "Testing offset > #kvs");

    // Offset combined with a per-CF result cap: expect columns 4..8.
    get = new Get(ROW);
    get.setRowOffsetPerColumnFamily(4);
    get.setMaxResultsPerColumnFamily(5);
    result = ht.get(get);
    expected = new ArrayList<>();
    for (int col = 4; col < 9; col++) {
        expected.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[col], 1, VALUE));
    }
    verifyResult(result, expected, toLog, "Testing offset + setMaxResultsPerCF");

    // Offset applied together with ColumnRangeFilter [q2, q5]:
    // the filter keeps q2..q5, then the offset of 1 skips q2.
    get = new Get(ROW);
    get.setRowOffsetPerColumnFamily(1);
    get.setFilter(new ColumnRangeFilter(QUALIFIERS[2], true, QUALIFIERS[5], true));
    result = ht.get(get);
    expected = new ArrayList<>();
    for (int col = 3; col <= 5; col++) {
        expected.add(new KeyValue(ROW, FAMILIES[0], QUALIFIERS[col], 1, VALUE));
    }
    verifyResult(result, expected, toLog, "Testing offset with CRF");

    // Populate CF2 first, then CF1, each with ten columns (same order as the
    // original countdown loop).
    for (int fam = 2; fam > 0; fam--) {
        Put multiCfPut = new Put(ROW);
        for (int col = 0; col < 10; col++) {
            multiCfPut.add(new KeyValue(ROW, FAMILIES[fam], QUALIFIERS[col], 1, VALUE));
        }
        ht.put(multiCfPut);
    }

    // Offset + max-results are applied independently per family:
    // expect q4 and q5 from each of CF1 and CF2.
    get = new Get(ROW);
    get.setRowOffsetPerColumnFamily(4);
    get.setMaxResultsPerColumnFamily(2);
    get.addFamily(FAMILIES[1]);
    get.addFamily(FAMILIES[2]);
    result = ht.get(get);
    expected = new ArrayList<>();
    for (int fam = 1; fam <= 2; fam++) {
        expected.add(new KeyValue(ROW, FAMILIES[fam], QUALIFIERS[4], 1, VALUE));
        expected.add(new KeyValue(ROW, FAMILIES[fam], QUALIFIERS[5], 1, VALUE));
    }
    verifyResult(result, expected, toLog, "Testing offset + multiple CFs + maxResults");
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ColumnRangeFilter(org.apache.hadoop.hbase.filter.ColumnRangeFilter) TableName(org.apache.hadoop.hbase.TableName) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 92 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestScannersFromClientSide, method testScanOnReopenedRegion.

/**
 * Test from client side for scan while the region is reopened
 * on the same region server. The scanner is opened before the region is
 * bounced and is expected to still return the row afterwards.
 *
 * @throws Exception on any test failure
 */
@Test
public void testScanOnReopenedRegion() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 2);
    Table ht = TEST_UTIL.createTable(tableName, FAMILY);
    boolean toLog = false;
    // table: row, family, c0:0, c1:1
    Put put = new Put(ROW);
    for (int i = 0; i < QUALIFIERS.length; i++) {
        put.add(new KeyValue(ROW, FAMILY, QUALIFIERS[i], i, VALUE));
    }
    ht.put(put);
    Scan scan = new Scan().withStartRow(ROW);
    // Open the scanner BEFORE closing the region so the test exercises the
    // scanner surviving the reopen. try-with-resources fixes the scanner
    // leak in the original code (the scanner was never closed).
    try (ResultScanner scanner = ht.getScanner(scan)) {
        HRegionLocation loc;
        try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
            loc = locator.getRegionLocation(ROW);
        }
        HRegionInfo hri = loc.getRegionInfo();
        MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
        byte[] regionName = hri.getRegionName();
        HRegionServer rs = cluster.getRegionServer(cluster.getServerWith(regionName));
        // Close the region, then wait (bounded by timeOut) until it is
        // actually offline on the region server.
        ProtobufUtil.closeRegion(null, rs.getRSRpcServices(), rs.getServerName(), regionName);
        long timeOut = 300000;
        long startTime = EnvironmentEdgeManager.currentTime();
        while (rs.getOnlineRegion(regionName) != null) {
            assertTrue("Timed out in closing the testing region",
                EnvironmentEdgeManager.currentTime() < startTime + timeOut);
            Thread.sleep(500);
        }
        // Now open the region again on the same server; the master's region
        // state must be nudged to OPENING first so the open RPC is accepted.
        HMaster master = cluster.getMaster();
        RegionStates states = master.getAssignmentManager().getRegionStates();
        states.regionOffline(hri);
        states.updateRegionState(hri, State.OPENING);
        ProtobufUtil.openRegion(null, rs.getRSRpcServices(), rs.getServerName(), hri);
        startTime = EnvironmentEdgeManager.currentTime();
        while (rs.getOnlineRegion(regionName) == null) {
            assertTrue("Timed out in open the testing region",
                EnvironmentEdgeManager.currentTime() < startTime + timeOut);
            Thread.sleep(500);
        }
        // c0:0, c1:1 — the pre-existing scanner should return the row intact.
        List<Cell> kvListExp = new ArrayList<>();
        kvListExp.add(new KeyValue(ROW, FAMILY, QUALIFIERS[0], 0, VALUE));
        kvListExp.add(new KeyValue(ROW, FAMILY, QUALIFIERS[1], 1, VALUE));
        Result result = scanner.next();
        verifyResult(result, kvListExp, toLog, "Testing scan on re-opened region");
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) MiniHBaseCluster(org.apache.hadoop.hbase.MiniHBaseCluster) HRegionServer(org.apache.hadoop.hbase.regionserver.HRegionServer) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) TableName(org.apache.hadoop.hbase.TableName) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) RegionStates(org.apache.hadoop.hbase.master.RegionStates) HMaster(org.apache.hadoop.hbase.master.HMaster) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 93 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestScannersFromClientSide, method testSmallScan.

@Test
public void testSmallScan() throws Exception {
    // Fill a table with numRows x numQualifiers cells, then exercise the
    // small-scan code path in both forward and reversed directions.
    final TableName tableName = TableName.valueOf(name.getMethodName());
    final int numRows = 10;
    final int numQualifiers = 10;
    byte[][] ROWS = HTestConst.makeNAscii(ROW, numRows);
    byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, numQualifiers);
    Table ht = TEST_UTIL.createTable(tableName, FAMILY);
    List<Put> puts = new ArrayList<>();
    for (byte[] row : ROWS) {
        Put put = new Put(row);
        for (byte[] qualifier : QUALIFIERS) {
            put.add(new KeyValue(row, FAMILY, qualifier, VALUE));
        }
        puts.add(put);
    }
    ht.put(puts);
    // Every row and every cell should come back in both scan directions.
    // Test normal and reversed
    testSmallScan(ht, true, numRows, numRows * numQualifiers);
    testSmallScan(ht, false, numRows, numRows * numQualifiers);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 94 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestScannersFromClientSide, method testAsyncScanner.

/**
 * Loads rowNumber x familyNumber x qualifierNumber cells and scans them
 * back through the async-prefetch scanner, verifying the scanner type, the
 * row count, and that every written cell is returned. The table is deleted
 * at the end.
 */
private void testAsyncScanner(TableName table, int rowNumber, int familyNumber, int qualifierNumber, int caching, Consumer<Boolean> listener) throws Exception {
    assert rowNumber > 0;
    assert familyNumber > 0;
    assert qualifierNumber > 0;
    byte[][] rows = makeNAsciiWithZeroPrefix(Bytes.toBytes("r"), rowNumber);
    byte[][] families = makeNAsciiWithZeroPrefix(Bytes.toBytes("f"), familyNumber);
    byte[][] qualifiers = makeNAsciiWithZeroPrefix(Bytes.toBytes("q"), qualifierNumber);
    Table ht = TEST_UTIL.createTable(table, families);
    boolean toLog = true;
    List<Cell> kvListExp = new ArrayList<>();
    // Write every cell, flushing in batches of ~1000 puts to bound memory.
    List<Put> pending = new ArrayList<>();
    for (byte[] r : rows) {
        Put put = new Put(r);
        for (byte[] f : families) {
            for (byte[] q : qualifiers) {
                KeyValue kv = new KeyValue(r, f, q, 1, VALUE);
                put.add(kv);
                kvListExp.add(kv);
            }
        }
        pending.add(put);
        if (pending.size() > 1000) {
            ht.put(pending);
            pending.clear();
        }
    }
    if (!pending.isEmpty()) {
        ht.put(pending);
        pending.clear();
    }
    Scan scan = new Scan();
    scan.setAsyncPrefetch(true);
    if (caching > 0) {
        scan.setCaching(caching);
    }
    try (ResultScanner scanner = ht.getScanner(scan)) {
        assertTrue("Not instance of async scanner", scanner instanceof ClientAsyncPrefetchScanner);
        ((ClientAsyncPrefetchScanner) scanner).setPrefetchListener(listener);
        List<Cell> kvListScan = new ArrayList<>();
        int actualRows = 0;
        boolean first = true;
        for (Result result; (result = scanner.next()) != null; ) {
            ++actualRows;
            // waiting for cache. see HBASE-17376
            if (first) {
                TimeUnit.SECONDS.sleep(1);
                first = false;
            }
            kvListScan.addAll(result.listCells());
        }
        assertEquals(rowNumber, actualRows);
        // These cells may have different rows but it is ok. The Result#getRow
        // isn't used in the verifyResult()
        verifyResult(Result.create(kvListScan), kvListExp, toLog, "Testing async scan");
    }
    TEST_UTIL.deleteTable(table);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Cell(org.apache.hadoop.hbase.Cell)

Example 95 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class SampleRegionWALObserver, method preWALWrite.

@Override
public boolean preWALWrite(ObserverContext<? extends WALCoprocessorEnvironment> env, HRegionInfo info, WALKey logKey, WALEdit logEdit) throws IOException {
    boolean bypass = false;
    // Only act on edits targeting the table this observer was configured for.
    if (!Bytes.equals(info.getTable().toBytes(), this.tableName)) {
        return bypass;
    }
    preWALWriteCalled = true;
    // Scan the WALEdit: remember one KeyValue to remove, and bump the first
    // value byte of another (mutating its backing array in place).
    List<Cell> cells = logEdit.getCells();
    Cell cellToDelete = null;
    for (Cell cell : cells) {
        byte[] family = CellUtil.cloneFamily(cell);
        byte[] qualifier = CellUtil.cloneQualifier(cell);
        // assume only one kv from the WALEdit matches.
        if (Arrays.equals(family, ignoredFamily) && Arrays.equals(qualifier, ignoredQualifier)) {
            LOG.debug("Found the KeyValue from WALEdit which should be ignored.");
            cellToDelete = cell;
        }
        if (Arrays.equals(family, changedFamily) && Arrays.equals(qualifier, changedQualifier)) {
            LOG.debug("Found the KeyValue from WALEdit which should be changed.");
            cell.getValueArray()[cell.getValueOffset()] += 1;
        }
    }
    // Append a brand-new KeyValue when a row is configured.
    if (null != row) {
        cells.add(new KeyValue(row, addedFamily, addedQualifier));
    }
    // Remove the matched cell after iteration to avoid a
    // ConcurrentModificationException.
    if (cellToDelete != null) {
        LOG.debug("About to delete a KeyValue from WALEdit.");
        cells.remove(cellToDelete);
    }
    return bypass;
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) Cell(org.apache.hadoop.hbase.Cell)

Aggregations

KeyValue (org.apache.hadoop.hbase.KeyValue)552 Test (org.junit.Test)289 Cell (org.apache.hadoop.hbase.Cell)193 ArrayList (java.util.ArrayList)172 Put (org.apache.hadoop.hbase.client.Put)98 Scan (org.apache.hadoop.hbase.client.Scan)85 Result (org.apache.hadoop.hbase.client.Result)70 Configuration (org.apache.hadoop.conf.Configuration)64 Path (org.apache.hadoop.fs.Path)55 ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag)36 Tag (org.apache.hadoop.hbase.Tag)35 ByteBuffer (java.nio.ByteBuffer)34 List (java.util.List)34 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)34 IOException (java.io.IOException)32 TableName (org.apache.hadoop.hbase.TableName)32 TreeMap (java.util.TreeMap)29 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)28 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)28 WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit)27