Search in sources:

Example 81 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

From class IncrementSummingScannerTest, method testWithBatchLimit.

/**
 * Verifies that IncrementSummingScanner honors a batch limit of 3: the first
 * next() call must return exactly three cells — a plain column, the summed
 * increment column, and the next plain column — with delta increments and the
 * older "total" cell merged into a single summed value (5 + 3 * 1 = 8).
 */
@Test
public void testWithBatchLimit() throws Exception {
    TableId tableId = TableId.from(NamespaceId.DEFAULT.getNamespace(), "testWithBatchLimit");
    byte[] familyBytes = Bytes.toBytes("f");
    byte[] columnBytes = Bytes.toBytes("c2");
    HRegion region = createRegion(tableId, familyBytes);
    try {
        region.initialize();
        long now = System.currentTimeMillis();
        // put a non-increment column
        Put p = new Put(Bytes.toBytes("r4"));
        p.add(familyBytes, Bytes.toBytes("c1"), Bytes.toBytes("value1"));
        region.put(p);
        // now put some increment deltas in a column
        p = new Put(Bytes.toBytes("r4"));
        for (int i = 0; i < 3; i++) {
            p.add(familyBytes, columnBytes, now - i, Bytes.toBytes(1L));
        }
        p.setAttribute(HBaseTable.DELTA_WRITE, TRUE);
        region.put(p);
        // put some non-increment columns
        p = new Put(Bytes.toBytes("r4"));
        p.add(familyBytes, Bytes.toBytes("c3"), Bytes.toBytes("value3"));
        region.put(p);
        p = new Put(Bytes.toBytes("r4"));
        p.add(familyBytes, Bytes.toBytes("c4"), Bytes.toBytes("value4"));
        region.put(p);
        p = new Put(Bytes.toBytes("r4"));
        p.add(familyBytes, Bytes.toBytes("c5"), Bytes.toBytes("value5"));
        region.put(p);
        // this put will appear as a "total" sum prior to all the delta puts
        p = new Put(Bytes.toBytes("r4"));
        p.add(familyBytes, columnBytes, now - 5, Bytes.toBytes(5L));
        region.put(p);
        Scan scan = new Scan(Bytes.toBytes("r4"));
        scan.setMaxVersions();
        RegionScanner scanner = new IncrementSummingScanner(region, 3, region.getScanner(scan), ScanType.USER_SCAN);
        // close the scanner even if an assertion fails (the original leaked it)
        try {
            List<Cell> results = Lists.newArrayList();
            scanner.next(results);
            assertEquals(3, results.size());
            Cell cell = results.get(0);
            assertNotNull(cell);
            assertEquals("value1", Bytes.toString(cell.getValue()));
            cell = results.get(1);
            assertNotNull(cell);
            // 3 deltas of 1 plus the earlier total of 5
            assertEquals(8L, Bytes.toLong(cell.getValue()));
            cell = results.get(2);
            assertNotNull(cell);
            assertEquals("value3", Bytes.toString(cell.getValue()));
        } finally {
            scanner.close();
        }
    } finally {
        region.close();
    }
}
Also used : TableId(co.cask.cdap.data2.util.TableId) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test) HBase11Test(co.cask.cdap.data.hbase.HBase11Test)

Example 82 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

From class IncrementSummingScannerTest, method verifyCounts.

/**
 * Scans {@code region} through an IncrementSummingScanner with the given batch
 * size and asserts that each next() call yields exactly one cell whose summed
 * long value matches the corresponding entry of {@code counts}, and that the
 * scanner reports no more rows afterwards.
 *
 * @param region region to scan
 * @param scan   scan to execute
 * @param counts expected summed value per row, in scan order
 * @param batch  batch limit to pass to the summing scanner
 */
private void verifyCounts(HRegion region, Scan scan, long[] counts, int batch) throws Exception {
    RegionScanner scanner = new IncrementSummingScanner(region, batch, region.getScanner(scan), ScanType.USER_SCAN);
    // close the scanner even if an assertion fails (the original leaked it)
    try {
        // init with false so the final assertFalse passes when counts is empty
        boolean hasMore = counts.length > 0;
        for (long count : counts) {
            List<Cell> results = Lists.newArrayList();
            hasMore = scanner.next(results);
            assertEquals(1, results.size());
            Cell cell = results.get(0);
            assertNotNull(cell);
            assertEquals(count, Bytes.toLong(cell.getValue()));
        }
        assertFalse(hasMore);
    } finally {
        scanner.close();
    }
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Cell(org.apache.hadoop.hbase.Cell)

Example 83 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

From class HBaseTableTest, method testEnableIncrements.

/**
 * Verifies that the IncrementHandler coprocessor is attached only to tables
 * created with readless-increment support, and that increments on an enabled
 * table are persisted as encoded delta cells rather than full values.
 */
@Test
public void testEnableIncrements() throws Exception {
    // setup a table with increments disabled and one with it enabled
    String disableTableName = "incr-disable";
    String enabledTableName = "incr-enable";
    TableId disabledTableId = hBaseTableUtil.createHTableId(NAMESPACE1, disableTableName);
    TableId enabledTableId = hBaseTableUtil.createHTableId(NAMESPACE1, enabledTableName);
    DatasetProperties propsDisabled = TableProperties.builder().setReadlessIncrementSupport(false).setConflictDetection(ConflictDetection.COLUMN).build();
    HBaseTableAdmin disabledAdmin = getTableAdmin(CONTEXT1, disableTableName, propsDisabled);
    disabledAdmin.create();
    HBaseAdmin admin = TEST_HBASE.getHBaseAdmin();
    DatasetProperties propsEnabled = TableProperties.builder().setReadlessIncrementSupport(true).setConflictDetection(ConflictDetection.COLUMN).build();
    HBaseTableAdmin enabledAdmin = getTableAdmin(CONTEXT1, enabledTableName, propsEnabled);
    enabledAdmin.create();
    try {
        byte[] enabledTableNameBytes;
        try {
            HTableDescriptor htd = hBaseTableUtil.getHTableDescriptor(admin, disabledTableId);
            List<String> cps = htd.getCoprocessors();
            assertFalse(cps.contains(IncrementHandler.class.getName()));
            htd = hBaseTableUtil.getHTableDescriptor(admin, enabledTableId);
            cps = htd.getCoprocessors();
            assertTrue(cps.contains(IncrementHandler.class.getName()));
            // capture the table name while admin is still open; the original code
            // called getHTableDescriptor(admin, ...) again AFTER admin.close()
            enabledTableNameBytes = htd.getName();
        } finally {
            admin.close();
        }
        BufferingTable table = getTable(CONTEXT1, enabledTableName, propsEnabled);
        byte[] row = Bytes.toBytes("row1");
        byte[] col = Bytes.toBytes("col1");
        DetachedTxSystemClient txSystemClient = new DetachedTxSystemClient();
        Transaction tx = txSystemClient.startShort();
        table.startTx(tx);
        table.increment(row, col, 10);
        table.commitTx();
        // verify that the value was written as an encoded delta value
        final byte[] expectedValue = Bytes.add(IncrementHandlerState.DELTA_MAGIC_PREFIX, Bytes.toBytes(10L));
        final AtomicBoolean foundValue = new AtomicBoolean();
        TEST_HBASE.forEachRegion(enabledTableNameBytes, new Function<HRegion, Object>() {

            @Override
            public Object apply(HRegion hRegion) {
                org.apache.hadoop.hbase.client.Scan scan = hBaseTableUtil.buildScan().build();
                try {
                    RegionScanner scanner = hRegion.getScanner(scan);
                    try {
                        List<Cell> results = Lists.newArrayList();
                        boolean hasMore;
                        do {
                            // next() appends; clear so previous batches are not re-checked
                            results.clear();
                            hasMore = scanner.next(results);
                            for (Cell cell : results) {
                                if (CellUtil.matchingValue(cell, expectedValue)) {
                                    foundValue.set(true);
                                }
                            }
                        } while (hasMore);
                    } finally {
                        // the original never closed the region scanner
                        scanner.close();
                    }
                } catch (IOException ioe) {
                    fail("IOException scanning region: " + ioe.getMessage());
                }
                return null;
            }
        });
        assertTrue("Should have seen the expected encoded delta value in the " + enabledTableName + " table region", foundValue.get());
    } finally {
        disabledAdmin.drop();
        enabledAdmin.drop();
    }
}
Also used : TableId(co.cask.cdap.data2.util.TableId) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BufferingTable(co.cask.cdap.data2.dataset2.lib.table.BufferingTable) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Transaction(org.apache.tephra.Transaction) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) Scan(co.cask.cdap.api.dataset.table.Scan) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Cell(org.apache.hadoop.hbase.Cell) BufferingTableTest(co.cask.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)

Example 84 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

From class IncrementHandler, method preGetOp.

/**
 * Intercepts Gets on this region: rewrites the Get as a multi-version Scan,
 * sums increment delta cells via IncrementSummingScanner into {@code results},
 * and bypasses the default Get processing.
 */
@Override
public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> ctx, Get get, List<Cell> results) throws IOException {
    // All versions are needed so delta cells can be folded into one total;
    // IncrementFilter is combined with any filter the client supplied.
    Scan summingScan = new Scan(get);
    summingScan.setMaxVersions();
    summingScan.setFilter(Filters.combine(new IncrementFilter(), summingScan.getFilter()));
    RegionScanner summingScanner = null;
    try {
        summingScanner = new IncrementSummingScanner(region, summingScan.getBatch(), region.getScanner(summingScan), ScanType.USER_SCAN);
        summingScanner.next(results);
        // results are fully populated here; skip the normal Get path
        ctx.bypass();
    } finally {
        if (summingScanner != null) {
            summingScanner.close();
        }
    }
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Scan(org.apache.hadoop.hbase.client.Scan)

Example 85 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

From class IncrementSummingScannerTest, method verifyCounts.

/**
 * Scans {@code region} through an IncrementSummingScanner with the given batch
 * size and asserts that each next() call yields exactly one cell whose summed
 * long value matches the corresponding entry of {@code counts}, and that the
 * scanner reports no more rows afterwards.
 *
 * @param region region to scan
 * @param scan   scan to execute
 * @param counts expected summed value per row, in scan order
 * @param batch  batch limit to pass to the summing scanner
 */
private void verifyCounts(HRegion region, Scan scan, long[] counts, int batch) throws Exception {
    RegionScanner scanner = new IncrementSummingScanner(region, batch, region.getScanner(scan), ScanType.USER_SCAN);
    // close the scanner even if an assertion fails (the original leaked it)
    try {
        // init with false so the final assertFalse passes when counts is empty
        boolean hasMore = counts.length > 0;
        for (long count : counts) {
            List<Cell> results = Lists.newArrayList();
            hasMore = scanner.next(results);
            assertEquals(1, results.size());
            Cell cell = results.get(0);
            assertNotNull(cell);
            assertEquals(count, Bytes.toLong(cell.getValue()));
        }
        assertFalse(hasMore);
    } finally {
        scanner.close();
    }
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Cell(org.apache.hadoop.hbase.Cell)

Aggregations

RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner)162 Scan (org.apache.hadoop.hbase.client.Scan)126 Cell (org.apache.hadoop.hbase.Cell)116 Put (org.apache.hadoop.hbase.client.Put)60 Test (org.junit.Test)60 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)55 ArrayList (java.util.ArrayList)52 Delete (org.apache.hadoop.hbase.client.Delete)27 TableId (io.cdap.cdap.data2.util.TableId)26 List (java.util.List)25 Region (org.apache.hadoop.hbase.regionserver.Region)22 IOException (java.io.IOException)18 ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr)18 TableId (co.cask.cdap.data2.util.TableId)17 RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment)11 PTable (org.apache.phoenix.schema.PTable)10 PMetaDataEntity (org.apache.phoenix.schema.PMetaDataEntity)9 KeyValue (org.apache.hadoop.hbase.KeyValue)8 Mutation (org.apache.hadoop.hbase.client.Mutation)8 Result (org.apache.hadoop.hbase.client.Result)8