Search in sources :

Example 1 with BufferingTable

Use of co.cask.cdap.data2.dataset2.lib.table.BufferingTable in the project cdap by caskdata.

From the class HBaseTableTest, method testColumnFamily.

@Test
public void testColumnFamily() throws Exception {
    // Create a dataset table configured with a non-default column family name ("t").
    DatasetProperties props = TableProperties.builder().setColumnFamily("t").build();
    String tableName = "testcf";
    DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName, props);
    admin.create();
    // FIX: the original never closed either BufferingTable instance (resource leak).
    // try-with-resources closes both; resource variables are implicitly final, so the
    // anonymous Subroutine classes below may still capture them.
    try (BufferingTable table = getTable(CONTEXT1, tableName, props);
        BufferingTable table2 = getTable(CONTEXT1, tableName, props)) {
        TransactionSystemClient txClient = new DetachedTxSystemClient();
        // First transaction: write a value through the first table instance.
        TransactionExecutor executor = new DefaultTransactionExecutor(txClient, table);
        executor.execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                table.put(new Put("row", "column", "testValue"));
            }
        });
        // Second transaction: read the value back through an independent instance.
        executor = new DefaultTransactionExecutor(txClient, table2);
        executor.execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                Assert.assertEquals("testValue", table2.get(new Get("row", "column")).getString("column"));
            }
        });
        // Verify the physical HBase table actually carries the configured column family.
        TableId hTableId = hBaseTableUtil.createHTableId(new NamespaceId(CONTEXT1.getNamespaceId()), tableName);
        HTableDescriptor htd = hBaseTableUtil.getHTableDescriptor(TEST_HBASE.getHBaseAdmin(), hTableId);
        HColumnDescriptor hcd = htd.getFamily(Bytes.toBytes("t"));
        Assert.assertNotNull(hcd);
        Assert.assertEquals("t", hcd.getNameAsString());
    }
}
Also used : TableId(co.cask.cdap.data2.util.TableId) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetAdmin(co.cask.cdap.api.dataset.DatasetAdmin) TransactionExecutor(org.apache.tephra.TransactionExecutor) DefaultTransactionExecutor(org.apache.tephra.DefaultTransactionExecutor) ScannerTimeoutException(org.apache.hadoop.hbase.client.ScannerTimeoutException) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) Put(co.cask.cdap.api.dataset.table.Put) BufferingTable(co.cask.cdap.data2.dataset2.lib.table.BufferingTable) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) Get(co.cask.cdap.api.dataset.table.Get) DefaultTransactionExecutor(org.apache.tephra.DefaultTransactionExecutor) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) NamespaceId(co.cask.cdap.proto.id.NamespaceId) BufferingTableTest(co.cask.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)

Example 2 with BufferingTable

Use of co.cask.cdap.data2.dataset2.lib.table.BufferingTable in the project cdap by caskdata.

From the class HBaseTableTest, method testEnableIncrements.

@Test
public void testEnableIncrements() throws Exception {
    // setup a table with increments disabled and with it enabled
    String disableTableName = "incr-disable";
    String enabledTableName = "incr-enable";
    TableId disabledTableId = hBaseTableUtil.createHTableId(NAMESPACE1, disableTableName);
    TableId enabledTableId = hBaseTableUtil.createHTableId(NAMESPACE1, enabledTableName);
    DatasetProperties propsDisabled = TableProperties.builder().setReadlessIncrementSupport(false).setConflictDetection(ConflictDetection.COLUMN).build();
    HBaseTableAdmin disabledAdmin = getTableAdmin(CONTEXT1, disableTableName, propsDisabled);
    disabledAdmin.create();
    HBaseAdmin admin = TEST_HBASE.getHBaseAdmin();
    DatasetProperties propsEnabled = TableProperties.builder().setReadlessIncrementSupport(true).setConflictDetection(ConflictDetection.COLUMN).build();
    HBaseTableAdmin enabledAdmin = getTableAdmin(CONTEXT1, enabledTableName, propsEnabled);
    enabledAdmin.create();
    try {
        final byte[] enabledTableNameBytes;
        try {
            // The IncrementHandler coprocessor must only be attached when readless
            // increments are enabled.
            HTableDescriptor htd = hBaseTableUtil.getHTableDescriptor(admin, disabledTableId);
            List<String> cps = htd.getCoprocessors();
            assertFalse(cps.contains(IncrementHandler.class.getName()));
            htd = hBaseTableUtil.getHTableDescriptor(admin, enabledTableId);
            cps = htd.getCoprocessors();
            assertTrue(cps.contains(IncrementHandler.class.getName()));
            // FIX: capture the HBase table name while the admin is still open. The
            // original re-fetched the descriptor AFTER admin.close(), calling a closed
            // HBaseAdmin.
            enabledTableNameBytes = htd.getName();
        } finally {
            admin.close();
        }
        // FIX: close the table when done; the original leaked it. The resource
        // variable is implicitly final.
        try (BufferingTable table = getTable(CONTEXT1, enabledTableName, propsEnabled)) {
            byte[] row = Bytes.toBytes("row1");
            byte[] col = Bytes.toBytes("col1");
            DetachedTxSystemClient txSystemClient = new DetachedTxSystemClient();
            Transaction tx = txSystemClient.startShort();
            table.startTx(tx);
            table.increment(row, col, 10);
            table.commitTx();
            // verify that value was written as a delta value
            final byte[] expectedValue = Bytes.add(IncrementHandlerState.DELTA_MAGIC_PREFIX, Bytes.toBytes(10L));
            final AtomicBoolean foundValue = new AtomicBoolean();
            TEST_HBASE.forEachRegion(enabledTableNameBytes, new Function<HRegion, Object>() {

                @Override
                public Object apply(HRegion hRegion) {
                    org.apache.hadoop.hbase.client.Scan scan = hBaseTableUtil.buildScan().build();
                    try {
                        RegionScanner scanner = hRegion.getScanner(scan);
                        List<Cell> results = Lists.newArrayList();
                        boolean hasMore;
                        do {
                            hasMore = scanner.next(results);
                            for (Cell cell : results) {
                                if (CellUtil.matchingValue(cell, expectedValue)) {
                                    foundValue.set(true);
                                }
                            }
                        } while (hasMore);
                    } catch (IOException ioe) {
                        fail("IOException scanning region: " + ioe.getMessage());
                    }
                    return null;
                }
            });
            assertTrue("Should have seen the expected encoded delta value in the " + enabledTableName + " table region", foundValue.get());
        }
    } finally {
        disabledAdmin.drop();
        enabledAdmin.drop();
    }
}
Also used : TableId(co.cask.cdap.data2.util.TableId) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BufferingTable(co.cask.cdap.data2.dataset2.lib.table.BufferingTable) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Transaction(org.apache.tephra.Transaction) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) Scan(co.cask.cdap.api.dataset.table.Scan) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Cell(org.apache.hadoop.hbase.Cell) BufferingTableTest(co.cask.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)

Example 3 with BufferingTable

use of io.cdap.cdap.data2.dataset2.lib.table.BufferingTable in project cdap by caskdata.

From the class HBaseTableTest, method testEnableIncrements.

@Test
public void testEnableIncrements() throws Exception {
    // setup a table with increments disabled and with it enabled
    String disableTableName = "incr-disable";
    String enabledTableName = "incr-enable";
    TableId disabledTableId = hBaseTableUtil.createHTableId(NAMESPACE1, disableTableName);
    TableId enabledTableId = hBaseTableUtil.createHTableId(NAMESPACE1, enabledTableName);
    DatasetProperties propsDisabled = TableProperties.builder().setReadlessIncrementSupport(false).setConflictDetection(ConflictDetection.COLUMN).build();
    HBaseTableAdmin disabledAdmin = getTableAdmin(CONTEXT1, disableTableName, propsDisabled);
    disabledAdmin.create();
    HBaseAdmin admin = TEST_HBASE.getHBaseAdmin();
    DatasetProperties propsEnabled = TableProperties.builder().setReadlessIncrementSupport(true).setConflictDetection(ConflictDetection.COLUMN).build();
    HBaseTableAdmin enabledAdmin = getTableAdmin(CONTEXT1, enabledTableName, propsEnabled);
    enabledAdmin.create();
    try {
        final byte[] enabledTableNameBytes;
        try {
            // The IncrementHandler coprocessor must only be attached when readless
            // increments are enabled.
            HTableDescriptor htd = hBaseTableUtil.getHTableDescriptor(admin, disabledTableId);
            List<String> cps = htd.getCoprocessors();
            assertFalse(cps.contains(IncrementHandler.class.getName()));
            htd = hBaseTableUtil.getHTableDescriptor(admin, enabledTableId);
            cps = htd.getCoprocessors();
            assertTrue(cps.contains(IncrementHandler.class.getName()));
            // FIX: capture the HBase table name while the admin is still open. The
            // original fetched it after admin.close() (use of a closed HBaseAdmin).
            enabledTableNameBytes = htd.getName();
        } finally {
            admin.close();
        }
        try (BufferingTable table = getTable(CONTEXT1, enabledTableName, propsEnabled)) {
            byte[] row = Bytes.toBytes("row1");
            byte[] col = Bytes.toBytes("col1");
            DetachedTxSystemClient txSystemClient = new DetachedTxSystemClient();
            Transaction tx = txSystemClient.startShort();
            table.startTx(tx);
            table.increment(row, col, 10);
            table.commitTx();
            // verify that value was written as a delta value
            final byte[] expectedValue = Bytes.add(IncrementHandlerState.DELTA_MAGIC_PREFIX, Bytes.toBytes(10L));
            final AtomicBoolean foundValue = new AtomicBoolean();
            TEST_HBASE.forEachRegion(enabledTableNameBytes, new Function<HRegion, Object>() {

                @Override
                public Object apply(HRegion hRegion) {
                    org.apache.hadoop.hbase.client.Scan scan = hBaseTableUtil.buildScan().build();
                    try {
                        RegionScanner scanner = hRegion.getScanner(scan);
                        List<Cell> results = Lists.newArrayList();
                        boolean hasMore;
                        do {
                            hasMore = scanner.next(results);
                            for (Cell cell : results) {
                                if (CellUtil.matchingValue(cell, expectedValue)) {
                                    foundValue.set(true);
                                }
                            }
                        } while (hasMore);
                    } catch (IOException ioe) {
                        fail("IOException scanning region: " + ioe.getMessage());
                    }
                    return null;
                }
            });
            assertTrue("Should have seen the expected encoded delta value in the " + enabledTableName + " table region", foundValue.get());
        }
    } finally {
        disabledAdmin.drop();
        enabledAdmin.drop();
    }
}
Also used : TableId(io.cdap.cdap.data2.util.TableId) DatasetProperties(io.cdap.cdap.api.dataset.DatasetProperties) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BufferingTable(io.cdap.cdap.data2.dataset2.lib.table.BufferingTable) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Transaction(org.apache.tephra.Transaction) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) Scan(io.cdap.cdap.api.dataset.table.Scan) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Cell(org.apache.hadoop.hbase.Cell) BufferingTableTest(io.cdap.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)

Example 4 with BufferingTable

use of io.cdap.cdap.data2.dataset2.lib.table.BufferingTable in project cdap by caskdata.

From the class HBaseTableTest, method testColumnFamily.

@Test
public void testColumnFamily() throws Exception {
    // Exercise a table whose column family is configured as "t" instead of the default.
    String tableName = "testcf";
    DatasetProperties properties = TableProperties.builder().setColumnFamily("t").build();
    DatasetAdmin tableAdmin = getTableAdmin(CONTEXT1, tableName, properties);
    tableAdmin.create();
    // Two independent instances of the same table; both are closed on exit.
    try (BufferingTable writer = getTable(CONTEXT1, tableName, properties);
        BufferingTable reader = getTable(CONTEXT1, tableName, properties)) {
        TransactionSystemClient txClient = new DetachedTxSystemClient();
        // First transaction: store a value through the first instance.
        new DefaultTransactionExecutor(txClient, writer).execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                writer.put(new Put("row", "column", "testValue"));
            }
        });
        // Second transaction: read it back through the other instance.
        new DefaultTransactionExecutor(txClient, reader).execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                Assert.assertEquals("testValue", reader.get(new Get("row", "column")).getString("column"));
            }
        });
        // Confirm the physical HBase table carries the configured family name.
        NamespaceId namespace = new NamespaceId(CONTEXT1.getNamespaceId());
        TableId physicalTableId = hBaseTableUtil.createHTableId(namespace, tableName);
        HTableDescriptor descriptor = hBaseTableUtil.getHTableDescriptor(TEST_HBASE.getHBaseAdmin(), physicalTableId);
        HColumnDescriptor family = descriptor.getFamily(Bytes.toBytes("t"));
        Assert.assertNotNull(family);
        Assert.assertEquals("t", family.getNameAsString());
    }
}
Also used : TableId(io.cdap.cdap.data2.util.TableId) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) DatasetProperties(io.cdap.cdap.api.dataset.DatasetProperties) DatasetAdmin(io.cdap.cdap.api.dataset.DatasetAdmin) TransactionExecutor(org.apache.tephra.TransactionExecutor) DefaultTransactionExecutor(org.apache.tephra.DefaultTransactionExecutor) ScannerTimeoutException(org.apache.hadoop.hbase.client.ScannerTimeoutException) IOException(java.io.IOException) Put(io.cdap.cdap.api.dataset.table.Put) BufferingTable(io.cdap.cdap.data2.dataset2.lib.table.BufferingTable) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) Get(io.cdap.cdap.api.dataset.table.Get) DefaultTransactionExecutor(org.apache.tephra.DefaultTransactionExecutor) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) NamespaceId(io.cdap.cdap.proto.id.NamespaceId) BufferingTableTest(io.cdap.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)

Aggregations

IOException (java.io.IOException)4 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)4 DetachedTxSystemClient (org.apache.tephra.inmemory.DetachedTxSystemClient)4 Test (org.junit.Test)4 DatasetProperties (co.cask.cdap.api.dataset.DatasetProperties)2 BufferingTable (co.cask.cdap.data2.dataset2.lib.table.BufferingTable)2 BufferingTableTest (co.cask.cdap.data2.dataset2.lib.table.BufferingTableTest)2 TableId (co.cask.cdap.data2.util.TableId)2 ImmutableList (com.google.common.collect.ImmutableList)2 DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties)2 BufferingTable (io.cdap.cdap.data2.dataset2.lib.table.BufferingTable)2 BufferingTableTest (io.cdap.cdap.data2.dataset2.lib.table.BufferingTableTest)2 TableId (io.cdap.cdap.data2.util.TableId)2 InterruptedIOException (java.io.InterruptedIOException)2 List (java.util.List)2 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)2 Cell (org.apache.hadoop.hbase.Cell)2 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)2 HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin)2 ScannerTimeoutException (org.apache.hadoop.hbase.client.ScannerTimeoutException)2