Example 21 with HBaseTableUtilFactory

Use of io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory in project cdap by cdapio.

From the class IncrementSummingScannerTest, method createRegion.

static HRegion createRegion(Configuration hConf, CConfiguration cConf, TableId tableId, HColumnDescriptor cfd) throws Exception {
    HBaseTableUtil tableUtil = new HBaseTableUtilFactory(cConf).get();
    HTableDescriptorBuilder htd = tableUtil.buildHTableDescriptor(tableId);
    // Retain all cell versions and deleted cells so the scanner can see every increment delta.
    cfd.setMaxVersions(Integer.MAX_VALUE);
    cfd.setKeepDeletedCells(true);
    htd.addFamily(cfd);
    // Attach the coprocessor under test to the table.
    htd.addCoprocessor(IncrementHandler.class.getName());
    HTableDescriptor desc = htd.build();
    String tableName = desc.getNameAsString();
    Path tablePath = new Path("/tmp/" + tableName);
    Path hlogPath = new Path("/tmp/hlog-" + tableName);
    FileSystem fs = FileSystem.get(hConf);
    assertTrue(fs.mkdirs(tablePath));
    // Stand up a standalone region, with its own WAL, outside of a running region server.
    WALFactory walFactory = new WALFactory(hConf, null, hlogPath.toString());
    WAL hLog = walFactory.getWAL(new byte[] { 1 });
    HRegionInfo regionInfo = new HRegionInfo(desc.getTableName());
    HRegionFileSystem regionFS = HRegionFileSystem.createRegionOnFileSystem(hConf, fs, tablePath, regionInfo);
    return new HRegion(regionFS, hLog, hConf, desc,
        new LocalRegionServerServices(hConf, ServerName.valueOf(
            InetAddress.getLocalHost().getHostName(), 0, System.currentTimeMillis())));
}
Also used : Path(org.apache.hadoop.fs.Path) HTableDescriptorBuilder(io.cdap.cdap.data2.util.hbase.HTableDescriptorBuilder) WAL(org.apache.hadoop.hbase.wal.WAL) HBaseTableUtil(io.cdap.cdap.data2.util.hbase.HBaseTableUtil) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) HRegionFileSystem(org.apache.hadoop.hbase.regionserver.HRegionFileSystem) FileSystem(org.apache.hadoop.fs.FileSystem) HBaseTableUtilFactory(io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory) WALFactory(org.apache.hadoop.hbase.wal.WALFactory)
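
For context, a caller can exercise this helper roughly as follows. This is a minimal sketch, not code from the test class: the table name, the column family "i", the sample put, and the initialize/close handling are all illustrative assumptions.

Configuration hConf = HBaseConfiguration.create();
CConfiguration cConf = CConfiguration.create();
// Hypothetical table id and column family, chosen for illustration only.
TableId tableId = TableId.from(NamespaceId.DEFAULT.getEntityName(), "incrementScanTest");
HRegion region = createRegion(hConf, cConf, tableId, new HColumnDescriptor("i"));
try {
    region.initialize();
    Put put = new Put(Bytes.toBytes("r1"));
    put.addColumn(Bytes.toBytes("i"), Bytes.toBytes("c"), Bytes.toBytes(1L));
    region.put(put);
} finally {
    region.close();
}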

Example 23 with HBaseTableUtilFactory

Use of io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory in project cdap by cdapio.

From the class HBasePayloadTableTestRun, method setupBeforeClass.

@BeforeClass
public static void setupBeforeClass() throws Exception {
    hConf = HBASE_TEST_BASE.getConfiguration();
    hConf.set(HBaseTableUtil.CFG_HBASE_TABLE_COMPRESSION, HBaseTableUtil.CompressionType.NONE.name());
    cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().getAbsolutePath());
    cConf.set(Constants.CFG_HDFS_NAMESPACE, cConf.get(Constants.CFG_LOCAL_DATA_DIR));
    cConf.set(Constants.CFG_HDFS_USER, System.getProperty("user.name"));
    tableUtil = new HBaseTableUtilFactory(cConf).get();
    ddlExecutor = new HBaseDDLExecutorFactory(cConf, hConf).get();
    // Ensure the CDAP system namespace exists before any tables are created in it.
    ddlExecutor.createNamespaceIfNotExists(tableUtil.getHBaseNamespace(NamespaceId.SYSTEM));
    LocationFactory locationFactory = getInjector().getInstance(LocationFactory.class);
    tableFactory = new HBaseTableFactory(cConf, hConf, tableUtil, locationFactory);
    // Persist the CConfiguration into HBase so coprocessors can read it at runtime.
    new ConfigurationWriter(hConf, cConf).write(ConfigurationReader.Type.DEFAULT, cConf);
}
Also used : ConfigurationWriter(io.cdap.cdap.data2.util.hbase.ConfigurationWriter) HBaseDDLExecutorFactory(io.cdap.cdap.data2.util.hbase.HBaseDDLExecutorFactory) HBaseTableUtilFactory(io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory) LocationFactory(org.apache.twill.filesystem.LocationFactory) BeforeClass(org.junit.BeforeClass)
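
A matching teardown typically mirrors this setup. The sketch below assumes the DDL executor exposes deleteNamespaceIfExists as the inverse of createNamespaceIfNotExists; it is not copied from the test class.

@AfterClass
public static void tearDownAfterClass() throws Exception {
    // Assumption: remove the system namespace created in setupBeforeClass.
    ddlExecutor.deleteNamespaceIfExists(tableUtil.getHBaseNamespace(NamespaceId.SYSTEM));
}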

Example 24 with HBaseTableUtilFactory

Use of io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory in project cdap by cdapio.

From the class HBaseTableCoprocessorTestRun, method setupBeforeClass.

@BeforeClass
public static void setupBeforeClass() throws Exception {
    hConf = HBASE_TEST_BASE.getConfiguration();
    hConf.set(HBaseTableUtil.CFG_HBASE_TABLE_COMPRESSION, HBaseTableUtil.CompressionType.NONE.name());
    cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().getAbsolutePath());
    cConf.set(Constants.CFG_HDFS_NAMESPACE, cConf.get(Constants.CFG_LOCAL_DATA_DIR));
    cConf.set(Constants.CFG_HDFS_USER, System.getProperty("user.name"));
    // Reduce the metadata cache refresh frequency for unit tests
    cConf.set(Constants.MessagingSystem.COPROCESSOR_METADATA_CACHE_UPDATE_FREQUENCY_SECONDS, Integer.toString(METADATA_CACHE_EXPIRY));
    hBaseAdmin = HBASE_TEST_BASE.getHBaseAdmin();
    hBaseAdmin.getConfiguration().set(HBaseTableUtil.CFG_HBASE_TABLE_COMPRESSION, HBaseTableUtil.CompressionType.NONE.name());
    tableUtil = new HBaseTableUtilFactory(cConf).get();
    ddlExecutor = new HBaseDDLExecutorFactory(cConf, hConf).get();
    ddlExecutor.createNamespaceIfNotExists(tableUtil.getHBaseNamespace(NamespaceId.SYSTEM));
    LocationFactory locationFactory = getInjector().getInstance(LocationFactory.class);
    tableFactory = new HBaseTableFactory(cConf, hBaseAdmin.getConfiguration(), tableUtil, locationFactory);
    new ConfigurationWriter(hConf, cConf).write(ConfigurationReader.Type.DEFAULT, cConf);
    // Write an initial transaction snapshot with V[3], V[5] and V[7] in the invalid list.
    invalidList.addAll(ImmutableList.of(V[3], V[5], V[7]));
    TransactionSnapshot txSnapshot = TransactionSnapshot.copyFrom(
        System.currentTimeMillis(), V[6] - 1, V[7], invalidList,
        // this will set visibility upper bound to V[6]
        Maps.newTreeMap(ImmutableSortedMap.of(
            V[6], new TransactionManager.InProgressTx(V[6] - 1, Long.MAX_VALUE, TransactionManager.InProgressType.SHORT))),
        new HashMap<>(), new TreeMap<>());
    HDFSTransactionStateStorage tmpStorage = new HDFSTransactionStateStorage(hConf, new SnapshotCodecProvider(hConf), new TxMetricsCollector());
    tmpStorage.startAndWait();
    tmpStorage.writeSnapshot(txSnapshot);
    tmpStorage.stopAndWait();
}
Also used : ConfigurationWriter(io.cdap.cdap.data2.util.hbase.ConfigurationWriter) TxMetricsCollector(org.apache.tephra.metrics.TxMetricsCollector) SnapshotCodecProvider(org.apache.tephra.snapshot.SnapshotCodecProvider) LocationFactory(org.apache.twill.filesystem.LocationFactory) HDFSTransactionStateStorage(org.apache.tephra.persist.HDFSTransactionStateStorage) TransactionSnapshot(org.apache.tephra.persist.TransactionSnapshot) TransactionManager(org.apache.tephra.TransactionManager) HBaseDDLExecutorFactory(io.cdap.cdap.data2.util.hbase.HBaseDDLExecutorFactory) HBaseTableUtilFactory(io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory) BeforeClass(org.junit.BeforeClass)
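
To sanity-check what the coprocessor will observe, the snapshot can be read back through the same storage. A brief sketch, assuming Tephra's getLatestSnapshot and getInvalid accessors behave as their names suggest:

HDFSTransactionStateStorage verifyStorage =
    new HDFSTransactionStateStorage(hConf, new SnapshotCodecProvider(hConf), new TxMetricsCollector());
verifyStorage.startAndWait();
try {
    // The invalid list written above (V[3], V[5], V[7]) should round-trip intact.
    TransactionSnapshot readBack = verifyStorage.getLatestSnapshot();
    Assert.assertEquals(invalidList, new ArrayList<>(readBack.getInvalid()));
} finally {
    verifyStorage.stopAndWait();
}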

Example 25 with HBaseTableUtilFactory

Use of io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory in project cdap by cdapio.

From the class AbstractIncrementHandlerTest, method testIncrements.

@Test
public void testIncrements() throws Exception {
    TableId tableId = TableId.from(NamespaceId.DEFAULT.getEntityName(), "incrementTest");
    createTable(tableId);
    try (Table table = new HBaseTableUtilFactory(cConf).get().createTable(conf, tableId)) {
        byte[] colA = Bytes.toBytes("a");
        byte[] row1 = Bytes.toBytes("row1");
        // test column containing only increments
        table.put(newIncrement(row1, colA, 1));
        table.put(newIncrement(row1, colA, 1));
        table.put(newIncrement(row1, colA, 1));
        assertColumn(table, row1, colA, 3);
        // test intermixed increments and puts
        table.put(tableUtil.buildPut(row1).add(FAMILY, colA, ts++, Bytes.toBytes(5L)).build());
        assertColumn(table, row1, colA, 5);
        table.put(newIncrement(row1, colA, 1));
        table.put(newIncrement(row1, colA, 1));
        assertColumn(table, row1, colA, 7);
        // test multiple increment columns
        byte[] row2 = Bytes.toBytes("row2");
        byte[] colB = Bytes.toBytes("b");
        // increment A and B twice at the same timestamp
        table.put(newIncrement(row2, colA, 1, 1));
        table.put(newIncrement(row2, colB, 1, 1));
        table.put(newIncrement(row2, colA, 2, 1));
        table.put(newIncrement(row2, colB, 2, 1));
        // increment A once more
        table.put(newIncrement(row2, colA, 1));
        assertColumns(table, row2, new byte[][] { colA, colB }, new long[] { 3, 2 });
        // overwrite B with a new put
        table.put(tableUtil.buildPut(row2).add(FAMILY, colB, ts++, Bytes.toBytes(10L)).build());
        assertColumns(table, row2, new byte[][] { colA, colB }, new long[] { 3, 10 });
    }
}
Also used : TableId(io.cdap.cdap.data2.util.TableId) HBaseTable(io.cdap.cdap.data2.dataset2.lib.table.hbase.HBaseTable) Table(org.apache.hadoop.hbase.client.Table) HBaseTableUtilFactory(io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory) Test(org.junit.Test)
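
The newIncrement helpers are defined elsewhere in AbstractIncrementHandlerTest. A plausible shape is sketched below; the delta-marker attribute name and value are assumptions about how IncrementHandler distinguishes delta writes from plain puts, not the project's actual constants.

// Sketch: build a Put flagged so the IncrementHandler coprocessor merges it as a
// delta rather than an overwrite. The attribute key is an assumed placeholder.
private Put newIncrement(byte[] row, byte[] column, long value) {
    return newIncrement(row, column, ts++, value);
}

private Put newIncrement(byte[] row, byte[] column, long timestamp, long value) {
    Put put = tableUtil.buildPut(row).add(FAMILY, column, timestamp, Bytes.toBytes(value)).build();
    put.setAttribute("cdap.delta.write", Bytes.toBytes(true)); // assumed marker attribute
    return put;
}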

Aggregations

HBaseTableUtilFactory (io.cdap.cdap.data2.util.hbase.HBaseTableUtilFactory): 42 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 34 usages
HBaseTableUtil (io.cdap.cdap.data2.util.hbase.HBaseTableUtil): 28 usages
HBaseTableUtilFactory (co.cask.cdap.data2.util.hbase.HBaseTableUtilFactory): 25 usages
HTableDescriptorBuilder (io.cdap.cdap.data2.util.hbase.HTableDescriptorBuilder): 20 usages
HBaseTableUtil (co.cask.cdap.data2.util.hbase.HBaseTableUtil): 18 usages
BeforeClass (org.junit.BeforeClass): 18 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 17 usages
Path (org.apache.hadoop.fs.Path): 17 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 17 usages
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 17 usages
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 17 usages
HRegionFileSystem (org.apache.hadoop.hbase.regionserver.HRegionFileSystem): 17 usages
WAL (org.apache.hadoop.hbase.wal.WAL): 15 usages
WALFactory (org.apache.hadoop.hbase.wal.WALFactory): 15 usages
LocationFactory (org.apache.twill.filesystem.LocationFactory): 15 usages
HTableDescriptorBuilder (co.cask.cdap.data2.util.hbase.HTableDescriptorBuilder): 14 usages
HBaseDDLExecutorFactory (io.cdap.cdap.data2.util.hbase.HBaseDDLExecutorFactory): 12 usages
IOException (java.io.IOException): 9 usages
HBaseDDLExecutorFactory (co.cask.cdap.data2.util.hbase.HBaseDDLExecutorFactory): 6 usages