Search in sources:

Example 46 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

The class TestFlushWithThroughputController, method setUp.

@Before
public void setUp() {
    // Name the table after the running test method so each test works on its own table.
    tableName = TableName.valueOf("Table-" + testName.getMethodName());
    hbtu = new HBaseTestingUtil();
    // Install the pressure-aware controller as the flush throughput controller under test.
    hbtu.getConfiguration().set(
        FlushThroughputControllerFactory.HBASE_FLUSH_THROUGHPUT_CONTROLLER_KEY,
        PressureAwareFlushThroughputController.class.getName());
}
Also used : HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Before(org.junit.Before)

Example 47 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

The class TestInitializeStoreFileTracker, method setUp.

@Before
public void setUp() throws Exception {
    conf = HBaseConfiguration.create();
    // Shrink the RollingUpgradeChore's startup delay and period so it runs quickly in the test.
    conf.setLong(RollingUpgradeChore.ROLLING_UPGRADE_CHORE_DELAY_SECONDS_KEY, 1);
    conf.setInt(RollingUpgradeChore.ROLLING_UPGRADE_CHORE_PERIOD_SECONDS_KEY, 1);
    // Make FILE the configured default tracker implementation, so the test can confirm
    // that SFT is NOT set to file.
    conf.set(StoreFileTrackerFactory.TRACKER_IMPL, StoreFileTrackerFactory.Trackers.FILE.name());
    HTU = new HBaseTestingUtil(conf);
    HTU.startMiniCluster();
}
Also used : HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Before(org.junit.Before)

Example 48 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

The class TestSCVFWithMiniCluster, method setUp.

/**
 * Starts a single-node mini cluster, (re)creates the test table, loads three rows, and
 * builds the SingleColumnValueFilter under test.
 * <p>
 * Row layout: row '1' has a:foo=false, row '2' has a:foo=true, row '3' has no a:foo at all.
 * With setFilterIfMissing(true), only row '1' should survive a scan with {@code scanFilter}.
 *
 * @throws Exception if the mini cluster or table setup fails
 */
@BeforeClass
public static void setUp() throws Exception {
    HBaseTestingUtil util = new HBaseTestingUtil();
    util.startMiniCluster(1);
    // try-with-resources closes the Admin even if destroy/create throws (the original
    // leaked it on that path).
    try (Admin admin = util.getAdmin()) {
        destroy(admin, HBASE_TABLE_NAME);
        create(admin, HBASE_TABLE_NAME, FAMILY_A, FAMILY_B);
    }
    htable = util.getConnection().getTable(HBASE_TABLE_NAME);
    /* Add some values */
    List<Put> puts = new ArrayList<>();
    /* Add a row with 'a:foo' = false */
    puts.add(newRow("1", "false"));
    /* Add a row with 'a:foo' = true */
    puts.add(newRow("2", "true"));
    /* Add a row with 'a:foo' qualifier not set */
    puts.add(newRow("3", null));
    htable.put(puts);
    /*
     * We want to filter out from the scan all rows that do not have the column 'a:foo' with value
     * 'false'. Only row with key '1' should be returned in the scan.
     */
    SingleColumnValueFilter filter = new SingleColumnValueFilter(FAMILY_A, QUALIFIER_FOO,
        CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("false")));
    // Rows that lack 'a:foo' entirely must be excluded rather than passed through.
    filter.setFilterIfMissing(true);
    scanFilter = filter;
}

/**
 * Builds one test row. Every row carries the '_flag_' marker in a:bar, b:foo and b:bar;
 * the a:foo cell is written only when {@code aFooValue} is non-null.
 *
 * @param row       row key
 * @param aFooValue value for the a:foo cell, or null to leave that qualifier unset
 * @return the populated Put (WAL skipped for test speed)
 */
private static Put newRow(String row, String aFooValue) {
    Put put = new Put(Bytes.toBytes(row));
    put.setDurability(Durability.SKIP_WAL);
    if (aFooValue != null) {
        put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes(aFooValue));
    }
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    return put;
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) ArrayList(java.util.ArrayList) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) BeforeClass(org.junit.BeforeClass)

Example 49 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

The class TestResettingCounters, method testResettingCounters.

/**
 * Verifies that counter values survive a flush: odd qualifiers are incremented 5 times and
 * flushed, even qualifiers incremented 5 times in the memstore, then a final increment of
 * all qualifiers must read 6 for every cell (5 prior increments + 1).
 * <p>
 * Bug fix: the original called {@code HBaseTestingUtil.closeRegionAndWAL(region)} both in
 * the finally block and again after it, double-closing the region and its WAL. The
 * redundant second call is removed; the finally block alone guarantees cleanup.
 *
 * @throws Exception on any region or filesystem failure
 */
@Test
public void testResettingCounters() throws Exception {
    HBaseTestingUtil htu = new HBaseTestingUtil();
    Configuration conf = htu.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    byte[] table = Bytes.toBytes(name.getMethodName());
    byte[][] families = new byte[][] { Bytes.toBytes("family1"), Bytes.toBytes("family2"), Bytes.toBytes("family3") };
    int numQualifiers = 10;
    byte[][] qualifiers = new byte[numQualifiers][];
    for (int i = 0; i < numQualifiers; i++) qualifiers[i] = Bytes.toBytes("qf" + i);
    int numRows = 10;
    byte[][] rows = new byte[numRows][];
    for (int i = 0; i < numRows; i++) rows[i] = Bytes.toBytes("r" + i);
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(table));
    for (byte[] family : families) {
        builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    TableDescriptor tableDescriptor = builder.build();
    RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    String testDir = htu.getDataTestDir() + "/TestResettingCounters/";
    Path path = new Path(testDir);
    // Start from a clean directory so leftovers from a prior run cannot skew results.
    if (fs.exists(path)) {
        if (!fs.delete(path, true)) {
            throw new IOException("Failed delete of " + path);
        }
    }
    HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, path, conf, tableDescriptor);
    try {
        // All three Increments target the same row; WAL is skipped for test speed.
        Increment odd = new Increment(rows[0]);
        odd.setDurability(Durability.SKIP_WAL);
        Increment even = new Increment(rows[0]);
        even.setDurability(Durability.SKIP_WAL);
        Increment all = new Increment(rows[0]);
        all.setDurability(Durability.SKIP_WAL);
        for (int i = 0; i < numQualifiers; i++) {
            if (i % 2 == 0)
                even.addColumn(families[0], qualifiers[i], 1);
            else
                odd.addColumn(families[0], qualifiers[i], 1);
            all.addColumn(families[0], qualifiers[i], 1);
        }
        // increment odd qualifiers 5 times and flush
        for (int i = 0; i < 5; i++) region.increment(odd, HConstants.NO_NONCE, HConstants.NO_NONCE);
        region.flush(true);
        // increment even qualifiers 5 times
        for (int i = 0; i < 5; i++) region.increment(even, HConstants.NO_NONCE, HConstants.NO_NONCE);
        // increment all qualifiers, should have value=6 for all
        Result result = region.increment(all, HConstants.NO_NONCE, HConstants.NO_NONCE);
        assertEquals(numQualifiers, result.size());
        Cell[] kvs = result.rawCells();
        for (int i = 0; i < kvs.length; i++) {
            System.out.println(kvs[i].toString());
            assertTrue(CellUtil.matchingQualifier(kvs[i], qualifiers[i]));
            assertEquals(6, Bytes.toLong(CellUtil.cloneValue(kvs[i])));
        }
    } finally {
        // Close exactly once; finally guarantees cleanup on both success and failure.
        HBaseTestingUtil.closeRegionAndWAL(region);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) IOException(java.io.IOException) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Result(org.apache.hadoop.hbase.client.Result) FileSystem(org.apache.hadoop.fs.FileSystem) Increment(org.apache.hadoop.hbase.client.Increment) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 50 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

The class TestSecureBulkloadListener, method setUp.

@Before
public void setUp() throws Exception {
    random.nextBytes(randomBytes);
    htu = new HBaseTestingUtil();
    // A tiny DFS block size forces multi-block files; replication 3 matches the
    // three datanodes started below.
    htu.getConfiguration().setInt("dfs.blocksize", 1024);
    htu.getConfiguration().setInt("dfs.replication", 3);
    // One datanode per rack/host pairing.
    String[] racks = new String[] { "/r1", "/r2", "/r3" };
    String[] hosts = new String[] { host1, host2, host3 };
    htu.startMiniDFSCluster(3, racks, hosts);
    conf = htu.getConfiguration();
    cluster = htu.getDFSCluster();
    dfs = (DistributedFileSystem) FileSystem.get(conf);
}
Also used : HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Before(org.junit.Before)

Aggregations

HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil)144 Configuration (org.apache.hadoop.conf.Configuration)42 Test (org.junit.Test)42 Before (org.junit.Before)41 BeforeClass (org.junit.BeforeClass)37 Path (org.apache.hadoop.fs.Path)24 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)22 Admin (org.apache.hadoop.hbase.client.Admin)22 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)15 StartTestingClusterOption (org.apache.hadoop.hbase.StartTestingClusterOption)14 FileSystem (org.apache.hadoop.fs.FileSystem)13 MiniZooKeeperCluster (org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster)12 TableName (org.apache.hadoop.hbase.TableName)10 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)10 SingleProcessHBaseCluster (org.apache.hadoop.hbase.SingleProcessHBaseCluster)9 ServerName (org.apache.hadoop.hbase.ServerName)8 Table (org.apache.hadoop.hbase.client.Table)8 ZKWatcher (org.apache.hadoop.hbase.zookeeper.ZKWatcher)8 IOException (java.io.IOException)7 ArrayList (java.util.ArrayList)7