Search in sources :

Example 71 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

From the class TestAlwaysStandByHMaster, the method testAlwaysStandBy:

/**
 * Verifies that an AlwaysStandByHMaster never promotes itself to the active role, even when
 * the cluster is left without any active master at all.
 */
@Test
public void testAlwaysStandBy() throws Exception {
    HBaseTestingUtil util = miniClusterRule.getTestingUtility();
    // Precondition: the cluster came up with an active master and exactly two
    // master threads (the regular master plus the always-standby one).
    assertNotNull(util.getMiniHBaseCluster().getMaster());
    assertEquals(2, util.getMiniHBaseCluster().getMasterThreads().size());
    // Stop the sole active master and block until the stop completes.
    util.getMiniHBaseCluster().stopMaster(0).join();
    // The standby must NOT take over; give it 5s to (not) become active.
    assertFalse(util.getMiniHBaseCluster().waitForActiveAndReadyMaster(5000));
    // Start a fresh, regular master.
    HMaster replacement = util.getMiniHBaseCluster().startMaster().getMaster();
    assertTrue(util.getMiniHBaseCluster().waitForActiveAndReadyMaster(5000));
    // The freshly started master — not the standby — must now hold the active role.
    assertEquals(replacement.getServerName(), util.getMiniHBaseCluster().getMaster().getServerName());
}
Also used : HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Test(org.junit.Test)

Example 72 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

From the class TestVerifyBucketCacheFile, the method testModifiedBucketCacheFileData:

/**
 * Tests that BucketCache starts normally after its backing cache file has been corrupted.
 * Starts a BucketCache, adds some blocks, then shuts it down so the cache is persisted.
 * After overwriting the cache file's contents, a restarted BucketCache must refuse to
 * restore from the tampered file and instead start empty (the stale cache/persistence
 * files are discarded before the cache comes up).
 * @throws Exception on any cache or filesystem failure
 */
@Test
public void testModifiedBucketCacheFileData() throws Exception {
    HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
    Path testDir = TEST_UTIL.getDataTestDir();
    TEST_UTIL.getTestFileSystem().mkdirs(testDir);
    // Compute the IO-engine and persistence locations once; both are reused on restart.
    String ioEngineName = "file:" + testDir + "/bucket.cache";
    String persistencePath = testDir + "/bucket.persistence";
    BucketCache bucketCache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistencePath);
    long usedSize = bucketCache.getAllocator().getUsedSize();
    assertEquals(0, usedSize);
    CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(constructedBlockSize, 1);
    // Cache the generated blocks and wait until they are flushed into the bucket file.
    for (CacheTestUtils.HFileBlockPair block : blocks) {
        cacheAndWaitUntilFlushedToBucket(bucketCache, block.getBlockName(), block.getBlock());
    }
    usedSize = bucketCache.getAllocator().getUsedSize();
    assertNotEquals(0, usedSize);
    // Shutting down persists the cache state to the persistence file.
    bucketCache.shutdown();
    // Corrupt the cache file by overwriting it with unrelated text.
    String file = testDir + "/bucket.cache";
    // Explicit UTF-8: pre-Java-18, omitting the charset silently uses the platform default.
    try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, false), java.nio.charset.StandardCharsets.UTF_8))) {
        out.write("test bucket cache");
    }
    // Restart: validation must reject the tampered file, so the cache starts empty.
    bucketCache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistencePath);
    assertEquals(0, bucketCache.getAllocator().getUsedSize());
    assertEquals(0, bucketCache.backingMap.size());
    TEST_UTIL.cleanupTestDir();
}
Also used : Path(org.apache.hadoop.fs.Path) FileOutputStream(java.io.FileOutputStream) CacheTestUtils(org.apache.hadoop.hbase.io.hfile.CacheTestUtils) OutputStreamWriter(java.io.OutputStreamWriter) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) BufferedWriter(java.io.BufferedWriter) Test(org.junit.Test)

Example 73 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

From the class TestVerifyBucketCacheFile, the method testRetrieveFromFile:

/**
 * Exercises BucketCache restart behavior with respect to its two on-disk artifacts:
 * (1) With both the cache file and the persistence file intact, a restarted BucketCache
 * restores its previous contents.
 * (2) With the cache file deleted, restore fails and the cache starts empty (any stale
 * files are cleaned up before the cache comes up).
 * (3) With the persistence file deleted, restore likewise fails and the cache starts empty.
 * @throws Exception on any cache or filesystem failure
 */
@Test
public void testRetrieveFromFile() throws Exception {
    HBaseTestingUtil testUtil = new HBaseTestingUtil();
    Path testDir = testUtil.getDataTestDir();
    testUtil.getTestFileSystem().mkdirs(testDir);
    // Engine and persistence locations are identical across every restart below.
    String engine = "file:" + testDir + "/bucket.cache";
    String persistence = testDir + "/bucket.persistence";
    BucketCache cache = new BucketCache(engine, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistence);
    assertEquals(0, cache.getAllocator().getUsedSize());
    CacheTestUtils.HFileBlockPair[] pairs = CacheTestUtils.generateHFileBlocks(constructedBlockSize, 1);
    // Fill the cache and wait for the blocks to reach the bucket file.
    for (CacheTestUtils.HFileBlockPair pair : pairs) {
        cacheAndWaitUntilFlushedToBucket(cache, pair.getBlockName(), pair.getBlock());
    }
    long usedSize = cache.getAllocator().getUsedSize();
    assertNotEquals(0, usedSize);
    // (1) Clean shutdown persists the cache; a restart restores the same used size.
    cache.shutdown();
    cache = new BucketCache(engine, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistence);
    assertEquals(usedSize, cache.getAllocator().getUsedSize());
    cache.shutdown();
    // (2) Remove the bucket cache file: the restarted cache must come up empty.
    final java.nio.file.Path bucketFile = FileSystems.getDefault().getPath(testDir.toString(), "bucket.cache");
    assertTrue(Files.deleteIfExists(bucketFile));
    cache = new BucketCache(engine, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistence);
    assertEquals(0, cache.getAllocator().getUsedSize());
    assertEquals(0, cache.backingMap.size());
    // Re-populate so there is state to (fail to) restore in step (3).
    for (CacheTestUtils.HFileBlockPair pair : pairs) {
        cacheAndWaitUntilFlushedToBucket(cache, pair.getBlockName(), pair.getBlock());
    }
    usedSize = cache.getAllocator().getUsedSize();
    assertNotEquals(0, usedSize);
    cache.shutdown();
    // (3) Remove the backingMap persistence file: again no restore, cache starts empty.
    final java.nio.file.Path mapFile = FileSystems.getDefault().getPath(testDir.toString(), "bucket.persistence");
    assertTrue(Files.deleteIfExists(mapFile));
    cache = new BucketCache(engine, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistence);
    assertEquals(0, cache.getAllocator().getUsedSize());
    assertEquals(0, cache.backingMap.size());
    testUtil.cleanupTestDir();
}
Also used : Path(org.apache.hadoop.fs.Path) CacheTestUtils(org.apache.hadoop.hbase.io.hfile.CacheTestUtils) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Test(org.junit.Test)

Example 74 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

From the class TestVerifyBucketCacheFile, the method testModifiedBucketCacheFileTime:

/**
 * Verifies BucketCache refuses to restore from a cache file whose last-modified timestamp
 * has changed since shutdown. Starts a BucketCache, adds some blocks, shuts it down to
 * persist the cache, then bumps the cache file's mtime; the restarted cache must start
 * empty (stale cache/persistence files are removed before it comes up).
 * @throws Exception on any cache or filesystem failure
 */
@Test
public void testModifiedBucketCacheFileTime() throws Exception {
    HBaseTestingUtil testUtil = new HBaseTestingUtil();
    Path testDir = testUtil.getDataTestDir();
    testUtil.getTestFileSystem().mkdirs(testDir);
    // Same engine/persistence locations are used for the restart below.
    String engine = "file:" + testDir + "/bucket.cache";
    String persistence = testDir + "/bucket.persistence";
    BucketCache cache = new BucketCache(engine, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistence);
    assertEquals(0, cache.getAllocator().getUsedSize());
    CacheTestUtils.HFileBlockPair[] pairs = CacheTestUtils.generateHFileBlocks(constructedBlockSize, 1);
    // Fill the cache and wait for the blocks to reach the bucket file.
    for (CacheTestUtils.HFileBlockPair pair : pairs) {
        cacheAndWaitUntilFlushedToBucket(cache, pair.getBlockName(), pair.getBlock());
    }
    assertNotEquals(0, cache.getAllocator().getUsedSize());
    // Shutting down persists the cache state.
    cache.shutdown();
    // Push the cache file's last-modified time one second into the future.
    final java.nio.file.Path bucketFile = FileSystems.getDefault().getPath(testDir.toString(), "bucket.cache");
    Files.setLastModifiedTime(bucketFile, FileTime.from(Instant.now().plusMillis(1000)));
    // Restart: the timestamp mismatch must prevent restore, so the cache starts empty.
    cache = new BucketCache(engine, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, persistence);
    assertEquals(0, cache.getAllocator().getUsedSize());
    assertEquals(0, cache.backingMap.size());
    testUtil.cleanupTestDir();
}
Also used : Path(org.apache.hadoop.fs.Path) CacheTestUtils(org.apache.hadoop.hbase.io.hfile.CacheTestUtils) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Test(org.junit.Test)

Example 75 with HBaseTestingUtil

use of org.apache.hadoop.hbase.HBaseTestingUtil in project hbase by apache.

From the class TestReplicationEditsDroppedWithDeletedTableCFs, the method setUpBeforeClass:

/**
 * Brings up two single-node mini clusters that share one mini ZooKeeper ensemble,
 * with replication configured to drop edits for deleted column families, and
 * stores Admin handles for both clusters in static fields.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    // Set true to filter replication edits for dropped table
    conf1.setBoolean(REPLICATION_DROP_ON_DELETED_COLUMN_FAMILY_KEY, true);
    // Distinct znode parents ("/1" vs "/2" below) let both clusters share one ZK ensemble.
    conf1.set(ZOOKEEPER_ZNODE_PARENT, "/1");
    // Limit the replication source batch capacity to a single entry.
    conf1.setInt("replication.source.nb.capacity", 1);
    utility1 = new HBaseTestingUtil(conf1);
    // Start a single mini ZK cluster; utility2 attaches to the same one below.
    utility1.startMiniZKCluster();
    MiniZooKeeperCluster miniZK = utility1.getZkCluster();
    // Refresh conf1 from the utility; conf2 is derived from this refreshed copy.
    // NOTE(review): presumably this picks up settings assigned while starting ZK — confirm.
    conf1 = utility1.getConfiguration();
    conf2 = HBaseConfiguration.create(conf1);
    conf2.set(ZOOKEEPER_ZNODE_PARENT, "/2");
    utility2 = new HBaseTestingUtil(conf2);
    // Reuse the already-running ZK cluster instead of starting a second one.
    utility2.setZkCluster(miniZK);
    // Bring up both single-node clusters and grab their admin handles.
    utility1.startMiniCluster(1);
    utility2.startMiniCluster(1);
    admin1 = utility1.getAdmin();
    admin2 = utility2.getAdmin();
}
Also used : MiniZooKeeperCluster(org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) BeforeClass(org.junit.BeforeClass)

Aggregations

HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil)144 Configuration (org.apache.hadoop.conf.Configuration)42 Test (org.junit.Test)42 Before (org.junit.Before)41 BeforeClass (org.junit.BeforeClass)37 Path (org.apache.hadoop.fs.Path)24 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)22 Admin (org.apache.hadoop.hbase.client.Admin)22 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)15 StartTestingClusterOption (org.apache.hadoop.hbase.StartTestingClusterOption)14 FileSystem (org.apache.hadoop.fs.FileSystem)13 MiniZooKeeperCluster (org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster)12 TableName (org.apache.hadoop.hbase.TableName)10 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)10 SingleProcessHBaseCluster (org.apache.hadoop.hbase.SingleProcessHBaseCluster)9 ServerName (org.apache.hadoop.hbase.ServerName)8 Table (org.apache.hadoop.hbase.client.Table)8 ZKWatcher (org.apache.hadoop.hbase.zookeeper.ZKWatcher)8 IOException (java.io.IOException)7 ArrayList (java.util.ArrayList)7