Search in sources :

Example 11 with ByteBuffAllocator

Example use of org.apache.hadoop.hbase.io.ByteBuffAllocator in the Apache HBase project.

From the class TestHFile, the method testReaderWithCombinedBlockCache:

/**
 * Test case for HBASE-22127 in CombinedBlockCache: data blocks cached in the off-heap
 * BucketCache (L2) must be shared-memory blocks, while non-data blocks cached in the
 * on-heap LRUBlockCache (L1) must be on-heap (exclusive) blocks. After closing the
 * reader (with evict-on-close) and shutting the cache down, every buffer must have
 * been returned to the allocator.
 */
@Test
public void testReaderWithCombinedBlockCache() throws Exception {
    int bufCount = 1024, blockSize = 64 * 1024;
    ByteBuffAllocator alloc = initAllocator(true, bufCount, blockSize, 0);
    fillByteBuffAllocator(alloc, bufCount);
    Path storeFilePath = writeStoreFile();
    // Open the file reader with CombinedBlockCache (LRU L1 + bucket L2).
    BlockCache combined = initCombinedBlockCache("LRU");
    conf.setBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, true);
    CacheConfig cacheConfig = new CacheConfig(conf, null, combined, alloc);
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConfig, true, conf);
    try {
        long offset = 0;
        // Walk every block up to the load-on-open section.
        while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
            BlockCacheKey key = new BlockCacheKey(storeFilePath.getName(), offset);
            HFileBlock block = reader.readBlock(offset, -1, true, true, false, true, null, null);
            offset += block.getOnDiskSizeWithHeader();
            // Read the cached block.
            Cacheable cachedBlock = combined.getBlock(key, false, false, true);
            try {
                Assert.assertNotNull(cachedBlock);
                Assert.assertTrue(cachedBlock instanceof HFileBlock);
                HFileBlock hfb = (HFileBlock) cachedBlock;
                // Data block will be cached in BucketCache, so it should be an off-heap block.
                if (hfb.getBlockType().isData()) {
                    Assert.assertTrue(hfb.isSharedMem());
                } else {
                    // Non-data block will be cached in LRUBlockCache, so it must be an on-heap block.
                    Assert.assertFalse(hfb.isSharedMem());
                }
            } finally {
                // Guard the release: if assertNotNull above failed, cachedBlock is null and
                // an unconditional release() would throw NPE here, masking the real failure.
                if (cachedBlock != null) {
                    cachedBlock.release();
                }
            }
            // Return the block's ByteBuffer back to the allocator.
            block.release();
        }
    } finally {
        // Always close the reader (evicting cached blocks) and shut the cache down,
        // even if an assertion failed — otherwise buffers are never returned to the
        // allocator and the leak bleeds into subsequent tests sharing it.
        reader.close();
        combined.shutdown();
    }
    // Evict-on-close plus shutdown must have returned every buffer to the pool.
    Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
    alloc.clean();
}
Also used : Path(org.apache.hadoop.fs.Path) Reader(org.apache.hadoop.hbase.io.hfile.HFile.Reader) ByteBuffAllocator(org.apache.hadoop.hbase.io.ByteBuffAllocator) Test(org.junit.Test)

Aggregations

ByteBuffAllocator (org.apache.hadoop.hbase.io.ByteBuffAllocator)11 Test (org.junit.Test)9 BlockCacheKey (org.apache.hadoop.hbase.io.hfile.BlockCacheKey)5 HFileBlock (org.apache.hadoop.hbase.io.hfile.HFileBlock)5 Path (org.apache.hadoop.fs.Path)4 Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader)3 IOException (java.io.IOException)2 ByteBuffer (java.nio.ByteBuffer)2 AtomicReference (java.util.concurrent.atomic.AtomicReference)2 Cacheable (org.apache.hadoop.hbase.io.hfile.Cacheable)2 WriterThread (org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.WriterThread)2 ByteBuff (org.apache.hadoop.hbase.nio.ByteBuff)2 ArrayList (java.util.ArrayList)1 Configuration (org.apache.hadoop.conf.Configuration)1 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)1 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)1 HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)1 TestByteBufferIOEngine.createBucketEntry (org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.createBucketEntry)1 TestByteBufferIOEngine.getByteBuff (org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.getByteBuff)1 MultiByteBuff (org.apache.hadoop.hbase.nio.MultiByteBuff)1