Use of org.apache.hadoop.hbase.io.hfile.LruBlockCache in project hbase by apache.
From the class TestEncodedSeekers, method testEncodedSeeker:
@Test
public void testEncodedSeeker() throws IOException {
  System.err.println("Testing encoded seekers for encoding : " + encoding
      + ", includeTags : " + includeTags + ", compressTags : " + compressTags);
  if (includeTags) {
    testUtil.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 3);
  }
  LruBlockCache cache =
      (LruBlockCache) new CacheConfig(testUtil.getConfiguration()).getBlockCache();
  cache.clearCache();
  // Need to disable the default row bloom filter for this test to pass.
  HColumnDescriptor hcd = (new HColumnDescriptor(CF_NAME))
      .setMaxVersions(MAX_VERSIONS)
      .setDataBlockEncoding(encoding)
      .setBlocksize(BLOCK_SIZE)
      .setBloomFilterType(BloomType.NONE)
      .setCompressTags(compressTags);
  Region region = testUtil.createTestRegion(TABLE_NAME, hcd);

  // Write the data, but leave some in the memstore.
  doPuts(region);

  // Verify correctness while the memstore still contains data.
  doGets(region);

  // Verify correctness again after compacting.
  region.compact(false);
  doGets(region);

  Map<DataBlockEncoding, Integer> encodingCounts = cache.getEncodingCountsForTest();

  // Ensure that compactions don't pollute the cache with unencoded blocks
  // in case of in-cache-only encoding.
  System.err.println("encodingCounts=" + encodingCounts);
  assertEquals(1, encodingCounts.size());
  DataBlockEncoding encodingInCache = encodingCounts.keySet().iterator().next();
  assertEquals(encoding, encodingInCache);
  assertTrue(encodingCounts.get(encodingInCache) > 0);
}
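
The test above obtains the LruBlockCache through CacheConfig and inspects it directly. As a standalone illustration of that same pattern, here is a minimal sketch (not part of the HBase sources) that resolves the cache, prints a few of its runtime metrics, and clears it. It assumes an HBase version where CacheConfig(Configuration) and CacheConfig#getBlockCache() are still available and return the LruBlockCache implementation; the class name BlockCacheInspection is purely illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

public class BlockCacheInspection {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Same resolution pattern as the test above; the cast assumes the
    // configured block cache is the on-heap LruBlockCache.
    LruBlockCache cache = (LruBlockCache) new CacheConfig(conf).getBlockCache();

    // Basic runtime metrics exposed by LruBlockCache.
    System.out.println("blocks cached = " + cache.getBlockCount());
    System.out.println("current size  = " + cache.getCurrentSize());
    System.out.println("hit count     = " + cache.getStats().getHitCount());
    System.out.println("miss count    = " + cache.getStats().getMissCount());

    // Evict everything, as the test does before exercising encoded seekers.
    cache.clearCache();
  }
}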