Use of org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl in project hbase by apache.
From the class TestMajorCompaction, method majorCompactionWithDataBlockEncoding:
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
  for (Store store : r.getStores()) {
    // Remember each store's original encoder so it can be restored after the compaction.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
    ((HStore) store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }
  majorCompaction();
  // Restore the original encoders.
  for (Entry<Store, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
    ((HStore) entry.getKey()).setDataBlockEncoderInTest(entry.getValue());
  }
}
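For context, HFileDataBlockEncoderImpl wraps a single DataBlockEncoding and exposes it through the HFileDataBlockEncoder interface; the test above installs either a PREFIX or a NONE encoder on each store before compacting. Below is a minimal, illustrative sketch of that construction, assuming hbase-server is on the classpath; the class name and main method are made up for illustration and are not part of the test.

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;

public class EncoderConstructionSketch {
  public static void main(String[] args) {
    // Encoder that writes PREFIX-encoded data blocks (what the test installs
    // when inCacheOnly is false).
    HFileDataBlockEncoder prefixEncoder =
        new HFileDataBlockEncoderImpl(DataBlockEncoding.PREFIX);
    // Encoder that leaves data blocks unencoded on disk (the inCacheOnly == true case).
    HFileDataBlockEncoder noneEncoder =
        new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE);

    System.out.println(prefixEncoder.getDataBlockEncoding()); // PREFIX
    System.out.println(noneEncoder.getDataBlockEncoding());   // NONE
  }
}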
Use of org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl in project hbase by apache.
From the class TestStoreFile, method testDataBlockEncodingMetaData:
/**
* Check if data block encoding information is saved correctly in HFile's
* file info.
*/
@Test
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  // Construct the encoder under demonstration; the encoding itself is handed to the
  // writer through the HFileContext below.
  HFileDataBlockEncoder dataBlockEncoder = new HFileDataBlockEncoderImpl(dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder()
      .withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
      .withFilePath(path)
      .withMaxKeyCount(2000)
      .withFileContext(meta)
      .build();
  writer.close();
  // Re-open the file and check that the encoding was recorded in the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE);
  StoreFileReader reader = storeFile.createReader();
  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // Compare array contents, not references: the stored value is the encoding's name as bytes.
  assertArrayEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
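As a follow-up, the bytes stored under the DATA_BLOCK_ENCODING file-info key can be mapped back to the enum. Assuming, as the assertion above suggests, that getNameInBytes() stores the encoding's enum name as bytes, the round trip looks roughly like the sketch below; the class name is made up for illustration.

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class FileInfoEncodingSketch {
  public static void main(String[] args) {
    // Stand-in for the value read from the DATA_BLOCK_ENCODING file-info key.
    byte[] value = DataBlockEncoding.FAST_DIFF.getNameInBytes();

    // Decode back to the enum via its name.
    DataBlockEncoding stored = DataBlockEncoding.valueOf(Bytes.toString(value));
    System.out.println(stored); // FAST_DIFF
  }
}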