
Example 21 with DataBlockEncoding

Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.

From the class HFileReaderImpl, the method getCachedBlock:

/**
 * Retrieve block from cache. Validates the retrieved block's type against {@code expectedBlockType}
 * and its encoding against {@code expectedDataBlockEncoding}. Unpacks the block as necessary.
 */
private HFileBlock getCachedBlock(BlockCacheKey cacheKey, boolean cacheBlock, boolean useLock, boolean isCompaction, boolean updateCacheMetrics, BlockType expectedBlockType, DataBlockEncoding expectedDataBlockEncoding) throws IOException {
    // Check cache for block. If found return.
    BlockCache cache = cacheConf.getBlockCache().orElse(null);
    if (cache != null) {
        HFileBlock cachedBlock = (HFileBlock) cache.getBlock(cacheKey, cacheBlock, useLock, updateCacheMetrics, expectedBlockType);
        if (cachedBlock != null) {
            if (cacheConf.shouldCacheCompressed(cachedBlock.getBlockType().getCategory())) {
                HFileBlock compressedBlock = cachedBlock;
                cachedBlock = compressedBlock.unpack(hfileContext, fsBlockReader);
                // If the block was cached in compressed form, unpacking produces a new block and
                // the compressed copy can be released.
                if (compressedBlock != cachedBlock) {
                    compressedBlock.release();
                }
            }
            try {
                validateBlockType(cachedBlock, expectedBlockType);
            } catch (IOException e) {
                returnAndEvictBlock(cache, cacheKey, cachedBlock);
                throw e;
            }
            if (expectedDataBlockEncoding == null) {
                return cachedBlock;
            }
            DataBlockEncoding actualDataBlockEncoding = cachedBlock.getDataBlockEncoding();
            // Block types other than data blocks always have DataBlockEncoding.NONE, so only
            // perform this check if the cached block is a data block.
            if (cachedBlock.getBlockType().isData() && !actualDataBlockEncoding.equals(expectedDataBlockEncoding)) {
                // An encoding mismatch happens when a scanner expecting one encoding reads a block
                // cached under another. Only evict when both encodings are real (non-NONE); either
                // way return null so the caller falls back to reading the block from disk.
                if (!expectedDataBlockEncoding.equals(DataBlockEncoding.NONE) && !actualDataBlockEncoding.equals(DataBlockEncoding.NONE)) {
                    // If the block is encoded but the encoding does not match the
                    // expected encoding it is likely the encoding was changed but the
                    // block was not yet evicted. Evictions on file close happen async
                    // so blocks with the old encoding still linger in cache for some
                    // period of time. This event should be rare as it only happens on
                    // schema definition change.
                    LOG.info("Evicting cached block with key {} because data block encoding mismatch; " + "expected {}, actual {}, path={}", cacheKey, actualDataBlockEncoding, expectedDataBlockEncoding, path);
                    // This is an error scenario. so here we need to release the block.
                    returnAndEvictBlock(cache, cacheKey, cachedBlock);
                }
                return null;
            }
            return cachedBlock;
        }
    }
    return null;
}
Also used : DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) IOException(java.io.IOException)
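
The expectedDataBlockEncoding passed into getCachedBlock is typically derived from the column family schema. As a rough illustration of where that setting originates, here is a minimal, hypothetical sketch (table and family names are made up) that declares FAST_DIFF encoding on a family through the public client API:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class EncodingSchemaSketch {

    // Builds a table descriptor whose single family uses FAST_DIFF data block encoding.
    public static TableDescriptor buildDescriptor() {
        return TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_table"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
                .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
                .build())
            .build();
    }
}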

Example 22 with DataBlockEncoding

Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.

From the class LruBlockCache, the method getEncodingCountsForTest:

public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
    Map<DataBlockEncoding, Integer> counts = new EnumMap<>(DataBlockEncoding.class);
    for (LruCachedBlock block : map.values()) {
        DataBlockEncoding encoding = ((HFileBlock) block.getBuffer()).getDataBlockEncoding();
        Integer count = counts.get(encoding);
        counts.put(encoding, (count == null ? 0 : count) + 1);
    }
    return counts;
}
Also used : DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) EnumMap(java.util.EnumMap)
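
As a side note, the get-then-put counting above can be written more compactly with Map.merge on Java 8+. A minimal, self-contained sketch of the same tally pattern (the list parameter simply stands in for the cache's block values):

import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class EncodingCountSketch {

    // Equivalent counting logic to getEncodingCountsForTest, using Map.merge.
    public static Map<DataBlockEncoding, Integer> count(List<DataBlockEncoding> encodings) {
        Map<DataBlockEncoding, Integer> counts = new EnumMap<>(DataBlockEncoding.class);
        for (DataBlockEncoding encoding : encodings) {
            counts.merge(encoding, 1, Integer::sum);
        }
        return counts;
    }
}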

Example 23 with DataBlockEncoding

Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.

From the class TestHStoreFile, the method testDataBlockEncodingMetaData:

/**
 * Check if data block encoding information is saved correctly in HFile's file info.
 */
@Test
public void testDataBlockEncodingMetaData() throws IOException {
    // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
    Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
    Path path = new Path(dir, "1234567890");
    DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
    cacheConf = new CacheConfig(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).withChecksumType(CKTYPE)
        .withBytesPerCheckSum(CKBYTES).withDataBlockEncoding(dataBlockEncoderAlgo).build();
    // Make a store file; no cells are written since only the file info metadata is checked.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(path)
        .withMaxKeyCount(2000).withFileContext(meta).build();
    writer.close();
    HStoreFile storeFile = new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
    storeFile.initReader();
    StoreFileReader reader = storeFile.getReader();
    Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
    byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
    assertArrayEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
Also used : Path(org.apache.hadoop.fs.Path) DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) ReaderContextBuilder(org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder) RegionInfoBuilder(org.apache.hadoop.hbase.client.RegionInfoBuilder) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)
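
For completeness, the stored file-info value can be mapped back to the enum constant. This is a hypothetical helper, assuming (as the assertion above suggests) that getNameInBytes() stores the UTF-8 enum name:

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class EncodingMetadataSketch {

    // Parses the DATA_BLOCK_ENCODING file-info bytes back into a DataBlockEncoding,
    // assuming the value is the UTF-8 enum name written by getNameInBytes().
    public static DataBlockEncoding parse(byte[] fileInfoValue) {
        if (fileInfoValue == null) {
            return DataBlockEncoding.NONE;
        }
        return DataBlockEncoding.valueOf(Bytes.toString(fileInfoValue));
    }

    public static void main(String[] args) {
        byte[] stored = DataBlockEncoding.FAST_DIFF.getNameInBytes();
        // Round trip: prints FAST_DIFF.
        System.out.println(parse(stored));
    }
}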

Example 24 with DataBlockEncoding

Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.

From the class TestMajorCompaction, the method majorCompactionWithDataBlockEncoding:

public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
    Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
    for (HStore store : r.getStores()) {
        HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
        replaceBlockCache.put(store, blockEncoder);
        final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
        final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
        store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
    }
    majorCompaction();
    // restore settings
    for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
        entry.getKey().setDataBlockEncoderInTest(entry.getValue());
    }
}
Also used : HFileDataBlockEncoder(org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder) DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) HashMap(java.util.HashMap) HFileDataBlockEncoderImpl(org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl)
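
The helper above is parameterized on inCacheOnly; hypothetical driver tests (names are illustrative, not necessarily those in TestMajorCompaction) would exercise both settings:

@Test
public void testMajorCompactionWithDataBlockEncodingEverywhere() throws Exception {
    // PREFIX encoding is used both on disk and in the block cache.
    majorCompactionWithDataBlockEncoding(false);
}

@Test
public void testMajorCompactionWithDataBlockEncodingInCacheOnly() throws Exception {
    // Blocks are written to disk un-encoded (NONE) but cached with PREFIX encoding.
    majorCompactionWithDataBlockEncoding(true);
}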

Example 25 with DataBlockEncoding

Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.

From the class TestHFileOutputFormat2, the method setupMockColumnFamiliesForDataBlockEncoding:

private void setupMockColumnFamiliesForDataBlockEncoding(Table table, Map<String, DataBlockEncoding> familyToDataBlockEncoding) throws IOException {
    TableDescriptorBuilder mockTableDescriptor = TableDescriptorBuilder.newBuilder(TABLE_NAMES[0]);
    for (Entry<String, DataBlockEncoding> entry : familyToDataBlockEncoding.entrySet()) {
        ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(entry.getKey()))
            .setMaxVersions(1).setDataBlockEncoding(entry.getValue()).setBlockCacheEnabled(false).setTimeToLive(0).build();
        mockTableDescriptor.setColumnFamily(columnFamilyDescriptor);
    }
    // Build the descriptor before stubbing: getDescriptor() returns a TableDescriptor, not a builder.
    Mockito.doReturn(mockTableDescriptor.build()).when(table).getDescriptor();
}
Also used : DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)
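
A hypothetical caller of this helper (family names and encodings are made up for illustration) would mock the table and then read the encodings back through the descriptor:

Table table = Mockito.mock(Table.class);
Map<String, DataBlockEncoding> familyToDataBlockEncoding = new HashMap<>();
familyToDataBlockEncoding.put("info", DataBlockEncoding.PREFIX);
familyToDataBlockEncoding.put("raw", DataBlockEncoding.NONE);
setupMockColumnFamiliesForDataBlockEncoding(table, familyToDataBlockEncoding);
// The mocked table now reports the configured encoding per family.
assertEquals(DataBlockEncoding.PREFIX,
    table.getDescriptor().getColumnFamily(Bytes.toBytes("info")).getDataBlockEncoding());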

Aggregations

DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding): 29
Path (org.apache.hadoop.fs.Path): 8
ArrayList (java.util.ArrayList): 7
Test (org.junit.Test): 7
Configuration (org.apache.hadoop.conf.Configuration): 6
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 6
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 6
Compression (org.apache.hadoop.hbase.io.compress.Compression): 5
Algorithm (org.apache.hadoop.hbase.io.compress.Compression.Algorithm): 5
IOException (java.io.IOException): 4
KeyValue (org.apache.hadoop.hbase.KeyValue): 4
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 4
ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder): 4
BloomType (org.apache.hadoop.hbase.regionserver.BloomType): 4
EnumMap (java.util.EnumMap): 3
FileSystem (org.apache.hadoop.fs.FileSystem): 3
Cell (org.apache.hadoop.hbase.Cell): 3
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 3
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 3
ByteBuffer (java.nio.ByteBuffer): 2