Example 61 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestBucketCache, method testFreeBlockWhenIOEngineWriteFailure.

@Test
public void testFreeBlockWhenIOEngineWriteFailure() throws IOException {
    // Initialize a block to cache.
    int size = 100, offset = 20;
    int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
    ByteBuffer buf = ByteBuffer.allocate(length);
    HFileContext meta = new HFileContextBuilder().build();
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf), HFileBlock.FILL_HEADER, offset, 52, -1, meta, ByteBuffAllocator.HEAP);
    // Initialize a mocked IOEngine.
    IOEngine ioEngine = Mockito.mock(IOEngine.class);
    Mockito.when(ioEngine.usesSharedMemory()).thenReturn(false);
    // Mockito.doNothing().when(ioEngine).write(Mockito.any(ByteBuffer.class), Mockito.anyLong());
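    // Make every write attempt fail so writeToCache has to roll back the bucket allocation.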
    Mockito.doThrow(RuntimeException.class).when(ioEngine).write(Mockito.any(ByteBuffer.class), Mockito.anyLong());
    Mockito.doThrow(RuntimeException.class).when(ioEngine).write(Mockito.any(ByteBuff.class), Mockito.anyLong());
    // Create a bucket allocator.
    long availableSpace = 1024 * 1024 * 1024L;
    BucketAllocator allocator = new BucketAllocator(availableSpace, null);
    BlockCacheKey key = new BlockCacheKey("dummy", 1L);
    RAMQueueEntry re = new RAMQueueEntry(key, block, 1, true);
    Assert.assertEquals(0, allocator.getUsedSize());
    try {
        re.writeToCache(ioEngine, allocator, null, null, ByteBuffer.allocate(HFileBlock.BLOCK_METADATA_SPACE));
        Assert.fail();
    } catch (Exception e) {
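        // Expected: the mocked IOEngine throws on write.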
    }
    Assert.assertEquals(0, allocator.getUsedSize());
}
Also used : HFileBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock) RAMQueueEntry(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuff(org.apache.hadoop.hbase.nio.ByteBuff) ByteBuffer(java.nio.ByteBuffer) BlockCacheKey(org.apache.hadoop.hbase.io.hfile.BlockCacheKey) IOException(java.io.IOException) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 62 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestBucketCache, method testRAMCache.

@Test
public void testRAMCache() {
    int size = 100;
    int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
    byte[] byteArr = new byte[length];
    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
    HFileContext meta = new HFileContextBuilder().build();
    RAMCache cache = new RAMCache();
    BlockCacheKey key1 = new BlockCacheKey("file-1", 1);
    BlockCacheKey key2 = new BlockCacheKey("file-2", 2);
    HFileBlock blk1 = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf), HFileBlock.FILL_HEADER, -1, 52, -1, meta, ByteBuffAllocator.HEAP);
    HFileBlock blk2 = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf), HFileBlock.FILL_HEADER, -1, -1, -1, meta, ByteBuffAllocator.HEAP);
    RAMQueueEntry re1 = new RAMQueueEntry(key1, blk1, 1, false);
    RAMQueueEntry re2 = new RAMQueueEntry(key1, blk2, 1, false);
    assertFalse(cache.containsKey(key1));
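    // A successful putIfAbsent returns null and retains the cached block (refCnt 1 -> 2).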
    assertNull(cache.putIfAbsent(key1, re1));
    assertEquals(2, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
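    // putIfAbsent for an existing key returns the current entry and does not retain the new block.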
    assertNotNull(cache.putIfAbsent(key1, re2));
    assertEquals(2, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
    assertEquals(1, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
    assertNull(cache.putIfAbsent(key2, re2));
    assertEquals(2, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
    assertEquals(2, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
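    // remove(key1) releases re1's block; re2 is still referenced through key2.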
    cache.remove(key1);
    assertEquals(1, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
    assertEquals(2, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
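    // clear() releases every remaining entry, so both blocks drop back to a single reference.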
    cache.clear();
    assertEquals(1, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
    assertEquals(1, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
}
Also used : HFileBlock(org.apache.hadoop.hbase.io.hfile.HFileBlock) RAMQueueEntry(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) RAMCache(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMCache) ByteBuffer(java.nio.ByteBuffer) BlockCacheKey(org.apache.hadoop.hbase.io.hfile.BlockCacheKey) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 63 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestDataBlockEncoders, method testNextOnSample.

@Test
public void testNextOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        if (encoding.getEncoder() == null) {
            continue;
        }
        DataBlockEncoder encoder = encoding.getEncoder();
        ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv, getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData);
        HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags).withCompression(Compression.Algorithm.NONE).build();
        DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
        seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
        int i = 0;
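        // Iterate the seeker over every encoded cell and compare it with the source KeyValue.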
        do {
            KeyValue expectedKeyValue = sampleKv.get(i);
            Cell cell = seeker.getCell();
            if (PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, expectedKeyValue, cell) != 0) {
                int commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(expectedKeyValue, cell, false, true);
                fail(String.format("next() produces wrong results " + "encoder: %s i: %d commonPrefix: %d" + "\n expected %s\n actual      %s", encoder.toString(), i, commonPrefix, Bytes.toStringBinary(expectedKeyValue.getBuffer(), expectedKeyValue.getKeyOffset(), expectedKeyValue.getKeyLength()), CellUtil.toString(cell, false)));
            }
            i++;
        } while (seeker.next());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) Cell(org.apache.hadoop.hbase.Cell) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 64 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestDataBlockEncoders, method testRowIndexWithTagsButNoTagsInCell.

@Test
public void testRowIndexWithTagsButNoTagsInCell() throws IOException {
    List<KeyValue> kvList = new ArrayList<>();
    byte[] row = new byte[0];
    byte[] family = new byte[0];
    byte[] qualifier = new byte[0];
    byte[] value = new byte[0];
    KeyValue expectedKV = new KeyValue(row, family, qualifier, 1L, Type.Put, value);
    kvList.add(expectedKV);
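    // The KeyValue itself carries no tags even when the context below is built with tags enabled;
    // ROW_INDEX_V1 must still round-trip the cell with its original length.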
    DataBlockEncoding encoding = DataBlockEncoding.ROW_INDEX_V1;
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, kvList, getEncodingContext(conf, Algorithm.NONE, encoding), false);
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags).withCompression(Compression.Algorithm.NONE).build();
    DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    Cell cell = seeker.getCell();
    Assert.assertEquals(expectedKV.getLength(), ((KeyValue) cell).getLength());
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) Cell(org.apache.hadoop.hbase.Cell) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 65 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestDataBlockEncoders, method testEncodersOnDataset.

private void testEncodersOnDataset(List<KeyValue> kvList, boolean includesMemstoreTS, boolean includesTags) throws IOException {
    ByteBuffer unencodedDataBuf = RedundantKVGenerator.convertKvToByteBuffer(kvList, includesMemstoreTS);
    HFileContext fileContext = new HFileContextBuilder().withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags).build();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        DataBlockEncoder encoder = encoding.getEncoder();
        if (encoder == null) {
            continue;
        }
        HFileBlockEncodingContext encodingContext = new HFileBlockDefaultEncodingContext(conf, encoding, HFILEBLOCK_DUMMY_HEADER, fileContext);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write(HFILEBLOCK_DUMMY_HEADER);
        DataOutputStream dos = new DataOutputStream(baos);
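        // Stream the KeyValues through the encoder: start the block, encode each cell, then close the block.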
        encoder.startBlockEncoding(encodingContext, dos);
        for (KeyValue kv : kvList) {
            encoder.encode(kv, encodingContext, dos);
        }
        encoder.endBlockEncoding(encodingContext, dos, baos.getBuffer());
        byte[] encodedData = baos.toByteArray();
        testAlgorithm(encodedData, unencodedDataBuf, encoder);
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) DataOutputStream(java.io.DataOutputStream) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteArrayOutputStream(org.apache.hadoop.hbase.io.ByteArrayOutputStream) ByteBuffer(java.nio.ByteBuffer) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)

Aggregations

HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 89 usages
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 82 usages
Path (org.apache.hadoop.fs.Path): 52 usages
Test (org.junit.Test): 48 usages
KeyValue (org.apache.hadoop.hbase.KeyValue): 39 usages
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 27 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 26 usages
Cell (org.apache.hadoop.hbase.Cell): 17 usages
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 16 usages
ByteBuffer (java.nio.ByteBuffer): 15 usages
Configuration (org.apache.hadoop.conf.Configuration): 14 usages
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 12 usages
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 12 usages
DataOutputStream (java.io.DataOutputStream): 6 usages
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 6 usages
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 6 usages
DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding): 6 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5 usages
IOException (java.io.IOException): 5 usages
ArrayList (java.util.ArrayList): 5 usages
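Across all of these examples the builder follows the same pattern. Below is a minimal, self-contained sketch of that pattern, restricted to the builder methods that actually appear on this page (withHBaseCheckSum, withIncludesMvcc, withIncludesTags, withCompression); the wrapper class and main method are illustrative scaffolding, not part of the hbase sources.

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class HFileContextBuilderExample {
    public static void main(String[] args) {
        // Mirror the configuration used by the decoding tests above:
        // no HBase checksums, MVCC timestamps and tags included, no compression.
        HFileContext context = new HFileContextBuilder()
            .withHBaseCheckSum(false)
            .withIncludesMvcc(true)
            .withIncludesTags(true)
            .withCompression(Compression.Algorithm.NONE)
            .build();
        // The resulting HFileContext is what gets handed to HFileBlock constructors,
        // encoding contexts, and store file writers in the examples above.
        System.out.println(context);
    }
}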