Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
In class TestBucketCache, method testFreeBlockWhenIOEngineWriteFailure.
@Test
public void testFreeBlockWhenIOEngineWriteFailure() throws IOException {
  // Initialize a block.
  int size = 100, offset = 20;
  int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
  ByteBuffer buf = ByteBuffer.allocate(length);
  HFileContext meta = new HFileContextBuilder().build();
  HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf),
    HFileBlock.FILL_HEADER, offset, 52, -1, meta, ByteBuffAllocator.HEAP);
  // Initialize a mocked IOEngine whose write() always fails.
  IOEngine ioEngine = Mockito.mock(IOEngine.class);
  Mockito.when(ioEngine.usesSharedMemory()).thenReturn(false);
  Mockito.doThrow(RuntimeException.class).when(ioEngine).write(Mockito.any(ByteBuffer.class),
    Mockito.anyLong());
  Mockito.doThrow(RuntimeException.class).when(ioEngine).write(Mockito.any(ByteBuff.class),
    Mockito.anyLong());
  // Create a bucket allocator.
  long availableSpace = 1024 * 1024 * 1024L;
  BucketAllocator allocator = new BucketAllocator(availableSpace, null);
  BlockCacheKey key = new BlockCacheKey("dummy", 1L);
  RAMQueueEntry re = new RAMQueueEntry(key, block, 1, true);
  Assert.assertEquals(0, allocator.getUsedSize());
  try {
    re.writeToCache(ioEngine, allocator, null, null,
      ByteBuffer.allocate(HFileBlock.BLOCK_METADATA_SPACE));
    Assert.fail();
  } catch (Exception e) {
    // Expected: the mocked IOEngine throws on write.
  }
  // The space reserved for the failed write must have been freed again.
  Assert.assertEquals(0, allocator.getUsedSize());
}
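On JUnit 4.13 or newer, the try/fail/catch idiom above can be stated more compactly with assertThrows. A minimal sketch, assuming the same ioEngine, allocator, and re as in the test body:

// Sketch only (JUnit 4.13+): same check as the try/fail/catch block above.
Assert.assertThrows(Exception.class,
  () -> re.writeToCache(ioEngine, allocator, null, null,
    ByteBuffer.allocate(HFileBlock.BLOCK_METADATA_SPACE)));
// Space reserved for the failed write must be returned to the allocator.
Assert.assertEquals(0, allocator.getUsedSize());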
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
In class TestBucketCache, method testRAMCache.
@Test
public void testRAMCache() {
  int size = 100;
  int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
  byte[] byteArr = new byte[length];
  ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
  HFileContext meta = new HFileContextBuilder().build();
  RAMCache cache = new RAMCache();
  BlockCacheKey key1 = new BlockCacheKey("file-1", 1);
  BlockCacheKey key2 = new BlockCacheKey("file-2", 2);
  HFileBlock blk1 = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf),
    HFileBlock.FILL_HEADER, -1, 52, -1, meta, ByteBuffAllocator.HEAP);
  HFileBlock blk2 = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf),
    HFileBlock.FILL_HEADER, -1, -1, -1, meta, ByteBuffAllocator.HEAP);
  RAMQueueEntry re1 = new RAMQueueEntry(key1, blk1, 1, false);
  RAMQueueEntry re2 = new RAMQueueEntry(key1, blk2, 1, false);
  assertFalse(cache.containsKey(key1));
  // A successful putIfAbsent retains the stored block's buffer: refCnt 1 -> 2.
  assertNull(cache.putIfAbsent(key1, re1));
  assertEquals(2, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
  // key1 is already present, so re2 is NOT stored and its refCnt stays at 1.
  assertNotNull(cache.putIfAbsent(key1, re2));
  assertEquals(2, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
  assertEquals(1, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
  assertNull(cache.putIfAbsent(key2, re2));
  assertEquals(2, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
  assertEquals(2, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
  // remove() releases the removed entry's buffer.
  cache.remove(key1);
  assertEquals(1, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
  assertEquals(2, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
  // clear() releases every remaining entry.
  cache.clear();
  assertEquals(1, ((HFileBlock) re1.getData()).getBufferReadOnly().refCnt());
  assertEquals(1, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
}
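The refCnt assertions above follow HBase's ByteBuff reference counting. An illustrative sketch of that lifecycle in isolation, assuming ByteBuff's netty-style retain()/release()/refCnt() methods (this is not part of the original test):

// Illustrative sketch: the refcount lifecycle RAMCache relies on.
ByteBuff bb = ByteBuff.wrap(ByteBuffer.allocate(64)); // refCnt starts at 1
bb.retain();                                          // putIfAbsent retains: 1 -> 2
assertEquals(2, bb.refCnt());
bb.release();                                         // remove()/clear() release: 2 -> 1
assertEquals(1, bb.refCnt());
bb.release();                                         // final release frees the buffer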
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
In class TestDataBlockEncoders, method testNextOnSample.
@Test
public void testNextOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
      getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData);
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
      .withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags)
      .withCompression(Compression.Algorithm.NONE).build();
    DataBlockEncoder.EncodedSeeker seeker =
      encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    int i = 0;
    do {
      KeyValue expectedKeyValue = sampleKv.get(i);
      Cell cell = seeker.getCell();
      if (PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, expectedKeyValue,
        cell) != 0) {
        int commonPrefix =
          PrivateCellUtil.findCommonPrefixInFlatKey(expectedKeyValue, cell, false, true);
        fail(String.format(
          "next() produces wrong results encoder: %s i: %d commonPrefix: %d\n expected %s\n actual %s",
          encoder.toString(), i, commonPrefix,
          Bytes.toStringBinary(expectedKeyValue.getBuffer(), expectedKeyValue.getKeyOffset(),
            expectedKeyValue.getKeyLength()),
          CellUtil.toString(cell, false)));
      }
      i++;
    } while (seeker.next());
  }
}
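Distilled from the loop above, this is the decode-side pattern for walking every cell in an encoded block. A sketch, assuming encoder, conf, meta, and encodedBuffer are already set up as in the test:

// Sketch: iterate all cells of an encoded block via an EncodedSeeker.
DataBlockEncoder.EncodedSeeker seeker =
  encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
do {
  Cell cell = seeker.getCell(); // cell at the seeker's current position
  // ... inspect or compare the cell here ...
} while (seeker.next());        // next() returns false once past the last cell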
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
In class TestDataBlockEncoders, method testRowIndexWithTagsButNoTagsInCell.
@Test
public void testRowIndexWithTagsButNoTagsInCell() throws IOException {
  List<KeyValue> kvList = new ArrayList<>();
  byte[] row = new byte[0];
  byte[] family = new byte[0];
  byte[] qualifier = new byte[0];
  byte[] value = new byte[0];
  KeyValue expectedKV = new KeyValue(row, family, qualifier, 1L, Type.Put, value);
  kvList.add(expectedKV);
  DataBlockEncoding encoding = DataBlockEncoding.ROW_INDEX_V1;
  DataBlockEncoder encoder = encoding.getEncoder();
  ByteBuffer encodedBuffer =
    encodeKeyValues(encoding, kvList, getEncodingContext(conf, Algorithm.NONE, encoding), false);
  HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
    .withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags)
    .withCompression(Compression.Algorithm.NONE).build();
  DataBlockEncoder.EncodedSeeker seeker =
    encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
  seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
  Cell cell = seeker.getCell();
  Assert.assertEquals(expectedKV.getLength(), ((KeyValue) cell).getLength());
}
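The interesting case here is the mismatch the method name describes: in the parameterized includesTags=true run, the context advertises tags while the single cell carries none, and ROW_INDEX_V1 must still round-trip it. A minimal sketch of such a context, using only builder methods already shown on this page (the literal values are illustrative):

// Context advertises tags even though the encoded cell has a zero tags length.
HFileContext meta = new HFileContextBuilder()
  .withHBaseCheckSum(false)
  .withIncludesMvcc(false)  // illustrative value
  .withIncludesTags(true)   // tags enabled at the context level
  .withCompression(Compression.Algorithm.NONE)
  .build();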
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
In class TestDataBlockEncoders, method testEncodersOnDataset.
private void testEncodersOnDataset(List<KeyValue> kvList, boolean includesMemstoreTS,
  boolean includesTags) throws IOException {
  ByteBuffer unencodedDataBuf =
    RedundantKVGenerator.convertKvToByteBuffer(kvList, includesMemstoreTS);
  HFileContext fileContext = new HFileContextBuilder().withIncludesMvcc(includesMemstoreTS)
    .withIncludesTags(includesTags).build();
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    DataBlockEncoder encoder = encoding.getEncoder();
    if (encoder == null) {
      continue;
    }
    HFileBlockEncodingContext encodingContext =
      new HFileBlockDefaultEncodingContext(conf, encoding, HFILEBLOCK_DUMMY_HEADER, fileContext);
    // Note: this is HBase's org.apache.hadoop.hbase.io.ByteArrayOutputStream,
    // which exposes getBuffer(); java.io.ByteArrayOutputStream does not.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(HFILEBLOCK_DUMMY_HEADER);
    DataOutputStream dos = new DataOutputStream(baos);
    encoder.startBlockEncoding(encodingContext, dos);
    for (KeyValue kv : kvList) {
      encoder.encode(kv, encodingContext, dos);
    }
    encoder.endBlockEncoding(encodingContext, dos, baos.getBuffer());
    byte[] encodedData = baos.toByteArray();
    testAlgorithm(encodedData, unencodedDataBuf, encoder);
  }
}
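For reference, the write path above follows DataBlockEncoder's three-phase protocol. A distilled sketch, assuming the same encodingContext, dos, baos, and kvList as the loop body:

// Sketch: three-phase block encoding.
encoder.startBlockEncoding(encodingContext, dos);       // per-block setup
for (KeyValue kv : kvList) {
  encoder.encode(kv, encodingContext, dos);             // append one cell
}
encoder.endBlockEncoding(encodingContext, dos, baos.getBuffer()); // finalize the block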