Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache: class TestCellBlockBuilder, method doBuildCellBlockUndoCellBlock.
static void doBuildCellBlockUndoCellBlock(final CellBlockBuilder builder, final Codec codec,
    final CompressionCodec compressor, final int count, final int size, final boolean sized)
    throws IOException {
  Cell[] cells = getCells(count, size);
  CellScanner cellScanner = sized ? getSizedCellScanner(cells)
      : CellUtil.createCellScanner(Arrays.asList(cells).iterator());
  // Encode the cells into a cell block, then decode it back through a SingleByteBuff
  // and confirm the same number of cells comes out.
  ByteBuffer bb = builder.buildCellBlock(codec, compressor, cellScanner);
  cellScanner = builder.createCellScannerReusingBuffers(codec, compressor, new SingleByteBuff(bb));
  int i = 0;
  while (cellScanner.advance()) {
    i++;
  }
  assertEquals(count, i);
}
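SingleByteBuff is the single-buffer implementation of HBase's ByteBuff abstraction, which is why a plain ByteBuffer holding the encoded cell block can be handed straight to createCellScannerReusingBuffers above. A minimal sketch of the wrap-and-read pattern, assuming only the SingleByteBuff constructor plus the ByteBuff accessors remaining(), get() and get(int), which mirror java.nio.ByteBuffer:

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.nio.SingleByteBuff;

public class SingleByteBuffSketch {
  public static void main(String[] args) {
    // Wrap exactly one ByteBuffer behind the ByteBuff API.
    ByteBuffer bb = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 });
    SingleByteBuff buf = new SingleByteBuff(bb);
    System.out.println(buf.remaining()); // 4
    System.out.println(buf.get());       // 1; a relative read advances the position
    System.out.println(buf.get(3));      // 4; an absolute read leaves the position alone
  }
}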
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache: class TestTagCompressionContext, method testCompressUncompressTagsWithOffheapKeyValue1.
@Test
public void testCompressUncompressTagsWithOffheapKeyValue1() throws Exception {
  // baos is HBase's ByteArrayOutputStream; its getBuffer() exposes the internal array.
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream daos = new ByteBufferWriterDataOutputStream(baos);
  TagCompressionContext context = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  ByteBufferExtendedCell kv1 = (ByteBufferExtendedCell) createOffheapKVWithTags(2);
  int tagsLength1 = kv1.getTagsLength();
  context.compressTags(daos, kv1.getTagsByteBuffer(), kv1.getTagsPosition(), tagsLength1);
  ByteBufferExtendedCell kv2 = (ByteBufferExtendedCell) createOffheapKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  context.compressTags(daos, kv2.getTagsByteBuffer(), kv2.getTagsPosition(), tagsLength2);
  // Reset the dictionary: decompression must rebuild it from the same empty state.
  context.clear();
  byte[] dest = new byte[tagsLength1];
  ByteBuffer ob = ByteBuffer.wrap(baos.getBuffer());
  context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1, dest, 0, tagsLength1));
  // ob's position has advanced past the first tag set, so the second wrap reads the second set.
  dest = new byte[tagsLength2];
  context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2, dest, 0, tagsLength2));
}
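The same round trip works for on-heap tags. Below is a minimal sketch, assuming TagCompressionContext's byte[]-based compressTags overload and a hand-rolled serialized tag (a 2-byte length covering type plus value, a 1-byte type, then the value bytes); the class names mirror the test above:

import java.io.DataOutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.nio.SingleByteBuff;

public class TagRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Hand-roll one serialized tag: 2-byte length (type + value), 1-byte type, value.
    byte[] value = "sometag".getBytes("UTF-8");
    ByteBuffer one = ByteBuffer.allocate(2 + 1 + value.length);
    one.putShort((short) (1 + value.length));
    one.put((byte) 1);
    one.put(value);
    byte[] tags = one.array();

    TagCompressionContext ctx = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ctx.compressTags(new DataOutputStream(baos), tags, 0, tags.length);
    ctx.clear(); // rebuild the dictionary from the same empty state before reading back
    byte[] restored = new byte[tags.length];
    ctx.uncompressTags(new SingleByteBuff(ByteBuffer.wrap(baos.getBuffer())), restored, 0, tags.length);
    // restored now holds the same bytes as tags
  }
}

The clear() call is the load-bearing detail: the LRU dictionary built during compression has to be rebuilt from scratch during decompression, exactly as the test does between its compress and uncompress phases.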
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache: class TestByteBufferArray, method testAsSubByteBuff.
private void testAsSubByteBuff(ByteBufferArray array, int off, int len, boolean isMulti) {
  ByteBuff ret = ByteBuff.wrap(array.asSubByteBuffers(off, len));
  if (isMulti) {
    assertTrue(ret instanceof MultiByteBuff);
  } else {
    assertTrue(ret instanceof SingleByteBuff);
  }
  assertFalse(ret.hasArray());
  assertEquals(len, ret.remaining());
  ByteBuff tmp = createByteBuff(len);
  int pos = tmp.position(), lim = tmp.limit();
  try {
    assertEquals(len, array.read(off, tmp));
    assertEquals(0, tmp.remaining());
  } finally {
    tmp.position(pos).limit(lim);
  }
  assertByteBuffEquals(ret, tmp);
}
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache: class TestBlockIOUtils, method testReadWithExtra.
@Test
public void testReadWithExtra() throws IOException {
  FileSystem fs = TEST_UTIL.getTestFileSystem();
  Path p = new Path(TEST_UTIL.getDataTestDirOnTestFS(), "testReadWithExtra");
  String s = "hello world"; // 11 bytes on disk
  try (FSDataOutputStream out = fs.create(p)) {
    out.writeBytes(s);
  }
  // 6 necessary plus 2 extra bytes fit exactly into the 8-byte SingleByteBuff.
  ByteBuff buf = new SingleByteBuff(ByteBuffer.allocate(8));
  try (FSDataInputStream in = fs.open(p)) {
    assertTrue(BlockIOUtils.readWithExtra(buf, in, 6, 2));
  }
  buf.rewind();
  byte[] heapBuf = new byte[buf.capacity()];
  buf.get(heapBuf, 0, heapBuf.length);
  assertArrayEquals(Bytes.toBytes("hello wo"), heapBuf);
  // The same read works across a MultiByteBuff made of three 4-byte segments.
  buf = new MultiByteBuff(ByteBuffer.allocate(4), ByteBuffer.allocate(4), ByteBuffer.allocate(4));
  try (FSDataInputStream in = fs.open(p)) {
    assertTrue(BlockIOUtils.readWithExtra(buf, in, 8, 3));
  }
  buf.rewind();
  heapBuf = new byte[11];
  buf.get(heapBuf, 0, heapBuf.length);
  assertArrayEquals(Bytes.toBytes("hello world"), heapBuf);
  // Asking for 12 necessary bytes from an 11-byte file must fail.
  buf.position(0).limit(12);
  try (FSDataInputStream in = fs.open(p)) {
    try {
      BlockIOUtils.readWithExtra(buf, in, 12, 0);
      fail("Should only read 11 bytes");
    } catch (IOException e) {
      // expected: premature EOF
    }
  }
}
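The MultiByteBuff half of the test relies on reads and writes crossing segment boundaries transparently. A minimal sketch of that behavior, assuming the put/get/rewind methods already used above:

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.nio.MultiByteBuff;

public class MultiByteBuffSketch {
  public static void main(String[] args) {
    // Two 4-byte segments behave as one logical 8-byte buffer.
    MultiByteBuff mbb = new MultiByteBuff(ByteBuffer.allocate(4), ByteBuffer.allocate(4));
    mbb.put("abcdefgh".getBytes(), 0, 8); // this write crosses the segment boundary
    mbb.rewind();
    byte[] out = new byte[8];
    mbb.get(out, 0, out.length);
    System.out.println(new String(out)); // abcdefgh
  }
}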
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache: class TestBlockIOUtils, method testPositionalReadPrematureEOF.
@Test
public void testPositionalReadPrematureEOF() throws IOException {
  long position = 0;
  int bufOffset = 0;
  int necessaryLen = 10;
  int extraLen = 0;
  int totalLen = necessaryLen + extraLen;
  byte[] buf = new byte[totalLen];
  ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen));
  FSDataInputStream in = mock(FSDataInputStream.class);
  // The first positional read delivers 9 of the 10 necessary bytes.
  when(in.read(position, buf, bufOffset, totalLen)).thenReturn(9);
  // The follow-up read for the remaining byte hits EOF.
  when(in.read(position + 9, buf, bufOffset + 9, totalLen - 9)).thenReturn(-1);
  // Report no optional stream capabilities.
  when(in.hasCapability(anyString())).thenReturn(false);
  exception.expect(IOException.class);
  exception.expectMessage("EOF");
  BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
}
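The two stubs above pin the exact read arguments. Where those are harder to predict, Mockito's consecutive stubbing (a single thenReturn with several values) expresses the same short-read-then-EOF sequence with argument matchers; a minimal, hypothetical sketch:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.fs.FSDataInputStream;

public class ConsecutiveStubSketch {
  public static void main(String[] args) throws Exception {
    FSDataInputStream in = mock(FSDataInputStream.class);
    // The first matching call returns 9; every later call returns -1 (EOF).
    when(in.read(anyLong(), any(byte[].class), anyInt(), anyInt())).thenReturn(9, -1);
    System.out.println(in.read(0L, new byte[10], 0, 10)); // 9
    System.out.println(in.read(9L, new byte[10], 9, 1));  // -1
  }
}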