Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.
From the class TestBlockIOUtils, method testByteBufferPositionedReadableEOF.
@Test
public void testByteBufferPositionedReadableEOF() throws IOException {
  assumeTrue("Skip the test because ByteBufferPositionedReadable is not available",
    isByteBufferPositionedReadable());
  long position = 0;
  int necessaryLen = 10;
  int extraLen = 0;
  int totalLen = necessaryLen + extraLen;
  int firstReadLen = 9;
  ByteBuffer buf = ByteBuffer.allocate(totalLen);
  ByteBuff bb = new SingleByteBuff(buf);
  MyFSDataInputStream in = mock(MyFSDataInputStream.class);
  // First positioned read returns only 9 of the 10 necessary bytes; the follow-up read at
  // offset 9 reports end-of-stream, so preadWithExtra must fail with an EOF IOException.
  when(in.read(position, buf)).thenReturn(firstReadLen);
  when(in.read(firstReadLen, buf)).thenReturn(-1);
  when(in.hasCapability(anyString())).thenReturn(true);
  exception.expect(IOException.class);
  exception.expectMessage("EOF");
  BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
  verify(in).read(position, buf);
  verify(in).read(firstReadLen, buf);
  verify(in).hasCapability(anyString());
  verifyNoMoreInteractions(in);
}
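The test leans on two helpers that live elsewhere in TestBlockIOUtils and are not part of this snippet: isByteBufferPositionedReadable(), which checks whether the Hadoop FSDataInputStream on the classpath exposes the read(long, ByteBuffer) API, and MyFSDataInputStream, an FSDataInputStream subclass that declares that method so Mockito can stub it. A minimal sketch of what they might look like, reconstructed from how the test uses them rather than copied from the upstream source:

// Hedged reconstruction of the helpers referenced by the test above; details are assumptions.
private boolean isByteBufferPositionedReadable() {
  try {
    // ByteBufferPositionedReadable adds read(long position, ByteBuffer buf) in newer Hadoop releases.
    FSDataInputStream.class.getMethod("read", long.class, ByteBuffer.class);
    return true;
  } catch (NoSuchMethodException e) {
    return false;
  }
}

public static class MyFSDataInputStream extends FSDataInputStream {
  public MyFSDataInputStream(InputStream in) {
    super(in);
  }

  // Declared here so the ByteBuffer-based positioned read can be mocked even on Hadoop
  // versions where FSDataInputStream itself does not expose it.
  public int read(long position, ByteBuffer buf) throws IOException {
    throw new UnsupportedOperationException();
  }
}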
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.
From the class TestBlockIOUtils, method testPositionalReadNoExtra.
@Test
public void testPositionalReadNoExtra() throws IOException {
  long position = 0;
  int bufOffset = 0;
  int necessaryLen = 10;
  int extraLen = 0;
  int totalLen = necessaryLen + extraLen;
  byte[] buf = new byte[totalLen];
  ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen));
  FSDataInputStream in = mock(FSDataInputStream.class);
  when(in.read(position, buf, bufOffset, totalLen)).thenReturn(totalLen);
  when(in.hasCapability(anyString())).thenReturn(false);
  boolean ret = BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
  assertFalse("Expect false return when no extra bytes requested", ret);
  verify(in).read(position, buf, bufOffset, totalLen);
  verify(in).hasCapability(anyString());
  verifyNoMoreInteractions(in);
}
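Outside of Mockito, the call shape of preadWithExtra against a real file is the same as in the mocked tests. A minimal sketch, assuming the same TEST_UTIL mini-cluster helper and static JUnit imports used by testReadFully below; the file name and byte counts are illustrative only:

// Sketch only: read 10 necessary bytes plus up to 5 extra bytes from a freshly written file;
// preadWithExtra returns true only if the extra bytes were also read.
FileSystem fs = TEST_UTIL.getTestFileSystem();
Path p = new Path(TEST_UTIL.getDataTestDirOnTestFS(), "testPreadWithExtraSketch");
try (FSDataOutputStream out = fs.create(p)) {
  out.writeBytes("0123456789ABCDE"); // 15 bytes on disk
}
ByteBuff bb = new SingleByteBuff(ByteBuffer.allocate(15));
try (FSDataInputStream in = fs.open(p)) {
  boolean gotExtra = BlockIOUtils.preadWithExtra(bb, in, 0, 10, 5);
  assertTrue("All 5 extra bytes are available, so the extra read should succeed", gotExtra);
}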
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.
From the class TestBlockIOUtils, method testReadFully.
@Test
public void testReadFully() throws IOException {
  FileSystem fs = TEST_UTIL.getTestFileSystem();
  Path p = new Path(TEST_UTIL.getDataTestDirOnTestFS(), "testReadFully");
  String s = "hello world";
  try (FSDataOutputStream out = fs.create(p)) {
    out.writeBytes(s);
  }
  ByteBuff buf = new SingleByteBuff(ByteBuffer.allocate(11));
  try (FSDataInputStream in = fs.open(p)) {
    BlockIOUtils.readFully(buf, in, 11);
  }
  buf.rewind();
  byte[] heapBuf = new byte[s.length()];
  buf.get(heapBuf, 0, heapBuf.length);
  assertArrayEquals(Bytes.toBytes(s), heapBuf);
}
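Since SingleByteBuff only wraps a ByteBuffer, the same readFully call can also target an off-heap destination. A short sketch under that assumption, reusing the fs and p variables from the snippet above (the direct-buffer variant is not part of the original test):

// Sketch only: same file and length as testReadFully, but the destination buffer is direct.
ByteBuff directBuf = new SingleByteBuff(ByteBuffer.allocateDirect(11));
try (FSDataInputStream in = fs.open(p)) {
  BlockIOUtils.readFully(directBuf, in, 11);
}
directBuf.rewind();
byte[] copy = new byte[11];
directBuf.get(copy, 0, copy.length);
assertArrayEquals(Bytes.toBytes("hello world"), copy);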
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.
From the class TestBlockIOUtils, method testPositionalReadExtraFailed.
@Test
public void testPositionalReadExtraFailed() throws IOException {
  long position = 0;
  int bufOffset = 0;
  int necessaryLen = 10;
  int extraLen = 5;
  int totalLen = necessaryLen + extraLen;
  byte[] buf = new byte[totalLen];
  ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen));
  FSDataInputStream in = mock(FSDataInputStream.class);
  when(in.read(position, buf, bufOffset, totalLen)).thenReturn(necessaryLen);
  when(in.hasCapability(anyString())).thenReturn(false);
  boolean ret = BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
  assertFalse("Expect false return when reading extra bytes fails", ret);
  verify(in).read(position, buf, bufOffset, totalLen);
  verify(in).hasCapability(anyString());
  verifyNoMoreInteractions(in);
}
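The snippet above covers the case where the extra read comes up short. The complementary success path, where one positioned read delivers all necessary and extra bytes and preadWithExtra reports true, would look roughly like this; a sketch in the same mocked style, not the upstream test:

// Sketch only: a single positioned read returns all 15 bytes (10 necessary + 5 extra),
// so preadWithExtra should report that the extra bytes were read.
long position = 0;
int necessaryLen = 10;
int extraLen = 5;
int totalLen = necessaryLen + extraLen;
byte[] buf = new byte[totalLen];
ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen));
FSDataInputStream in = mock(FSDataInputStream.class);
when(in.read(position, buf, 0, totalLen)).thenReturn(totalLen);
when(in.hasCapability(anyString())).thenReturn(false);
boolean ret = BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
assertTrue("Expect true return when extra bytes were read", ret);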
Use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.
From the class TestDataBlockEncoders, method testSeekingOnSample.
/**
 * Test seeking while file is encoded.
 */
@Test
public void testSeekingOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
  // create all seekers
  List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    LOG.info("Encoding: " + encoding);
    DataBlockEncoder encoder = encoding.getEncoder();
    if (encoder == null) {
      continue;
    }
    LOG.info("Encoder: " + encoder);
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
      getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData);
    HFileContext meta = new HFileContextBuilder()
      .withHBaseCheckSum(false)
      .withIncludesMvcc(includesMemstoreTS)
      .withIncludesTags(includesTags)
      .withCompression(Compression.Algorithm.NONE)
      .build();
    DataBlockEncoder.EncodedSeeker seeker =
      encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    encodedSeekers.add(seeker);
  }
  LOG.info("Testing it!");
  // try a few random seeks
  for (boolean seekBefore : new boolean[] { false, true }) {
    for (int i = 0; i < NUM_RANDOM_SEEKS; ++i) {
      int keyValueId;
      if (!seekBefore) {
        keyValueId = randomizer.nextInt(sampleKv.size());
      } else {
        keyValueId = randomizer.nextInt(sampleKv.size() - 1) + 1;
      }
      KeyValue keyValue = sampleKv.get(keyValueId);
      checkSeekingConsistency(encodedSeekers, seekBefore, keyValue);
    }
  }
  // check edge cases
  LOG.info("Checking edge cases");
  checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0));
  for (boolean seekBefore : new boolean[] { false, true }) {
    checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1));
    KeyValue midKv = sampleKv.get(sampleKv.size() / 2);
    Cell lastMidKv = PrivateCellUtil.createLastOnRowCol(midKv);
    checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv);
  }
  LOG.info("Done");
}
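checkSeekingConsistency is a private helper of TestDataBlockEncoders that is not included in this snippet; its job is to seek every encoder's seeker to the same key and assert that they all land on the same cell. A simplified sketch of that idea, assuming only the EncodedSeeker API used above (the upstream helper compares key and value buffers in more detail):

// Sketch only: seek all seekers to the same key and check that they agree on the resulting cell.
private void checkSeekingConsistency(List<DataBlockEncoder.EncodedSeeker> encodedSeekers,
    boolean seekBefore, Cell keyValue) {
  Cell expected = null;
  for (DataBlockEncoder.EncodedSeeker seeker : encodedSeekers) {
    seeker.seekToKeyInBlock(keyValue, seekBefore);
    Cell actual = seeker.getCell();
    if (expected == null) {
      expected = actual; // the first seeker defines the reference answer
    } else {
      assertEquals("All encodings should seek to the same cell",
        0, CellComparatorImpl.COMPARATOR.compare(expected, actual));
    }
  }
}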