
Example 16 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestBlockIOUtils method testByteBufferPositionedReadableEOF.

@Test
public void testByteBufferPositionedReadableEOF() throws IOException {
    assumeTrue("Skip the test because ByteBufferPositionedReadable is not available", isByteBufferPositionedReadable());
    long position = 0;
    int necessaryLen = 10;
    int extraLen = 0;
    int totalLen = necessaryLen + extraLen;
    int firstReadLen = 9;
    ByteBuffer buf = ByteBuffer.allocate(totalLen);
    ByteBuff bb = new SingleByteBuff(buf);
    MyFSDataInputStream in = mock(MyFSDataInputStream.class);
    // the first pread returns 9 of the 10 necessary bytes; the retry, issued at
    // the position where the first read stopped, hits end of stream
    when(in.read(position, buf)).thenReturn(firstReadLen);
    when(in.read(firstReadLen, buf)).thenReturn(-1);
    when(in.hasCapability(anyString())).thenReturn(true);
    exception.expect(IOException.class);
    exception.expectMessage("EOF");
    BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
    verify(in).read(position, buf);
    verify(in).read(firstReadLen, buf);
    verify(in).hasCapability(anyString());
    verifyNoMoreInteractions(in);
}
Also used : SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) MultiByteBuff(org.apache.hadoop.hbase.nio.MultiByteBuff) ByteBuff(org.apache.hadoop.hbase.nio.ByteBuff) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)
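
What the test pins down is the end-of-stream handling of BlockIOUtils.preadWithExtra: it keeps issuing reads until the necessary bytes arrive, and a premature -1 becomes an IOException whose message contains "EOF", which is what exception.expectMessage("EOF") matches. A minimal JDK-only sketch of that loop (not the actual HBase implementation, which reads through the positioned-readable API):

import java.io.IOException;
import java.io.InputStream;

public final class ReadNecessarySketch {

    // Keep reading until necessaryLen bytes have arrived; translate a
    // premature end of stream into an IOException mentioning "EOF".
    static void readNecessary(InputStream in, byte[] dest, int necessaryLen) throws IOException {
        int done = 0;
        while (done < necessaryLen) {
            int n = in.read(dest, done, necessaryLen - done);
            if (n < 0) {
                throw new IOException("EOF, reached end of stream after reading " + done
                        + " of " + necessaryLen + " necessary bytes");
            }
            done += n;
        }
    }
}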

Example 17 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestBlockIOUtils method testPositionalReadNoExtra.

@Test
public void testPositionalReadNoExtra() throws IOException {
    long position = 0;
    int bufOffset = 0;
    int necessaryLen = 10;
    int extraLen = 0;
    int totalLen = necessaryLen + extraLen;
    byte[] buf = new byte[totalLen];
    ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen));
    FSDataInputStream in = mock(FSDataInputStream.class);
    when(in.read(position, buf, bufOffset, totalLen)).thenReturn(totalLen);
    when(in.hasCapability(anyString())).thenReturn(false);
    boolean ret = BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
    assertFalse("Expect false return when no extra bytes requested", ret);
    verify(in).read(position, buf, bufOffset, totalLen);
    verify(in).hasCapability(anyString());
    verifyNoMoreInteractions(in);
}
Also used : SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) MultiByteBuff(org.apache.hadoop.hbase.nio.MultiByteBuff) ByteBuff(org.apache.hadoop.hbase.nio.ByteBuff) Test(org.junit.Test)
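
The stubbed in.read(position, buf, bufOffset, totalLen) models a positional read ("pread") that satisfies the whole request in a single call. For readers who want the access pattern without mocks, here is a JDK-only analogue using FileChannel; the file name is hypothetical:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public final class PreadDemo {
    public static void main(String[] args) throws IOException {
        ByteBuffer buf = ByteBuffer.allocate(10);
        try (FileChannel ch = FileChannel.open(Paths.get("block.bin"), StandardOpenOption.READ)) {
            // Positional read: fills buf from offset 0 without moving the
            // channel's own position, like FSDataInputStream.read(position, ...).
            int n = ch.read(buf, 0L);
            System.out.println("read " + n + " bytes");
        }
    }
}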

Example 18 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestBlockIOUtils method testReadFully.

@Test
public void testReadFully() throws IOException {
    FileSystem fs = TEST_UTIL.getTestFileSystem();
    Path p = new Path(TEST_UTIL.getDataTestDirOnTestFS(), "testReadFully");
    String s = "hello world";
    try (FSDataOutputStream out = fs.create(p)) {
        out.writeBytes(s);
    }
    ByteBuff buf = new SingleByteBuff(ByteBuffer.allocate(11));
    try (FSDataInputStream in = fs.open(p)) {
        BlockIOUtils.readFully(buf, in, 11);
    }
    buf.rewind();
    byte[] heapBuf = new byte[s.length()];
    buf.get(heapBuf, 0, heapBuf.length);
    assertArrayEquals(Bytes.toBytes(s), heapBuf);
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) MultiByteBuff(org.apache.hadoop.hbase.nio.MultiByteBuff) ByteBuff(org.apache.hadoop.hbase.nio.ByteBuff) Test(org.junit.Test)
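
The tail of the test leans on ByteBuff positioning: readFully advances the buffer's position, and rewind() resets it to zero so the same bytes can be read back for the assertion. A pure java.nio sketch of that rewind-then-drain round trip:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public final class RewindDemo {
    public static void main(String[] args) {
        ByteBuffer b = ByteBuffer.allocate(11);
        b.put("hello world".getBytes(StandardCharsets.US_ASCII)); // position is now 11
        b.rewind();                                               // position back to 0, limit untouched
        byte[] out = new byte[11];
        b.get(out, 0, out.length);                                // drain the bytes just written
        System.out.println(new String(out, StandardCharsets.US_ASCII)); // prints "hello world"
    }
}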

Example 19 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestBlockIOUtils method testPositionalReadExtraFailed.

@Test
public void testPositionalReadExtraFailed() throws IOException {
    long position = 0;
    int bufOffset = 0;
    int necessaryLen = 10;
    int extraLen = 5;
    int totalLen = necessaryLen + extraLen;
    byte[] buf = new byte[totalLen];
    ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen));
    FSDataInputStream in = mock(FSDataInputStream.class);
    when(in.read(position, buf, bufOffset, totalLen)).thenReturn(necessaryLen);
    when(in.hasCapability(anyString())).thenReturn(false);
    boolean ret = BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen);
    assertFalse("Expect false return when reading extra bytes fails", ret);
    verify(in).read(position, buf, bufOffset, totalLen);
    verify(in).hasCapability(anyString());
    verifyNoMoreInteractions(in);
}
Also used : SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) MultiByteBuff(org.apache.hadoop.hbase.nio.MultiByteBuff) ByteBuff(org.apache.hadoop.hbase.nio.ByteBuff) Test(org.junit.Test)
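
Taken together with example 17, this test pins down the return contract of preadWithExtra: the boolean only reports whether the optional extra bytes were obtained, so falling short on extra bytes is not an error, while falling short on necessary bytes throws. A condensed sketch of that decision, written from what the two assertions imply rather than copied from the HBase source:

// Hypothetical condensation of the return contract; illustrative only.
static boolean extraBytesObtained(int bytesRead, int necessaryLen, int extraLen) {
    // false when no extra was requested (example 17) or when the read
    // stopped after the necessary bytes (example 19)
    return extraLen > 0 && bytesRead == necessaryLen + extraLen;
}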

Example 20 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestDataBlockEncoders method testSeekingOnSample.

/**
 * Test seeking while file is encoded.
 */
@Test
public void testSeekingOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
    // create all seekers
    List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        LOG.info("Encoding: " + encoding);
        DataBlockEncoder encoder = encoding.getEncoder();
        if (encoder == null) {
            continue;
        }
        LOG.info("Encoder: " + encoder);
        ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv, getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData);
        HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags).withCompression(Compression.Algorithm.NONE).build();
        DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
        seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
        encodedSeekers.add(seeker);
    }
    LOG.info("Testing it!");
    // try a few random seeks
    for (boolean seekBefore : new boolean[] { false, true }) {
        for (int i = 0; i < NUM_RANDOM_SEEKS; ++i) {
            int keyValueId;
            if (!seekBefore) {
                keyValueId = randomizer.nextInt(sampleKv.size());
            } else {
                keyValueId = randomizer.nextInt(sampleKv.size() - 1) + 1;
            }
            KeyValue keyValue = sampleKv.get(keyValueId);
            checkSeekingConsistency(encodedSeekers, seekBefore, keyValue);
        }
    }
    // check edge cases
    LOG.info("Checking edge cases");
    checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0));
    for (boolean seekBefore : new boolean[] { false, true }) {
        checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1));
        KeyValue midKv = sampleKv.get(sampleKv.size() / 2);
        Cell lastMidKv = PrivateCellUtil.createLastOnRowCol(midKv);
        checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv);
    }
    LOG.info("Done");
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
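
checkSeekingConsistency is a private helper of TestDataBlockEncoders that this listing does not include. Its role, sketched below with hypothetical code (the EncodedSeeker calls are the real API, the helper body is illustrative and may differ from the actual test class), is to seek every encoding's seeker to the same key and require them all to land on the same cell:

import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import static org.junit.Assert.assertEquals;

// Illustrative only: the real helper lives in TestDataBlockEncoders.
static void checkSeekingConsistencySketch(List<DataBlockEncoder.EncodedSeeker> seekers,
        boolean seekBefore, Cell seekKey) {
    Cell reference = null;
    for (DataBlockEncoder.EncodedSeeker seeker : seekers) {
        seeker.seekToKeyInBlock(seekKey, seekBefore);
        Cell found = seeker.getCell();
        if (reference == null) {
            reference = found; // the first encoding sets the expectation
        } else {
            // every encoding must surface the same cell for the same seek
            assertEquals(0, CellComparatorImpl.COMPARATOR.compare(reference, found));
        }
    }
}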

Aggregations

SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff): 47
ByteBuffer (java.nio.ByteBuffer): 27
Test (org.junit.Test): 27
MultiByteBuff (org.apache.hadoop.hbase.nio.MultiByteBuff): 21
ByteBuff (org.apache.hadoop.hbase.nio.ByteBuff): 19
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 12
ArrayList (java.util.ArrayList): 9
KeyValue (org.apache.hadoop.hbase.KeyValue): 9
Cell (org.apache.hadoop.hbase.Cell): 8
DataOutputStream (java.io.DataOutputStream): 7
Path (org.apache.hadoop.fs.Path): 7
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 6
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 6
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
FSDataInputStreamWrapper (org.apache.hadoop.hbase.io.FSDataInputStreamWrapper): 5
Compression (org.apache.hadoop.hbase.io.compress.Compression): 4
Configuration (org.apache.hadoop.conf.Configuration): 3
ByteArrayInputStream (java.io.ByteArrayInputStream): 2
Random (java.util.Random): 2