
Example 1 with PrefixTreeCodec

Use of org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec in the Apache HBase project.

From the class TestPrefixTreeEncoding, the method testSeekBeforeWithFixedData:

@Test
public void testSeekBeforeWithFixedData() throws Exception {
    formatRowNum = true;
    PrefixTreeCodec encoder = new PrefixTreeCodec();
    int batchId = numBatchesWritten++;
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(false).withIncludesTags(includesTag).withCompression(Algorithm.NONE).build();
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    generateFixedTestData(kvset, batchId, false, includesTag, encoder, blkEncodingCtx, userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(new SingleByteBuff(readBuffer));
    // Seek before the first keyvalue;
    Cell seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(getRowKey(batchId, 0), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey, true);
    assertEquals(null, seeker.getCell());
    // Seek before the middle keyvalue;
    seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey, true);
    assertNotNull(seeker.getCell());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1), CellUtil.cloneRow(seeker.getCell()));
    // Seek before the last keyvalue;
    seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey, true);
    assertNotNull(seeker.getCell());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), CellUtil.cloneRow(seeker.getCell()));
}
Also used: EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker), DataOutputStream (java.io.DataOutputStream), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ByteBuffer (java.nio.ByteBuffer), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec), SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
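
The helper generateFixedTestData belongs to the surrounding test class and is not shown on this page. As a rough, hypothetical sketch of what such an encode helper could look like, reusing the test's getRowKey, CF_BYTES, and NUM_ROWS_PER_BATCH that appear above, and assuming the DataBlockEncoder block-encoding calls startBlockEncoding / encode / endBlockEncoding (the method name, qualifier, and values below are invented for illustration):

    // Hypothetical sketch of a generateFixedTestData-style helper: build one cell per
    // row of the batch, then push the whole sorted set through the PrefixTree encoder.
    private static void encodeFixedBatchSketch(NavigableSet<Cell> kvset, int batchId,
            PrefixTreeCodec encoder, HFileBlockEncodingContext blkEncodingCtx,
            DataOutputStream userDataStream) throws Exception {
        for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
            kvset.add(new KeyValue(getRowKey(batchId, i), CF_BYTES,
                    Bytes.toBytes("q"), 0L, Bytes.toBytes("value" + i)));
        }
        encoder.startBlockEncoding(blkEncodingCtx, userDataStream);
        for (Cell kv : kvset) {
            encoder.encode(kv, blkEncodingCtx, userDataStream);
        }
        encoder.endBlockEncoding(blkEncodingCtx, userDataStream, null);
        // Note: the tests above skip DataBlockEncoding.ID_SIZE bytes when reading back;
        // how the two-byte encoding id ends up at the front of the stream is handled by
        // the real test code and is not reproduced in this sketch.
    }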

Example 2 with PrefixTreeCodec

Use of org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec in the Apache HBase project.

From the class TestPrefixTreeEncoding, the method testSeekWithRandomData:

@Test
public void testSeekWithRandomData() throws Exception {
    PrefixTreeCodec encoder = new PrefixTreeCodec();
    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    int batchId = numBatchesWritten++;
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(false).withIncludesTags(includesTag).withCompression(Algorithm.NONE).build();
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    // Encode a batch of randomly generated cells through the PrefixTree encoder.
    generateRandomTestData(kvset, batchId, includesTag, encoder, blkEncodingCtx, userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    // Wrap the encoded bytes, skipping the data-block-encoding id at the start of the stream.
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    verifySeeking(seeker, readBuffer, batchId);
}
Also used: PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec), EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker), DataOutputStream (java.io.DataOutputStream), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ByteBuffer (java.nio.ByteBuffer), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)
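
verifySeeking is likewise a helper of the test class that is not shown here. A minimal check in the same spirit, written only with calls that already appear in these examples (the method name checkSeeksSketch and the cellsInBlock parameter are hypothetical, not the actual helper):

    // Hypothetical sketch: for each cell known to be in the encoded block, seek to its
    // exact key and check that the seeker is positioned on a cell with the same row.
    private static void checkSeeksSketch(EncodedSeeker seeker, ByteBuffer encodedData,
            Iterable<Cell> cellsInBlock) {
        for (Cell kv : cellsInBlock) {
            // duplicate() gives every seek a fresh position over the same bytes
            seeker.setCurrentBuffer(new SingleByteBuff(encodedData.duplicate()));
            // seekBefore = false positions the seeker at the last cell <= the given key,
            // which is the cell itself when the exact key is present in the block
            seeker.seekToKeyInBlock(kv, false);
            assertArrayEquals(CellUtil.cloneRow(kv), CellUtil.cloneRow(seeker.getCell()));
        }
    }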

Example 3 with PrefixTreeCodec

Use of org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec in the Apache HBase project.

From the class TestPrefixTreeEncoding, the method testScanWithRandomData:

@Test
public void testScanWithRandomData() throws Exception {
    PrefixTreeCodec encoder = new PrefixTreeCodec();
    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(false).withIncludesTags(includesTag).withCompression(Algorithm.NONE).build();
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    generateRandomTestData(kvset, numBatchesWritten++, includesTag, encoder, blkEncodingCtx, userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(new SingleByteBuff(readBuffer));
    Cell previousKV = null;
    // Walk the whole block, checking that cells come back in comparator order and
    // that tags are present exactly when the block was written with tags.
    do {
        Cell currentKV = seeker.getCell();
        System.out.println(currentKV);
        if (previousKV != null && CellComparator.COMPARATOR.compare(currentKV, previousKV) < 0) {
            dumpInputKVSet();
            fail("Current kv " + currentKV + " is smaller than previous keyvalue " + previousKV);
        }
        if (!includesTag) {
            assertFalse(currentKV.getTagsLength() > 0);
        } else {
            Assert.assertTrue(currentKV.getTagsLength() > 0);
        }
        previousKV = currentKV;
    } while (seeker.next());
}
Also used: PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec), EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker), DataOutputStream (java.io.DataOutputStream), SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ByteBuffer (java.nio.ByteBuffer), Cell (org.apache.hadoop.hbase.Cell), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)
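
generateRandomTestData is the random-data counterpart of the fixed-data helper and is also not reproduced on this page. A rough sketch of how such a batch could be built (the column bound, qualifier and value formats, seed, and method name are invented; real tag handling driven by includesTag is omitted):

    // Hypothetical sketch of a random-batch builder: a random number of qualifiers per
    // row is added to the sorted kvset; encoding then proceeds exactly as in the
    // fixed-data sketch after Example 1 (startBlockEncoding / encode / endBlockEncoding).
    private static void addRandomBatchSketch(NavigableSet<Cell> kvset, int batchId) {
        Random rand = new Random(batchId);  // deterministic seed, arbitrary for the sketch
        for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
            int columns = 1 + rand.nextInt(10);  // up to 10 qualifiers per row (invented bound)
            for (int c = 0; c < columns; ++c) {
                kvset.add(new KeyValue(getRowKey(batchId, i), CF_BYTES,
                        Bytes.toBytes("col" + c), 0L, Bytes.toBytes("v" + i + "_" + c)));
            }
        }
    }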

Example 4 with PrefixTreeCodec

Use of org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec in the Apache HBase project.

From the class TestPrefixTreeEncoding, the method testSeekWithFixedData:

@Test
public void testSeekWithFixedData() throws Exception {
    PrefixTreeCodec encoder = new PrefixTreeCodec();
    int batchId = numBatchesWritten++;
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(false).withIncludesTags(includesTag).withCompression(Algorithm.NONE).build();
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    generateFixedTestData(kvset, batchId, includesTag, encoder, blkEncodingCtx, userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    verifySeeking(seeker, readBuffer, batchId);
}
Also used: PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec), EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker), DataOutputStream (java.io.DataOutputStream), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ByteBuffer (java.nio.ByteBuffer), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)
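
All four examples drive PrefixTreeCodec directly against an in-memory block. In a running cluster the same encoding is selected per column family; a minimal sketch using the HColumnDescriptor API of the same HBase generation (the table name, family name, and the admin handle are placeholders, not part of the test above):

    // Hypothetical sketch: enable PREFIX_TREE data block encoding on a column family.
    // 'admin' is assumed to be an org.apache.hadoop.hbase.client.Admin obtained elsewhere.
    HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("example_table"));
    HColumnDescriptor family = new HColumnDescriptor(Bytes.toBytes("cf"));
    family.setDataBlockEncoding(DataBlockEncoding.PREFIX_TREE);
    tableDesc.addFamily(family);
    admin.createTable(tableDesc);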

Aggregations

ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4 examples
DataOutputStream (java.io.DataOutputStream): 4 examples
ByteBuffer (java.nio.ByteBuffer): 4 examples
PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec): 4 examples
EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker): 4 examples
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 4 examples
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 4 examples
Test (org.junit.Test): 4 examples
Cell (org.apache.hadoop.hbase.Cell): 2 examples
SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff): 2 examples