
Example 36 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class ByteBufferListOutputStream method getByteBuffers.

/**
 * We can be assured that the buffers returned by this method are all flipped.
 * @return the list of ByteBuffers
 */
public List<ByteBuffer> getByteBuffers() {
    if (!this.lastBufFlipped) {
        this.lastBufFlipped = true;
        // All the other BBs are already flipped while moving to the new BB.
        curBuf.flip();
    }
    List<ByteBuffer> bbs = new ArrayList<>(this.allBufs.size());
    for (SingleByteBuff bb : this.allBufs) {
        bbs.add(bb.nioByteBuffers()[0]);
    }
    return bbs;
}
Also used : ArrayList(java.util.ArrayList) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) ByteBuffer(java.nio.ByteBuffer)
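
The flip() guarantee documented above follows plain java.nio semantics: after writing, flip() sets the limit to the current write position and rewinds the position to zero, so a reader sees exactly the bytes that were written. A minimal standalone sketch of that contract (plain JDK, no HBase types involved):

import java.nio.ByteBuffer;

public class FlipDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.put((byte) 1).put((byte) 2).put((byte) 3); // position = 3, limit = 16
        buf.flip();                                    // position = 0, limit = 3
        while (buf.hasRemaining()) {
            System.out.println(buf.get()); // reads exactly the three written bytes
        }
    }
}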

Example 37 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class RowIndexCodecV1 method decodeKeyValues.

@Override
public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx) throws IOException {
    ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source); // waste
    sourceAsBuffer.mark();
    if (!decodingCtx.getHFileContext().isIncludesTags()) {
        sourceAsBuffer.position(sourceAsBuffer.limit() - Bytes.SIZEOF_INT);
        int onDiskSize = sourceAsBuffer.getInt();
        sourceAsBuffer.reset();
        ByteBuffer dup = sourceAsBuffer.duplicate();
        dup.position(sourceAsBuffer.position());
        dup.limit(sourceAsBuffer.position() + onDiskSize);
        return dup.slice();
    } else {
        RowIndexSeekerV1 seeker = new RowIndexSeekerV1(decodingCtx);
        seeker.setCurrentBuffer(new SingleByteBuff(sourceAsBuffer));
        List<Cell> kvs = new ArrayList<>();
        kvs.add(seeker.getCell());
        while (seeker.next()) {
            kvs.add(seeker.getCell());
        }
        boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(baos)) {
            for (Cell cell : kvs) {
                KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(cell);
                out.write(currentCell.getBuffer(), currentCell.getOffset(), currentCell.getLength());
                if (includesMvcc) {
                    WritableUtils.writeVLong(out, cell.getSequenceId());
                }
            }
            out.flush();
        }
        return ByteBuffer.wrap(baos.getBuffer(), 0, baos.size());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) DataOutputStream(java.io.DataOutputStream) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) ArrayList(java.util.ArrayList) ByteArrayOutputStream(org.apache.hadoop.hbase.io.ByteArrayOutputStream) ByteBuffer(java.nio.ByteBuffer) Cell(org.apache.hadoop.hbase.Cell)
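
The no-tags branch above returns a zero-copy window onto the drained block: duplicate() shares the backing bytes while keeping an independent position and limit, and slice() rebases that window so it starts at position 0. A standalone JDK sketch of the same windowing pattern, using a toy payload for illustration:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class SliceDemo {
    public static void main(String[] args) {
        ByteBuffer src = ByteBuffer.wrap("headerPAYLOADtrailer".getBytes(StandardCharsets.US_ASCII));
        int start = "header".length();
        int length = "PAYLOAD".length();
        ByteBuffer dup = src.duplicate(); // shares content; independent position/limit
        dup.position(start);
        dup.limit(start + length);
        ByteBuffer window = dup.slice(); // position 0, remaining == length, no copying
        byte[] out = new byte[window.remaining()];
        window.get(out);
        System.out.println(new String(out, StandardCharsets.US_ASCII)); // PAYLOAD
    }
}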

Example 38 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestTagCompressionContext method testCompressUncompressTags1.

@Test
public void testCompressUncompressTags1() throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TagCompressionContext context = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
    KeyValue kv1 = createKVWithTags(2);
    int tagsLength1 = kv1.getTagsLength();
    ByteBuffer ib = ByteBuffer.wrap(kv1.getTagsArray());
    context.compressTags(baos, ib, kv1.getTagsOffset(), tagsLength1);
    KeyValue kv2 = createKVWithTags(3);
    int tagsLength2 = kv2.getTagsLength();
    ib = ByteBuffer.wrap(kv2.getTagsArray());
    context.compressTags(baos, ib, kv2.getTagsOffset(), tagsLength2);
    context.clear();
    byte[] dest = new byte[tagsLength1];
    ByteBuffer ob = ByteBuffer.wrap(baos.toByteArray());
    context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength1);
    assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1, dest, 0, tagsLength1));
    dest = new byte[tagsLength2];
    context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength2);
    assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2, dest, 0, tagsLength2));
}
Also used : ByteBufferKeyValue(org.apache.hadoop.hbase.ByteBufferKeyValue) KeyValue(org.apache.hadoop.hbase.KeyValue) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)
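
Note that both uncompressTags calls read through a SingleByteBuff wrapping the same ByteBuffer ob: the first call advances the buffer's position past kv1's compressed tags, so the second call naturally starts at kv2's. A plain-JDK sketch of that sequential-read behavior (the byte arrays are illustrative stand-ins for the two compressed tag blocks):

import java.nio.ByteBuffer;
import java.util.Arrays;

public class SequentialReadDemo {
    public static void main(String[] args) {
        byte[] first = { 1, 2 };
        byte[] second = { 3, 4, 5 };
        ByteBuffer ob = ByteBuffer.allocate(first.length + second.length);
        ob.put(first).put(second).flip();
        byte[] dest = new byte[first.length];
        ob.get(dest);                              // consumes the first record
        System.out.println(Arrays.toString(dest)); // [1, 2]
        dest = new byte[second.length];
        ob.get(dest);                              // position carried over; reads the second
        System.out.println(Arrays.toString(dest)); // [3, 4, 5]
    }
}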

Example 39 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestDataBlockEncoders method testNextOnSample.

@Test
public void testNextOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        if (encoding.getEncoder() == null) {
            continue;
        }
        DataBlockEncoder encoder = encoding.getEncoder();
        ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv, getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData);
        HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags).withCompression(Compression.Algorithm.NONE).build();
        DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
        seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
        int i = 0;
        do {
            KeyValue expectedKeyValue = sampleKv.get(i);
            Cell cell = seeker.getCell();
            if (PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, expectedKeyValue, cell) != 0) {
                int commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(expectedKeyValue, cell, false, true);
                fail(String.format("next() produces wrong results " + "encoder: %s i: %d commonPrefix: %d" + "\n expected %s\n actual      %s", encoder.toString(), i, commonPrefix, Bytes.toStringBinary(expectedKeyValue.getBuffer(), expectedKeyValue.getKeyOffset(), expectedKeyValue.getKeyLength()), CellUtil.toString(cell, false)));
            }
            i++;
        } while (seeker.next());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) Cell(org.apache.hadoop.hbase.Cell) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)
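
The do/while shape of the loop matters: unlike java.util.Iterator, the seeker is already positioned on the first cell once setCurrentBuffer returns, so the test reads getCell() before the first next(). A toy cursor with the same contract (ListCursor is purely illustrative, not an HBase type):

import java.util.Arrays;
import java.util.List;

public class CursorDemo {
    // Mirrors the assumed seeker contract: start positioned on the first element;
    // next() advances and reports whether a current element still exists.
    static final class ListCursor<T> {
        private final List<T> items;
        private int index = 0;
        ListCursor(List<T> items) { this.items = items; }
        T current() { return items.get(index); }
        boolean next() { return ++index < items.size(); }
    }

    public static void main(String[] args) {
        ListCursor<String> cursor = new ListCursor<>(Arrays.asList("a", "b", "c"));
        do {
            System.out.println(cursor.current());
        } while (cursor.next());
    }
}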

Example 40 with SingleByteBuff

use of org.apache.hadoop.hbase.nio.SingleByteBuff in project hbase by apache.

the class TestDataBlockEncoders method testRowIndexWithTagsButNoTagsInCell.

@Test
public void testRowIndexWithTagsButNoTagsInCell() throws IOException {
    List<KeyValue> kvList = new ArrayList<>();
    byte[] row = new byte[0];
    byte[] family = new byte[0];
    byte[] qualifier = new byte[0];
    byte[] value = new byte[0];
    KeyValue expectedKV = new KeyValue(row, family, qualifier, 1L, Type.Put, value);
    kvList.add(expectedKV);
    DataBlockEncoding encoding = DataBlockEncoding.ROW_INDEX_V1;
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, kvList, getEncodingContext(conf, Algorithm.NONE, encoding), false);
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags).withCompression(Compression.Algorithm.NONE).build();
    DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    Cell cell = seeker.getCell();
    Assert.assertEquals(expectedKV.getLength(), ((KeyValue) cell).getLength());
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) Cell(org.apache.hadoop.hbase.Cell) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)
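
For context on the final length assertion: by the commonly documented KeyValue wire layout (4-byte key length, 4-byte value length, key bytes, value bytes, with the key holding a 2-byte row length, row, 1-byte family length, family, qualifier, 8-byte timestamp, and 1-byte type), an all-empty cell with no tags section should come to 20 bytes. A back-of-the-envelope sketch, hedged on that layout being current:

public class KeyValueLengthSketch {
    public static void main(String[] args) {
        int row = 0, family = 0, qualifier = 0, value = 0;        // all empty in the test
        int keyLength = 2 + row + 1 + family + qualifier + 8 + 1; // = 12
        int total = 4 + 4 + keyLength + value;                    // = 20; no tags section
        System.out.println("expected getLength(): " + total);
    }
}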

Aggregations

SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff) 47
ByteBuffer (java.nio.ByteBuffer) 27
Test (org.junit.Test) 27
MultiByteBuff (org.apache.hadoop.hbase.nio.MultiByteBuff) 21
ByteBuff (org.apache.hadoop.hbase.nio.ByteBuff) 19
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream) 12
ArrayList (java.util.ArrayList) 9
KeyValue (org.apache.hadoop.hbase.KeyValue) 9
Cell (org.apache.hadoop.hbase.Cell) 8
DataOutputStream (java.io.DataOutputStream) 7
Path (org.apache.hadoop.fs.Path) 7
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream) 6
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext) 6
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 5
FSDataInputStreamWrapper (org.apache.hadoop.hbase.io.FSDataInputStreamWrapper) 5
Compression (org.apache.hadoop.hbase.io.compress.Compression) 4
Configuration (org.apache.hadoop.conf.Configuration) 3
ByteArrayInputStream (java.io.ByteArrayInputStream) 2
Random (java.util.Random) 2