Search in sources :

Example 11 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestByteRangeWithKVSerialization, method testWritingAndReadingCells.

@Test
public void testWritingAndReadingCells() throws Exception {
    final byte[] FAMILY = Bytes.toBytes("f1");
    final byte[] QUALIFIER = Bytes.toBytes("q1");
    final byte[] VALUE = Bytes.toBytes("v");
    final int cellCount = 1000000;
    // Build one million tagged KeyValues, tracking the exact serialized size:
    // each cell's length plus a long for its sequence id.
    List<KeyValue> cells = new ArrayList<>(cellCount);
    int serializedSize = 0;
    Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "tag1") };
    for (int i = 0; i < cellCount; i++) {
        KeyValue cell = new KeyValue(Bytes.toBytes(i), FAMILY, QUALIFIER, i, VALUE, tags);
        cell.setSequenceId(i);
        cells.add(cell);
        serializedSize += cell.getLength() + Bytes.SIZEOF_LONG;
    }
    // Serialize every cell into one byte range sized to fit them exactly.
    PositionedByteRange writeRange = new SimplePositionedMutableByteRange(serializedSize);
    for (KeyValue cell : cells) {
        writeCell(writeRange, cell);
    }
    // Read the cells back from a range over the written bytes and verify each
    // round-tripped cell matches the original: equality, value bytes, tag
    // bytes, and sequence id.
    PositionedByteRange readRange =
        new SimplePositionedMutableByteRange(writeRange.getBytes(), 0, writeRange.getPosition());
    for (int i = 0; i < cellCount; i++) {
        KeyValue actual = readCell(readRange);
        KeyValue expected = cells.get(i);
        Assert.assertTrue(actual.equals(expected));
        Assert.assertTrue(Bytes.equals(actual.getValueArray(), actual.getValueOffset(),
            actual.getValueLength(), expected.getValueArray(), expected.getValueOffset(),
            expected.getValueLength()));
        Assert.assertTrue(Bytes.equals(actual.getTagsArray(), actual.getTagsOffset(),
            actual.getTagsLength(), expected.getTagsArray(), expected.getTagsOffset(),
            expected.getTagsLength()));
        Assert.assertEquals(expected.getSequenceId(), actual.getSequenceId());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) Tag(org.apache.hadoop.hbase.Tag) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) Test(org.junit.Test)

Example 12 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class PrefixTreeCodec, method decodeKeyValues.

/**
 * Decodes a prefix-tree encoded block back into the plain KeyValue (v1) wire
 * format. I don't think this method is called during normal HBase operation,
 * so efficiency is not important.
 *
 * @param source stream containing the encoded block bytes; fully drained
 * @param allocateHeaderLength bytes reserved at the front of the returned buffer
 * @param skipLastBytes not used by this implementation
 * @param decodingCtx supplies the HFile context (whether mvcc/sequence ids are
 *          included in the encoding)
 * @return a buffer holding the decoded KeyValues, positioned at its limit as if
 *         the bytes had just been appended
 * @throws IOException if draining the source stream fails
 */
public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength, int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
    // waste
    ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);
    sourceAsBuffer.mark();
    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(new SingleByteBuff(sourceAsBuffer));
    sourceAsBuffer.rewind();
    int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
    byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
    ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
    result.rewind();
    CellSearcher searcher = null;
    try {
        boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
        searcher = DecoderFactory.checkOut(new SingleByteBuff(sourceAsBuffer), includesMvcc);
        while (searcher.advance()) {
            KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
            // needs to be modified for DirectByteBuffers. no existing methods to
            // write VLongs to byte[]
            int offset = result.arrayOffset() + result.position();
            System.arraycopy(currentCell.getBuffer(), currentCell.getOffset(), result.array(), offset, currentCell.getLength());
            // Advance the buffer past the copied cell. (A trailing
            // "offset += keyValueLength" dead store was removed: offset is
            // recomputed from result.position() on every iteration and the
            // incremented value was never read.)
            ByteBufferUtils.skip(result, KeyValueUtil.length(currentCell));
            if (includesMvcc) {
                ByteBufferUtils.writeVLong(result, currentCell.getSequenceId());
            }
        }
        //make it appear as if we were appending
        result.position(result.limit());
        return result;
    } finally {
        // Always return the pooled searcher, even when decoding throws.
        DecoderFactory.checkIn(searcher);
    }
}
Also used : CellSearcher(org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher) KeyValue(org.apache.hadoop.hbase.KeyValue) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) ByteBuffer(java.nio.ByteBuffer)

Example 13 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestRowDataTrivialWithTags, method individualSearcherAssertions.

@Override
public void individualSearcherAssertions(CellSearcher searcher) {
    // Positioning past every stored row must report AFTER_LAST: the searcher
    // should get a token mismatch on the "r" branch and skip not only rA but
    // rB as well, leaving no current cell.
    KeyValue pastEveryRow = KeyValueUtil.createFirstOnRow(Bytes.toBytes("zzz"));
    CellScannerPosition where = searcher.positionAtOrAfter(pastEveryRow);
    Assert.assertEquals(CellScannerPosition.AFTER_LAST, where);
    Assert.assertNull(searcher.current());
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) CellScannerPosition(org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition)

Example 14 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestRowEncoder, method testReverseScanner.

/**
 * probably not needed since testReverseScannerWithJitter() below is more thorough
 */
@Test
public void testReverseScanner() {
    // Walk the encoded block backwards, comparing each cell against the
    // input list traversed from its end.
    searcher.positionAfterLastCell();
    int visited = 0;
    while (searcher.previous()) {
        int expectedIndex = rows.getInputs().size() - visited - 1;
        KeyValue expected = rows.getInputs().get(expectedIndex);
        KeyValue actual = KeyValueUtil.copyToNewKeyValue(searcher.current());
        assertKeyAndValueEqual(expected, actual);
        visited++;
    }
    // Every input cell must have been visited exactly once.
    Assert.assertEquals(rows.getInputs().size(), visited);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) Test(org.junit.Test)

Example 15 with KeyValue

use of org.apache.hadoop.hbase.KeyValue in project hbase by apache.

From the class TestRowEncoder, method compile.

@Before
public void compile() throws IOException {
    // Per-test setup: encode the fixture rows with the prefix-tree encoder,
    // then open a searcher over the encoded bytes. No assertions here —
    // they live in the @Test methods.
    // Always run with tags. But should also ensure that KVs without tags work fine
    os = new ByteArrayOutputStream(1 << 20);
    encoder = new PrefixTreeEncoder(os, includeMemstoreTS);
    inputKvs = rows.getInputs();
    for (KeyValue kv : inputKvs) {
        encoder.write(kv);
    }
    // flush() finalizes the encoding; totals and block meta are read afterwards.
    encoder.flush();
    totalBytes = encoder.getTotalBytes();
    blockMetaWriter = encoder.getBlockMeta();
    outputBytes = os.toByteArray();
    // start reading, but save the assertions for @Test methods
    // Copy the encoded bytes into a direct (off-heap) ByteBuffer so decoding
    // is exercised against non-array-backed memory.
    ByteBuffer out = ByteBuffer.allocateDirect(outputBytes.length);
    ByteBufferUtils.copyFromArrayToBuffer(out, outputBytes, 0, outputBytes.length);
    out.position(0);
    buffer = new SingleByteBuff(out);
    // Searcher dimensions (tree depth, max row/qualifier/tags lengths) come
    // from the block meta parsed out of the buffer.
    blockMetaReader = new PrefixTreeBlockMeta(buffer);
    searcher = new PrefixTreeArraySearcher(blockMetaReader, blockMetaReader.getRowTreeDepth(), blockMetaReader.getMaxRowLength(), blockMetaReader.getMaxQualifierLength(), blockMetaReader.getMaxTagsLength());
    searcher.initOnBlock(blockMetaReader, buffer, includeMemstoreTS);
}
Also used : PrefixTreeEncoder(org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder) PrefixTreeArraySearcher(org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher) KeyValue(org.apache.hadoop.hbase.KeyValue) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) PrefixTreeBlockMeta(org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ByteBuffer(java.nio.ByteBuffer) Before(org.junit.Before)

Aggregations

KeyValue (org.apache.hadoop.hbase.KeyValue)552 Test (org.junit.Test)289 Cell (org.apache.hadoop.hbase.Cell)193 ArrayList (java.util.ArrayList)172 Put (org.apache.hadoop.hbase.client.Put)98 Scan (org.apache.hadoop.hbase.client.Scan)85 Result (org.apache.hadoop.hbase.client.Result)70 Configuration (org.apache.hadoop.conf.Configuration)64 Path (org.apache.hadoop.fs.Path)55 ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag)36 Tag (org.apache.hadoop.hbase.Tag)35 ByteBuffer (java.nio.ByteBuffer)34 List (java.util.List)34 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)34 IOException (java.io.IOException)32 TableName (org.apache.hadoop.hbase.TableName)32 TreeMap (java.util.TreeMap)29 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)28 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)28 WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit)27