
Example 41 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in the apache/hbase project.

From the class HRegion, the method reckonAppend:

private Cell reckonAppend(final Cell delta, final Cell currentValue, final long now, Append mutation) throws IOException {
    // Forward any tags found on the delta.
    List<Tag> tags = TagUtil.carryForwardTags(delta);
    long ts = now;
    Cell newCell = null;
    byte[] row = mutation.getRow();
    if (currentValue != null) {
        tags = TagUtil.carryForwardTags(tags, currentValue);
        ts = Math.max(now, currentValue.getTimestamp() + 1);
        tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
        byte[] tagBytes = TagUtil.fromList(tags);
        // Allocate an empty cell and copy in all parts.
        // TODO: This is intimate knowledge of how a KeyValue is made. Undo!!! Prevents our doing
        // other Cell types. Copying on-heap too if an off-heap Cell.
        newCell = new KeyValue(row.length, delta.getFamilyLength(), delta.getQualifierLength(), ts, KeyValue.Type.Put, delta.getValueLength() + currentValue.getValueLength(), tagBytes == null ? 0 : tagBytes.length);
        // Copy in row, family, and qualifier
        System.arraycopy(row, 0, newCell.getRowArray(), newCell.getRowOffset(), row.length);
        System.arraycopy(delta.getFamilyArray(), delta.getFamilyOffset(), newCell.getFamilyArray(), newCell.getFamilyOffset(), delta.getFamilyLength());
        System.arraycopy(delta.getQualifierArray(), delta.getQualifierOffset(), newCell.getQualifierArray(), newCell.getQualifierOffset(), delta.getQualifierLength());
        // Copy in the value
        CellUtil.copyValueTo(currentValue, newCell.getValueArray(), newCell.getValueOffset());
        System.arraycopy(delta.getValueArray(), delta.getValueOffset(), newCell.getValueArray(), newCell.getValueOffset() + currentValue.getValueLength(), delta.getValueLength());
        // Copy in tag data
        if (tagBytes != null) {
            System.arraycopy(tagBytes, 0, newCell.getTagsArray(), newCell.getTagsOffset(), tagBytes.length);
        }
    } else {
        // Append's KeyValue.Type==Put and ts==HConstants.LATEST_TIMESTAMP
        CellUtil.updateLatestStamp(delta, now);
        newCell = delta;
        tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
        if (tags != null) {
            newCell = CellUtil.createCell(delta, tags);
        }
    }
    return newCell;
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Tag (org.apache.hadoop.hbase.Tag), Cell (org.apache.hadoop.hbase.Cell)
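
The reckonAppend path above runs on the region server when a client issues an Append. As a companion, here is a minimal client-side sketch, assuming an already-open Connection named conn and an existing table t1 with column family cf (all hypothetical names), showing the kind of Append whose delta gets concatenated onto the current value:

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendSketch {

    // Issues an Append; server side, reckonAppend builds the combined cell
    // (current value + delta) with timestamp max(now, current timestamp + 1).
    static Result appendSuffix(Connection conn) throws IOException {
        try (Table table = conn.getTable(TableName.valueOf("t1"))) {
            Append append = new Append(Bytes.toBytes("row1"));
            append.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));
            // The returned Result carries the newly concatenated cell.
            return table.append(append);
        }
    }
}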

Example 42 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in the apache/hbase project.

From the class TestPutDeleteEtcCellIteration, the method testIncrementIteration:

@Test
public void testIncrementIteration() throws IOException {
    Increment increment = new Increment(ROW);
    for (int i = 0; i < COUNT; i++) {
        byte[] bytes = Bytes.toBytes(i);
        increment.addColumn(bytes, bytes, i);
    }
    int index = 0;
    for (CellScanner cellScanner = increment.cellScanner(); cellScanner.advance(); ) {
        Cell cell = cellScanner.current();
        int value = index;
        byte[] bytes = Bytes.toBytes(index++);
        KeyValue kv = (KeyValue) cell;
        assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
        long a = Bytes.toLong(CellUtil.cloneValue(kv));
        assertEquals(value, a);
    }
    assertEquals(COUNT, index);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), CellScanner (org.apache.hadoop.hbase.CellScanner), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
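
Put, Delete, Append, and Increment all expose the same cellScanner() iteration used in the test above. A minimal sketch with hypothetical row, family, and qualifier bytes that applies the pattern to a Put:

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutCellScanSketch {

    public static void main(String[] args) throws IOException {
        Put put = new Put(Bytes.toBytes("row"));
        for (int i = 0; i < 3; i++) {
            // One column per iteration: family "cf", qualifier and value both encode i.
            put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes(i), Bytes.toBytes(i));
        }
        // Same advance-and-current loop as testIncrementIteration above.
        for (CellScanner scanner = put.cellScanner(); scanner.advance(); ) {
            Cell cell = scanner.current();
            System.out.println(Bytes.toInt(CellUtil.cloneQualifier(cell)) + " -> "
                    + Bytes.toInt(CellUtil.cloneValue(cell)));
        }
    }
}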

Example 43 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in the apache/hbase project.

From the class TestFromClientSide, the method testAddKeyValue:

@Test
public void testAddKeyValue() throws IOException {
    final byte[] CONTENTS_FAMILY = Bytes.toBytes("contents");
    final byte[] value = Bytes.toBytes("abcd");
    final byte[] row1 = Bytes.toBytes("row1");
    final byte[] row2 = Bytes.toBytes("row2");
    byte[] qualifier = Bytes.toBytes("qf1");
    Put put = new Put(row1);
    // Adding KeyValue with the same row
    KeyValue kv = new KeyValue(row1, CONTENTS_FAMILY, qualifier, value);
    boolean ok = true;
    try {
        put.add(kv);
    } catch (IOException e) {
        ok = false;
    }
    assertEquals(true, ok);
    // Adding KeyValue with the different row
    kv = new KeyValue(row2, CONTENTS_FAMILY, qualifier, value);
    ok = false;
    try {
        put.add(kv);
    } catch (IOException e) {
        ok = true;
    }
    assertEquals(true, ok);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), IOException (java.io.IOException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), Test (org.junit.Test)
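
The test demonstrates that Put.add(Cell) accepts a pre-built KeyValue only when its row matches the Put's row, and throws an IOException otherwise. A minimal defensive sketch (hypothetical column names) that checks the row up front instead of relying on the exception:

import java.io.IOException;

import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class SafePutAdd {

    // Adds the KeyValue only if its row matches the Put's row; Put.add(Cell)
    // rejects mismatched rows, as the test above shows.
    static void addIfSameRow(Put put, KeyValue kv) throws IOException {
        if (Bytes.equals(put.getRow(), CellUtil.cloneRow(kv))) {
            put.add(kv);
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] row1 = Bytes.toBytes("row1");
        Put put = new Put(row1);
        addIfSameRow(put, new KeyValue(row1, Bytes.toBytes("contents"), Bytes.toBytes("qf1"), Bytes.toBytes("abcd")));
        // A KeyValue for a different row is skipped rather than triggering the exception.
        addIfSameRow(put, new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("contents"), Bytes.toBytes("qf1"), Bytes.toBytes("abcd")));
        System.out.println("cells added: " + put.size());
    }
}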

Example 44 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in the apache/hbase project.

From the class TestResult, the method testCompareResults:

/**
   * Verify that Result.compareResults(...) behaves correctly.
   */
public void testCompareResults() throws Exception {
    byte[] value1 = Bytes.toBytes("value1");
    byte[] qual = Bytes.toBytes("qual");
    KeyValue kv1 = new KeyValue(row, family, qual, value);
    KeyValue kv2 = new KeyValue(row, family, qual, value1);
    Result r1 = Result.create(new KeyValue[] { kv1 });
    Result r2 = Result.create(new KeyValue[] { kv2 });
    // no exception thrown
    Result.compareResults(r1, r1);
    try {
        // these are different (HBASE-4800)
        Result.compareResults(r1, r2);
        fail();
    } catch (Exception x) {
        assertTrue(x.getMessage().startsWith("This result was different:"));
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), IOException (java.io.IOException)
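
The snippet above depends on row, family, and value fields declared elsewhere in TestResult. A self-contained sketch of the same behaviour, with those bytes filled in as hypothetical values: Result.compareResults returns quietly for equal results and throws when they differ.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareResultsSketch {

    public static void main(String[] args) throws Exception {
        byte[] row = Bytes.toBytes("row");
        byte[] family = Bytes.toBytes("family");
        byte[] qual = Bytes.toBytes("qual");
        Result r1 = Result.create(new KeyValue[] { new KeyValue(row, family, qual, Bytes.toBytes("value")) });
        Result r2 = Result.create(new KeyValue[] { new KeyValue(row, family, qual, Bytes.toBytes("value1")) });
        // Identical results: no exception thrown.
        Result.compareResults(r1, r1);
        try {
            // Different values for the same cell: compareResults throws.
            Result.compareResults(r1, r2);
        } catch (Exception expected) {
            System.out.println(expected.getMessage());
        }
    }
}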

Example 45 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in the apache/hbase project.

From the class TestResult, the method testMultiVersionLoadValue:

public void testMultiVersionLoadValue() throws Exception {
    KeyValue[] kvs1 = genKVs(row, family, value, 1, 100);
    KeyValue[] kvs2 = genKVs(row, family, value, 200, 100);
    KeyValue[] kvs = new KeyValue[kvs1.length + kvs2.length];
    System.arraycopy(kvs1, 0, kvs, 0, kvs1.length);
    System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length);
    Arrays.sort(kvs, CellComparator.COMPARATOR);
    ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024);
    Result r = Result.create(kvs);
    for (int i = 0; i < 100; ++i) {
        final byte[] qf = Bytes.toBytes(i);
        loadValueBuffer.clear();
        r.loadValue(family, qf, loadValueBuffer);
        loadValueBuffer.flip();
        assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))), loadValueBuffer);
        assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))), r.getValueAsByteBuffer(family, qf));
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), ByteBuffer (java.nio.ByteBuffer)
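
The genKVs helper is defined elsewhere in TestResult and is not shown in this snippet. Purely as an illustration consistent with the assertions above (qualifier Bytes.toBytes(i), value Bytes.add(value, Bytes.toBytes(i)), one cell per column at the given timestamp), such a helper might look like this sketch:

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class GenKVsSketch {

    // Hypothetical reconstruction of a genKVs-style helper; the real TestResult helper may differ.
    static KeyValue[] genKVs(final byte[] row, final byte[] family, final byte[] value,
            final long timestamp, final int cols) {
        KeyValue[] kvs = new KeyValue[cols];
        for (int i = 0; i < cols; i++) {
            // Column i: qualifier Bytes.toBytes(i), value Bytes.add(value, Bytes.toBytes(i)).
            kvs[i] = new KeyValue(row, family, Bytes.toBytes(i), timestamp, Bytes.add(value, Bytes.toBytes(i)));
        }
        return kvs;
    }
}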

Aggregations

KeyValue (org.apache.hadoop.hbase.KeyValue): 552
Test (org.junit.Test): 289
Cell (org.apache.hadoop.hbase.Cell): 193
ArrayList (java.util.ArrayList): 172
Put (org.apache.hadoop.hbase.client.Put): 98
Scan (org.apache.hadoop.hbase.client.Scan): 85
Result (org.apache.hadoop.hbase.client.Result): 70
Configuration (org.apache.hadoop.conf.Configuration): 64
Path (org.apache.hadoop.fs.Path): 55
ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag): 36
Tag (org.apache.hadoop.hbase.Tag): 35
ByteBuffer (java.nio.ByteBuffer): 34
List (java.util.List): 34
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 34
IOException (java.io.IOException): 32
TableName (org.apache.hadoop.hbase.TableName): 32
TreeMap (java.util.TreeMap): 29
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 28
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 28
WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit): 27