Usage of org.apache.hadoop.hbase.KeyValue in the Apache HBase project:
class HRegion, method reckonAppend.
/**
 * Computes the Cell to write for an Append mutation: if a current value exists, the result is
 * a new KeyValue whose value is the current value with the delta's value concatenated onto it;
 * otherwise the delta itself is reused (with its timestamp updated). Tags from the delta, the
 * current value, and the mutation's TTL are all carried forward onto the result.
 *
 * @param delta        the Cell supplied by the Append (value to concatenate)
 * @param currentValue the existing Cell for this column, or null if none
 * @param now          the current server time, used as the minimum result timestamp
 * @param mutation     the Append; supplies the row bytes and the TTL tag
 * @return the Cell holding the appended result
 * @throws IOException if tag serialization fails
 */
private Cell reckonAppend(final Cell delta, final Cell currentValue, final long now, Append mutation) throws IOException {
// Forward any tags found on the delta.
List<Tag> tags = TagUtil.carryForwardTags(delta);
long ts = now;
Cell newCell = null;
byte[] row = mutation.getRow();
if (currentValue != null) {
// Merge in the existing cell's tags as well, then append the TTL tag (if any) last.
tags = TagUtil.carryForwardTags(tags, currentValue);
// Result timestamp must be strictly newer than the current value's, but never older than now.
ts = Math.max(now, currentValue.getTimestamp() + 1);
tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
byte[] tagBytes = TagUtil.fromList(tags);
// Allocate an empty cell and copy in all parts.
// TODO: This is intimate knowledge of how a KeyValue is made. Undo!!! Prevents our doing
// other Cell types. Copying on-heap too if an off-heap Cell.
// Value length is currentValue + delta because the new value is their concatenation.
newCell = new KeyValue(row.length, delta.getFamilyLength(), delta.getQualifierLength(), ts, KeyValue.Type.Put, delta.getValueLength() + currentValue.getValueLength(), tagBytes == null ? 0 : tagBytes.length);
// Copy in row, family, and qualifier
System.arraycopy(row, 0, newCell.getRowArray(), newCell.getRowOffset(), row.length);
System.arraycopy(delta.getFamilyArray(), delta.getFamilyOffset(), newCell.getFamilyArray(), newCell.getFamilyOffset(), delta.getFamilyLength());
System.arraycopy(delta.getQualifierArray(), delta.getQualifierOffset(), newCell.getQualifierArray(), newCell.getQualifierOffset(), delta.getQualifierLength());
// Copy in the value: existing value first, then the delta's value directly after it.
CellUtil.copyValueTo(currentValue, newCell.getValueArray(), newCell.getValueOffset());
System.arraycopy(delta.getValueArray(), delta.getValueOffset(), newCell.getValueArray(), newCell.getValueOffset() + currentValue.getValueLength(), delta.getValueLength());
// Copy in tag data
if (tagBytes != null) {
System.arraycopy(tagBytes, 0, newCell.getTagsArray(), newCell.getTagsOffset(), tagBytes.length);
}
} else {
// No existing value: reuse the delta cell itself rather than copying.
// Append's KeyValue.Type==Put and ts==HConstants.LATEST_TIMESTAMP
CellUtil.updateLatestStamp(delta, now);
newCell = delta;
tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL());
if (tags != null) {
// Only rebuild the cell if there are tags to attach.
newCell = CellUtil.createCell(delta, tags);
}
}
return newCell;
}
Usage of org.apache.hadoop.hbase.KeyValue in the Apache HBase project:
class TestPutDeleteEtcCellIteration, method testIncrementIteration.
@Test
public void testIncrementIteration() throws IOException {
  // Populate an Increment with COUNT columns; family, qualifier and amount all derive from i.
  Increment increment = new Increment(ROW);
  for (int col = 0; col < COUNT; col++) {
    byte[] encoded = Bytes.toBytes(col);
    increment.addColumn(encoded, encoded, col);
  }
  // Scan the cells back out and verify each one round-trips its index.
  CellScanner scanner = increment.cellScanner();
  int seen = 0;
  while (scanner.advance()) {
    KeyValue kv = (KeyValue) scanner.current();
    byte[] expected = Bytes.toBytes(seen);
    assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), expected));
    assertEquals(seen, Bytes.toLong(CellUtil.cloneValue(kv)));
    seen++;
  }
  // Every column added above must have been iterated exactly once.
  assertEquals(COUNT, seen);
}
Usage of org.apache.hadoop.hbase.KeyValue in the Apache HBase project:
class TestFromClientSide, method testAddKeyValue.
@Test
public void testAddKeyValue() throws IOException {
  final byte[] CONTENTS_FAMILY = Bytes.toBytes("contents");
  final byte[] value = Bytes.toBytes("abcd");
  final byte[] row1 = Bytes.toBytes("row1");
  final byte[] row2 = Bytes.toBytes("row2");
  byte[] qualifier = Bytes.toBytes("qf1");
  Put put = new Put(row1);
  // A KeyValue whose row matches the Put's row must be accepted without exception.
  boolean accepted;
  try {
    put.add(new KeyValue(row1, CONTENTS_FAMILY, qualifier, value));
    accepted = true;
  } catch (IOException e) {
    accepted = false;
  }
  assertTrue(accepted);
  // A KeyValue built for a different row must be rejected with an IOException.
  boolean rejected;
  try {
    put.add(new KeyValue(row2, CONTENTS_FAMILY, qualifier, value));
    rejected = false;
  } catch (IOException e) {
    rejected = true;
  }
  assertTrue(rejected);
}
Usage of org.apache.hadoop.hbase.KeyValue in the Apache HBase project:
class TestResult, method testCompareResults.
/**
 * Verify that Result.compareResults(...) behaves correctly.
 */
public void testCompareResults() throws Exception {
  byte[] value1 = Bytes.toBytes("value1");
  byte[] qual = Bytes.toBytes("qual");
  // Two single-cell Results that differ only in their value payloads.
  Result first = Result.create(new KeyValue[] { new KeyValue(row, family, qual, value) });
  Result second = Result.create(new KeyValue[] { new KeyValue(row, family, qual, value1) });
  // Comparing a Result against itself must not throw.
  Result.compareResults(first, first);
  // Results with different values must be reported as different (HBASE-4800).
  try {
    Result.compareResults(first, second);
    fail();
  } catch (Exception expected) {
    assertTrue(expected.getMessage().startsWith("This result was different:"));
  }
}
Usage of org.apache.hadoop.hbase.KeyValue in the Apache HBase project:
class TestResult, method testMultiVersionLoadValue.
public void testMultiVersionLoadValue() throws Exception {
  // Merge two batches of generated KVs (starting at versions 1 and 200) into one sorted array.
  KeyValue[] firstBatch = genKVs(row, family, value, 1, 100);
  KeyValue[] secondBatch = genKVs(row, family, value, 200, 100);
  KeyValue[] all = new KeyValue[firstBatch.length + secondBatch.length];
  System.arraycopy(firstBatch, 0, all, 0, firstBatch.length);
  System.arraycopy(secondBatch, 0, all, firstBatch.length, secondBatch.length);
  Arrays.sort(all, CellComparator.COMPARATOR);
  Result result = Result.create(all);
  ByteBuffer scratch = ByteBuffer.allocate(1024);
  for (int i = 0; i < 100; ++i) {
    byte[] qf = Bytes.toBytes(i);
    byte[] expected = Bytes.add(value, Bytes.toBytes(i));
    // loadValue must fill the caller-supplied buffer with the latest version's value...
    scratch.clear();
    result.loadValue(family, qf, scratch);
    scratch.flip();
    assertEquals(ByteBuffer.wrap(expected), scratch);
    // ...and getValueAsByteBuffer must agree.
    assertEquals(ByteBuffer.wrap(expected), result.getValueAsByteBuffer(family, qf));
  }
}
Aggregations