Use of org.apache.hadoop.hbase.KeyValue in project hbase by Apache.
The class Delete, method addColumn.
/**
* Delete the specified version of the specified column.
* @param family family name
* @param qualifier column qualifier
* @param timestamp version timestamp
* @return this for invocation chaining
*/
public Delete addColumn(byte[] family, byte[] qualifier, long timestamp) {
  if (timestamp < 0) {
    throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
  }
  List<Cell> list = familyMap.get(family);
  if (list == null) {
    list = new ArrayList<>(1);
  }
  KeyValue kv = new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.Delete);
  list.add(kv);
  familyMap.put(family, list);
  return this;
}
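For context, the sketch below is a minimal, hypothetical client-side use of this method; the table name, row key, column names, and timestamp are assumptions, not taken from the project.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteColumnSketch {
  public static void main(String[] args) throws Exception {
    // Table, row and column names below are assumptions for illustration.
    try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = connection.getTable(TableName.valueOf("test_table"))) {
      // addColumn returns the Delete itself, so calls chain; each call queues a
      // Delete-type KeyValue for one specific version of one column.
      Delete delete = new Delete(Bytes.toBytes("row1"))
          .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q1"), 1234L)
          .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q2"), 1234L);
      table.delete(delete);
    }
  }
}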
Use of org.apache.hadoop.hbase.KeyValue in project hbase by Apache.
The class Put, method addColumn.
/**
* Add the specified column and value, with the specified timestamp as
* its version to this Put operation.
* @param family family name
* @param qualifier column qualifier
* @param ts version timestamp
* @param value column value
* @return this
*/
public Put addColumn(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
  if (ts < 0) {
    throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
  }
  List<Cell> list = getCellList(family);
  KeyValue kv = createPutKeyValue(family, qualifier, ts, value, null);
  list.add(kv);
  familyMap.put(CellUtil.cloneFamily(kv), list);
  return this;
}
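As a rough illustration of this ByteBuffer overload, the following sketch wraps plain byte arrays in ByteBuffers before adding the column; the row, column, and value contents are assumed.

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutByteBufferSketch {
  public static void main(String[] args) {
    // Assumed row, family, qualifier and value; only the API shape matters here.
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("f"),
        ByteBuffer.wrap(Bytes.toBytes("q1")), 1234L,
        ByteBuffer.wrap(Bytes.toBytes("v1")));
    // Mutation.size() reports how many cells have been queued on this Put.
    System.out.println("cells queued: " + put.size());
  }
}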
Use of org.apache.hadoop.hbase.KeyValue in project hbase by Apache.
The class RedundantKVGenerator, method generateTestKeyValues.
/**
 * Generate test data useful to test encoders.
 * @param howMany How many KeyValues should be generated.
 * @param useTags Whether a tag should be attached to each generated KeyValue.
 * @return sorted list of key values
 */
public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
  List<KeyValue> result = new ArrayList<>();
  List<byte[]> rows = generateRows();
  Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
  if (family == null) {
    family = new byte[columnFamilyLength];
    randomizer.nextBytes(family);
  }
  long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
  byte[] value = new byte[valueLength];
  for (int i = 0; i < howMany; ++i) {
    long timestamp = baseTimestamp;
    if (timestampDiffSize > 0) {
      timestamp += randomizer.nextInt(timestampDiffSize);
    }
    Integer rowId = randomizer.nextInt(rows.size());
    byte[] row = rows.get(rowId);
    // generate qualifier, sometimes it is same, sometimes similar,
    // occasionally completely different
    byte[] qualifier;
    float qualifierChance = randomizer.nextFloat();
    if (!rowsToQualifier.containsKey(rowId)
        || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
      int qualifierLength = averageQualifierLength;
      qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1) - qualifierLengthVariance;
      qualifier = new byte[qualifierLength];
      randomizer.nextBytes(qualifier);
      // add it to map
      if (!rowsToQualifier.containsKey(rowId)) {
        rowsToQualifier.put(rowId, new ArrayList<>());
      }
      rowsToQualifier.get(rowId).add(qualifier);
    } else if (qualifierChance > chanceForSameQualifier) {
      // similar qualifier
      List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
      byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
      qualifier = new byte[originalQualifier.length];
      int commonPrefix = randomizer.nextInt(qualifier.length);
      System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
      for (int j = commonPrefix; j < qualifier.length; ++j) {
        qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
      }
      rowsToQualifier.get(rowId).add(qualifier);
    } else {
      // same qualifier
      List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
      qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
    }
    if (randomizer.nextFloat() < chanceForZeroValue) {
      for (int j = 0; j < value.length; ++j) {
        value[j] = (byte) 0;
      }
    } else {
      randomizer.nextBytes(value);
    }
    if (useTags) {
      result.add(new KeyValue(row, family, qualifier, timestamp, value,
          new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
    } else {
      result.add(new KeyValue(row, family, qualifier, timestamp, value));
    }
  }
  Collections.sort(result, CellComparator.COMPARATOR);
  return result;
}
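A possible way to drive this generator from a test is sketched below; the no-argument constructor, the package in the import, and the counts used are assumptions for illustration.

import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
// Assumed package for the test utility; adjust the import to match your checkout.
import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;

public class GeneratorSketch {
  public static void main(String[] args) {
    // Assumes the generator's default constructor and arbitrary counts.
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> withTags = generator.generateTestKeyValues(1000, true);
    List<KeyValue> withoutTags = generator.generateTestKeyValues(1000, false);
    // Both lists come back sorted, ready to feed an encoder test.
    System.out.println(withTags.size() + " tagged, " + withoutTags.size() + " untagged KeyValues");
  }
}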
Use of org.apache.hadoop.hbase.KeyValue in project hbase by Apache.
The class RedundantKVGenerator, method convertKvToByteBuffer.
/**
 * Convert list of KeyValues to byte buffer.
 * @param keyValues list of KeyValues to be converted.
 * @param includesMemstoreTS whether each KeyValue is followed by its memstore
 *          timestamp (sequence id), written as a variable-length long.
 * @return buffer with content from key values
 */
public static ByteBuffer convertKvToByteBuffer(List<KeyValue> keyValues, boolean includesMemstoreTS) {
  int totalSize = 0;
  for (KeyValue kv : keyValues) {
    totalSize += kv.getLength();
    if (includesMemstoreTS) {
      totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
    }
  }
  ByteBuffer result = ByteBuffer.allocate(totalSize);
  for (KeyValue kv : keyValues) {
    result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
    if (includesMemstoreTS) {
      ByteBufferUtils.writeVLong(result, kv.getSequenceId());
    }
  }
  return result;
}
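A small sketch combining hand-built KeyValues with this static helper; the KeyValue contents and the RedundantKVGenerator import location are assumptions.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
// Assumed package for the test utility; adjust the import to match your checkout.
import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;

public class ConvertSketch {
  public static void main(String[] args) {
    // Two hand-built KeyValues with assumed contents.
    List<KeyValue> kvs = new ArrayList<>();
    kvs.add(new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"), Bytes.toBytes("q"),
        1L, Bytes.toBytes("v1")));
    kvs.add(new KeyValue(Bytes.toBytes("r2"), Bytes.toBytes("f"), Bytes.toBytes("q"),
        2L, Bytes.toBytes("v2")));
    // false: no memstore timestamps appended after each KeyValue.
    ByteBuffer buffer = RedundantKVGenerator.convertKvToByteBuffer(kvs, false);
    System.out.println("serialized " + buffer.capacity() + " bytes");
  }
}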
Use of org.apache.hadoop.hbase.KeyValue in project hbase by Apache.
The class TestCellCodecWithTags, method testCellWithTag.
@Test
public void testCellWithTag() throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  CountingOutputStream cos = new CountingOutputStream(baos);
  DataOutputStream dos = new DataOutputStream(cos);
  Codec codec = new CellCodecWithTags();
  Codec.Encoder encoder = codec.getEncoder(dos);
  final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"),
      HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"),
      new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")),
          new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) });
  final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"),
      HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"),
      new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")) });
  final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"),
      HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"),
      new Tag[] { new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")),
          new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")),
          new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) });
  encoder.write(cell1);
  encoder.write(cell2);
  encoder.write(cell3);
  encoder.flush();
  dos.close();
  long offset = cos.getCount();
  CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
  DataInputStream dis = new DataInputStream(cis);
  Codec.Decoder decoder = codec.getDecoder(dis);
  assertTrue(decoder.advance());
  Cell c = decoder.current();
  assertTrue(CellUtil.equals(c, cell1));
  List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
  assertEquals(2, tags.size());
  Tag tag = tags.get(0);
  assertEquals(1, tag.getType());
  assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
  tag = tags.get(1);
  assertEquals(2, tag.getType());
  assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
  assertTrue(decoder.advance());
  c = decoder.current();
  assertTrue(CellUtil.equals(c, cell2));
  tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
  assertEquals(1, tags.size());
  tag = tags.get(0);
  assertEquals(1, tag.getType());
  assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
  assertTrue(decoder.advance());
  c = decoder.current();
  assertTrue(CellUtil.equals(c, cell3));
  tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
  assertEquals(3, tags.size());
  tag = tags.get(0);
  assertEquals(2, tag.getType());
  assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
  tag = tags.get(1);
  assertEquals(2, tag.getType());
  assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
  tag = tags.get(2);
  assertEquals(1, tag.getType());
  assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
  assertFalse(decoder.advance());
  dis.close();
  assertEquals(offset, cis.getCount());
}
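Distilled from the test above, a minimal sketch of the encode/decode round trip with CellCodecWithTags; the cell and tag contents are assumed.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.codec.CellCodecWithTags;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.util.Bytes;

public class CodecRoundTripSketch {
  public static void main(String[] args) throws Exception {
    Codec codec = new CellCodecWithTags();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Codec.Encoder encoder = codec.getEncoder(out);
    // One cell carrying a single tag; row, column, value and tag contents are assumed.
    Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"),
        HConstants.LATEST_TIMESTAMP, Bytes.toBytes("v"),
        new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("tagvalue")) });
    encoder.write(cell);
    encoder.flush();
    // Decode everything back and report how many tag bytes each cell carries.
    Codec.Decoder decoder = codec.getDecoder(new ByteArrayInputStream(out.toByteArray()));
    while (decoder.advance()) {
      Cell decoded = decoder.current();
      System.out.println("decoded cell with " + decoded.getTagsLength() + " tag bytes");
    }
  }
}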