Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class TestHFileOutputFormat2, method test_WritingTagData.
/**
 * Test that {@link HFileOutputFormat2} RecordWriter writes tags such as ttl into
 * hfile.
 */
@Test
public void test_WritingTagData() throws Exception {
  Configuration conf = new Configuration(this.util.getConfiguration());
  final String HFILE_FORMAT_VERSION_CONF_KEY = "hfile.format.version";
  conf.setInt(HFILE_FORMAT_VERSION_CONF_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS);
  RecordWriter<ImmutableBytesWritable, Cell> writer = null;
  TaskAttemptContext context = null;
  Path dir = util.getDataTestDir("WritingTagData");
  try {
    Job job = new Job(conf);
    FileOutputFormat.setOutputPath(job, dir);
    context = createTestTaskAttemptContext(job);
    HFileOutputFormat2 hof = new HFileOutputFormat2();
    writer = hof.getRecordWriter(context);
    final byte[] b = Bytes.toBytes("b");
    List<Tag> tags = new ArrayList<>();
    tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(978670)));
    KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, tags);
    writer.write(new ImmutableBytesWritable(), kv);
    writer.close(context);
    writer = null;
    FileSystem fs = dir.getFileSystem(conf);
    RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(dir, true);
    while (iterator.hasNext()) {
      LocatedFileStatus keyFileStatus = iterator.next();
      HFile.Reader reader =
          HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf), conf);
      HFileScanner scanner = reader.getScanner(false, false, false);
      scanner.seekTo();
      Cell cell = scanner.getCell();
      List<Tag> tagsFromCell =
          TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
      assertTrue(tagsFromCell.size() > 0);
      for (Tag tag : tagsFromCell) {
        assertTrue(tag.getType() == TagType.TTL_TAG_TYPE);
      }
    }
  } finally {
    if (writer != null && context != null) {
      writer.close(context);
    }
    dir.getFileSystem(conf).delete(dir, true);
  }
}
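The test asserts only on the tag type. A hedged sketch of a tighter check that also verifies the tag payload, assuming the HBase 2.x Tag accessors (getValueArray/getValueOffset/getValueLength) and JUnit's assertArrayEquals:

  // Sketch: verify the TTL tag's payload as well as its type.
  for (Tag tag : tagsFromCell) {
    assertEquals(TagType.TTL_TAG_TYPE, tag.getType());
    byte[] tagValue = Bytes.copy(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
    // 978670 is the value written above; Bytes.toBytes(int) yields its 4-byte encoding.
    assertArrayEquals(Bytes.toBytes(978670), tagValue);
  }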
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class TestResultSizeEstimation, method testResultSizeEstimationWithTags.
@Test
public void testResultSizeEstimationWithTags() throws Exception {
  byte[] ROW1 = Bytes.toBytes("testRow1");
  byte[] ROW2 = Bytes.toBytes("testRow2");
  byte[] FAMILY = Bytes.toBytes("testFamily");
  byte[] QUALIFIER = Bytes.toBytes("testQualifier");
  byte[] VALUE = Bytes.toBytes("testValue");
  final TableName tableName = TableName.valueOf(name.getMethodName());
  byte[][] FAMILIES = new byte[][] { FAMILY };
  Table table = TEST_UTIL.createTable(tableName, FAMILIES);
  Put p = new Put(ROW1);
  p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
      new Tag[] { new ArrayBackedTag((byte) 1, new byte[TAG_DATA_SIZE]) }));
  table.put(p);
  p = new Put(ROW2);
  p.add(new KeyValue(ROW2, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
      new Tag[] { new ArrayBackedTag((byte) 1, new byte[TAG_DATA_SIZE]) }));
  table.put(p);
  Scan s = new Scan();
  s.setMaxResultSize(SCANNER_DATA_LIMIT);
  ResultScanner rs = table.getScanner(s);
  int count = 0;
  while (rs.next() != null) {
    count++;
  }
  assertEquals("Result size estimation did not work properly", 2, count);
  rs.close();
  table.close();
}
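The point of the test is that the scanner's result-size accounting must include tag bytes, not just values. A minimal sketch of the size difference (TAG_DATA_SIZE is the test's own constant; the small remainder is the tag-length framing in the KeyValue encoding):

  // Sketch: compare serialized lengths of the same cell with and without a tag.
  KeyValue plain = new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE);
  KeyValue tagged = new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
      new Tag[] { new ArrayBackedTag((byte) 1, new byte[TAG_DATA_SIZE]) });
  // Roughly TAG_DATA_SIZE plus a few bytes of tag framing.
  System.out.println(tagged.getLength() - plain.getLength());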
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class PutSortReducer, method reduce.
@Override
protected void reduce(ImmutableBytesWritable row, java.lang.Iterable<Put> puts,
    Reducer<ImmutableBytesWritable, Put, ImmutableBytesWritable, KeyValue>.Context context)
    throws java.io.IOException, InterruptedException {
  // although reduce() is called per-row, handle pathological case
  long threshold =
      context.getConfiguration().getLong("putsortreducer.row.threshold", 1L * (1 << 30));
  Iterator<Put> iter = puts.iterator();
  while (iter.hasNext()) {
    TreeSet<KeyValue> map = new TreeSet<>(CellComparator.COMPARATOR);
    long curSize = 0;
    // stop at the end or the RAM threshold
    List<Tag> tags = new ArrayList<>();
    while (iter.hasNext() && curSize < threshold) {
      // clear the tags
      tags.clear();
      Put p = iter.next();
      long t = p.getTTL();
      if (t != Long.MAX_VALUE) {
        // add TTL tag if found
        tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(t)));
      }
      byte[] acl = p.getACL();
      if (acl != null) {
        // add ACL tag if found
        tags.add(new ArrayBackedTag(TagType.ACL_TAG_TYPE, acl));
      }
      try {
        CellVisibility cellVisibility = p.getCellVisibility();
        if (cellVisibility != null) {
          // add the visibility labels if any
          tags.addAll(kvCreator.getVisibilityExpressionResolver()
              .createVisibilityExpTags(cellVisibility.getExpression()));
        }
      } catch (DeserializationException e) {
        // We just throw the exception here. Should we allow other mutations to proceed by
        // just ignoring the bad one?
        throw new IOException("Invalid visibility expression found in mutation " + p, e);
      }
      for (List<Cell> cells : p.getFamilyCellMap().values()) {
        for (Cell cell : cells) {
          // Creating the KV which needs to be directly written to HFiles. Using the Facade
          // KVCreator for creation of kvs.
          KeyValue kv = null;
          TagUtil.carryForwardTags(tags, cell);
          if (!tags.isEmpty()) {
            kv = (KeyValue) kvCreator.create(cell.getRowArray(), cell.getRowOffset(),
                cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(),
                cell.getFamilyLength(), cell.getQualifierArray(), cell.getQualifierOffset(),
                cell.getQualifierLength(), cell.getTimestamp(), cell.getValueArray(),
                cell.getValueOffset(), cell.getValueLength(), tags);
          } else {
            kv = KeyValueUtil.ensureKeyValue(cell);
          }
          if (map.add(kv)) {
            // don't count duplicated kv into size
            curSize += kv.heapSize();
          }
        }
      }
    }
    context.setStatus("Read " + map.size() + " entries of " + map.getClass() + "("
        + StringUtils.humanReadableInt(curSize) + ")");
    int index = 0;
    for (KeyValue kv : map) {
      context.write(row, kv);
      if (++index % 100 == 0) {
        context.setStatus("Wrote " + index);
      }
    }
    // if we have more entries to process
    if (iter.hasNext()) {
      // force flush because we cannot guarantee intra-row sorted order
      context.write(null, null);
    }
  }
}
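The reducer only materializes tags that the incoming Put actually carries, so the upstream job has to set them on the mutation. A hedged sketch of that setup (the row, family, and qualifier names here are made up for illustration; setTTL and setCellVisibility are the standard Mutation setters):

  Put put = new Put(Bytes.toBytes("row1"));
  put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
  // Becomes a TTL_TAG_TYPE tag in the reducer above (one day, in milliseconds).
  put.setTTL(86400000L);
  // Becomes visibility-expression tags via the VisibilityExpressionResolver.
  put.setCellVisibility(new CellVisibility("secret|topsecret"));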
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class TestWALCellCodecWithCompression, method createOffheapKV.
private ByteBufferKeyValue createOffheapKV(int noOfTags) {
  byte[] row = Bytes.toBytes("myRow");
  byte[] cf = Bytes.toBytes("myCF");
  byte[] q = Bytes.toBytes("myQualifier");
  byte[] value = Bytes.toBytes("myValue");
  List<Tag> tags = new ArrayList<>(noOfTags);
  for (int i = 1; i <= noOfTags; i++) {
    tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
  }
  KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
  ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getBuffer().length);
  dbb.put(kv.getBuffer());
  return new ByteBufferKeyValue(dbb, 0, kv.getBuffer().length);
}
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class TestWALCellCodecWithCompression, method createKV.
private KeyValue createKV(int noOfTags) {
  byte[] row = Bytes.toBytes("myRow");
  byte[] cf = Bytes.toBytes("myCF");
  byte[] q = Bytes.toBytes("myQualifier");
  byte[] value = Bytes.toBytes("myValue");
  List<Tag> tags = new ArrayList<>(noOfTags);
  for (int i = 1; i <= noOfTags; i++) {
    tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
  }
  return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
}
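The two helpers build the same tagged cell, one on-heap and one copied byte-for-byte into a direct ByteBuffer, so the codec test can exercise both cell representations. A hedged sanity-check sketch, reusing CellComparator.COMPARATOR from the PutSortReducer example and TagUtil.asList from the first example above:

  KeyValue onHeap = createKV(2);
  ByteBufferKeyValue offHeap = createOffheapKV(2);
  // The off-heap cell is an exact copy, so the keys compare equal.
  assertEquals(0, CellComparator.COMPARATOR.compare(onHeap, offHeap));
  // The synthetic tags decode back with the types written in the loop (1, 2, ...).
  List<Tag> decoded =
      TagUtil.asList(onHeap.getTagsArray(), onHeap.getTagsOffset(), onHeap.getTagsLength());
  assertEquals(2, decoded.size());
  assertEquals((byte) 1, decoded.get(0).getType());
  assertEquals((byte) 2, decoded.get(1).getType());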