Example use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project:
class AccessController, method postMutationBeforeWAL.
@Override
public Cell postMutationBeforeWAL(ObserverContext<RegionCoprocessorEnvironment> ctx, MutationType opType, Mutation mutation, Cell oldCell, Cell newCell) throws IOException {
    // Rewrites newCell so it carries forward the old cell's non-ACL tags, plus
    // either the mutation-supplied ACL (which takes precedence) or the old
    // cell's ACL tags. Returns newCell unchanged when cell-level security is
    // disabled or there is nothing to attach.
    if (!cellFeaturesEnabled) {
        return newCell;
    }
    // Partition the old cell's tags: ACL tags are collected separately so the
    // mutation-level ACL (if present) can replace them below.
    List<Tag> tags = Lists.newArrayList();
    List<Tag> aclTags = Lists.newArrayList();
    if (oldCell != null) {
        Iterator<Tag> tagIterator = CellUtil.tagsIterator(oldCell);
        while (tagIterator.hasNext()) {
            Tag tag = tagIterator.next();
            if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) {
                // Not an ACL tag, just carry it through
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Carrying forward tag from " + oldCell + ": type " + tag.getType() + " length " + tag.getValueLength());
                }
                tags.add(tag);
            } else {
                aclTags.add(tag);
            }
        }
    }
    // Do we have an ACL on the operation?
    byte[] aclBytes = mutation.getACL();
    if (aclBytes != null) {
        // Yes, use it — it overrides any ACL tags carried from the old cell.
        tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, aclBytes));
    } else {
        // No mutation-level ACL: carry forward the old cell's ACL tags.
        // NOTE: the previous version built a never-null, always-empty
        // ListMultimap "perms", guarded on perms != null (dead code), and
        // logged that empty map here; log the tags actually carried forward.
        if (LOG.isTraceEnabled()) {
            LOG.trace("Carrying forward ACLs from " + oldCell + ": " + aclTags);
        }
        tags.addAll(aclTags);
    }
    // If we have no tags to add, just return
    if (tags.isEmpty()) {
        return newCell;
    }
    return CellUtil.createCell(newCell, tags);
}
Example use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project:
class VisibilityReplicationEndpoint, method replicate.
@Override
// Replicates WAL entries, rewriting visibility tags into their string form
// when replicating to a different cluster. For same-cluster replication
// (e.g. region replicas) the entries are forwarded untouched.
public boolean replicate(ReplicateContext replicateContext) {
if (!delegator.canReplicateToSameCluster()) {
// Only when the replication is inter cluster replication we need to
// convert the visibility tags to
// string based tags. But for intra cluster replication like region
// replicas it is not needed.
List<Entry> entries = replicateContext.getEntries();
// Scratch lists reused (and cleared) for every cell to avoid per-cell allocation.
List<Tag> visTags = new ArrayList<>();
List<Tag> nonVisTags = new ArrayList<>();
List<Entry> newEntries = new ArrayList<>(entries.size());
for (Entry entry : entries) {
// Each source entry gets a freshly built edit holding possibly-rewritten cells.
WALEdit newEdit = new WALEdit();
ArrayList<Cell> cells = entry.getEdit().getCells();
for (Cell cell : cells) {
if (cell.getTagsLength() > 0) {
visTags.clear();
nonVisTags.clear();
// Split the cell's tags into visibility tags and everything else.
Byte serializationFormat = VisibilityUtils.extractAndPartitionTags(cell, visTags, nonVisTags);
if (!visTags.isEmpty()) {
try {
// Re-encode the visibility expression for cross-cluster transport.
byte[] modifiedVisExpression = visibilityLabelsService.encodeVisibilityForReplication(visTags, serializationFormat);
if (modifiedVisExpression != null) {
nonVisTags.add(new ArrayBackedTag(TagType.STRING_VIS_TAG_TYPE, modifiedVisExpression));
}
} catch (Exception ioe) {
LOG.error("Exception while reading the visibility labels from the cell. The replication " + "would happen as per the existing format and not as " + "string type for the cell " + cell + ".", ioe);
// just return the old entries as it is without applying the string type change
// Best-effort: on failure, ship the original cell unchanged and move on.
newEdit.add(cell);
continue;
}
// Recreate the cell with the new tags and the existing tags
Cell newCell = CellUtil.createCell(cell, nonVisTags);
newEdit.add(newCell);
} else {
// Tags present but none are visibility tags: nothing to rewrite.
newEdit.add(cell);
}
} else {
// Untagged cell: forward as-is.
newEdit.add(cell);
}
}
newEntries.add(new Entry(entry.getKey(), newEdit));
}
replicateContext.setEntries(newEntries);
return delegator.replicate(replicateContext);
} else {
// Same-cluster replication: no tag conversion needed.
return delegator.replicate(replicateContext);
}
}
Example use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project:
class TestTagCompressionContext, method createKVWithTags.
/**
 * Builds a KeyValue carrying {@code noOfTags} tags whose type bytes run
 * 0..noOfTags-1 and whose values encode their index ("tagValue0", ...).
 */
private KeyValue createKVWithTags(int noOfTags) {
    List<Tag> tagList = new ArrayList<>(noOfTags);
    for (int idx = 0; idx < noOfTags; idx++) {
        tagList.add(new ArrayBackedTag((byte) idx, "tagValue" + idx));
    }
    return new KeyValue(ROW, CF, Q, 1234L, V, tagList);
}
Example use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project:
class TestByteRangeWithKVSerialization, method testWritingAndReadingCells.
@Test
public void testWritingAndReadingCells() throws Exception {
    // Round-trip test: serialize one million tagged KeyValues into a
    // PositionedByteRange, read them back, and verify key, value, tags and
    // sequence id survive unchanged.
    final byte[] FAMILY = Bytes.toBytes("f1");
    final byte[] QUALIFIER = Bytes.toBytes("q1");
    final byte[] VALUE = Bytes.toBytes("v");
    int kvCount = 1000000;
    List<KeyValue> kvs = new ArrayList<>(kvCount);
    int totalSize = 0;
    Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "tag1") };
    for (int i = 0; i < kvCount; i++) {
        KeyValue kv = new KeyValue(Bytes.toBytes(i), FAMILY, QUALIFIER, i, VALUE, tags);
        kv.setSequenceId(i);
        kvs.add(kv);
        // Each cell is written as its serialized bytes plus a long sequence id.
        totalSize += kv.getLength() + Bytes.SIZEOF_LONG;
    }
    PositionedByteRange pbr = new SimplePositionedMutableByteRange(totalSize);
    for (KeyValue kv : kvs) {
        writeCell(pbr, kv);
    }
    // Re-wrap the written bytes for reading, bounded by the write position.
    PositionedByteRange pbr1 = new SimplePositionedMutableByteRange(pbr.getBytes(), 0, pbr.getPosition());
    for (int i = 0; i < kvCount; i++) {
        KeyValue kv = readCell(pbr1);
        KeyValue kv1 = kvs.get(i);
        // assertEquals reports both operands on failure, unlike
        // assertTrue(kv.equals(kv1)) which only says "false".
        Assert.assertEquals(kv1, kv);
        Assert.assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength()));
        Assert.assertTrue(Bytes.equals(kv.getTagsArray(), kv.getTagsOffset(), kv.getTagsLength(), kv1.getTagsArray(), kv1.getTagsOffset(), kv1.getTagsLength()));
        Assert.assertEquals(kv1.getSequenceId(), kv.getSequenceId());
    }
}
Example use of org.apache.hadoop.hbase.ArrayBackedTag in the Apache HBase project:
class RedundantKVGenerator, method generateTestKeyValues.
/**
 * Generate test data useful to test encoders.
 * @param howMany How many Key values should be generated.
 * @param useTags whether each generated KeyValue should carry a single test tag
 * @return sorted list of key values
 */
public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
    List<KeyValue> result = new ArrayList<>();
    List<byte[]> rows = generateRows();
    // Tracks which qualifiers have already been used per row, so later cells
    // can reuse or slightly vary them (exercises prefix-style encoders).
    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
    if (family == null) {
        family = new byte[columnFamilyLength];
        randomizer.nextBytes(family);
    }
    // nextInt(Integer.MAX_VALUE) is always non-negative; the previous
    // Math.abs(randomizer.nextInt()) was negative when nextInt() returned
    // Integer.MIN_VALUE (Math.abs(MIN_VALUE) == MIN_VALUE).
    long baseTimestamp = randomizer.nextInt(Integer.MAX_VALUE) / baseTimestampDivide;
    byte[] value = new byte[valueLength];
    for (int i = 0; i < howMany; ++i) {
        long timestamp = baseTimestamp;
        if (timestampDiffSize > 0) {
            timestamp += randomizer.nextInt(timestampDiffSize);
        }
        Integer rowId = randomizer.nextInt(rows.size());
        byte[] row = rows.get(rowId);
        // generate qualifier, sometimes it is same, sometimes similar,
        // occasionally completely different
        byte[] qualifier;
        float qualifierChance = randomizer.nextFloat();
        if (!rowsToQualifier.containsKey(rowId) || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
            // Completely new qualifier of roughly averageQualifierLength.
            int qualifierLength = averageQualifierLength;
            qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1) - qualifierLengthVariance;
            qualifier = new byte[qualifierLength];
            randomizer.nextBytes(qualifier);
            // Record it for this row (creating the list on first use).
            rowsToQualifier.computeIfAbsent(rowId, k -> new ArrayList<>()).add(qualifier);
        } else if (qualifierChance > chanceForSameQualifier) {
            // similar qualifier: share a random-length prefix with a previous one
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
            qualifier = new byte[originalQualifier.length];
            int commonPrefix = randomizer.nextInt(qualifier.length);
            System.arraycopy(originalQualifier, 0, qualifier, 0, commonPrefix);
            for (int j = commonPrefix; j < qualifier.length; ++j) {
                qualifier[j] = (byte) (randomizer.nextInt() & 0xff);
            }
            previousQualifiers.add(qualifier);
        } else {
            // same qualifier: reuse one already seen for this row
            List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
            qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
        }
        // Occasionally emit an all-zero value (compresses well), else random bytes.
        if (randomizer.nextFloat() < chanceForZeroValue) {
            for (int j = 0; j < value.length; ++j) {
                value[j] = (byte) 0;
            }
        } else {
            randomizer.nextBytes(value);
        }
        if (useTags) {
            result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
        } else {
            result.add(new KeyValue(row, family, qualifier, timestamp, value));
        }
    }
    Collections.sort(result, CellComparator.COMPARATOR);
    return result;
}
End of aggregated usage examples.