Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class VisibilityUtils, method createVisibilityExpTags.
public static List<Tag> createVisibilityExpTags(String visExpression, boolean withSerializationFormat,
    boolean checkAuths, Set<Integer> auths, VisibilityLabelOrdinalProvider ordinalProvider)
    throws IOException {
  ExpressionNode node = null;
  try {
    node = EXP_PARSER.parse(visExpression);
  } catch (ParseException e) {
    throw new IOException(e);
  }
  node = EXP_EXPANDER.expand(node);
  List<Tag> tags = new ArrayList<>();
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  List<Integer> labelOrdinals = new ArrayList<>();
  // This tag marks the sorted-ordinal serialization format, which is what supports
  // deletes with cell visibility.
  if (withSerializationFormat) {
    tags.add(VisibilityUtils.SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG);
  }
  if (node.isSingleNode()) {
    getLabelOrdinals(node, labelOrdinals, auths, checkAuths, ordinalProvider);
    writeLabelOrdinalsToStream(labelOrdinals, dos);
    tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
    baos.reset();
  } else {
    NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node;
    if (nlNode.getOperator() == Operator.OR) {
      // Each child of an OR node becomes its own visibility tag.
      for (ExpressionNode child : nlNode.getChildExps()) {
        getLabelOrdinals(child, labelOrdinals, auths, checkAuths, ordinalProvider);
        writeLabelOrdinalsToStream(labelOrdinals, dos);
        tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
        baos.reset();
        labelOrdinals.clear();
      }
    } else {
      getLabelOrdinals(nlNode, labelOrdinals, auths, checkAuths, ordinalProvider);
      writeLabelOrdinalsToStream(labelOrdinals, dos);
      tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
      baos.reset();
    }
  }
  return tags;
}
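
For context, application code normally does not call createVisibilityExpTags directly: a client attaches the expression to a mutation as a CellVisibility, and the server-side visibility coprocessor turns it into the ArrayBackedTag instances built above. A minimal client-side sketch, assuming the standard Put/CellVisibility API; the expression "secret & probe" is hypothetical and its labels would already have to be defined on the cluster:

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilityPutExample {
  // "table" is assumed to be an already-open org.apache.hadoop.hbase.client.Table.
  static void putWithVisibility(Table table) throws Exception {
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
    // The server converts this expression into per-cell visibility tags on write.
    put.setCellVisibility(new CellVisibility("secret & probe"));
    table.put(put);
  }
}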
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class AccessController, method addCellPermissions.
private static void addCellPermissions(final byte[] perms, Map<byte[], List<Cell>> familyMap) {
  // Rewrite each family's cell list, replacing every cell with a new cell that carries the ACL data.
  for (Map.Entry<byte[], List<Cell>> e : familyMap.entrySet()) {
    List<Cell> newCells = Lists.newArrayList();
    for (Cell cell : e.getValue()) {
      // Prepend the supplied perms in a new ACL tag to an updated list of tags for the cell.
      List<Tag> tags = new ArrayList<>();
      tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, perms));
      Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell);
      while (tagIterator.hasNext()) {
        tags.add(tagIterator.next());
      }
      newCells.add(CellUtil.createCell(cell, tags));
    }
    // Replacing the entry's value is safe here; it will not cause a ConcurrentModificationException.
    e.setValue(newCells);
  }
}
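
The prepended ACL tag is what the access controller later consults when evaluating per-cell permissions. A minimal sketch of locating that tag on a cell, assuming the same CellUtil.tagsIterator(Cell) API used above and an array-backed tag (which ArrayBackedTag guarantees), so the getValueArray/getValueOffset/getValueLength accessors apply; the method name findCellAclPerms is illustrative, not HBase API:

import java.util.Arrays;
import java.util.Iterator;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.security.access.AccessControlLists;

public class CellAclTagReader {
  // Returns the serialized permissions that addCellPermissions prepended,
  // or null if the cell carries no ACL tag.
  static byte[] findCellAclPerms(Cell cell) {
    Iterator<Tag> it = CellUtil.tagsIterator(cell);
    while (it.hasNext()) {
      Tag tag = it.next();
      if (tag.getType() == AccessControlLists.ACL_TAG_TYPE) {
        // Assumes an array-backed tag, as produced by new ArrayBackedTag(...) above.
        return Arrays.copyOfRange(tag.getValueArray(), tag.getValueOffset(),
            tag.getValueOffset() + tag.getValueLength());
      }
    }
    return null;
  }
}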
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class ExpAsStringVisibilityLabelServiceImpl, method createTag.
private Tag createTag(ExpressionNode node) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  List<String> labels = new ArrayList<>();
  List<String> notLabels = new ArrayList<>();
  extractLabels(node, labels, notLabels);
  Collections.sort(labels);
  Collections.sort(notLabels);
  // For a NOT label we write the label length as a negative value.
  for (String label : notLabels) {
    byte[] bLabel = Bytes.toBytes(label);
    short length = (short) bLabel.length;
    length = (short) (-1 * length);
    dos.writeShort(length);
    dos.write(bLabel);
  }
  for (String label : labels) {
    byte[] bLabel = Bytes.toBytes(label);
    dos.writeShort(bLabel.length);
    dos.write(bLabel);
  }
  return new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray());
}
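
The value bytes packed into this tag are a sequence of (short length, label bytes) pairs, where a negative length marks a NOT label and its absolute value gives the byte length. A hypothetical decoder, not part of HBase, that inverts the format written above; it assumes the labels were encoded as UTF-8, which Bytes.toBytes(String) produces:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class ExpAsStringTagDecoder {
  // Splits the tag value produced by createTag back into positive and NOT labels.
  static void decode(byte[] tagValue, List<String> labels, List<String> notLabels)
      throws IOException {
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(tagValue));
    while (dis.available() > 0) {
      short length = dis.readShort();
      boolean not = length < 0;
      byte[] bLabel = new byte[not ? -length : length];
      dis.readFully(bLabel);
      String label = new String(bLabel, StandardCharsets.UTF_8);
      (not ? notLabels : labels).add(label);
    }
  }
}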
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class TestHFileBlock, method writeTestKeyValues.
static int writeTestKeyValues(HFileBlock.Writer hbw, int seed, boolean includesMemstoreTS,
    boolean useTag) throws IOException {
  List<KeyValue> keyValues = new ArrayList<>();
  // 42 is just an arbitrary fixed base for the seed
  Random randomizer = new Random(42L + seed);
  // generate the KeyValues
  for (int i = 0; i < NUM_KEYVALUES; ++i) {
    byte[] row;
    long timestamp;
    byte[] family;
    byte[] qualifier;
    byte[] value;
    // either repeat an earlier value or generate a fresh one, so the data compresses well
    if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
      row = CellUtil.cloneRow(keyValues.get(randomizer.nextInt(keyValues.size())));
    } else {
      row = new byte[FIELD_LENGTH];
      randomizer.nextBytes(row);
    }
    if (0 == i) {
      family = new byte[FIELD_LENGTH];
      randomizer.nextBytes(family);
    } else {
      family = CellUtil.cloneFamily(keyValues.get(0));
    }
    if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
      qualifier = CellUtil.cloneQualifier(keyValues.get(randomizer.nextInt(keyValues.size())));
    } else {
      qualifier = new byte[FIELD_LENGTH];
      randomizer.nextBytes(qualifier);
    }
    if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
      value = CellUtil.cloneValue(keyValues.get(randomizer.nextInt(keyValues.size())));
    } else {
      value = new byte[FIELD_LENGTH];
      randomizer.nextBytes(value);
    }
    if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
      timestamp = keyValues.get(randomizer.nextInt(keyValues.size())).getTimestamp();
    } else {
      timestamp = randomizer.nextLong();
    }
    if (!useTag) {
      keyValues.add(new KeyValue(row, family, qualifier, timestamp, value));
    } else {
      keyValues.add(new KeyValue(row, family, qualifier, timestamp, value,
          new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("myTagVal")) }));
    }
  }
  // sort the KeyValues and write them to the block writer
  int totalSize = 0;
  Collections.sort(keyValues, CellComparator.COMPARATOR);
  for (KeyValue kv : keyValues) {
    totalSize += kv.getLength();
    if (includesMemstoreTS) {
      long memstoreTS = randomizer.nextLong();
      kv.setSequenceId(memstoreTS);
      // account for the vint-encoded memstore timestamp stored alongside the KeyValue
      totalSize += WritableUtils.getVIntSize(memstoreTS);
    }
    hbw.write(kv);
  }
  return totalSize;
}
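
The row, qualifier, value and timestamp are each either repeated from an earlier KeyValue with probability CHANCE_TO_REPEAT or generated fresh, which keeps the test block realistically compressible. The recurring pick-or-generate idiom could be captured in a helper; a small illustrative sketch (pickOrGenerate and its parameters are hypothetical, not part of the test):

import java.util.List;
import java.util.Random;

public class RepeatOrGenerate {
  // With probability p, reuse one of the previously generated byte arrays;
  // otherwise return freshLength new random bytes.
  static byte[] pickOrGenerate(Random rand, float p, List<byte[]> previous, int freshLength) {
    if (!previous.isEmpty() && rand.nextFloat() < p) {
      // Reusing earlier values keeps the generated block compressible.
      return previous.get(rand.nextInt(previous.size()));
    }
    byte[] fresh = new byte[freshLength];
    rand.nextBytes(fresh);
    return fresh;
  }
}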
Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.
The class TestCacheOnWrite, method writeStoreFile.
private void writeStoreFile(boolean useTags) throws IOException {
  Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "test_cache_on_write");
  HFileContext meta = new HFileContextBuilder()
      .withCompression(compress)
      .withBytesPerCheckSum(CKBYTES)
      .withChecksumType(ChecksumType.NULL)
      .withBlockSize(DATA_BLOCK_SIZE)
      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
      .withIncludesTags(useTags)
      .build();
  StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(storeFileParentDir)
      .withComparator(CellComparator.COMPARATOR)
      .withFileContext(meta)
      .withBloomType(BLOOM_TYPE)
      .withMaxKeyCount(NUM_KV)
      .build();
  byte[] cf = Bytes.toBytes("fam");
  for (int i = 0; i < NUM_KV; ++i) {
    byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);
    byte[] qualifier = RandomKeyValueUtil.randomRowOrQualifier(rand);
    byte[] value = RandomKeyValueUtil.randomValue(rand);
    KeyValue kv;
    if (useTags) {
      Tag t = new ArrayBackedTag((byte) 1, "visibility");
      List<Tag> tagList = new ArrayList<>();
      tagList.add(t);
      Tag[] tags = new Tag[1];
      tags[0] = t;
      kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length,
          rand.nextLong(), generateKeyType(rand), value, 0, value.length, tagList);
    } else {
      kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length,
          rand.nextLong(), generateKeyType(rand), value, 0, value.length);
    }
    sfw.append(kv);
  }
  sfw.close();
  storeFilePath = sfw.getPath();
}
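
Note that this snippet passes a String value to ArrayBackedTag, whereas the other snippets on this page pass a byte[]. A minimal sketch contrasting the two constructors, under the assumption that the String form simply stores the string's byte encoding and that the Tag accessors getType and getValueLength behave as expected:

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;

public class ArrayBackedTagConstructors {
  public static void main(String[] args) {
    Tag fromString = new ArrayBackedTag((byte) 1, "visibility");
    Tag fromBytes = new ArrayBackedTag((byte) 1, Bytes.toBytes("visibility"));
    // Both tags should report the same type and the same value length.
    System.out.println(fromString.getType() == fromBytes.getType());
    System.out.println(fromString.getValueLength() == fromBytes.getValueLength());
  }
}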