Example 21 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.

The class VisibilityUtils, method createVisibilityExpTags.

public static List<Tag> createVisibilityExpTags(String visExpression, boolean withSerializationFormat, boolean checkAuths, Set<Integer> auths, VisibilityLabelOrdinalProvider ordinalProvider) throws IOException {
    ExpressionNode node = null;
    try {
        node = EXP_PARSER.parse(visExpression);
    } catch (ParseException e) {
        throw new IOException(e);
    }
    node = EXP_EXPANDER.expand(node);
    List<Tag> tags = new ArrayList<>();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    List<Integer> labelOrdinals = new ArrayList<>();
    // The serialization-format tag, when present, indicates we are supporting deletes with cell visibility
    if (withSerializationFormat) {
        tags.add(VisibilityUtils.SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG);
    }
    if (node.isSingleNode()) {
        getLabelOrdinals(node, labelOrdinals, auths, checkAuths, ordinalProvider);
        writeLabelOrdinalsToStream(labelOrdinals, dos);
        tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
        baos.reset();
    } else {
        NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node;
        if (nlNode.getOperator() == Operator.OR) {
            for (ExpressionNode child : nlNode.getChildExps()) {
                getLabelOrdinals(child, labelOrdinals, auths, checkAuths, ordinalProvider);
                writeLabelOrdinalsToStream(labelOrdinals, dos);
                tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
                baos.reset();
                labelOrdinals.clear();
            }
        } else {
            getLabelOrdinals(nlNode, labelOrdinals, auths, checkAuths, ordinalProvider);
            writeLabelOrdinalsToStream(labelOrdinals, dos);
            tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
            baos.reset();
        }
    }
    return tags;
}
Also used: ExpressionNode (org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode), NonLeafExpressionNode (org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode), LeafExpressionNode (org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode), DataOutputStream (java.io.DataOutputStream), ArrayList (java.util.ArrayList), IOException (java.io.IOException), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag), ByteArrayOutputStream (java.io.ByteArrayOutputStream)
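
A minimal caller sketch for the snippet above, assuming a VisibilityLabelOrdinalProvider implementation (for example a label cache) is already at hand; the helper method, expression handling, and empty auth set are illustrative and not part of VisibilityUtils.

static List<Tag> tagsFor(String expression, VisibilityLabelOrdinalProvider ordinalProvider) throws IOException {
    // checkAuths is false here, so the auth-ordinal set can stay empty for this sketch;
    // withSerializationFormat = true prepends SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG as shown above
    return VisibilityUtils.createVisibilityExpTags(expression, true, false, new HashSet<>(), ordinalProvider);
}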

Example 22 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.

The class AccessController, method addCellPermissions.

private static void addCellPermissions(final byte[] perms, Map<byte[], List<Cell>> familyMap) {
    // Rewrite each family's list of cells with new cells that include the ACL data
    for (Map.Entry<byte[], List<Cell>> e : familyMap.entrySet()) {
        List<Cell> newCells = Lists.newArrayList();
        for (Cell cell : e.getValue()) {
            // Prepend the supplied perms in a new ACL tag to an updated list of tags for the cell
            List<Tag> tags = new ArrayList<>();
            tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, perms));
            Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell);
            while (tagIterator.hasNext()) {
                tags.add(tagIterator.next());
            }
            newCells.add(CellUtil.createCell(cell, tags));
        }
        // This is supposed to be safe, won't CME
        e.setValue(newCells);
    }
}
Also used: ArrayList (java.util.ArrayList), FilterList (org.apache.hadoop.hbase.filter.FilterList), List (java.util.List), Tag (org.apache.hadoop.hbase.Tag), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Map (java.util.Map), TreeMap (java.util.TreeMap), HashMap (java.util.HashMap), Cell (org.apache.hadoop.hbase.Cell)
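
For context, a hedged sketch of how the familyMap argument is typically obtained from a client Put before this hook rewrites it; the row, column, and value bytes are placeholders, and serializedPerms stands in for the permission bytes passed as perms above.

// Hypothetical setup, not taken from AccessController itself
Put put = new Put(Bytes.toBytes("row1"));
put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
Map<byte[], List<Cell>> familyMap = put.getFamilyCellMap();
// addCellPermissions(serializedPerms, familyMap) then replaces every cell with a copy whose
// tag list begins with an ACL_TAG_TYPE tag wrapping serializedPerms, followed by the cell's original tags.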

Example 23 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.

The class ExpAsStringVisibilityLabelServiceImpl, method createTag.

private Tag createTag(ExpressionNode node) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    List<String> labels = new ArrayList<>();
    List<String> notLabels = new ArrayList<>();
    extractLabels(node, labels, notLabels);
    Collections.sort(labels);
    Collections.sort(notLabels);
    // For a NOT node we write the label length as a negative value.
    for (String label : notLabels) {
        byte[] bLabel = Bytes.toBytes(label);
        short length = (short) bLabel.length;
        length = (short) (-1 * length);
        dos.writeShort(length);
        dos.write(bLabel);
    }
    for (String label : labels) {
        byte[] bLabel = Bytes.toBytes(label);
        dos.writeShort(bLabel.length);
        dos.write(bLabel);
    }
    return new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray());
}
Also used: DataOutputStream (java.io.DataOutputStream), ArrayList (java.util.ArrayList), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag)
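
A small, hypothetical helper alongside createTag, mirroring the parse-then-expand pattern from Example 21; the helper name and the expression in the comment are illustrative and not part of the original class.

private Tag tagForExpression(String visExpression) throws IOException {
    ExpressionNode node;
    try {
        // Same parser/expander pair used by VisibilityUtils in Example 21
        node = new ExpressionParser().parse(visExpression);
    } catch (ParseException e) {
        throw new IOException(e);
    }
    // For an expression like "(!secret)&public", createTag writes "secret" with a negative
    // length and "public" with a positive length into one VISIBILITY_TAG_TYPE tag
    return createTag(new ExpressionExpander().expand(node));
}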

Example 24 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.

The class TestHFileBlock, method writeTestKeyValues.

static int writeTestKeyValues(HFileBlock.Writer hbw, int seed, boolean includesMemstoreTS, boolean useTag) throws IOException {
    List<KeyValue> keyValues = new ArrayList<>();
    // 42 is just an arbitrary fixed offset; combining it with seed keeps runs reproducible
    Random randomizer = new Random(42L + seed);
    // generate keyValues
    for (int i = 0; i < NUM_KEYVALUES; ++i) {
        byte[] row;
        long timestamp;
        byte[] family;
        byte[] qualifier;
        byte[] value;
        // generate it or repeat, it should compress well
        if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
            row = CellUtil.cloneRow(keyValues.get(randomizer.nextInt(keyValues.size())));
        } else {
            row = new byte[FIELD_LENGTH];
            randomizer.nextBytes(row);
        }
        if (0 == i) {
            family = new byte[FIELD_LENGTH];
            randomizer.nextBytes(family);
        } else {
            family = CellUtil.cloneFamily(keyValues.get(0));
        }
        if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
            qualifier = CellUtil.cloneQualifier(keyValues.get(randomizer.nextInt(keyValues.size())));
        } else {
            qualifier = new byte[FIELD_LENGTH];
            randomizer.nextBytes(qualifier);
        }
        if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
            value = CellUtil.cloneValue(keyValues.get(randomizer.nextInt(keyValues.size())));
        } else {
            value = new byte[FIELD_LENGTH];
            randomizer.nextBytes(value);
        }
        if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
            timestamp = keyValues.get(randomizer.nextInt(keyValues.size())).getTimestamp();
        } else {
            timestamp = randomizer.nextLong();
        }
        if (!useTag) {
            keyValues.add(new KeyValue(row, family, qualifier, timestamp, value));
        } else {
            keyValues.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("myTagVal")) }));
        }
    }
    // sort it and write to stream
    int totalSize = 0;
    Collections.sort(keyValues, CellComparator.COMPARATOR);
    for (KeyValue kv : keyValues) {
        totalSize += kv.getLength();
        if (includesMemstoreTS) {
            long memstoreTS = randomizer.nextLong();
            kv.setSequenceId(memstoreTS);
            totalSize += WritableUtils.getVIntSize(memstoreTS);
        }
        hbw.write(kv);
    }
    return totalSize;
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Random (java.util.Random), ArrayList (java.util.ArrayList), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag)
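
A hedged sketch of how a test might drive this helper from inside TestHFileBlock: build an HFileContext, open an HFileBlock.Writer, start a DATA block, and let writeTestKeyValues fill it. The flag values and the choice of NoOpDataBlockEncoder are illustrative assumptions, not taken from the original test body.

HFileContext meta = new HFileContextBuilder()
    .withIncludesMvcc(true)       // matches includesMemstoreTS = true in the call below
    .withIncludesTags(true)       // matches useTag = true in the call below
    .withCompression(Compression.Algorithm.NONE)
    .build();
HFileBlock.Writer hbw = new HFileBlock.Writer(NoOpDataBlockEncoder.INSTANCE, meta);
hbw.startWriting(BlockType.DATA);
int unencodedSize = writeTestKeyValues(hbw, 0, true, true);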

Example 25 with ArrayBackedTag

Use of org.apache.hadoop.hbase.ArrayBackedTag in project hbase by apache.

The class TestCacheOnWrite, method writeStoreFile.

private void writeStoreFile(boolean useTags) throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "test_cache_on_write");
    HFileContext meta = new HFileContextBuilder()
        .withCompression(compress)
        .withBytesPerCheckSum(CKBYTES)
        .withChecksumType(ChecksumType.NULL)
        .withBlockSize(DATA_BLOCK_SIZE)
        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
        .withIncludesTags(useTags)
        .build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(storeFileParentDir)
        .withComparator(CellComparator.COMPARATOR)
        .withFileContext(meta)
        .withBloomType(BLOOM_TYPE)
        .withMaxKeyCount(NUM_KV)
        .build();
    byte[] cf = Bytes.toBytes("fam");
    for (int i = 0; i < NUM_KV; ++i) {
        byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);
        byte[] qualifier = RandomKeyValueUtil.randomRowOrQualifier(rand);
        byte[] value = RandomKeyValueUtil.randomValue(rand);
        KeyValue kv;
        if (useTags) {
            Tag t = new ArrayBackedTag((byte) 1, "visibility");
            List<Tag> tagList = new ArrayList<>();
            tagList.add(t);
            Tag[] tags = new Tag[1];
            tags[0] = t;
            kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length, rand.nextLong(), generateKeyType(rand), value, 0, value.length, tagList);
        } else {
            kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length, rand.nextLong(), generateKeyType(rand), value, 0, value.length);
        }
        sfw.append(kv);
    }
    sfw.close();
    storeFilePath = sfw.getPath();
}
Also used: Path (org.apache.hadoop.fs.Path), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag)
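
A quick, hypothetical check of the tagged-KeyValue construction used above, showing that the (byte) 1 tag survives on the cell; the row, qualifier, and value bytes are placeholders and this check is not part of the original test.

Tag vis = new ArrayBackedTag((byte) 1, "visibility");
KeyValue tagged = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("fam"), Bytes.toBytes("q"),
    HConstants.LATEST_TIMESTAMP, Bytes.toBytes("v"), new Tag[] { vis });
Iterator<Tag> it = CellUtil.tagsIterator(tagged);
while (it.hasNext()) {
    Tag readBack = it.next();
    assert readBack.getType() == (byte) 1;  // the tag type assigned above
}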

Aggregations

ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag) 37
KeyValue (org.apache.hadoop.hbase.KeyValue) 31
Tag (org.apache.hadoop.hbase.Tag) 31
ArrayList (java.util.ArrayList) 23
Cell (org.apache.hadoop.hbase.Cell) 14
Test (org.junit.Test) 9
Put (org.apache.hadoop.hbase.client.Put) 7
IOException (java.io.IOException) 6
ByteBufferKeyValue (org.apache.hadoop.hbase.ByteBufferKeyValue) 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 4
DataOutputStream (java.io.DataOutputStream) 4
Path (org.apache.hadoop.fs.Path) 4
ByteArrayInputStream (java.io.ByteArrayInputStream) 3
DataInputStream (java.io.DataInputStream) 3
ByteBuffer (java.nio.ByteBuffer) 3
HashMap (java.util.HashMap) 3
List (java.util.List) 3
Random (java.util.Random) 3
CountingInputStream (com.google.common.io.CountingInputStream) 2
CountingOutputStream (com.google.common.io.CountingOutputStream) 2