Example 16 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

From the class TestScan, method testScanCopyConstructor.

@Test
public void testScanCopyConstructor() throws Exception {
    Scan scan = new Scan();
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"))
        .setACL("test_user", new Permission(Permission.Action.READ))
        .setAllowPartialResults(true)
        .setAsyncPrefetch(false)
        .setAttribute("test_key", Bytes.toBytes("test_value"))
        .setAuthorizations(new Authorizations("test_label"))
        .setBatch(10)
        .setCacheBlocks(false)
        .setCaching(10)
        .setConsistency(Consistency.TIMELINE)
        .setFilter(new FilterList())
        .setId("scan_copy_constructor")
        .setIsolationLevel(IsolationLevel.READ_COMMITTED)
        .setLimit(100)
        .setLoadColumnFamiliesOnDemand(false)
        .setMaxResultSize(100)
        .setMaxResultsPerColumnFamily(1000)
        .readVersions(9999)
        .setMvccReadPoint(5)
        .setNeedCursorResult(true)
        .setPriority(1)
        .setRaw(true)
        .setReplicaId(3)
        .setReversed(true)
        .setRowOffsetPerColumnFamily(5)
        .setRowPrefixFilter(Bytes.toBytes("row_"))
        .setScanMetricsEnabled(true)
        .setReadType(ReadType.STREAM)
        .withStartRow(Bytes.toBytes("row_1"))
        .withStopRow(Bytes.toBytes("row_2"))
        .setTimeRange(0, 13);
    // create a copy of existing scan object
    Scan scanCopy = new Scan(scan);
    // validate fields of copied scan object match with the original scan object
    assertEquals(scan.getACL(), scanCopy.getACL());
    assertEquals(scan.getAllowPartialResults(), scanCopy.getAllowPartialResults());
    assertEquals(scan.getAttribute("test_key"), scanCopy.getAttribute("test_key"));
    assertEquals(scan.getAttributeSize(), scanCopy.getAttributeSize());
    assertEquals(scan.getAttributesMap(), scanCopy.getAttributesMap());
    assertEquals(scan.getAuthorizations().getLabels(), scanCopy.getAuthorizations().getLabels());
    assertEquals(scan.getBatch(), scanCopy.getBatch());
    assertEquals(scan.getCacheBlocks(), scanCopy.getCacheBlocks());
    assertEquals(scan.getCaching(), scanCopy.getCaching());
    assertEquals(scan.getConsistency(), scanCopy.getConsistency());
    assertEquals(scan.getFamilies().length, scanCopy.getFamilies().length);
    assertEquals(scan.getFamilies()[0], scanCopy.getFamilies()[0]);
    assertEquals(scan.getFamilyMap(), scanCopy.getFamilyMap());
    assertEquals(scan.getFilter(), scanCopy.getFilter());
    assertEquals(scan.getId(), scanCopy.getId());
    assertEquals(scan.getIsolationLevel(), scanCopy.getIsolationLevel());
    assertEquals(scan.getLimit(), scanCopy.getLimit());
    assertEquals(scan.getLoadColumnFamiliesOnDemandValue(), scanCopy.getLoadColumnFamiliesOnDemandValue());
    assertEquals(scan.getMaxResultSize(), scanCopy.getMaxResultSize());
    assertEquals(scan.getMaxResultsPerColumnFamily(), scanCopy.getMaxResultsPerColumnFamily());
    assertEquals(scan.getMaxVersions(), scanCopy.getMaxVersions());
    assertEquals(scan.getMvccReadPoint(), scanCopy.getMvccReadPoint());
    assertEquals(scan.getPriority(), scanCopy.getPriority());
    assertEquals(scan.getReadType(), scanCopy.getReadType());
    assertEquals(scan.getReplicaId(), scanCopy.getReplicaId());
    assertEquals(scan.getRowOffsetPerColumnFamily(), scanCopy.getRowOffsetPerColumnFamily());
    assertEquals(scan.getStartRow(), scanCopy.getStartRow());
    assertEquals(scan.getStopRow(), scanCopy.getStopRow());
    assertEquals(scan.getTimeRange(), scanCopy.getTimeRange());
    assertTrue("Make sure copy constructor adds all the fields in the copied object", EqualsBuilder.reflectionEquals(scan, scanCopy));
}
Also used: Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations), Permission(org.apache.hadoop.hbase.security.access.Permission), FilterList(org.apache.hadoop.hbase.filter.FilterList), Test(org.junit.Test)
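
Because the copy constructor clones every field, it is convenient for reusing a fully configured Scan, Authorizations included, as a template for per-request scans. Below is a minimal sketch of that pattern; it uses only setters that already appear in the test above, and the label names and row keys are purely illustrative:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanTemplateExample {
    public static void main(String[] args) throws Exception {
        // Template scan carrying the visibility labels and tuning every request should share.
        Scan template = new Scan();
        template.setAuthorizations(new Authorizations("secret", "topsecret")); // illustrative labels
        template.setCaching(100);
        template.setCacheBlocks(false);

        // The copy constructor clones every field, so a per-request scan only needs its
        // row range; the Authorizations travel along with the copy.
        Scan perRequest = new Scan(template)
                .withStartRow(Bytes.toBytes("row_1"))
                .withStopRow(Bytes.toBytes("row_2"));

        // Should print the labels set on the template, e.g. [secret, topsecret].
        System.out.println(perRequest.getAuthorizations().getLabels());
    }
}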

Example 17 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

From the class TestScan, method testSetAuthorizations.

@Test
public void testSetAuthorizations() {
    Scan scan = new Scan();
    try {
        scan.setAuthorizations(new Authorizations("\u002b|\u0029"));
        scan.setAuthorizations(new Authorizations("A", "B", "0123", "A0", "1A1", "_a"));
        scan.setAuthorizations(new Authorizations("A|B"));
        scan.setAuthorizations(new Authorizations("A&B"));
        scan.setAuthorizations(new Authorizations("!B"));
        scan.setAuthorizations(new Authorizations("A", "(A)"));
        scan.setAuthorizations(new Authorizations("A", "{A"));
        scan.setAuthorizations(new Authorizations(" "));
        scan.setAuthorizations(new Authorizations(":B"));
        scan.setAuthorizations(new Authorizations("-B"));
        scan.setAuthorizations(new Authorizations(".B"));
        scan.setAuthorizations(new Authorizations("/B"));
    } catch (IllegalArgumentException e) {
        fail("should not throw exception");
    }
}
Also used: Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations), Test(org.junit.Test)
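
What this test pins down is that Scan.setAuthorizations performs no client-side syntax checking on the labels; whether a label actually applies to the caller is decided on the server (by the visibility coprocessor, when it is enabled). Typical caller code therefore simply passes the labels the user claims, as in this hedged sketch; the connection setup, table name, and labels are illustrative assumptions, not part of the example above:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.Authorizations;

public class AuthorizedScanExample {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = connection.getTable(TableName.valueOf("example_table"))) { // hypothetical table
            Scan scan = new Scan();
            // Labels the calling user claims; the server intersects them with the labels
            // actually granted to that user before evaluating cell visibility.
            scan.setAuthorizations(new Authorizations("secret", "confidential"));
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    System.out.println(result);
                }
            }
        }
    }
}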

Example 18 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

From the class ThriftUtilities, method scanFromThrift.

public static Scan scanFromThrift(TScan in) throws IOException {
    Scan out = new Scan();
    if (in.isSetStartRow()) {
        out.withStartRow(in.getStartRow());
    }
    if (in.isSetStopRow()) {
        out.withStopRow(in.getStopRow());
    }
    if (in.isSetCaching()) {
        out.setCaching(in.getCaching());
    }
    if (in.isSetMaxVersions()) {
        out.readVersions(in.getMaxVersions());
    }
    if (in.isSetColumns()) {
        for (TColumn column : in.getColumns()) {
            if (column.isSetQualifier()) {
                out.addColumn(column.getFamily(), column.getQualifier());
            } else {
                out.addFamily(column.getFamily());
            }
        }
    }
    TTimeRange timeRange = in.getTimeRange();
    if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
        out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
    }
    if (in.isSetBatchSize()) {
        out.setBatch(in.getBatchSize());
    }
    if (in.isSetFilterString()) {
        ParseFilter parseFilter = new ParseFilter();
        out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
    }
    if (in.isSetAttributes()) {
        addAttributes(out, in.getAttributes());
    }
    if (in.isSetAuthorizations()) {
        out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
    }
    if (in.isSetReversed()) {
        out.setReversed(in.isReversed());
    }
    if (in.isSetCacheBlocks()) {
        out.setCacheBlocks(in.isCacheBlocks());
    }
    if (in.isSetColFamTimeRangeMap()) {
        Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
        if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
            for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
                out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()), entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
            }
        }
    }
    if (in.isSetReadType()) {
        out.setReadType(readTypeFromThrift(in.getReadType()));
    }
    if (in.isSetLimit()) {
        out.setLimit(in.getLimit());
    }
    if (in.isSetConsistency()) {
        out.setConsistency(consistencyFromThrift(in.getConsistency()));
    }
    if (in.isSetTargetReplicaId()) {
        out.setReplicaId(in.getTargetReplicaId());
    }
    if (in.isSetFilterBytes()) {
        out.setFilter(filterFromThrift(in.getFilterBytes()));
    }
    return out;
}
Also used: Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations), ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter), TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn), TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange), TScan(org.apache.hadoop.hbase.thrift2.generated.TScan), Scan(org.apache.hadoop.hbase.client.Scan), ByteBuffer(java.nio.ByteBuffer), Map(java.util.Map)
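
For context, the TScan consumed by scanFromThrift is built on the client side from the generated thrift2 types listed above. The following is a minimal sketch of that conversion, assuming ThriftUtilities (an HBase-internal class) is reachable on the classpath and using only fields implied by the conversion code; the label names are illustrative:

import java.util.Arrays;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.thrift2.ThriftUtilities;
import org.apache.hadoop.hbase.thrift2.generated.TAuthorization;
import org.apache.hadoop.hbase.thrift2.generated.TScan;

public class ThriftScanConversionExample {
    public static void main(String[] args) throws Exception {
        // Build the Thrift-side scan the way a thrift2 client would.
        TAuthorization tAuthorization = new TAuthorization();
        tAuthorization.setLabels(Arrays.asList("secret", "confidential")); // illustrative labels

        TScan tScan = new TScan();
        tScan.setAuthorizations(tAuthorization);
        tScan.setCaching(100);

        // scanFromThrift copies the labels into a client-side Authorizations instance.
        Scan scan = ThriftUtilities.scanFromThrift(tScan);
        System.out.println(scan.getAuthorizations().getLabels());
    }
}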

Example 19 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

From the class ThriftUtilities, method getFromHBase.

public static TGet getFromHBase(Get in) {
    TGet out = new TGet();
    out.setRow(in.getRow());
    TTimeRange tTimeRange = new TTimeRange();
    tTimeRange.setMaxStamp(in.getTimeRange().getMax()).setMinStamp(in.getTimeRange().getMin());
    out.setTimeRange(tTimeRange);
    out.setMaxVersions(in.getMaxVersions());
    for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
        out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), ByteBuffer.wrap(attribute.getValue()));
    }
    try {
        Authorizations authorizations = in.getAuthorizations();
        if (authorizations != null) {
            TAuthorization tAuthorization = new TAuthorization();
            tAuthorization.setLabels(authorizations.getLabels());
            out.setAuthorizations(tAuthorization);
        }
    } catch (DeserializationException e) {
        throw new RuntimeException(e);
    }
    out.setConsistency(consistencyFromHBase(in.getConsistency()));
    out.setTargetReplicaId(in.getReplicaId());
    out.setCacheBlocks(in.getCacheBlocks());
    out.setStoreLimit(in.getMaxResultsPerColumnFamily());
    out.setStoreOffset(in.getRowOffsetPerColumnFamily());
    out.setExistence_only(in.isCheckExistenceOnly());
    for (Map.Entry<byte[], NavigableSet<byte[]>> family : in.getFamilyMap().entrySet()) {
        if (family.getValue() != null && !family.getValue().isEmpty()) {
            for (byte[] qualifier : family.getValue()) {
                TColumn column = new TColumn();
                column.setFamily(family.getKey());
                column.setQualifier(qualifier);
                out.addToColumns(column);
            }
        } else {
            TColumn column = new TColumn();
            column.setFamily(family.getKey());
            out.addToColumns(column);
        }
    }
    if (in.getFilter() != null) {
        try {
            out.setFilterBytes(filterFromHBase(in.getFilter()));
        } catch (IOException ioE) {
            throw new RuntimeException(ioE);
        }
    }
    return out;
}
Also used: Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations), NavigableSet(java.util.NavigableSet), TGet(org.apache.hadoop.hbase.thrift2.generated.TGet), TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn), TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange), TAuthorization(org.apache.hadoop.hbase.thrift2.generated.TAuthorization), IOException(java.io.IOException), DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException), Map(java.util.Map)
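
getFromHBase works in the opposite direction: it serializes a client-side Get, including its Authorizations, into the Thrift TGet structure. A small hedged sketch of that conversion, again assuming ThriftUtilities is on the classpath; the row key and label are illustrative:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.thrift2.ThriftUtilities;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.util.Bytes;

public class ThriftGetConversionExample {
    public static void main(String[] args) {
        // Client-side Get carrying visibility labels (row key and label name are illustrative).
        Get get = new Get(Bytes.toBytes("row_1"));
        get.setAuthorizations(new Authorizations("secret"));

        // getFromHBase copies the labels into the Thrift TAuthorization structure.
        TGet tGet = ThriftUtilities.getFromHBase(get);
        System.out.println(tGet.getAuthorizations().getLabels());
    }
}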

Aggregations

Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations): 19 uses
Scan (org.apache.hadoop.hbase.client.Scan): 9 uses
IOException (java.io.IOException): 6 uses
Test (org.junit.Test): 6 uses
Map (java.util.Map): 5 uses
Result (org.apache.hadoop.hbase.client.Result): 5 uses
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 5 uses
Table (org.apache.hadoop.hbase.client.Table): 5 uses
Permission (org.apache.hadoop.hbase.security.access.Permission): 4 uses
TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn): 4 uses
NavigableSet (java.util.NavigableSet): 3 uses
Path (org.apache.hadoop.fs.Path): 3 uses
Connection (org.apache.hadoop.hbase.client.Connection): 3 uses
DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException): 3 uses
FilterList (org.apache.hadoop.hbase.filter.FilterList): 3 uses
Delete (org.apache.hadoop.hbase.client.Delete): 2 uses
Filter (org.apache.hadoop.hbase.filter.Filter): 2 uses
ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter): 2 uses
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 2 uses
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 2 uses