Example 11 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project Gaffer by gchq.

The class TableUtils, method deleteAllRows.

public static void deleteAllRows(final HBaseStore store, final String... auths) throws StoreException {
    final Connection connection = store.getConnection();
    try {
        if (connection.getAdmin().tableExists(store.getTableName())) {
            connection.getAdmin().flush(store.getTableName());
            final Table table = connection.getTable(store.getTableName());
            final Scan scan = new Scan();
            scan.setAuthorizations(new Authorizations(auths));
            try (ResultScanner scanner = table.getScanner(scan)) {
                final List<Delete> deletes = new ArrayList<>();
                for (final Result result : scanner) {
                    deletes.add(new Delete(result.getRow()));
                }
                table.delete(deletes);
                connection.getAdmin().flush(store.getTableName());
            }
            try (ResultScanner scanner = table.getScanner(scan)) {
                if (scanner.iterator().hasNext()) {
                    throw new StoreException("Some rows in table " + store.getTableName() + " failed to delete");
                }
            }
        }
    } catch (final IOException e) {
        throw new StoreException("Failed to delete all rows in table " + store.getTableName(), e);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Connection(org.apache.hadoop.hbase.client.Connection) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result) ValidationResult(uk.gov.gchq.koryphe.ValidationResult) StoreException(uk.gov.gchq.gaffer.store.StoreException)
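
A minimal sketch, not taken from the Gaffer source, of how the method above might be invoked from a test tear-down. The HBaseStore instance and the auth strings "public" and "private" are illustrative assumptions.

// Hedged sketch: clear a Gaffer HBase table using the caller's visibility auths.
// The auth strings are placeholders, not values from the original code.
private static void clearTable(final HBaseStore store) throws StoreException {
    TableUtils.deleteAllRows(store, "public", "private");
}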

Example 12 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

The class TestImportTSVWithVisibilityLabels, method validateTable.

/**
 * Confirm the ImportTsv output by reading the data back from the online table.
 */
private static void validateTable(Configuration conf, TableName tableName, String family, int valueMultiplier) throws IOException {
    LOG.debug("Validating table.");
    Table table = util.getConnection().getTable(tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
        try {
            Scan scan = new Scan();
            // Scan entire family.
            scan.addFamily(Bytes.toBytes(family));
            scan.setAuthorizations(new Authorizations("secret", "private"));
            ResultScanner resScanner = table.getScanner(scan);
            Result[] next = resScanner.next(5);
            assertEquals(1, next.length);
            for (Result res : resScanner) {
                LOG.debug("Getting results " + res.size());
                assertTrue(res.size() == 2);
                List<Cell> kvs = res.listCells();
                assertTrue(CellUtil.matchingRows(kvs.get(0), Bytes.toBytes("KEY")));
                assertTrue(CellUtil.matchingRows(kvs.get(1), Bytes.toBytes("KEY")));
                assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
                assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
            // Only one result set is expected, so let it loop.
            }
            verified = true;
            break;
        } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a while and retry.
        }
        try {
            Thread.sleep(pause);
        } catch (InterruptedException e) {
        // continue
        }
    }
    table.close();
    assertTrue(verified);
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Result(org.apache.hadoop.hbase.client.Result) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
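
The same scan-with-labels pattern can be pulled into a small helper. This is a hedged sketch rather than code from the HBase test; the family and label arguments are assumptions supplied by the caller.

// Hedged sketch: count the cells visible under the supplied visibility labels.
private static int countVisibleCells(final Table table, final String family, final String... labels) throws IOException {
    final Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes(family));
    scan.setAuthorizations(new Authorizations(labels));
    int cells = 0;
    try (ResultScanner scanner = table.getScanner(scan)) {
        for (final Result result : scanner) {
            cells += result.size();
        }
    }
    return cells;
}

For example, countVisibleCells(table, "d", "secret", "private") would only count cells whose visibility expressions are satisfied by those two labels.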

Example 13 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

The class ScannerModel, method fromScan.

/**
 * Builds a ScannerModel from the given client-side scan specification.
 * @param scan the scan specification
 * @throws Exception
 */
public static ScannerModel fromScan(Scan scan) throws Exception {
    ScannerModel model = new ScannerModel();
    model.setStartRow(scan.getStartRow());
    model.setEndRow(scan.getStopRow());
    Map<byte[], NavigableSet<byte[]>> families = scan.getFamilyMap();
    if (families != null) {
        for (Map.Entry<byte[], NavigableSet<byte[]>> entry : families.entrySet()) {
            if (entry.getValue() != null) {
                for (byte[] qualifier : entry.getValue()) {
                    model.addColumn(Bytes.add(entry.getKey(), COLUMN_DIVIDER, qualifier));
                }
            } else {
                model.addColumn(entry.getKey());
            }
        }
    }
    model.setStartTime(scan.getTimeRange().getMin());
    model.setEndTime(scan.getTimeRange().getMax());
    int caching = scan.getCaching();
    if (caching > 0) {
        model.setCaching(caching);
    }
    int batch = scan.getBatch();
    if (batch > 0) {
        model.setBatch(batch);
    }
    int maxVersions = scan.getMaxVersions();
    if (maxVersions > 0) {
        model.setMaxVersions(maxVersions);
    }
    if (scan.getLimit() > 0) {
        model.setLimit(scan.getLimit());
    }
    Filter filter = scan.getFilter();
    if (filter != null) {
        model.setFilter(stringifyFilter(filter));
    }
    // Add the visibility labels if found in the attributes
    Authorizations authorizations = scan.getAuthorizations();
    if (authorizations != null) {
        List<String> labels = authorizations.getLabels();
        for (String label : labels) {
            model.addLabel(label);
        }
    }
    return model;
}
Also used : NavigableSet(java.util.NavigableSet) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) InclusiveStopFilter(org.apache.hadoop.hbase.filter.InclusiveStopFilter) RandomRowFilter(org.apache.hadoop.hbase.filter.RandomRowFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) ColumnCountGetFilter(org.apache.hadoop.hbase.filter.ColumnCountGetFilter) SingleColumnValueExcludeFilter(org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter) WhileMatchFilter(org.apache.hadoop.hbase.filter.WhileMatchFilter) DependentColumnFilter(org.apache.hadoop.hbase.filter.DependentColumnFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) KeyOnlyFilter(org.apache.hadoop.hbase.filter.KeyOnlyFilter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) ColumnPrefixFilter(org.apache.hadoop.hbase.filter.ColumnPrefixFilter) ColumnPaginationFilter(org.apache.hadoop.hbase.filter.ColumnPaginationFilter) MultiRowRangeFilter(org.apache.hadoop.hbase.filter.MultiRowRangeFilter) ValueFilter(org.apache.hadoop.hbase.filter.ValueFilter) SkipFilter(org.apache.hadoop.hbase.filter.SkipFilter) TimestampsFilter(org.apache.hadoop.hbase.filter.TimestampsFilter) ColumnRangeFilter(org.apache.hadoop.hbase.filter.ColumnRangeFilter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) MultipleColumnPrefixFilter(org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) Map(java.util.Map)
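
A minimal sketch, using only the calls shown above, of feeding a labelled Scan into the REST model. The label names are placeholders and the helper method is hypothetical.

// Hedged sketch: the Authorizations set on the Scan end up as labels on the ScannerModel.
private static ScannerModel toModel() throws Exception {
    final Scan scan = new Scan();
    scan.setAuthorizations(new Authorizations("secret", "private"));
    return ScannerModel.fromScan(scan);
}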

Example 14 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

The class DefaultVisibilityExpressionResolver, method init.

@Override
public void init() {
    // Reading all the labels and their ordinals.
    // This scan should be done by a user with global_admin privileges. Ensure that it works.
    Table labelsTable = null;
    Connection connection = null;
    try {
        connection = ConnectionFactory.createConnection(conf);
        try {
            labelsTable = connection.getTable(LABELS_TABLE_NAME);
        } catch (IOException e) {
            LOG.error("Error opening 'labels' table", e);
            return;
        }
        Scan scan = new Scan();
        scan.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL));
        scan.addColumn(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
        ResultScanner scanner = null;
        try {
            scanner = labelsTable.getScanner(scan);
            Result next = null;
            while ((next = scanner.next()) != null) {
                byte[] row = next.getRow();
                byte[] value = next.getValue(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
                labels.put(Bytes.toString(value), Bytes.toInt(row));
            }
        } catch (TableNotFoundException e) {
            // Table not found. So just return
            return;
        } catch (IOException e) {
            LOG.error("Error scanning 'labels' table", e);
        } finally {
            if (scanner != null)
                scanner.close();
        }
    } catch (IOException ioe) {
        LOG.error("Failed reading 'labels' tags", ioe);
        return;
    } finally {
        if (labelsTable != null) {
            try {
                labelsTable.close();
            } catch (IOException ioe) {
                LOG.warn("Error closing 'labels' table", ioe);
            }
        }
        if (connection != null)
            try {
                connection.close();
            } catch (IOException ioe) {
                LOG.warn("Failed close of temporary connection", ioe);
            }
    }
}
Also used : TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Connection(org.apache.hadoop.hbase.client.Connection) Scan(org.apache.hadoop.hbase.client.Scan) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result)
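
The scan in init() can be restated more compactly with try-with-resources. This is a hedged sketch of the same logic, not a drop-in replacement for the resolver: the error handling above is omitted, and the conf and labels fields are assumed to be the ones declared on the original class.

// Hedged sketch: read the label ordinals from the labels table under the system authorization.
try (Connection connection = ConnectionFactory.createConnection(conf);
        Table labelsTable = connection.getTable(LABELS_TABLE_NAME)) {
    final Scan scan = new Scan();
    scan.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL));
    scan.addColumn(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
    try (ResultScanner scanner = labelsTable.getScanner(scan)) {
        for (final Result next : scanner) {
            labels.put(Bytes.toString(next.getValue(LABELS_TABLE_FAMILY, LABEL_QUALIFIER)), Bytes.toInt(next.getRow()));
        }
    }
}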

Example 15 with Authorizations

Use of org.apache.hadoop.hbase.security.visibility.Authorizations in project hbase by apache.

The class ExportUtils, method getScanFromCommandLine.

static Scan getScanFromCommandLine(Configuration conf, String[] args) throws IOException {
    Scan s = new Scan();
    // Optional arguments.
    // Set Scan Versions
    int versions = args.length > 2 ? Integer.parseInt(args[2]) : 1;
    s.readVersions(versions);
    // Set Scan Range
    long startTime = args.length > 3 ? Long.parseLong(args[3]) : 0L;
    long endTime = args.length > 4 ? Long.parseLong(args[4]) : Long.MAX_VALUE;
    s.setTimeRange(startTime, endTime);
    // Set cache blocks
    s.setCacheBlocks(false);
    // set Start and Stop row
    if (conf.get(TableInputFormat.SCAN_ROW_START) != null) {
        s.withStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START)));
    }
    if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) {
        s.withStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP)));
    }
    // Set Scan Column Family
    boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
    if (raw) {
        s.setRaw(raw);
    }
    for (String columnFamily : conf.getTrimmedStrings(TableInputFormat.SCAN_COLUMN_FAMILY)) {
        s.addFamily(Bytes.toBytes(columnFamily));
    }
    // Set RowFilter or Prefix Filter if applicable.
    Filter exportFilter = getExportFilter(args);
    if (exportFilter != null) {
        LOG.info("Setting Scan Filter for Export.");
        s.setFilter(exportFilter);
    }
    List<String> labels = null;
    if (conf.get(EXPORT_VISIBILITY_LABELS) != null) {
        labels = Arrays.asList(conf.getStrings(EXPORT_VISIBILITY_LABELS));
        if (!labels.isEmpty()) {
            s.setAuthorizations(new Authorizations(labels));
        }
    }
    int batching = conf.getInt(EXPORT_BATCHING, -1);
    if (batching != -1) {
        try {
            s.setBatch(batching);
        } catch (IncompatibleFilterException e) {
            LOG.error("Batching could not be set", e);
        }
    }
    int caching = conf.getInt(EXPORT_CACHING, 100);
    if (caching != -1) {
        try {
            s.setCaching(caching);
        } catch (IncompatibleFilterException e) {
            LOG.error("Caching could not be set", e);
        }
    }
    LOG.info("versions=" + versions + ", starttime=" + startTime + ", endtime=" + endTime + ", keepDeletedCells=" + raw + ", visibility labels=" + labels);
    return s;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) IncompatibleFilterException(org.apache.hadoop.hbase.filter.IncompatibleFilterException)
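
A hedged sketch of driving the method above from a caller in the same package. The argument order follows the snippet (table name, output directory, versions, start time), the label values are placeholders, and access to the EXPORT_VISIBILITY_LABELS constant from the call site is an assumption.

// Hedged sketch: build an export Scan restricted to two visibility labels.
Configuration conf = HBaseConfiguration.create();
conf.setStrings(EXPORT_VISIBILITY_LABELS, "secret", "private");
String[] args = new String[] { "my_table", "/tmp/export", "2", "0" };
Scan scan = ExportUtils.getScanFromCommandLine(conf, args);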

Aggregations

Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations): 19
Scan (org.apache.hadoop.hbase.client.Scan): 9
IOException (java.io.IOException): 6
Test (org.junit.Test): 6
Map (java.util.Map): 5
Result (org.apache.hadoop.hbase.client.Result): 5
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 5
Table (org.apache.hadoop.hbase.client.Table): 5
Permission (org.apache.hadoop.hbase.security.access.Permission): 4
TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn): 4
NavigableSet (java.util.NavigableSet): 3
Path (org.apache.hadoop.fs.Path): 3
Connection (org.apache.hadoop.hbase.client.Connection): 3
DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException): 3
FilterList (org.apache.hadoop.hbase.filter.FilterList): 3
Delete (org.apache.hadoop.hbase.client.Delete): 2
Filter (org.apache.hadoop.hbase.filter.Filter): 2
ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter): 2
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 2
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 2