Example 1 with RowFilter

Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache.

From the class HBaseReadWrite, method printRolesForUsers.

List<String> printRolesForUsers(String regex) throws IOException {
    // The RowFilter applies the regex to the row key, which here is the user name.
    Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
    Iterator<Result> iter = scan(USER_TO_ROLE_TABLE, null, null, CATALOG_CF, CATALOG_COL, filter);
    List<String> lines = new ArrayList<>();
    while (iter.hasNext()) {
        Result result = iter.next();
        lines.add(new String(result.getRow(), HBaseUtils.ENCODING) + ": " +
            org.apache.commons.lang.StringUtils.join(
                HBaseUtils.deserializeRoleList(result.getValue(CATALOG_CF, CATALOG_COL)), ','));
    }
    if (lines.size() == 0)
        lines = noMatch(regex, "user");
    return lines;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) BloomFilter(org.apache.hive.common.util.BloomFilter) ArrayList(java.util.ArrayList) Result(org.apache.hadoop.hbase.client.Result)
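
Note that scan(...) here is a private convenience method of HBaseReadWrite, not HBase API. Its body is not shown on this page; judging from the call sites, the two nulls are presumably start and stop row keys, and a minimal sketch of such a wrapper might look like the following (the signature mirrors the call above, but the body is an assumption, not the actual Hive code; Example 5 below calls a shorter overload without the row-key bounds):

// Hypothetical sketch of the scan(...) helper used at the call sites on this page.
private Iterator<Result> scan(String table, byte[] startRow, byte[] stopRow,
                              byte[] colFam, byte[] col, Filter filter) throws IOException {
    @SuppressWarnings("deprecation")
    HTableInterface htab = conn.getHBaseTable(table);
    Scan s = new Scan();
    if (startRow != null) s.setStartRow(startRow);   // inclusive lower bound
    if (stopRow != null) s.setStopRow(stopRow);      // exclusive upper bound
    if (col != null) s.addColumn(colFam, col);       // restrict to the one catalog column
    if (filter != null) s.setFilter(filter);         // e.g. the RowFilter built above
    return htab.getScanner(s).iterator();
}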

Example 2 with RowFilter

Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache.

From the class HBaseReadWrite, method printTables.

/**
   * Print tables.
   * @param regex regular expression to use to find the tables.  Remember that the dbname is
   *              part of each table name.
   * @return tables as strings
   * @throws IOException
   * @throws TException
   */
List<String> printTables(String regex) throws IOException, TException {
    Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
    @SuppressWarnings("deprecation") HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
    Scan scan = new Scan();
    scan.addColumn(CATALOG_CF, CATALOG_COL);
    scan.addFamily(STATS_CF);
    scan.setFilter(filter);
    Iterator<Result> iter = htab.getScanner(scan).iterator();
    if (!iter.hasNext())
        return noMatch(regex, "table");
    List<String> lines = new ArrayList<>();
    while (iter.hasNext()) {
        lines.add(printOneTable(iter.next()));
    }
    return lines;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) BloomFilter(org.apache.hive.common.util.BloomFilter) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)
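
The RowFilter/RegexStringComparator pairing above is plain HBase client API and can be exercised outside Hive. A minimal standalone sketch follows; the table name "demo" and the regex are illustrative, and it uses the Connection/Table API of HBase 1.x rather than the deprecated HTableInterface seen in the Hive code:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RegexRowScan {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("demo"))) { // illustrative table name
            // Keep only rows whose key matches the Java regular expression.
            Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL,
                new RegexStringComparator("^db1\\..*"));
            Scan scan = new Scan();
            scan.setFilter(filter);
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result r : scanner) {
                    System.out.println(Bytes.toString(r.getRow()));
                }
            }
        }
    }
}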

Example 3 with RowFilter

Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache.

From the class HBaseReadWrite, method scanPartitionsInternal.

private PartitionScanInfo scanPartitionsInternal(String dbName, String tableName, List<String> partVals, int maxPartitions) throws IOException, NoSuchObjectException {
    // First, build as much of the key as we can so that we make the scan as tight as possible.
    List<String> keyElements = new ArrayList<>();
    keyElements.add(dbName);
    keyElements.add(tableName);
    int firstStar = -1;
    for (int i = 0; i < partVals.size(); i++) {
        if ("*".equals(partVals.get(i))) {
            firstStar = i;
            break;
        } else {
            // An empty string also means star: stop extending the key prefix here.
            if (partVals.get(i).equals("")) {
                break;
            } else {
                keyElements.add(partVals.get(i));
            }
        }
    }
    byte[] keyPrefix;
    // We need to fetch the table to determine if the user fully specified the partitions or
    // not, as it affects how we build the key.
    Table table = getTable(dbName, tableName);
    if (table == null) {
        throw new NoSuchObjectException("Unable to find table " + dbName + "." + tableName);
    }
    keyPrefix = HBaseUtils.buildPartitionKey(dbName, tableName,
        HBaseUtils.getPartitionKeyTypes(table.getPartitionKeys().subList(0, keyElements.size() - 2)),
        keyElements.subList(2, keyElements.size()));
    // Now, build a filter out of the remaining keys
    List<PartitionKeyComparator.Range> ranges = new ArrayList<>();
    List<Operator> ops = new ArrayList<>();
    if (!(partVals.size() == table.getPartitionKeys().size() && firstStar == -1)) {
        for (int i = Math.max(0, firstStar); i < table.getPartitionKeys().size() && i < partVals.size(); i++) {
            if ("*".equals(partVals.get(i))) {
                PartitionKeyComparator.Operator op = new PartitionKeyComparator.Operator(
                    PartitionKeyComparator.Operator.Type.LIKE,
                    table.getPartitionKeys().get(i).getName(), ".*");
                ops.add(op);
            } else {
                PartitionKeyComparator.Range range = new PartitionKeyComparator.Range(
                    table.getPartitionKeys().get(i).getName(),
                    new PartitionKeyComparator.Mark(partVals.get(i), true),
                    new PartitionKeyComparator.Mark(partVals.get(i), true));
                ranges.add(range);
            }
        }
    }
    Filter filter = null;
    if (!ranges.isEmpty() || !ops.isEmpty()) {
        filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new PartitionKeyComparator(
            StringUtils.join(HBaseUtils.getPartitionNames(table.getPartitionKeys()), ","),
            StringUtils.join(HBaseUtils.getPartitionKeyTypes(table.getPartitionKeys()), ","),
            ranges, ops));
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Scanning partitions with prefix <" + new String(keyPrefix) + "> and filter <" + filter + ">");
    }
    return new PartitionScanInfo(dbName, tableName, keyPrefix, HBaseUtils.getEndPrefix(keyPrefix), maxPartitions, filter);
}
Also used : Operator(org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator) Table(org.apache.hadoop.hive.metastore.api.Table) ArrayList(java.util.ArrayList) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) BloomFilter(org.apache.hive.common.util.BloomFilter) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
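
HBaseUtils.getEndPrefix(keyPrefix) supplies the exclusive stop row for the prefix scan. The standard technique for deriving it is to increment the last incrementable byte of the prefix; a sketch of that technique follows (it may not match Hive's exact implementation):

import java.util.Arrays;

// Smallest byte[] that sorts after every row key starting with the prefix,
// i.e. the exclusive end of the prefix scan range.
static byte[] getEndPrefix(byte[] keyPrefix) {
    if (keyPrefix == null) return null;
    byte[] end = Arrays.copyOf(keyPrefix, keyPrefix.length);
    for (int i = end.length - 1; i >= 0; i--) {
        if (end[i] != (byte) 0xFF) {
            end[i]++;                                 // bump the last incrementable byte
            return Arrays.copyOfRange(end, 0, i + 1); // drop the trailing 0xFF bytes
        }
    }
    return null; // prefix is all 0xFF: scan to the end of the table
}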

Example 4 with RowFilter

Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache.

From the class HBaseReadWrite, method scanRoles.

private List<Role> scanRoles(String regex) throws IOException {
    // A null regex means no row filtering: every role is returned.
    Filter filter = null;
    if (regex != null) {
        filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
    }
    Iterator<Result> iter = scan(ROLE_TABLE, null, null, CATALOG_CF, CATALOG_COL, filter);
    List<Role> roles = new ArrayList<>();
    while (iter.hasNext()) {
        Result result = iter.next();
        roles.add(HBaseUtils.deserializeRole(result.getRow(), result.getValue(CATALOG_CF, CATALOG_COL)));
    }
    return roles;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) Role(org.apache.hadoop.hive.metastore.api.Role) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) BloomFilter(org.apache.hive.common.util.BloomFilter) ArrayList(java.util.ArrayList) Result(org.apache.hadoop.hbase.client.Result)
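
Worth noting: RegexStringComparator also accepts java.util.regex.Pattern flags through a two-argument constructor, so a case-insensitive role search would look roughly like this (an illustrative variant, not part of the Hive code):

import java.util.regex.Pattern;

// Case-insensitive row-key match; "admin.*" is an illustrative regex.
Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL,
    new RegexStringComparator("admin.*", Pattern.CASE_INSENSITIVE));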

Example 5 with RowFilter

Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache.

From the class HBaseReadWrite, method scanDatabases.

/**
   * Get a list of databases.
   * @param regex Regular expression to use in searching for database names.  It is expected to
   *              be a Java regular expression.  If it is null then all databases will be returned.
   * @return list of databases matching the regular expression.
   * @throws IOException
   */
List<Database> scanDatabases(String regex) throws IOException {
    Filter filter = null;
    if (regex != null) {
        filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
    }
    Iterator<Result> iter = scan(DB_TABLE, CATALOG_CF, CATALOG_COL, filter);
    List<Database> databases = new ArrayList<>();
    while (iter.hasNext()) {
        Result result = iter.next();
        databases.add(HBaseUtils.deserializeDatabase(result.getRow(), result.getValue(CATALOG_CF, CATALOG_COL)));
    }
    return databases;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) BloomFilter(org.apache.hive.common.util.BloomFilter) ArrayList(java.util.ArrayList) Database(org.apache.hadoop.hive.metastore.api.Database) Result(org.apache.hadoop.hbase.client.Result)
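
FirstKeyOnlyFilter and FilterList both appear in the aggregation counts below. A common pattern that combines them with the RowFilter used on this page is fetching only the matching row keys, without their values, to cut the data sent back to the client. A generic HBase sketch, not code from HBaseReadWrite:

// Return only the first cell of each row whose key matches the regex.
FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL,
    new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex)),
    new FirstKeyOnlyFilter());
Scan scan = new Scan();
scan.setFilter(filters);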

Aggregations

Classes most frequently used together with RowFilter across the indexed examples, with usage counts:

RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 39
Filter (org.apache.hadoop.hbase.filter.Filter): 31
RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator): 20
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 17
ArrayList (java.util.ArrayList): 15
FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter): 13
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 12
CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter): 11
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 11
Scan (org.apache.hadoop.hbase.client.Scan): 10
Test (org.junit.Test): 10
Result (org.apache.hadoop.hbase.client.Result): 9
BloomFilter (org.apache.hive.common.util.BloomFilter): 8
SchemaPath (org.apache.drill.common.expression.SchemaPath): 5
HBaseScanSpec (org.apache.drill.exec.store.hbase.HBaseScanSpec): 5
ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable): 5
CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp): 5
FilterList (org.apache.hadoop.hbase.filter.FilterList): 5
NullComparator (org.apache.hadoop.hbase.filter.NullComparator): 5
QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter): 4