Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache: class HBaseReadWrite, method printRolesForUsers.
List<String> printRolesForUsers(String regex) throws IOException {
  // Keep only rows whose key (the user name) matches the regular expression.
  Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
  Iterator<Result> iter = scan(USER_TO_ROLE_TABLE, null, null, CATALOG_CF, CATALOG_COL, filter);
  List<String> lines = new ArrayList<>();
  while (iter.hasNext()) {
    Result result = iter.next();
    lines.add(new String(result.getRow(), HBaseUtils.ENCODING) + ": " +
        org.apache.commons.lang.StringUtils.join(
            HBaseUtils.deserializeRoleList(result.getValue(CATALOG_CF, CATALOG_COL)), ','));
  }
  if (lines.size() == 0) lines = noMatch(regex, "user");
  return lines;
}
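Several of these snippets go through HBaseReadWrite's private scan(...) helper. For reference, the raw HBase client pattern that helper wraps looks roughly like the sketch below; the table name, column family, and qualifier are illustrative assumptions, not HBaseReadWrite's actual constants.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RowFilterScanSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("USER_TO_ROLE"))) { // hypothetical table name
      // Keep only rows whose key matches the regular expression.
      Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL,
          new RegexStringComparator("admin.*"));
      Scan scan = new Scan();
      scan.addColumn(Bytes.toBytes("c"), Bytes.toBytes("c")); // assumed CF and qualifier
      scan.setFilter(filter);
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(Bytes.toString(result.getRow()));
        }
      }
    }
  }
}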
Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache: class HBaseReadWrite, method printTables.
/**
 * Print tables.
 * @param regex regular expression to use to find the tables. Remember that
 *              the dbname is part of each table name.
 * @return tables as strings
 * @throws IOException
 * @throws TException
 */
List<String> printTables(String regex) throws IOException, TException {
  Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
  @SuppressWarnings("deprecation")
  HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
  Scan scan = new Scan();
  // Fetch the serialized table from the catalog column plus everything in the stats family.
  scan.addColumn(CATALOG_CF, CATALOG_COL);
  scan.addFamily(STATS_CF);
  scan.setFilter(filter);
  Iterator<Result> iter = htab.getScanner(scan).iterator();
  if (!iter.hasNext()) return noMatch(regex, "table");
  List<String> lines = new ArrayList<>();
  while (iter.hasNext()) {
    lines.add(printOneTable(iter.next()));
  }
  return lines;
}
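Because each row key in the table table begins with the database name, the regex has to cover (or wildcard) the db prefix. A hedged usage sketch, with hrw standing in for an already-constructed HBaseReadWrite instance:

// List every table in the "default" database; the leading "default" matches
// the db portion of the row key.
List<String> defaultTables = hrw.printTables("default.*");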
Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache: class HBaseReadWrite, method scanPartitionsInternal.
private PartitionScanInfo scanPartitionsInternal(String dbName, String tableName,
    List<String> partVals, int maxPartitions) throws IOException, NoSuchObjectException {
  // First, build as much of the key as we can so that we make the scan as tight as possible.
  List<String> keyElements = new ArrayList<>();
  keyElements.add(dbName);
  keyElements.add(tableName);
  int firstStar = -1;
  for (int i = 0; i < partVals.size(); i++) {
    if ("*".equals(partVals.get(i))) {
      firstStar = i;
      break;
    } else {
      // An empty partition value also ends the prefix we can build.
      if (partVals.get(i).equals("")) {
        break;
      } else {
        keyElements.add(partVals.get(i));
      }
    }
  }
  byte[] keyPrefix;
  // We need to fetch the table to determine if the user fully specified the partitions or
  // not, as it affects how we build the key.
  Table table = getTable(dbName, tableName);
  if (table == null) {
    throw new NoSuchObjectException("Unable to find table " + dbName + "." + tableName);
  }
  // The first two key elements are dbName and tableName, so only the remaining
  // elements are partition values.
  keyPrefix = HBaseUtils.buildPartitionKey(dbName, tableName,
      HBaseUtils.getPartitionKeyTypes(
          table.getPartitionKeys().subList(0, keyElements.size() - 2)),
      keyElements.subList(2, keyElements.size()));
  // Now, build a filter out of the remaining keys
  List<PartitionKeyComparator.Range> ranges = new ArrayList<PartitionKeyComparator.Range>();
  List<Operator> ops = new ArrayList<Operator>();
  if (!(partVals.size() == table.getPartitionKeys().size() && firstStar == -1)) {
    for (int i = Math.max(0, firstStar);
         i < table.getPartitionKeys().size() && i < partVals.size(); i++) {
      if ("*".equals(partVals.get(i))) {
        // A star matches any value for this partition column.
        PartitionKeyComparator.Operator op = new PartitionKeyComparator.Operator(
            PartitionKeyComparator.Operator.Type.LIKE,
            table.getPartitionKeys().get(i).getName(), ".*");
        ops.add(op);
      } else {
        // An explicit value becomes a degenerate range with equal inclusive bounds.
        PartitionKeyComparator.Range range = new PartitionKeyComparator.Range(
            table.getPartitionKeys().get(i).getName(),
            new PartitionKeyComparator.Mark(partVals.get(i), true),
            new PartitionKeyComparator.Mark(partVals.get(i), true));
        ranges.add(range);
      }
    }
  }
  Filter filter = null;
  if (!ranges.isEmpty() || !ops.isEmpty()) {
    filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new PartitionKeyComparator(
        StringUtils.join(HBaseUtils.getPartitionNames(table.getPartitionKeys()), ","),
        StringUtils.join(HBaseUtils.getPartitionKeyTypes(table.getPartitionKeys()), ","),
        ranges, ops));
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Scanning partitions with prefix <" + new String(keyPrefix) + "> and filter <"
        + filter + ">");
  }
  return new PartitionScanInfo(dbName, tableName, keyPrefix,
      HBaseUtils.getEndPrefix(keyPrefix), maxPartitions, filter);
}
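The returned PartitionScanInfo carries both the key prefix and HBaseUtils.getEndPrefix(keyPrefix), which bounds the scan to rows sharing that prefix. The usual way to derive such an exclusive stop row is to increment the last incrementable byte of the prefix; the helper below is a sketch of that technique, not necessarily Hive's exact implementation.

// Hypothetical stand-in for HBaseUtils.getEndPrefix: returns the smallest
// byte[] that sorts after every row key beginning with the given prefix.
static byte[] endPrefix(byte[] prefix) {
  byte[] end = java.util.Arrays.copyOf(prefix, prefix.length);
  for (int i = end.length - 1; i >= 0; i--) {
    if (end[i] != (byte) 0xFF) {
      end[i]++; // bump the last byte that can still be incremented
      return java.util.Arrays.copyOf(end, i + 1); // truncate after it
    }
  }
  // Every byte was 0xFF; an empty stop row tells HBase to scan to the end.
  return new byte[0];
}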
Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache: class HBaseReadWrite, method scanRoles.
private List<Role> scanRoles(String regex) throws IOException {
  // A null regex means every role should be returned, so scan unfiltered.
  Filter filter = null;
  if (regex != null) {
    filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
  }
  Iterator<Result> iter = scan(ROLE_TABLE, null, null, CATALOG_CF, CATALOG_COL, filter);
  List<Role> roles = new ArrayList<>();
  while (iter.hasNext()) {
    Result result = iter.next();
    roles.add(HBaseUtils.deserializeRole(result.getRow(),
        result.getValue(CATALOG_CF, CATALOG_COL)));
  }
  return roles;
}
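One behavior worth noting for all of these regex scans: RegexStringComparator evaluates the pattern with java.util.regex find() semantics, so an unanchored expression matches anywhere in the row key. The calls below are illustrative, not taken from Hive:

// "admin" matches role keys such as admin, superadmin, and administrator alike.
List<Role> loose = scanRoles("admin");
// Anchoring the pattern restricts it to whole-key matches.
List<Role> exact = scanRoles("^admin$");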
Use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache: class HBaseReadWrite, method scanDatabases.
/**
 * Get a list of databases.
 * @param regex Regular expression to use in searching for database names. It is expected to
 *              be a Java regular expression. If it is null then all databases will be returned.
 * @return list of databases matching the regular expression.
 * @throws IOException
 */
List<Database> scanDatabases(String regex) throws IOException {
  Filter filter = null;
  if (regex != null) {
    filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
  }
  Iterator<Result> iter = scan(DB_TABLE, CATALOG_CF, CATALOG_COL, filter);
  List<Database> databases = new ArrayList<>();
  while (iter.hasNext()) {
    Result result = iter.next();
    databases.add(HBaseUtils.deserializeDatabase(result.getRow(),
        result.getValue(CATALOG_CF, CATALOG_COL)));
  }
  return databases;
}
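A short usage sketch of the null-filter convention shared by scanRoles and scanDatabases, where hrw stands in for an already-constructed HBaseReadWrite (both the variable and the pattern are illustrative):

// Passing null skips the RowFilter entirely and returns every database.
List<Database> all = hrw.scanDatabases(null);
// Any Java regular expression narrows the scan to matching database names.
List<Database> testDbs = hrw.scanDatabases("test_.*");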