Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache:
class HBaseReadWrite, method removeRoleGrants.
  /**
   * Remove all of the grants for a role. This is not cheap.
   * @param roleName Role to remove from all other roles and grants
   * @throws IOException
   */
  void removeRoleGrants(String roleName) throws IOException {
    buildRoleCache();
    List<Put> puts = new ArrayList<>();
    // First, walk the role table and remove any references to this role
    for (Map.Entry<String, HbaseMetastoreProto.RoleGrantInfoList> e : roleCache.entrySet()) {
      boolean madeAChange = false;
      List<HbaseMetastoreProto.RoleGrantInfo> rgil = new ArrayList<>();
      rgil.addAll(e.getValue().getGrantInfoList());
      for (int i = 0; i < rgil.size(); i++) {
        if (HBaseUtils.convertPrincipalTypes(rgil.get(i).getPrincipalType()) == PrincipalType.ROLE &&
            rgil.get(i).getPrincipalName().equals(roleName)) {
          rgil.remove(i);
          madeAChange = true;
          break;
        }
      }
      if (madeAChange) {
        Put put = new Put(HBaseUtils.buildKey(e.getKey()));
        HbaseMetastoreProto.RoleGrantInfoList proto =
            HbaseMetastoreProto.RoleGrantInfoList.newBuilder().addAllGrantInfo(rgil).build();
        put.add(CATALOG_CF, ROLES_COL, proto.toByteArray());
        puts.add(put);
        roleCache.put(e.getKey(), proto);
      }
    }
    if (puts.size() > 0) {
      HTableInterface htab = conn.getHBaseTable(ROLE_TABLE);
      htab.put(puts);
      conn.flush(htab);
    }
    // Remove any global privileges held by this role
    PrincipalPrivilegeSet global = getGlobalPrivs();
    if (global != null && global.getRolePrivileges() != null &&
        global.getRolePrivileges().remove(roleName) != null) {
      putGlobalPrivs(global);
    }
    // Now, walk the db table
    puts.clear();
    List<Database> dbs = scanDatabases(null);
    // rare, but can happen
    if (dbs == null) dbs = new ArrayList<>();
    for (Database db : dbs) {
      if (db.getPrivileges() != null && db.getPrivileges().getRolePrivileges() != null &&
          db.getPrivileges().getRolePrivileges().remove(roleName) != null) {
        byte[][] serialized = HBaseUtils.serializeDatabase(db);
        Put put = new Put(serialized[0]);
        put.add(CATALOG_CF, CATALOG_COL, serialized[1]);
        puts.add(put);
      }
    }
    if (puts.size() > 0) {
      HTableInterface htab = conn.getHBaseTable(DB_TABLE);
      htab.put(puts);
      conn.flush(htab);
    }
    // Finally, walk the table table
    puts.clear();
    for (Database db : dbs) {
      List<Table> tables = scanTables(db.getName(), null);
      if (tables != null) {
        for (Table table : tables) {
          if (table.getPrivileges() != null && table.getPrivileges().getRolePrivileges() != null &&
              table.getPrivileges().getRolePrivileges().remove(roleName) != null) {
            byte[][] serialized = HBaseUtils.serializeTable(table,
                HBaseUtils.hashStorageDescriptor(table.getSd(), md));
            Put put = new Put(serialized[0]);
            put.add(CATALOG_CF, CATALOG_COL, serialized[1]);
            puts.add(put);
          }
        }
      }
    }
    if (puts.size() > 0) {
      HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
      htab.put(puts);
      conn.flush(htab);
    }
  }
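The same write pattern appears three times in this method: accumulate one Put per changed row, fetch the HTableInterface from the connection, write the whole batch, and flush. Below is a minimal, self-contained sketch of that pattern using the HBase 0.98/1.x-era client directly; it assumes a plain HConnection (conn.getHBaseTable and conn.flush above are Hive wrappers that presumably delegate to the same client calls), and the table, family, and qualifier names are hypothetical.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;

public class BatchedWriteSketch {
  private final HConnection hbaseConn;  // assumed to be created elsewhere

  public BatchedWriteSketch(HConnection hbaseConn) {
    this.hbaseConn = hbaseConn;
  }

  /** Accumulate one Put per row, then write and flush them as a single batch. */
  void writeBatch(List<byte[]> rowKeys, byte[] family, byte[] qualifier, byte[] value)
      throws IOException {
    List<Put> puts = new ArrayList<>();
    for (byte[] rowKey : rowKeys) {
      Put put = new Put(rowKey);
      put.add(family, qualifier, value);  // old-client call; later versions use addColumn
      puts.add(put);
    }
    if (!puts.isEmpty()) {
      HTableInterface htab = hbaseConn.getTable("example_table");  // hypothetical table name
      try {
        htab.put(puts);        // send the whole batch in one call
        htab.flushCommits();   // push any buffered mutations to the region servers
      } finally {
        htab.close();
      }
    }
  }
}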
Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache:
class HBaseReadWrite, method getTables.
  /**
   * Fetch a list of table objects.
   * @param dbName Database that all fetched tables are in
   * @param tableNames list of table names
   * @return list of tables, in the same order as the provided names.
   * @throws IOException
   */
  List<Table> getTables(String dbName, List<String> tableNames) throws IOException {
    // I could implement getTable in terms of this method. But it is such a core function
    // that I don't want to slow it down for the much less common fetching of multiple tables.
    List<Table> results = new ArrayList<>(tableNames.size());
    ObjectPair<String, String>[] hashKeys = new ObjectPair[tableNames.size()];
    boolean atLeastOneMissing = false;
    for (int i = 0; i < tableNames.size(); i++) {
      hashKeys[i] = new ObjectPair<>(dbName, tableNames.get(i));
      // The result may be null, but we still want to add it so that we have a slot in the list
      // for it.
      results.add(tableCache.get(hashKeys[i]));
      if (results.get(i) == null) atLeastOneMissing = true;
    }
    if (!atLeastOneMissing) return results;
    // Now build a single get that will fetch the remaining tables
    List<Get> gets = new ArrayList<>();
    HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
    for (int i = 0; i < tableNames.size(); i++) {
      if (results.get(i) != null) continue;
      byte[] key = HBaseUtils.buildKey(dbName, tableNames.get(i));
      Get g = new Get(key);
      g.addColumn(CATALOG_CF, CATALOG_COL);
      gets.add(g);
    }
    Result[] res = htab.get(gets);
    for (int i = 0, nextGet = 0; i < tableNames.size(); i++) {
      if (results.get(i) != null) continue;
      byte[] serialized = res[nextGet++].getValue(CATALOG_CF, CATALOG_COL);
      if (serialized != null) {
        HBaseUtils.StorageDescriptorParts sdParts =
            HBaseUtils.deserializeTable(dbName, tableNames.get(i), serialized);
        StorageDescriptor sd = getStorageDescriptor(sdParts.sdHash);
        HBaseUtils.assembleStorageDescriptor(sd, sdParts);
        tableCache.put(hashKeys[i], sdParts.containingTable);
        results.set(i, sdParts.containingTable);
      }
    }
    return results;
  }
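The method skips cache hits and then issues one batched read for the misses: HTableInterface.get(List<Get>) returns a Result[] parallel to the list of Gets, which is why nextGet only advances for the rows that were actually fetched. A minimal sketch of that batched-read pattern follows; the family and qualifier names are hypothetical, and the caller is assumed to supply an already-opened HTableInterface.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiGetSketch {
  private static final byte[] CF = Bytes.toBytes("catalog");   // hypothetical family
  private static final byte[] COL = Bytes.toBytes("cat");      // hypothetical qualifier

  /**
   * Fetch one cell for each row key in a single round trip. The returned list is
   * parallel to rowKeys; an entry is null when the row or cell does not exist.
   */
  static List<byte[]> fetchAll(HTableInterface htab, List<byte[]> rowKeys) throws IOException {
    List<Get> gets = new ArrayList<>(rowKeys.size());
    for (byte[] rowKey : rowKeys) {
      Get g = new Get(rowKey);
      g.addColumn(CF, COL);  // restrict the Get to the one column we need
      gets.add(g);
    }
    Result[] results = htab.get(gets);  // results come back in the same order as the Gets
    List<byte[]> values = new ArrayList<>(results.length);
    for (Result r : results) {
      values.add(r.getValue(CF, COL));  // null when the row or cell is missing
    }
    return values;
  }
}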
Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache:
class HBaseTestSetup, method setUpFixtures.
  private void setUpFixtures(HiveConf conf) throws Exception {
    /* We are not starting zookeeper server here because
     * QTestUtil already starts it.
     */
    int zkPort = conf.getInt("hive.zookeeper.client.port", -1);
    if ((zkPort == zooKeeperPort) && (hbaseCluster != null)) {
      return;
    }
    zooKeeperPort = zkPort;
    String tmpdir = System.getProperty("test.tmp.dir");
    this.tearDown();
    conf.set("hbase.master", "local");
    hbaseRoot = "file:///" + tmpdir + "/hbase";
    conf.set("hbase.rootdir", hbaseRoot);
    conf.set("hbase.zookeeper.property.clientPort", Integer.toString(zooKeeperPort));
    Configuration hbaseConf = HBaseConfiguration.create(conf);
    hbaseConf.setInt("hbase.master.port", findFreePort());
    hbaseConf.setInt("hbase.master.info.port", -1);
    hbaseConf.setInt("hbase.regionserver.port", findFreePort());
    hbaseConf.setInt("hbase.regionserver.info.port", -1);
    hbaseCluster = new MiniHBaseCluster(hbaseConf, NUM_REGIONSERVERS);
    conf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort());
    hbaseConn = HConnectionManager.createConnection(hbaseConf);
    // opening the META table ensures that cluster is running
    HTableInterface meta = null;
    try {
      meta = hbaseConn.getTable(TableName.META_TABLE_NAME);
    } finally {
      if (meta != null) meta.close();
    }
    createHBaseTable();
  }
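The liveness check here is simply that opening hbase:meta blocks until the mini cluster is serving requests. A hedged, self-contained sketch of that probe plus creating a small test table follows; it uses the same 0.98/1.x-era client API as above, the table and column family names are made up, and createHBaseTable() above presumably does something along these lines.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTableInterface;

public class MiniClusterProbeSketch {
  static void probeAndCreate(Configuration hbaseConf) throws IOException {
    HConnection conn = HConnectionManager.createConnection(hbaseConf);
    // Opening hbase:meta is a cheap way to block until the cluster is up and serving.
    HTableInterface meta = null;
    try {
      meta = conn.getTable(TableName.META_TABLE_NAME);
    } finally {
      if (meta != null) meta.close();
    }
    // Once the cluster responds, create a small test table (names are hypothetical).
    HBaseAdmin admin = new HBaseAdmin(hbaseConf);
    try {
      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("test_table"));
      desc.addFamily(new HColumnDescriptor("cf"));
      admin.createTable(desc);
    } finally {
      admin.close();
    }
    conn.close();
  }
}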
Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache:
class HBaseReadWrite, method read.
  private byte[] read(String table, byte[] key, byte[] colFam, byte[] colName) throws IOException {
    HTableInterface htab = conn.getHBaseTable(table);
    Get g = new Get(key);
    g.addColumn(colFam, colName);
    Result res = htab.get(g);
    return res.getValue(colFam, colName);
  }
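A hypothetical call site from inside the same class, showing that the helper returns null when the row or cell is absent. TABLE_TABLE, CATALOG_CF, and CATALOG_COL are the class's own constants; the database and table names here are made up for illustration.

    byte[] key = HBaseUtils.buildKey("default", "example_tbl");  // made-up names
    byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL);
    if (serialized == null) {
      // row missing or cell empty; callers treat this as "not found"
    }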
Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache:
class HBaseReadWrite, method store.
  /**********************************************************************************************
   * General access methods
   *********************************************************************************************/
  private void store(String table, byte[] key, byte[] colFam, byte[] colName, byte[] obj) throws IOException {
    HTableInterface htab = conn.getHBaseTable(table);
    Put p = new Put(key);
    p.add(colFam, colName, obj);
    htab.put(p);
    conn.flush(htab);
  }
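A hypothetical round trip through store() and read() from inside the same class. The key and value are made up, and Bytes is org.apache.hadoop.hbase.util.Bytes.

    byte[] key = HBaseUtils.buildKey("default", "example_tbl");    // made-up names
    byte[] value = Bytes.toBytes("serialized-object-bytes");       // placeholder payload
    store(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL, value);       // single Put + flush
    byte[] back = read(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL); // single Get
    assert Bytes.equals(value, back);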