
Example 1 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From the class HBaseReadWrite, the method removeRoleGrants:

/**
 * Remove all of the grants for a role.  This is not cheap.
 * @param roleName Role to remove from all other roles and grants
 * @throws IOException if the underlying HBase reads or writes fail
 */
void removeRoleGrants(String roleName) throws IOException {
    buildRoleCache();
    List<Put> puts = new ArrayList<>();
    // First, walk the role table and remove any references to this role
    for (Map.Entry<String, HbaseMetastoreProto.RoleGrantInfoList> e : roleCache.entrySet()) {
        boolean madeAChange = false;
        List<HbaseMetastoreProto.RoleGrantInfo> rgil = new ArrayList<>();
        rgil.addAll(e.getValue().getGrantInfoList());
        for (int i = 0; i < rgil.size(); i++) {
            // Drop the grant entry that names this role as a role-type principal
            if (HBaseUtils.convertPrincipalTypes(rgil.get(i).getPrincipalType()) == PrincipalType.ROLE && rgil.get(i).getPrincipalName().equals(roleName)) {
                rgil.remove(i);
                madeAChange = true;
                break;
            }
        }
        if (madeAChange) {
            Put put = new Put(HBaseUtils.buildKey(e.getKey()));
            HbaseMetastoreProto.RoleGrantInfoList proto = HbaseMetastoreProto.RoleGrantInfoList.newBuilder().addAllGrantInfo(rgil).build();
            put.add(CATALOG_CF, ROLES_COL, proto.toByteArray());
            puts.add(put);
            roleCache.put(e.getKey(), proto);
        }
    }
    if (!puts.isEmpty()) {
        HTableInterface htab = conn.getHBaseTable(ROLE_TABLE);
        htab.put(puts);
        conn.flush(htab);
    }
    // Remove any global privileges held by this role
    PrincipalPrivilegeSet global = getGlobalPrivs();
    if (global != null && global.getRolePrivileges() != null && global.getRolePrivileges().remove(roleName) != null) {
        putGlobalPrivs(global);
    }
    // Now, walk the db table
    puts.clear();
    List<Database> dbs = scanDatabases(null);
    // rare, but can happen
    if (dbs == null)
        dbs = new ArrayList<>();
    for (Database db : dbs) {
        if (db.getPrivileges() != null && db.getPrivileges().getRolePrivileges() != null && db.getPrivileges().getRolePrivileges().remove(roleName) != null) {
            byte[][] serialized = HBaseUtils.serializeDatabase(db);
            Put put = new Put(serialized[0]);
            put.add(CATALOG_CF, CATALOG_COL, serialized[1]);
            puts.add(put);
        }
    }
    if (!puts.isEmpty()) {
        HTableInterface htab = conn.getHBaseTable(DB_TABLE);
        htab.put(puts);
        conn.flush(htab);
    }
    // Finally, walk the table table
    puts.clear();
    for (Database db : dbs) {
        List<Table> tables = scanTables(db.getName(), null);
        if (tables != null) {
            for (Table table : tables) {
                if (table.getPrivileges() != null && table.getPrivileges().getRolePrivileges() != null && table.getPrivileges().getRolePrivileges().remove(roleName) != null) {
                    byte[][] serialized = HBaseUtils.serializeTable(table, HBaseUtils.hashStorageDescriptor(table.getSd(), md));
                    Put put = new Put(serialized[0]);
                    put.add(CATALOG_CF, CATALOG_COL, serialized[1]);
                    puts.add(put);
                }
            }
        }
    }
    if (!puts.isEmpty()) {
        HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
        htab.put(puts);
        conn.flush(htab);
    }
}
Also used: Table (org.apache.hadoop.hive.metastore.api.Table), PrincipalPrivilegeSet (org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet), ArrayList (java.util.ArrayList), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Put (org.apache.hadoop.hbase.client.Put), Database (org.apache.hadoop.hive.metastore.api.Database), Map (java.util.Map), NavigableMap (java.util.NavigableMap), HashMap (java.util.HashMap)
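The method above repeats one pattern three times: accumulate Puts for a table, then write and flush them in a single batch. Below is a minimal standalone sketch of that pattern against the plain HBase 0.98-era client API; the connection setup, the "role" table name, and the column coordinates are hypothetical stand-ins, not Hive's actual wiring (Hive routes these calls through its own HBaseConnection wrapper).

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchPutSketch {

    private static final byte[] CF = Bytes.toBytes("catalog"); // hypothetical column family
    private static final byte[] COL = Bytes.toBytes("roles");  // hypothetical column

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HConnection conn = HConnectionManager.createConnection(conf);
        // Accumulate all edits first, then send them in one call, as
        // removeRoleGrants does for each table it touches.
        List<Put> puts = new ArrayList<>();
        for (String roleKey : new String[] { "admins", "readers" }) {
            Put put = new Put(Bytes.toBytes(roleKey));
            put.add(CF, COL, Bytes.toBytes("serialized-grant-list"));
            puts.add(put);
        }
        if (!puts.isEmpty()) {
            HTableInterface htab = conn.getTable("role"); // hypothetical table name
            try {
                htab.put(puts);
                // conn.flush(htab) in the Hive wrapper plays the same role
                htab.flushCommits();
            } finally {
                htab.close();
            }
        }
        conn.close();
    }
}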

Example 2 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From the class HBaseReadWrite, the method getTables:

/**
 * Fetch a list of table objects.
 * @param dbName Database that all fetched tables are in
 * @param tableNames list of table names
 * @return list of tables, in the same order as the provided names
 * @throws IOException if the underlying HBase reads fail
 */
List<Table> getTables(String dbName, List<String> tableNames) throws IOException {
    // I could implement getTable in terms of this method.  But it is such a core function
    // that I don't want to slow it down for the much less common fetching of multiple tables.
    List<Table> results = new ArrayList<>(tableNames.size());
    ObjectPair<String, String>[] hashKeys = new ObjectPair[tableNames.size()];
    boolean atLeastOneMissing = false;
    for (int i = 0; i < tableNames.size(); i++) {
        hashKeys[i] = new ObjectPair<>(dbName, tableNames.get(i));
        // The result may be null, but we still want to add it so that we have a slot in the list
        // for it.
        results.add(tableCache.get(hashKeys[i]));
        if (results.get(i) == null)
            atLeastOneMissing = true;
    }
    if (!atLeastOneMissing)
        return results;
    // Now build a batch of Gets that will fetch the remaining tables in one call
    List<Get> gets = new ArrayList<>();
    HTableInterface htab = conn.getHBaseTable(TABLE_TABLE);
    for (int i = 0; i < tableNames.size(); i++) {
        if (results.get(i) != null)
            continue;
        byte[] key = HBaseUtils.buildKey(dbName, tableNames.get(i));
        Get g = new Get(key);
        g.addColumn(CATALOG_CF, CATALOG_COL);
        gets.add(g);
    }
    Result[] res = htab.get(gets);
    for (int i = 0, nextGet = 0; i < tableNames.size(); i++) {
        if (results.get(i) != null)
            continue;
        byte[] serialized = res[nextGet++].getValue(CATALOG_CF, CATALOG_COL);
        if (serialized != null) {
            HBaseUtils.StorageDescriptorParts sdParts = HBaseUtils.deserializeTable(dbName, tableNames.get(i), serialized);
            StorageDescriptor sd = getStorageDescriptor(sdParts.sdHash);
            HBaseUtils.assembleStorageDescriptor(sd, sdParts);
            tableCache.put(hashKeys[i], sdParts.containingTable);
            results.set(i, sdParts.containingTable);
        }
    }
    return results;
}
Also used: Table (org.apache.hadoop.hive.metastore.api.Table), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Result (org.apache.hadoop.hbase.client.Result), Get (org.apache.hadoop.hbase.client.Get), ObjectPair (org.apache.hadoop.hive.common.ObjectPair)
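getTables answers what it can from the table cache and batches Gets only for the misses, relying on HTableInterface.get(List&lt;Get&gt;) returning results in the same order as the requests. A minimal sketch of just that batched-Get step; the table and column names are hypothetical:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiGetSketch {

    public static void main(String[] args) throws IOException {
        byte[] cf = Bytes.toBytes("catalog"); // hypothetical family
        byte[] col = Bytes.toBytes("c");      // hypothetical column
        Configuration conf = HBaseConfiguration.create();
        HConnection conn = HConnectionManager.createConnection(conf);
        HTableInterface htab = conn.getTable("table_table"); // hypothetical name
        try {
            List<Get> gets = new ArrayList<>();
            for (String rowKey : new String[] { "db.t1", "db.t2" }) {
                Get g = new Get(Bytes.toBytes(rowKey));
                g.addColumn(cf, col); // fetch only the cell we need
                gets.add(g);
            }
            // Results come back positionally aligned with the Gets, which is
            // what lets getTables walk res[] with its separate nextGet cursor.
            Result[] res = htab.get(gets);
            for (Result r : res) {
                byte[] value = r.getValue(cf, col);
                // A missing row yields an empty Result, so getValue returns null.
                System.out.println(value == null ? "missing" : Bytes.toString(value));
            }
        } finally {
            htab.close();
            conn.close();
        }
    }
}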

Example 3 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From the class HBaseTestSetup, the method setUpFixtures:

private void setUpFixtures(HiveConf conf) throws Exception {
    /* We are not starting a ZooKeeper server here because
     * QTestUtil already starts one.
     */
    int zkPort = conf.getInt("hive.zookeeper.client.port", -1);
    if ((zkPort == zooKeeperPort) && (hbaseCluster != null)) {
        return;
    }
    zooKeeperPort = zkPort;
    String tmpdir = System.getProperty("test.tmp.dir");
    this.tearDown();
    conf.set("hbase.master", "local");
    hbaseRoot = "file:///" + tmpdir + "/hbase";
    conf.set("hbase.rootdir", hbaseRoot);
    conf.set("hbase.zookeeper.property.clientPort", Integer.toString(zooKeeperPort));
    Configuration hbaseConf = HBaseConfiguration.create(conf);
    hbaseConf.setInt("hbase.master.port", findFreePort());
    hbaseConf.setInt("hbase.master.info.port", -1);
    hbaseConf.setInt("hbase.regionserver.port", findFreePort());
    hbaseConf.setInt("hbase.regionserver.info.port", -1);
    hbaseCluster = new MiniHBaseCluster(hbaseConf, NUM_REGIONSERVERS);
    conf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort());
    hbaseConn = HConnectionManager.createConnection(hbaseConf);
    // opening the META table ensures that the cluster is running
    HTableInterface meta = null;
    try {
        meta = hbaseConn.getTable(TableName.META_TABLE_NAME);
    } finally {
        if (meta != null)
            meta.close();
    }
    createHBaseTable();
}
Also used: HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), Configuration (org.apache.hadoop.conf.Configuration), MiniHBaseCluster (org.apache.hadoop.hbase.MiniHBaseCluster), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)
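setUpFixtures calls findFreePort(), which is not shown on this page. A common implementation, and only a sketch of what a test harness like this might use, binds an ephemeral ServerSocket and returns whatever port the OS assigned:

import java.io.IOException;
import java.net.ServerSocket;

final class PortUtil {

    // Bind port 0 so the OS picks a currently free port, then release it.
    // Usual caveat: the port could be re-taken between this call and the
    // eventual bind by the HBase master or regionserver.
    static int findFreePort() throws IOException {
        ServerSocket ss = new ServerSocket(0);
        try {
            return ss.getLocalPort();
        } finally {
            ss.close();
        }
    }
}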

Example 4 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From the class HBaseReadWrite, the method read:

private byte[] read(String table, byte[] key, byte[] colFam, byte[] colName) throws IOException {
    HTableInterface htab = conn.getHBaseTable(table);
    Get g = new Get(key);
    g.addColumn(colFam, colName);
    Result res = htab.get(g);
    return res.getValue(colFam, colName);
}
Also used: Get (org.apache.hadoop.hbase.client.Get), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Result (org.apache.hadoop.hbase.client.Result)
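One subtlety worth noting: when the row or cell is absent, htab.get(g) returns an empty Result rather than null, so read() surfaces the miss as a null byte array. A standalone sketch of the same point-read, with the table lookup going through a plain HConnection instead of Hive's wrapper:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;

final class ReadSketch {

    static byte[] readCell(HConnection conn, String table, byte[] key,
                           byte[] colFam, byte[] colName) throws IOException {
        HTableInterface htab = conn.getTable(table);
        try {
            Get g = new Get(key);
            g.addColumn(colFam, colName); // restrict the Get to one cell
            Result res = htab.get(g);
            return res.getValue(colFam, colName); // null if the cell is absent
        } finally {
            htab.close(); // the Hive helper leaves table lifecycle to its wrapper
        }
    }
}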

Example 5 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project hive by apache.

From the class HBaseReadWrite, the method store:

/**********************************************************************************************
   * General access methods
   *********************************************************************************************/
private void store(String table, byte[] key, byte[] colFam, byte[] colName, byte[] obj) throws IOException {
    HTableInterface htab = conn.getHBaseTable(table);
    Put p = new Put(key);
    p.add(colFam, colName, obj);
    htab.put(p);
    conn.flush(htab);
}
Also used: HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Put (org.apache.hadoop.hbase.client.Put)
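A round-trip usage sketch combining the behavior of the store and read helpers: write a cell, flush, and read it back. Table and column names are hypothetical, and flushCommits() stands in for the conn.flush(htab) call in Hive's wrapper:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

final class StoreReadSketch {

    static void roundTrip(HConnection conn) throws IOException {
        byte[] key = Bytes.toBytes("db1.tbl1");          // hypothetical row key
        byte[] cf = Bytes.toBytes("cf");                 // hypothetical family
        byte[] col = Bytes.toBytes("c");                 // hypothetical column
        HTableInterface htab = conn.getTable("catalog"); // hypothetical table
        try {
            Put p = new Put(key);
            p.add(cf, col, Bytes.toBytes("payload"));
            htab.put(p);
            htab.flushCommits(); // make the write visible before reading it back
            Get g = new Get(key);
            g.addColumn(cf, col);
            Result r = htab.get(g);
            System.out.println(Bytes.toString(r.getValue(cf, col))); // "payload"
        } finally {
            htab.close();
        }
    }
}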

Aggregations

HTableInterface (org.apache.hadoop.hbase.client.HTableInterface): 117
Result (org.apache.hadoop.hbase.client.Result): 43
Put (org.apache.hadoop.hbase.client.Put): 41
IOException (java.io.IOException): 36
ArrayList (java.util.ArrayList): 26
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 23
Get (org.apache.hadoop.hbase.client.Get): 21
Scan (org.apache.hadoop.hbase.client.Scan): 21
Test (org.junit.Test): 20
SQLException (java.sql.SQLException): 19
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 17
Connection (java.sql.Connection): 15
HashMap (java.util.HashMap): 15
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 13
Delete (org.apache.hadoop.hbase.client.Delete): 12
Mutation (org.apache.hadoop.hbase.client.Mutation): 12
PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException): 11
ResultSet (java.sql.ResultSet): 10
Configuration (org.apache.hadoop.conf.Configuration): 9
ConnectionQueryServices (org.apache.phoenix.query.ConnectionQueryServices): 9