Search in sources :

Example 6 with PrivilegeBag

Use of org.apache.hadoop.hive.metastore.api.PrivilegeBag in the Apache Hive project.

From the class AbstractTestAuthorizationApiAuthorizer, method testRevokePriv.

/**
 * Runs a {@code revoke_privileges} call with an empty PrivilegeBag through
 * {@code testFunction}, which drives the shared invoker-based check for this
 * metastore API call.
 */
@Test
public void testRevokePriv() throws Exception {
    FunctionInvoker revokeCall = new FunctionInvoker() {

        @Override
        public void invoke() throws Exception {
            // Empty privilege list; second argument is the grantOption flag.
            msc.revoke_privileges(new PrivilegeBag(new ArrayList<HiveObjectPrivilege>()), false);
        }
    };
    testFunction(revokeCall);
}
Also used : PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 7 with PrivilegeBag

Use of org.apache.hadoop.hive.metastore.api.PrivilegeBag in the Apache Hive project.

From the class TestHBaseStoreIntegration, method listGlobalGrants.

/**
 * Verifies listing of global privilege grants: a per-principal lookup returns
 * exactly that principal's grant, principals without grants get empty results,
 * and {@code listGlobalGrantsAll} returns every grant.
 */
@Test
public void listGlobalGrants() throws Exception {
    String[] roleNames = new String[] { "lgg_role1", "lgg_role2" };
    String[] userNames = new String[] { "merry", "pippen" };
    store.addRole(roleNames[0], "me");
    store.addRole(roleNames[1], "me");
    int grantTime = (int) (System.currentTimeMillis() / 1000);
    Role firstRole = store.getRole(roleNames[0]);
    Role secondRole = store.getRole(roleNames[1]);
    // role1 -> first user, role2 -> role1 (with grant option), role2 -> second user.
    store.grantRole(firstRole, userNames[0], PrincipalType.USER, "bob", PrincipalType.USER, false);
    store.grantRole(firstRole, roleNames[1], PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
    store.grantRole(secondRole, userNames[1], PrincipalType.USER, "bob", PrincipalType.USER, false);
    // Two global privileges: "read" for the first user, "write" for the first role.
    HiveObjectRef globalRef = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null);
    List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
    privs.add(new HiveObjectPrivilege(globalRef, userNames[0], PrincipalType.USER,
        new PrivilegeGrantInfo("read", grantTime, "me", PrincipalType.USER, false)));
    privs.add(new HiveObjectPrivilege(globalRef, roleNames[0], PrincipalType.ROLE,
        new PrivilegeGrantInfo("write", grantTime, "me", PrincipalType.USER, true)));
    store.grantPrivileges(new PrivilegeBag(privs));
    // The granted role sees exactly its "write" privilege.
    List<HiveObjectPrivilege> found = store.listPrincipalGlobalGrants(roleNames[0], PrincipalType.ROLE);
    Assert.assertEquals(1, found.size());
    Assert.assertEquals(PrincipalType.ROLE, found.get(0).getPrincipalType());
    Assert.assertEquals(HiveObjectType.GLOBAL, found.get(0).getHiveObject().getObjectType());
    Assert.assertEquals("write", found.get(0).getGrantInfo().getPrivilege());
    // The granted user sees exactly its "read" privilege.
    found = store.listPrincipalGlobalGrants(userNames[0], PrincipalType.USER);
    Assert.assertEquals(1, found.size());
    Assert.assertEquals(PrincipalType.USER, found.get(0).getPrincipalType());
    Assert.assertEquals(HiveObjectType.GLOBAL, found.get(0).getHiveObject().getObjectType());
    Assert.assertEquals("read", found.get(0).getGrantInfo().getPrivilege());
    // Principals that were never granted a global privilege come back empty.
    found = store.listPrincipalGlobalGrants(roleNames[1], PrincipalType.ROLE);
    Assert.assertEquals(0, found.size());
    found = store.listPrincipalGlobalGrants(userNames[1], PrincipalType.USER);
    Assert.assertEquals(0, found.size());
    // Listing everything returns both grants; order is not assumed.
    found = store.listGlobalGrantsAll();
    Assert.assertEquals(2, found.size());
    boolean userSeen = false;
    boolean roleSeen = false;
    for (HiveObjectPrivilege p : found) {
        if (p.getPrincipalName().equals(userNames[0])) {
            Assert.assertEquals(PrincipalType.USER, p.getPrincipalType());
            Assert.assertEquals(HiveObjectType.GLOBAL, p.getHiveObject().getObjectType());
            Assert.assertEquals("read", p.getGrantInfo().getPrivilege());
            userSeen = true;
        } else if (p.getPrincipalName().equals(roleNames[0])) {
            Assert.assertEquals(PrincipalType.ROLE, p.getPrincipalType());
            Assert.assertEquals(HiveObjectType.GLOBAL, p.getHiveObject().getObjectType());
            Assert.assertEquals("write", p.getGrantInfo().getPrivilege());
            roleSeen = true;
        }
    }
    Assert.assertTrue(userSeen && roleSeen);
}
Also used : Role(org.apache.hadoop.hive.metastore.api.Role) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) PrivilegeGrantInfo(org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 8 with PrivilegeBag

Use of org.apache.hadoop.hive.metastore.api.PrivilegeBag in the Apache Hive project.

From the class TestHBaseSchemaTool, method oneMondoTest.

/**
 * Exercises the HBaseSchemaTool dump path for each metastore-backed HBase table
 * (storage descriptors, sequences, databases, roles, user-to-role mappings,
 * functions, global privileges, tables, partitions, security entries), asserting
 * the exact serialized output for both single-key lookups and regex scans.
 * Everything lives in one test because the assertions are order-dependent:
 * the "empty table" checks must run before any data is created.
 */
@Test
public void oneMondoTest() throws Exception {
    // This is a pain to do in one big test, but we have to control the order so that we have tests
    // without dbs, etc.
    HBaseSchemaTool tool = new HBaseSchemaTool();
    ByteArrayOutputStream outStr = new ByteArrayOutputStream();
    PrintStream out = new PrintStream(outStr);
    ByteArrayOutputStream errStr = new ByteArrayOutputStream();
    PrintStream err = new PrintStream(errStr);
    // This needs to be up front before we create any tables or partitions
    tool.go(false, HBaseReadWrite.SD_TABLE, null, "whatever", conf, out, err);
    Assert.assertEquals("No storage descriptors" + lsep, outStr.toString());
    // This one needs to be up front too
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.SEQUENCES_TABLE, null, "whatever", conf, out, err);
    Assert.assertEquals("No sequences" + lsep, outStr.toString());
    // Create some databases
    String[] dbNames = new String[3];
    for (int i = 0; i < dbNames.length; i++) {
        dbNames[i] = "db" + i;
        Database db = new Database(dbNames[i], "no description", "file:///tmp", emptyParameters);
        store.createDatabase(db);
    }
    // Single-key lookup of one database, then regex scans (".*" matches all, a
    // character class matches a subset).
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.DB_TABLE, "db0", null, conf, out, err);
    Assert.assertEquals("{\"name\":\"db0\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.DB_TABLE, null, ".*", conf, out, err);
    Assert.assertEquals("{\"name\":\"db0\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db1\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db2\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.DB_TABLE, null, "db[12]", conf, out, err);
    Assert.assertEquals("{\"name\":\"db1\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db2\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep, outStr.toString());
    // Roles: create two, dump by regex and by key. createTime is nondeterministic,
    // so the assertions normalize it to the literal "now" before comparing.
    String[] roleNames = new String[2];
    for (int i = 0; i < roleNames.length; i++) {
        roleNames[i] = "role" + i;
        store.addRole(roleNames[i], "me");
    }
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.ROLE_TABLE, null, "role.", conf, out, err);
    Assert.assertEquals("{\"roleName\":\"role0\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep + "{\"roleName\":\"role1\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep, outStr.toString().replaceAll("createTime\":[0-9]+", "createTime\":now"));
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.ROLE_TABLE, "role1", null, conf, out, err);
    Assert.assertEquals("{\"roleName\":\"role1\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep, outStr.toString().replaceAll("createTime\":[0-9]+", "createTime\":now"));
    // Grant role1 to two users so the user-to-role table has entries to dump.
    Role role1 = store.getRole("role1");
    store.grantRole(role1, "fred", PrincipalType.USER, "me", PrincipalType.USER, false);
    store.grantRole(role1, "joanne", PrincipalType.USER, "me", PrincipalType.USER, false);
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.USER_TO_ROLE_TABLE, null, ".*", conf, out, err);
    Assert.assertEquals("fred: role1" + lsep + "joanne: role1" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.USER_TO_ROLE_TABLE, "joanne", null, conf, out, err);
    Assert.assertEquals("role1" + lsep, outStr.toString());
    // Functions: three in db1; keys are "<db>.<func>" so both lookup and regex
    // forms include the database name.
    String[] funcNames = new String[3];
    for (int i = 0; i < funcNames.length; i++) {
        funcNames[i] = "func" + i;
        Function function = new Function(funcNames[i], "db1", "Function", "me", PrincipalType.USER, 0, FunctionType.JAVA, null);
        store.createFunction(function);
    }
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.FUNC_TABLE, "db1.func0", null, conf, out, err);
    Assert.assertEquals("{\"functionName\":\"func0\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.FUNC_TABLE, null, ".*", conf, out, err);
    Assert.assertEquals("{\"functionName\":\"func0\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func1\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func2\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.FUNC_TABLE, null, "db1.func[12]", conf, out, err);
    Assert.assertEquals("{\"functionName\":\"func1\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func2\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep, outStr.toString());
    // Global privileges: empty first, then two user-level grants for "user".
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.GLOBAL_PRIVS_TABLE, null, null, conf, out, err);
    Assert.assertEquals("No global privileges" + lsep, outStr.toString());
    List<HiveObjectPrivilege> privileges = new ArrayList<>();
    HiveObjectRef hiveObjRef = new HiveObjectRef(HiveObjectType.GLOBAL, "db0", "tab0", null, null);
    PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo("read", 0, "me", PrincipalType.USER, false);
    HiveObjectPrivilege hop = new HiveObjectPrivilege(hiveObjRef, "user", PrincipalType.USER, grantInfo);
    privileges.add(hop);
    grantInfo = new PrivilegeGrantInfo("create", 0, "me", PrincipalType.USER, true);
    hop = new HiveObjectPrivilege(hiveObjRef, "user", PrincipalType.USER, grantInfo);
    privileges.add(hop);
    PrivilegeBag pBag = new PrivilegeBag(privileges);
    store.grantPrivileges(pBag);
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.GLOBAL_PRIVS_TABLE, null, null, conf, out, err);
    Assert.assertEquals("{\"userPrivileges\":{\"user\":[{\"privilege\":\"read\",\"createTime\":0," + "\"grantor\":\"me\",\"grantorType\":1,\"grantOption\":0},{\"privilege\":\"create\"," + "\"createTime\":0,\"grantor\":\"me\",\"grantorType\":1,\"grantOption\":1}]}}" + lsep, outStr.toString());
    // Tables: three partitioned tables in db0 sharing an identical storage
    // descriptor. NOTE(review): "qQTgZAi5VzgpozzFGmIVTQ" appears to be the hash
    // key under which that shared SD is stored — confirm it stays stable if SD
    // serialization ever changes.
    String[] tableNames = new String[3];
    for (int i = 0; i < tableNames.length; i++) {
        tableNames[i] = "tab" + i;
        StorageDescriptor sd = new StorageDescriptor(Arrays.asList(new FieldSchema("col1", "int", ""), new FieldSchema("col2", "varchar(32)", "")), "/tmp", null, null, false, 0, null, null, null, Collections.<String, String>emptyMap());
        Table tab = new Table(tableNames[i], dbNames[0], "me", 0, 0, 0, sd, Arrays.asList(new FieldSchema("pcol1", "string", ""), new FieldSchema("pcol2", "string", "")), Collections.<String, String>emptyMap(), null, null, null);
        store.createTable(tab);
    }
    // Attach column statistics (long stats for col1, string stats for col2) to
    // tab0 only, so the dumps distinguish tables with and without stats.
    ColumnStatisticsDesc tableStatsDesc = new ColumnStatisticsDesc(false, "db0", "tab0");
    ColumnStatisticsData tcsd = new ColumnStatisticsData();
    LongColumnStatsData tlcsd = new LongColumnStatsData(1, 2);
    tlcsd.setLowValue(-95);
    tlcsd.setHighValue(95);
    tcsd.setLongStats(tlcsd);
    ColumnStatisticsData tcsd2 = new ColumnStatisticsData();
    tcsd2.setStringStats(new StringColumnStatsData(97, 18.78, 29, 397));
    List<ColumnStatisticsObj> tcsos = Arrays.asList(new ColumnStatisticsObj("col1", "int", tcsd), new ColumnStatisticsObj("col2", "varchar(32)", tcsd2));
    ColumnStatistics tStatObj = new ColumnStatistics(tableStatsDesc, tcsos);
    store.updateTableColumnStatistics(tStatObj);
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.TABLE_TABLE, "db0.tab1", null, conf, out, err);
    Assert.assertEquals("{\"tableName\":\"tab1\",\"dbName\":\"db0\",\"owner\":\"me\"," + "\"createTime\":0,\"lastAccessTime\":0,\"retention\":0," + "\"partitionKeys\":[{\"name\":\"pcol1\",\"type\":\"string\",\"comment\":\"\"}," + "{\"name\":\"pcol2\",\"type\":\"string\",\"comment\":\"\"}],\"parameters\":{}," + "\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.TABLE_TABLE, null, "db0.*", conf, out, err);
    Assert.assertEquals("{\"tableName\":\"tab0\",\"dbName\":\"db0\",\"owner\":\"me\"," + "\"createTime\":0,\"lastAccessTime\":0,\"retention\":0," + "\"partitionKeys\":[{\"name\":\"pcol1\",\"type\":\"string\",\"comment\":\"\"}," + "{\"name\":\"pcol2\",\"type\":\"string\",\"comment\":\"\"}],\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}," + "\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats: column " + "col1: {\"colName\":\"col1\",\"colType\":\"int\"," + "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1," + "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\"," + "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78," + "\"numNulls\":29,\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep + "{\"tableName\":\"tab1\",\"dbName\":\"db0\",\"owner\":\"me\",\"createTime\":0," + "\"lastAccessTime\":0,\"retention\":0,\"partitionKeys\":[{\"name\":\"pcol1\"," + "\"type\":\"string\",\"comment\":\"\"},{\"name\":\"pcol2\",\"type\":\"string\"," + "\"comment\":\"\"}],\"parameters\":{},\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep + "{\"tableName\":\"tab2\",\"dbName\":\"db0\",\"owner\":\"me\",\"createTime\":0," + "\"lastAccessTime\":0,\"retention\":0,\"partitionKeys\":[{\"name\":\"pcol1\"," + "\"type\":\"string\",\"comment\":\"\"},{\"name\":\"pcol2\",\"type\":\"string\"," + "\"comment\":\"\"}],\"parameters\":{},\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep, outStr.toString());
    // Partitions: two partitions of db0.tab1; partition keys form part of the
    // lookup key ("db0.tab1.a.b").
    List<List<String>> partVals = Arrays.asList(Arrays.asList("a", "b"), Arrays.asList("c", "d"));
    for (List<String> pv : partVals) {
        StorageDescriptor sd = new StorageDescriptor(Arrays.asList(new FieldSchema("col1", "int", ""), new FieldSchema("col2", "varchar(32)", "")), "/tmp", null, null, false, 0, null, null, null, Collections.<String, String>emptyMap());
        Partition p = new Partition(pv, "db0", "tab1", 0, 0, sd, Collections.<String, String>emptyMap());
        store.addPartition(p);
    }
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.PART_TABLE, "db0.tab1.a.b", null, conf, out, err);
    Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep, outStr.toString());
    // Partition-level column statistics on the "c"/"d" partition only.
    ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(false, "db0", "tab1");
    statsDesc.setPartName("pcol1=c/pcol2=d");
    ColumnStatisticsData csd1 = new ColumnStatisticsData();
    LongColumnStatsData lcsd = new LongColumnStatsData(1, 2);
    lcsd.setLowValue(-95);
    lcsd.setHighValue(95);
    csd1.setLongStats(lcsd);
    ColumnStatisticsData csd2 = new ColumnStatisticsData();
    csd2.setStringStats(new StringColumnStatsData(97, 18.78, 29, 397));
    List<ColumnStatisticsObj> csos = Arrays.asList(new ColumnStatisticsObj("col1", "int", csd1), new ColumnStatisticsObj("col2", "varchar(32)", csd2));
    ColumnStatistics statsObj = new ColumnStatistics(statsDesc, csos);
    store.updatePartitionColumnStatistics(statsObj, partVals.get(1));
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.PART_TABLE, "db0.tab1.c.d", null, conf, out, err);
    Assert.assertEquals("{\"values\":[\"c\",\"d\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats: column col1: {\"colName\":\"col1\",\"colType\":\"int\"," + "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1," + "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\"," + "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,\"numNulls\":29," + "\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.PART_TABLE, null, "db0.tab1", conf, out, err);
    Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats:" + lsep + "{\"values\":[\"c\",\"d\"],\"dbName\":\"db0\",\"tableName\":\"tab1\",\"createTime\":0," + "\"lastAccessTime\":0,\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats: column " + "col1: {\"colName\":\"col1\",\"colType\":\"int\"," + "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1," + "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\"," + "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,\"numNulls\":29," + "\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    // Prefix regex matching only the first partition.
    tool.go(false, HBaseReadWrite.PART_TABLE, null, "db0.tab1.a", conf, out, err);
    Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats:" + lsep, outStr.toString());
    // Storage-descriptor table: lookup by the shared hash, then a full scan.
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.SD_TABLE, "qQTgZAi5VzgpozzFGmIVTQ", null, conf, out, err);
    Assert.assertEquals("{\"cols\":[{\"name\":\"col1\",\"type\":\"int\",\"comment\":\"\"}," + "{\"name\":\"col2\",\"type\":\"varchar(32)\",\"comment\":\"\"}],\"compressed\":0," + "\"numBuckets\":0,\"bucketCols\":[],\"sortCols\":[],\"storedAsSubDirectories\":0}" + lsep, outStr.toString());
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.SD_TABLE, null, "whatever", conf, out, err);
    Assert.assertEquals("qQTgZAi5VzgpozzFGmIVTQ: {\"cols\":[{\"name\":\"col1\",\"type\":\"int\"," + "\"comment\":\"\"}," + "{\"name\":\"col2\",\"type\":\"varchar(32)\",\"comment\":\"\"}],\"compressed\":0," + "\"numBuckets\":0,\"bucketCols\":[],\"sortCols\":[],\"storedAsSubDirectories\":0}" + lsep, outStr.toString());
    // Security table: empty first, then a master key plus a delegation token.
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.SECURITY_TABLE, null, "whatever", conf, out, err);
    Assert.assertEquals("No security related entries" + lsep, outStr.toString());
    store.addMasterKey("this be a key");
    store.addToken("tokenid", "delegation token");
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.SECURITY_TABLE, null, "whatever", conf, out, err);
    Assert.assertEquals("Master key 0: this be a key" + lsep + "Delegation token tokenid: delegation token" + lsep, outStr.toString());
    // Sequences table now holds the master-key counter created above.
    outStr = new ByteArrayOutputStream();
    out = new PrintStream(outStr);
    tool.go(false, HBaseReadWrite.SEQUENCES_TABLE, null, "whatever", conf, out, err);
    Assert.assertEquals("master_key: 1" + lsep, outStr.toString());
}
Also used : PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Function(org.apache.hadoop.hive.metastore.api.Function) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) Database(org.apache.hadoop.hive.metastore.api.Database) ArrayList(java.util.ArrayList) List(java.util.List) ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) PrintStream(java.io.PrintStream) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) PrivilegeGrantInfo(org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) ByteArrayOutputStream(java.io.ByteArrayOutputStream) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) Role(org.apache.hadoop.hive.metastore.api.Role) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) Test(org.junit.Test)

Example 9 with PrivilegeBag

Use of org.apache.hadoop.hive.metastore.api.PrivilegeBag in the Apache Hive project.

From the class TestHBaseStoreIntegration, method doGrantRevoke.

/**
 * Shared grant/revoke exercise for a given object type. Grants a set of
 * privileges to users and roles, verifies them via getPPS, then checks three
 * revocation paths: removing a role drops its grants, revoking with
 * grantOption=true strips only the grant option, and revoking with
 * grantOption=false removes the privilege itself.
 *
 * @param objectType the HiveObjectType being granted against (e.g. DB, TABLE)
 * @param dbName     database name for the object ref (may be null for GLOBAL)
 * @param tableName  table name for the object ref (may be null)
 * @param roleNames  at least two role names to create and grant
 * @param userNames  at least four user names; [3] must receive no grants
 */
private void doGrantRevoke(HiveObjectType objectType, String dbName, String tableName, String[] roleNames, String[] userNames) throws Exception {
    // Role membership: role0 -> user0, role1 -> role0 (so user0 transitively
    // holds role1), role1 -> user1.
    store.addRole(roleNames[0], "me");
    store.addRole(roleNames[1], "me");
    int now = (int) (System.currentTimeMillis() / 1000);
    Role role1 = store.getRole(roleNames[0]);
    Role role2 = store.getRole(roleNames[1]);
    store.grantRole(role1, userNames[0], PrincipalType.USER, "bob", PrincipalType.USER, false);
    store.grantRole(role1, roleNames[1], PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
    store.grantRole(role2, userNames[1], PrincipalType.USER, "bob", PrincipalType.USER, false);
    // Five privileges: "read" to user0, "write" to role0 (grant option),
    // "exec" to role1, "create" and "create2" to user2 (both with grant option).
    List<HiveObjectPrivilege> privileges = new ArrayList<HiveObjectPrivilege>();
    HiveObjectRef hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo("read", now, "me", PrincipalType.USER, false);
    HiveObjectPrivilege hop = new HiveObjectPrivilege(hiveObjRef, userNames[0], PrincipalType.USER, grantInfo);
    privileges.add(hop);
    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true);
    hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo);
    privileges.add(hop);
    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    grantInfo = new PrivilegeGrantInfo("exec", now, "me", PrincipalType.USER, false);
    hop = new HiveObjectPrivilege(hiveObjRef, roleNames[1], PrincipalType.ROLE, grantInfo);
    privileges.add(hop);
    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    grantInfo = new PrivilegeGrantInfo("create", now, "me", PrincipalType.USER, true);
    hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo);
    privileges.add(hop);
    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    grantInfo = new PrivilegeGrantInfo("create2", now, "me", PrincipalType.USER, true);
    hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo);
    privileges.add(hop);
    PrivilegeBag pBag = new PrivilegeBag(privileges);
    store.grantPrivileges(pBag);
    // user0: one direct "read" privilege plus privileges via role0 and (through
    // role nesting) role1.
    PrincipalPrivilegeSet pps = getPPS(objectType, dbName, tableName, userNames[0]);
    Assert.assertEquals(1, pps.getUserPrivilegesSize());
    Assert.assertEquals(1, pps.getUserPrivileges().get(userNames[0]).size());
    grantInfo = pps.getUserPrivileges().get(userNames[0]).get(0);
    Assert.assertEquals("read", grantInfo.getPrivilege());
    Assert.assertTrue(now <= grantInfo.getCreateTime());
    Assert.assertEquals("me", grantInfo.getGrantor());
    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
    Assert.assertFalse(grantInfo.isGrantOption());
    Assert.assertEquals(2, pps.getRolePrivilegesSize());
    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size());
    grantInfo = pps.getRolePrivileges().get(roleNames[0]).get(0);
    Assert.assertEquals("write", grantInfo.getPrivilege());
    Assert.assertTrue(now <= grantInfo.getCreateTime());
    Assert.assertEquals("me", grantInfo.getGrantor());
    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
    Assert.assertTrue(grantInfo.isGrantOption());
    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[1]).size());
    grantInfo = pps.getRolePrivileges().get(roleNames[1]).get(0);
    Assert.assertEquals("exec", grantInfo.getPrivilege());
    Assert.assertTrue(now <= grantInfo.getCreateTime());
    Assert.assertEquals("me", grantInfo.getGrantor());
    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
    Assert.assertFalse(grantInfo.isGrantOption());
    // user1: no direct privileges, only "exec" via role1.
    pps = getPPS(objectType, dbName, tableName, userNames[1]);
    Assert.assertEquals(0, pps.getUserPrivilegesSize());
    Assert.assertEquals(1, pps.getRolePrivilegesSize());
    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[1]).size());
    grantInfo = pps.getRolePrivileges().get(roleNames[1]).get(0);
    Assert.assertEquals("exec", grantInfo.getPrivilege());
    Assert.assertTrue(now <= grantInfo.getCreateTime());
    Assert.assertEquals("me", grantInfo.getGrantor());
    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
    Assert.assertFalse(grantInfo.isGrantOption());
    // user2: two direct privileges, no role privileges; user3: nothing.
    pps = getPPS(objectType, dbName, tableName, userNames[2]);
    Assert.assertEquals(1, pps.getUserPrivilegesSize());
    Assert.assertEquals(2, pps.getUserPrivileges().get(userNames[2]).size());
    Assert.assertEquals(0, pps.getRolePrivilegesSize());
    pps = getPPS(objectType, dbName, tableName, userNames[3]);
    Assert.assertEquals(0, pps.getUserPrivilegesSize());
    Assert.assertEquals(0, pps.getRolePrivilegesSize());
    // Test that removing role removes the role grants
    store.removeRole(roleNames[1]);
    checkRoleRemovedFromAllPrivileges(objectType, dbName, tableName, roleNames[1]);
    pps = getPPS(objectType, dbName, tableName, userNames[0]);
    Assert.assertEquals(1, pps.getRolePrivilegesSize());
    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size());
    pps = getPPS(objectType, dbName, tableName, userNames[1]);
    Assert.assertEquals(0, pps.getRolePrivilegesSize());
    // Test that revoking with grant option = true just removes grant option
    privileges.clear();
    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true);
    hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo);
    privileges.add(hop);
    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
    grantInfo = new PrivilegeGrantInfo("create2", now, "me", PrincipalType.USER, true);
    hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo);
    privileges.add(hop);
    pBag = new PrivilegeBag(privileges);
    store.revokePrivileges(pBag, true);
    // "write" and "create2" survive but lose their grant option; "create"
    // (not in the revoke bag) keeps its grant option.
    pps = getPPS(objectType, dbName, tableName, userNames[0]);
    Assert.assertEquals(1, pps.getRolePrivilegesSize());
    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size());
    grantInfo = pps.getRolePrivileges().get(roleNames[0]).get(0);
    Assert.assertEquals("write", grantInfo.getPrivilege());
    Assert.assertTrue(now <= grantInfo.getCreateTime());
    Assert.assertEquals("me", grantInfo.getGrantor());
    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
    Assert.assertFalse(grantInfo.isGrantOption());
    pps = getPPS(objectType, dbName, tableName, userNames[2]);
    Assert.assertEquals(1, pps.getUserPrivilegesSize());
    Assert.assertEquals(2, pps.getUserPrivileges().get(userNames[2]).size());
    for (PrivilegeGrantInfo pgi : pps.getUserPrivileges().get(userNames[2])) {
        if (pgi.getPrivilege().equals("create"))
            Assert.assertTrue(pgi.isGrantOption());
        else if (pgi.getPrivilege().equals("create2"))
            Assert.assertFalse(pgi.isGrantOption());
        else
            Assert.fail("huh?");
    }
    // Test revoking revokes
    store.revokePrivileges(pBag, false);
    pps = getPPS(objectType, dbName, tableName, userNames[0]);
    Assert.assertEquals(1, pps.getUserPrivilegesSize());
    Assert.assertEquals(1, pps.getRolePrivilegesSize());
    Assert.assertEquals(0, pps.getRolePrivileges().get(roleNames[0]).size());
    pps = getPPS(objectType, dbName, tableName, userNames[2]);
    Assert.assertEquals(1, pps.getUserPrivilegesSize());
    Assert.assertEquals(1, pps.getUserPrivileges().get(userNames[2]).size());
    Assert.assertEquals("create", pps.getUserPrivileges().get(userNames[2]).get(0).getPrivilege());
    Assert.assertEquals(0, pps.getRolePrivilegesSize());
}
Also used : Role(org.apache.hadoop.hive.metastore.api.Role) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) PrivilegeGrantInfo(org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo) PrincipalPrivilegeSet(org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) ArrayList(java.util.ArrayList)

Example 10 with PrivilegeBag

Use of org.apache.hadoop.hive.metastore.api.PrivilegeBag in the Apache Hive project.

From the class HiveV1Authorizer, method toPrivilegeBag.

/**
 * Converts a list of {@code HivePrivilege}s granted on a single object into a Thrift
 * {@link PrivilegeBag} suitable for the metastore grant/revoke APIs.
 *
 * <p>Dispatches on the scope of {@code privObject}: a {@code null} or GLOBAL type produces
 * user-level (GLOBAL) privilege entries; otherwise the database, table, partition, or column
 * scope is resolved against the metastore via {@link Hive}.
 *
 * @param privileges  privileges to convert; an empty list yields an empty bag
 * @param privObject  the object the privileges apply to (scope, db, table, partition keys)
 * @param grantor     principal granting the privileges; its name/type are stamped on each entry
 * @param grantOption whether the grantee may further grant these privileges
 * @return a bag containing one {@code HiveObjectPrivilege} per privilege (per column, when
 *         column sets are present)
 * @throws HiveException if columns are supplied at an unsupported scope, the database does not
 *         exist, partition-level grant-option is requested, or metastore lookups fail
 */
private PrivilegeBag toPrivilegeBag(List<HivePrivilege> privileges, HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption) throws HiveException {
    PrivilegeBag privBag = new PrivilegeBag();
    if (privileges.isEmpty()) {
        return privBag;
    }
    String grantorName = grantor.getName();
    PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
    // User-level (GLOBAL) privileges: no object hierarchy and no column sets are allowed.
    if (privObject.getType() == null || privObject.getType() == HivePrivilegeObject.HivePrivilegeObjectType.GLOBAL) {
        for (HivePrivilege priv : privileges) {
            List<String> columns = priv.getColumns();
            if (columns != null && !columns.isEmpty()) {
                throw new HiveException("For user-level privileges, column sets should be null. columns=" + columns.toString());
            }
            // Use the cached grantorName for consistency with the object-scoped branches below.
            privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null), null, null, new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
        }
        return privBag;
    }
    if (privObject.getPartKeys() != null && grantOption) {
        throw new HiveException("Grant does not support partition level.");
    }
    Hive hive = Hive.getWithFastCheck(this.conf);
    Database dbObj = hive.getDatabase(privObject.getDbname());
    if (dbObj == null) {
        throw new HiveException("Database " + privObject.getDbname() + " does not exist");
    }
    Table tableObj = null;
    if (privObject.getObjectName() != null) {
        tableObj = hive.getTable(dbObj.getName(), privObject.getObjectName());
    }
    // Resolve the concrete partition values when a partition spec was supplied.
    List<String> partValues = null;
    if (tableObj != null) {
        if ((!tableObj.isPartitioned()) && privObject.getPartKeys() != null) {
            throw new HiveException("Table is not partitioned, but partition name is present: partSpec=" + privObject.getPartKeys());
        }
        if (privObject.getPartKeys() != null) {
            Map<String, String> partSpec = Warehouse.makeSpecFromValues(tableObj.getPartitionKeys(), privObject.getPartKeys());
            Partition partObj = hive.getPartition(tableObj, partSpec, false).getTPartition();
            partValues = partObj.getValues();
        }
    }
    for (HivePrivilege priv : privileges) {
        List<String> columns = priv.getColumns();
        if (columns != null && !columns.isEmpty()) {
            // Column-level: one HiveObjectPrivilege per column, all on the same table/partition.
            if (!priv.supportsScope(PrivilegeScope.COLUMN_LEVEL_SCOPE)) {
                throw new HiveException(priv.getName() + " does not support column level privilege.");
            }
            if (tableObj == null) {
                throw new HiveException("For user-level/database-level privileges, column sets should be null. columns=" + columns);
            }
            for (String column : columns) {
                privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.COLUMN, dbObj.getName(), tableObj.getTableName(), partValues, column), null, null, new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
            }
        } else if (tableObj == null) {
            // No table resolved: the privilege applies to the whole database.
            privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.DATABASE, dbObj.getName(), null, null, null), null, null, new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
        } else if (partValues == null) {
            // Table-level privilege (no partition spec given).
            privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.TABLE, dbObj.getName(), tableObj.getTableName(), null, null), null, null, new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
        } else {
            // Partition-level privilege on the resolved partition values.
            privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.PARTITION, dbObj.getName(), tableObj.getTableName(), partValues, null), null, null, new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
        }
    }
    return privBag;
}
Also used : PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) Partition(org.apache.hadoop.hive.metastore.api.Partition) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Table(org.apache.hadoop.hive.ql.metadata.Table) PrivilegeGrantInfo(org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) Hive(org.apache.hadoop.hive.ql.metadata.Hive) Database(org.apache.hadoop.hive.metastore.api.Database) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType)

Aggregations

PrivilegeBag (org.apache.hadoop.hive.metastore.api.PrivilegeBag)13 HiveObjectPrivilege (org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege)8 ArrayList (java.util.ArrayList)7 HiveObjectRef (org.apache.hadoop.hive.metastore.api.HiveObjectRef)7 PrivilegeGrantInfo (org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo)7 Test (org.junit.Test)6 Role (org.apache.hadoop.hive.metastore.api.Role)5 Database (org.apache.hadoop.hive.metastore.api.Database)4 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)3 HiveAuthzPluginException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException)3 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)2 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)2 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)2 Partition (org.apache.hadoop.hive.metastore.api.Partition)2 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)2 Table (org.apache.hadoop.hive.metastore.api.Table)2 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)2 HiveAccessControlException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException)2 TException (org.apache.thrift.TException)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1