Search in sources:

Example 1 with SelectedRole

use of io.prestosql.spi.security.SelectedRole in project hetu-core by openlookeng.

This example is taken from the method testSetRole of the class TestHiveRoles.

@Test
public void testSetRole() {
    executeFromAdmin("CREATE ROLE set_role_1");
    executeFromAdmin("CREATE ROLE set_role_2");
    executeFromAdmin("CREATE ROLE set_role_3");
    executeFromAdmin("CREATE ROLE set_role_4");
    executeFromAdmin("GRANT set_role_1 TO USER set_user_1");
    executeFromAdmin("GRANT set_role_2 TO ROLE set_role_1");
    executeFromAdmin("GRANT set_role_3 TO ROLE set_role_2");
    Session unsetRole = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty())).build();
    Session setRoleAll = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ALL, Optional.empty())))).build();
    Session setRoleNone = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.NONE, Optional.empty())))).build();
    Session setRole1 = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_1"))))).build();
    Session setRole2 = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_2"))))).build();
    Session setRole3 = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_3"))))).build();
    Session setRole4 = Session.builder(getQueryRunner().getDefaultSession()).setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_4"))))).build();
    MaterializedResult actual = getQueryRunner().execute(unsetRole, "SELECT * FROM hive.information_schema.applicable_roles");
    MaterializedResult expected = MaterializedResult.resultBuilder(unsetRole, createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType()).row("set_user_1", "USER", "public", "NO").row("set_user_1", "USER", "set_role_1", "NO").row("set_role_1", "ROLE", "set_role_2", "NO").row("set_role_2", "ROLE", "set_role_3", "NO").build();
    assertEqualsIgnoreOrder(actual, expected);
    actual = getQueryRunner().execute(unsetRole, "SELECT * FROM hive.information_schema.enabled_roles");
    expected = MaterializedResult.resultBuilder(unsetRole, createUnboundedVarcharType()).row("public").row("set_role_1").row("set_role_2").row("set_role_3").build();
    assertEqualsIgnoreOrder(actual, expected);
    actual = getQueryRunner().execute(setRoleAll, "SELECT * FROM hive.information_schema.enabled_roles");
    expected = MaterializedResult.resultBuilder(setRoleAll, createUnboundedVarcharType()).row("public").row("set_role_1").row("set_role_2").row("set_role_3").build();
    assertEqualsIgnoreOrder(actual, expected);
    actual = getQueryRunner().execute(setRoleNone, "SELECT * FROM hive.information_schema.enabled_roles");
    expected = MaterializedResult.resultBuilder(setRoleNone, createUnboundedVarcharType()).row("public").build();
    assertEqualsIgnoreOrder(actual, expected);
    actual = getQueryRunner().execute(setRole1, "SELECT * FROM hive.information_schema.enabled_roles");
    expected = MaterializedResult.resultBuilder(setRole1, createUnboundedVarcharType()).row("public").row("set_role_1").row("set_role_2").row("set_role_3").build();
    assertEqualsIgnoreOrder(actual, expected);
    actual = getQueryRunner().execute(setRole2, "SELECT * FROM hive.information_schema.enabled_roles");
    expected = MaterializedResult.resultBuilder(setRole2, createUnboundedVarcharType()).row("public").row("set_role_2").row("set_role_3").build();
    assertEqualsIgnoreOrder(actual, expected);
    actual = getQueryRunner().execute(setRole3, "SELECT * FROM hive.information_schema.enabled_roles");
    expected = MaterializedResult.resultBuilder(setRole3, createUnboundedVarcharType()).row("public").row("set_role_3").build();
    assertEqualsIgnoreOrder(actual, expected);
    assertQueryFails(setRole4, "SELECT * FROM hive.information_schema.enabled_roles", ".*?Cannot set role set_role_4");
    executeFromAdmin("DROP ROLE set_role_1");
    executeFromAdmin("DROP ROLE set_role_2");
    executeFromAdmin("DROP ROLE set_role_3");
    executeFromAdmin("DROP ROLE set_role_4");
}
Also used : SelectedRole(io.prestosql.spi.security.SelectedRole) Identity(io.prestosql.spi.security.Identity) MaterializedResult(io.prestosql.testing.MaterializedResult) Session(io.prestosql.Session) Test(org.testng.annotations.Test)

Example 2 with SelectedRole

use of io.prestosql.spi.security.SelectedRole in project hetu-core by openlookeng.

This example is taken from the method testSessionContext of the class TestHttpRequestSessionContext.

@Test
public void testSessionContext() {
    // Simulate a client request carrying the full set of Presto protocol headers.
    HttpServletRequest request = new MockHttpServletRequest(
            ImmutableListMultimap.<String, String>builder()
                    .put(PRESTO_USER, "testUser")
                    .put(PRESTO_SOURCE, "testSource")
                    .put(PRESTO_CATALOG, "testCatalog")
                    .put(PRESTO_SCHEMA, "testSchema")
                    .put(PRESTO_PATH, "testPath")
                    .put(PRESTO_LANGUAGE, "zh-TW")
                    .put(PRESTO_TIME_ZONE, "Asia/Taipei")
                    .put(PRESTO_CLIENT_INFO, "client-info")
                    .put(PRESTO_SESSION, QUERY_MAX_MEMORY + "=1GB")
                    .put(PRESTO_SESSION, JOIN_DISTRIBUTION_TYPE + "=partitioned," + HASH_PARTITION_COUNT + " = 43")
                    .put(PRESTO_SESSION, "some_session_property=some value with %2C comma")
                    .put(PRESTO_PREPARED_STATEMENT, "query1=select * from foo,query2=select * from bar")
                    .put(PRESTO_ROLE, "foo_connector=ALL")
                    .put(PRESTO_ROLE, "bar_connector=NONE")
                    .put(PRESTO_ROLE, "foobar_connector=ROLE{role}")
                    .put(PRESTO_EXTRA_CREDENTIAL, "test.token.foo=bar")
                    .put(PRESTO_EXTRA_CREDENTIAL, "test.token.abc=xyz")
                    .build(),
            "testRemote");
    // The group provider simply reports each user as the sole member of its own group.
    HttpRequestSessionContext sessionContext = new HttpRequestSessionContext(request, ImmutableSet::of);

    // Plain string headers map straight onto context fields.
    assertEquals(sessionContext.getSource(), "testSource");
    assertEquals(sessionContext.getCatalog(), "testCatalog");
    assertEquals(sessionContext.getSchema(), "testSchema");
    assertEquals(sessionContext.getPath(), "testPath");
    assertEquals(sessionContext.getIdentity(), new Identity("testUser", Optional.empty()));
    assertEquals(sessionContext.getClientInfo(), "client-info");
    assertEquals(sessionContext.getLanguage(), "zh-TW");
    assertEquals(sessionContext.getTimeZoneId(), "Asia/Taipei");
    // Session properties are split on commas, trimmed, and %2C is URL-decoded back to ','.
    assertEquals(sessionContext.getSystemProperties(),
            ImmutableMap.of(
                    QUERY_MAX_MEMORY, "1GB",
                    JOIN_DISTRIBUTION_TYPE, "partitioned",
                    HASH_PARTITION_COUNT, "43",
                    "some_session_property", "some value with , comma"));
    assertEquals(sessionContext.getPreparedStatements(),
            ImmutableMap.of("query1", "select * from foo", "query2", "select * from bar"));
    // Role headers are parsed into per-connector SelectedRole values.
    assertEquals(sessionContext.getIdentity().getRoles(),
            ImmutableMap.of(
                    "foo_connector", new SelectedRole(SelectedRole.Type.ALL, Optional.empty()),
                    "bar_connector", new SelectedRole(SelectedRole.Type.NONE, Optional.empty()),
                    "foobar_connector", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("role"))));
    assertEquals(sessionContext.getIdentity().getExtraCredentials(),
            ImmutableMap.of("test.token.foo", "bar", "test.token.abc", "xyz"));
    assertEquals(sessionContext.getIdentity().getGroups(), ImmutableSet.of("testUser"));
}
Also used : HttpServletRequest(javax.servlet.http.HttpServletRequest) SelectedRole(io.prestosql.spi.security.SelectedRole) Identity(io.prestosql.spi.security.Identity) Test(org.testng.annotations.Test)

Example 3 with SelectedRole

use of io.prestosql.spi.security.SelectedRole in project hetu-core by openlookeng.

This example is taken from the method createQueryRunner of the class HiveQueryRunner.

/**
 * Creates a {@link DistributedQueryRunner} wired with the TPCH and Hive catalogs for tests.
 *
 * @param tables TPCH tables to copy into the Hive catalog on first run
 * @param extraProperties additional configuration properties for every node
 * @param security value for the "hive.security" catalog property
 * @param extraHiveProperties additional Hive catalog properties
 * @param baseDataDir optional base directory for node data
 * @param hasStateStore when true the runner is built with a state store
 * @param jdbcUrl when non-null and non-empty, a JDBC-backed hetu metastore is configured
 * @throws Exception if setup fails; the partially built runner is closed before rethrowing
 */
public static DistributedQueryRunner createQueryRunner(Iterable<TpchTable<?>> tables, Map<String, String> extraProperties, String security, Map<String, String> extraHiveProperties, Optional<Path> baseDataDir, boolean hasStateStore, String jdbcUrl) throws Exception {
    assertEquals(DateTimeZone.getDefault(), TIME_ZONE, "Timezone not configured correctly. Add -Duser.timezone=America/Bahia_Banderas to your JVM arguments");
    setupLogging();
    DistributedQueryRunner queryRunner;
    if (hasStateStore) {
        queryRunner = DistributedQueryRunner.builder(createSession(Optional.of(new SelectedRole(ROLE, Optional.of("admin")))))
                .setNodeCount(4)
                .setExtraProperties(extraProperties)
                .setBaseDataDir(baseDataDir)
                .buildWithStateStore();
    }
    else {
        // Without a state store, the coordinator additionally enables auto-vacuum,
        // CTE reuse, and the split cache map.
        Map<String, String> configProperties = new HashMap<>();
        configProperties.put("auto-vacuum.enabled", "true");
        configProperties.put("optimizer.cte-reuse-enabled", "true");
        configProperties.put("auto-vacuum.scan.interval", "15s");
        configProperties.put("hetu.split-cache-map.enabled", "true");
        queryRunner = DistributedQueryRunner.builder(createSession(Optional.of(new SelectedRole(ROLE, Optional.of("admin")))))
                .setNodeCount(4)
                .setCoordinatorProperties(configProperties)
                .setExtraProperties(extraProperties)
                .setBaseDataDir(baseDataDir)
                .build();
    }
    try {
        if (jdbcUrl != null && !jdbcUrl.isEmpty()) {
            writeHetuMetastoreConfig(jdbcUrl);
            queryRunner.installPlugin(new HetuMetastorePlugin());
            queryRunner.getCoordinator().loadMetastore();
            queryRunner.installPlugin(new StarTreePlugin());
        }
        queryRunner.installPlugin(new TpchPlugin());
        queryRunner.createCatalog("tpch", "tpch");
        // File-backed Hive metastore rooted inside the runner's data directory.
        File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("hive_data").toFile();
        HiveConfig hiveConfig = new HiveConfig();
        HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of());
        HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveConfig, new NoHdfsAuthentication());
        FileHiveMetastore metastore = new FileHiveMetastore(hdfsEnvironment, baseDir.toURI().toString(), "test");
        queryRunner.installPlugin(new HivePlugin(HIVE_CATALOG, Optional.of(metastore)));
        Map<String, String> hiveProperties = ImmutableMap.<String, String>builder()
                .putAll(extraHiveProperties)
                .put("hive.rcfile.time-zone", TIME_ZONE.getID())
                .put("hive.parquet.time-zone", TIME_ZONE.getID())
                .put("hive.security", security)
                .put("hive.max-partitions-per-scan", "1000")
                .put("hive.assume-canonical-partition-keys", "true")
                .build();
        Map<String, String> hiveBucketedProperties = ImmutableMap.<String, String>builder()
                .putAll(hiveProperties)
                // small split sizes so that each bucket has multiple splits
                .put("hive.max-initial-split-size", "10kB")
                .put("hive.max-split-size", "10kB")
                // uncompressed TEXTFILE so there is no minimum split size and the files stay splittable
                .put("hive.storage-format", "TEXTFILE")
                .put("hive.compression-codec", "NONE")
                .build();
        Map<String, String> hiveAutoVacuumProperties = ImmutableMap.<String, String>builder()
                .putAll(hiveProperties)
                .put("hive.auto-vacuum-enabled", "true")
                .put("hive.vacuum-collector-interval", "15s")
                .build();
        queryRunner.createCatalog(HIVE_AUTO_VACUUM_CATALOG, HIVE_CATALOG, hiveAutoVacuumProperties);
        queryRunner.createCatalog(HIVE_CATALOG, HIVE_CATALOG, hiveProperties);
        queryRunner.createCatalog(HIVE_BUCKETED_CATALOG, HIVE_CATALOG, hiveBucketedProperties);
        HiveIdentity identity = new HiveIdentity(SESSION);
        // Populate the schemas only when absent so a reused base data dir is not reloaded.
        if (!metastore.getDatabase(TPCH_SCHEMA).isPresent()) {
            metastore.createDatabase(identity, createDatabaseMetastoreObject(TPCH_SCHEMA));
            copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, createSession(Optional.empty()), tables);
        }
        if (!metastore.getDatabase(TPCH_BUCKETED_SCHEMA).isPresent()) {
            metastore.createDatabase(identity, createDatabaseMetastoreObject(TPCH_BUCKETED_SCHEMA));
            copyTpchTablesBucketed(queryRunner, "tpch", TINY_SCHEMA_NAME, createBucketedSession(Optional.empty()), tables);
        }
        return queryRunner;
    }
    catch (Exception e) {
        queryRunner.close();
        throw e;
    }
}

/**
 * Writes etc/hetu-metastore.properties (relative to the current working directory)
 * pointing the hetu metastore at the given JDBC URL, and publishes the config
 * directory through the "config" system property.
 * NOTE(review): the original also read the property straight back; presumably other
 * code reads "config" later — the property is still set here for that reason.
 */
private static void writeHetuMetastoreConfig(String jdbcUrl) throws IOException {
    String configDir = new File("").getCanonicalPath() + "/etc/";
    System.setProperty("config", configDir);
    File configDirFile = new File(configDir);
    if (!configDirFile.exists()) {
        configDirFile.mkdirs();
    }
    // FileWriter creates (or truncates) the target file itself, so the former
    // createNewFile() call (whose IOException was silently logged) is not needed.
    try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(new File(configDirFile, "hetu-metastore.properties")))) {
        bufferedWriter.write("hetu.metastore.db.url = " + jdbcUrl);
        bufferedWriter.write("\n");
        bufferedWriter.write("hetu.metastore.type = jdbc\n");
        bufferedWriter.write("hetu.metastore.db.user = user\n");
        bufferedWriter.write("hetu.metastore.db.password = testpass\n");
        bufferedWriter.write("hetu.metastore.cache.ttl = 0s");
    }
}
Also used : DistributedQueryRunner(io.prestosql.tests.DistributedQueryRunner) HashMap(java.util.HashMap) SelectedRole(io.prestosql.spi.security.SelectedRole) TpchPlugin(io.prestosql.plugin.tpch.TpchPlugin) FileWriter(java.io.FileWriter) IOException(java.io.IOException) NoHdfsAuthentication(io.prestosql.plugin.hive.authentication.NoHdfsAuthentication) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) IOException(java.io.IOException) BufferedWriter(java.io.BufferedWriter) FileHiveMetastore(io.prestosql.plugin.hive.metastore.file.FileHiveMetastore) HetuMetastorePlugin(io.hetu.core.metastore.HetuMetastorePlugin) StarTreePlugin(io.hetu.core.cube.startree.StarTreePlugin) File(java.io.File)

Example 4 with SelectedRole

use of io.prestosql.spi.security.SelectedRole in project hetu-core by openlookeng.

This example is taken from the method testSchemaOperations of the class TestHiveIntegrationSmokeTest.

@Test
public void testSchemaOperations() {
    // Run as the "hive" user with the hive catalog's admin role pre-selected.
    SelectedRole adminRole = new SelectedRole(SelectedRole.Type.ROLE, Optional.of("admin"));
    Identity adminIdentity = new Identity("hive", Optional.empty(), ImmutableMap.of("hive", adminRole));
    Session admin = Session.builder(getQueryRunner().getDefaultSession())
            .setIdentity(adminIdentity)
            .build();
    assertUpdate(admin, "CREATE SCHEMA new_schema");
    assertUpdate(admin, "CREATE TABLE new_schema.test (x bigint)");
    // Dropping a schema that still contains a table must fail.
    assertQueryFails(admin, "DROP SCHEMA new_schema", "Schema not empty: new_schema");
    // After the table is gone, dropping the schema succeeds.
    assertUpdate(admin, "DROP TABLE new_schema.test");
    assertUpdate(admin, "DROP SCHEMA new_schema");
}
Also used : SelectedRole(io.prestosql.spi.security.SelectedRole) Identity(io.prestosql.spi.security.Identity) Session(io.prestosql.Session) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) Test(org.testng.annotations.Test) AbstractTestIntegrationSmokeTest(io.prestosql.tests.AbstractTestIntegrationSmokeTest)

Example 5 with SelectedRole

use of io.prestosql.spi.security.SelectedRole in project hetu-core by openlookeng.

This example is taken from the method execute of the class SetRoleTask.

@Override
public ListenableFuture<?> execute(SetRole statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine, List<Expression> parameters, HeuristicIndexerManager heuristicIndexerManager) {
    Session session = stateMachine.getSession();
    String catalog = createCatalogName(session, statement);
    // Only SET ROLE <name> names a concrete role, so only that form needs an
    // access check that the session identity may assume it.
    if (statement.getType() == SetRole.Type.ROLE) {
        accessControl.checkCanSetRole(session.getRequiredTransactionId(), session.getIdentity(), statement.getRole().map(c -> c.getValue()).get(), catalog);
    }
    // Record the selection on the query state machine for the target catalog.
    stateMachine.addSetRole(catalog, new SelectedRole(toSelectedRoleType(statement.getType()), statement.getRole().map(c -> c.getValue())));
    return immediateFuture(null);
}

/**
 * Maps the AST-level {@code SetRole.Type} onto the SPI-level {@code SelectedRole.Type}.
 */
private static SelectedRole.Type toSelectedRoleType(SetRole.Type type) {
    switch (type) {
        case ROLE:
            return SelectedRole.Type.ROLE;
        case ALL:
            return SelectedRole.Type.ALL;
        case NONE:
            return SelectedRole.Type.NONE;
        default:
            throw new IllegalArgumentException("Unsupported type: " + type);
    }
}
Also used : SelectedRole(io.prestosql.spi.security.SelectedRole) Session(io.prestosql.Session)

Aggregations

SelectedRole (io.prestosql.spi.security.SelectedRole)8 Identity (io.prestosql.spi.security.Identity)4 Session (io.prestosql.Session)3 Test (org.testng.annotations.Test)3 ImmutableMap (com.google.common.collect.ImmutableMap)2 ConnectorSession (io.prestosql.spi.connector.ConnectorSession)2 HashMap (java.util.HashMap)2 ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap)1 StarTreePlugin (io.hetu.core.cube.startree.StarTreePlugin)1 HetuMetastorePlugin (io.hetu.core.metastore.HetuMetastorePlugin)1 HiveIdentity (io.prestosql.plugin.hive.authentication.HiveIdentity)1 NoHdfsAuthentication (io.prestosql.plugin.hive.authentication.NoHdfsAuthentication)1 HivePrincipal (io.prestosql.plugin.hive.metastore.HivePrincipal)1 FileHiveMetastore (io.prestosql.plugin.hive.metastore.file.FileHiveMetastore)1 TpchPlugin (io.prestosql.plugin.tpch.TpchPlugin)1 PrestoException (io.prestosql.spi.PrestoException)1 CatalogName (io.prestosql.spi.connector.CatalogName)1 CatalogName.createInformationSchemaCatalogName (io.prestosql.spi.connector.CatalogName.createInformationSchemaCatalogName)1 CatalogName.createSystemTablesCatalogName (io.prestosql.spi.connector.CatalogName.createSystemTablesCatalogName)1 RoleGrant (io.prestosql.spi.security.RoleGrant)1