Use of io.trino.spi.security.SelectedRole in project trino by trinodb.
The class TestAccessControl, method testNoCatalogIsNeededInSessionForShowRoles:
@Test
public void testNoCatalogIsNeededInSessionForShowRoles()
{
    Session session = testSessionBuilder()
            .setIdentity(Identity.forUser("alice")
                    .withConnectorRoles(ImmutableMap.of("mock", new SelectedRole(ROLE, Optional.of("alice_role"))))
                    .build())
            .build();
    assertQuery(session, "SHOW ROLES IN mock", "VALUES 'alice_role'");
    assertQuery(session, "SHOW ROLE GRANTS IN mock", "VALUES 'alice_role'");
    assertQuery(session, "SHOW CURRENT ROLES FROM mock", "VALUES 'alice_role'");
    assertQuery(session, "SELECT * FROM mock.information_schema.applicable_roles", "SELECT 'alice', 'USER', 'alice_role', 'NO'");
}
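For reference, the SelectedRole passed to withConnectorRoles above pairs a role type with an optional role name. Below is a minimal, self-contained sketch of constructing such values; the Type constants ROLE, ALL, and NONE come from the Trino SPI, while the printed output is an assumption about the default toString forms and is illustrative only.

import io.trino.spi.security.SelectedRole;
import java.util.Optional;

public class SelectedRoleSketch
{
    public static void main(String[] args)
    {
        // A specific named role, as used in the test above.
        SelectedRole named = new SelectedRole(SelectedRole.Type.ROLE, Optional.of("alice_role"));
        // "All roles" and "no role" carry no role name.
        SelectedRole all = new SelectedRole(SelectedRole.Type.ALL, Optional.empty());
        SelectedRole none = new SelectedRole(SelectedRole.Type.NONE, Optional.empty());
        System.out.println(named.getType() + " " + named.getRole()); // ROLE Optional[alice_role]
        System.out.println(all.getType() + " " + all.getRole());     // ALL Optional.empty
        System.out.println(none.getType());                          // NONE
    }
}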
Use of io.trino.spi.security.SelectedRole in project trino by trinodb.
The class HttpRequestSessionContextFactory, method createSessionContext:
public SessionContext createSessionContext(
        MultivaluedMap<String, String> headers,
        Optional<String> alternateHeaderName,
        Optional<String> remoteAddress,
        Optional<Identity> authenticatedIdentity)
        throws WebApplicationException
{
    ProtocolHeaders protocolHeaders;
    try {
        protocolHeaders = detectProtocol(alternateHeaderName, headers.keySet());
    }
    catch (ProtocolDetectionException e) {
        throw badRequest(e.getMessage());
    }
    Optional<String> catalog = Optional.ofNullable(trimEmptyToNull(headers.getFirst(protocolHeaders.requestCatalog())));
    Optional<String> schema = Optional.ofNullable(trimEmptyToNull(headers.getFirst(protocolHeaders.requestSchema())));
    Optional<String> path = Optional.ofNullable(trimEmptyToNull(headers.getFirst(protocolHeaders.requestPath())));
    assertRequest(catalog.isPresent() || schema.isEmpty(), "Schema is set but catalog is not");
    requireNonNull(authenticatedIdentity, "authenticatedIdentity is null");
    Identity identity = buildSessionIdentity(authenticatedIdentity, protocolHeaders, headers);
    SelectedRole selectedRole = parseSystemRoleHeaders(protocolHeaders, headers);
    Optional<String> source = Optional.ofNullable(headers.getFirst(protocolHeaders.requestSource()));
    Optional<String> traceToken = Optional.ofNullable(trimEmptyToNull(headers.getFirst(protocolHeaders.requestTraceToken())));
    Optional<String> userAgent = Optional.ofNullable(headers.getFirst(USER_AGENT));
    Optional<String> remoteUserAddress = requireNonNull(remoteAddress, "remoteAddress is null");
    Optional<String> timeZoneId = Optional.ofNullable(headers.getFirst(protocolHeaders.requestTimeZone()));
    Optional<String> language = Optional.ofNullable(headers.getFirst(protocolHeaders.requestLanguage()));
    Optional<String> clientInfo = Optional.ofNullable(headers.getFirst(protocolHeaders.requestClientInfo()));
    Set<String> clientTags = parseClientTags(protocolHeaders, headers);
    Set<String> clientCapabilities = parseClientCapabilities(protocolHeaders, headers);
    ResourceEstimates resourceEstimates = parseResourceEstimate(protocolHeaders, headers);

    // parse session properties
    ImmutableMap.Builder<String, String> systemProperties = ImmutableMap.builder();
    Map<String, Map<String, String>> catalogSessionProperties = new HashMap<>();
    for (Entry<String, String> entry : parseSessionHeaders(protocolHeaders, headers).entrySet()) {
        String fullPropertyName = entry.getKey();
        String propertyValue = entry.getValue();
        List<String> nameParts = DOT_SPLITTER.splitToList(fullPropertyName);
        if (nameParts.size() == 1) {
            String propertyName = nameParts.get(0);
            assertRequest(!propertyName.isEmpty(), "Invalid %s header", protocolHeaders.requestSession());
            // catalog session properties cannot be validated until the transaction has started, so we delay system property validation also
            systemProperties.put(propertyName, propertyValue);
        }
        else if (nameParts.size() == 2) {
            String catalogName = nameParts.get(0);
            String propertyName = nameParts.get(1);
            assertRequest(!catalogName.isEmpty(), "Invalid %s header", protocolHeaders.requestSession());
            assertRequest(!propertyName.isEmpty(), "Invalid %s header", protocolHeaders.requestSession());
            // catalog session properties cannot be validated until the transaction has started
            catalogSessionProperties.computeIfAbsent(catalogName, id -> new HashMap<>()).put(propertyName, propertyValue);
        }
        else {
            throw badRequest(format("Invalid %s header", protocolHeaders.requestSession()));
        }
    }
    requireNonNull(catalogSessionProperties, "catalogSessionProperties is null");
    catalogSessionProperties = catalogSessionProperties.entrySet().stream()
            .collect(toImmutableMap(Entry::getKey, entry -> ImmutableMap.copyOf(entry.getValue())));

    Map<String, String> preparedStatements = parsePreparedStatementsHeaders(protocolHeaders, headers);

    String transactionIdHeader = headers.getFirst(protocolHeaders.requestTransactionId());
    boolean clientTransactionSupport = transactionIdHeader != null;
    Optional<TransactionId> transactionId = parseTransactionId(transactionIdHeader);

    return new SessionContext(
            protocolHeaders,
            catalog,
            schema,
            path,
            authenticatedIdentity,
            identity,
            selectedRole,
            source,
            traceToken,
            userAgent,
            remoteUserAddress,
            timeZoneId,
            language,
            clientTags,
            clientCapabilities,
            resourceEstimates,
            systemProperties.buildOrThrow(),
            catalogSessionProperties,
            preparedStatements,
            transactionId,
            clientTransactionSupport,
            clientInfo);
}
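The loop above routes each session property by the number of dot-separated parts in its name: one part means a system session property, two parts means a catalog.property pair for a specific catalog. Below is a minimal sketch of that split, assuming DOT_SPLITTER is a plain Guava Splitter on '.'; the actual constant in the Trino source may be configured differently, and the property names are only examples.

import com.google.common.base.Splitter;
import java.util.List;

public class SessionHeaderSplitSketch
{
    // Assumption: a plain dot splitter, standing in for DOT_SPLITTER in the factory.
    private static final Splitter DOT_SPLITTER = Splitter.on('.');

    public static void main(String[] args)
    {
        // One part: treated as a system session property.
        List<String> system = DOT_SPLITTER.splitToList("query_max_memory");
        // Two parts: treated as a session property of the "hive" catalog.
        List<String> catalog = DOT_SPLITTER.splitToList("hive.query_partition_filter_required");
        System.out.println(system);  // [query_max_memory]
        System.out.println(catalog); // [hive, query_partition_filter_required]
    }
}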
Use of io.trino.spi.security.SelectedRole in project trino by trinodb.
The class HttpRequestSessionContextFactory, method buildSessionIdentity:
private Identity buildSessionIdentity(Optional<Identity> authenticatedIdentity, ProtocolHeaders protocolHeaders, MultivaluedMap<String, String> headers)
{
    String trinoUser = trimEmptyToNull(headers.getFirst(protocolHeaders.requestUser()));
    String user = trinoUser != null ? trinoUser : authenticatedIdentity.map(Identity::getUser).orElse(null);
    assertRequest(user != null, "User must be set");
    SelectedRole systemRole = parseSystemRoleHeaders(protocolHeaders, headers);
    ImmutableSet.Builder<String> systemEnabledRoles = ImmutableSet.builder();
    if (systemRole.getType() == Type.ROLE) {
        systemEnabledRoles.add(systemRole.getRole().orElseThrow());
    }
    return authenticatedIdentity
            .map(identity -> Identity.from(identity).withUser(user))
            .orElseGet(() -> Identity.forUser(user))
            .withEnabledRoles(systemEnabledRoles.build())
            .withAdditionalConnectorRoles(parseConnectorRoleHeaders(protocolHeaders, headers))
            .withAdditionalExtraCredentials(parseExtraCredentials(protocolHeaders, headers))
            .withAdditionalGroups(groupProvider.getGroups(user))
            .build();
}
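Note that only a SelectedRole of type ROLE contributes to the enabled system roles; ALL and NONE carry no role name and leave the set unchanged. A small sketch of that check in isolation follows; the class and helper method names here are made up for illustration.

import io.trino.spi.security.SelectedRole;
import java.util.Optional;

public class SystemRoleSketch
{
    // Mirrors the branch in buildSessionIdentity: only a named ROLE yields an enabled role.
    static Optional<String> enabledRole(SelectedRole systemRole)
    {
        if (systemRole.getType() == SelectedRole.Type.ROLE) {
            return systemRole.getRole();
        }
        return Optional.empty();
    }

    public static void main(String[] args)
    {
        System.out.println(enabledRole(new SelectedRole(SelectedRole.Type.ROLE, Optional.of("admin")))); // Optional[admin]
        System.out.println(enabledRole(new SelectedRole(SelectedRole.Type.ALL, Optional.empty())));      // Optional.empty
    }
}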
Use of io.trino.spi.security.SelectedRole in project trino by trinodb.
The class TestIcebergMetadataListing, method createQueryRunner:
@Override
protected DistributedQueryRunner createQueryRunner()
        throws Exception
{
    Session session = testSessionBuilder()
            .setIdentity(Identity.forUser("hive")
                    .withConnectorRole("hive", new SelectedRole(ROLE, Optional.of("admin")))
                    .build())
            .build();
    DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).build();
    File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data").toFile();
    HdfsConfig hdfsConfig = new HdfsConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
    HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
    metastore = new FileHiveMetastore(
            new NodeVersion("test_version"),
            hdfsEnvironment,
            new MetastoreConfig(),
            new FileHiveMetastoreConfig()
                    .setCatalogDirectory(baseDir.toURI().toString())
                    .setMetastoreUser("test"));
    queryRunner.installPlugin(new TestingIcebergPlugin(Optional.of(metastore), Optional.empty(), EMPTY_MODULE));
    queryRunner.createCatalog("iceberg", "iceberg");
    queryRunner.installPlugin(new TestingHivePlugin(metastore));
    queryRunner.createCatalog("hive", "hive", ImmutableMap.of("hive.security", "sql-standard"));
    return queryRunner;
}
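This test grants the connector role through Identity.Builder.withConnectorRole for a single catalog, whereas the TestAccessControl example above used withConnectorRoles with a map; both attach a SelectedRole per catalog. Below is a minimal sketch of the two forms side by side, using the builder methods as they appear in the snippets above; the catalog names are placeholders and getConnectorRoles is assumed to be the matching accessor.

import com.google.common.collect.ImmutableMap;
import io.trino.spi.security.Identity;
import io.trino.spi.security.SelectedRole;
import java.util.Optional;

public class ConnectorRoleIdentitySketch
{
    public static void main(String[] args)
    {
        SelectedRole admin = new SelectedRole(SelectedRole.Type.ROLE, Optional.of("admin"));
        // One catalog at a time.
        Identity one = Identity.forUser("hive")
                .withConnectorRole("hive", admin)
                .build();
        // Several catalogs in one call.
        Identity many = Identity.forUser("hive")
                .withConnectorRoles(ImmutableMap.of("hive", admin, "iceberg", admin))
                .build();
        System.out.println(one.getConnectorRoles());
        System.out.println(many.getConnectorRoles());
    }
}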
Use of io.trino.spi.security.SelectedRole in project trino by trinodb.
The class BaseHiveConnectorTest, method testRequiredPartitionFilterAppliedOnDifferentSchema:
@Test
public void testRequiredPartitionFilterAppliedOnDifferentSchema()
{
    String schemaName = "schema_" + randomTableSuffix();
    Session session = Session.builder(getSession())
            .setIdentity(Identity.forUser("hive")
                    .withRole("hive", new SelectedRole(ROLE, Optional.of("admin")))
                    .build())
            .setCatalogSessionProperty("hive", "query_partition_filter_required", "true")
            .setCatalogSessionProperty("hive", "query_partition_filter_required_schemas", format("[\"%s\"]", schemaName))
            .build();
    getQueryRunner().execute("CREATE SCHEMA " + schemaName);
    try (TestTable table = new TestTable(
            new TrinoSqlExecutor(getQueryRunner(), session),
            "test_required_partition_filter_",
            "(id integer, a varchar, b varchar) WITH (partitioned_by = ARRAY['b'])",
            ImmutableList.of("1, '1', 'b'"))) {
        // no partition filter
        assertQuery(session, format("SELECT id FROM %s WHERE a = '1'", table.getName()), "SELECT 1");
        computeActual(session, format("EXPLAIN SELECT id FROM %s WHERE a = '1'", table.getName()));
        computeActual(session, format("EXPLAIN ANALYZE SELECT id FROM %s WHERE a = '1'", table.getName()));
        // partition filter that gets removed by planner
        assertQuery(session, format("SELECT id FROM %s WHERE b IS NOT NULL OR true", table.getName()), "SELECT 1");
        // Join on non-partition column
        assertUpdate(session, format("CREATE TABLE %s.%s_right (id integer, a varchar, b varchar, ds varchar) WITH (partitioned_by = ARRAY['ds'])", schemaName, table.getName()));
        assertUpdate(session, format("INSERT INTO %s.%s_right (id, a, ds) VALUES (1, 'a', '1')", schemaName, table.getName()), 1);
        assertQueryFails(session, format("SELECT count(*) FROM %2$s l JOIN %s.%2$s_right r ON l.id = r.id WHERE r.a = 'a'", schemaName, table.getName()), format("Filter required on %s\\.%s_right for at least one partition column: ds", schemaName, table.getName()));
        assertQuery(session, format("SELECT count(*) FROM %2$s l JOIN %s.%2$s_right r ON l.id = r.id WHERE r.ds = '1'", schemaName, table.getName()), "SELECT 1");
        assertUpdate(session, format("DROP TABLE %s.%s_right", schemaName, table.getName()));
    }
    getQueryRunner().execute("DROP SCHEMA " + schemaName);
}