Use of io.trino.metadata.Metadata in project trino by trinodb.
Class ColumnJdbcTable, method applyFilter:
@Override
public TupleDomain<ColumnHandle> applyFilter(ConnectorSession connectorSession, Constraint constraint)
{
    TupleDomain<ColumnHandle> tupleDomain = constraint.getSummary();
    if (tupleDomain.isNone() || constraint.predicate().isEmpty()) {
        return tupleDomain;
    }
    Predicate<Map<ColumnHandle, NullableValue>> predicate = constraint.predicate().get();
    Set<ColumnHandle> predicateColumns = constraint.getPredicateColumns()
            .orElseThrow(() -> new VerifyException("columns not present for a predicate"));

    boolean hasSchemaPredicate = predicateColumns.contains(TABLE_SCHEMA_COLUMN);
    boolean hasTablePredicate = predicateColumns.contains(TABLE_NAME_COLUMN);
    if (!hasSchemaPredicate && !hasTablePredicate) {
        // No filter on schema name and table name at all.
        return tupleDomain;
    }

    Session session = ((FullConnectorSession) connectorSession).getSession();

    Optional<String> catalogFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_CATALOG_COLUMN);
    Optional<String> schemaFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_SCHEMA_COLUMN);
    Optional<String> tableFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_NAME_COLUMN);

    if (schemaFilter.isPresent() && tableFilter.isPresent()) {
        // No need to narrow down the domain.
        return tupleDomain;
    }

    List<String> catalogs = listCatalogs(session, metadata, accessControl, catalogFilter).keySet().stream()
            .filter(catalogName -> predicate.test(ImmutableMap.of(TABLE_CATALOG_COLUMN, toNullableValue(catalogName))))
            .collect(toImmutableList());

    List<CatalogSchemaName> schemas = catalogs.stream()
            .flatMap(catalogName -> listSchemas(session, metadata, accessControl, catalogName, schemaFilter).stream()
                    .filter(schemaName -> !hasSchemaPredicate || predicate.test(ImmutableMap.of(
                            TABLE_CATALOG_COLUMN, toNullableValue(catalogName),
                            TABLE_SCHEMA_COLUMN, toNullableValue(schemaName))))
                    .map(schemaName -> new CatalogSchemaName(catalogName, schemaName)))
            .collect(toImmutableList());

    if (!hasTablePredicate) {
        return TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
                .put(TABLE_CATALOG_COLUMN, schemas.stream()
                        .map(CatalogSchemaName::getCatalogName)
                        .collect(toVarcharDomain())
                        .simplify(MAX_DOMAIN_SIZE))
                .put(TABLE_SCHEMA_COLUMN, schemas.stream()
                        .map(CatalogSchemaName::getSchemaName)
                        .collect(toVarcharDomain())
                        .simplify(MAX_DOMAIN_SIZE))
                .buildOrThrow());
    }

    List<CatalogSchemaTableName> tables = schemas.stream()
            .flatMap(schema -> {
                QualifiedTablePrefix tablePrefix = tableFilter.isPresent()
                        ? new QualifiedTablePrefix(schema.getCatalogName(), schema.getSchemaName(), tableFilter.get())
                        : new QualifiedTablePrefix(schema.getCatalogName(), schema.getSchemaName());
                return listTables(session, metadata, accessControl, tablePrefix).stream()
                        .filter(schemaTableName -> predicate.test(ImmutableMap.of(
                                TABLE_CATALOG_COLUMN, toNullableValue(schema.getCatalogName()),
                                TABLE_SCHEMA_COLUMN, toNullableValue(schemaTableName.getSchemaName()),
                                TABLE_NAME_COLUMN, toNullableValue(schemaTableName.getTableName()))))
                        .map(schemaTableName -> new CatalogSchemaTableName(schema.getCatalogName(), schemaTableName.getSchemaName(), schemaTableName.getTableName()));
            })
            .collect(toImmutableList());

    return TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
            .put(TABLE_CATALOG_COLUMN, tables.stream()
                    .map(CatalogSchemaTableName::getCatalogName)
                    .collect(toVarcharDomain())
                    .simplify(MAX_DOMAIN_SIZE))
            .put(TABLE_SCHEMA_COLUMN, tables.stream()
                    .map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getSchemaName())
                    .collect(toVarcharDomain())
                    .simplify(MAX_DOMAIN_SIZE))
            .put(TABLE_NAME_COLUMN, tables.stream()
                    .map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getTableName())
                    .collect(toVarcharDomain())
                    .simplify(MAX_DOMAIN_SIZE))
            .buildOrThrow());
}
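The toVarcharDomain() collector used above is defined elsewhere in ColumnJdbcTable and is not shown here. As a rough sketch of what such a collector reduces to, assuming only the standard Trino SPI Domain factory methods (the class VarcharDomains and the method varcharDomainOf below are hypothetical names for illustration):

import static com.google.common.collect.ImmutableList.toImmutableList;

import io.airlift.slice.Slices;
import io.trino.spi.predicate.Domain;
import io.trino.spi.type.VarcharType;
import java.util.List;

final class VarcharDomains
{
    private VarcharDomains() {}

    // Hypothetical helper: builds a varchar Domain from a list of catalog,
    // schema, or table names, roughly what the toVarcharDomain() collector
    // above is assumed to produce.
    static Domain varcharDomainOf(List<String> values)
    {
        if (values.isEmpty()) {
            return Domain.none(VarcharType.VARCHAR);
        }
        return Domain.multipleValues(
                VarcharType.VARCHAR,
                values.stream().map(Slices::utf8Slice).collect(toImmutableList()));
    }
}

In the method above, simplify(MAX_DOMAIN_SIZE) then keeps the resulting domain from growing unbounded when many names match.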
Use of io.trino.metadata.Metadata in project trino by trinodb.
Class TableCommentSystemTable, method getComment:
private Optional<String> getComment(
        Session session,
        QualifiedTablePrefix prefix,
        SchemaTableName name,
        Map<SchemaTableName, ViewInfo> views,
        Map<SchemaTableName, ViewInfo> materializedViews)
{
    ViewInfo materializedViewDefinition = materializedViews.get(name);
    if (materializedViewDefinition != null) {
        return materializedViewDefinition.getComment();
    }
    ViewInfo viewInfo = views.get(name);
    if (viewInfo != null) {
        return viewInfo.getComment();
    }
    QualifiedObjectName tableName = new QualifiedObjectName(prefix.getCatalogName(), name.getSchemaName(), name.getTableName());
    return metadata.getRedirectionAwareTableHandle(session, tableName).getTableHandle()
            .map(handle -> metadata.getTableMetadata(session, handle))
            .map(metadata -> metadata.getMetadata().getComment())
            .orElseGet(() -> {
                // A previously listed table might have been dropped concurrently
                LOG.warn("Failed to get metadata for table: %s", name);
                return Optional.empty();
            });
}
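The lookup order matters here: a name is resolved as a materialized view first, then as a view, and only then as a base table. A minimal plain-Java sketch of the same fallback chain, using hypothetical comment maps in place of the Trino metadata API:

import java.util.Map;
import java.util.Optional;

final class CommentLookup
{
    private CommentLookup() {}

    // Illustrates the resolution order used above: materialized view comment,
    // then view comment, then base table comment, falling back to empty.
    static Optional<String> comment(
            String name,
            Map<String, String> materializedViewComments,
            Map<String, String> viewComments,
            Map<String, String> tableComments)
    {
        return Optional.ofNullable(materializedViewComments.get(name))
                .or(() -> Optional.ofNullable(viewComments.get(name)))
                .or(() -> Optional.ofNullable(tableComments.get(name)));
    }
}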
Use of io.trino.metadata.Metadata in project trino by trinodb.
Class ExtractSpatialJoins, method loadKdbTree:
private static KdbTree loadKdbTree(String tableName, Session session, Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager)
{
    QualifiedObjectName name = toQualifiedObjectName(tableName, session.getCatalog().get(), session.getSchema().get());
    TableHandle tableHandle = metadata.getTableHandle(session, name)
            .orElseThrow(() -> new TrinoException(INVALID_SPATIAL_PARTITIONING, format("Table not found: %s", name)));

    Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle);
    List<ColumnHandle> visibleColumnHandles = columnHandles.values().stream()
            .filter(handle -> !metadata.getColumnMetadata(session, tableHandle, handle).isHidden())
            .collect(toImmutableList());
    checkSpatialPartitioningTable(visibleColumnHandles.size() == 1, "Expected single column for table %s, but found %s columns", name, columnHandles.size());
    ColumnHandle kdbTreeColumn = Iterables.getOnlyElement(visibleColumnHandles);

    Optional<KdbTree> kdbTree = Optional.empty();
    try (SplitSource splitSource = splitManager.getSplits(session, tableHandle, UNGROUPED_SCHEDULING, EMPTY, alwaysTrue())) {
        while (!Thread.currentThread().isInterrupted()) {
            SplitBatch splitBatch = getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, Lifespan.taskWide(), 1000));
            List<Split> splits = splitBatch.getSplits();
            for (Split split : splits) {
                try (ConnectorPageSource pageSource = pageSourceManager.createPageSource(session, split, tableHandle, ImmutableList.of(kdbTreeColumn), DynamicFilter.EMPTY)) {
                    do {
                        getFutureValue(pageSource.isBlocked());
                        Page page = pageSource.getNextPage();
                        if (page != null && page.getPositionCount() > 0) {
                            checkSpatialPartitioningTable(kdbTree.isEmpty(), "Expected exactly one row for table %s, but found more", name);
                            checkSpatialPartitioningTable(page.getPositionCount() == 1, "Expected exactly one row for table %s, but found %s rows", name, page.getPositionCount());
                            String kdbTreeJson = VARCHAR.getSlice(page.getBlock(0), 0).toStringUtf8();
                            try {
                                kdbTree = Optional.of(KdbTreeUtils.fromJson(kdbTreeJson));
                            }
                            catch (IllegalArgumentException e) {
                                checkSpatialPartitioningTable(false, "Invalid JSON string for KDB tree: %s", e.getMessage());
                            }
                        }
                    }
                    while (!pageSource.isFinished());
                }
                catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            }
            if (splitBatch.isLastBatch()) {
                break;
            }
        }
    }
    checkSpatialPartitioningTable(kdbTree.isPresent(), "Expected exactly one row for table %s, but got none", name);
    return kdbTree.get();
}
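checkSpatialPartitioningTable(...) is a small precondition helper defined elsewhere in ExtractSpatialJoins. Based on how it is called above, its assumed shape is a format-style check that surfaces INVALID_SPATIAL_PARTITIONING (the class name below is hypothetical, and this is a sketch rather than the actual implementation):

import static io.trino.spi.StandardErrorCode.INVALID_SPATIAL_PARTITIONING;
import static java.lang.String.format;

import io.trino.spi.TrinoException;

final class SpatialPartitioningChecks
{
    private SpatialPartitioningChecks() {}

    // Assumed shape of the checkSpatialPartitioningTable(...) helper used above:
    // if the condition fails, raise a TrinoException with a formatted message.
    static void checkSpatialPartitioningTable(boolean condition, String message, Object... args)
    {
        if (!condition) {
            throw new TrinoException(INVALID_SPATIAL_PARTITIONING, format(message, args));
        }
    }
}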
Use of io.trino.metadata.Metadata in project trino by trinodb.
Class InlineProjectIntoFilter, method apply:
@Override
public Result apply(FilterNode node, Captures captures, Context context)
{
    ProjectNode projectNode = captures.get(PROJECTION);

    List<Expression> filterConjuncts = extractConjuncts(node.getPredicate());
    Map<Boolean, List<Expression>> conjuncts = filterConjuncts.stream()
            .collect(partitioningBy(SymbolReference.class::isInstance));
    List<Expression> simpleConjuncts = conjuncts.get(true);
    List<Expression> complexConjuncts = conjuncts.get(false);

    // Do not inline expression if the symbol is used multiple times in simple conjuncts.
    Set<Expression> simpleUniqueConjuncts = simpleConjuncts.stream()
            .collect(Collectors.groupingBy(identity(), counting()))
            .entrySet().stream()
            .filter(entry -> entry.getValue() == 1)
            .map(Map.Entry::getKey)
            .collect(toImmutableSet());

    Set<Expression> complexConjunctSymbols = SymbolsExtractor.extractUnique(complexConjuncts).stream()
            .map(Symbol::toSymbolReference)
            .collect(toImmutableSet());

    // Do not inline expression if the symbol is used in complex conjuncts.
    Set<Expression> simpleConjunctsToInline = Sets.difference(simpleUniqueConjuncts, complexConjunctSymbols);
    if (simpleConjunctsToInline.isEmpty()) {
        return Result.empty();
    }

    ImmutableList.Builder<Expression> newConjuncts = ImmutableList.builder();
    Assignments.Builder newAssignments = Assignments.builder();
    Assignments.Builder postFilterAssignmentsBuilder = Assignments.builder();

    for (Expression conjunct : filterConjuncts) {
        if (simpleConjunctsToInline.contains(conjunct)) {
            Expression expression = projectNode.getAssignments().get(Symbol.from(conjunct));
            if (expression == null || expression instanceof SymbolReference) {
                // expression == null -> The symbol is not produced by the underlying projection (i.e. it is a correlation symbol).
                // expression instanceof SymbolReference -> Do not inline trivial projections.
                newConjuncts.add(conjunct);
            }
            else {
                newConjuncts.add(expression);
                newAssignments.putIdentities(SymbolsExtractor.extractUnique(expression));
                postFilterAssignmentsBuilder.put(Symbol.from(conjunct), TRUE_LITERAL);
            }
        }
        else {
            newConjuncts.add(conjunct);
        }
    }

    Assignments postFilterAssignments = postFilterAssignmentsBuilder.build();
    if (postFilterAssignments.isEmpty()) {
        return Result.empty();
    }
    Set<Symbol> postFilterSymbols = postFilterAssignments.getSymbols();
    // Remove inlined expressions from the underlying projection.
    newAssignments.putAll(projectNode.getAssignments().filter(symbol -> !postFilterSymbols.contains(symbol)));

    Map<Symbol, Expression> outputAssignments = new HashMap<>();
    outputAssignments.putAll(Assignments.identity(node.getOutputSymbols()).getMap());
    // Restore inlined symbols.
    outputAssignments.putAll(postFilterAssignments.getMap());

    return Result.ofPlanNode(new ProjectNode(
            context.getIdAllocator().getNextId(),
            new FilterNode(
                    node.getId(),
                    new ProjectNode(
                            projectNode.getId(),
                            projectNode.getSource(),
                            newAssignments.build()),
                    combineConjuncts(metadata, newConjuncts.build())),
            Assignments.builder()
                    .putAll(outputAssignments)
                    .build()));
}
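The two stream passes at the top of the method (partition the conjuncts into simple and complex, then keep only the simple ones that occur exactly once) can be illustrated on plain strings. The class and method below are hypothetical and stand in for the Expression-based logic, treating any single-word string as a "simple" conjunct:

import static java.util.function.Function.identity;
import static java.util.stream.Collectors.counting;
import static java.util.stream.Collectors.groupingBy;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

final class ConjunctBuckets
{
    private ConjunctBuckets() {}

    // Partition conjuncts into "simple" vs "complex", then keep only the
    // simple ones that appear exactly once, mirroring the rule above.
    static Set<String> uniqueSimpleConjuncts(List<String> conjuncts)
    {
        Map<Boolean, List<String>> partitioned = conjuncts.stream()
                .collect(Collectors.partitioningBy(conjunct -> !conjunct.contains(" ")));
        return partitioned.get(true).stream()
                .collect(groupingBy(identity(), counting()))
                .entrySet().stream()
                .filter(entry -> entry.getValue() == 1)
                .map(Map.Entry::getKey)
                .collect(Collectors.toSet());
    }
}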
Use of io.trino.metadata.Metadata in project trino by trinodb.
Class RevokeRolesTask, method execute:
@Override
public ListenableFuture<Void> execute(RevokeRoles statement, QueryStateMachine stateMachine, List<Expression> parameters, WarningCollector warningCollector)
{
    Session session = stateMachine.getSession();

    Set<String> roles = statement.getRoles().stream()
            .map(role -> role.getValue().toLowerCase(Locale.ENGLISH))
            .collect(toImmutableSet());
    Set<TrinoPrincipal> grantees = statement.getGrantees().stream()
            .map(MetadataUtil::createPrincipal)
            .collect(toImmutableSet());
    boolean adminOption = statement.isAdminOption();
    Optional<TrinoPrincipal> grantor = statement.getGrantor()
            .map(specification -> createPrincipal(session, specification));
    Optional<String> catalog = processRoleCommandCatalog(metadata, session, statement, statement.getCatalog().map(Identifier::getValue));

    Set<String> specifiedRoles = new LinkedHashSet<>();
    specifiedRoles.addAll(roles);
    grantees.stream()
            .filter(principal -> principal.getType() == ROLE)
            .map(TrinoPrincipal::getName)
            .forEach(specifiedRoles::add);
    if (grantor.isPresent() && grantor.get().getType() == ROLE) {
        specifiedRoles.add(grantor.get().getName());
    }

    for (String role : specifiedRoles) {
        checkRoleExists(session, statement, metadata, role, catalog);
    }

    accessControl.checkCanRevokeRoles(session.toSecurityContext(), roles, grantees, adminOption, grantor, catalog);
    metadata.revokeRoles(session, roles, grantees, adminOption, grantor, catalog);

    return immediateVoidFuture();
}
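Role names are normalized with an explicit Locale.ENGLISH so that case folding does not depend on the JVM's default locale. A minimal sketch of that normalization step, with hypothetical class and method names:

import static com.google.common.collect.ImmutableSet.toImmutableSet;

import java.util.List;
import java.util.Locale;
import java.util.Set;

final class RoleNames
{
    private RoleNames() {}

    // Mirrors the normalization above: role identifiers are lower-cased with a
    // fixed locale and deduplicated, so "ADMIN" and "admin" refer to the same role.
    static Set<String> normalize(List<String> roles)
    {
        return roles.stream()
                .map(role -> role.toLowerCase(Locale.ENGLISH))
                .collect(toImmutableSet());
    }
}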