Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
The class QueryPlanner, method planGroupingSets.
private GroupingSetsPlan planGroupingSets(PlanBuilder subPlan, QuerySpecification node, GroupingSetAnalysis groupingSetAnalysis)
{
    Map<Symbol, Symbol> groupingSetMappings = new LinkedHashMap<>();

    // Compute a set of artificial columns that will contain the values of the original columns
    // filtered by whether the column is included in the grouping set
    // This will become the basis for the scope for any column references
    Symbol[] fields = new Symbol[subPlan.getTranslations().getFieldSymbols().size()];
    for (FieldId field : groupingSetAnalysis.getAllFields()) {
        Symbol input = subPlan.getTranslations().getFieldSymbols().get(field.getFieldIndex());
        Symbol output = symbolAllocator.newSymbol(input, "gid");
        fields[field.getFieldIndex()] = output;
        groupingSetMappings.put(output, input);
    }

    Map<ScopeAware<Expression>, Symbol> complexExpressions = new HashMap<>();
    for (Expression expression : groupingSetAnalysis.getComplexExpressions()) {
        if (!complexExpressions.containsKey(scopeAwareKey(expression, analysis, subPlan.getScope()))) {
            Symbol input = subPlan.translate(expression);
            Symbol output = symbolAllocator.newSymbol(expression, analysis.getType(expression), "gid");
            complexExpressions.put(scopeAwareKey(expression, analysis, subPlan.getScope()), output);
            groupingSetMappings.put(output, input);
        }
    }

    // For the purpose of "distinct", we need to canonicalize column references that may have varying
    // syntactic forms (e.g., "t.a" vs "a"). Thus we need to enumerate grouping sets based on the underlying
    // fieldId associated with each column reference expression.
    // The catch is that simple group-by expressions can be arbitrary expressions (this is a departure from the SQL specification).
    // But they don't affect the number of grouping sets or the behavior of "distinct". We can compute all the candidate
    // grouping sets in terms of fieldId, dedup as appropriate and then cross-join them with the complex expressions.

    // This tracks the grouping sets before complex expressions are considered.
    // It's also used to compute the descriptors needed to implement grouping().
    List<Set<FieldId>> columnOnlyGroupingSets = enumerateGroupingSets(groupingSetAnalysis);
    if (node.getGroupBy().isPresent() && node.getGroupBy().get().isDistinct()) {
        columnOnlyGroupingSets = columnOnlyGroupingSets.stream()
                .distinct()
                .collect(toImmutableList());
    }

    // translate from FieldIds to Symbols
    List<List<Symbol>> sets = columnOnlyGroupingSets.stream()
            .map(set -> set.stream()
                    .map(FieldId::getFieldIndex)
                    .map(index -> fields[index])
                    .collect(toImmutableList()))
            .collect(toImmutableList());

    // combine (cartesian product) with complex expressions
    List<List<Symbol>> groupingSets = sets.stream()
            .map(set -> ImmutableList.<Symbol>builder()
                    .addAll(set)
                    .addAll(complexExpressions.values())
                    .build())
            .collect(toImmutableList());

    // Generate GroupIdNode (multiple grouping sets) or ProjectNode (single grouping set)
    PlanNode groupId;
    Optional<Symbol> groupIdSymbol = Optional.empty();
    if (groupingSets.size() > 1) {
        groupIdSymbol = Optional.of(symbolAllocator.newSymbol("groupId", BIGINT));
        groupId = new GroupIdNode(
                idAllocator.getNextId(),
                subPlan.getRoot(),
                groupingSets,
                groupingSetMappings,
                subPlan.getRoot().getOutputSymbols(),
                groupIdSymbol.get());
    }
    else {
        Assignments.Builder assignments = Assignments.builder();
        assignments.putIdentities(subPlan.getRoot().getOutputSymbols());
        groupingSetMappings.forEach((key, value) -> assignments.put(key, value.toSymbolReference()));
        groupId = new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments.build());
    }

    subPlan = new PlanBuilder(
            subPlan.getTranslations().withNewMappings(complexExpressions, Arrays.asList(fields)),
            groupId);

    return new GroupingSetsPlan(subPlan, columnOnlyGroupingSets, groupingSets, groupIdSymbol);
}
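The comments in this method hinge on canonicalizing column references to their underlying field before deduplicating grouping sets, so that syntactic variants such as t.a and a count as the same grouping column. Below is a minimal, self-contained sketch of just that dedup step using plain Java collections; it uses no Trino classes, and the field indexes and the GROUPING SETS example are made up for illustration.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class GroupingSetDedupSketch
{
    public static void main(String[] args)
    {
        // Suppose GROUP BY DISTINCT GROUPING SETS ((t.a), (a), (a, b)) where "t.a" and "a"
        // resolve to the same underlying field. Canonicalized to field indexes, the candidate
        // sets are {0}, {0}, {0, 1}; the duplicate collapses under "distinct".
        List<Set<Integer>> candidateSets = List.of(Set.of(0), Set.of(0), Set.of(0, 1));

        List<Set<Integer>> distinctSets = candidateSets.stream()
                .distinct()
                .collect(Collectors.toList());

        System.out.println(distinctSets.size()); // 2 -- the two {0} sets collapsed into one
    }
}

The real method performs the same dedup over Set<FieldId> per grouping set, and only applies distinct() when the query spells GROUP BY DISTINCT.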
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
The class QueryPlanner, method disambiguateOutputs.
public static NodeAndMappings disambiguateOutputs(NodeAndMappings plan, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
{
    Set<Symbol> distinctOutputs = ImmutableSet.copyOf(plan.getFields());

    if (distinctOutputs.size() < plan.getFields().size()) {
        Assignments.Builder assignments = Assignments.builder();
        ImmutableList.Builder<Symbol> newOutputs = ImmutableList.builder();
        Set<Symbol> uniqueOutputs = new HashSet<>();

        for (Symbol output : plan.getFields()) {
            if (uniqueOutputs.add(output)) {
                // first occurrence: pass the symbol through unchanged
                assignments.putIdentity(output);
                newOutputs.add(output);
            }
            else {
                // duplicate: alias it to a fresh symbol so every output is distinct
                Symbol newOutput = symbolAllocator.newSymbol(output);
                assignments.put(newOutput, output.toSymbolReference());
                newOutputs.add(newOutput);
            }
        }

        return new NodeAndMappings(
                new ProjectNode(idAllocator.getNextId(), plan.getNode(), assignments.build()),
                newOutputs.build());
    }

    return plan;
}
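disambiguateOutputs only adds a ProjectNode when the output list actually contains duplicates: the loop keeps the first occurrence of each symbol as an identity assignment and aliases later occurrences to fresh symbols. Here is a self-contained sketch of that bookkeeping with plain strings; it uses no Trino classes, and the "_0" suffix is only a stand-in for SymbolAllocator's renaming, whose real naming scheme is not shown here.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DisambiguateOutputsSketch
{
    public static void main(String[] args)
    {
        List<String> outputs = List.of("a", "b", "a"); // "a" appears twice

        Map<String, String> assignments = new LinkedHashMap<>(); // new symbol -> source symbol
        List<String> newOutputs = new ArrayList<>();
        Set<String> uniqueOutputs = new HashSet<>();
        int counter = 0;

        for (String output : outputs) {
            if (uniqueOutputs.add(output)) {
                assignments.put(output, output);         // identity projection
                newOutputs.add(output);
            }
            else {
                String fresh = output + "_" + counter++; // stand-in for SymbolAllocator.newSymbol
                assignments.put(fresh, output);          // alias the duplicate to the original column
                newOutputs.add(fresh);
            }
        }

        System.out.println(newOutputs);  // [a, b, a_0]
        System.out.println(assignments); // {a=a, b=b, a_0=a}
    }
}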
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
The class QueryPlanner, method coerce.
/**
* Creates a projection with any additional coercions by identity of the provided expressions.
*
* @return the new subplan and a mapping of each expression to the symbol representing the coercion or an existing symbol if a coercion wasn't needed
*/
public static PlanAndMappings coerce(PlanBuilder subPlan, List<Expression> expressions, Analysis analysis, PlanNodeIdAllocator idAllocator, SymbolAllocator symbolAllocator, TypeCoercion typeCoercion)
{
    Assignments.Builder assignments = Assignments.builder();
    assignments.putIdentities(subPlan.getRoot().getOutputSymbols());

    Map<NodeRef<Expression>, Symbol> mappings = new HashMap<>();
    for (Expression expression : expressions) {
        Type coercion = analysis.getCoercion(expression);

        // expressions may be repeated, for example, when resolving ordinal references in a GROUP BY clause
        if (!mappings.containsKey(NodeRef.of(expression))) {
            if (coercion != null) {
                Type type = analysis.getType(expression);
                Symbol symbol = symbolAllocator.newSymbol(expression, coercion);
                assignments.put(symbol, new Cast(
                        subPlan.rewrite(expression),
                        toSqlType(coercion),
                        false,
                        typeCoercion.isTypeOnlyCoercion(type, coercion)));
                mappings.put(NodeRef.of(expression), symbol);
            }
            else {
                mappings.put(NodeRef.of(expression), subPlan.translate(expression));
            }
        }
    }

    subPlan = subPlan.withNewRoot(new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments.build()));

    return new PlanAndMappings(subPlan, mappings);
}
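The coercion loop does two things at once: it dedups repeated expressions (keyed by NodeRef), and when the analysis recorded a coercion it projects a Cast under a fresh symbol instead of reusing the existing one. The following is a plain-Java stand-in for that control flow only; strings replace Expression and Symbol, a map replaces Analysis, and every name and type in it is illustrative.

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class CoerceSketch
{
    public static void main(String[] args)
    {
        // Stand-in for Analysis.getCoercion(): expression -> required type, or null if none
        Map<String, String> requiredCoercion = new HashMap<>();
        requiredCoercion.put("price", "decimal(38,2)");
        requiredCoercion.put("name", null);

        // "price" is listed twice, as happens when ordinals resolve to the same GROUP BY item
        List<String> expressions = List.of("price", "name", "price");

        Map<String, String> mappings = new LinkedHashMap<>(); // expression -> symbol carrying its (coerced) value
        int counter = 0;
        for (String expression : expressions) {
            if (!mappings.containsKey(expression)) {
                String coercion = requiredCoercion.get(expression);
                if (coercion != null) {
                    String symbol = "expr_" + counter++;
                    System.out.println(symbol + " := CAST(" + expression + " AS " + coercion + ")");
                    mappings.put(expression, symbol);
                }
                else {
                    mappings.put(expression, expression); // no coercion needed: reuse the existing symbol
                }
            }
        }

        System.out.println(mappings); // {price=expr_0, name=name}
    }
}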
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
The class SubqueryPlanner, method planValue.
private PlanAndMappings planValue(PlanBuilder subPlan, Expression value, Type actualType, Optional<Type> coercion)
{
    subPlan = subPlan.appendProjections(ImmutableList.of(value), symbolAllocator, idAllocator);

    // Adapt implicit row type (in the SQL spec, <row value special case>) by wrapping it with a row constructor
    Symbol column = subPlan.translate(value);
    Type declaredType = analysis.getType(value);
    if (!actualType.equals(declaredType)) {
        Symbol wrapped = symbolAllocator.newSymbol("row", actualType);

        Assignments assignments = Assignments.builder()
                .putIdentities(subPlan.getRoot().getOutputSymbols())
                .put(wrapped, new Row(ImmutableList.of(column.toSymbolReference())))
                .build();

        subPlan = subPlan.withNewRoot(new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments));
        column = wrapped;
    }

    return coerceIfNecessary(subPlan, column, value, actualType, coercion);
}
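The wrapping branch above is an instance of the most common Assignments pattern in these examples: keep every existing output as an identity assignment and add exactly one new computed column on top. Shown in isolation below as a sketch, assuming the same planner fields (symbolAllocator, idAllocator, subPlan) are in scope as in the surrounding class; newType and newExpression are hypothetical placeholders, not names from the source.

Symbol newSymbol = symbolAllocator.newSymbol("expr", newType);
Assignments assignments = Assignments.builder()
        .putIdentities(subPlan.getRoot().getOutputSymbols()) // pass all current columns through unchanged
        .put(newSymbol, newExpression)                       // add the single computed column
        .build();
subPlan = subPlan.withNewRoot(new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments));

Because ProjectNode outputs exactly what its Assignments declare, forgetting putIdentities would silently drop every pre-existing column, which is why all of these snippets start from the identity set.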
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
The class RemoveEmptyExceptBranches, method apply.
@Override
public Result apply(ExceptNode node, Captures captures, Context context)
{
    if (isEmpty(node.getSources().get(0), context.getLookup())) {
        // The set we're subtracting from is empty, so the whole EXCEPT produces no rows
        return Result.ofPlanNode(new ValuesNode(node.getId(), node.getOutputSymbols(), ImmutableList.of()));
    }

    boolean hasEmptyBranches = node.getSources().stream()
            .skip(1) // first source is the set we're excluding rows from, so ignore it
            .anyMatch(source -> isEmpty(source, context.getLookup()));

    if (!hasEmptyBranches) {
        return Result.empty();
    }

    ImmutableList.Builder<PlanNode> newSourcesBuilder = ImmutableList.builder();
    ImmutableListMultimap.Builder<Symbol, Symbol> outputsToInputsBuilder = ImmutableListMultimap.builder();
    for (int i = 0; i < node.getSources().size(); i++) {
        PlanNode source = node.getSources().get(i);
        if (i == 0 || !isEmpty(source, context.getLookup())) {
            newSourcesBuilder.add(source);
            for (Symbol column : node.getOutputSymbols()) {
                outputsToInputsBuilder.put(column, node.getSymbolMapping().get(column).get(i));
            }
        }
    }

    List<PlanNode> newSources = newSourcesBuilder.build();
    ListMultimap<Symbol, Symbol> outputsToInputs = outputsToInputsBuilder.build();

    if (newSources.size() == 1) {
        // Only the first source is left: replace the EXCEPT with a projection that renames
        // its columns to the EXCEPT's output symbols
        Assignments.Builder assignments = Assignments.builder();
        outputsToInputs.entries().stream()
                .forEach(entry -> assignments.put(entry.getKey(), entry.getValue().toSymbolReference()));

        if (node.isDistinct()) {
            // EXCEPT DISTINCT also dedups the remaining source
            return Result.ofPlanNode(new AggregationNode(
                    node.getId(),
                    new ProjectNode(context.getIdAllocator().getNextId(), newSources.get(0), assignments.build()),
                    ImmutableMap.of(),
                    singleGroupingSet(node.getOutputSymbols()),
                    ImmutableList.of(),
                    Step.SINGLE,
                    Optional.empty(),
                    Optional.empty()));
        }

        return Result.ofPlanNode(new ProjectNode(node.getId(), newSources.get(0), assignments.build()));
    }

    return Result.ofPlanNode(new ExceptNode(node.getId(), newSources, outputsToInputs, node.getOutputSymbols(), node.isDistinct()));
}
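To see why the rule is safe: an EXCEPT branch that produces no rows excludes nothing and can be dropped, and if only the first branch is left the EXCEPT collapses to that branch alone (deduplicated for EXCEPT DISTINCT). A self-contained sketch of that reasoning over plain lists of rows, with no Trino plan nodes and made-up data:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class RemoveEmptyExceptBranchesSketch
{
    public static void main(String[] args)
    {
        // Toy stand-ins for the sources of an EXCEPT: each branch is just a list of rows.
        List<List<Integer>> sources = List.of(
                List.of(1, 2, 2, 3), // the set we're excluding rows from
                List.of());          // an empty branch: excluding nothing changes nothing

        if (sources.get(0).isEmpty()) {
            System.out.println("EXCEPT is empty"); // corresponds to returning an empty ValuesNode
            return;
        }

        // Keep the first source unconditionally; drop any other branch known to be empty.
        List<List<Integer>> newSources = IntStream.range(0, sources.size())
                .filter(i -> i == 0 || !sources.get(i).isEmpty())
                .mapToObj(sources::get)
                .collect(Collectors.toList());

        if (newSources.size() == 1) {
            // Only the first source remains, so the EXCEPT reduces to that source alone;
            // EXCEPT DISTINCT additionally dedups it (the rule models this with an AggregationNode).
            System.out.println(new LinkedHashSet<>(newSources.get(0))); // [1, 2, 3]
        }
        else {
            System.out.println("rebuild EXCEPT over " + newSources.size() + " sources");
        }
    }
}

The rule itself performs the same pruning on plan nodes, then rebuilds either a ProjectNode (wrapped in an AggregationNode for the distinct case) or a smaller ExceptNode, as the code above shows.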