use of io.trino.sql.planner.plan.ProjectNode in project trino by trinodb.
From class MergeLimitOverProjectWithSort, method apply:
@Override
public Result apply(LimitNode parent, Captures captures, Context context)
{
    ProjectNode project = captures.get(PROJECT);
    SortNode sort = captures.get(SORT);
    return Result.ofPlanNode(project.replaceChildren(ImmutableList.of(
            new TopNNode(
                    parent.getId(),
                    sort.getSource(),
                    parent.getCount(),
                    sort.getOrderingScheme(),
                    parent.isPartial() ? PARTIAL : SINGLE))));
}
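The rewrite turns Limit over Project over Sort into Project over TopN, so ordering and limiting happen in a single operator below the projection. A minimal sketch of how this could be exercised, assuming Trino's rule-test harness (BaseRuleTest, PlanBuilder, and PlanMatchPattern, as used by Trino's own unit tests; exact helper signatures may differ between versions):

// static imports assumed from io.trino.sql.planner.assertions.PlanMatchPattern
// (project, topN, sort, values) and io.trino.sql.tree.SortItem (ASCENDING, FIRST)
public class TestMergeLimitOverProjectWithSort
        extends BaseRuleTest
{
    @Test
    public void testMergesIntoTopN()
    {
        tester().assertThat(new MergeLimitOverProjectWithSort())
                .on(p -> {
                    Symbol a = p.symbol("a");
                    // Limit(1) over Project(identity) over Sort(a)
                    return p.limit(1,
                            p.project(Assignments.identity(a),
                                    p.sort(ImmutableList.of(a), p.values(a))));
                })
                // after the rule fires: Project over a single-step TopN(1)
                .matches(
                        project(
                                topN(1, ImmutableList.of(sort("a", ASCENDING, FIRST)), TopNNode.Step.SINGLE, values("a"))));
    }
}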
use of io.trino.sql.planner.plan.ProjectNode in project trino by trinodb.
From class PushLimitThroughProject, method apply:
@Override
public Result apply(LimitNode parent, Captures captures, Context context)
{
    ProjectNode projectNode = captures.get(CHILD);

    // Do not push down if the projection is made up of symbol references and exclusive dereferences. This prevents
    // undoing of PushDownDereferencesThroughLimit. We still push the limit in the case of overlapping dereferences,
    // since that enables the PushDownDereferencesThroughLimit rule to push optimal dereferences.
    Set<Expression> projections = ImmutableSet.copyOf(projectNode.getAssignments().getExpressions());
    if (!extractRowSubscripts(projections, false, context.getSession(), typeAnalyzer, context.getSymbolAllocator().getTypes()).isEmpty()
            && exclusiveDereferences(projections, context.getSession(), typeAnalyzer, context.getSymbolAllocator().getTypes())) {
        return Result.empty();
    }

    // for a LimitNode without ties and without pre-sorted inputs, simply reorder the nodes
    if (!parent.isWithTies() && !parent.requiresPreSortedInputs()) {
        return Result.ofPlanNode(transpose(parent, projectNode));
    }

    // for a LimitNode with ties or pre-sorted inputs, the ordering symbols must be rewritten
    // in terms of the symbols that exist below the projection
    SymbolMapper.Builder symbolMapper = SymbolMapper.builder();
    Set<Symbol> symbolsForRewrite = ImmutableSet.<Symbol>builder()
            .addAll(parent.getPreSortedInputs())
            .addAll(parent.getTiesResolvingScheme().map(OrderingScheme::getOrderBy).orElse(ImmutableList.of()))
            .build();
    for (Symbol symbol : symbolsForRewrite) {
        Expression expression = projectNode.getAssignments().get(symbol);
        // if a symbol results from some computation, the translation fails
        if (!(expression instanceof SymbolReference)) {
            return Result.empty();
        }
        symbolMapper.put(symbol, Symbol.from(expression));
    }

    LimitNode mappedLimitNode = symbolMapper.build().map(parent, projectNode.getSource());
    return Result.ofPlanNode(projectNode.replaceChildren(ImmutableList.of(mappedLimitNode)));
}
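For the simple path (no ties, no pre-sorted inputs), the rule just transposes the two nodes. A hedged test-style sketch of that case, again assuming Trino's BaseRuleTest utilities; the constructor argument and matcher helpers follow the shape of Trino's own TestPushLimitThroughProject, so treat signatures as approximate:

tester().assertThat(new PushLimitThroughProject(tester().getTypeAnalyzer()))
        .on(p -> {
            Symbol a = p.symbol("a");
            Symbol projectedA = p.symbol("projectedA");
            // Limit(1) over a projection that only renames a symbol
            return p.limit(1,
                    p.project(Assignments.of(projectedA, new SymbolReference("a")),
                            p.values(a)));
        })
        // the limit moves below the projection untouched
        .matches(
                project(
                        ImmutableMap.of("projectedA", expression("a")),
                        limit(1, values("a"))));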
use of io.trino.sql.planner.plan.ProjectNode in project trino by trinodb.
From class PushDownDereferencesThroughWindow, method apply:
@Override
public Result apply(ProjectNode projectNode, Captures captures, Context context)
{
    WindowNode windowNode = captures.get(CHILD);

    // Extract dereferences for pushdown
    Set<SubscriptExpression> dereferences = extractRowSubscripts(
            ImmutableList.<Expression>builder()
                    .addAll(projectNode.getAssignments().getExpressions())
                    .addAll(windowNode.getWindowFunctions().values().stream()
                            .flatMap(function -> function.getArguments().stream())
                            .collect(toImmutableList()))
                    .build(),
            false,
            context.getSession(),
            typeAnalyzer,
            context.getSymbolAllocator().getTypes());

    WindowNode.Specification specification = windowNode.getSpecification();
    dereferences = dereferences.stream()
            .filter(expression -> {
                Symbol symbol = getBase(expression);
                // Exclude partitionBy, orderBy and synthesized symbols
                return !specification.getPartitionBy().contains(symbol)
                        && !specification.getOrderingScheme().map(OrderingScheme::getOrderBy).orElse(ImmutableList.of()).contains(symbol)
                        && !windowNode.getCreatedSymbols().contains(symbol);
            })
            .collect(toImmutableSet());

    if (dereferences.isEmpty()) {
        return Result.empty();
    }

    // Create new symbols for dereference expressions
    Assignments dereferenceAssignments = Assignments.of(dereferences, context.getSession(), context.getSymbolAllocator(), typeAnalyzer);

    // Rewrite project node assignments using new symbols for dereference expressions
    Map<Expression, SymbolReference> mappings = HashBiMap.create(dereferenceAssignments.getMap())
            .inverse()
            .entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().toSymbolReference()));
    Assignments newAssignments = projectNode.getAssignments().rewrite(expression -> replaceExpression(expression, mappings));

    return Result.ofPlanNode(new ProjectNode(
            context.getIdAllocator().getNextId(),
            new WindowNode(
                    windowNode.getId(),
                    new ProjectNode(
                            context.getIdAllocator().getNextId(),
                            windowNode.getSource(),
                            Assignments.builder()
                                    .putIdentities(windowNode.getSource().getOutputSymbols())
                                    .putAll(dereferenceAssignments)
                                    .build()),
                    windowNode.getSpecification(),
                    // Replace dereference expressions in functions
                    windowNode.getWindowFunctions().entrySet().stream()
                            .collect(toImmutableMap(Map.Entry::getKey, entry -> {
                                WindowNode.Function oldFunction = entry.getValue();
                                return new WindowNode.Function(
                                        oldFunction.getResolvedFunction(),
                                        oldFunction.getArguments().stream()
                                                .map(expression -> replaceExpression(expression, mappings))
                                                .collect(toImmutableList()),
                                        oldFunction.getFrame(),
                                        oldFunction.isIgnoreNulls());
                            })),
                    windowNode.getHashSymbol(),
                    windowNode.getPrePartitionedInputs(),
                    windowNode.getPreSortedOrderPrefix()),
            newAssignments));
}
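The pivotal step above is inverting dereferenceAssignments (new symbol -> dereference expression) into a rewrite map (dereference expression -> symbol reference) via Guava's HashBiMap. A self-contained toy version of that inversion, with strings standing in for Trino's Symbol and Expression types (all names here are hypothetical):

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import java.util.Map;

public class InvertAssignmentsDemo
{
    public static void main(String[] args)
    {
        // symbol -> dereference expression, as Assignments.of(...) would produce
        BiMap<String, String> assignments = HashBiMap.create(Map.of(
                "expr_1", "msg.sender",
                "expr_2", "msg.amount"));
        // expression -> symbol, used to rewrite occurrences in the projection and window functions
        Map<String, String> mappings = assignments.inverse();
        System.out.println(mappings); // {msg.sender=expr_1, msg.amount=expr_2}
    }
}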
use of io.trino.sql.planner.plan.ProjectNode in project trino by trinodb.
From class PushDownProjectionsFromPatternRecognition, method apply:
@Override
public Result apply(PatternRecognitionNode node, Captures captures, Context context)
{
    Assignments.Builder assignments = Assignments.builder();
    Map<IrLabel, ExpressionAndValuePointers> rewrittenVariableDefinitions = rewriteVariableDefinitions(node.getVariableDefinitions(), assignments, context);
    Map<Symbol, Measure> rewrittenMeasureDefinitions = rewriteMeasureDefinitions(node.getMeasures(), assignments, context);
    if (assignments.build().isEmpty()) {
        return Result.empty();
    }

    assignments.putIdentities(node.getSource().getOutputSymbols());
    ProjectNode projectNode = new ProjectNode(context.getIdAllocator().getNextId(), node.getSource(), assignments.build());
    PatternRecognitionNode patternRecognitionNode = new PatternRecognitionNode(
            node.getId(),
            projectNode,
            node.getSpecification(),
            node.getHashSymbol(),
            node.getPrePartitionedInputs(),
            node.getPreSortedOrderPrefix(),
            node.getWindowFunctions(),
            rewrittenMeasureDefinitions,
            node.getCommonBaseFrame(),
            node.getRowsPerMatch(),
            node.getSkipToLabel(),
            node.getSkipToPosition(),
            node.isInitial(),
            node.getPattern(),
            node.getSubsets(),
            rewrittenVariableDefinitions);
    return Result.ofPlanNode(restrictOutputs(context.getIdAllocator(), patternRecognitionNode, ImmutableSet.copyOf(node.getOutputSymbols()))
            .orElse(patternRecognitionNode));
}
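rewriteVariableDefinitions and rewriteMeasureDefinitions are not shown in this excerpt; conceptually, each one replaces computed subexpressions in the DEFINE/MEASURES clauses with fresh symbols and records the symbol-to-expression pairs in the shared assignments builder, which then becomes the pre-projection below the node. A toy model of that extraction, with strings in place of Trino's Symbol and Expression types (names hypothetical):

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ExtractProjectionsDemo
{
    public static void main(String[] args)
    {
        // fresh symbol -> extracted expression; becomes the ProjectNode below the pattern recognition
        Map<String, String> assignments = new LinkedHashMap<>();
        List<String> computedSubexpressions = List.of("totalprice * 0.9", "price + tax");
        int counter = 0;
        for (String expression : computedSubexpressions) {
            String symbol = "expr_" + counter++;
            assignments.put(symbol, expression);
            // the definition/measure now references the symbol instead of recomputing the expression
            System.out.println("rewrote (" + expression + ") as " + symbol);
        }
        System.out.println("pre-projection assignments: " + assignments);
    }
}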
use of io.trino.sql.planner.plan.ProjectNode in project trino by trinodb.
From class PushJoinIntoTableScan, method apply:
@Override
public Result apply(JoinNode joinNode, Captures captures, Context context)
{
    if (joinNode.isCrossJoin()) {
        return Result.empty();
    }

    TableScanNode left = captures.get(LEFT_TABLE_SCAN);
    TableScanNode right = captures.get(RIGHT_TABLE_SCAN);
    verify(!left.isUpdateTarget() && !right.isUpdateTarget(), "Unexpected Join over for-update table scan");

    Expression effectiveFilter = getEffectiveFilter(joinNode);
    FilterSplitResult filterSplitResult = splitFilter(effectiveFilter, left.getOutputSymbols(), right.getOutputSymbols(), context);
    if (!filterSplitResult.getRemainingFilter().equals(BooleanLiteral.TRUE_LITERAL)) {
        // TODO add extra filter node above join
        return Result.empty();
    }

    if (left.getEnforcedConstraint().isNone() || right.getEnforcedConstraint().isNone()) {
        // bail out when either side is provably empty; that uninteresting case would make handling
        // the enforced constraint below harder
        return Result.empty();
    }

    Map<String, ColumnHandle> leftAssignments = left.getAssignments().entrySet().stream()
            .collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue));
    Map<String, ColumnHandle> rightAssignments = right.getAssignments().entrySet().stream()
            .collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue));

    /*
     * We are (lazily) computing estimated statistics for the join node and the left and right tables,
     * and passing those to the connector via applyJoin.
     *
     * There are a couple of reasons for this approach:
     * - the engine knows how to estimate a join, and the connector may not
     * - the engine may have cached stats for the table scans (within context.getStatsProvider()), so it can provide the information more cheaply
     * - in the future, the engine may be able to provide stats for a table scan even when the connector no longer can (see https://github.com/trinodb/trino/issues/6998)
     * - the pushdown feasibility assessment logic may be different (or configured differently) for different connectors/catalogs
     */
    JoinStatistics joinStatistics = getJoinStatistics(joinNode, left, right, context);

    Optional<JoinApplicationResult<TableHandle>> joinApplicationResult = metadata.applyJoin(
            context.getSession(),
            getJoinType(joinNode),
            left.getTable(),
            right.getTable(),
            filterSplitResult.getPushableConditions(),
            // TODO we could pass only the subset of assignments needed to resolve filterSplitResult.getPushableConditions()
            leftAssignments,
            rightAssignments,
            joinStatistics);
    if (joinApplicationResult.isEmpty()) {
        return Result.empty();
    }

    TableHandle handle = joinApplicationResult.get().getTableHandle();
    Map<ColumnHandle, ColumnHandle> leftColumnHandlesMapping = joinApplicationResult.get().getLeftColumnHandles();
    Map<ColumnHandle, ColumnHandle> rightColumnHandlesMapping = joinApplicationResult.get().getRightColumnHandles();

    // remap the symbols of both scans onto the column handles of the joined table
    ImmutableMap.Builder<Symbol, ColumnHandle> assignmentsBuilder = ImmutableMap.builder();
    assignmentsBuilder.putAll(left.getAssignments().entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> leftColumnHandlesMapping.get(entry.getValue()))));
    assignmentsBuilder.putAll(right.getAssignments().entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> rightColumnHandlesMapping.get(entry.getValue()))));
    Map<Symbol, ColumnHandle> assignments = assignmentsBuilder.buildOrThrow();

    // convert enforced constraint; a side that may be null-extended by an outer join cannot keep its domains as-is
    JoinNode.Type joinType = joinNode.getType();
    TupleDomain<ColumnHandle> leftConstraint = deriveConstraint(left.getEnforcedConstraint(), leftColumnHandlesMapping, joinType == RIGHT || joinType == FULL);
    TupleDomain<ColumnHandle> rightConstraint = deriveConstraint(right.getEnforcedConstraint(), rightColumnHandlesMapping, joinType == LEFT || joinType == FULL);
    TupleDomain<ColumnHandle> newEnforcedConstraint = TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
            .putAll(leftConstraint.getDomains().orElseThrow())
            .putAll(rightConstraint.getDomains().orElseThrow())
            .buildOrThrow());

    return Result.ofPlanNode(new ProjectNode(
            context.getIdAllocator().getNextId(),
            new TableScanNode(
                    joinNode.getId(),
                    handle,
                    ImmutableList.copyOf(assignments.keySet()),
                    assignments,
                    newEnforcedConstraint,
                    deriveTableStatisticsForPushdown(context.getStatsProvider(), context.getSession(), joinApplicationResult.get().isPrecalculateStatistics(), joinNode),
                    false,
                    Optional.empty()),
            Assignments.identity(joinNode.getOutputSymbols())));
}
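The new enforced constraint is simply the union of the remapped per-side domain maps; since the connector returns distinct column handles for the two sides, the duplicate-key check in buildOrThrow() acts as a sanity guard. A toy version of that merge, with strings in place of ColumnHandle and Domain (values hypothetical):

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class MergeConstraintsDemo
{
    public static void main(String[] args)
    {
        Map<String, String> leftDomains = Map.of("l_orderkey", "[1, 100]");
        Map<String, String> rightDomains = Map.of("r_custkey", "{42}");
        // mirrors ImmutableMap.builder().putAll(left).putAll(right).buildOrThrow() above
        Map<String, String> merged = ImmutableMap.<String, String>builder()
                .putAll(leftDomains)
                .putAll(rightDomains)
                .buildOrThrow(); // throws if a column handle appeared on both sides
        System.out.println(merged);
    }
}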