Use of io.trino.sql.planner.plan.JoinNode in project trino by trinodb.
The class ExtractSpatialJoins, method tryCreateSpatialJoin.
private static Result tryCreateSpatialJoin(
        Context context,
        JoinNode joinNode,
        Expression filter,
        PlanNodeId nodeId,
        List<Symbol> outputSymbols,
        FunctionCall spatialFunction,
        Optional<Expression> radius,
        PlannerContext plannerContext,
        SplitManager splitManager,
        PageSourceManager pageSourceManager,
        TypeAnalyzer typeAnalyzer) {
// TODO Add support for distributed left spatial joins
Optional<String> spatialPartitioningTableName = joinNode.getType() == INNER ? getSpatialPartitioningTableName(context.getSession()) : Optional.empty();
Optional<KdbTree> kdbTree = spatialPartitioningTableName.map(tableName -> loadKdbTree(tableName, context.getSession(), plannerContext.getMetadata(), splitManager, pageSourceManager));
List<Expression> arguments = spatialFunction.getArguments();
verify(arguments.size() == 2);
Expression firstArgument = arguments.get(0);
Expression secondArgument = arguments.get(1);
Type sphericalGeographyType = plannerContext.getTypeManager().getType(SPHERICAL_GEOGRAPHY_TYPE_SIGNATURE);
if (typeAnalyzer.getType(context.getSession(), context.getSymbolAllocator().getTypes(), firstArgument).equals(sphericalGeographyType)
        || typeAnalyzer.getType(context.getSession(), context.getSymbolAllocator().getTypes(), secondArgument).equals(sphericalGeographyType)) {
return Result.empty();
}
Set<Symbol> firstSymbols = extractUnique(firstArgument);
Set<Symbol> secondSymbols = extractUnique(secondArgument);
if (firstSymbols.isEmpty() || secondSymbols.isEmpty()) {
return Result.empty();
}
Optional<Symbol> newFirstSymbol = newGeometrySymbol(context, firstArgument, plannerContext.getTypeManager());
Optional<Symbol> newSecondSymbol = newGeometrySymbol(context, secondArgument, plannerContext.getTypeManager());
PlanNode leftNode = joinNode.getLeft();
PlanNode rightNode = joinNode.getRight();
PlanNode newLeftNode;
PlanNode newRightNode;
// Check if the order of arguments of the spatial function matches the order of join sides
int alignment = checkAlignment(joinNode, firstSymbols, secondSymbols);
if (alignment > 0) {
newLeftNode = newFirstSymbol.map(symbol -> addProjection(context, leftNode, symbol, firstArgument)).orElse(leftNode);
newRightNode = newSecondSymbol.map(symbol -> addProjection(context, rightNode, symbol, secondArgument)).orElse(rightNode);
} else if (alignment < 0) {
newLeftNode = newSecondSymbol.map(symbol -> addProjection(context, leftNode, symbol, secondArgument)).orElse(leftNode);
newRightNode = newFirstSymbol.map(symbol -> addProjection(context, rightNode, symbol, firstArgument)).orElse(rightNode);
} else {
return Result.empty();
}
Expression newFirstArgument = toExpression(newFirstSymbol, firstArgument);
Expression newSecondArgument = toExpression(newSecondSymbol, secondArgument);
Optional<Symbol> leftPartitionSymbol = Optional.empty();
Optional<Symbol> rightPartitionSymbol = Optional.empty();
if (kdbTree.isPresent()) {
leftPartitionSymbol = Optional.of(context.getSymbolAllocator().newSymbol("pid", INTEGER));
rightPartitionSymbol = Optional.of(context.getSymbolAllocator().newSymbol("pid", INTEGER));
if (alignment > 0) {
newLeftNode = addPartitioningNodes(plannerContext, context, newLeftNode, leftPartitionSymbol.get(), kdbTree.get(), newFirstArgument, Optional.empty());
newRightNode = addPartitioningNodes(plannerContext, context, newRightNode, rightPartitionSymbol.get(), kdbTree.get(), newSecondArgument, radius);
} else {
newLeftNode = addPartitioningNodes(plannerContext, context, newLeftNode, leftPartitionSymbol.get(), kdbTree.get(), newSecondArgument, Optional.empty());
newRightNode = addPartitioningNodes(plannerContext, context, newRightNode, rightPartitionSymbol.get(), kdbTree.get(), newFirstArgument, radius);
}
}
Expression newSpatialFunction = FunctionCallBuilder.resolve(context.getSession(), plannerContext.getMetadata())
        .setName(spatialFunction.getName())
        .addArgument(GEOMETRY_TYPE_SIGNATURE, newFirstArgument)
        .addArgument(GEOMETRY_TYPE_SIGNATURE, newSecondArgument)
        .build();
Expression newFilter = replaceExpression(filter, ImmutableMap.of(spatialFunction, newSpatialFunction));
return Result.ofPlanNode(new SpatialJoinNode(
        nodeId,
        SpatialJoinNode.Type.fromJoinNodeType(joinNode.getType()),
        newLeftNode,
        newRightNode,
        outputSymbols,
        newFilter,
        leftPartitionSymbol,
        rightPartitionSymbol,
        kdbTree.map(KdbTreeUtils::toJson)));
}
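This rule replaces the JoinNode with a SpatialJoinNode; when spatial partitioning is enabled it loads a KDB tree and projects a partition-id ("pid") symbol onto each side, so that rows that can match end up in the same partition. Below is a minimal, self-contained sketch of that idea using a fixed 4x4 grid instead of Trino's KdbTree; the class and the point data are hypothetical and only illustrate how a partition id is derived from a geometry's location.

// Toy sketch, not Trino's KdbTree: derive a partition id for a point from a fixed 4x4 grid
// over the unit square, mirroring the role of the "pid" symbol produced by addPartitioningNodes.
import java.util.List;

public class GridPartitioningSketch {
    record Point(double x, double y) {}

    // each grid cell plays the role of one spatial partition
    static int partitionId(Point p) {
        int col = Math.min(3, (int) (p.x() * 4));
        int row = Math.min(3, (int) (p.y() * 4));
        return row * 4 + col;
    }

    public static void main(String[] args) {
        // rows from both join sides that share a pid can be joined locally
        List<Point> probes = List.of(new Point(0.1, 0.2), new Point(0.9, 0.9));
        probes.forEach(p -> System.out.println(p + " -> pid " + partitionId(p)));
    }
}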
Use of io.trino.sql.planner.plan.JoinNode in project trino by trinodb.
The class PushLimitThroughOuterJoin, method apply.
@Override
public Result apply(LimitNode parent, Captures captures, Context context) {
JoinNode joinNode = captures.get(CHILD);
PlanNode left = joinNode.getLeft();
PlanNode right = joinNode.getRight();
if (joinNode.getType() == LEFT && !isAtMost(left, context.getLookup(), parent.getCount())) {
if (!ImmutableSet.copyOf(left.getOutputSymbols()).containsAll(parent.getPreSortedInputs())) {
return Result.empty();
}
return Result.ofPlanNode(parent.replaceChildren(ImmutableList.of(
        joinNode.replaceChildren(ImmutableList.of(
                new LimitNode(context.getIdAllocator().getNextId(), left, parent.getCount(), Optional.empty(), true, parent.getPreSortedInputs()),
                right)))));
}
if (joinNode.getType() == RIGHT && !isAtMost(right, context.getLookup(), parent.getCount())) {
if (!ImmutableSet.copyOf(right.getOutputSymbols()).containsAll(parent.getPreSortedInputs())) {
return Result.empty();
}
return Result.ofPlanNode(parent.replaceChildren(ImmutableList.of(
        joinNode.replaceChildren(ImmutableList.of(
                left,
                new LimitNode(context.getIdAllocator().getNextId(), right, parent.getCount(), Optional.empty(), true, parent.getPreSortedInputs()))))));
}
return Result.empty();
}
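The rule copies the limit below the join's preserved side (left for a LEFT join, right for a RIGHT join), which is safe because every preserved-side row produces at least one output row of the outer join. A self-contained toy illustration of that argument, with hypothetical data and no Trino classes:

// Toy illustration: limiting the preserved (left) side before a LEFT join cannot change the
// limited result, because each left row yields at least one output row of the outer join.
import java.util.List;
import java.util.stream.Stream;

public class LimitThroughLeftJoinSketch {
    // one output row per left value: the value joined with its match, or with "null"
    static Stream<String> leftJoin(Stream<Integer> left, List<Integer> right) {
        return left.map(l -> l + "|" + (right.contains(l) ? l.toString() : "null"));
    }

    public static void main(String[] args) {
        List<Integer> left = List.of(1, 2, 3, 4, 5);
        List<Integer> right = List.of(2, 4);
        long limit = 3;

        List<String> limitOnTop = leftJoin(left.stream(), right).limit(limit).toList();
        List<String> limitPushedDown = leftJoin(left.stream().limit(limit), right).limit(limit).toList();

        System.out.println(limitOnTop.equals(limitPushedDown)); // true
    }
}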
Use of io.trino.sql.planner.plan.JoinNode in project trino by trinodb.
The class PushJoinIntoTableScan, method apply.
@Override
public Result apply(JoinNode joinNode, Captures captures, Context context) {
if (joinNode.isCrossJoin()) {
return Result.empty();
}
TableScanNode left = captures.get(LEFT_TABLE_SCAN);
TableScanNode right = captures.get(RIGHT_TABLE_SCAN);
verify(!left.isUpdateTarget() && !right.isUpdateTarget(), "Unexpected Join over for-update table scan");
Expression effectiveFilter = getEffectiveFilter(joinNode);
FilterSplitResult filterSplitResult = splitFilter(effectiveFilter, left.getOutputSymbols(), right.getOutputSymbols(), context);
if (!filterSplitResult.getRemainingFilter().equals(BooleanLiteral.TRUE_LITERAL)) {
// TODO add extra filter node above join
return Result.empty();
}
if (left.getEnforcedConstraint().isNone() || right.getEnforcedConstraint().isNone()) {
// bail out: a TupleDomain.none() enforced constraint on either side would make deriving the new enforced constraint below harder.
return Result.empty();
}
Map<String, ColumnHandle> leftAssignments = left.getAssignments().entrySet().stream().collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue));
Map<String, ColumnHandle> rightAssignments = right.getAssignments().entrySet().stream().collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue));
/*
* We are (lazily) computing estimated statistics for join node and left and right table
* and passing those to connector via applyJoin.
*
* There are several reasons for this approach:
* - the engine knows how to estimate join and connector may not
* - the engine may have cached stats for the table scans (within context.getStatsProvider()), so it may be able to provide the information more cheaply
* - in the future, the engine may be able to provide stats for table scan even in case when connector no longer can (see https://github.com/trinodb/trino/issues/6998)
* - the pushdown feasibility assessment logic may be different (or configured differently) for different connectors/catalogs.
*/
JoinStatistics joinStatistics = getJoinStatistics(joinNode, left, right, context);
Optional<JoinApplicationResult<TableHandle>> joinApplicationResult = metadata.applyJoin(
        context.getSession(),
        getJoinType(joinNode),
        left.getTable(),
        right.getTable(),
        filterSplitResult.getPushableConditions(),
        // TODO we could pass only subset of assignments here, those which are needed to resolve filterSplitResult.getPushableConditions
        leftAssignments,
        rightAssignments,
        joinStatistics);
if (joinApplicationResult.isEmpty()) {
return Result.empty();
}
TableHandle handle = joinApplicationResult.get().getTableHandle();
Map<ColumnHandle, ColumnHandle> leftColumnHandlesMapping = joinApplicationResult.get().getLeftColumnHandles();
Map<ColumnHandle, ColumnHandle> rightColumnHandlesMapping = joinApplicationResult.get().getRightColumnHandles();
ImmutableMap.Builder<Symbol, ColumnHandle> assignmentsBuilder = ImmutableMap.builder();
assignmentsBuilder.putAll(left.getAssignments().entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, entry -> leftColumnHandlesMapping.get(entry.getValue()))));
assignmentsBuilder.putAll(right.getAssignments().entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, entry -> rightColumnHandlesMapping.get(entry.getValue()))));
Map<Symbol, ColumnHandle> assignments = assignmentsBuilder.buildOrThrow();
// convert enforced constraint
JoinNode.Type joinType = joinNode.getType();
TupleDomain<ColumnHandle> leftConstraint = deriveConstraint(left.getEnforcedConstraint(), leftColumnHandlesMapping, joinType == RIGHT || joinType == FULL);
TupleDomain<ColumnHandle> rightConstraint = deriveConstraint(right.getEnforcedConstraint(), rightColumnHandlesMapping, joinType == LEFT || joinType == FULL);
TupleDomain<ColumnHandle> newEnforcedConstraint = TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
        .putAll(leftConstraint.getDomains().orElseThrow())
        .putAll(rightConstraint.getDomains().orElseThrow())
        .buildOrThrow());
return Result.ofPlanNode(new ProjectNode(
        context.getIdAllocator().getNextId(),
        new TableScanNode(
                joinNode.getId(),
                handle,
                ImmutableList.copyOf(assignments.keySet()),
                assignments,
                newEnforcedConstraint,
                deriveTableStatisticsForPushdown(context.getStatsProvider(), context.getSession(), joinApplicationResult.get().isPrecalculateStatistics(), joinNode),
                false,
                Optional.empty()),
        Assignments.identity(joinNode.getOutputSymbols())));
}
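Once the connector accepts the join through Metadata.applyJoin, the rule builds a single TableScanNode over the returned handle; the original plan symbols must then be re-pointed to columns of the joined table using the left/right column-handle mappings from the JoinApplicationResult. A hypothetical, self-contained sketch of that remapping, with plain strings standing in for Symbol and ColumnHandle:

// Hypothetical sketch of the assignment remapping above: symbols that pointed at columns of the
// left/right scans are re-pointed to columns of the joined table via the connector-provided mappings.
import java.util.HashMap;
import java.util.Map;

public class JoinPushdownAssignmentSketch {
    public static void main(String[] args) {
        // symbol -> column handle of the original scans
        Map<String, String> leftAssignments = Map.of("orderkey", "orders.orderkey");
        Map<String, String> rightAssignments = Map.of("custkey", "customer.custkey");

        // old handle -> handle in the joined table, as returned by the connector
        Map<String, String> leftColumnHandlesMapping = Map.of("orders.orderkey", "joined.col0");
        Map<String, String> rightColumnHandlesMapping = Map.of("customer.custkey", "joined.col1");

        Map<String, String> assignments = new HashMap<>();
        leftAssignments.forEach((symbol, handle) -> assignments.put(symbol, leftColumnHandlesMapping.get(handle)));
        rightAssignments.forEach((symbol, handle) -> assignments.put(symbol, rightColumnHandlesMapping.get(handle)));

        System.out.println(assignments); // {orderkey=joined.col0, custkey=joined.col1} (order may vary)
    }
}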
Use of io.trino.sql.planner.plan.JoinNode in project trino by trinodb.
The class TransformCorrelatedDistinctAggregationWithProjection, method apply.
@Override
public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) {
// decorrelate nested plan
PlanNodeDecorrelator decorrelator = new PlanNodeDecorrelator(plannerContext, context.getSymbolAllocator(), context.getLookup());
Optional<PlanNodeDecorrelator.DecorrelatedNode> decorrelatedSource = decorrelator.decorrelateFilters(captures.get(AGGREGATION).getSource(), correlatedJoinNode.getCorrelation());
if (decorrelatedSource.isEmpty()) {
return Result.empty();
}
PlanNode source = decorrelatedSource.get().getNode();
// assign a unique id to the correlated join's input; it will be used to distinguish the original input rows after the join
PlanNode inputWithUniqueId = new AssignUniqueId(context.getIdAllocator().getNextId(), correlatedJoinNode.getInput(), context.getSymbolAllocator().newSymbol("unique", BIGINT));
JoinNode join = new JoinNode(
        context.getIdAllocator().getNextId(),
        JoinNode.Type.LEFT,
        inputWithUniqueId,
        source,
        ImmutableList.of(),
        inputWithUniqueId.getOutputSymbols(),
        source.getOutputSymbols(),
        false,
        decorrelatedSource.get().getCorrelatedPredicates(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        ImmutableMap.of(),
        Optional.empty());
// restore aggregation
AggregationNode aggregation = captures.get(AGGREGATION);
aggregation = new AggregationNode(
        aggregation.getId(),
        join,
        aggregation.getAggregations(),
        singleGroupingSet(ImmutableList.<Symbol>builder()
                .addAll(join.getLeftOutputSymbols())
                .addAll(aggregation.getGroupingKeys())
                .build()),
        ImmutableList.of(),
        aggregation.getStep(),
        Optional.empty(),
        Optional.empty());
// restrict outputs and apply projection
Set<Symbol> outputSymbols = new HashSet<>(correlatedJoinNode.getOutputSymbols());
List<Symbol> expectedAggregationOutputs = aggregation.getOutputSymbols().stream().filter(outputSymbols::contains).collect(toImmutableList());
Assignments assignments = Assignments.builder().putIdentities(expectedAggregationOutputs).putAll(captures.get(PROJECTION).getAssignments()).build();
return Result.ofPlanNode(new ProjectNode(context.getIdAllocator().getNextId(), aggregation, assignments));
}
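The correlated join is rewritten as AssignUniqueId over the input followed by a LEFT join with the decorrelated source, and the distinct aggregation is re-grouped by the unique id plus the original grouping keys so every input row keeps its own aggregate (unmatched rows survive thanks to the LEFT join). A toy, self-contained illustration of that grouping idea, with hypothetical data:

// Toy illustration: after AssignUniqueId and the LEFT join, grouping by the per-row unique id
// keeps one (distinct) aggregate per original input row, which is what the rewritten
// AggregationNode above achieves with its extended grouping set.
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

public class DecorrelationGroupingSketch {
    record JoinedRow(long uniqueId, String inputValue, Integer innerValue) {}

    public static void main(String[] args) {
        List<JoinedRow> joined = List.of(
                new JoinedRow(1, "a", 10),
                new JoinedRow(1, "a", 10),    // duplicate match, collapsed by DISTINCT
                new JoinedRow(2, "b", null)); // unmatched input row kept by the LEFT join

        Map<Long, Set<Integer>> distinctValues = joined.stream()
                .collect(Collectors.groupingBy(
                        JoinedRow::uniqueId,
                        Collectors.mapping(JoinedRow::innerValue, Collectors.toSet())));

        // count(DISTINCT innerValue) per input row, ignoring the null from unmatched rows
        distinctValues.forEach((uniqueId, values) ->
                System.out.println(uniqueId + " -> " + values.stream().filter(Objects::nonNull).count()));
    }
}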
Use of io.trino.sql.planner.plan.JoinNode in project trino by trinodb.
The class TransformCorrelatedDistinctAggregationWithoutProjection, method apply.
@Override
public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) {
// decorrelate nested plan
PlanNodeDecorrelator decorrelator = new PlanNodeDecorrelator(plannerContext, context.getSymbolAllocator(), context.getLookup());
Optional<PlanNodeDecorrelator.DecorrelatedNode> decorrelatedSource = decorrelator.decorrelateFilters(captures.get(AGGREGATION).getSource(), correlatedJoinNode.getCorrelation());
if (decorrelatedSource.isEmpty()) {
return Result.empty();
}
PlanNode source = decorrelatedSource.get().getNode();
// assign a unique id to the correlated join's input; it will be used to distinguish the original input rows after the join
PlanNode inputWithUniqueId = new AssignUniqueId(context.getIdAllocator().getNextId(), correlatedJoinNode.getInput(), context.getSymbolAllocator().newSymbol("unique", BIGINT));
JoinNode join = new JoinNode(
        context.getIdAllocator().getNextId(),
        JoinNode.Type.LEFT,
        inputWithUniqueId,
        source,
        ImmutableList.of(),
        inputWithUniqueId.getOutputSymbols(),
        source.getOutputSymbols(),
        false,
        decorrelatedSource.get().getCorrelatedPredicates(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        ImmutableMap.of(),
        Optional.empty());
// restore aggregation
AggregationNode aggregation = captures.get(AGGREGATION);
aggregation = new AggregationNode(
        aggregation.getId(),
        join,
        aggregation.getAggregations(),
        singleGroupingSet(ImmutableList.<Symbol>builder()
                .addAll(join.getLeftOutputSymbols())
                .addAll(aggregation.getGroupingKeys())
                .build()),
        ImmutableList.of(),
        aggregation.getStep(),
        Optional.empty(),
        Optional.empty());
// restrict outputs
Optional<PlanNode> project = restrictOutputs(context.getIdAllocator(), aggregation, ImmutableSet.copyOf(correlatedJoinNode.getOutputSymbols()));
return Result.ofPlanNode(project.orElse(aggregation));
}
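This rule performs the same rewrite as TransformCorrelatedDistinctAggregationWithProjection; the only difference is that there are no projection assignments to restore, so the result is simply pruned to the correlated join's output symbols via restrictOutputs. A minimal sketch of that pruning step, with stand-in strings for symbols rather than Trino's restrictOutputs:

// Minimal sketch: keep only the symbols that the original correlated join exposed,
// dropping the helper "unique" symbol introduced by AssignUniqueId.
import java.util.List;
import java.util.Set;

public class RestrictOutputsSketch {
    public static void main(String[] args) {
        List<String> aggregationOutputs = List.of("unique", "input_col", "count_distinct");
        Set<String> correlatedJoinOutputs = Set.of("input_col", "count_distinct");

        // equivalent of an identity projection restricted to the expected outputs
        List<String> restricted = aggregationOutputs.stream()
                .filter(correlatedJoinOutputs::contains)
                .toList();

        System.out.println(restricted); // [input_col, count_distinct]
    }
}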