use of io.trino.sql.tree.Expression in project trino by trinodb.
the class ParameterUtils method parameterExtractor.
public static Map<NodeRef<Parameter>, Expression> parameterExtractor(Statement statement, List<Expression> parameters)
{
    List<Parameter> parametersList = getParameters(statement).stream()
            .sorted(Comparator.comparing(
                    parameter -> parameter.getLocation().get(),
                    Comparator.comparing(NodeLocation::getLineNumber)
                            .thenComparing(NodeLocation::getColumnNumber)))
            .collect(toImmutableList());
    ImmutableMap.Builder<NodeRef<Parameter>, Expression> builder = ImmutableMap.builder();
    Iterator<Expression> iterator = parameters.iterator();
    for (Parameter parameter : parametersList) {
        builder.put(NodeRef.of(parameter), iterator.next());
    }
    return builder.buildOrThrow();
}
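For context, a hedged usage sketch (the SQL text and literal values are invented; SqlParser and ParsingOptions come from io.trino.sql.parser): each ? placeholder in the statement is parsed as a Parameter node, and after the sort by source position the i-th placeholder is paired with the i-th supplied expression.

// Hypothetical usage: bind two '?' placeholders, in source order, to supplied literals.
SqlParser parser = new SqlParser();
Statement statement = parser.createStatement(
        "SELECT * FROM orders WHERE orderkey = ? AND totalprice > ?",
        new ParsingOptions());
List<Expression> values = ImmutableList.of(new LongLiteral("42"), new DoubleLiteral("100.0"));
Map<NodeRef<Parameter>, Expression> bindings = ParameterUtils.parameterExtractor(statement, values);
// The first '?' now maps to 42 and the second to 100.0. Keys are NodeRef instances,
// so two syntactically identical Parameter nodes remain distinct map entries.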
use of io.trino.sql.tree.Expression in project trino by trinodb.
the class PushLimitThroughProject method apply.
@Override
public Result apply(LimitNode parent, Captures captures, Context context)
{
    ProjectNode projectNode = captures.get(CHILD);
    // Do not push down if the projection is made up of symbol references and exclusive dereferences. This prevents
    // undoing of PushDownDereferencesThroughLimit. We still push limit in the case of overlapping dereferences since
    // it enables PushDownDereferencesThroughLimit rule to push optimal dereferences.
    Set<Expression> projections = ImmutableSet.copyOf(projectNode.getAssignments().getExpressions());
    if (!extractRowSubscripts(projections, false, context.getSession(), typeAnalyzer, context.getSymbolAllocator().getTypes()).isEmpty()
            && exclusiveDereferences(projections, context.getSession(), typeAnalyzer, context.getSymbolAllocator().getTypes())) {
        return Result.empty();
    }
    // for a LimitNode without ties and pre-sorted inputs, simply reorder the nodes
    if (!parent.isWithTies() && !parent.requiresPreSortedInputs()) {
        return Result.ofPlanNode(transpose(parent, projectNode));
    }
    // for a LimitNode with ties, the tiesResolvingScheme must be rewritten in terms of symbols before projection
    SymbolMapper.Builder symbolMapper = SymbolMapper.builder();
    Set<Symbol> symbolsForRewrite = ImmutableSet.<Symbol>builder()
            .addAll(parent.getPreSortedInputs())
            .addAll(parent.getTiesResolvingScheme().map(OrderingScheme::getOrderBy).orElse(ImmutableList.of()))
            .build();
    for (Symbol symbol : symbolsForRewrite) {
        Expression expression = projectNode.getAssignments().get(symbol);
        // if a symbol results from some computation, the translation fails
        if (!(expression instanceof SymbolReference)) {
            return Result.empty();
        }
        symbolMapper.put(symbol, Symbol.from(expression));
    }
    LimitNode mappedLimitNode = symbolMapper.build().map(parent, projectNode.getSource());
    return Result.ofPlanNode(projectNode.replaceChildren(ImmutableList.of(mappedLimitNode)));
}
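For intuition, a sketch in the style of Trino's rule-test DSL (RuleTester and the plan matchers are real test utilities, but the exact builder and matcher signatures here are approximate, not verified against a specific Trino version): a limit above a renaming-only projection is transposed so the limit applies first.

tester().assertThat(new PushLimitThroughProject(tester().getTypeAnalyzer()))
        .on(p -> p.limit(
                1,
                p.project(
                        Assignments.of(p.symbol("b"), new SymbolReference("a")),
                        p.values(p.symbol("a")))))
        .matches(
                project(
                        limit(1, values("a"))));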
use of io.trino.sql.tree.Expression in project trino by trinodb.
the class PushPredicateIntoTableScan method pushFilterIntoTableScan.
public static Optional<PlanNode> pushFilterIntoTableScan(
        FilterNode filterNode,
        TableScanNode node,
        boolean pruneWithPredicateExpression,
        Session session,
        SymbolAllocator symbolAllocator,
        PlannerContext plannerContext,
        TypeAnalyzer typeAnalyzer,
        StatsProvider statsProvider,
        DomainTranslator domainTranslator)
{
    if (!isAllowPushdownIntoConnectors(session)) {
        return Optional.empty();
    }
    SplitExpression splitExpression = splitExpression(plannerContext, filterNode.getPredicate());
    DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.getExtractionResult(
            plannerContext,
            session,
            splitExpression.getDeterministicPredicate(),
            symbolAllocator.getTypes());
    TupleDomain<ColumnHandle> newDomain = decomposedPredicate.getTupleDomain()
            .transformKeys(node.getAssignments()::get)
            .intersect(node.getEnforcedConstraint());
    Map<NodeRef<Expression>, Type> remainingExpressionTypes = typeAnalyzer.getTypes(
            session,
            symbolAllocator.getTypes(),
            decomposedPredicate.getRemainingExpression());
    Optional<ConnectorExpression> connectorExpression = new ConnectorExpressionTranslator.SqlToConnectorExpressionTranslator(session, remainingExpressionTypes, plannerContext)
            .process(decomposedPredicate.getRemainingExpression());
    Map<String, ColumnHandle> connectorExpressionAssignments = connectorExpression
            .map(ignored -> node.getAssignments().entrySet().stream()
                    .collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue)))
            .orElse(ImmutableMap.of());
    Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();
    Constraint constraint;
    // use evaluator only when there is some predicate which could not be translated into tuple domain
    if (pruneWithPredicateExpression && !TRUE_LITERAL.equals(decomposedPredicate.getRemainingExpression())) {
        LayoutConstraintEvaluator evaluator = new LayoutConstraintEvaluator(
                plannerContext,
                typeAnalyzer,
                session,
                symbolAllocator.getTypes(),
                node.getAssignments(),
                combineConjuncts(
                        plannerContext.getMetadata(),
                        splitExpression.getDeterministicPredicate(),
                        // Simplify the tuple domain to avoid creating an expression with too many nodes,
                        // which would be expensive to evaluate in the call to isCandidate below.
                        domainTranslator.toPredicate(session, newDomain.simplify().transformKeys(assignments::get))));
        constraint = new Constraint(newDomain, connectorExpression.orElse(TRUE), connectorExpressionAssignments, evaluator::isCandidate, evaluator.getArguments());
    }
    else {
        // Currently, invoking the expression interpreter is very expensive.
        // TODO invoke the interpreter unconditionally when the interpreter becomes cheap enough.
        constraint = new Constraint(newDomain, connectorExpression.orElse(TRUE), connectorExpressionAssignments);
    }
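    // Either way, the Constraint carries the pushable filter on two channels: a TupleDomain
    // keyed by ColumnHandle, and an optional ConnectorExpression (with a row-level predicate
    // callback in the pruning case) for parts the domain model cannot express.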
    // check if new domain is wider than domain already provided by table scan
    if (constraint.predicate().isEmpty() &&
            // TODO do we need to track enforced ConnectorExpression in TableScanNode?
            TRUE.equals(connectorExpression.orElse(TRUE)) &&
            newDomain.contains(node.getEnforcedConstraint())) {
        Expression resultingPredicate = createResultingPredicate(
                plannerContext,
                session,
                symbolAllocator,
                typeAnalyzer,
                splitExpression.getDynamicFilter(),
                TRUE_LITERAL,
                splitExpression.getNonDeterministicPredicate(),
                decomposedPredicate.getRemainingExpression());
        if (!TRUE_LITERAL.equals(resultingPredicate)) {
            return Optional.of(new FilterNode(filterNode.getId(), node, resultingPredicate));
        }
        return Optional.of(node);
    }
    if (newDomain.isNone()) {
        // the combined domain is unsatisfiable: no row can match, so it is safe
        // to turn the subtree into a Values node
        return Optional.of(new ValuesNode(node.getId(), node.getOutputSymbols(), ImmutableList.of()));
    }
    Optional<ConstraintApplicationResult<TableHandle>> result = plannerContext.getMetadata().applyFilter(session, node.getTable(), constraint);
    if (result.isEmpty()) {
        return Optional.empty();
    }
    TableHandle newTable = result.get().getHandle();
    TableProperties newTableProperties = plannerContext.getMetadata().getTableProperties(session, newTable);
    Optional<TablePartitioning> newTablePartitioning = newTableProperties.getTablePartitioning();
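    // After pushdown the connector may report that its remaining predicate is unsatisfiable
    // for the new table handle; in that case the scan is known to produce no rows.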
    if (newTableProperties.getPredicate().isNone()) {
        return Optional.of(new ValuesNode(node.getId(), node.getOutputSymbols(), ImmutableList.of()));
    }
    TupleDomain<ColumnHandle> remainingFilter = result.get().getRemainingFilter();
    Optional<ConnectorExpression> remainingConnectorExpression = result.get().getRemainingExpression();
    boolean precalculateStatistics = result.get().isPrecalculateStatistics();
    verifyTablePartitioning(session, plannerContext.getMetadata(), node, newTablePartitioning);
    TableScanNode tableScan = new TableScanNode(
            node.getId(),
            newTable,
            node.getOutputSymbols(),
            node.getAssignments(),
            computeEnforced(newDomain, remainingFilter),
            // TODO (https://github.com/trinodb/trino/issues/8144) distinguish between predicate pushed down and remaining
            deriveTableStatisticsForPushdown(statsProvider, session, precalculateStatistics, filterNode),
            node.isUpdateTarget(),
            node.getUseConnectorNodePartitioning());
    Expression remainingDecomposedPredicate;
    if (remainingConnectorExpression.isEmpty() || remainingConnectorExpression.equals(connectorExpression)) {
        remainingDecomposedPredicate = decomposedPredicate.getRemainingExpression();
    }
    else {
        Map<String, Symbol> variableMappings = assignments.values().stream()
                .collect(toImmutableMap(Symbol::getName, Function.identity()));
        Expression translatedExpression = ConnectorExpressionTranslator.translate(session, remainingConnectorExpression.get(), plannerContext, variableMappings, new LiteralEncoder(plannerContext));
        if (connectorExpression.isEmpty()) {
            remainingDecomposedPredicate = ExpressionUtils.combineConjuncts(plannerContext.getMetadata(), translatedExpression, decomposedPredicate.getRemainingExpression());
        }
        else {
            remainingDecomposedPredicate = translatedExpression;
        }
    }
    Expression resultingPredicate = createResultingPredicate(
            plannerContext,
            session,
            symbolAllocator,
            typeAnalyzer,
            splitExpression.getDynamicFilter(),
            domainTranslator.toPredicate(session, remainingFilter.transformKeys(assignments::get)),
            splitExpression.getNonDeterministicPredicate(),
            remainingDecomposedPredicate);
    if (!TRUE_LITERAL.equals(resultingPredicate)) {
        return Optional.of(new FilterNode(filterNode.getId(), tableScan, resultingPredicate));
    }
    return Optional.of(tableScan);
}
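The pivotal step above is the domain algebra: the extracted TupleDomain is intersected with what the scan already enforces, and pushdown is skipped when the result is no narrower than the enforced constraint. A minimal sketch with the io.trino.spi.predicate API (TestingColumnHandle stands in for a real connector handle, BIGINT is io.trino.spi.type.BigintType.BIGINT, and the predicate is invented):

ColumnHandle orderkey = new TestingColumnHandle("orderkey"); // stand-in column handle
TupleDomain<ColumnHandle> extracted = TupleDomain.withColumnDomains(
        ImmutableMap.of(orderkey, Domain.singleValue(BIGINT, 5L))); // from "orderkey = 5"
TupleDomain<ColumnHandle> enforced = TupleDomain.all(); // the scan enforces nothing yet
TupleDomain<ColumnHandle> newDomain = extracted.intersect(enforced); // still {orderkey = 5}
// The early return above fires only when newDomain is at least as wide as the enforced
// constraint; here it is strictly narrower, so there is something new to push down.
boolean redundant = newDomain.contains(enforced); // false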
use of io.trino.sql.tree.Expression in project trino by trinodb.
the class PushDownDereferencesThroughWindow method apply.
@Override
public Result apply(ProjectNode projectNode, Captures captures, Context context)
{
    WindowNode windowNode = captures.get(CHILD);
    // Extract dereferences for pushdown
    Set<SubscriptExpression> dereferences = extractRowSubscripts(
            ImmutableList.<Expression>builder()
                    .addAll(projectNode.getAssignments().getExpressions())
                    .addAll(windowNode.getWindowFunctions().values().stream()
                            .flatMap(function -> function.getArguments().stream())
                            .collect(toImmutableList()))
                    .build(),
            false,
            context.getSession(),
            typeAnalyzer,
            context.getSymbolAllocator().getTypes());
    WindowNode.Specification specification = windowNode.getSpecification();
    dereferences = dereferences.stream()
            .filter(expression -> {
                Symbol symbol = getBase(expression);
                // Exclude partitionBy, orderBy and synthesized symbols
                return !specification.getPartitionBy().contains(symbol)
                        && !specification.getOrderingScheme().map(OrderingScheme::getOrderBy).orElse(ImmutableList.of()).contains(symbol)
                        && !windowNode.getCreatedSymbols().contains(symbol);
            })
            .collect(toImmutableSet());
    if (dereferences.isEmpty()) {
        return Result.empty();
    }
    // Create new symbols for dereference expressions
    Assignments dereferenceAssignments = Assignments.of(dereferences, context.getSession(), context.getSymbolAllocator(), typeAnalyzer);
    // Rewrite project node assignments using new symbols for dereference expressions
    Map<Expression, SymbolReference> mappings = HashBiMap.create(dereferenceAssignments.getMap())
            .inverse()
            .entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().toSymbolReference()));
    Assignments newAssignments = projectNode.getAssignments().rewrite(expression -> replaceExpression(expression, mappings));
    return Result.ofPlanNode(new ProjectNode(
            context.getIdAllocator().getNextId(),
            new WindowNode(
                    windowNode.getId(),
                    new ProjectNode(
                            context.getIdAllocator().getNextId(),
                            windowNode.getSource(),
                            Assignments.builder()
                                    .putIdentities(windowNode.getSource().getOutputSymbols())
                                    .putAll(dereferenceAssignments)
                                    .build()),
                    windowNode.getSpecification(),
                    // Replace dereference expressions in functions
                    windowNode.getWindowFunctions().entrySet().stream()
                            .collect(toImmutableMap(Map.Entry::getKey, entry -> {
                                WindowNode.Function oldFunction = entry.getValue();
                                return new WindowNode.Function(
                                        oldFunction.getResolvedFunction(),
                                        oldFunction.getArguments().stream()
                                                .map(expression -> replaceExpression(expression, mappings))
                                                .collect(toImmutableList()),
                                        oldFunction.getFrame(),
                                        oldFunction.isIgnoreNulls());
                            })),
                    windowNode.getHashSymbol(),
                    windowNode.getPrePartitionedInputs(),
                    windowNode.getPreSortedOrderPrefix()),
            newAssignments));
}
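The substitution itself is done by the statically imported replaceExpression helper (from io.trino.sql.planner.ExpressionNodeInliner), which swaps each matching subexpression for its mapped replacement. A small illustrative sketch with invented symbol names:

// Invented example: rewrite "msg[1] + 2" so the subscript reads from a pre-projected symbol.
Map<Expression, SymbolReference> mappings = ImmutableMap.of(
        new SubscriptExpression(new SymbolReference("msg"), new LongLiteral("1")),
        new SymbolReference("expr")); // "expr" is the symbol assigned below the window
Expression rewritten = replaceExpression(
        new ArithmeticBinaryExpression(
                ArithmeticBinaryExpression.Operator.ADD,
                new SubscriptExpression(new SymbolReference("msg"), new LongLiteral("1")),
                new LongLiteral("2")),
        mappings);
// rewritten is now: "expr" + 2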
use of io.trino.sql.tree.Expression in project trino by trinodb.
the class PushJoinIntoTableScan method apply.
@Override
public Result apply(JoinNode joinNode, Captures captures, Context context)
{
    if (joinNode.isCrossJoin()) {
        return Result.empty();
    }
    TableScanNode left = captures.get(LEFT_TABLE_SCAN);
    TableScanNode right = captures.get(RIGHT_TABLE_SCAN);
    verify(!left.isUpdateTarget() && !right.isUpdateTarget(), "Unexpected Join over for-update table scan");
    Expression effectiveFilter = getEffectiveFilter(joinNode);
    FilterSplitResult filterSplitResult = splitFilter(effectiveFilter, left.getOutputSymbols(), right.getOutputSymbols(), context);
    if (!filterSplitResult.getRemainingFilter().equals(BooleanLiteral.TRUE_LITERAL)) {
        // TODO add extra filter node above join
        return Result.empty();
    }
    if (left.getEnforcedConstraint().isNone() || right.getEnforcedConstraint().isNone()) {
        // bail out if either side is known to be empty; that case would make handling of the
        // enforced constraint harder below.
        return Result.empty();
    }
    Map<String, ColumnHandle> leftAssignments = left.getAssignments().entrySet().stream()
            .collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue));
    Map<String, ColumnHandle> rightAssignments = right.getAssignments().entrySet().stream()
            .collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue));
    /*
     * We are (lazily) computing estimated statistics for the join node and the left and right tables
     * and passing those to the connector via applyJoin.
     *
     * There are a couple of reasons for this approach:
     * - the engine knows how to estimate joins and the connector may not
     * - the engine may have cached stats for the table scans (within context.getStatsProvider()), so it can provide the information more inexpensively
     * - in the future, the engine may be able to provide stats for a table scan even in cases when the connector no longer can (see https://github.com/trinodb/trino/issues/6998)
     * - the pushdown feasibility assessment logic may be different (or configured differently) for different connectors/catalogs.
     */
    JoinStatistics joinStatistics = getJoinStatistics(joinNode, left, right, context);
    Optional<JoinApplicationResult<TableHandle>> joinApplicationResult = metadata.applyJoin(
            context.getSession(),
            getJoinType(joinNode),
            left.getTable(),
            right.getTable(),
            filterSplitResult.getPushableConditions(),
            // TODO we could pass only subset of assignments here, those which are needed to resolve filterSplitResult.getPushableConditions
            leftAssignments,
            rightAssignments,
            joinStatistics);
    if (joinApplicationResult.isEmpty()) {
        return Result.empty();
    }
    TableHandle handle = joinApplicationResult.get().getTableHandle();
    Map<ColumnHandle, ColumnHandle> leftColumnHandlesMapping = joinApplicationResult.get().getLeftColumnHandles();
    Map<ColumnHandle, ColumnHandle> rightColumnHandlesMapping = joinApplicationResult.get().getRightColumnHandles();
    ImmutableMap.Builder<Symbol, ColumnHandle> assignmentsBuilder = ImmutableMap.builder();
    assignmentsBuilder.putAll(left.getAssignments().entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> leftColumnHandlesMapping.get(entry.getValue()))));
    assignmentsBuilder.putAll(right.getAssignments().entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> rightColumnHandlesMapping.get(entry.getValue()))));
    Map<Symbol, ColumnHandle> assignments = assignmentsBuilder.buildOrThrow();
    // convert enforced constraint
    JoinNode.Type joinType = joinNode.getType();
    TupleDomain<ColumnHandle> leftConstraint = deriveConstraint(left.getEnforcedConstraint(), leftColumnHandlesMapping, joinType == RIGHT || joinType == FULL);
    TupleDomain<ColumnHandle> rightConstraint = deriveConstraint(right.getEnforcedConstraint(), rightColumnHandlesMapping, joinType == LEFT || joinType == FULL);
    TupleDomain<ColumnHandle> newEnforcedConstraint = TupleDomain.withColumnDomains(
            ImmutableMap.<ColumnHandle, Domain>builder()
                    .putAll(leftConstraint.getDomains().orElseThrow())
                    .putAll(rightConstraint.getDomains().orElseThrow())
                    .buildOrThrow());
    return Result.ofPlanNode(new ProjectNode(
            context.getIdAllocator().getNextId(),
            new TableScanNode(
                    joinNode.getId(),
                    handle,
                    ImmutableList.copyOf(assignments.keySet()),
                    assignments,
                    newEnforcedConstraint,
                    deriveTableStatisticsForPushdown(context.getStatsProvider(), context.getSession(), joinApplicationResult.get().isPrecalculateStatistics(), joinNode),
                    false,
                    Optional.empty()),
            Assignments.identity(joinNode.getOutputSymbols())));
}
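A note on deriveConstraint's boolean argument: a table on the nullable side of an outer join (the left input of a RIGHT or FULL join, the right input of a LEFT or FULL join) surfaces NULLs for non-matching rows, so its enforced domains must be widened to admit NULL before being claimed for the new scan. A minimal sketch with io.trino.spi.predicate (the concrete domain is invented; BIGINT is io.trino.spi.type.BigintType.BIGINT):

Domain enforced = Domain.singleValue(BIGINT, 7L); // the scan enforced "orderkey = 7"
// After an outer join the column may also be NULL, so widen to "= 7 OR IS NULL".
Domain widened = enforced.union(Domain.onlyNull(BIGINT));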