
Example 1 with Analysis

Use of io.trino.sql.analyzer.Analysis in project trino by trinodb.

From the class SqlQueryExecution, the analyze method:

private static Analysis analyze(PreparedQuery preparedQuery, QueryStateMachine stateMachine, WarningCollector warningCollector, AnalyzerFactory analyzerFactory) {
    stateMachine.beginAnalysis();
    requireNonNull(preparedQuery, "preparedQuery is null");
    Analyzer analyzer = analyzerFactory.createAnalyzer(stateMachine.getSession(), preparedQuery.getParameters(), parameterExtractor(preparedQuery.getStatement(), preparedQuery.getParameters()), warningCollector);
    Analysis analysis;
    try {
        analysis = analyzer.analyze(preparedQuery.getStatement());
    } catch (StackOverflowError e) {
        throw new TrinoException(STACK_OVERFLOW, "statement is too large (stack overflow during analysis)", e);
    }
    stateMachine.setUpdateType(analysis.getUpdateType());
    stateMachine.setReferencedTables(analysis.getReferencedTables());
    stateMachine.setRoutines(analysis.getRoutines());
    stateMachine.endAnalysis();
    return analysis;
}
Also used : Analysis(io.trino.sql.analyzer.Analysis) TrinoException(io.trino.spi.TrinoException) Analyzer(io.trino.sql.analyzer.Analyzer) TypeAnalyzer(io.trino.sql.planner.TypeAnalyzer)
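
The notable part of this snippet is the analysis lifecycle: the state machine brackets the work with beginAnalysis()/endAnalysis(), and a StackOverflowError raised by recursive analysis of a very deeply nested statement is converted into a regular TrinoException with the STACK_OVERFLOW error code. Below is a minimal, JDK-only sketch of that guard pattern, assuming nothing beyond the standard library; the class and method names are hypothetical illustrations, not Trino APIs.

// Generic illustration: a naive recursive traversal can blow the stack on pathological
// input, so the error is caught and rethrown as an ordinary exception with a clear message.
public class StackGuardSketch {
    static int depth(String expr, int i) {
        // naive recursive descent over nested parentheses, e.g. "((((1))))"
        if (i >= expr.length() || expr.charAt(i) != '(') {
            return 0;
        }
        return 1 + depth(expr, i + 1);
    }

    static int analyzeDepth(String expr) {
        try {
            return depth(expr, 0);
        }
        catch (StackOverflowError e) {
            // mirror the pattern above: translate the Error into a user-facing exception
            throw new IllegalArgumentException("statement is too large (stack overflow during analysis)", e);
        }
    }

    public static void main(String[] args) {
        String huge = "(".repeat(1_000_000) + "1" + ")".repeat(1_000_000);
        try {
            analyzeDepth(huge);
        }
        catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}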

Example 2 with Analysis

Use of io.trino.sql.analyzer.Analysis in project trino by trinodb.

From the class LogicalPlanner, the getInsertPlan method:

private RelationPlan getInsertPlan(Analysis analysis, Table table, Query query, TableHandle tableHandle, List<ColumnHandle> insertColumns, Optional<TableLayout> newTableLayout, Optional<WriterTarget> materializedViewRefreshWriterTarget) {
    TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle);
    Map<NodeRef<LambdaArgumentDeclaration>, Symbol> lambdaDeclarationToSymbolMap = buildLambdaDeclarationToSymbolMap(analysis, symbolAllocator);
    RelationPlanner planner = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, plannerContext, Optional.empty(), session, ImmutableMap.of());
    RelationPlan plan = planner.process(query, null);
    ImmutableList.Builder<Symbol> builder = ImmutableList.builder();
    for (int i = 0; i < plan.getFieldMappings().size(); i++) {
        if (!plan.getDescriptor().getFieldByIndex(i).isHidden()) {
            builder.add(plan.getFieldMappings().get(i));
        }
    }
    List<Symbol> visibleFieldMappings = builder.build();
    Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, tableHandle);
    Assignments.Builder assignments = Assignments.builder();
    boolean supportsMissingColumnsOnInsert = metadata.supportsMissingColumnsOnInsert(session, tableHandle);
    ImmutableList.Builder<ColumnMetadata> insertedColumnsBuilder = ImmutableList.builder();
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        if (column.isHidden()) {
            continue;
        }
        Symbol output = symbolAllocator.newSymbol(column.getName(), column.getType());
        int index = insertColumns.indexOf(columns.get(column.getName()));
        if (index < 0) {
            if (supportsMissingColumnsOnInsert) {
                continue;
            }
            Expression cast = new Cast(new NullLiteral(), toSqlType(column.getType()));
            assignments.put(output, cast);
            insertedColumnsBuilder.add(column);
        } else {
            Symbol input = visibleFieldMappings.get(index);
            Type tableType = column.getType();
            Type queryType = symbolAllocator.getTypes().get(input);
            if (queryType.equals(tableType) || typeCoercion.isTypeOnlyCoercion(queryType, tableType)) {
                assignments.put(output, input.toSymbolReference());
            } else {
                Expression cast = noTruncationCast(input.toSymbolReference(), queryType, tableType);
                assignments.put(output, cast);
            }
            insertedColumnsBuilder.add(column);
        }
    }
    ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build());
    List<ColumnMetadata> insertedColumns = insertedColumnsBuilder.build();
    List<Field> fields = insertedColumns.stream().map(column -> Field.newUnqualified(column.getName(), column.getType())).collect(toImmutableList());
    Scope scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(fields)).build();
    plan = new RelationPlan(projectNode, scope, projectNode.getOutputSymbols(), Optional.empty());
    plan = planner.addRowFilters(table, plan, failIfPredicateIsNotMeet(metadata, session, PERMISSION_DENIED, AccessDeniedException.PREFIX + "Cannot insert row that does not match to a row filter"), node -> {
        Scope accessControlScope = analysis.getAccessControlScope(table);
        // hidden fields are not accessible in insert
        return Scope.builder().like(accessControlScope).withRelationType(accessControlScope.getRelationId(), accessControlScope.getRelationType().withOnlyVisibleFields()).build();
    });
    List<String> insertedTableColumnNames = insertedColumns.stream().map(ColumnMetadata::getName).collect(toImmutableList());
    String catalogName = tableHandle.getCatalogName().getCatalogName();
    TableStatisticsMetadata statisticsMetadata = metadata.getStatisticsCollectionMetadataForWrite(session, catalogName, tableMetadata.getMetadata());
    if (materializedViewRefreshWriterTarget.isPresent()) {
        return createTableWriterPlan(analysis, plan.getRoot(), plan.getFieldMappings(), materializedViewRefreshWriterTarget.get(), insertedTableColumnNames, insertedColumns, newTableLayout, statisticsMetadata);
    }
    InsertReference insertTarget = new InsertReference(tableHandle, insertedTableColumnNames.stream().map(columns::get).collect(toImmutableList()));
    return createTableWriterPlan(analysis, plan.getRoot(), plan.getFieldMappings(), insertTarget, insertedTableColumnNames, insertedColumns, newTableLayout, statisticsMetadata);
}
Also used : Cast(io.trino.sql.tree.Cast) TableStatisticsMetadata(io.trino.spi.statistics.TableStatisticsMetadata) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ImmutableList(com.google.common.collect.ImmutableList) Assignments(io.trino.sql.planner.plan.Assignments) InsertReference(io.trino.sql.planner.plan.TableWriterNode.InsertReference) NodeRef(io.trino.sql.tree.NodeRef) Field(io.trino.sql.analyzer.Field) RelationType(io.trino.sql.analyzer.RelationType) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) TableMetadata(io.trino.metadata.TableMetadata) ColumnHandle(io.trino.spi.connector.ColumnHandle) CharType(io.trino.spi.type.CharType) UnknownType(io.trino.type.UnknownType) TypeSignatureTranslator.toSqlType(io.trino.sql.analyzer.TypeSignatureTranslator.toSqlType) Type(io.trino.spi.type.Type) VarcharType(io.trino.spi.type.VarcharType) Scope(io.trino.sql.analyzer.Scope) ComparisonExpression(io.trino.sql.tree.ComparisonExpression) CoalesceExpression(io.trino.sql.tree.CoalesceExpression) IfExpression(io.trino.sql.tree.IfExpression) Expression(io.trino.sql.tree.Expression) ProjectNode(io.trino.sql.planner.plan.ProjectNode) NullLiteral(io.trino.sql.tree.NullLiteral)
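
The heart of getInsertPlan is the column-matching loop: each visible target column either forwards the matching query output symbol, wraps it in a coercing cast when the types differ, or is filled with a typed NULL when the query does not supply it (unless the connector supports missing columns on insert). The following is a simplified, self-contained sketch of that decision logic, assuming plain strings stand in for columns, symbols, and types; it is not Trino's planner API.

import java.util.LinkedHashMap;
import java.util.Map;

public class InsertColumnMatchingSketch {
    public static void main(String[] args) {
        // target table columns: name -> declared type
        Map<String, String> tableColumns = new LinkedHashMap<>();
        tableColumns.put("id", "bigint");
        tableColumns.put("name", "varchar");
        tableColumns.put("created", "timestamp");

        // columns (and types) actually produced by the INSERT's source query
        Map<String, String> queryColumns = Map.of("id", "integer", "name", "varchar");

        Map<String, String> assignments = new LinkedHashMap<>();
        for (Map.Entry<String, String> column : tableColumns.entrySet()) {
            String name = column.getKey();
            String tableType = column.getValue();
            String queryType = queryColumns.get(name);
            if (queryType == null) {
                assignments.put(name, "CAST(NULL AS " + tableType + ")");          // column not supplied
            }
            else if (queryType.equals(tableType)) {
                assignments.put(name, name);                                        // forward as-is
            }
            else {
                assignments.put(name, "CAST(" + name + " AS " + tableType + ")");   // coerce to table type
            }
        }
        assignments.forEach((k, v) -> System.out.println(k + " := " + v));
    }
}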

Example 3 with Analysis

Use of io.trino.sql.analyzer.Analysis in project trino by trinodb.

From the class LogicalPlanner, the createTableCreationPlan method:

private RelationPlan createTableCreationPlan(Analysis analysis, Query query) {
    Analysis.Create create = analysis.getCreate().orElseThrow();
    QualifiedObjectName destination = create.getDestination().orElseThrow();
    RelationPlan plan = createRelationPlan(analysis, query);
    if (!create.isCreateTableAsSelectWithData()) {
        PlanNode root = new LimitNode(idAllocator.getNextId(), plan.getRoot(), 0L, false);
        plan = new RelationPlan(root, plan.getScope(), plan.getFieldMappings(), Optional.empty());
    }
    ConnectorTableMetadata tableMetadata = create.getMetadata().orElseThrow();
    Optional<TableLayout> newTableLayout = create.getLayout();
    // todo this filter is redundant
    List<String> columnNames = tableMetadata.getColumns().stream()
            .filter(column -> !column.isHidden())
            .map(ColumnMetadata::getName)
            .collect(toImmutableList());
    TableStatisticsMetadata statisticsMetadata = metadata.getStatisticsCollectionMetadataForWrite(session, destination.getCatalogName(), tableMetadata);
    return createTableWriterPlan(analysis, plan.getRoot(), visibleFields(plan), new CreateReference(destination.getCatalogName(), tableMetadata, newTableLayout), columnNames, tableMetadata.getColumns(), newTableLayout, statisticsMetadata);
}
Also used : TableStatisticsMetadata(io.trino.spi.statistics.TableStatisticsMetadata) MetadataUtil.createQualifiedObjectName(io.trino.metadata.MetadataUtil.createQualifiedObjectName) QualifiedObjectName(io.trino.metadata.QualifiedObjectName) PlanNode(io.trino.sql.planner.plan.PlanNode) LimitNode(io.trino.sql.planner.plan.LimitNode) Analysis(io.trino.sql.analyzer.Analysis) CreateReference(io.trino.sql.planner.plan.TableWriterNode.CreateReference) TableLayout(io.trino.metadata.TableLayout) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata)
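
When the statement is CREATE TABLE ... AS SELECT ... WITH NO DATA, the plan for the source query is wrapped in a LimitNode with a limit of 0, so the table gets the query's schema but no rows are written. A small, self-contained analogy in plain JDK streams is shown below; it only illustrates the idea that a limit of 0 preserves the output shape while producing nothing, and is not Trino's LimitNode.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class WithNoDataSketch {
    public static void main(String[] args) {
        Stream<String> sourceQuery = Stream.of("row1", "row2", "row3");
        // analogous to WITH NO DATA: same element type ("schema"), zero rows
        List<String> materialized = sourceQuery.limit(0).collect(Collectors.toList());
        System.out.println("rows written: " + materialized.size()); // prints 0
    }
}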

Example 4 with Analysis

Use of io.trino.sql.analyzer.Analysis in project trino by trinodb.

From the class LogicalPlanner, the createTableWriterPlan method:

private RelationPlan createTableWriterPlan(Analysis analysis, PlanNode source, List<Symbol> symbols, WriterTarget target, List<String> columnNames, List<ColumnMetadata> columnMetadataList, Optional<TableLayout> writeTableLayout, TableStatisticsMetadata statisticsMetadata) {
    Optional<PartitioningScheme> partitioningScheme = Optional.empty();
    Optional<PartitioningScheme> preferredPartitioningScheme = Optional.empty();
    if (writeTableLayout.isPresent()) {
        List<Symbol> partitionFunctionArguments = new ArrayList<>();
        writeTableLayout.get().getPartitionColumns().stream().mapToInt(columnNames::indexOf).mapToObj(symbols::get).forEach(partitionFunctionArguments::add);
        List<Symbol> outputLayout = new ArrayList<>(symbols);
        Optional<PartitioningHandle> partitioningHandle = writeTableLayout.get().getPartitioning();
        if (partitioningHandle.isPresent()) {
            partitioningScheme = Optional.of(new PartitioningScheme(Partitioning.create(partitioningHandle.get(), partitionFunctionArguments), outputLayout));
        } else {
            // empty connector partitioning handle means evenly partitioning on partitioning columns
            preferredPartitioningScheme = Optional.of(new PartitioningScheme(Partitioning.create(FIXED_HASH_DISTRIBUTION, partitionFunctionArguments), outputLayout));
        }
    }
    verify(columnNames.size() == symbols.size(), "columnNames.size() != symbols.size(): %s and %s", columnNames, symbols);
    Map<String, Symbol> columnToSymbolMap = zip(columnNames.stream(), symbols.stream(), SimpleImmutableEntry::new).collect(toImmutableMap(Entry::getKey, Entry::getValue));
    Set<Symbol> notNullColumnSymbols = columnMetadataList.stream().filter(column -> !column.isNullable()).map(ColumnMetadata::getName).map(columnToSymbolMap::get).collect(toImmutableSet());
    if (!statisticsMetadata.isEmpty()) {
        TableStatisticAggregation result = statisticsAggregationPlanner.createStatisticsAggregation(statisticsMetadata, columnToSymbolMap);
        StatisticAggregations.Parts aggregations = result.getAggregations().createPartialAggregations(symbolAllocator, plannerContext);
        // partial aggregation is run within the TableWriteOperator to calculate the statistics for
        // the data consumed by the TableWriteOperator
        // final aggregation is run within the TableFinishOperator to summarize collected statistics
        // by the partial aggregation from all of the writer nodes
        StatisticAggregations partialAggregation = aggregations.getPartialAggregation();
        TableFinishNode commitNode = new TableFinishNode(idAllocator.getNextId(), new TableWriterNode(idAllocator.getNextId(), source, target, symbolAllocator.newSymbol("partialrows", BIGINT), symbolAllocator.newSymbol("fragment", VARBINARY), symbols, columnNames, notNullColumnSymbols, partitioningScheme, preferredPartitioningScheme, Optional.of(partialAggregation), Optional.of(result.getDescriptor().map(aggregations.getMappings()::get))), target, symbolAllocator.newSymbol("rows", BIGINT), Optional.of(aggregations.getFinalAggregation()), Optional.of(result.getDescriptor()));
        return new RelationPlan(commitNode, analysis.getRootScope(), commitNode.getOutputSymbols(), Optional.empty());
    }
    TableFinishNode commitNode = new TableFinishNode(idAllocator.getNextId(), new TableWriterNode(idAllocator.getNextId(), source, target, symbolAllocator.newSymbol("partialrows", BIGINT), symbolAllocator.newSymbol("fragment", VARBINARY), symbols, columnNames, notNullColumnSymbols, partitioningScheme, preferredPartitioningScheme, Optional.empty(), Optional.empty()), target, symbolAllocator.newSymbol("rows", BIGINT), Optional.empty(), Optional.empty());
    return new RelationPlan(commitNode, analysis.getRootScope(), commitNode.getOutputSymbols(), Optional.empty());
}
Also used : ColumnMetadata(io.trino.spi.connector.ColumnMetadata) TableStatisticAggregation(io.trino.sql.planner.StatisticsAggregationPlanner.TableStatisticAggregation) ArrayList(java.util.ArrayList) TableFinishNode(io.trino.sql.planner.plan.TableFinishNode) StatisticAggregations(io.trino.sql.planner.plan.StatisticAggregations) TableWriterNode(io.trino.sql.planner.plan.TableWriterNode)
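
createTableWriterPlan builds a column-name-to-symbol map by zipping the ordered column-name list with the ordered symbol list (Guava's Streams.zip plus toImmutableMap), after verifying that the two lists have the same size. The sketch below reproduces that zip-to-map step in isolation, assuming Guava is on the classpath; plain strings stand in for Trino Symbols.

import static com.google.common.collect.ImmutableMap.toImmutableMap;

import com.google.common.collect.Streams;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

public class ColumnSymbolZipSketch {
    public static void main(String[] args) {
        List<String> columnNames = List.of("id", "name", "created");
        List<String> symbols = List.of("id_0", "name_1", "created_2");

        // pair up the i-th column name with the i-th output symbol, preserving order
        Map<String, String> columnToSymbol =
                Streams.zip(columnNames.stream(), symbols.stream(), SimpleImmutableEntry::new)
                        .collect(toImmutableMap(Entry::getKey, Entry::getValue));

        System.out.println(columnToSymbol); // {id=id_0, name=name_1, created=created_2}
    }
}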

Example 5 with Analysis

Use of io.trino.sql.analyzer.Analysis in project trino by trinodb.

From the class QueryPlanner, the planGroupingSets method:

private GroupingSetsPlan planGroupingSets(PlanBuilder subPlan, QuerySpecification node, GroupingSetAnalysis groupingSetAnalysis) {
    Map<Symbol, Symbol> groupingSetMappings = new LinkedHashMap<>();
    // Compute a set of artificial columns that will contain the values of the original columns
    // filtered by whether the column is included in the grouping set
    // This will become the basis for the scope for any column references
    Symbol[] fields = new Symbol[subPlan.getTranslations().getFieldSymbols().size()];
    for (FieldId field : groupingSetAnalysis.getAllFields()) {
        Symbol input = subPlan.getTranslations().getFieldSymbols().get(field.getFieldIndex());
        Symbol output = symbolAllocator.newSymbol(input, "gid");
        fields[field.getFieldIndex()] = output;
        groupingSetMappings.put(output, input);
    }
    Map<ScopeAware<Expression>, Symbol> complexExpressions = new HashMap<>();
    for (Expression expression : groupingSetAnalysis.getComplexExpressions()) {
        if (!complexExpressions.containsKey(scopeAwareKey(expression, analysis, subPlan.getScope()))) {
            Symbol input = subPlan.translate(expression);
            Symbol output = symbolAllocator.newSymbol(expression, analysis.getType(expression), "gid");
            complexExpressions.put(scopeAwareKey(expression, analysis, subPlan.getScope()), output);
            groupingSetMappings.put(output, input);
        }
    }
    // For the purpose of "distinct", we need to canonicalize column references that may have varying
    // syntactic forms (e.g., "t.a" vs "a"). Thus we need to enumerate grouping sets based on the underlying
    // fieldId associated with each column reference expression.
    // The catch is that simple group-by expressions can be arbitrary expressions (this is a departure from the SQL specification).
    // But, they don't affect the number of grouping sets or the behavior of "distinct" . We can compute all the candidate
    // grouping sets in terms of fieldId, dedup as appropriate and then cross-join them with the complex expressions.
    // This tracks the grouping sets before complex expressions are considered.
    // It's also used to compute the descriptors needed to implement grouping()
    List<Set<FieldId>> columnOnlyGroupingSets = enumerateGroupingSets(groupingSetAnalysis);
    if (node.getGroupBy().isPresent() && node.getGroupBy().get().isDistinct()) {
        columnOnlyGroupingSets = columnOnlyGroupingSets.stream().distinct().collect(toImmutableList());
    }
    // translate from FieldIds to Symbols
    List<List<Symbol>> sets = columnOnlyGroupingSets.stream().map(set -> set.stream().map(FieldId::getFieldIndex).map(index -> fields[index]).collect(toImmutableList())).collect(toImmutableList());
    // combine (cartesian product) with complex expressions
    List<List<Symbol>> groupingSets = sets.stream().map(set -> ImmutableList.<Symbol>builder().addAll(set).addAll(complexExpressions.values()).build()).collect(toImmutableList());
    // Generate GroupIdNode (multiple grouping sets) or ProjectNode (single grouping set)
    PlanNode groupId;
    Optional<Symbol> groupIdSymbol = Optional.empty();
    if (groupingSets.size() > 1) {
        groupIdSymbol = Optional.of(symbolAllocator.newSymbol("groupId", BIGINT));
        groupId = new GroupIdNode(idAllocator.getNextId(), subPlan.getRoot(), groupingSets, groupingSetMappings, subPlan.getRoot().getOutputSymbols(), groupIdSymbol.get());
    } else {
        Assignments.Builder assignments = Assignments.builder();
        assignments.putIdentities(subPlan.getRoot().getOutputSymbols());
        groupingSetMappings.forEach((key, value) -> assignments.put(key, value.toSymbolReference()));
        groupId = new ProjectNode(idAllocator.getNextId(), subPlan.getRoot(), assignments.build());
    }
    subPlan = new PlanBuilder(subPlan.getTranslations().withNewMappings(complexExpressions, Arrays.asList(fields)), groupId);
    return new GroupingSetsPlan(subPlan, columnOnlyGroupingSets, groupingSets, groupIdSymbol);
}
Also used : Set(java.util.Set) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) AggregationNode.singleGroupingSet(io.trino.sql.planner.plan.AggregationNode.singleGroupingSet) ImmutableSet(com.google.common.collect.ImmutableSet) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) Assignments(io.trino.sql.planner.plan.Assignments) PlanBuilder.newPlanBuilder(io.trino.sql.planner.PlanBuilder.newPlanBuilder) PlanNode(io.trino.sql.planner.plan.PlanNode) SelectExpression(io.trino.sql.analyzer.Analysis.SelectExpression) ComparisonExpression(io.trino.sql.tree.ComparisonExpression) IfExpression(io.trino.sql.tree.IfExpression) Expression(io.trino.sql.tree.Expression) LambdaExpression(io.trino.sql.tree.LambdaExpression) GroupIdNode(io.trino.sql.planner.plan.GroupIdNode) FieldId(io.trino.sql.analyzer.FieldId) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) ProjectNode(io.trino.sql.planner.plan.ProjectNode)
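
After the column-only grouping sets are enumerated over field IDs, planGroupingSets deduplicates them when GROUP BY DISTINCT is used and then appends every complex grouping expression to each remaining set. The sketch below is a self-contained, JDK-only illustration of that shape of computation, assuming strings stand in for symbols; it is not the planner itself.

import java.util.ArrayList;
import java.util.List;

public class GroupingSetsSketch {
    public static void main(String[] args) {
        // grouping sets over field symbols, e.g. GROUPING SETS ((a), (a), (a, b))
        List<List<String>> columnOnlySets = List.of(List.of("a"), List.of("a"), List.of("a", "b"));
        List<String> complexExpressions = List.of("expr$gid");   // e.g. GROUP BY a + 1

        boolean distinct = true;
        List<List<String>> sets = distinct
                ? columnOnlySets.stream().distinct().toList()    // dedupe for GROUP BY DISTINCT
                : columnOnlySets;

        // every complex expression is added to every grouping set
        List<List<String>> groupingSets = new ArrayList<>();
        for (List<String> set : sets) {
            List<String> combined = new ArrayList<>(set);
            combined.addAll(complexExpressions);
            groupingSets.add(combined);
        }
        System.out.println(groupingSets); // [[a, expr$gid], [a, b, expr$gid]]
    }
}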

Aggregations

Analysis (io.trino.sql.analyzer.Analysis): 17 usages
Expression (io.trino.sql.tree.Expression): 12 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 11 usages
Session (io.trino.Session): 11 usages
TableHandle (io.trino.metadata.TableHandle): 11 usages
Query (io.trino.sql.tree.Query): 11 usages
List (java.util.List): 11 usages
Objects.requireNonNull (java.util.Objects.requireNonNull): 11 usages
Optional (java.util.Optional): 11 usages
PlannerContext (io.trino.sql.PlannerContext): 10 usages
PlanBuilder.newPlanBuilder (io.trino.sql.planner.PlanBuilder.newPlanBuilder): 10 usages
PlanNode (io.trino.sql.planner.plan.PlanNode): 10 usages
ComparisonExpression (io.trino.sql.tree.ComparisonExpression): 10 usages
NodeRef (io.trino.sql.tree.NodeRef): 10 usages
Map (java.util.Map): 10 usages
ImmutableList (com.google.common.collect.ImmutableList): 9 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 9 usages
ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap): 9 usages
ImmutableSet.toImmutableSet (com.google.common.collect.ImmutableSet.toImmutableSet): 9 usages
ColumnHandle (io.trino.spi.connector.ColumnHandle): 9 usages