
Example 1 with EvaluatingNormalizer

use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.

In the class WhereClauseAnalyzer, the method resolvePartitions:

public static PartitionResult resolvePartitions(Symbol query, DocTableInfo tableInfo, CoordinatorTxnCtx coordinatorTxnCtx, NodeContext nodeCtx) {
    assert tableInfo.isPartitioned() : "table must be partitioned in order to resolve partitions";
    assert !tableInfo.partitions().isEmpty() : "table must have at least one partition";
    PartitionReferenceResolver partitionReferenceResolver = preparePartitionResolver(tableInfo.partitionedByColumns());
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(nodeCtx, RowGranularity.PARTITION, partitionReferenceResolver, null);
    Symbol normalized;
    Map<Symbol, List<Literal>> queryPartitionMap = new HashMap<>();
    for (PartitionName partitionName : tableInfo.partitions()) {
        for (PartitionExpression partitionExpression : partitionReferenceResolver.expressions()) {
            partitionExpression.setNextRow(partitionName);
        }
        normalized = normalizer.normalize(query, coordinatorTxnCtx);
        assert normalized != null : "normalizing a query must not return null";
        if (normalized.equals(query)) {
            // no partition columns inside the where clause
            return new PartitionResult(query, Collections.emptyList());
        }
        boolean canMatch = WhereClause.canMatch(normalized);
        if (canMatch) {
            List<Literal> partitions = queryPartitionMap.get(normalized);
            if (partitions == null) {
                partitions = new ArrayList<>();
                queryPartitionMap.put(normalized, partitions);
            }
            partitions.add(Literal.of(partitionName.asIndexName()));
        }
    }
    if (queryPartitionMap.size() == 1) {
        Map.Entry<Symbol, List<Literal>> entry = Iterables.getOnlyElement(queryPartitionMap.entrySet());
        return new PartitionResult(entry.getKey(), Lists2.map(entry.getValue(), literal -> nullOrString(literal.value())));
    } else if (queryPartitionMap.size() > 0) {
        PartitionResult partitionResult = tieBreakPartitionQueries(normalizer, queryPartitionMap, coordinatorTxnCtx);
        // if tie-breaking fails, fall back to all partitions;
        // the query will then be evaluated correctly within each partition to see whether it matches or not
        return partitionResult == null
            ? new PartitionResult(query, Lists2.map(tableInfo.partitions(), PartitionName::asIndexName))
            : partitionResult;
    } else {
        return new PartitionResult(Literal.BOOLEAN_FALSE, Collections.emptyList());
    }
}
Also used : Tuple(io.crate.common.collections.Tuple) ScalarsAndRefsToTrue(io.crate.analyze.ScalarsAndRefsToTrue) HashMap(java.util.HashMap) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) PartitionName(io.crate.metadata.PartitionName) ArrayList(java.util.ArrayList) Map(java.util.Map) StringUtils.nullOrString(io.crate.common.StringUtils.nullOrString) Nullable(javax.annotation.Nullable) DocTableInfo(io.crate.metadata.doc.DocTableInfo) NodeContext(io.crate.metadata.NodeContext) WhereClause(io.crate.analyze.WhereClause) PartitionReferenceResolver(io.crate.metadata.PartitionReferenceResolver) Reference(io.crate.metadata.Reference) Iterables(io.crate.common.collections.Iterables) Lists2(io.crate.common.collections.Lists2) List(java.util.List) RowGranularity(io.crate.metadata.RowGranularity) DocTableRelation(io.crate.analyze.relations.DocTableRelation) Literal(io.crate.expression.symbol.Literal) Symbol(io.crate.expression.symbol.Symbol) AbstractTableRelation(io.crate.analyze.relations.AbstractTableRelation) PartitionExpression(io.crate.expression.reference.partitioned.PartitionExpression) Collections(java.util.Collections) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx)
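
For context, a minimal call-site sketch (the surrounding variables are assumed, not taken from the crate sources): resolvePartitions receives the WHERE clause symbol of a partitioned table and either simplifies it per partition or falls back to the full query.

// Hypothetical call site; query, tableInfo, coordinatorTxnCtx and nodeCtx are assumed to be in scope.
PartitionResult result = WhereClauseAnalyzer.resolvePartitions(query, tableInfo, coordinatorTxnCtx, nodeCtx);
// Per the method above, the result pairs a (possibly simplified) query with the index names of the
// partitions that can still match; Literal.BOOLEAN_FALSE with an empty list means no partition matches.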

Example 2 with EvaluatingNormalizer

use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.

In the class Count, the method build:

@Override
public ExecutionPlan build(PlannerContext plannerContext, Set<PlanHint> planHints, ProjectionBuilder projectionBuilder, int limit, int offset, @Nullable OrderBy order, @Nullable Integer pageSizeHint, Row params, SubQueryResults subQueryResults) {
    var normalizer = new EvaluatingNormalizer(plannerContext.nodeContext(), RowGranularity.CLUSTER, null, tableRelation);
    var binder = new SubQueryAndParamBinder(params, subQueryResults).andThen(x -> normalizer.normalize(x, plannerContext.transactionContext()));
    // bind all parameters and possible subQuery values and re-analyze the query
    // (could result in a NO_MATCH, routing could've changed, etc).
    WhereClause boundWhere = WhereClauseAnalyzer.resolvePartitions(where.map(binder), tableRelation, plannerContext.transactionContext(), plannerContext.nodeContext());
    Routing routing = plannerContext.allocateRouting(tableRelation.tableInfo(), boundWhere, RoutingProvider.ShardSelection.ANY, plannerContext.transactionContext().sessionContext());
    CountPhase countPhase = new CountPhase(plannerContext.nextExecutionPhaseId(), routing, Optimizer.optimizeCasts(boundWhere.queryOrFallback(), plannerContext), DistributionInfo.DEFAULT_BROADCAST);
    MergePhase mergePhase = new MergePhase(plannerContext.jobId(), plannerContext.nextExecutionPhaseId(), COUNT_PHASE_NAME, countPhase.nodeIds().size(), 1, Collections.singletonList(plannerContext.handlerNode()), Collections.singletonList(DataTypes.LONG), Collections.singletonList(MergeCountProjection.INSTANCE), DistributionInfo.DEFAULT_BROADCAST, null);
    return new CountPlan(countPhase, mergePhase);
}
Also used : MergePhase(io.crate.execution.dsl.phases.MergePhase) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) WhereClause(io.crate.analyze.WhereClause) Routing(io.crate.metadata.Routing) CountPhase(io.crate.execution.dsl.phases.CountPhase) CountPlan(io.crate.planner.node.dql.CountPlan)
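
The bind-then-normalize chain above is worth isolating; a minimal sketch of the same composition, where the input symbol is assumed for illustration:

// SubQueryAndParamBinder composes with the normalizer via java.util.function.Function.andThen,
// exactly as in the example above: bind parameters and sub-query values first, then fold constants.
Function<Symbol, Symbol> bindAndNormalize =
    new SubQueryAndParamBinder(params, subQueryResults)
        .andThen(s -> normalizer.normalize(s, plannerContext.transactionContext()));
Symbol bound = bindAndNormalize.apply(whereSymbol);  // whereSymbol: an assumed, still unbound WHERE clause symbol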

Example 3 with EvaluatingNormalizer

use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.

In the class NodeStatsCollectSource, the method filterNodes:

static Collection<DiscoveryNode> filterNodes(Collection<DiscoveryNode> nodes, Symbol predicate, NodeContext nodeCtx) {
    var expressions = SysNodesTableInfo.create().expressions();
    var nameExpr = expressions.get(SysNodesTableInfo.Columns.NAME).create();
    var idExpr = expressions.get(SysNodesTableInfo.Columns.ID).create();
    MapBackedRefResolver referenceResolver = new MapBackedRefResolver(Map.of(SysNodesTableInfo.Columns.NAME, nameExpr, SysNodesTableInfo.Columns.ID, idExpr));
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(nodeCtx, RowGranularity.DOC, referenceResolver, null);
    List<DiscoveryNode> newNodes = new ArrayList<>();
    for (DiscoveryNode node : nodes) {
        String nodeId = node.getId();
        NodeStatsContext statsContext = new NodeStatsContext(nodeId, node.getName());
        nameExpr.setNextRow(statsContext);
        idExpr.setNextRow(statsContext);
        Symbol normalized = normalizer.normalize(predicate, CoordinatorTxnCtx.systemTransactionContext());
        if (normalized.equals(predicate)) {
            // the where clause references no locally resolvable sys.nodes columns
            return nodes;
        }
        if (WhereClause.canMatch(normalized)) {
            newNodes.add(node);
        }
    }
    return newNodes;
}
Also used : DiscoveryNode(org.elasticsearch.cluster.node.DiscoveryNode) MapBackedRefResolver(io.crate.metadata.MapBackedRefResolver) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) Symbol(io.crate.expression.symbol.Symbol) ArrayList(java.util.ArrayList) NodeStatsContext(io.crate.expression.reference.sys.node.NodeStatsContext)
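
A hedged usage sketch (the node collection and predicate symbol are assumed): the method either narrows the node set or returns it untouched when the predicate does not reference the locally resolvable name/id columns.

// nodes: Collection<DiscoveryNode> obtained elsewhere; predicate: the WHERE clause Symbol against sys.nodes
Collection<DiscoveryNode> matching = NodeStatsCollectSource.filterNodes(nodes, predicate, nodeCtx);
// matching is the original collection (same instance) when normalization left the predicate unchanged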

Example 4 with EvaluatingNormalizer

use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.

In the class RoutedCollectPhase, the method normalize:

/**
 * Normalizes the symbols of this node with the given normalizer.
 *
 * @return a normalized node, or {@code this} if no changes occurred
 */
public RoutedCollectPhase normalize(EvaluatingNormalizer normalizer, @Nonnull TransactionContext txnCtx) {
    RoutedCollectPhase result = this;
    Function<Symbol, Symbol> normalize = s -> normalizer.normalize(s, txnCtx);
    List<Symbol> newToCollect = Lists2.map(toCollect, normalize);
    boolean changed = !newToCollect.equals(toCollect);
    Symbol newWhereClause = normalizer.normalize(where, txnCtx);
    OrderBy orderBy = this.orderBy;
    if (orderBy != null) {
        orderBy = orderBy.map(normalize);
    }
    changed = changed || newWhereClause != where || orderBy != this.orderBy;
    if (changed) {
        result = new RoutedCollectPhase(jobId(), phaseId(), name(), routing, maxRowGranularity, newToCollect, projections, newWhereClause, distributionInfo);
        result.nodePageSizeHint(nodePageSizeHint);
        result.orderBy(orderBy);
    }
    return result;
}
Also used : TransactionContext(io.crate.metadata.TransactionContext) StreamOutput(org.elasticsearch.common.io.stream.StreamOutput) Set(java.util.Set) IOException(java.io.IOException) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) UUID(java.util.UUID) Function(java.util.function.Function) Lists2(io.crate.common.collections.Lists2) SymbolVisitors(io.crate.expression.symbol.SymbolVisitors) Objects(java.util.Objects) List(java.util.List) OrderBy(io.crate.analyze.OrderBy) RowGranularity(io.crate.metadata.RowGranularity) Routing(io.crate.metadata.Routing) Projection(io.crate.execution.dsl.projection.Projection) Symbol(io.crate.expression.symbol.Symbol) Symbols(io.crate.expression.symbol.Symbols) StreamInput(org.elasticsearch.common.io.stream.StreamInput) Paging(io.crate.data.Paging) SelectSymbol(io.crate.expression.symbol.SelectSymbol) ScopedSymbol(io.crate.expression.symbol.ScopedSymbol) DistributionInfo(io.crate.planner.distribution.DistributionInfo) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
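
A minimal call-site sketch, assuming a cluster-granularity normalizer can be built here without a reference or field resolver (that combination, and the surrounding variables, are assumptions, not taken from the crate sources):

EvaluatingNormalizer clusterNormalizer =
    new EvaluatingNormalizer(nodeCtx, RowGranularity.CLUSTER, null, null);  // assumed: both resolvers omitted
RoutedCollectPhase maybeNormalized = collectPhase.normalize(clusterNormalizer, txnCtx);
if (maybeNormalized == collectPhase) {
    // same instance returned: nothing in toCollect, where or orderBy changed
}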

Example 5 with EvaluatingNormalizer

use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.

In the class CopyAnalyzer, the method analyzeCopyFrom:

AnalyzedCopyFrom analyzeCopyFrom(CopyFrom<Expression> node, ParamTypeHints paramTypeHints, CoordinatorTxnCtx txnCtx) {
    DocTableInfo tableInfo = (DocTableInfo) schemas.resolveTableInfo(node.table().getName(), Operation.INSERT, txnCtx.sessionContext().sessionUser(), txnCtx.sessionContext().searchPath());
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    var exprAnalyzerWithoutFields = new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, FieldProvider.UNSUPPORTED, null);
    var exprAnalyzerWithFieldsAsString = new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var normalizer = new EvaluatingNormalizer(nodeCtx, RowGranularity.CLUSTER, null, new TableRelation(tableInfo));
    Table<Symbol> table = node.table().map(t -> exprAnalyzerWithFieldsAsString.convert(t, exprCtx));
    GenericProperties<Symbol> properties = node.properties().map(t -> exprAnalyzerWithoutFields.convert(t, exprCtx));
    Symbol uri = exprAnalyzerWithoutFields.convert(node.path(), exprCtx);
    if (node.isReturnSummary()) {
        return new AnalyzedCopyFromReturnSummary(tableInfo, table, properties, normalizer.normalize(uri, txnCtx));
    } else {
        return new AnalyzedCopyFrom(tableInfo, table, properties, normalizer.normalize(uri, txnCtx));
    }
}
Also used : DocTableInfo(io.crate.metadata.doc.DocTableInfo) ExpressionAnalysisContext(io.crate.analyze.expressions.ExpressionAnalysisContext) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) Symbol(io.crate.expression.symbol.Symbol) ExpressionAnalyzer(io.crate.analyze.expressions.ExpressionAnalyzer) TableRelation(io.crate.analyze.relations.TableRelation) DocTableRelation(io.crate.analyze.relations.DocTableRelation)
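
As a follow-up, a hedged sketch of what the cluster-granularity normalizer contributes here: a URI built from constant expressions should collapse to a single literal before execution (the concrete expression and the instanceof check are illustrative assumptions):

// e.g. a path written as 'file:///tmp/' || 'data.json' in SQL arrives as a function Symbol;
// for a purely constant expression, normalization is expected to fold it down to a Literal.
Symbol normalizedUri = normalizer.normalize(uri, txnCtx);
if (normalizedUri instanceof Literal) {
    // the COPY FROM statement can proceed with a concrete, already-evaluated URI
}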

Aggregations

EvaluatingNormalizer (io.crate.expression.eval.EvaluatingNormalizer) 24
Symbol (io.crate.expression.symbol.Symbol) 16
DocTableRelation (io.crate.analyze.relations.DocTableRelation) 9
CoordinatorTxnCtx (io.crate.metadata.CoordinatorTxnCtx) 8
Test (org.junit.Test) 7
RowGranularity (io.crate.metadata.RowGranularity) 6
DocTableInfo (io.crate.metadata.doc.DocTableInfo) 6
WhereClause (io.crate.analyze.WhereClause) 5
ExpressionAnalysisContext (io.crate.analyze.expressions.ExpressionAnalysisContext) 5
ExpressionAnalyzer (io.crate.analyze.expressions.ExpressionAnalyzer) 5
Literal (io.crate.expression.symbol.Literal) 5
NodeContext (io.crate.metadata.NodeContext) 5
Routing (io.crate.metadata.Routing) 5
ArrayList (java.util.ArrayList) 5
Lists2 (io.crate.common.collections.Lists2) 4
Nullable (javax.annotation.Nullable) 4
AbstractTableRelation (io.crate.analyze.relations.AbstractTableRelation) 3
AnalyzedRelation (io.crate.analyze.relations.AnalyzedRelation) 3
FullQualifiedNameFieldProvider (io.crate.analyze.relations.FullQualifiedNameFieldProvider) 3
RoutedCollectPhase (io.crate.execution.dsl.phases.RoutedCollectPhase) 3