Search in sources :

Example 31 with CoordinatorTxnCtx

use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.

From the class WhereClauseAnalyzer, the method tieBreakPartitionQueries:

@Nullable
private static PartitionResult tieBreakPartitionQueries(EvaluatingNormalizer normalizer, Map<Symbol, List<Literal>> queryPartitionMap, CoordinatorTxnCtx coordinatorTxnCtx) throws UnsupportedOperationException {
    /*
         * Got multiple normalized queries which all could match.
         * This might be the case if one partition resolved to null
         *
         * e.g.
         *
         *  p = 1 and x = 2
         *
         * might lead to
         *
         *  null and x = 2
         *  true and x = 2
         *
         * At this point it is unknown if they really match.
         * In order to figure out if they could potentially match all conditions involving references are now set to true
         *
         *  null and true   -> can't match
         *  true and true   -> can match, can use this query + partition
         *
         * If there is still more than 1 query that can match it's not possible to execute the query :(
         */
    List<Tuple<Symbol, List<Literal>>> matchCandidates = new ArrayList<>();
    for (Map.Entry<Symbol, List<Literal>> candidate : queryPartitionMap.entrySet()) {
        // Replace every reference/scalar with TRUE, then constant-fold the result.
        Symbol folded = normalizer.normalize(ScalarsAndRefsToTrue.rewrite(candidate.getKey()), coordinatorTxnCtx);
        assert folded instanceof Literal : "after normalization and replacing all reference occurrences with true there must only be a literal left";
        Object foldedValue = ((Literal) folded).value();
        // Only TRUE survives; NULL (tri-valued logic) and FALSE are ruled out.
        boolean couldMatch = foldedValue != null && (Boolean) foldedValue;
        if (couldMatch) {
            matchCandidates.add(new Tuple<>(candidate.getKey(), candidate.getValue()));
        }
    }
    if (matchCandidates.size() != 1) {
        // Ambiguous (or no) winner -> tie-break failed, caller must handle null.
        return null;
    }
    Tuple<Symbol, List<Literal>> winner = matchCandidates.get(0);
    return new PartitionResult(winner.v1(), Lists2.map(winner.v2(), literal -> nullOrString(literal.value())));
}
Also used : Tuple(io.crate.common.collections.Tuple) ScalarsAndRefsToTrue(io.crate.analyze.ScalarsAndRefsToTrue) HashMap(java.util.HashMap) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) PartitionName(io.crate.metadata.PartitionName) ArrayList(java.util.ArrayList) Map(java.util.Map) StringUtils.nullOrString(io.crate.common.StringUtils.nullOrString) Nullable(javax.annotation.Nullable) DocTableInfo(io.crate.metadata.doc.DocTableInfo) NodeContext(io.crate.metadata.NodeContext) WhereClause(io.crate.analyze.WhereClause) PartitionReferenceResolver(io.crate.metadata.PartitionReferenceResolver) Reference(io.crate.metadata.Reference) Iterables(io.crate.common.collections.Iterables) Lists2(io.crate.common.collections.Lists2) List(java.util.List) RowGranularity(io.crate.metadata.RowGranularity) DocTableRelation(io.crate.analyze.relations.DocTableRelation) Literal(io.crate.expression.symbol.Literal) Symbol(io.crate.expression.symbol.Symbol) AbstractTableRelation(io.crate.analyze.relations.AbstractTableRelation) PartitionExpression(io.crate.expression.reference.partitioned.PartitionExpression) Collections(java.util.Collections) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) Symbol(io.crate.expression.symbol.Symbol) ArrayList(java.util.ArrayList) Literal(io.crate.expression.symbol.Literal) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) Tuple(io.crate.common.collections.Tuple) Nullable(javax.annotation.Nullable)

Example 32 with CoordinatorTxnCtx

use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.

From the class ExpressionAnalyzerTest, the method testNonDeterministicFunctionsAlwaysNew:

@Test
public void testNonDeterministicFunctionsAlwaysNew() throws Exception {
    CoordinatorTxnCtx txnCtx = CoordinatorTxnCtx.systemTransactionContext();
    ExpressionAnalysisContext ctx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    String name = CoalesceFunction.NAME;
    // Allocate the same function twice with identical args, and once with a different arg.
    Symbol first = ExpressionAnalyzer.allocateFunction(name, List.of(Literal.BOOLEAN_FALSE), null, ctx, txnCtx, expressions.nodeCtx);
    Symbol second = ExpressionAnalyzer.allocateFunction(name, List.of(Literal.BOOLEAN_FALSE), null, ctx, txnCtx, expressions.nodeCtx);
    Symbol third = ExpressionAnalyzer.allocateFunction(name, List.of(Literal.BOOLEAN_TRUE), null, ctx, txnCtx, expressions.nodeCtx);
    // Every allocation yields a distinct instance ...
    assertThat(first, allOf(not(sameInstance(second)), not(sameInstance(third))));
    // ... but instances built from equal arguments compare equal.
    assertThat(first, is(equalTo(second)));
    assertThat(first, is(not(equalTo(third))));
}
Also used : CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) ParameterSymbol(io.crate.expression.symbol.ParameterSymbol) ScopedSymbol(io.crate.expression.symbol.ScopedSymbol) Symbol(io.crate.expression.symbol.Symbol) CrateDummyClusterServiceUnitTest(io.crate.test.integration.CrateDummyClusterServiceUnitTest) Test(org.junit.Test)

Example 33 with CoordinatorTxnCtx

use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.

From the class Session, the method quickExec:

/**
 * Runs a statement end-to-end in a single call, skipping the usual
 * parse/bind/execute/sync message sequence.
 * Unlike that sequence, this method is thread-safe.
 *
 * @param parse turns the raw statement text into a {@link Statement};
 *              callers may supply a caching parser here. Use
 *              {@link #quickExec(String, ResultReceiver, Row)} for the regular parser.
 */
public void quickExec(String statement, Function<String, Statement> parse, ResultReceiver<?> resultReceiver, Row params) {
    var txnCtx = new CoordinatorTxnCtx(sessionContext);
    Statement parsedStmt = parse.apply(statement);
    AnalyzedStatement analyzedStatement = analyzer.analyze(parsedStmt, sessionContext, ParamTypeHints.EMPTY);
    UUID jobId = UUIDs.dirtyUUID();
    var routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes());
    ClusterState clusterState = planner.currentClusterState();
    var plannerContext = new PlannerContext(clusterState, routingProvider, jobId, txnCtx, nodeCtx, 0, params);
    Plan plan;
    try {
        plan = planner.plan(analyzedStatement, plannerContext);
    } catch (Throwable t) {
        // Planning failed before execution started: record it in the jobs log and rethrow.
        jobsLogs.logPreExecutionFailure(jobId, statement, SQLExceptions.messageOf(t), sessionContext.sessionUser());
        throw t;
    }
    StatementClassifier.Classification classification = StatementClassifier.classify(plan);
    jobsLogs.logExecutionStart(jobId, statement, sessionContext.sessionUser(), classification);
    var jobsLogsUpdateListener = new JobsLogsUpdateListener(jobId, jobsLogs);
    if (analyzedStatement.isWriteOperation() == false) {
        // Read-only statements may be retried transparently on failure.
        resultReceiver = new RetryOnFailureResultReceiver(
            executor.clusterService(),
            clusterState,
            // clusterState at the time of the index check is used
            indexName -> clusterState.metadata().hasIndex(indexName),
            resultReceiver,
            jobId,
            (newJobId, retryResultReceiver) -> retryQuery(
                newJobId,
                analyzedStatement,
                routingProvider,
                new RowConsumerToResultReceiver(retryResultReceiver, 0, jobsLogsUpdateListener),
                params,
                txnCtx,
                nodeCtx));
    }
    var consumer = new RowConsumerToResultReceiver(resultReceiver, 0, jobsLogsUpdateListener);
    plan.execute(executor, plannerContext, consumer, params, SubQueryResults.EMPTY);
}
Also used : ParamTypeHints(io.crate.analyze.ParamTypeHints) RetryOnFailureResultReceiver(io.crate.protocols.postgres.RetryOnFailureResultReceiver) Analyzer(io.crate.analyze.Analyzer) DependencyCarrier(io.crate.planner.DependencyCarrier) ClusterState(org.elasticsearch.cluster.ClusterState) Relations(io.crate.analyze.Relations) RowN(io.crate.data.RowN) TransactionState(io.crate.protocols.postgres.TransactionState) Map(java.util.Map) JobsLogsUpdateListener(io.crate.protocols.postgres.JobsLogsUpdateListener) TableInfo(io.crate.metadata.table.TableInfo) NodeContext(io.crate.metadata.NodeContext) AnalyzedStatement(io.crate.analyze.AnalyzedStatement) UUIDs(org.elasticsearch.common.UUIDs) UUID(java.util.UUID) Lists2(io.crate.common.collections.Lists2) List(java.util.List) Logger(org.apache.logging.log4j.Logger) AnalyzedDiscard(io.crate.analyze.AnalyzedDiscard) Row(io.crate.data.Row) Symbol(io.crate.expression.symbol.Symbol) AnalyzedBegin(io.crate.analyze.AnalyzedBegin) SubQueryResults(io.crate.planner.operators.SubQueryResults) Statement(io.crate.sql.tree.Statement) VisibleForTesting(io.crate.common.annotations.VisibleForTesting) Row1(io.crate.data.Row1) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) AnalyzedCommit(io.crate.analyze.AnalyzedCommit) AccessControl(io.crate.auth.AccessControl) QueriedSelectRelation(io.crate.analyze.QueriedSelectRelation) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) Function(java.util.function.Function) JobsLogs(io.crate.execution.engine.collect.stats.JobsLogs) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Portal(io.crate.protocols.postgres.Portal) Symbols(io.crate.expression.symbol.Symbols) FormatCodes(io.crate.protocols.postgres.FormatCodes) SqlParser(io.crate.sql.parser.SqlParser) Nullable(javax.annotation.Nullable) Iterator(java.util.Iterator) RelationInfo(io.crate.metadata.RelationInfo) Target(io.crate.sql.tree.DiscardStatement.Target) DataType(io.crate.types.DataType) 
Planner(io.crate.planner.Planner) RoutingProvider(io.crate.metadata.RoutingProvider) RowConsumer(io.crate.data.RowConsumer) StatementClassifier(io.crate.planner.operators.StatementClassifier) AbstractTableRelation(io.crate.analyze.relations.AbstractTableRelation) PlannerContext(io.crate.planner.PlannerContext) Plan(io.crate.planner.Plan) AnalyzedRelation(io.crate.analyze.relations.AnalyzedRelation) ReadOnlyException(io.crate.exceptions.ReadOnlyException) SQLExceptions(io.crate.exceptions.SQLExceptions) LogManager(org.apache.logging.log4j.LogManager) Randomness(org.elasticsearch.common.Randomness) AnalyzedDeallocate(io.crate.analyze.AnalyzedDeallocate) RoutingProvider(io.crate.metadata.RoutingProvider) ClusterState(org.elasticsearch.cluster.ClusterState) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) AnalyzedStatement(io.crate.analyze.AnalyzedStatement) Statement(io.crate.sql.tree.Statement) Plan(io.crate.planner.Plan) StatementClassifier(io.crate.planner.operators.StatementClassifier) PlannerContext(io.crate.planner.PlannerContext) JobsLogsUpdateListener(io.crate.protocols.postgres.JobsLogsUpdateListener) RetryOnFailureResultReceiver(io.crate.protocols.postgres.RetryOnFailureResultReceiver) AnalyzedStatement(io.crate.analyze.AnalyzedStatement) UUID(java.util.UUID)

Example 34 with CoordinatorTxnCtx

use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.

From the class Session, the method bulkExec:

/**
 * Executes all deferred executions of one bulk statement with a single plan.
 *
 * @return a future that completes once all row counts were emitted to their
 *         result receivers AND all receivers completed.
 */
private CompletableFuture<?> bulkExec(Statement statement, List<DeferredExecution> toExec) {
    assert toExec.size() >= 1 : "Must have at least 1 deferred execution for bulk exec";
    var jobId = UUIDs.dirtyUUID();
    var routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes());
    var clusterState = executor.clusterService().state();
    var txnCtx = new CoordinatorTxnCtx(sessionContext);
    var plannerContext = new PlannerContext(clusterState, routingProvider, jobId, txnCtx, nodeCtx, 0, null);
    // All items of a bulk share the same prepared statement; only the params differ.
    PreparedStmt firstStmt = toExec.get(0).portal().preparedStmt();
    AnalyzedStatement analyzedStatement = firstStmt.analyzedStatement();
    Plan plan;
    try {
        plan = planner.plan(analyzedStatement, plannerContext);
    } catch (Throwable t) {
        // Planning failed before execution started: record it in the jobs log and rethrow.
        jobsLogs.logPreExecutionFailure(jobId, firstStmt.rawStatement(), SQLExceptions.messageOf(t), sessionContext.sessionUser());
        throw t;
    }
    jobsLogs.logExecutionStart(jobId, firstStmt.rawStatement(), sessionContext.sessionUser(), StatementClassifier.classify(plan));
    var bulkArgs = Lists2.map(toExec, x -> (Row) new RowN(x.portal().params().toArray()));
    var rowCounts = plan.executeBulk(executor, plannerContext, bulkArgs, SubQueryResults.EMPTY);
    CompletableFuture<Void> allRowCounts = CompletableFuture.allOf(rowCounts.toArray(new CompletableFuture[0]));
    List<CompletableFuture<?>> receiverFutures = Lists2.map(toExec, x -> x.resultReceiver().completionFuture());
    CompletableFuture<Void> allResultReceivers = CompletableFuture.allOf(receiverFutures.toArray(new CompletableFuture[0]));
    return allRowCounts
        // swallow exception - failures are set per item in emitResults
        .exceptionally(t -> null)
        .thenAccept(ignored -> emitRowCountsToResultReceivers(jobId, jobsLogs, toExec, rowCounts))
        .runAfterBoth(allResultReceivers, () -> {
        });
}
Also used : ParamTypeHints(io.crate.analyze.ParamTypeHints) RetryOnFailureResultReceiver(io.crate.protocols.postgres.RetryOnFailureResultReceiver) Analyzer(io.crate.analyze.Analyzer) DependencyCarrier(io.crate.planner.DependencyCarrier) ClusterState(org.elasticsearch.cluster.ClusterState) Relations(io.crate.analyze.Relations) RowN(io.crate.data.RowN) TransactionState(io.crate.protocols.postgres.TransactionState) Map(java.util.Map) JobsLogsUpdateListener(io.crate.protocols.postgres.JobsLogsUpdateListener) TableInfo(io.crate.metadata.table.TableInfo) NodeContext(io.crate.metadata.NodeContext) AnalyzedStatement(io.crate.analyze.AnalyzedStatement) UUIDs(org.elasticsearch.common.UUIDs) UUID(java.util.UUID) Lists2(io.crate.common.collections.Lists2) List(java.util.List) Logger(org.apache.logging.log4j.Logger) AnalyzedDiscard(io.crate.analyze.AnalyzedDiscard) Row(io.crate.data.Row) Symbol(io.crate.expression.symbol.Symbol) AnalyzedBegin(io.crate.analyze.AnalyzedBegin) SubQueryResults(io.crate.planner.operators.SubQueryResults) Statement(io.crate.sql.tree.Statement) VisibleForTesting(io.crate.common.annotations.VisibleForTesting) Row1(io.crate.data.Row1) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) AnalyzedCommit(io.crate.analyze.AnalyzedCommit) AccessControl(io.crate.auth.AccessControl) QueriedSelectRelation(io.crate.analyze.QueriedSelectRelation) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) Function(java.util.function.Function) JobsLogs(io.crate.execution.engine.collect.stats.JobsLogs) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Portal(io.crate.protocols.postgres.Portal) Symbols(io.crate.expression.symbol.Symbols) FormatCodes(io.crate.protocols.postgres.FormatCodes) SqlParser(io.crate.sql.parser.SqlParser) Nullable(javax.annotation.Nullable) Iterator(java.util.Iterator) RelationInfo(io.crate.metadata.RelationInfo) Target(io.crate.sql.tree.DiscardStatement.Target) DataType(io.crate.types.DataType) 
Planner(io.crate.planner.Planner) RoutingProvider(io.crate.metadata.RoutingProvider) RowConsumer(io.crate.data.RowConsumer) StatementClassifier(io.crate.planner.operators.StatementClassifier) AbstractTableRelation(io.crate.analyze.relations.AbstractTableRelation) PlannerContext(io.crate.planner.PlannerContext) Plan(io.crate.planner.Plan) AnalyzedRelation(io.crate.analyze.relations.AnalyzedRelation) ReadOnlyException(io.crate.exceptions.ReadOnlyException) SQLExceptions(io.crate.exceptions.SQLExceptions) LogManager(org.apache.logging.log4j.LogManager) Randomness(org.elasticsearch.common.Randomness) AnalyzedDeallocate(io.crate.analyze.AnalyzedDeallocate) RoutingProvider(io.crate.metadata.RoutingProvider) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) Plan(io.crate.planner.Plan) RowN(io.crate.data.RowN) CompletableFuture(java.util.concurrent.CompletableFuture) PlannerContext(io.crate.planner.PlannerContext) AnalyzedStatement(io.crate.analyze.AnalyzedStatement)

Example 35 with CoordinatorTxnCtx

use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.

From the class CreateTableStatementAnalyzer, the method analyze:

/**
 * Analyzes a parsed {@code CREATE TABLE} statement into an {@link AnalyzedCreateTable}.
 * <p>
 * Analysis happens in two phases: first all table elements EXCEPT check constraints,
 * generated expressions and default expressions are mapped; then, once the analyzed
 * elements can provide a reference resolver, those deferred expressions are analyzed
 * against the table's own columns.
 *
 * @param createTable    the parsed statement with raw {@link Expression} elements
 * @param paramTypeHints type hints for bind parameters appearing in expressions
 * @param txnCtx         transaction context; supplies the session search path
 * @return the fully analyzed statement, including element analysis both with and
 *         without the deferred expressions
 */
public AnalyzedCreateTable analyze(CreateTable<Expression> createTable, ParamTypeHints paramTypeHints, CoordinatorTxnCtx txnCtx) {
    // Resolve the table name against the current schema and reject invalid names early.
    RelationName relationName = RelationName.of(createTable.name().getName(), txnCtx.sessionContext().searchPath().currentSchema());
    relationName.ensureValidForRelationCreation();
    // Two analyzers: one rejects any column reference, one turns references into string literals.
    var exprAnalyzerWithoutFields = new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, FieldProvider.UNSUPPORTED, null);
    var exprAnalyzerWithFieldsAsString = new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    Function<Expression, Symbol> exprMapper = y -> exprAnalyzerWithFieldsAsString.convert(y, exprCtx);
    // 1st phase, map and analyze everything EXCEPT:
    // - check constraints defined at any level (table or column)
    // - generated expressions
    // - default expressions
    Map<TableElement<Symbol>, TableElement<Expression>> analyzed = new LinkedHashMap<>();
    List<CheckConstraint<Expression>> checkConstraints = new ArrayList<>();
    for (int i = 0; i < createTable.tableElements().size(); i++) {
        TableElement<Expression> te = createTable.tableElements().get(i);
        if (te instanceof CheckConstraint) {
            // Defer table-level check constraints to the 2nd phase.
            checkConstraints.add((CheckConstraint<Expression>) te);
            continue;
        }
        TableElement<Symbol> analyzedTe = null;
        if (te instanceof ColumnDefinition) {
            ColumnDefinition<Expression> def = (ColumnDefinition<Expression>) te;
            List<ColumnConstraint<Symbol>> analyzedColumnConstraints = new ArrayList<>();
            for (int j = 0; j < def.constraints().size(); j++) {
                ColumnConstraint<Expression> cc = def.constraints().get(j);
                if (cc instanceof CheckColumnConstraint) {
                    // Re-frame the column check constraint as a table check constraint
                    CheckColumnConstraint<Expression> columnCheck = (CheckColumnConstraint<Expression>) cc;
                    checkConstraints.add(new CheckConstraint<>(columnCheck.name(), def.ident(), columnCheck.expression(), columnCheck.expressionStr()));
                    continue;
                }
                analyzedColumnConstraints.add(cc.map(exprMapper));
            }
            // Default and generated expressions are dropped here (passed as null) and
            // re-attached in the 2nd phase once column references can be resolved.
            analyzedTe = new ColumnDefinition<>(def.ident(), null, null, def.type() == null ? null : def.type().map(exprMapper), analyzedColumnConstraints, false, def.isGenerated());
        }
        analyzed.put(analyzedTe == null ? te.map(exprMapper) : analyzedTe, te);
    }
    CreateTable<Symbol> analyzedCreateTable = new CreateTable<>(createTable.name().map(exprMapper), new ArrayList<>(analyzed.keySet()), createTable.partitionedBy().map(x -> x.map(exprMapper)), createTable.clusteredBy().map(x -> x.map(exprMapper)), createTable.properties().map(x -> exprAnalyzerWithoutFields.convert(x, exprCtx)), createTable.ifNotExists());
    AnalyzedTableElements<Symbol> analyzedTableElements = TableElementsAnalyzer.analyze(analyzedCreateTable.tableElements(), relationName, null);
    // 2nd phase, analyze and map with a reference resolver:
    // - generated/default expressions
    // - check constraints
    TableReferenceResolver referenceResolver = analyzedTableElements.referenceResolver(relationName);
    var exprAnalyzerWithReferences = new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, referenceResolver, null);
    List<TableElement<Symbol>> tableElementsWithExpressions = new ArrayList<>();
    for (int i = 0; i < analyzedCreateTable.tableElements().size(); i++) {
        TableElement<Symbol> elementSymbol = analyzedCreateTable.tableElements().get(i);
        TableElement<Expression> elementExpression = analyzed.get(elementSymbol);
        tableElementsWithExpressions.add(elementExpression.mapExpressions(elementSymbol, x -> {
            Symbol symbol = exprAnalyzerWithReferences.convert(x, exprCtx);
            EnsureNoMatchPredicate.ensureNoMatchPredicate(symbol, "Cannot use MATCH in CREATE TABLE statements");
            return symbol;
        }));
    }
    // Re-attach the deferred check constraints to BOTH element collections.
    // NOTE(review): this mutates analyzedCreateTable.tableElements() in place while
    // analyzedTableElements was built from it above — order of these steps matters.
    checkConstraints.stream().map(x -> x.map(y -> exprAnalyzerWithReferences.convert(y, exprCtx))).forEach(te -> {
        analyzedCreateTable.tableElements().add(te);
        tableElementsWithExpressions.add(te);
        analyzedTableElements.addCheckConstraint(relationName, (CheckConstraint<Symbol>) te);
    });
    AnalyzedTableElements<Symbol> analyzedTableElementsWithExpressions = TableElementsAnalyzer.analyze(tableElementsWithExpressions, relationName, null, false);
    return new AnalyzedCreateTable(relationName, analyzedCreateTable, analyzedTableElements, analyzedTableElementsWithExpressions);
}
Also used : ExpressionAnalyzer(io.crate.analyze.expressions.ExpressionAnalyzer) CreateTable(io.crate.sql.tree.CreateTable) NodeContext(io.crate.metadata.NodeContext) TableElement(io.crate.sql.tree.TableElement) ColumnConstraint(io.crate.sql.tree.ColumnConstraint) RelationName(io.crate.metadata.RelationName) ExpressionAnalysisContext(io.crate.analyze.expressions.ExpressionAnalysisContext) FieldProvider(io.crate.analyze.relations.FieldProvider) CheckColumnConstraint(io.crate.sql.tree.CheckColumnConstraint) Function(java.util.function.Function) TableReferenceResolver(io.crate.analyze.expressions.TableReferenceResolver) CheckConstraint(io.crate.sql.tree.CheckConstraint) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) ColumnDefinition(io.crate.sql.tree.ColumnDefinition) List(java.util.List) Symbol(io.crate.expression.symbol.Symbol) Map(java.util.Map) EnsureNoMatchPredicate(io.crate.planner.operators.EnsureNoMatchPredicate) Expression(io.crate.sql.tree.Expression) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) ExpressionAnalysisContext(io.crate.analyze.expressions.ExpressionAnalysisContext) Symbol(io.crate.expression.symbol.Symbol) ExpressionAnalyzer(io.crate.analyze.expressions.ExpressionAnalyzer) ArrayList(java.util.ArrayList) CreateTable(io.crate.sql.tree.CreateTable) TableElement(io.crate.sql.tree.TableElement) LinkedHashMap(java.util.LinkedHashMap) CheckColumnConstraint(io.crate.sql.tree.CheckColumnConstraint) ColumnConstraint(io.crate.sql.tree.ColumnConstraint) CheckColumnConstraint(io.crate.sql.tree.CheckColumnConstraint) RelationName(io.crate.metadata.RelationName) TableReferenceResolver(io.crate.analyze.expressions.TableReferenceResolver) CheckConstraint(io.crate.sql.tree.CheckConstraint) ColumnConstraint(io.crate.sql.tree.ColumnConstraint) CheckColumnConstraint(io.crate.sql.tree.CheckColumnConstraint) CheckConstraint(io.crate.sql.tree.CheckConstraint) ColumnDefinition(io.crate.sql.tree.ColumnDefinition) 
Expression(io.crate.sql.tree.Expression)

Aggregations

CoordinatorTxnCtx (io.crate.metadata.CoordinatorTxnCtx)44 Symbol (io.crate.expression.symbol.Symbol)27 NodeContext (io.crate.metadata.NodeContext)22 List (java.util.List)16 PlannerContext (io.crate.planner.PlannerContext)15 ArrayList (java.util.ArrayList)15 Map (java.util.Map)14 Lists2 (io.crate.common.collections.Lists2)13 Row (io.crate.data.Row)13 RelationName (io.crate.metadata.RelationName)13 Plan (io.crate.planner.Plan)13 SubQueryResults (io.crate.planner.operators.SubQueryResults)13 Function (java.util.function.Function)13 VisibleForTesting (io.crate.common.annotations.VisibleForTesting)12 RowConsumer (io.crate.data.RowConsumer)12 DependencyCarrier (io.crate.planner.DependencyCarrier)12 HashMap (java.util.HashMap)12 Test (org.junit.Test)12 Row1 (io.crate.data.Row1)11 DocTableInfo (io.crate.metadata.doc.DocTableInfo)11