Search in sources:

Example 1 with Lists2.map

Use of io.crate.common.collections.Lists2.map in project crate by crate.

In class RoutedCollectPhase, the method normalize:

/**
 * normalizes the symbols of this node with the given normalizer
 *
 * @return a normalized node, if no changes occurred returns this
 */
public RoutedCollectPhase normalize(EvaluatingNormalizer normalizer, @Nonnull TransactionContext txnCtx) {
    RoutedCollectPhase result = this;
    Function<Symbol, Symbol> normalize = s -> normalizer.normalize(s, txnCtx);
    List<Symbol> newToCollect = Lists2.map(toCollect, normalize);
    boolean changed = !newToCollect.equals(toCollect);
    Symbol newWhereClause = normalizer.normalize(where, txnCtx);
    OrderBy orderBy = this.orderBy;
    if (orderBy != null) {
        orderBy = orderBy.map(normalize);
    }
    changed = changed || newWhereClause != where || orderBy != this.orderBy;
    if (changed) {
        result = new RoutedCollectPhase(jobId(), phaseId(), name(), routing, maxRowGranularity, newToCollect, projections, newWhereClause, distributionInfo);
        result.nodePageSizeHint(nodePageSizeHint);
        result.orderBy(orderBy);
    }
    return result;
}
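
The interesting part of this method is the change-detection idiom: every symbol is mapped through the normalizer, and a new RoutedCollectPhase is only allocated if the mapped list, the where clause, or the ordering actually differs; otherwise the original node is returned unchanged. Below is a minimal, self-contained sketch of that idiom. Node, its expressions field, and the string-based "normalizer" are hypothetical stand-ins for the plan node and EvaluatingNormalizer, not CrateDB API.

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

final class NormalizeSketch {

    // Hypothetical immutable plan node holding a list of expressions.
    record Node(List<String> expressions) {

        // Map every expression; return `this` unless at least one value changed.
        Node normalize(Function<String, String> normalizer) {
            List<String> mapped = expressions.stream()
                    .map(normalizer)
                    .collect(Collectors.toList());
            return mapped.equals(expressions) ? this : new Node(mapped);
        }
    }

    public static void main(String[] args) {
        Node node = new Node(List.of("a + 0", "b"));
        // A real normalizer would simplify "a + 0"; here we just strip the suffix.
        Node normalized = node.normalize(s -> s.replace(" + 0", ""));
        System.out.println(normalized.expressions());        // [a, b]
        System.out.println(node.normalize(s -> s) == node);  // true: identity preserved
    }
}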
Also used: TransactionContext(io.crate.metadata.TransactionContext), StreamOutput(org.elasticsearch.common.io.stream.StreamOutput), Set(java.util.Set), IOException(java.io.IOException), EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer), UUID(java.util.UUID), Function(java.util.function.Function), Lists2(io.crate.common.collections.Lists2), SymbolVisitors(io.crate.expression.symbol.SymbolVisitors), Objects(java.util.Objects), List(java.util.List), OrderBy(io.crate.analyze.OrderBy), RowGranularity(io.crate.metadata.RowGranularity), Routing(io.crate.metadata.Routing), Projection(io.crate.execution.dsl.projection.Projection), Symbol(io.crate.expression.symbol.Symbol), Symbols(io.crate.expression.symbol.Symbols), StreamInput(org.elasticsearch.common.io.stream.StreamInput), Paging(io.crate.data.Paging), SelectSymbol(io.crate.expression.symbol.SelectSymbol), ScopedSymbol(io.crate.expression.symbol.ScopedSymbol), DistributionInfo(io.crate.planner.distribution.DistributionInfo), Nonnull(javax.annotation.Nonnull), Nullable(javax.annotation.Nullable)

Example 2 with Lists2.map

Use of io.crate.common.collections.Lists2.map in project crate by crate.

In class JoinPlanBuilder, the method buildJoinTree:

static LogicalPlan buildJoinTree(List<AnalyzedRelation> from, Symbol whereClause, List<JoinPair> joinPairs, Function<AnalyzedRelation, LogicalPlan> plan, boolean hashJoinEnabled) {
    if (from.size() == 1) {
        return Filter.create(plan.apply(from.get(0)), whereClause);
    }
    Map<Set<RelationName>, Symbol> queryParts = QuerySplitter.split(whereClause);
    List<JoinPair> allJoinPairs = JoinOperations.convertImplicitJoinConditionsToJoinPairs(joinPairs, queryParts);
    boolean optimizeOrder = true;
    for (var joinPair : allJoinPairs) {
        if (hasAdditionalDependencies(joinPair)) {
            optimizeOrder = false;
            break;
        }
    }
    LinkedHashMap<Set<RelationName>, JoinPair> joinPairsByRelations = JoinOperations.buildRelationsToJoinPairsMap(allJoinPairs);
    Iterator<RelationName> it;
    if (optimizeOrder) {
        Collection<RelationName> orderedRelationNames = JoinOrdering.getOrderedRelationNames(Lists2.map(from, AnalyzedRelation::relationName), joinPairsByRelations.keySet(), queryParts.keySet());
        it = orderedRelationNames.iterator();
    } else {
        it = Lists2.map(from, AnalyzedRelation::relationName).iterator();
    }
    final RelationName lhsName = it.next();
    final RelationName rhsName = it.next();
    Set<RelationName> joinNames = new HashSet<>();
    joinNames.add(lhsName);
    joinNames.add(rhsName);
    JoinPair joinLhsRhs = joinPairsByRelations.remove(joinNames);
    final JoinType joinType;
    final Symbol joinCondition;
    if (joinLhsRhs == null) {
        joinType = JoinType.CROSS;
        joinCondition = null;
    } else {
        joinType = maybeInvertPair(rhsName, joinLhsRhs);
        joinCondition = joinLhsRhs.condition();
    }
    Map<RelationName, AnalyzedRelation> sources = from.stream().collect(Collectors.toMap(AnalyzedRelation::relationName, rel -> rel));
    AnalyzedRelation lhs = sources.get(lhsName);
    AnalyzedRelation rhs = sources.get(rhsName);
    LogicalPlan lhsPlan = plan.apply(lhs);
    LogicalPlan rhsPlan = plan.apply(rhs);
    Symbol query = removeParts(queryParts, lhsName, rhsName);
    LogicalPlan joinPlan = createJoinPlan(lhsPlan, rhsPlan, joinType, joinCondition, lhs, rhs, query, hashJoinEnabled);
    joinPlan = Filter.create(joinPlan, query);
    while (it.hasNext()) {
        AnalyzedRelation nextRel = sources.get(it.next());
        joinPlan = joinWithNext(plan, joinPlan, nextRel, joinNames, joinPairsByRelations, queryParts, lhs, hashJoinEnabled);
        joinNames.add(nextRel.relationName());
    }
    if (!queryParts.isEmpty()) {
        joinPlan = Filter.create(joinPlan, AndOperator.join(queryParts.values()));
        queryParts.clear();
    }
    assert joinPairsByRelations.isEmpty() : "Must've applied all joinPairs";
    return joinPlan;
}
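
buildJoinTree consumes the (optionally reordered) relation names two at a time and then folds each remaining relation into the existing plan, producing a left-deep join tree. A stripped-down sketch of that folding loop is shown below; the Plan record and its scan/join helpers are invented for illustration and are not CrateDB types.

import java.util.Iterator;
import java.util.List;

final class LeftDeepJoinSketch {

    // Hypothetical plan node: either a scan of one relation or a join of two sub-plans.
    record Plan(String description) {
        static Plan scan(String relation) { return new Plan(relation); }
        Plan join(Plan right) { return new Plan("(" + description + " JOIN " + right.description + ")"); }
    }

    // Fold an ordered list of relations into a left-deep join tree,
    // mirroring the `while (it.hasNext())` loop in buildJoinTree.
    static Plan leftDeep(List<String> orderedRelations) {
        Iterator<String> it = orderedRelations.iterator();
        Plan plan = Plan.scan(it.next()).join(Plan.scan(it.next()));
        while (it.hasNext()) {
            plan = plan.join(Plan.scan(it.next()));
        }
        return plan;
    }

    public static void main(String[] args) {
        System.out.println(leftDeep(List.of("t1", "t2", "t3", "t4")).description());
        // (((t1 JOIN t2) JOIN t3) JOIN t4) -- left-deep shape
    }
}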
Also used: Iterator(java.util.Iterator), RelationName(io.crate.metadata.RelationName), Collection(java.util.Collection), Set(java.util.Set), QuerySplitter(io.crate.analyze.relations.QuerySplitter), Function(java.util.function.Function), Collectors(java.util.stream.Collectors), Lists2(io.crate.common.collections.Lists2), JoinPair(io.crate.analyze.relations.JoinPair), HashSet(java.util.HashSet), LinkedHashMap(java.util.LinkedHashMap), Objects(java.util.Objects), List(java.util.List), AndOperator(io.crate.expression.operator.AndOperator), Stream(java.util.stream.Stream), JoinType(io.crate.planner.node.dql.join.JoinType), JoinOperations(io.crate.execution.engine.join.JoinOperations), Symbol(io.crate.expression.symbol.Symbol), Map(java.util.Map), AnalyzedRelation(io.crate.analyze.relations.AnalyzedRelation), EquiJoinDetector.isHashJoinPossible(io.crate.planner.operators.EquiJoinDetector.isHashJoinPossible), Collections(java.util.Collections), FieldsVisitor(io.crate.expression.symbol.FieldsVisitor), Nullable(javax.annotation.Nullable)

Example 3 with Lists2.map

Use of io.crate.common.collections.Lists2.map in project crate by crate.

In class RefreshTablePlan, the method executeOrFail:

@Override
public void executeOrFail(DependencyCarrier dependencies, PlannerContext plannerContext, RowConsumer consumer, Row parameters, SubQueryResults subQueryResults) {
    if (analysis.tables().isEmpty()) {
        consumer.accept(InMemoryBatchIterator.empty(SENTINEL), null);
        return;
    }
    Function<? super Symbol, Object> eval = x -> SymbolEvaluator.evaluate(plannerContext.transactionContext(), plannerContext.nodeContext(), x, parameters, subQueryResults);
    ArrayList<String> toRefresh = new ArrayList<>();
    for (Map.Entry<Table<Symbol>, DocTableInfo> table : analysis.tables().entrySet()) {
        var tableInfo = table.getValue();
        var tableSymbol = table.getKey();
        if (tableSymbol.partitionProperties().isEmpty()) {
            toRefresh.addAll(Arrays.asList(tableInfo.concreteOpenIndices()));
        } else {
            var partitionName = toPartitionName(tableInfo, Lists2.map(tableSymbol.partitionProperties(), p -> p.map(eval)));
            if (!tableInfo.partitions().contains(partitionName)) {
                throw new PartitionUnknownException(partitionName);
            }
            toRefresh.add(partitionName.asIndexName());
        }
    }
    RefreshRequest request = new RefreshRequest(toRefresh.toArray(String[]::new));
    request.indicesOptions(IndicesOptions.lenientExpandOpen());
    var transportRefreshAction = dependencies.transportActionProvider().transportRefreshAction();
    transportRefreshAction.execute(request, new OneRowActionListener<>(consumer, response -> new Row1(toRefresh.isEmpty() ? -1L : (long) toRefresh.size())));
}
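
The Lists2.map call here binds each partition property to concrete values by pushing an eval function (built from the statement parameters and sub-query results) through every assignment before the partition name is resolved. A minimal sketch of that binding pattern follows, assuming a simplified Assignment record in place of Assignment<Symbol> and a plain string lookup in place of SymbolEvaluator.

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

final class PartitionBindingSketch {

    // Hypothetical stand-in for an Assignment<Symbol>: a column name plus an unbound expression.
    record Assignment(String column, String expression) {
        Assignment map(Function<String, String> eval) {
            return new Assignment(column, eval.apply(expression));
        }
    }

    public static void main(String[] args) {
        // eval plays the role of SymbolEvaluator.evaluate bound to parameters / sub-query results.
        Map<String, String> parameters = Map.of("$1", "2024-01-01");
        Function<String, String> eval = expr -> parameters.getOrDefault(expr, expr);

        List<Assignment> bound = List.of(new Assignment("day", "$1"))
                .stream()
                .map(a -> a.map(eval))   // same shape as Lists2.map(partitionProperties, p -> p.map(eval))
                .collect(Collectors.toList());

        System.out.println(bound);  // [Assignment[column=day, expression=2024-01-01]]
    }
}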
Also used: AnalyzedRefreshTable(io.crate.analyze.AnalyzedRefreshTable), DocTableInfo(io.crate.metadata.doc.DocTableInfo), Arrays(java.util.Arrays), SENTINEL(io.crate.data.SentinelRow.SENTINEL), InMemoryBatchIterator(io.crate.data.InMemoryBatchIterator), Table(io.crate.sql.tree.Table), Function(java.util.function.Function), Lists2(io.crate.common.collections.Lists2), ArrayList(java.util.ArrayList), RowConsumer(io.crate.data.RowConsumer), DependencyCarrier(io.crate.planner.DependencyCarrier), SymbolEvaluator(io.crate.analyze.SymbolEvaluator), Row(io.crate.data.Row), Symbol(io.crate.expression.symbol.Symbol), PlannerContext(io.crate.planner.PlannerContext), Plan(io.crate.planner.Plan), Map(java.util.Map), SubQueryResults(io.crate.planner.operators.SubQueryResults), RefreshRequest(org.elasticsearch.action.admin.indices.refresh.RefreshRequest), IndicesOptions(org.elasticsearch.action.support.IndicesOptions), OneRowActionListener(io.crate.execution.support.OneRowActionListener), PartitionUnknownException(io.crate.exceptions.PartitionUnknownException), PartitionPropertiesAnalyzer.toPartitionName(io.crate.analyze.PartitionPropertiesAnalyzer.toPartitionName), Row1(io.crate.data.Row1)

Example 4 with Lists2.map

Use of io.crate.common.collections.Lists2.map in project crate by crate.

In class Get, the method build:

@Override
public ExecutionPlan build(PlannerContext plannerContext, Set<PlanHint> hints, ProjectionBuilder projectionBuilder, int limitHint, int offsetHint, @Nullable OrderBy order, @Nullable Integer pageSizeHint, Row params, SubQueryResults subQueryResults) {
    HashMap<String, Map<ShardId, List<PKAndVersion>>> idsByShardByNode = new HashMap<>();
    DocTableInfo docTableInfo = tableRelation.tableInfo();
    for (DocKeys.DocKey docKey : docKeys) {
        String id = docKey.getId(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults);
        if (id == null) {
            continue;
        }
        List<String> partitionValues = docKey.getPartitionValues(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults);
        String indexName = indexName(docTableInfo, partitionValues);
        String routing = docKey.getRouting(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults);
        ShardRouting shardRouting;
        try {
            shardRouting = plannerContext.resolveShard(indexName, id, routing);
        } catch (IndexNotFoundException e) {
            if (docTableInfo.isPartitioned()) {
                continue;
            }
            throw e;
        }
        String currentNodeId = shardRouting.currentNodeId();
        if (currentNodeId == null) {
            // If relocating is fast enough this will work, otherwise it will result in a shard failure which
            // will cause a statement retry
            currentNodeId = shardRouting.relocatingNodeId();
            if (currentNodeId == null) {
                throw new ShardNotFoundException(shardRouting.shardId());
            }
        }
        Map<ShardId, List<PKAndVersion>> idsByShard = idsByShardByNode.get(currentNodeId);
        if (idsByShard == null) {
            idsByShard = new HashMap<>();
            idsByShardByNode.put(currentNodeId, idsByShard);
        }
        List<PKAndVersion> pkAndVersions = idsByShard.get(shardRouting.shardId());
        if (pkAndVersions == null) {
            pkAndVersions = new ArrayList<>();
            idsByShard.put(shardRouting.shardId(), pkAndVersions);
        }
        long version = docKey.version(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults).orElse(Versions.MATCH_ANY);
        long sequenceNumber = docKey.sequenceNo(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults).orElse(SequenceNumbers.UNASSIGNED_SEQ_NO);
        long primaryTerm = docKey.primaryTerm(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults).orElse(SequenceNumbers.UNASSIGNED_PRIMARY_TERM);
        pkAndVersions.add(new PKAndVersion(id, version, sequenceNumber, primaryTerm));
    }
    var docKeyColumns = new ArrayList<>(docTableInfo.primaryKey());
    docKeyColumns.addAll(docTableInfo.partitionedBy());
    docKeyColumns.add(docTableInfo.clusteredBy());
    docKeyColumns.add(DocSysColumns.VERSION);
    docKeyColumns.add(DocSysColumns.SEQ_NO);
    docKeyColumns.add(DocSysColumns.PRIMARY_TERM);
    var binder = new SubQueryAndParamBinder(params, subQueryResults);
    List<Symbol> boundOutputs = Lists2.map(outputs, binder);
    var boundQuery = binder.apply(query);
    // Collect all columns which are used inside the query
    // If the query contains only DocKeys, no filter is needed as all DocKeys are handled by the PKLookupOperation
    AtomicBoolean requiresAdditionalFilteringOnNonDocKeyColumns = new AtomicBoolean(false);
    var toCollectSet = new LinkedHashSet<>(boundOutputs);
    Consumer<Reference> addRefIfMatch = ref -> {
        toCollectSet.add(ref);
        if (docKeyColumns.contains(ref.column()) == false) {
            requiresAdditionalFilteringOnNonDocKeyColumns.set(true);
        }
    };
    RefVisitor.visitRefs(boundQuery, addRefIfMatch);
    var toCollect = boundOutputs;
    ArrayList<Projection> projections = new ArrayList<>();
    if (requiresAdditionalFilteringOnNonDocKeyColumns.get()) {
        toCollect = List.copyOf(toCollectSet);
        var filterProjection = ProjectionBuilder.filterProjection(toCollect, boundQuery);
        filterProjection.requiredGranularity(RowGranularity.SHARD);
        projections.add(filterProjection);
        // reduce outputs which have been added for the filter projection
        var evalProjection = new EvalProjection(InputColumn.mapToInputColumns(boundOutputs), RowGranularity.SHARD);
        projections.add(evalProjection);
    }
    var collect = new Collect(new PKLookupPhase(plannerContext.jobId(), plannerContext.nextExecutionPhaseId(), docTableInfo.partitionedBy(), toCollect, idsByShardByNode), TopN.NO_LIMIT, 0, toCollect.size(), docKeys.size(), null);
    for (var projection : projections) {
        collect.addProjection(projection);
    }
    return collect;
}
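
Most of build is spent bucketing primary-key lookups into a node -> shard -> ids structure. The snippet does this with explicit get/put pairs; the sketch below shows the same grouping expressed with Map.computeIfAbsent. The Key record and the sample data are made up for illustration; only the grouping idiom is taken from the code above.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class NestedGroupingSketch {

    public static void main(String[] args) {
        // node -> shard -> ids, built with computeIfAbsent instead of the explicit
        // get/put pairs used in Get.build (the resulting structure is the same).
        Map<String, Map<Integer, List<String>>> idsByShardByNode = new HashMap<>();

        record Key(String node, int shard, String id) {}
        List<Key> keys = List.of(
                new Key("node-1", 0, "doc-a"),
                new Key("node-1", 0, "doc-b"),
                new Key("node-2", 3, "doc-c"));

        for (Key key : keys) {
            idsByShardByNode
                    .computeIfAbsent(key.node(), n -> new HashMap<>())
                    .computeIfAbsent(key.shard(), s -> new ArrayList<>())
                    .add(key.id());
        }
        System.out.println(idsByShardByNode);
        // e.g. {node-1={0=[doc-a, doc-b]}, node-2={3=[doc-c]}} (map order may vary)
    }
}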
Also used: LinkedHashSet(java.util.LinkedHashSet), ShardRouting(org.elasticsearch.cluster.routing.ShardRouting), ShardId(org.elasticsearch.index.shard.ShardId), IndexParts(io.crate.metadata.IndexParts), InputColumn(io.crate.expression.symbol.InputColumn), Versions(org.elasticsearch.common.lucene.uid.Versions), RelationName(io.crate.metadata.RelationName), AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean), HashMap(java.util.HashMap), PartitionName(io.crate.metadata.PartitionName), ShardNotFoundException(org.elasticsearch.index.shard.ShardNotFoundException), ArrayList(java.util.ArrayList), RefVisitor(io.crate.expression.symbol.RefVisitor), TableStats(io.crate.statistics.TableStats), IndexNotFoundException(org.elasticsearch.index.IndexNotFoundException), Map(java.util.Map), SelectSymbol(io.crate.expression.symbol.SelectSymbol), PKLookupPhase(io.crate.execution.dsl.phases.PKLookupPhase), TopN(io.crate.execution.engine.pipeline.TopN), Nullable(javax.annotation.Nullable), ProjectionBuilder(io.crate.execution.dsl.projection.builder.ProjectionBuilder), DocSysColumns(io.crate.metadata.doc.DocSysColumns), DocTableInfo(io.crate.metadata.doc.DocTableInfo), DocKeys(io.crate.analyze.where.DocKeys), SequenceNumbers(org.elasticsearch.index.seqno.SequenceNumbers), Collection(java.util.Collection), Reference(io.crate.metadata.Reference), Set(java.util.Set), Lists2(io.crate.common.collections.Lists2), ExecutionPlan(io.crate.planner.ExecutionPlan), Consumer(java.util.function.Consumer), List(java.util.List), OrderBy(io.crate.analyze.OrderBy), RowGranularity(io.crate.metadata.RowGranularity), DocTableRelation(io.crate.analyze.relations.DocTableRelation), Row(io.crate.data.Row), Projection(io.crate.execution.dsl.projection.Projection), Symbol(io.crate.expression.symbol.Symbol), AbstractTableRelation(io.crate.analyze.relations.AbstractTableRelation), PlannerContext(io.crate.planner.PlannerContext), Collect(io.crate.planner.node.dql.Collect), EvalProjection(io.crate.execution.dsl.projection.EvalProjection)

Example 5 with Lists2.map

Use of io.crate.common.collections.Lists2.map in project crate by crate.

In class CopyToPlan, the method bind:

@VisibleForTesting
public static BoundCopyTo bind(AnalyzedCopyTo copyTo, CoordinatorTxnCtx txnCtx, NodeContext nodeCtx, Row parameters, SubQueryResults subQueryResults) {
    Function<? super Symbol, Object> eval = x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, parameters, subQueryResults);
    DocTableInfo table = (DocTableInfo) copyTo.tableInfo();
    List<String> partitions = resolvePartitions(Lists2.map(copyTo.table().partitionProperties(), x -> x.map(eval)), table);
    List<Symbol> outputs = new ArrayList<>();
    Map<ColumnIdent, Symbol> overwrites = null;
    boolean columnsDefined = false;
    final List<String> outputNames = new ArrayList<>(copyTo.columns().size());
    if (!copyTo.columns().isEmpty()) {
        // TODO: remove outputNames?
        for (Symbol symbol : copyTo.columns()) {
            assert symbol instanceof Reference : "Only references are expected here";
            RefVisitor.visitRefs(symbol, r -> outputNames.add(r.column().sqlFqn()));
            outputs.add(DocReferences.toSourceLookup(symbol));
        }
        columnsDefined = true;
    } else {
        Reference sourceRef;
        if (table.isPartitioned() && partitions.isEmpty()) {
            // table is partitioned, insert partitioned columns into the output
            overwrites = new HashMap<>();
            for (Reference reference : table.partitionedByColumns()) {
                if (!(reference instanceof GeneratedReference)) {
                    overwrites.put(reference.column(), reference);
                }
            }
            if (overwrites.size() > 0) {
                sourceRef = table.getReference(DocSysColumns.DOC);
            } else {
                sourceRef = table.getReference(DocSysColumns.RAW);
            }
        } else {
            sourceRef = table.getReference(DocSysColumns.RAW);
        }
        outputs = List.of(sourceRef);
    }
    Settings settings = genericPropertiesToSettings(copyTo.properties().map(eval));
    WriterProjection.CompressionType compressionType = settingAsEnum(WriterProjection.CompressionType.class, COMPRESSION_SETTING.get(settings));
    WriterProjection.OutputFormat outputFormat = settingAsEnum(WriterProjection.OutputFormat.class, OUTPUT_FORMAT_SETTING.get(settings));
    if (!columnsDefined && outputFormat == WriterProjection.OutputFormat.JSON_ARRAY) {
        throw new UnsupportedFeatureException("Output format not supported without specifying columns.");
    }
    WhereClause whereClause = new WhereClause(copyTo.whereClause(), partitions, Collections.emptySet());
    return new BoundCopyTo(outputs, table, whereClause, Literal.of(DataTypes.STRING.sanitizeValue(eval.apply(copyTo.uri()))), compressionType, outputFormat, outputNames.isEmpty() ? null : outputNames, columnsDefined, overwrites, settings);
}
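
After the generic properties are evaluated and converted to Settings, the compression and output format values are turned into enums via settingAsEnum. The sketch below shows roughly what such a helper does; the settingAsEnum method and OutputFormat enum here are hypothetical stand-ins for the CrateDB versions, an assumption about the shape of that utility rather than its actual implementation.

import java.util.Locale;
import java.util.Map;

final class SettingAsEnumSketch {

    enum OutputFormat { JSON_OBJECT, JSON_ARRAY }

    // Hypothetical analogue of CopyStatementSettings.settingAsEnum: parse a string
    // setting into an enum constant, falling back to a default when the value is absent.
    static <E extends Enum<E>> E settingAsEnum(Class<E> type, String value, E fallback) {
        if (value == null || value.isEmpty()) {
            return fallback;
        }
        return Enum.valueOf(type, value.toUpperCase(Locale.ROOT));
    }

    public static void main(String[] args) {
        Map<String, String> properties = Map.of("format", "json_array");
        OutputFormat format = settingAsEnum(
                OutputFormat.class, properties.get("format"), OutputFormat.JSON_OBJECT);
        System.out.println(format);  // JSON_ARRAY
    }
}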
Also used: UnsupportedFeatureException(io.crate.exceptions.UnsupportedFeatureException), GenericPropertiesConverter.genericPropertiesToSettings(io.crate.analyze.GenericPropertiesConverter.genericPropertiesToSettings), Match(io.crate.planner.optimizer.matcher.Match), BoundCopyTo(io.crate.analyze.BoundCopyTo), DependencyCarrier(io.crate.planner.DependencyCarrier), RefVisitor(io.crate.expression.symbol.RefVisitor), SymbolEvaluator(io.crate.analyze.SymbolEvaluator), TableStats(io.crate.statistics.TableStats), Settings(org.elasticsearch.common.settings.Settings), Map(java.util.Map), OUTPUT_FORMAT_SETTING(io.crate.analyze.CopyStatementSettings.OUTPUT_FORMAT_SETTING), DocSysColumns(io.crate.metadata.doc.DocSysColumns), DocTableInfo(io.crate.metadata.doc.DocTableInfo), NodeContext(io.crate.metadata.NodeContext), LogicalPlan(io.crate.planner.operators.LogicalPlan), Captures(io.crate.planner.optimizer.matcher.Captures), GeneratedReference(io.crate.metadata.GeneratedReference), Collect(io.crate.planner.operators.Collect), Set(java.util.Set), Lists2(io.crate.common.collections.Lists2), ExecutionPlan(io.crate.planner.ExecutionPlan), OptimizeCollectWhereClauseAccess(io.crate.planner.optimizer.rule.OptimizeCollectWhereClauseAccess), DocReferences(io.crate.metadata.DocReferences), List(java.util.List), Row(io.crate.data.Row), Symbol(io.crate.expression.symbol.Symbol), DataTypes(io.crate.types.DataTypes), SubQueryResults(io.crate.planner.operators.SubQueryResults), VisibleForTesting(io.crate.common.annotations.VisibleForTesting), CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx), PartitionPropertiesAnalyzer(io.crate.analyze.PartitionPropertiesAnalyzer), HashMap(java.util.HashMap), Function(java.util.function.Function), ArrayList(java.util.ArrayList), Assignment(io.crate.sql.tree.Assignment), COMPRESSION_SETTING(io.crate.analyze.CopyStatementSettings.COMPRESSION_SETTING), AnalyzedCopyTo(io.crate.analyze.AnalyzedCopyTo), PartitionUnknownException(io.crate.exceptions.PartitionUnknownException), ProjectionBuilder(io.crate.execution.dsl.projection.builder.ProjectionBuilder), MergeCountProjection(io.crate.execution.dsl.projection.MergeCountProjection), WhereClause(io.crate.analyze.WhereClause), NodeOperationTree(io.crate.execution.dsl.phases.NodeOperationTree), ColumnIdent(io.crate.metadata.ColumnIdent), Reference(io.crate.metadata.Reference), NodeOperationTreeGenerator(io.crate.execution.engine.NodeOperationTreeGenerator), Merge(io.crate.planner.Merge), RowConsumer(io.crate.data.RowConsumer), WriterProjection(io.crate.execution.dsl.projection.WriterProjection), DocTableRelation(io.crate.analyze.relations.DocTableRelation), Literal(io.crate.expression.symbol.Literal), PlannerContext(io.crate.planner.PlannerContext), CopyStatementSettings.settingAsEnum(io.crate.analyze.CopyStatementSettings.settingAsEnum), Plan(io.crate.planner.Plan), Collections(java.util.Collections)

Aggregations

Lists2 (io.crate.common.collections.Lists2): 21
Symbol (io.crate.expression.symbol.Symbol): 17
List (java.util.List): 17
CoordinatorTxnCtx (io.crate.metadata.CoordinatorTxnCtx): 13
Map (java.util.Map): 13
NodeContext (io.crate.metadata.NodeContext): 12
ArrayList (java.util.ArrayList): 12
Row (io.crate.data.Row): 10
DocTableInfo (io.crate.metadata.doc.DocTableInfo): 10
Nullable (javax.annotation.Nullable): 10
RelationName (io.crate.metadata.RelationName): 8
SubQueryResults (io.crate.planner.operators.SubQueryResults): 8
HashMap (java.util.HashMap): 8
Function (java.util.function.Function): 8
Literal (io.crate.expression.symbol.Literal): 7
PlannerContext (io.crate.planner.PlannerContext): 7
DataType (io.crate.types.DataType): 7
Schemas (io.crate.metadata.Schemas): 6
RowConsumer (io.crate.data.RowConsumer): 5
EvaluatingNormalizer (io.crate.expression.eval.EvaluatingNormalizer): 5