
Example 1 with NodeContext

use of io.crate.metadata.NodeContext in project crate by crate.

In class WhereClauseAnalyzer, method resolvePartitions:

public static PartitionResult resolvePartitions(Symbol query, DocTableInfo tableInfo, CoordinatorTxnCtx coordinatorTxnCtx, NodeContext nodeCtx) {
    assert tableInfo.isPartitioned() : "table must be partitioned in order to resolve partitions";
    assert !tableInfo.partitions().isEmpty() : "table must have at least one partition";
    PartitionReferenceResolver partitionReferenceResolver = preparePartitionResolver(tableInfo.partitionedByColumns());
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(nodeCtx, RowGranularity.PARTITION, partitionReferenceResolver, null);
    Symbol normalized;
    Map<Symbol, List<Literal>> queryPartitionMap = new HashMap<>();
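    // Normalize the query once per partition: the partition expressions below resolve
    // partition columns to that partition's concrete values before normalization.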
    for (PartitionName partitionName : tableInfo.partitions()) {
        for (PartitionExpression partitionExpression : partitionReferenceResolver.expressions()) {
            partitionExpression.setNextRow(partitionName);
        }
        normalized = normalizer.normalize(query, coordinatorTxnCtx);
        assert normalized != null : "normalizing a query must not return null";
        if (normalized.equals(query)) {
            // no partition columns inside the where clause
            return new PartitionResult(query, Collections.emptyList());
        }
        boolean canMatch = WhereClause.canMatch(normalized);
        if (canMatch) {
            List<Literal> partitions = queryPartitionMap.get(normalized);
            if (partitions == null) {
                partitions = new ArrayList<>();
                queryPartitionMap.put(normalized, partitions);
            }
            partitions.add(Literal.of(partitionName.asIndexName()));
        }
    }
    if (queryPartitionMap.size() == 1) {
        Map.Entry<Symbol, List<Literal>> entry = Iterables.getOnlyElement(queryPartitionMap.entrySet());
        return new PartitionResult(entry.getKey(), Lists2.map(entry.getValue(), literal -> nullOrString(literal.value())));
    } else if (queryPartitionMap.size() > 0) {
        PartitionResult partitionResult = tieBreakPartitionQueries(normalizer, queryPartitionMap, coordinatorTxnCtx);
        return partitionResult == null
            // the query will then be evaluated correctly within each partition to see whether it matches or not
            ? new PartitionResult(query, Lists2.map(tableInfo.partitions(), PartitionName::asIndexName))
            : partitionResult;
    } else {
        return new PartitionResult(Literal.BOOLEAN_FALSE, Collections.emptyList());
    }
}
Also used : Tuple(io.crate.common.collections.Tuple) ScalarsAndRefsToTrue(io.crate.analyze.ScalarsAndRefsToTrue) HashMap(java.util.HashMap) EvaluatingNormalizer(io.crate.expression.eval.EvaluatingNormalizer) PartitionName(io.crate.metadata.PartitionName) ArrayList(java.util.ArrayList) Map(java.util.Map) StringUtils.nullOrString(io.crate.common.StringUtils.nullOrString) Nullable(javax.annotation.Nullable) DocTableInfo(io.crate.metadata.doc.DocTableInfo) NodeContext(io.crate.metadata.NodeContext) WhereClause(io.crate.analyze.WhereClause) PartitionReferenceResolver(io.crate.metadata.PartitionReferenceResolver) Reference(io.crate.metadata.Reference) Iterables(io.crate.common.collections.Iterables) Lists2(io.crate.common.collections.Lists2) List(java.util.List) RowGranularity(io.crate.metadata.RowGranularity) DocTableRelation(io.crate.analyze.relations.DocTableRelation) Literal(io.crate.expression.symbol.Literal) Symbol(io.crate.expression.symbol.Symbol) AbstractTableRelation(io.crate.analyze.relations.AbstractTableRelation) PartitionExpression(io.crate.expression.reference.partitioned.PartitionExpression) Collections(java.util.Collections) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx)
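
The null-check-then-put pattern on queryPartitionMap above can also be written with computeIfAbsent. Below is a minimal, self-contained sketch of that grouping idea, using plain String stand-ins for Symbol and PartitionName; the class name and sample values are hypothetical and not part of the crate code.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch only: group partitions under their normalized query, as resolvePartitions does.
public class PartitionGroupingSketch {
    public static void main(String[] args) {
        Map<String, List<String>> queryPartitionMap = new HashMap<>();
        // two partitions whose normalized query turned out to be the same residual predicate
        queryPartitionMap.computeIfAbsent("x > 10", k -> new ArrayList<>()).add(".partitioned.t1.04132");
        queryPartitionMap.computeIfAbsent("x > 10", k -> new ArrayList<>()).add(".partitioned.t1.04134");
        // a single map entry means one rewritten query covers all matching partitions
        System.out.println(queryPartitionMap); // {x > 10=[.partitioned.t1.04132, .partitioned.t1.04134]}
    }
}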

Example 2 with NodeContext

use of io.crate.metadata.NodeContext in project crate by crate.

In class MoveFilterBeneathWindowAgg, method apply:

@Override
public LogicalPlan apply(Filter filter, Captures captures, TableStats tableStats, TransactionContext txnCtx, NodeContext nodeCtx) {
    WindowAgg windowAgg = captures.get(windowAggCapture);
    WindowDefinition windowDefinition = windowAgg.windowDefinition();
    List<WindowFunction> windowFunctions = windowAgg.windowFunctions();
    Predicate<Symbol> containsWindowFunction = symbol -> symbol instanceof WindowFunction && windowFunctions.contains(symbol);
    Symbol predicate = filter.query();
    List<Symbol> filterParts = AndOperator.split(predicate);
    ArrayList<Symbol> remainingFilterSymbols = new ArrayList<>();
    ArrayList<Symbol> windowPartitionedBasedFilters = new ArrayList<>();
    for (Symbol part : filterParts) {
        if (SymbolVisitors.any(containsWindowFunction, part) == false && windowDefinition.partitions().containsAll(extractColumns(part))) {
            windowPartitionedBasedFilters.add(part);
        } else {
            remainingFilterSymbols.add(part);
        }
    }
    assert remainingFilterSymbols.size() > 0 || windowPartitionedBasedFilters.size() > 0 : "Splitting the filter symbol must result in at least one group";
    /* SELECT ROW_NUMBER() OVER(PARTITION BY id)
     * WHERE `x = 1`
     *
     * `x` is not the window partition column.
     * We cannot push down the filter as it would change the window aggregation value
     */
    if (windowPartitionedBasedFilters.isEmpty()) {
        return null;
    }
    /* SELECT ROW_NUMBER() OVER(PARTITION BY id)
     * WHERE `id = 1`
     * remainingFilterSymbols:                  []
     * windowPartitionedBasedFilters:           [id = 1]
     *
     * Filter
     *  |
     * WindowAgg
     *
     * transforms to
     *
     * WindowAgg
     *  |
     * Filter (id = 1)
     */
    if (remainingFilterSymbols.isEmpty()) {
        return transpose(filter, windowAgg);
    }
    /* WHERE `ROW_NUMBER() OVER(PARTITION BY id) = 2 AND id = 1`
     * remainingFilterSymbols:                  [ROW_NUMBER() OVER(PARTITION BY id) = 2]
     * windowPartitionedBasedFilters:           [id = 1]
     *
     * Filter
     *  |
     * WindowAgg
     *
     * transforms to
     *
     * Filter (ROW_NUMBER() OVER(PARTITION BY id) = 2)
     *  |
     * WindowAgg
     *  |
     * Filter (id = 1)
     */
    LogicalPlan newWindowAgg = windowAgg.replaceSources(List.of(new Filter(windowAgg.source(), AndOperator.join(windowPartitionedBasedFilters))));
    return new Filter(newWindowAgg, AndOperator.join(remainingFilterSymbols));
}
Also used : TransactionContext(io.crate.metadata.TransactionContext) NodeContext(io.crate.metadata.NodeContext) LogicalPlanner.extractColumns(io.crate.planner.operators.LogicalPlanner.extractColumns) LogicalPlan(io.crate.planner.operators.LogicalPlan) Captures(io.crate.planner.optimizer.matcher.Captures) Predicate(java.util.function.Predicate) Util.transpose(io.crate.planner.optimizer.rule.Util.transpose) Pattern(io.crate.planner.optimizer.matcher.Pattern) Rule(io.crate.planner.optimizer.Rule) SymbolVisitors(io.crate.expression.symbol.SymbolVisitors) ArrayList(java.util.ArrayList) List(java.util.List) AndOperator(io.crate.expression.operator.AndOperator) TableStats(io.crate.statistics.TableStats) WindowDefinition(io.crate.analyze.WindowDefinition) Symbol(io.crate.expression.symbol.Symbol) Pattern.typeOf(io.crate.planner.optimizer.matcher.Pattern.typeOf) WindowFunction(io.crate.expression.symbol.WindowFunction) Patterns.source(io.crate.planner.optimizer.matcher.Patterns.source) Filter(io.crate.planner.operators.Filter) WindowAgg(io.crate.planner.operators.WindowAgg) Capture(io.crate.planner.optimizer.matcher.Capture)
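
To make the split above concrete, here is a small, self-contained sketch of partitioning a conjunction into filters that may move beneath the window aggregation and filters that must stay on top. The String conjuncts and the toy onlyPartitionColumns check are placeholder assumptions; the real rule inspects Symbol trees and the window's PARTITION BY columns.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

// Sketch only: split a conjunction into pushable and remaining filter parts.
public class FilterSplitSketch {
    public static void main(String[] args) {
        List<String> conjuncts = List.of("id = 1", "ROW_NUMBER() OVER(PARTITION BY id) = 2");
        // toy stand-in for "references only the window's partition columns and no window function"
        Predicate<String> onlyPartitionColumns = part -> !part.contains("OVER(");
        List<String> pushable = new ArrayList<>();
        List<String> remaining = new ArrayList<>();
        for (String part : conjuncts) {
            (onlyPartitionColumns.test(part) ? pushable : remaining).add(part);
        }
        System.out.println("push beneath WindowAgg: " + pushable);
        System.out.println("keep above WindowAgg:   " + remaining);
    }
}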

Example 3 with NodeContext

use of io.crate.metadata.NodeContext in project crate by crate.

In class CsvReaderBenchmark, method create_temp_file_and_uri:

@Setup
public void create_temp_file_and_uri() throws IOException {
    NodeContext nodeCtx = new NodeContext(new Functions(Map.of()));
    inputFactory = new InputFactory(nodeCtx);
    tempFile = File.createTempFile("temp", null);
    fileUri = tempFile.toURI().getPath();
    try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tempFile), StandardCharsets.UTF_8)) {
        writer.write("name,id\n");
        writer.write("Arthur,4\n");
        writer.write("Trillian,5\n");
        writer.write("Emma,5\n");
        writer.write("Emily,9\n");
        writer.write("Sarah,5\n");
        writer.write("John,5\n");
        writer.write("Mical,9\n");
        writer.write("Mary,5\n");
        writer.write("Jimmy,9\n");
        writer.write("Tom,5\n");
        writer.write("Neil,0\n");
        writer.write("Rose,5\n");
        writer.write("Gobnait,5\n");
        writer.write("Rory,1\n");
        writer.write("Martin,11\n");
        writer.write("Arthur,4\n");
        writer.write("Trillian,5\n");
        writer.write("Emma,5\n");
        writer.write("Emily,9\n");
        writer.write("Sarah,5\n");
        writer.write("John,5\n");
        writer.write("Mical,9\n");
        writer.write("Mary,5\n");
        writer.write("Jimmy,9\n");
        writer.write("Tom,5\n");
        writer.write("Neil,0\n");
        writer.write("Rose,5\n");
        writer.write("Gobnait,5\n");
        writer.write("Rory,1\n");
        writer.write("Martin,11\n");
    }
}
Also used : LocalFsFileInputFactory(io.crate.execution.engine.collect.files.LocalFsFileInputFactory) InputFactory(io.crate.expression.InputFactory) NodeContext(io.crate.metadata.NodeContext) FileOutputStream(java.io.FileOutputStream) Functions(io.crate.metadata.Functions) OutputStreamWriter(java.io.OutputStreamWriter) Setup(org.openjdk.jmh.annotations.Setup)
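
The fixture above can also be produced without the long run of writer.write calls. Here is a hedged sketch using java.nio.file.Files with a shortened row list; the file prefix mirrors the benchmark, but the trimmed row set and class name are only for illustration.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

// Sketch only: write a small CSV fixture; Files.write appends a newline after each row.
public class CsvFixtureSketch {
    public static void main(String[] args) throws IOException {
        Path tempFile = Files.createTempFile("temp", null);
        List<String> rows = List.of("name,id", "Arthur,4", "Trillian,5", "Emma,5");
        Files.write(tempFile, rows, StandardCharsets.UTF_8);
        System.out.println("CSV fixture written to " + tempFile.toUri().getPath());
    }
}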

Example 4 with NodeContext

use of io.crate.metadata.NodeContext in project crate by crate.

In class JsonReaderBenchmark, method create_temp_file_and_uri:

@Setup
public void create_temp_file_and_uri() throws IOException {
    NodeContext nodeCtx = new NodeContext(new Functions(Map.of()));
    inputFactory = new InputFactory(nodeCtx);
    tempFile = File.createTempFile("temp", null);
    fileUri = tempFile.toURI().getPath();
    try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tempFile), StandardCharsets.UTF_8)) {
        writer.write("{\"name\": \"Arthur\", \"id\": 4}\n");
        writer.write("{\"id\": 5, \"name\": \"Trillian\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Emma\"}\n");
        writer.write("{\"id\": 9, \"name\": \"Emily\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Sarah\"}\n");
        writer.write("{\"id\": 5, \"name\": \"John\"}\n");
        writer.write("{\"id\": 9, \"name\": \"Mical\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Mary\"}\n");
        writer.write("{\"id\": 9, \"name\": \"Jimmy\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Tom\"}\n");
        writer.write("{\"id\": 0, \"name\": \"Neil\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Rose\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Gobnait\"}\n");
        writer.write("{\"id\": 1, \"name\": \"Rory\"}\n");
        writer.write("{\"id\": 11, \"name\": \"Martin\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Trillian\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Emma\"}\n");
        writer.write("{\"id\": 9, \"name\": \"Emily\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Sarah\"}\n");
        writer.write("{\"id\": 5, \"name\": \"John\"}\n");
        writer.write("{\"id\": 9, \"name\": \"Mical\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Mary\"}\n");
        writer.write("{\"id\": 9, \"name\": \"Jimmy\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Tom\"}\n");
        writer.write("{\"id\": 0, \"name\": \"Neil\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Rose\"}\n");
        writer.write("{\"id\": 5, \"name\": \"Gobnait\"}\n");
        writer.write("{\"id\": 1, \"name\": \"Rory\"}\n");
        writer.write("{\"id\": 11, \"name\": \"Martin\"}\n");
    }
}
Also used : LocalFsFileInputFactory(io.crate.execution.engine.collect.files.LocalFsFileInputFactory) InputFactory(io.crate.expression.InputFactory) NodeContext(io.crate.metadata.NodeContext) FileOutputStream(java.io.FileOutputStream) Functions(io.crate.metadata.Functions) OutputStreamWriter(java.io.OutputStreamWriter) Setup(org.openjdk.jmh.annotations.Setup)
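
Since the JSON reader consumes one complete JSON object per line, the fixture can also be generated in a loop. Below is a small sketch with a trimmed data set; the format string and class name are illustrative assumptions, not benchmark code.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

// Sketch only: each line must be a complete JSON object followed by a newline.
public class JsonFixtureSketch {
    public static void main(String[] args) throws IOException {
        Path tempFile = Files.createTempFile("temp", null);
        String[][] people = {{"Arthur", "4"}, {"Trillian", "5"}, {"Emma", "5"}, {"Emily", "9"}};
        List<String> lines = new ArrayList<>();
        for (String[] p : people) {
            lines.add(String.format("{\"id\": %s, \"name\": \"%s\"}", p[1], p[0]));
        }
        Files.write(tempFile, lines, StandardCharsets.UTF_8);
        System.out.println("JSON fixture written to " + tempFile.toUri().getPath());
    }
}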

Example 5 with NodeContext

use of io.crate.metadata.NodeContext in project crate by crate.

In class CopyToPlan, method bind:

@VisibleForTesting
public static BoundCopyTo bind(AnalyzedCopyTo copyTo, CoordinatorTxnCtx txnCtx, NodeContext nodeCtx, Row parameters, SubQueryResults subQueryResults) {
    Function<? super Symbol, Object> eval = x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, parameters, subQueryResults);
    DocTableInfo table = (DocTableInfo) copyTo.tableInfo();
    List<String> partitions = resolvePartitions(Lists2.map(copyTo.table().partitionProperties(), x -> x.map(eval)), table);
    List<Symbol> outputs = new ArrayList<>();
    Map<ColumnIdent, Symbol> overwrites = null;
    boolean columnsDefined = false;
    final List<String> outputNames = new ArrayList<>(copyTo.columns().size());
    if (!copyTo.columns().isEmpty()) {
        // TODO: remove outputNames?
        for (Symbol symbol : copyTo.columns()) {
            assert symbol instanceof Reference : "Only references are expected here";
            RefVisitor.visitRefs(symbol, r -> outputNames.add(r.column().sqlFqn()));
            outputs.add(DocReferences.toSourceLookup(symbol));
        }
        columnsDefined = true;
    } else {
        Reference sourceRef;
        if (table.isPartitioned() && partitions.isEmpty()) {
            // table is partitioned, insert partitioned columns into the output
            overwrites = new HashMap<>();
            for (Reference reference : table.partitionedByColumns()) {
                if (!(reference instanceof GeneratedReference)) {
                    overwrites.put(reference.column(), reference);
                }
            }
            if (overwrites.size() > 0) {
                sourceRef = table.getReference(DocSysColumns.DOC);
            } else {
                sourceRef = table.getReference(DocSysColumns.RAW);
            }
        } else {
            sourceRef = table.getReference(DocSysColumns.RAW);
        }
        outputs = List.of(sourceRef);
    }
    Settings settings = genericPropertiesToSettings(copyTo.properties().map(eval));
    WriterProjection.CompressionType compressionType = settingAsEnum(WriterProjection.CompressionType.class, COMPRESSION_SETTING.get(settings));
    WriterProjection.OutputFormat outputFormat = settingAsEnum(WriterProjection.OutputFormat.class, OUTPUT_FORMAT_SETTING.get(settings));
    if (!columnsDefined && outputFormat == WriterProjection.OutputFormat.JSON_ARRAY) {
        throw new UnsupportedFeatureException("Output format not supported without specifying columns.");
    }
    WhereClause whereClause = new WhereClause(copyTo.whereClause(), partitions, Collections.emptySet());
    return new BoundCopyTo(outputs, table, whereClause, Literal.of(DataTypes.STRING.sanitizeValue(eval.apply(copyTo.uri()))), compressionType, outputFormat, outputNames.isEmpty() ? null : outputNames, columnsDefined, overwrites, settings);
}
Also used : UnsupportedFeatureException(io.crate.exceptions.UnsupportedFeatureException) GenericPropertiesConverter.genericPropertiesToSettings(io.crate.analyze.GenericPropertiesConverter.genericPropertiesToSettings) Match(io.crate.planner.optimizer.matcher.Match) BoundCopyTo(io.crate.analyze.BoundCopyTo) DependencyCarrier(io.crate.planner.DependencyCarrier) RefVisitor(io.crate.expression.symbol.RefVisitor) SymbolEvaluator(io.crate.analyze.SymbolEvaluator) TableStats(io.crate.statistics.TableStats) Settings(org.elasticsearch.common.settings.Settings) Map(java.util.Map) OUTPUT_FORMAT_SETTING(io.crate.analyze.CopyStatementSettings.OUTPUT_FORMAT_SETTING) DocSysColumns(io.crate.metadata.doc.DocSysColumns) DocTableInfo(io.crate.metadata.doc.DocTableInfo) NodeContext(io.crate.metadata.NodeContext) LogicalPlan(io.crate.planner.operators.LogicalPlan) Captures(io.crate.planner.optimizer.matcher.Captures) GeneratedReference(io.crate.metadata.GeneratedReference) Collect(io.crate.planner.operators.Collect) Set(java.util.Set) Lists2(io.crate.common.collections.Lists2) ExecutionPlan(io.crate.planner.ExecutionPlan) OptimizeCollectWhereClauseAccess(io.crate.planner.optimizer.rule.OptimizeCollectWhereClauseAccess) DocReferences(io.crate.metadata.DocReferences) List(java.util.List) Row(io.crate.data.Row) Symbol(io.crate.expression.symbol.Symbol) DataTypes(io.crate.types.DataTypes) SubQueryResults(io.crate.planner.operators.SubQueryResults) VisibleForTesting(io.crate.common.annotations.VisibleForTesting) CoordinatorTxnCtx(io.crate.metadata.CoordinatorTxnCtx) PartitionPropertiesAnalyzer(io.crate.analyze.PartitionPropertiesAnalyzer) HashMap(java.util.HashMap) Function(java.util.function.Function) ArrayList(java.util.ArrayList) Assignment(io.crate.sql.tree.Assignment) COMPRESSION_SETTING(io.crate.analyze.CopyStatementSettings.COMPRESSION_SETTING) AnalyzedCopyTo(io.crate.analyze.AnalyzedCopyTo) PartitionUnknownException(io.crate.exceptions.PartitionUnknownException) ProjectionBuilder(io.crate.execution.dsl.projection.builder.ProjectionBuilder) MergeCountProjection(io.crate.execution.dsl.projection.MergeCountProjection) WhereClause(io.crate.analyze.WhereClause) NodeOperationTree(io.crate.execution.dsl.phases.NodeOperationTree) ColumnIdent(io.crate.metadata.ColumnIdent) Reference(io.crate.metadata.Reference) NodeOperationTreeGenerator(io.crate.execution.engine.NodeOperationTreeGenerator) Merge(io.crate.planner.Merge) RowConsumer(io.crate.data.RowConsumer) WriterProjection(io.crate.execution.dsl.projection.WriterProjection) DocTableRelation(io.crate.analyze.relations.DocTableRelation) Literal(io.crate.expression.symbol.Literal) PlannerContext(io.crate.planner.PlannerContext) CopyStatementSettings.settingAsEnum(io.crate.analyze.CopyStatementSettings.settingAsEnum) Plan(io.crate.planner.Plan) Collections(java.util.Collections)
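
As an illustration of the kind of mapping a helper like settingAsEnum performs on COMPRESSION_SETTING and OUTPUT_FORMAT_SETTING, here is a hypothetical, self-contained version that turns a textual setting into an enum constant and treats an absent value as null. This is a sketch of the general idea, not crate's actual implementation.

import java.util.Locale;

// Sketch only: resolve a textual setting to an enum constant, null when unset.
public class SettingEnumSketch {
    enum CompressionType { GZIP }

    static <T extends Enum<T>> T settingAsEnum(Class<T> type, String value) {
        if (value == null || value.isEmpty()) {
            return null;
        }
        return Enum.valueOf(type, value.toUpperCase(Locale.ENGLISH));
    }

    public static void main(String[] args) {
        System.out.println(settingAsEnum(CompressionType.class, "gzip")); // GZIP
        System.out.println(settingAsEnum(CompressionType.class, ""));     // null
    }
}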

Aggregations

NodeContext (io.crate.metadata.NodeContext): 29
Symbol (io.crate.expression.symbol.Symbol): 16
Row (io.crate.data.Row): 14
CoordinatorTxnCtx (io.crate.metadata.CoordinatorTxnCtx): 12
List (java.util.List): 12
SymbolEvaluator (io.crate.analyze.SymbolEvaluator): 10
VisibleForTesting (io.crate.common.annotations.VisibleForTesting): 10
Plan (io.crate.planner.Plan): 10
PlannerContext (io.crate.planner.PlannerContext): 10
SubQueryResults (io.crate.planner.operators.SubQueryResults): 10
Function (java.util.function.Function): 10
Nullable (javax.annotation.Nullable): 10
RowConsumer (io.crate.data.RowConsumer): 9
TransactionContext (io.crate.metadata.TransactionContext): 9
DependencyCarrier (io.crate.planner.DependencyCarrier): 9
ArrayList (java.util.ArrayList): 9
Settings (org.elasticsearch.common.settings.Settings): 9
Row1 (io.crate.data.Row1): 8
OneRowActionListener (io.crate.execution.support.OneRowActionListener): 8
Map (java.util.Map): 7