Search in sources:

Example 1 with EvaluatingNormalizer

Use of io.crate.analyze.EvaluatingNormalizer in project crate by crate.

Class WhereClauseAnalyzer, method resolvePartitions:

private static WhereClause resolvePartitions(WhereClause whereClause, DocTableInfo tableInfo, Functions functions, TransactionContext transactionContext) {
    assert tableInfo.isPartitioned() : "table must be partitioned in order to resolve partitions";
    assert whereClause.partitions().isEmpty() : "partitions must not be analyzed twice";
    if (tableInfo.partitions().isEmpty()) {
        // table is partitioned but has no data / no partitions
        return WhereClause.NO_MATCH;
    }
    PartitionReferenceResolver partitionReferenceResolver = preparePartitionResolver(tableInfo.partitionedByColumns());
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(
        functions, RowGranularity.PARTITION, ReplaceMode.COPY, partitionReferenceResolver, null);
    Symbol normalized;
    Map<Symbol, List<Literal>> queryPartitionMap = new HashMap<>();
    for (PartitionName partitionName : tableInfo.partitions()) {
        for (PartitionExpression partitionExpression : partitionReferenceResolver.expressions()) {
            partitionExpression.setNextRow(partitionName);
        }
        normalized = normalizer.normalize(whereClause.query(), transactionContext);
        assert normalized != null : "normalizing a query must not return null";
        if (normalized.equals(whereClause.query())) {
            // no partition columns inside the where clause
            return whereClause;
        }
        boolean canMatch = WhereClause.canMatch(normalized);
        if (canMatch) {
            List<Literal> partitions = queryPartitionMap.get(normalized);
            if (partitions == null) {
                partitions = new ArrayList<>();
                queryPartitionMap.put(normalized, partitions);
            }
            partitions.add(Literal.of(partitionName.asIndexName()));
        }
    }
    if (queryPartitionMap.size() == 1) {
        Map.Entry<Symbol, List<Literal>> entry = Iterables.getOnlyElement(queryPartitionMap.entrySet());
        whereClause = new WhereClause(entry.getKey(), whereClause.docKeys().orElse(null), new ArrayList<String>(entry.getValue().size()));
        whereClause.partitions(entry.getValue());
        return whereClause;
    } else if (queryPartitionMap.size() > 0) {
        return tieBreakPartitionQueries(normalizer, queryPartitionMap, whereClause, transactionContext);
    } else {
        return WhereClause.NO_MATCH;
    }
}
Also used: EvaluatingNormalizer(io.crate.analyze.EvaluatingNormalizer) Symbol(io.crate.analyze.symbol.Symbol) WhereClause(io.crate.analyze.WhereClause) PartitionExpression(io.crate.operation.reference.partitioned.PartitionExpression) Literal(io.crate.analyze.symbol.Literal) ImmutableList(com.google.common.collect.ImmutableList)
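
The get/put sequence that accumulates partitions per normalized query predates Map.computeIfAbsent; a behavior-preserving sketch of the same accumulation in Java 8 style:

        // Equivalent to the get/put block above: create the list on first sight
        // of a normalized query, then record this partition's index name.
        queryPartitionMap
            .computeIfAbsent(normalized, key -> new ArrayList<>())
            .add(Literal.of(partitionName.asIndexName()));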

Example 2 with EvaluatingNormalizer

Use of io.crate.analyze.EvaluatingNormalizer in project crate by crate.

Class SelectPlannerTest, method testNoSoftLimitOnUnlimitedChildRelation:

@Test
public void testNoSoftLimitOnUnlimitedChildRelation() throws Exception {
    int softLimit = 10_000;
    EvaluatingNormalizer normalizer = EvaluatingNormalizer.functionOnlyNormalizer(e.functions(), ReplaceMode.COPY);
    Planner.Context plannerContext = new Planner.Context(e.planner, clusterService, UUID.randomUUID(), null, normalizer, new TransactionContext(SessionContext.SYSTEM_SESSION), softLimit, 0);
    Limits limits = plannerContext.getLimits(new QuerySpec());
    assertThat(limits.finalLimit(), is(TopN.NO_LIMIT));
}
Also used: SessionContext(io.crate.action.sql.SessionContext) EvaluatingNormalizer(io.crate.analyze.EvaluatingNormalizer) QuerySpec(io.crate.analyze.QuerySpec) Test(org.junit.Test) CrateUnitTest(io.crate.test.integration.CrateUnitTest)
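
A commented restatement of the Planner.Context construction above. Reading the two trailing int arguments as (softLimit, fetchSize) is an assumption based on this test and on NestedLoopConsumerTest below, which passes (0, 0); the fourth argument is the ConsumingPlanner slot, unused here:

    Planner.Context plannerContext = new Planner.Context(
        e.planner,                                             // planner under test
        clusterService,
        UUID.randomUUID(),                                     // job id (assumed meaning)
        null,                                                  // no ConsumingPlanner needed for getLimits()
        normalizer,
        new TransactionContext(SessionContext.SYSTEM_SESSION),
        softLimit,                                             // soft limit being exercised
        0);                                                    // fetch size (assumed meaning)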

Example 3 with EvaluatingNormalizer

Use of io.crate.analyze.EvaluatingNormalizer in project crate by crate.

Class NodeStatsCollectSource, method nodeIds:

@Nullable
static Collection<DiscoveryNode> nodeIds(WhereClause whereClause, Collection<DiscoveryNode> nodes, Functions functions) {
    if (!whereClause.hasQuery()) {
        return nodes;
    }
    LocalSysColReferenceResolver localSysColReferenceResolver = new LocalSysColReferenceResolver(
        ImmutableList.of(SysNodesTableInfo.Columns.NAME, SysNodesTableInfo.Columns.ID));
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(
        functions, RowGranularity.DOC, ReplaceMode.COPY, localSysColReferenceResolver, null);
    List<DiscoveryNode> newNodes = new ArrayList<>();
    for (DiscoveryNode node : nodes) {
        String nodeId = node.getId();
        for (RowCollectExpression<NodeStatsContext, ?> expression : localSysColReferenceResolver.expressions()) {
            expression.setNextRow(new NodeStatsContext(nodeId, node.name()));
        }
        Symbol normalized = normalizer.normalize(whereClause.query(), null);
        if (normalized.equals(whereClause.query())) {
            // no locally available sys.nodes columns in the where clause
            return nodes;
        }
        if (WhereClause.canMatch(normalized)) {
            newNodes.add(node);
        }
    }
    return newNodes;
}
Also used: DiscoveryNode(org.elasticsearch.cluster.node.DiscoveryNode) EvaluatingNormalizer(io.crate.analyze.EvaluatingNormalizer) Symbol(io.crate.analyze.symbol.Symbol) ArrayList(java.util.ArrayList) NodeStatsContext(io.crate.operation.reference.sys.node.NodeStatsContext) Nullable(org.elasticsearch.common.Nullable)
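
A hedged call-site sketch: given a where clause over sys.nodes, nodeIds returns the subset of nodes whose name/id values keep the query satisfiable, or the full collection when no locally resolvable sys.nodes column occurs in the clause. The variables whereClause, clusterNodes and functions are assumed to be in scope:

    // Hedged usage sketch; nodeIds is annotated @Nullable, so guard defensively
    // even though the body shown above never returns null.
    Collection<DiscoveryNode> candidates = nodeIds(whereClause, clusterNodes, functions);
    if (candidates != null) {
        for (DiscoveryNode node : candidates) {
            // dispatch the node-stats collect request only to nodes that can match
        }
    }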

Example 4 with EvaluatingNormalizer

Use of io.crate.analyze.EvaluatingNormalizer in project crate by crate.

Class SystemCollectSource, method getCollector:

@Override
public CrateCollector getCollector(CollectPhase phase, BatchConsumer consumer, JobCollectContext jobCollectContext) {
    RoutedCollectPhase collectPhase = (RoutedCollectPhase) phase;
    // sys.operations can contain a _node column - these refs need to be normalized into literals
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(functions, RowGranularity.DOC, ReplaceMode.COPY, new NodeSysReferenceResolver(nodeSysExpression), null);
    final RoutedCollectPhase routedCollectPhase = collectPhase.normalize(normalizer, null);
    Map<String, Map<String, List<Integer>>> locations = collectPhase.routing().locations();
    String table = Iterables.getOnlyElement(locations.get(clusterService.localNode().getId()).keySet());
    Supplier<CompletableFuture<? extends Iterable<?>>> iterableGetter = iterableGetters.get(table);
    assert iterableGetter != null : "iterableGetter for " + table + " must exist";
    boolean requiresScroll = consumer.requiresScroll();
    return BatchIteratorCollectorBridge.newInstance(
        () -> iterableGetter.get().thenApply(
            dataIterable -> RowsBatchIterator.newInstance(
                dataIterableToRowsIterable(routedCollectPhase, requiresScroll, dataIterable),
                collectPhase.toCollect().size())),
        consumer);
}
Also used: BatchIteratorCollectorBridge(io.crate.operation.collect.BatchIteratorCollectorBridge) Iterables(com.google.common.collect.Iterables) CompletableFuture(java.util.concurrent.CompletableFuture) ReplaceMode(io.crate.metadata.ReplaceMode) Function(java.util.function.Function) Supplier(java.util.function.Supplier) RoutedCollectPhase(io.crate.planner.node.dql.RoutedCollectPhase) Inject(org.elasticsearch.common.inject.Inject) JobCollectContext(io.crate.operation.collect.JobCollectContext) SysSnapshots(io.crate.operation.reference.sys.snapshot.SysSnapshots) ImmutableList(com.google.common.collect.ImmutableList) Functions(io.crate.metadata.Functions) BatchConsumer(io.crate.data.BatchConsumer) io.crate.metadata.sys(io.crate.metadata.sys) NodeSysReferenceResolver(io.crate.operation.reference.sys.node.local.NodeSysReferenceResolver) ClusterService(org.elasticsearch.cluster.ClusterService) Map(java.util.Map) SysRowUpdater(io.crate.operation.reference.sys.SysRowUpdater) SysRepositoriesService(io.crate.operation.reference.sys.repositories.SysRepositoriesService) RowsBatchIterator(io.crate.data.RowsBatchIterator) CrateCollector(io.crate.operation.collect.CrateCollector) SysCheck(io.crate.operation.reference.sys.check.SysCheck) ImmutableMap(com.google.common.collect.ImmutableMap) RowsTransformer(io.crate.operation.collect.RowsTransformer) Set(java.util.Set) SysChecker(io.crate.operation.reference.sys.check.SysChecker) TableIdent(io.crate.metadata.TableIdent) RowContextReferenceResolver(io.crate.operation.reference.sys.RowContextReferenceResolver) JobsLogs(io.crate.operation.collect.stats.JobsLogs) CollectPhase(io.crate.planner.node.dql.CollectPhase) PgCatalogTables(io.crate.metadata.pg_catalog.PgCatalogTables) InputFactory(io.crate.operation.InputFactory) SysNodeChecks(io.crate.operation.reference.sys.check.node.SysNodeChecks) io.crate.metadata.information(io.crate.metadata.information) SummitsIterable(io.crate.operation.collect.files.SummitsIterable) List(java.util.List) RowGranularity(io.crate.metadata.RowGranularity) Row(io.crate.data.Row) NodeSysExpression(io.crate.operation.reference.sys.node.local.NodeSysExpression) PgTypeTable(io.crate.metadata.pg_catalog.PgTypeTable) EvaluatingNormalizer(io.crate.analyze.EvaluatingNormalizer)
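
The normalization step in isolation: with a NodeSysReferenceResolver behind it, the normalizer folds node-local sys references (such as the _node column in sys.operations) into Literals before the phase is executed. A minimal restatement, using the same names as the method above:

    // Same construction as in getCollector: a DOC-granularity normalizer whose
    // resolver binds node-local sys references to this node's values ...
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(
        functions, RowGranularity.DOC, ReplaceMode.COPY,
        new NodeSysReferenceResolver(nodeSysExpression), null);
    // ... so normalize() can replace those references with Literals.
    RoutedCollectPhase normalizedPhase = collectPhase.normalize(normalizer, null);

Note that getCollector keeps using the original collectPhase for routing() and toCollect(); only the row-producing path sees the normalized routedCollectPhase.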

Example 5 with EvaluatingNormalizer

Use of io.crate.analyze.EvaluatingNormalizer in project crate by crate.

Class NestedLoopConsumerTest, method initPlanner:

@Before
public void initPlanner() throws Exception {
    ClusterService clusterService = new NoopClusterService();
    TableStats tableStats = getTableStats();
    e = SQLExecutor.builder(clusterService).enableDefaultTables().setTableStats(tableStats).addDocTable(emptyRoutingTable).build();
    Functions functions = e.functions();
    EvaluatingNormalizer normalizer = EvaluatingNormalizer.functionOnlyNormalizer(functions, ReplaceMode.COPY);
    plannerContext = new Planner.Context(
        e.planner, clusterService, UUID.randomUUID(),
        new ConsumingPlanner(clusterService, functions, tableStats), normalizer,
        new TransactionContext(SessionContext.SYSTEM_SESSION), 0, 0);
    consumer = new NestedLoopConsumer(clusterService, functions, tableStats);
}
Also used: NoopClusterService(org.elasticsearch.test.cluster.NoopClusterService) ClusterService(org.elasticsearch.cluster.ClusterService) EvaluatingNormalizer(io.crate.analyze.EvaluatingNormalizer) Before(org.junit.Before)
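
Taken together, the five examples show the two ways an EvaluatingNormalizer is obtained; a side-by-side sketch, where referenceResolver stands in for any of the resolvers used above (PartitionReferenceResolver, LocalSysColReferenceResolver, NodeSysReferenceResolver):

    // 1) Full normalizer: the ReferenceResolver lets it fold column references
    //    (partition columns, sys.nodes columns, node-local sys refs) into Literals.
    EvaluatingNormalizer full = new EvaluatingNormalizer(
        functions, RowGranularity.DOC, ReplaceMode.COPY, referenceResolver, null);
    // 2) Function-only normalizer: no resolver, so presumably it only normalizes
    //    function symbols; that is sufficient for the planner tests above.
    EvaluatingNormalizer functionOnly =
        EvaluatingNormalizer.functionOnlyNormalizer(functions, ReplaceMode.COPY);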

Aggregations

EvaluatingNormalizer (io.crate.analyze.EvaluatingNormalizer): 5 usages
ImmutableList (com.google.common.collect.ImmutableList): 2 usages
Symbol (io.crate.analyze.symbol.Symbol): 2 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 1 usage
Iterables (com.google.common.collect.Iterables): 1 usage
SessionContext (io.crate.action.sql.SessionContext): 1 usage
QuerySpec (io.crate.analyze.QuerySpec): 1 usage
WhereClause (io.crate.analyze.WhereClause): 1 usage
Literal (io.crate.analyze.symbol.Literal): 1 usage
BatchConsumer (io.crate.data.BatchConsumer): 1 usage
Row (io.crate.data.Row): 1 usage
RowsBatchIterator (io.crate.data.RowsBatchIterator): 1 usage
Functions (io.crate.metadata.Functions): 1 usage
ReplaceMode (io.crate.metadata.ReplaceMode): 1 usage
RowGranularity (io.crate.metadata.RowGranularity): 1 usage
TableIdent (io.crate.metadata.TableIdent): 1 usage
io.crate.metadata.information: 1 usage
PgCatalogTables (io.crate.metadata.pg_catalog.PgCatalogTables): 1 usage
PgTypeTable (io.crate.metadata.pg_catalog.PgTypeTable): 1 usage
io.crate.metadata.sys: 1 usage