Use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.
The class WhereClauseAnalyzer, method resolvePartitions.
public static PartitionResult resolvePartitions(Symbol query,
                                                DocTableInfo tableInfo,
                                                CoordinatorTxnCtx coordinatorTxnCtx,
                                                NodeContext nodeCtx) {
    assert tableInfo.isPartitioned() : "table must be partitioned in order to resolve partitions";
    assert !tableInfo.partitions().isEmpty() : "table must have at least one partition";

    PartitionReferenceResolver partitionReferenceResolver =
        preparePartitionResolver(tableInfo.partitionedByColumns());
    EvaluatingNormalizer normalizer =
        new EvaluatingNormalizer(nodeCtx, RowGranularity.PARTITION, partitionReferenceResolver, null);

    Symbol normalized;
    Map<Symbol, List<Literal>> queryPartitionMap = new HashMap<>();

    for (PartitionName partitionName : tableInfo.partitions()) {
        for (PartitionExpression partitionExpression : partitionReferenceResolver.expressions()) {
            partitionExpression.setNextRow(partitionName);
        }
        normalized = normalizer.normalize(query, coordinatorTxnCtx);
        assert normalized != null : "normalizing a query must not return null";
        if (normalized.equals(query)) {
            // no partition columns inside the where clause
            return new PartitionResult(query, Collections.emptyList());
        }
        boolean canMatch = WhereClause.canMatch(normalized);
        if (canMatch) {
            List<Literal> partitions = queryPartitionMap.get(normalized);
            if (partitions == null) {
                partitions = new ArrayList<>();
                queryPartitionMap.put(normalized, partitions);
            }
            partitions.add(Literal.of(partitionName.asIndexName()));
        }
    }

    if (queryPartitionMap.size() == 1) {
        Map.Entry<Symbol, List<Literal>> entry = Iterables.getOnlyElement(queryPartitionMap.entrySet());
        return new PartitionResult(
            entry.getKey(),
            Lists2.map(entry.getValue(), literal -> nullOrString(literal.value())));
    } else if (queryPartitionMap.size() > 0) {
        PartitionResult partitionResult =
            tieBreakPartitionQueries(normalizer, queryPartitionMap, coordinatorTxnCtx);
        // if the tie-break fails, the query will be evaluated within each partition
        // to see whether it matches or not
        return partitionResult == null
            ? new PartitionResult(query, Lists2.map(tableInfo.partitions(), PartitionName::asIndexName))
            : partitionResult;
    } else {
        return new PartitionResult(Literal.BOOLEAN_FALSE, Collections.emptyList());
    }
}
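The loop above swaps each partition's values into the PartitionExpressions, so normalization folds partition-column references into that partition's literals; a partition is kept only if the folded query can still match. A minimal usage sketch, assuming a bound query and a partitioned table are already available (the wrapper method and variable names are hypothetical, the call itself mirrors the signature shown above):

static PartitionResult pruneForQuery(Symbol boundQuery,
                                     DocTableInfo tableInfo,
                                     CoordinatorTxnCtx txnCtx,
                                     NodeContext nodeCtx) {
    // For a table partitioned by `p` and a query `p = 1 AND x > 10`, the result
    // carries the residual predicate `x > 10` plus the index name of the single
    // matching partition; partitions whose values fold the query to `false` are
    // dropped. A query that never references `p` is returned unchanged together
    // with an empty partition list.
    return WhereClauseAnalyzer.resolvePartitions(boundQuery, tableInfo, txnCtx, nodeCtx);
}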
Use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.
The class Count, method build.
@Override
public ExecutionPlan build(PlannerContext plannerContext, Set<PlanHint> planHints, ProjectionBuilder projectionBuilder,
                           int limit, int offset, @Nullable OrderBy order, @Nullable Integer pageSizeHint,
                           Row params, SubQueryResults subQueryResults) {
    var normalizer = new EvaluatingNormalizer(plannerContext.nodeContext(), RowGranularity.CLUSTER, null, tableRelation);
    var binder = new SubQueryAndParamBinder(params, subQueryResults)
        .andThen(x -> normalizer.normalize(x, plannerContext.transactionContext()));

    // bind all parameters and possible subQuery values and re-analyze the query
    // (could result in a NO_MATCH, routing could've changed, etc).
    WhereClause boundWhere = WhereClauseAnalyzer.resolvePartitions(
        where.map(binder), tableRelation, plannerContext.transactionContext(), plannerContext.nodeContext());

    Routing routing = plannerContext.allocateRouting(
        tableRelation.tableInfo(), boundWhere, RoutingProvider.ShardSelection.ANY,
        plannerContext.transactionContext().sessionContext());
    CountPhase countPhase = new CountPhase(
        plannerContext.nextExecutionPhaseId(), routing,
        Optimizer.optimizeCasts(boundWhere.queryOrFallback(), plannerContext),
        DistributionInfo.DEFAULT_BROADCAST);
    MergePhase mergePhase = new MergePhase(
        plannerContext.jobId(), plannerContext.nextExecutionPhaseId(), COUNT_PHASE_NAME,
        countPhase.nodeIds().size(), 1,
        Collections.singletonList(plannerContext.handlerNode()),
        Collections.singletonList(DataTypes.LONG),
        Collections.singletonList(MergeCountProjection.INSTANCE),
        DistributionInfo.DEFAULT_BROADCAST, null);
    return new CountPlan(countPhase, mergePhase);
}
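The essential idiom here is to compose parameter and sub-query binding with constant folding before partitions and routing are resolved. A minimal sketch of that composition, assuming the same where, params, subQueryResults, tableRelation and plannerContext as above (the wrapper and the DocTableRelation parameter type are assumptions for illustration; the calls mirror the plan above):

static WhereClause bindAndFold(WhereClause where,
                               Row params,
                               SubQueryResults subQueryResults,
                               DocTableRelation tableRelation,
                               PlannerContext plannerContext) {
    // Fold deterministic expressions over the bound values (e.g. a bound
    // `x = 1 + 1` becomes `x = 2`) with a cluster-level normalizer, then let the
    // where-clause analyzer prune partitions based on the folded predicate.
    var normalizer = new EvaluatingNormalizer(
        plannerContext.nodeContext(), RowGranularity.CLUSTER, null, tableRelation);
    var binder = new SubQueryAndParamBinder(params, subQueryResults)
        .andThen(x -> normalizer.normalize(x, plannerContext.transactionContext()));
    return WhereClauseAnalyzer.resolvePartitions(
        where.map(binder), tableRelation, plannerContext.transactionContext(), plannerContext.nodeContext());
}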
Use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.
The class NodeStatsCollectSource, method filterNodes.
static Collection<DiscoveryNode> filterNodes(Collection<DiscoveryNode> nodes, Symbol predicate, NodeContext nodeCtx) {
    var expressions = SysNodesTableInfo.create().expressions();
    var nameExpr = expressions.get(SysNodesTableInfo.Columns.NAME).create();
    var idExpr = expressions.get(SysNodesTableInfo.Columns.ID).create();
    MapBackedRefResolver referenceResolver = new MapBackedRefResolver(
        Map.of(SysNodesTableInfo.Columns.NAME, nameExpr, SysNodesTableInfo.Columns.ID, idExpr));
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(nodeCtx, RowGranularity.DOC, referenceResolver, null);
    List<DiscoveryNode> newNodes = new ArrayList<>();
    for (DiscoveryNode node : nodes) {
        String nodeId = node.getId();
        NodeStatsContext statsContext = new NodeStatsContext(nodeId, node.getName());
        nameExpr.setNextRow(statsContext);
        idExpr.setNextRow(statsContext);
        Symbol normalized = normalizer.normalize(predicate, CoordinatorTxnCtx.systemTransactionContext());
        if (normalized.equals(predicate)) {
            // no locally available sys.nodes columns in the where clause
            return nodes;
        }
        if (WhereClause.canMatch(normalized)) {
            newNodes.add(node);
        }
    }
    return newNodes;
}
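The effect is a coordinator-side pre-filter: only predicates that can be answered from the node `name` and `id` columns narrow the node set, anything else falls back to collecting from every node. A usage sketch under that assumption, as if called from within the same class (variable names hypothetical):

// Assume `clusterNodes` holds all DiscoveryNodes and `predicate` is the analyzed
// WHERE symbol of a sys.nodes query such as `name = 'node-1'`.
Collection<DiscoveryNode> targets = filterNodes(clusterNodes, predicate, nodeCtx);
// `targets` now contains only the node named 'node-1'. A predicate on a column
// that is not resolvable here (e.g. heap usage) would have returned
// `clusterNodes` unchanged, deferring the filter to the per-node collect phase.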
Use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.
The class RoutedCollectPhase, method normalize.
/**
 * Normalizes the symbols of this node with the given normalizer.
 *
 * @return a normalized node, or {@code this} if no changes occurred
 */
public RoutedCollectPhase normalize(EvaluatingNormalizer normalizer, @Nonnull TransactionContext txnCtx) {
    RoutedCollectPhase result = this;
    Function<Symbol, Symbol> normalize = s -> normalizer.normalize(s, txnCtx);
    List<Symbol> newToCollect = Lists2.map(toCollect, normalize);
    boolean changed = !newToCollect.equals(toCollect);
    Symbol newWhereClause = normalizer.normalize(where, txnCtx);
    OrderBy orderBy = this.orderBy;
    if (orderBy != null) {
        orderBy = orderBy.map(normalize);
    }
    changed = changed || newWhereClause != where || orderBy != this.orderBy;
    if (changed) {
        result = new RoutedCollectPhase(
            jobId(), phaseId(), name(), routing, maxRowGranularity,
            newToCollect, projections, newWhereClause, distributionInfo);
        result.nodePageSizeHint(nodePageSizeHint);
        result.orderBy(orderBy);
    }
    return result;
}
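A small sketch of the copy-on-change contract this method provides, assuming a collectPhase, a normalizer and a txnCtx are already in scope (names illustrative):

RoutedCollectPhase maybeNew = collectPhase.normalize(normalizer, txnCtx);
// Identity comparison is intentional: if neither toCollect, the where clause
// nor the orderBy changed, normalize() returns the same instance and no new
// phase object is allocated.
boolean reused = maybeNew == collectPhase;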
Use of io.crate.expression.eval.EvaluatingNormalizer in project crate by crate.
The class CopyAnalyzer, method analyzeCopyFrom.
AnalyzedCopyFrom analyzeCopyFrom(CopyFrom<Expression> node, ParamTypeHints paramTypeHints, CoordinatorTxnCtx txnCtx) {
    DocTableInfo tableInfo = (DocTableInfo) schemas.resolveTableInfo(
        node.table().getName(),
        Operation.INSERT,
        txnCtx.sessionContext().sessionUser(),
        txnCtx.sessionContext().searchPath());
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    var exprAnalyzerWithoutFields = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.UNSUPPORTED, null);
    var exprAnalyzerWithFieldsAsString = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var normalizer = new EvaluatingNormalizer(nodeCtx, RowGranularity.CLUSTER, null, new TableRelation(tableInfo));
    Table<Symbol> table = node.table().map(t -> exprAnalyzerWithFieldsAsString.convert(t, exprCtx));
    GenericProperties<Symbol> properties = node.properties().map(t -> exprAnalyzerWithoutFields.convert(t, exprCtx));
    Symbol uri = exprAnalyzerWithoutFields.convert(node.path(), exprCtx);
    if (node.isReturnSummary()) {
        return new AnalyzedCopyFromReturnSummary(tableInfo, table, properties, normalizer.normalize(uri, txnCtx));
    } else {
        return new AnalyzedCopyFrom(tableInfo, table, properties, normalizer.normalize(uri, txnCtx));
    }
}
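Here the normalizer's job is to fold the COPY FROM path expression at analysis time. A sketch of that step in isolation, reusing the names above (not CrateDB's code; the example expression is an assumption):

// `node.path()` might be an expression such as concat('file:///tmp/', 'data.json');
// converting it yields a function symbol, and the cluster-level normalizer
// collapses it to a single literal URI before the statement is planned.
Symbol uriSymbol = exprAnalyzerWithoutFields.convert(node.path(), exprCtx);
Symbol foldedUri = normalizer.normalize(uriSymbol, txnCtx);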