Use of io.crate.execution.dsl.projection.Projection in project crate by crate: the Get class, method build().
@Override
public ExecutionPlan build(PlannerContext plannerContext, Set<PlanHint> hints, ProjectionBuilder projectionBuilder,
                           int limitHint, int offsetHint, @Nullable OrderBy order, @Nullable Integer pageSizeHint,
                           Row params, SubQueryResults subQueryResults) {
    HashMap<String, Map<ShardId, List<PKAndVersion>>> idsByShardByNode = new HashMap<>();
    DocTableInfo docTableInfo = tableRelation.tableInfo();
    for (DocKeys.DocKey docKey : docKeys) {
        String id = docKey.getId(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults);
        if (id == null) {
            continue;
        }
        List<String> partitionValues = docKey.getPartitionValues(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults);
        String indexName = indexName(docTableInfo, partitionValues);
        String routing = docKey.getRouting(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults);
        ShardRouting shardRouting;
        try {
            shardRouting = plannerContext.resolveShard(indexName, id, routing);
        } catch (IndexNotFoundException e) {
            if (docTableInfo.isPartitioned()) {
                continue;
            }
            throw e;
        }
        String currentNodeId = shardRouting.currentNodeId();
        if (currentNodeId == null) {
            // If relocating is fast enough this will work, otherwise it will result in a shard failure which
            // will cause a statement retry
            currentNodeId = shardRouting.relocatingNodeId();
            if (currentNodeId == null) {
                throw new ShardNotFoundException(shardRouting.shardId());
            }
        }
        Map<ShardId, List<PKAndVersion>> idsByShard = idsByShardByNode.get(currentNodeId);
        if (idsByShard == null) {
            idsByShard = new HashMap<>();
            idsByShardByNode.put(currentNodeId, idsByShard);
        }
        List<PKAndVersion> pkAndVersions = idsByShard.get(shardRouting.shardId());
        if (pkAndVersions == null) {
            pkAndVersions = new ArrayList<>();
            idsByShard.put(shardRouting.shardId(), pkAndVersions);
        }
        long version = docKey.version(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults)
            .orElse(Versions.MATCH_ANY);
        long sequenceNumber = docKey.sequenceNo(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults)
            .orElse(SequenceNumbers.UNASSIGNED_SEQ_NO);
        long primaryTerm = docKey.primaryTerm(plannerContext.transactionContext(), plannerContext.nodeContext(), params, subQueryResults)
            .orElse(SequenceNumbers.UNASSIGNED_PRIMARY_TERM);
        pkAndVersions.add(new PKAndVersion(id, version, sequenceNumber, primaryTerm));
    }
    var docKeyColumns = new ArrayList<>(docTableInfo.primaryKey());
    docKeyColumns.addAll(docTableInfo.partitionedBy());
    docKeyColumns.add(docTableInfo.clusteredBy());
    docKeyColumns.add(DocSysColumns.VERSION);
    docKeyColumns.add(DocSysColumns.SEQ_NO);
    docKeyColumns.add(DocSysColumns.PRIMARY_TERM);
    var binder = new SubQueryAndParamBinder(params, subQueryResults);
    List<Symbol> boundOutputs = Lists2.map(outputs, binder);
    var boundQuery = binder.apply(query);
    // Collect all columns which are used inside the query.
    // If the query contains only DocKeys, no filter is needed as all DocKeys are handled by the PKLookupOperation.
    AtomicBoolean requiresAdditionalFilteringOnNonDocKeyColumns = new AtomicBoolean(false);
    var toCollectSet = new LinkedHashSet<>(boundOutputs);
    Consumer<Reference> addRefIfMatch = ref -> {
        toCollectSet.add(ref);
        if (docKeyColumns.contains(ref.column()) == false) {
            requiresAdditionalFilteringOnNonDocKeyColumns.set(true);
        }
    };
    RefVisitor.visitRefs(boundQuery, addRefIfMatch);
    var toCollect = boundOutputs;
    ArrayList<Projection> projections = new ArrayList<>();
    if (requiresAdditionalFilteringOnNonDocKeyColumns.get()) {
        toCollect = List.copyOf(toCollectSet);
        var filterProjection = ProjectionBuilder.filterProjection(toCollect, boundQuery);
        filterProjection.requiredGranularity(RowGranularity.SHARD);
        projections.add(filterProjection);
        // reduce outputs which have been added for the filter projection
        var evalProjection = new EvalProjection(InputColumn.mapToInputColumns(boundOutputs), RowGranularity.SHARD);
        projections.add(evalProjection);
    }
    var collect = new Collect(
        new PKLookupPhase(plannerContext.jobId(), plannerContext.nextExecutionPhaseId(), docTableInfo.partitionedBy(), toCollect, idsByShardByNode),
        TopN.NO_LIMIT,
        0,
        toCollect.size(),
        docKeys.size(),
        null);
    for (var projection : projections) {
        collect.addProjection(projection);
    }
    return collect;
}
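The loop above builds a node -> shard -> primary-key mapping before handing it to the PKLookupPhase. Below is a minimal, self-contained sketch of that grouping pattern using Map.computeIfAbsent in place of the explicit null checks; ShardKey, PkEntry and the resolver functions are hypothetical stand-ins, not CrateDB classes.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Function;

    // Hypothetical sketch of the node -> shard -> keys grouping used in Get.build.
    class PkGroupingSketch {

        record ShardKey(String index, int shardId) {}

        record PkEntry(String id, long version, long seqNo, long primaryTerm) {}

        static Map<String, Map<ShardKey, List<PkEntry>>> groupByNodeAndShard(
                List<PkEntry> entries,
                Function<PkEntry, String> nodeResolver,
                Function<PkEntry, ShardKey> shardResolver) {
            Map<String, Map<ShardKey, List<PkEntry>>> byNode = new HashMap<>();
            for (PkEntry entry : entries) {
                String nodeId = nodeResolver.apply(entry);
                if (nodeId == null) {
                    // skip entries that cannot be resolved to a node, similar to how
                    // the planner skips doc keys with a NULL id or a missing partition
                    continue;
                }
                byNode
                    .computeIfAbsent(nodeId, n -> new HashMap<>())
                    .computeIfAbsent(shardResolver.apply(entry), s -> new ArrayList<>())
                    .add(entry);
            }
            return byNode;
        }
    }

The original method expresses the same grouping with explicit get/put checks; computeIfAbsent is simply a more compact way to write it.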
Use of io.crate.execution.dsl.projection.Projection in project crate by crate: the Collect class, method executesOnShard().
@Override
public boolean executesOnShard() {
    List<Projection> projections = collectPhase.projections();
    if (projections.isEmpty()) {
        return collectPhase instanceof RoutedCollectPhase
            && ((RoutedCollectPhase) collectPhase).routing().containsShards();
    }
    Projection lastProjection = projections.get(projections.size() - 1);
    return lastProjection.requiredGranularity() == RowGranularity.SHARD;
}
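executesOnShard treats the last projection as decisive: if it requires SHARD granularity, the whole phase must run on the shards; with no projections the answer falls back to the routing. A minimal sketch of that decision, using simplified stand-in types (Granularity, SimpleProjection) rather than CrateDB's RowGranularity and Projection:

    import java.util.List;

    // Hypothetical sketch of the "executes on shard" decision in Collect.
    class ShardExecutionSketch {

        enum Granularity { SHARD, NODE, CLUSTER }

        interface SimpleProjection {
            Granularity requiredGranularity();
        }

        // The routing-based fallback is passed in as a plain boolean here.
        static boolean executesOnShard(List<SimpleProjection> projections, boolean routingContainsShards) {
            if (projections.isEmpty()) {
                return routingContainsShards;
            }
            SimpleProjection last = projections.get(projections.size() - 1);
            return last.requiredGranularity() == Granularity.SHARD;
        }
    }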
Use of io.crate.execution.dsl.projection.Projection in project crate by crate: the WindowAgg class, method build().
@Override
public ExecutionPlan build(PlannerContext plannerContext, Set<PlanHint> planHints, ProjectionBuilder projectionBuilder,
                           int limit, int offset, @Nullable OrderBy order, @Nullable Integer pageSizeHint,
                           Row params, SubQueryResults subQueryResults) {
    InputColumns.SourceSymbols sourceSymbols = new InputColumns.SourceSymbols(source.outputs());
    SubQueryAndParamBinder binder = new SubQueryAndParamBinder(params, subQueryResults);
    Function<Symbol, Symbol> toInputCols = binder.andThen(s -> InputColumns.create(s, sourceSymbols));
    List<WindowFunction> boundWindowFunctions = (List<WindowFunction>) (List) Lists2.map(windowFunctions, toInputCols);
    List<Projection> projections = new ArrayList<>();
    WindowAggProjection windowAggProjection = new WindowAggProjection(
        windowDefinition.map(toInputCols), boundWindowFunctions, InputColumns.create(this.standalone, sourceSymbols));
    projections.add(windowAggProjection);
    ExecutionPlan sourcePlan = source.build(
        plannerContext, planHints, projectionBuilder, TopN.NO_LIMIT, TopN.NO_OFFSET, null, pageSizeHint, params, subQueryResults);
    ResultDescription resultDescription = sourcePlan.resultDescription();
    boolean executesOnHandler = executesOnHandler(plannerContext.handlerNode(), resultDescription.nodeIds());
    boolean nonDistExecution = windowDefinition.partitions().isEmpty()
                               || resultDescription.hasRemainingLimitOrOffset()
                               || executesOnHandler;
    if (nonDistExecution) {
        sourcePlan = Merge.ensureOnHandler(sourcePlan, plannerContext);
        for (Projection projection : projections) {
            sourcePlan.addProjection(projection);
        }
    } else {
        sourcePlan.setDistributionInfo(new DistributionInfo(
            DistributionType.MODULO, source.outputs().indexOf(windowDefinition.partitions().iterator().next())));
        MergePhase distWindowAgg = new MergePhase(
            UUIDs.dirtyUUID(),
            plannerContext.nextExecutionPhaseId(),
            "distWindowAgg",
            resultDescription.nodeIds().size(),
            resultDescription.numOutputs(),
            resultDescription.nodeIds(),
            resultDescription.streamOutputs(),
            projections,
            DistributionInfo.DEFAULT_BROADCAST,
            null);
        return new Merge(sourcePlan, distWindowAgg, TopN.NO_LIMIT, TopN.NO_OFFSET,
                         windowAggProjection.outputs().size(), resultDescription.maxRowsPerNode(), null);
    }
    return sourcePlan;
}
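The distribution decision above comes down to three conditions: no PARTITION BY columns, a limit/offset still pending in the source result, or a source that already executes on the handler; any one of them forces the non-distributed path. A minimal sketch of just that predicate, with hypothetical parameter names:

    // Hypothetical restatement of the nonDistExecution predicate in WindowAgg.build.
    class WindowDistributionSketch {

        static boolean useNonDistributedExecution(boolean hasPartitionByColumns,
                                                  boolean hasRemainingLimitOrOffset,
                                                  boolean sourceExecutesOnHandler) {
            // Without PARTITION BY there is no key to distribute on, and a pending
            // limit/offset or a handler-only source makes merging on the handler the safer choice.
            return !hasPartitionByColumns || hasRemainingLimitOrOffset || sourceExecutesOnHandler;
        }
    }

In the distributed branch, the code hashes rows by the first partition column (DistributionType.MODULO) so that all rows belonging to one partition arrive at the same merge node.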
Use of io.crate.execution.dsl.projection.Projection in project crate by crate: the GroupByOptimizedIterator class, method getSingleStringKeyGroupProjection().
private static GroupProjection getSingleStringKeyGroupProjection(Collection<? extends Projection> shardProjections) {
    if (shardProjections.size() != 1) {
        return null;
    }
    Projection shardProjection = shardProjections.iterator().next();
    if (!(shardProjection instanceof GroupProjection)) {
        return null;
    }
    GroupProjection groupProjection = (GroupProjection) shardProjection;
    if (groupProjection.keys().size() != 1 || groupProjection.keys().get(0).valueType() != DataTypes.STRING) {
        return null;
    }
    return groupProjection;
}
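The same check can be written more compactly with pattern matching for instanceof (Java 16+). The sketch below uses hypothetical stand-in types (SimpleProjection, SimpleGroupProjection, KeyType) rather than CrateDB's Projection and GroupProjection classes:

    import java.util.Collection;
    import java.util.List;

    // Hypothetical sketch of the "single string-key group projection" check.
    class SingleStringKeySketch {

        enum KeyType { STRING, LONG, OTHER }

        interface SimpleProjection {}

        record SimpleGroupProjection(List<KeyType> keyTypes) implements SimpleProjection {}

        static SimpleGroupProjection singleStringKeyGroupProjection(Collection<? extends SimpleProjection> projections) {
            if (projections.size() != 1) {
                return null;
            }
            // Pattern matching folds the instanceof check and the cast into one condition.
            if (projections.iterator().next() instanceof SimpleGroupProjection group
                    && group.keyTypes().size() == 1
                    && group.keyTypes().get(0) == KeyType.STRING) {
                return group;
            }
            return null;
        }
    }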
Use of io.crate.execution.dsl.projection.Projection in project crate by crate: the AbstractProjectionsPhase class, method writeTo().
@Override
public void writeTo(StreamOutput out) throws IOException {
    out.writeString(name);
    assert jobId != null : "jobId must not be null";
    out.writeLong(jobId.getMostSignificantBits());
    out.writeLong(jobId.getLeastSignificantBits());
    out.writeVInt(executionPhaseId);
    int numCols = outputTypes.size();
    out.writeVInt(numCols);
    for (int i = 0; i < numCols; i++) {
        DataTypes.toStream(outputTypes.get(i), out);
    }
    if (hasProjections()) {
        out.writeVInt(projections.size());
        for (Projection p : projections) {
            Projection.toStream(p, out);
        }
    } else {
        out.writeVInt(0);
    }
}
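writeTo uses a length-prefixed layout: each variable-length section (output types, projections) is preceded by its element count, and 0 is written when there are no projections so the reader always knows how many entries to expect. Below is a minimal, self-contained sketch of the same layout using plain java.io.DataOutputStream instead of Elasticsearch's StreamOutput; the string-based type and projection placeholders stand in for DataTypes.toStream and Projection.toStream and are assumptions, not the real streamers.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.List;
    import java.util.UUID;

    // Hypothetical sketch of the length-prefixed phase serialization layout.
    class PhaseSerializationSketch {

        static byte[] serialize(String name, UUID jobId, int phaseId,
                                List<String> outputTypeNames,
                                List<String> projectionNames) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                out.writeUTF(name);
                out.writeLong(jobId.getMostSignificantBits());
                out.writeLong(jobId.getLeastSignificantBits());
                out.writeInt(phaseId);
                out.writeInt(outputTypeNames.size());      // column count prefix
                for (String type : outputTypeNames) {
                    out.writeUTF(type);                    // stand-in for DataTypes.toStream
                }
                out.writeInt(projectionNames.size());      // projection count prefix (0 if none)
                for (String projection : projectionNames) {
                    out.writeUTF(projection);              // stand-in for Projection.toStream
                }
            }
            return bytes.toByteArray();
        }
    }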