Use of io.prestosql.sql.tree.SymbolReference in project hetu-core by openlookeng.
The class TestScalarStatsCalculator, method testCastBigintToDouble.
@Test
public void testCastBigintToDouble() {
PlanNodeStatsEstimate inputStatistics = PlanNodeStatsEstimate.builder()
        .addSymbolStatistics(new Symbol("a"), SymbolStatsEstimate.builder()
                .setNullsFraction(0.3)
                .setLowValue(2.0)
                .setHighValue(10.0)
                .setDistinctValuesCount(4)
                .setAverageRowSize(2.0)
                .build())
        .build();
assertCalculate(new Cast(new SymbolReference("a"), "double"), inputStatistics)
        .lowValue(2.0)
        .highValue(10.0)
        .distinctValuesCount(4)
        .nullsFraction(0.3)
        .dataSizeUnknown();
}
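For comparison, here is a minimal hedged sketch of the same assertion pattern applied to a bare SymbolReference, under the assumption that an uncast reference simply passes the input statistics through; it reuses the helper methods shown above and is not taken from the project.
// Hypothetical companion check (assumption: a bare symbol reference propagates
// the input statistics unchanged). Reuses inputStatistics from the test above.
assertCalculate(new SymbolReference("a"), inputStatistics)
        .lowValue(2.0)
        .highValue(10.0)
        .distinctValuesCount(4)
        .nullsFraction(0.3);
// Under that assumption the average row size (2.0) would also carry over,
// so dataSizeUnknown() would not apply to the uncast reference.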
Use of io.prestosql.sql.tree.SymbolReference in project hetu-core by openlookeng.
The class PushProjectionThroughUnion, method apply.
@Override
public Result apply(ProjectNode parent, Captures captures, Context context) {
UnionNode source = captures.get(CHILD);
// Output layout of the resultant Union will be the same as the layout of the Project
List<Symbol> outputLayout = parent.getOutputSymbols();
// Mapping from the output symbol to ordered list of symbols from each of the sources
ImmutableListMultimap.Builder<Symbol, Symbol> mappings = ImmutableListMultimap.builder();
// sources for the resultant UnionNode
ImmutableList.Builder<PlanNode> outputSources = ImmutableList.builder();
for (int i = 0; i < source.getSources().size(); i++) {
// Map: output of union -> input of this source to the union
Map<Symbol, SymbolReference> outputToInput = sourceSymbolMap(source, i);
// assignments for the new ProjectNode
Assignments.Builder assignments = Assignments.builder();
// mapping from current ProjectNode to new ProjectNode, used to identify the output layout
Map<Symbol, Symbol> projectSymbolMapping = new HashMap<>();
// Translate the assignments in the ProjectNode using symbols of the source of the UnionNode
for (Map.Entry<Symbol, RowExpression> entry : parent.getAssignments().entrySet()) {
RowExpression translatedExpression;
Map<VariableReferenceExpression, VariableReferenceExpression> variable = new HashMap<>();
Map<Symbol, Type> symbols = context.getSymbolAllocator().getSymbols();
for (Symbol symbolMap : source.getSymbolMapping().keySet()) {
Symbol symboli = source.getSymbolMapping().get(symbolMap).get(i);
variable.put(new VariableReferenceExpression(symbolMap.getName(), symbols.get(symbolMap)), new VariableReferenceExpression(symboli.getName(), symbols.get(symboli)));
}
translatedExpression = RowExpressionVariableInliner.inlineVariables(variable, entry.getValue());
Symbol symbol = context.getSymbolAllocator().newSymbol(translatedExpression);
assignments.put(symbol, translatedExpression);
projectSymbolMapping.put(entry.getKey(), symbol);
}
outputSources.add(new ProjectNode(context.getIdAllocator().getNextId(), source.getSources().get(i), assignments.build()));
outputLayout.forEach(symbol -> mappings.put(symbol, projectSymbolMapping.get(symbol)));
}
return Result.ofPlanNode(new UnionNode(parent.getId(), outputSources.build(), mappings.build(), ImmutableList.copyOf(mappings.build().keySet())));
}
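As an illustration of the translation step above, suppose the union maps output symbol x to per-source symbols [x_1, x_2] and the parent projection computes x + 1. For source index 0, the rule inlines x with x_1 before re-projecting; a hedged sketch using the same APIs, where the symbol names and parentExpr are hypothetical and not from the project:
// Hypothetical inlining step for source 0 (symbol names are illustrative only).
Map<VariableReferenceExpression, VariableReferenceExpression> mapping = new HashMap<>();
mapping.put(new VariableReferenceExpression("x", BIGINT), new VariableReferenceExpression("x_1", BIGINT));
// x + 1 becomes x_1 + 1; the translated expression is then assigned to a fresh
// symbol in the new ProjectNode created for that source.
RowExpression translated = RowExpressionVariableInliner.inlineVariables(mapping, parentExpr);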
Use of io.prestosql.sql.tree.SymbolReference in project hetu-core by openlookeng.
The class TransformUnCorrelatedInPredicateSubQuerySelfJoinToAggregate, method transformProjectNode.
private Optional<ProjectNode> transformProjectNode(Context context, ProjectNode projectNode) {
// IN predicate requires only one projection
if (projectNode.getOutputSymbols().size() > 1) {
return Optional.empty();
}
PlanNode source = context.getLookup().resolve(projectNode.getSource());
if (source instanceof CTEScanNode) {
source = getChildFilterNode(context, context.getLookup().resolve(((CTEScanNode) source).getSource()));
}
if (!(source instanceof FilterNode && context.getLookup().resolve(((FilterNode) source).getSource()) instanceof JoinNode)) {
return Optional.empty();
}
FilterNode filter = (FilterNode) source;
Expression predicate = OriginalExpressionUtils.castToExpression(filter.getPredicate());
List<SymbolReference> allPredicateSymbols = new ArrayList<>();
getAllSymbols(predicate, allPredicateSymbols);
JoinNode joinNode = (JoinNode) context.getLookup().resolve(((FilterNode) source).getSource());
if (!isSelfJoin(projectNode, predicate, joinNode, context.getLookup())) {
// Check next level for Self Join
PlanNode left = context.getLookup().resolve(joinNode.getLeft());
boolean changed = false;
if (left instanceof ProjectNode) {
Optional<ProjectNode> transformResult = transformProjectNode(context, (ProjectNode) left);
if (transformResult.isPresent()) {
joinNode = new JoinNode(joinNode.getId(), joinNode.getType(), transformResult.get(), joinNode.getRight(), joinNode.getCriteria(), joinNode.getOutputSymbols(), joinNode.getFilter(), joinNode.getLeftHashSymbol(), joinNode.getRightHashSymbol(), joinNode.getDistributionType(), joinNode.isSpillable(), joinNode.getDynamicFilters());
changed = true;
}
}
PlanNode right = context.getLookup().resolve(joinNode.getRight());
if (right instanceof ProjectNode) {
Optional<ProjectNode> transformResult = transformProjectNode(context, (ProjectNode) right);
if (transformResult.isPresent()) {
joinNode = new JoinNode(joinNode.getId(), joinNode.getType(), joinNode.getLeft(), transformResult.get(), joinNode.getCriteria(), joinNode.getOutputSymbols(), joinNode.getFilter(), joinNode.getLeftHashSymbol(), joinNode.getRightHashSymbol(), joinNode.getDistributionType(), joinNode.isSpillable(), joinNode.getDynamicFilters());
changed = true;
}
}
if (changed) {
FilterNode transformedFilter = new FilterNode(filter.getId(), joinNode, filter.getPredicate());
ProjectNode transformedProject = new ProjectNode(projectNode.getId(), transformedFilter, projectNode.getAssignments());
return Optional.of(transformedProject);
}
return Optional.empty();
}
// Choose the table to use based on projected output.
TableScanNode leftTable = (TableScanNode) context.getLookup().resolve(joinNode.getLeft());
TableScanNode rightTable = (TableScanNode) context.getLookup().resolve(joinNode.getRight());
TableScanNode tableToUse;
List<RowExpression> aggregationSymbols;
AggregationNode.GroupingSetDescriptor groupingSetDescriptor;
Assignments projectionsForCTE = null;
Assignments projectionsFromFilter = null;
// Use non-projected column for aggregation
if (context.getLookup().resolve(projectNode.getSource()) instanceof CTEScanNode) {
CTEScanNode cteScanNode = (CTEScanNode) context.getLookup().resolve(projectNode.getSource());
ProjectNode childProjectOfCte = (ProjectNode) context.getLookup().resolve(cteScanNode.getSource());
List<Symbol> completeOutputSymbols = new ArrayList<>();
rightTable.getOutputSymbols().forEach((s) -> completeOutputSymbols.add(s));
leftTable.getOutputSymbols().forEach((s) -> completeOutputSymbols.add(s));
List<Symbol> outputSymbols = new ArrayList<>();
for (int i = 0; i < completeOutputSymbols.size(); i++) {
Symbol outputSymbol = completeOutputSymbols.get(i);
for (Symbol symbol : projectNode.getOutputSymbols()) {
if (childProjectOfCte.getAssignments().getMap().containsKey(symbol)) {
if (((SymbolReference) OriginalExpressionUtils.castToExpression(childProjectOfCte.getAssignments().getMap().get(symbol))).getName().equals(outputSymbol.getName())) {
outputSymbols.add(outputSymbol);
}
}
}
}
Map<Symbol, RowExpression> projectionsForCTEMap = new HashMap<>();
Map<Symbol, RowExpression> projectionsFromFilterMap = new HashMap<>();
for (Map.Entry entry : childProjectOfCte.getAssignments().getMap().entrySet()) {
if (entry.getKey().equals(getOnlyElement(projectNode.getOutputSymbols()))) {
projectionsForCTEMap.put((Symbol) entry.getKey(), (RowExpression) entry.getValue());
}
if (entry.getKey().equals(getOnlyElement(projectNode.getOutputSymbols()))) {
projectionsFromFilterMap.put(getOnlyElement(outputSymbols), (RowExpression) entry.getValue());
}
}
projectionsForCTE = new Assignments(projectionsForCTEMap);
projectionsFromFilter = new Assignments(projectionsFromFilterMap);
tableToUse = leftTable.getOutputSymbols().contains(getOnlyElement(outputSymbols)) ? leftTable : rightTable;
aggregationSymbols = allPredicateSymbols.stream()
        .filter(s -> tableToUse.getOutputSymbols().contains(SymbolUtils.from(s)))
        .filter(s -> !outputSymbols.contains(SymbolUtils.from(s)))
        .map(OriginalExpressionUtils::castToRowExpression)
        .collect(Collectors.toList());
// Create aggregation
groupingSetDescriptor = new AggregationNode.GroupingSetDescriptor(ImmutableList.copyOf(outputSymbols), 1, ImmutableSet.of());
} else {
tableToUse = leftTable.getOutputSymbols().contains(getOnlyElement(projectNode.getOutputSymbols())) ? leftTable : rightTable;
aggregationSymbols = allPredicateSymbols.stream()
        .filter(s -> tableToUse.getOutputSymbols().contains(SymbolUtils.from(s)))
        .filter(s -> !projectNode.getOutputSymbols().contains(SymbolUtils.from(s)))
        .map(OriginalExpressionUtils::castToRowExpression)
        .collect(Collectors.toList());
// Create aggregation
groupingSetDescriptor = new AggregationNode.GroupingSetDescriptor(ImmutableList.copyOf(projectNode.getOutputSymbols()), 1, ImmutableSet.of());
}
AggregationNode.Aggregation aggregation = new AggregationNode.Aggregation(
        new CallExpression("count", functionResolution.countFunction(), BIGINT, aggregationSymbols),
        aggregationSymbols,
        // mark DISTINCT since NOT_EQUALS predicate
        true,
        Optional.empty(), Optional.empty(), Optional.empty());
ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregationsBuilder = ImmutableMap.builder();
Symbol countSymbol = context.getSymbolAllocator().newSymbol(aggregation.getFunctionCall().getDisplayName(), BIGINT);
aggregationsBuilder.put(countSymbol, aggregation);
AggregationNode aggregationNode = new AggregationNode(context.getIdAllocator().getNextId(), tableToUse, aggregationsBuilder.build(), groupingSetDescriptor, ImmutableList.of(), AggregationNode.Step.SINGLE, Optional.empty(), Optional.empty(), AggregationNode.AggregationType.HASH, Optional.empty());
// Keep only aggregation results with count > 1, matching the NOT_EQUALS clause in the original query.
FilterNode filterNode = new FilterNode(context.getIdAllocator().getNextId(), aggregationNode,
        OriginalExpressionUtils.castToRowExpression(new ComparisonExpression(ComparisonExpression.Operator.GREATER_THAN,
                SymbolUtils.toSymbolReference(countSymbol), new GenericLiteral("BIGINT", "1"))));
// Project the aggregated+filtered rows.
ProjectNode transformedSubquery = new ProjectNode(projectNode.getId(), filterNode, projectNode.getAssignments());
if (context.getLookup().resolve(projectNode.getSource()) instanceof CTEScanNode) {
CTEScanNode cteScanNode = (CTEScanNode) context.getLookup().resolve(projectNode.getSource());
PlanNode projectNodeForCTE = context.getLookup().resolve(cteScanNode.getSource());
PlanNode projectNodeFromFilter = context.getLookup().resolve(projectNodeForCTE);
projectNodeFromFilter = new ProjectNode(projectNodeFromFilter.getId(), filterNode, projectionsFromFilter);
projectNodeForCTE = new ProjectNode(projectNodeForCTE.getId(), projectNodeFromFilter, projectionsForCTE);
cteScanNode = (CTEScanNode) cteScanNode.replaceChildren(ImmutableList.of(projectNodeForCTE));
cteScanNode.setOutputSymbols(projectNode.getOutputSymbols());
transformedSubquery = new ProjectNode(projectNode.getId(), cteScanNode, projectNode.getAssignments());
}
return Optional.of(transformedSubquery);
}
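The helper getAllSymbols used above is not shown in this listing; below is a minimal hedged sketch of what such a collector could look like, assuming the io.prestosql.sql.tree.Node#getChildren() traversal API (the project's actual helper may differ).
// Hypothetical sketch: collect every SymbolReference in an expression tree.
private static void getAllSymbols(Node node, List<SymbolReference> result) {
    if (node instanceof SymbolReference) {
        result.add((SymbolReference) node);
    }
    for (Node child : node.getChildren()) {
        getAllSymbols(child, result);
    }
}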
Use of io.prestosql.sql.tree.SymbolReference in project hetu-core by openlookeng.
The class AggregationRewriteWithCube, method createScanNode.
public CubeRewriteResult createScanNode(AggregationNode originalAggregationNode, PlanNode filterNode, TableHandle cubeTableHandle, Map<String, ColumnHandle> cubeColumnsMap, List<ColumnMetadata> cubeColumnsMetadata, boolean exactGroupsMatch) {
Set<Symbol> cubeScanSymbols = new HashSet<>();
Map<Symbol, ColumnHandle> symbolAssignments = new HashMap<>();
Set<CubeRewriteResult.DimensionSource> dimensionSymbols = new HashSet<>();
Set<CubeRewriteResult.AggregatorSource> aggregationColumns = new HashSet<>();
Set<CubeRewriteResult.AverageAggregatorSource> averageAggregationColumns = new HashSet<>();
Map<Symbol, ColumnMetadata> symbolMetadataMap = new HashMap<>();
Map<String, ColumnMetadata> columnMetadataMap = cubeColumnsMetadata.stream().collect(Collectors.toMap(ColumnMetadata::getName, Function.identity()));
boolean computeAvgDividingSumByCount = true;
Set<Symbol> filterSymbols = new HashSet<>();
if (filterNode != null) {
filterSymbols.addAll(SymbolsExtractor.extractUnique(((FilterNode) filterNode).getPredicate()));
}
for (Symbol filterSymbol : filterSymbols) {
if (symbolMappings.containsKey(filterSymbol.getName()) && symbolMappings.get(filterSymbol.getName()) instanceof ColumnHandle) {
// output symbol references a column of the original table
ColumnHandle originalColumn = (ColumnHandle) symbolMappings.get(filterSymbol.getName());
ColumnHandle cubeScanColumn = cubeColumnsMap.get(originalColumn.getColumnName());
ColumnMetadata columnMetadata = columnMetadataMap.get(cubeScanColumn.getColumnName());
Symbol cubeScanSymbol = symbolAllocator.newSymbol(cubeScanColumn.getColumnName(), columnMetadata.getType());
cubeScanSymbols.add(cubeScanSymbol);
symbolAssignments.put(cubeScanSymbol, cubeScanColumn);
symbolMetadataMap.put(cubeScanSymbol, columnMetadata);
rewrittenMappings.put(filterSymbol.getName(), cubeScanSymbol);
dimensionSymbols.add(new CubeRewriteResult.DimensionSource(filterSymbol, cubeScanSymbol));
}
}
for (Symbol originalAggOutputSymbol : originalAggregationNode.getOutputSymbols()) {
if (symbolMappings.containsKey(originalAggOutputSymbol.getName()) && symbolMappings.get(originalAggOutputSymbol.getName()) instanceof ColumnHandle) {
// output symbol references a column of the original table (the column is part of the GROUP BY clause)
ColumnHandle originalColumn = (ColumnHandle) symbolMappings.get(originalAggOutputSymbol.getName());
ColumnHandle cubeScanColumn = cubeColumnsMap.get(originalColumn.getColumnName());
ColumnMetadata columnMetadata = columnMetadataMap.get(cubeScanColumn.getColumnName());
if (!symbolAssignments.containsValue(cubeScanColumn)) {
Symbol cubeScanSymbol = symbolAllocator.newSymbol(cubeScanColumn.getColumnName(), columnMetadata.getType());
cubeScanSymbols.add(cubeScanSymbol);
symbolAssignments.put(cubeScanSymbol, cubeScanColumn);
symbolMetadataMap.put(cubeScanSymbol, columnMetadata);
rewrittenMappings.put(originalAggOutputSymbol.getName(), cubeScanSymbol);
dimensionSymbols.add(new CubeRewriteResult.DimensionSource(originalAggOutputSymbol, cubeScanSymbol));
} else {
Symbol cubeScanSymbol = symbolAssignments.keySet().stream().filter(key -> cubeScanColumn.equals(symbolAssignments.get(key))).findFirst().get();
rewrittenMappings.put(originalAggOutputSymbol.getName(), cubeScanSymbol);
dimensionSymbols.add(new CubeRewriteResult.DimensionSource(originalAggOutputSymbol, cubeScanSymbol));
}
} else if (originalAggregationNode.getAggregations().containsKey(originalAggOutputSymbol)) {
// output symbol is mapped to an aggregation
AggregationNode.Aggregation aggregation = originalAggregationNode.getAggregations().get(originalAggOutputSymbol);
String aggFunction = aggregation.getFunctionCall().getDisplayName();
List<Expression> arguments = aggregation.getArguments() == null ? null : aggregation.getArguments().stream().map(OriginalExpressionUtils::castToExpression).collect(Collectors.toList());
if (arguments != null && !arguments.isEmpty() && (!(arguments.get(0) instanceof SymbolReference))) {
log.info("Not a symbol reference in aggregation function. Agg Function = %s, Arguments = %s", aggFunction, arguments);
continue;
}
Object mappedValue = arguments == null || arguments.isEmpty() ? null : symbolMappings.get(((SymbolReference) arguments.get(0)).getName());
if (mappedValue == null || (mappedValue instanceof LongLiteral && ((LongLiteral) mappedValue).getValue() == 1)) {
// COUNT aggregation
if (CubeAggregateFunction.COUNT.getName().equals(aggFunction) && !aggregation.isDistinct()) {
// COUNT 1
AggregationSignature aggregationSignature = AggregationSignature.count();
String cubeColumnName = cubeMetadata.getColumn(aggregationSignature).orElseThrow(() -> new PrestoException(CUBE_ERROR, "Cannot find column associated with aggregation " + aggregationSignature));
ColumnHandle cubeColHandle = cubeColumnsMap.get(cubeColumnName);
if (!symbolAssignments.containsValue(cubeColHandle)) {
ColumnMetadata columnMetadata = columnMetadataMap.get(cubeColHandle.getColumnName());
Symbol cubeScanSymbol = symbolAllocator.newSymbol(cubeColHandle.getColumnName(), columnMetadata.getType());
cubeScanSymbols.add(cubeScanSymbol);
symbolMetadataMap.put(cubeScanSymbol, columnMetadata);
symbolAssignments.put(cubeScanSymbol, cubeColHandle);
rewrittenMappings.put(originalAggOutputSymbol.getName(), cubeScanSymbol);
aggregationColumns.add(new CubeRewriteResult.AggregatorSource(originalAggOutputSymbol, cubeScanSymbol));
}
}
} else if (mappedValue instanceof ColumnHandle) {
String originalColumnName = ((ColumnHandle) mappedValue).getColumnName();
boolean distinct = originalAggregationNode.getAggregations().get(originalAggOutputSymbol).isDistinct();
switch(aggFunction) {
case "min":
case "max":
case "sum":
case "count":
AggregationSignature aggregationSignature = new AggregationSignature(aggFunction, originalColumnName, distinct);
String cubeColumnName = cubeMetadata.getColumn(aggregationSignature).orElseThrow(() -> new PrestoException(CUBE_ERROR, "Cannot find column associated with aggregation " + aggregationSignature));
ColumnHandle cubeColHandle = cubeColumnsMap.get(cubeColumnName);
if (!symbolAssignments.containsValue(cubeColHandle)) {
ColumnMetadata columnMetadata = columnMetadataMap.get(cubeColHandle.getColumnName());
Symbol cubeScanSymbol = symbolAllocator.newSymbol(cubeColHandle.getColumnName(), columnMetadata.getType());
cubeScanSymbols.add(cubeScanSymbol);
symbolAssignments.put(cubeScanSymbol, cubeColHandle);
symbolMetadataMap.put(cubeScanSymbol, columnMetadata);
rewrittenMappings.put(originalAggOutputSymbol.getName(), cubeScanSymbol);
aggregationColumns.add(new CubeRewriteResult.AggregatorSource(originalAggOutputSymbol, cubeScanSymbol));
} else {
ColumnMetadata columnMetadata = columnMetadataMap.get(cubeColHandle.getColumnName());
Symbol cubeScanSymbol = symbolAssignments.keySet().stream().filter(key -> cubeColHandle.equals(symbolAssignments.get(key))).findFirst().get();
cubeScanSymbols.add(cubeScanSymbol);
symbolAssignments.put(cubeScanSymbol, cubeColHandle);
symbolMetadataMap.put(cubeScanSymbol, columnMetadata);
rewrittenMappings.put(originalAggOutputSymbol.getName(), cubeScanSymbol);
aggregationColumns.add(new CubeRewriteResult.AggregatorSource(originalAggOutputSymbol, cubeScanSymbol));
}
break;
case "avg":
AggregationSignature avgAggregationSignature = new AggregationSignature(aggFunction, originalColumnName, distinct);
if (exactGroupsMatch && cubeMetadata.getColumn(avgAggregationSignature).isPresent()) {
computeAvgDividingSumByCount = false;
String avgCubeColumnName = cubeMetadata.getColumn(avgAggregationSignature).orElseThrow(() -> new PrestoException(CUBE_ERROR, "Cannot find column associated with aggregation " + avgAggregationSignature));
ColumnHandle avgCubeColHandle = cubeColumnsMap.get(avgCubeColumnName);
if (!symbolAssignments.containsValue(avgCubeColHandle)) {
ColumnMetadata columnMetadata = columnMetadataMap.get(avgCubeColHandle.getColumnName());
Symbol cubeScanSymbol = symbolAllocator.newSymbol(avgCubeColHandle.getColumnName(), columnMetadata.getType());
cubeScanSymbols.add(cubeScanSymbol);
symbolAssignments.put(cubeScanSymbol, avgCubeColHandle);
symbolMetadataMap.put(cubeScanSymbol, columnMetadata);
rewrittenMappings.put(originalAggOutputSymbol.getName(), cubeScanSymbol);
aggregationColumns.add(new CubeRewriteResult.AggregatorSource(originalAggOutputSymbol, cubeScanSymbol));
}
} else {
AggregationSignature sumSignature = new AggregationSignature(SUM.getName(), originalColumnName, distinct);
String sumColumnName = cubeMetadata.getColumn(sumSignature).orElseThrow(() -> new PrestoException(CUBE_ERROR, "Cannot find column associated with aggregation " + sumSignature));
ColumnHandle sumColumnHandle = cubeColumnsMap.get(sumColumnName);
Symbol sumSymbol = null;
if (!symbolAssignments.containsValue(sumColumnHandle)) {
ColumnMetadata columnMetadata = columnMetadataMap.get(sumColumnHandle.getColumnName());
sumSymbol = symbolAllocator.newSymbol("sum_" + originalColumnName + "_" + originalAggOutputSymbol.getName(), columnMetadata.getType());
cubeScanSymbols.add(sumSymbol);
symbolAssignments.put(sumSymbol, sumColumnHandle);
symbolMetadataMap.put(sumSymbol, columnMetadata);
rewrittenMappings.put(sumSymbol.getName(), sumSymbol);
aggregationColumns.add(new CubeRewriteResult.AggregatorSource(sumSymbol, sumSymbol));
} else {
for (Map.Entry<Symbol, ColumnHandle> assignment : symbolAssignments.entrySet()) {
if (assignment.getValue().equals(sumColumnHandle)) {
sumSymbol = assignment.getKey();
break;
}
}
}
AggregationSignature countSignature = new AggregationSignature(COUNT.getName(), originalColumnName, distinct);
String countColumnName = cubeMetadata.getColumn(countSignature).orElseThrow(() -> new PrestoException(CUBE_ERROR, "Cannot find column associated with aggregation " + countSignature));
ColumnHandle countColumnHandle = cubeColumnsMap.get(countColumnName);
Symbol countSymbol = null;
if (!symbolAssignments.containsValue(countColumnHandle)) {
ColumnMetadata columnMetadata = columnMetadataMap.get(countColumnHandle.getColumnName());
countSymbol = symbolAllocator.newSymbol("count_" + originalColumnName + "_" + originalAggOutputSymbol.getName(), columnMetadata.getType());
cubeScanSymbols.add(countSymbol);
symbolAssignments.put(countSymbol, countColumnHandle);
symbolMetadataMap.put(countSymbol, columnMetadata);
rewrittenMappings.put(countSymbol.getName(), countSymbol);
aggregationColumns.add(new CubeRewriteResult.AggregatorSource(countSymbol, countSymbol));
} else {
for (Map.Entry<Symbol, ColumnHandle> assignment : symbolAssignments.entrySet()) {
if (assignment.getValue().equals(countColumnHandle)) {
countSymbol = assignment.getKey();
break;
}
}
}
averageAggregationColumns.add(new CubeRewriteResult.AverageAggregatorSource(originalAggOutputSymbol, sumSymbol, countSymbol));
}
break;
default:
throw new PrestoException(StandardErrorCode.GENERIC_INTERNAL_ERROR, "Unsupported aggregation function " + aggFunction);
}
} else {
log.info("Aggregation function argument is not a Column Handle. Agg Function = %s, Arguments = %s", aggFunction, arguments);
}
}
}
// Scan output order is important for partitioned cubes. Otherwise, incorrect results may be produced.
// Refer: https://gitee.com/openlookeng/hetu-core/issues/I4LAYC
List<Symbol> scanOutput = new ArrayList<>(cubeScanSymbols);
scanOutput.sort(Comparator.comparingInt(outSymbol -> cubeColumnsMetadata.indexOf(symbolMetadataMap.get(outSymbol))));
TableScanNode tableScanNode = TableScanNode.newInstance(idAllocator.getNextId(), cubeTableHandle, scanOutput, symbolAssignments, ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_DEFAULT, new UUID(0, 0), 0, false);
return new CubeRewriteResult(tableScanNode, symbolMetadataMap, dimensionSymbols, aggregationColumns, averageAggregationColumns, computeAvgDividingSumByCount);
}
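To make the avg handling above concrete: when the cube has no pre-aggregated avg column, or the group-by keys do not match exactly, avg(x) is served from the cube's sum and count columns, resolved through the same signature lookup used for the other functions. A hedged sketch of that lookup, where the column name "x" is an assumption:
// Hypothetical lookup of the sum and count cube columns backing avg(x).
AggregationSignature sumSignature = new AggregationSignature(SUM.getName(), "x", false);
AggregationSignature countSignature = new AggregationSignature(COUNT.getName(), "x", false);
Optional<String> sumColumn = cubeMetadata.getColumn(sumSignature);
Optional<String> countColumn = cubeMetadata.getColumn(countSignature);
// Both columns must resolve for the rewrite to proceed; rewrite() later projects
// AVG as CAST(sum AS type) / CAST(count AS type) over the re-aggregated values.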
Use of io.prestosql.sql.tree.SymbolReference in project hetu-core by openlookeng.
The class AggregationRewriteWithCube, method rewrite.
public PlanNode rewrite(AggregationNode originalAggregationNode, PlanNode filterNode) {
QualifiedObjectName starTreeTableName = QualifiedObjectName.valueOf(cubeMetadata.getCubeName());
TableHandle cubeTableHandle = metadata.getTableHandle(session, starTreeTableName).orElseThrow(() -> new CubeNotFoundException(starTreeTableName.toString()));
Map<String, ColumnHandle> cubeColumnsMap = metadata.getColumnHandles(session, cubeTableHandle);
TableMetadata cubeTableMetadata = metadata.getTableMetadata(session, cubeTableHandle);
List<ColumnMetadata> cubeColumnMetadataList = cubeTableMetadata.getColumns();
// Add group by
List<Symbol> groupings = new ArrayList<>(originalAggregationNode.getGroupingKeys().size());
for (Symbol symbol : originalAggregationNode.getGroupingKeys()) {
Object column = symbolMappings.get(symbol.getName());
if (column instanceof ColumnHandle) {
groupings.add(new Symbol(((ColumnHandle) column).getColumnName()));
}
}
Set<String> cubeGroups = cubeMetadata.getGroup();
boolean exactGroupsMatch = false;
if (groupings.size() == cubeGroups.size()) {
exactGroupsMatch = groupings.stream().map(Symbol::getName).map(String::toLowerCase).allMatch(cubeGroups::contains);
}
CubeRewriteResult cubeRewriteResult = createScanNode(originalAggregationNode, filterNode, cubeTableHandle, cubeColumnsMap, cubeColumnMetadataList, exactGroupsMatch);
PlanNode planNode = cubeRewriteResult.getTableScanNode();
// Add filter node
if (filterNode != null) {
Expression expression = castToExpression(((FilterNode) filterNode).getPredicate());
expression = rewriteExpression(expression, rewrittenMappings);
planNode = new FilterNode(idAllocator.getNextId(), planNode, castToRowExpression(expression));
}
if (!exactGroupsMatch) {
Map<Symbol, Symbol> cubeScanToAggOutputMap = new HashMap<>();
// Rewrite AggregationNode using Cube table
ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregationsBuilder = ImmutableMap.builder();
for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
ColumnHandle cubeColHandle = cubeRewriteResult.getTableScanNode().getAssignments().get(aggregatorSource.getScanSymbol());
ColumnMetadata cubeColumnMetadata = cubeRewriteResult.getSymbolMetadataMap().get(aggregatorSource.getScanSymbol());
Type type = cubeColumnMetadata.getType();
AggregationSignature aggregationSignature = cubeMetadata.getAggregationSignature(cubeColumnMetadata.getName()).orElseThrow(() -> new ColumnNotFoundException(new SchemaTableName(starTreeTableName.getSchemaName(), starTreeTableName.getObjectName()), cubeColHandle.getColumnName()));
String aggFunction = COUNT.getName().equals(aggregationSignature.getFunction()) ? "sum" : aggregationSignature.getFunction();
SymbolReference argument = toSymbolReference(aggregatorSource.getScanSymbol());
FunctionHandle functionHandle = metadata.getFunctionAndTypeManager().lookupFunction(aggFunction, TypeSignatureProvider.fromTypeSignatures(type.getTypeSignature()));
cubeScanToAggOutputMap.put(aggregatorSource.getScanSymbol(), aggregatorSource.getOriginalAggSymbol());
aggregationsBuilder.put(aggregatorSource.getOriginalAggSymbol(), new AggregationNode.Aggregation(
        new CallExpression(aggFunction, functionHandle, type, ImmutableList.of(OriginalExpressionUtils.castToRowExpression(argument))),
        ImmutableList.of(OriginalExpressionUtils.castToRowExpression(argument)),
        false, Optional.empty(), Optional.empty(), Optional.empty()));
}
List<Symbol> groupingKeys = originalAggregationNode.getGroupingKeys().stream().map(Symbol::getName).map(rewrittenMappings::get).collect(Collectors.toList());
planNode = new AggregationNode(idAllocator.getNextId(), planNode, aggregationsBuilder.build(), singleGroupingSet(groupingKeys), ImmutableList.of(), AggregationNode.Step.SINGLE, Optional.empty(), Optional.empty(), AggregationNode.AggregationType.HASH, Optional.empty());
AggregationNode aggNode = (AggregationNode) planNode;
if (!cubeRewriteResult.getAvgAggregationColumns().isEmpty()) {
if (!cubeRewriteResult.getComputeAvgDividingSumByCount()) {
Map<Symbol, Expression> aggregateAssignments = new HashMap<>();
for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
aggregateAssignments.put(aggregatorSource.getOriginalAggSymbol(), toSymbolReference(aggregatorSource.getScanSymbol()));
}
planNode = new ProjectNode(idAllocator.getNextId(), aggNode, new Assignments(aggregateAssignments.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
} else {
// If there was an AVG aggregation, map it to AVG = SUM/COUNT
Map<Symbol, Expression> projections = new HashMap<>();
aggNode.getGroupingKeys().forEach(symbol -> projections.put(symbol, toSymbolReference(symbol)));
aggNode.getAggregations().keySet().stream().filter(symbol -> symbolMappings.containsValue(symbol.getName())).forEach(aggSymbol -> projections.put(aggSymbol, toSymbolReference(aggSymbol)));
// Add AVG = SUM / COUNT
for (CubeRewriteResult.AverageAggregatorSource avgAggSource : cubeRewriteResult.getAvgAggregationColumns()) {
Symbol sumSymbol = cubeScanToAggOutputMap.get(avgAggSource.getSum());
Symbol countSymbol = cubeScanToAggOutputMap.get(avgAggSource.getCount());
Type avgResultType = typeProvider.get(avgAggSource.getOriginalAggSymbol());
ArithmeticBinaryExpression division = new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.DIVIDE,
        new Cast(toSymbolReference(sumSymbol), avgResultType.getTypeSignature().toString()),
        new Cast(toSymbolReference(countSymbol), avgResultType.getTypeSignature().toString()));
projections.put(avgAggSource.getOriginalAggSymbol(), division);
}
planNode = new ProjectNode(idAllocator.getNextId(), aggNode, new Assignments(projections.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
}
}
}
// Safety check to remove redundant symbols and rename original column names to intermediate names
if (!planNode.getOutputSymbols().equals(originalAggregationNode.getOutputSymbols())) {
// Map new symbol names to the old symbols
Map<Symbol, Expression> assignments = new HashMap<>();
Set<Symbol> planNodeOutput = new HashSet<>(planNode.getOutputSymbols());
for (Symbol originalAggOutputSymbol : originalAggregationNode.getOutputSymbols()) {
if (!planNodeOutput.contains(originalAggOutputSymbol)) {
// Must be grouping key
assignments.put(originalAggOutputSymbol, toSymbolReference(rewrittenMappings.get(originalAggOutputSymbol.getName())));
} else {
// Should be an expression and must have the same name in the new plan node
assignments.put(originalAggOutputSymbol, toSymbolReference(originalAggOutputSymbol));
}
}
planNode = new ProjectNode(idAllocator.getNextId(), planNode, new Assignments(assignments.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
}
return planNode;
}
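For example (all names hypothetical), a query computing avg(amount) GROUP BY region over a cube that stores sum_amount and count_amount columns would, when the groups do not match exactly, be rewritten into TableScan(cube) -> optional Filter -> Aggregation(re-aggregating the cube's sum and count columns, grouped by region) -> Project(avg := sum / count). A hedged sketch of the final division projection, mirroring the construction above with concrete symbols:
// Hypothetical projection entry for the rewritten avg; the symbol names and the
// "double" target type are assumptions, not taken from the project.
Expression division = new ArithmeticBinaryExpression(ArithmeticBinaryExpression.Operator.DIVIDE,
        new Cast(toSymbolReference(new Symbol("sum_amount")), "double"),
        new Cast(toSymbolReference(new Symbol("count_amount")), "double"));
projections.put(new Symbol("avg_amount"), division);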