Use of io.prestosql.sql.planner.TypeProvider in project hetu-core by openlookeng.
In the class CubeOptimizer, the method rewriteAggregationNode:
private PlanNode rewriteAggregationNode(CubeRewriteResult cubeRewriteResult, PlanNode inputPlanNode)
{
    TypeProvider typeProvider = context.getSymbolAllocator().getTypes();
    // Add group by
    List<Symbol> groupings = aggregationNode.getGroupingKeys().stream()
            .map(Symbol::getName)
            .map(columnRewritesMap::get)
            .map(optimizedPlanMappings::get)
            .collect(Collectors.toList());
    Map<Symbol, Symbol> cubeScanToAggOutputMap = new HashMap<>();
    // Rewrite AggregationNode using Cube table
    ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregationsBuilder = ImmutableMap.builder();
    for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
        Type type = cubeRewriteResult.getSymbolMetadataMap().get(aggregatorSource.getOriginalAggSymbol()).getType();
        TypeSignature typeSignature = type.getTypeSignature();
        ColumnHandle cubeColHandle = cubeRewriteResult.getTableScanNode().getAssignments().get(aggregatorSource.getScanSymbol());
        ColumnMetadata cubeColumnMetadata = metadata.getColumnMetadata(context.getSession(), cubeTableHandle, cubeColHandle);
        AggregationSignature aggregationSignature = cubeMetadata.getAggregationSignature(cubeColumnMetadata.getName())
                .orElseThrow(() -> new ColumnNotFoundException(new SchemaTableName("", ""), cubeColHandle.getColumnName()));
        String aggFunction = COUNT.getName().equals(aggregationSignature.getFunction()) ? SUM.getName() : aggregationSignature.getFunction();
        SymbolReference argument = toSymbolReference(aggregatorSource.getScanSymbol());
        FunctionHandle functionHandle = metadata.getFunctionAndTypeManager().lookupFunction(aggFunction, TypeSignatureProvider.fromTypeSignatures(typeSignature));
        cubeScanToAggOutputMap.put(aggregatorSource.getScanSymbol(), aggregatorSource.getOriginalAggSymbol());
        aggregationsBuilder.put(aggregatorSource.getOriginalAggSymbol(), new AggregationNode.Aggregation(
                new CallExpression(aggFunction, functionHandle, type, ImmutableList.of(castToRowExpression(argument))),
                ImmutableList.of(castToRowExpression(argument)),
                false,
                Optional.empty(),
                Optional.empty(),
                Optional.empty()));
    }
    PlanNode planNode = inputPlanNode;
    AggregationNode aggNode = new AggregationNode(
            context.getIdAllocator().getNextId(),
            planNode,
            aggregationsBuilder.build(),
            singleGroupingSet(groupings),
            ImmutableList.of(),
            AggregationNode.Step.SINGLE,
            Optional.empty(),
            Optional.empty(),
            AggregationNode.AggregationType.HASH,
            Optional.empty());
    if (cubeRewriteResult.getAvgAggregationColumns().isEmpty()) {
        return aggNode;
    }
    if (!cubeRewriteResult.getComputeAvgDividingSumByCount()) {
        Map<Symbol, Expression> aggregateAssignments = new HashMap<>();
        for (CubeRewriteResult.AggregatorSource aggregatorSource : cubeRewriteResult.getAggregationColumns()) {
            aggregateAssignments.put(aggregatorSource.getOriginalAggSymbol(), toSymbolReference(aggregatorSource.getScanSymbol()));
        }
        planNode = new ProjectNode(
                context.getIdAllocator().getNextId(),
                aggNode,
                new Assignments(aggregateAssignments.entrySet().stream()
                        .collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
    }
    else {
        // If there was an AVG aggregation, map it to AVG = SUM / COUNT
        Map<Symbol, Expression> projections = new HashMap<>();
        aggNode.getGroupingKeys().forEach(symbol -> projections.put(symbol, toSymbolReference(symbol)));
        aggNode.getAggregations().keySet().stream()
                .filter(originalAggregationsMap::containsValue)
                .forEach(aggSymbol -> projections.put(aggSymbol, toSymbolReference(aggSymbol)));
        // Add AVG = SUM / COUNT
        for (CubeRewriteResult.AverageAggregatorSource avgAggSource : cubeRewriteResult.getAvgAggregationColumns()) {
            Symbol sumSymbol = cubeScanToAggOutputMap.get(avgAggSource.getSum());
            Symbol countSymbol = cubeScanToAggOutputMap.get(avgAggSource.getCount());
            Type avgResultType = typeProvider.get(avgAggSource.getOriginalAggSymbol());
            ArithmeticBinaryExpression division = new ArithmeticBinaryExpression(
                    ArithmeticBinaryExpression.Operator.DIVIDE,
                    new Cast(toSymbolReference(sumSymbol), avgResultType.getTypeSignature().toString()),
                    new Cast(toSymbolReference(countSymbol), avgResultType.getTypeSignature().toString()));
            projections.put(avgAggSource.getOriginalAggSymbol(), division);
        }
        return new ProjectNode(
                context.getIdAllocator().getNextId(),
                aggNode,
                new Assignments(projections.entrySet().stream()
                        .collect(Collectors.toMap(Map.Entry::getKey, entry -> castToRowExpression(entry.getValue())))));
    }
    return planNode;
}
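The only TypeProvider call in this rewrite is typeProvider.get(symbol), which resolves the declared type of the AVG output symbol so both the SUM and COUNT operands can be cast to it before the division. Below is a minimal, self-contained sketch of that lookup, not the optimizer code itself: the symbol names and types are invented, and the Symbol import path is assumed from hetu-core's SPI layout.

import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.plan.Symbol;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.planner.TypeProvider;

import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DoubleType.DOUBLE;

public class TypeProviderLookupSketch
{
    public static void main(String[] args)
    {
        // Hypothetical symbols standing in for the AVG output and its COUNT companion
        Symbol avgOutput = new Symbol("avg_total_price");
        Symbol countOutput = new Symbol("count_total_price");
        TypeProvider typeProvider = TypeProvider.copyOf(ImmutableMap.of(
                avgOutput, DOUBLE,
                countOutput, BIGINT));

        // The same lookup the rewrite performs to pick the CAST target for SUM / COUNT
        Type avgResultType = typeProvider.get(avgOutput);
        System.out.println(avgResultType.getTypeSignature()); // double
    }
}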
Use of io.prestosql.sql.planner.TypeProvider in project hetu-core by openlookeng.
In the class TestExpressionEquivalence, the method assertEquivalent:
private static void assertEquivalent(@Language("SQL") String left, @Language("SQL") String right)
{
    ParsingOptions parsingOptions = new ParsingOptions(AS_DOUBLE);
    Expression leftExpression = rewriteIdentifiersToSymbolReferences(SQL_PARSER.createExpression(left, parsingOptions));
    Expression rightExpression = rewriteIdentifiersToSymbolReferences(SQL_PARSER.createExpression(right, parsingOptions));
    Set<Symbol> symbols = extractUnique(ImmutableList.of(leftExpression, rightExpression));
    TypeProvider types = TypeProvider.copyOf(symbols.stream()
            .collect(toMap(identity(), TestExpressionEquivalence::generateType)));
    assertTrue(
            EQUIVALENCE.areExpressionsEquivalent(TEST_SESSION, leftExpression, rightExpression, types),
            format("Expected (%s) and (%s) to be equivalent", left, right));
    assertTrue(
            EQUIVALENCE.areExpressionsEquivalent(TEST_SESSION, rightExpression, leftExpression, types),
            format("Expected (%s) and (%s) to be equivalent", right, left));
}
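The test constructs its TypeProvider by assigning a type to every symbol pulled out of the two expressions. The sketch below shows the same copyOf-over-a-collected-map pattern in isolation; the symbols are invented, a fixed BIGINT stands in for the test's generateType helper, and the Symbol import path is assumed.

import com.google.common.collect.ImmutableSet;
import io.prestosql.spi.plan.Symbol;
import io.prestosql.sql.planner.TypeProvider;

import java.util.Set;

import static io.prestosql.spi.type.BigintType.BIGINT;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toMap;

public class SymbolTypeMappingSketch
{
    public static void main(String[] args)
    {
        Set<Symbol> symbols = ImmutableSet.of(new Symbol("a"), new Symbol("b"));
        // Assign a type to every extracted symbol and snapshot the result
        TypeProvider types = TypeProvider.copyOf(symbols.stream()
                .collect(toMap(identity(), symbol -> BIGINT)));
        System.out.println(types.get(new Symbol("a")).getTypeSignature()); // bigint
    }
}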
Use of io.prestosql.sql.planner.TypeProvider in project hetu-core by openlookeng.
In the class PlanPrinter, the method jsonFragmentPlan:
public static String jsonFragmentPlan(PlanNode root, Map<Symbol, Type> symbols, Metadata metadata, Session session)
{
    TypeProvider typeProvider = TypeProvider.copyOf(symbols.entrySet().stream()
            .distinct()
            .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)));
    TableInfoSupplier supplier = new TableInfoSupplier(metadata, session);
    ValuePrinter printer = new ValuePrinter(metadata, session);
    return new PlanPrinter(root, typeProvider, Optional.empty(), supplier, printer, StatsAndCosts.empty(), Optional.empty(), metadata).toJson();
}
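Because entrySet() cannot contain duplicate keys, the distinct() call above is effectively a no-op, and TypeProvider.copyOf already accepts the map form produced by the collector. A shorter construction along the same lines is sketched below; treat it as an assumed equivalent rather than a drop-in change to PlanPrinter, with the Symbol import path again assumed.

import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.plan.Symbol;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.planner.TypeProvider;

import java.util.Map;

public class FragmentTypeProviderSketch
{
    // Copy the symbol-to-type map once and hand it to TypeProvider.copyOf;
    // this yields the same provider as streaming the entry set.
    static TypeProvider typeProviderFor(Map<Symbol, Type> symbols)
    {
        return TypeProvider.copyOf(ImmutableMap.copyOf(symbols));
    }
}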
Use of io.prestosql.sql.planner.TypeProvider in project hetu-core by openlookeng.
In the class StatsNormalizer, the method normalize:
private PlanNodeStatsEstimate normalize(PlanNodeStatsEstimate stats, Optional<Collection<Symbol>> outputSymbols, TypeProvider types)
{
    if (stats.isOutputRowCountUnknown()) {
        return PlanNodeStatsEstimate.unknown();
    }
    PlanNodeStatsEstimate.Builder normalized = PlanNodeStatsEstimate.buildFrom(stats);
    Predicate<Symbol> symbolFilter = outputSymbols
            .map(ImmutableSet::copyOf)
            .map(set -> (Predicate<Symbol>) set::contains)
            .orElse(symbol -> true);
    for (Symbol symbol : stats.getSymbolsWithKnownStatistics()) {
        if (!symbolFilter.test(symbol)) {
            normalized.removeSymbolStatistics(symbol);
            continue;
        }
        SymbolStatsEstimate symbolStats = stats.getSymbolStatistics(symbol);
        SymbolStatsEstimate normalizedSymbolStats = stats.getOutputRowCount() == 0
                ? SymbolStatsEstimate.zero()
                : normalizeSymbolStats(symbol, symbolStats, stats, types);
        if (normalizedSymbolStats.isUnknown()) {
            normalized.removeSymbolStatistics(symbol);
            continue;
        }
        if (!Objects.equals(normalizedSymbolStats, symbolStats)) {
            normalized.addSymbolStatistics(symbol, normalizedSymbolStats);
        }
    }
    return normalized.build();
}
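The piece worth isolating here is the Optional-to-Predicate chain at the top of the method: an absent outputSymbols keeps statistics for every symbol, while a present collection restricts them to that set. The sketch below reproduces the pattern with plain strings so it runs without any planner classes; the symbol names are invented.

import com.google.common.collect.ImmutableSet;

import java.util.Collection;
import java.util.Optional;
import java.util.function.Predicate;

public class OutputSymbolFilterSketch
{
    static Predicate<String> filterFor(Optional<Collection<String>> outputSymbols)
    {
        // Absent -> keep everything; present -> keep only symbols in the set
        return outputSymbols
                .map(ImmutableSet::copyOf)
                .map(set -> (Predicate<String>) set::contains)
                .orElse(symbol -> true);
    }

    public static void main(String[] args)
    {
        Predicate<String> keepAll = filterFor(Optional.empty());
        Predicate<String> keepSome = filterFor(Optional.of(ImmutableSet.of("orderkey")));
        System.out.println(keepAll.test("custkey"));  // true
        System.out.println(keepSome.test("custkey")); // false
    }
}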
Use of io.prestosql.sql.planner.TypeProvider in project hetu-core by openlookeng.
In the class JoinStatsRule, the method computeInnerJoinStats:
private PlanNodeStatsEstimate computeInnerJoinStats(JoinNode node, PlanNodeStatsEstimate crossJoinStats, Session session, TypeProvider types)
{
    List<EquiJoinClause> equiJoinCriteria = node.getCriteria();
    Map<Integer, Symbol> layout = new HashMap<>();
    int channel = 0;
    for (Symbol symbol : node.getOutputSymbols()) {
        layout.put(channel++, symbol);
    }
    if (equiJoinCriteria.isEmpty()) {
        if (!node.getFilter().isPresent()) {
            return crossJoinStats;
        }
        // TODO: this might explode stats
        if (isExpression(node.getFilter().get())) {
            return filterStatsCalculator.filterStats(crossJoinStats, castToExpression(node.getFilter().get()), session, types);
        }
        else {
            return filterStatsCalculator.filterStats(crossJoinStats, node.getFilter().get(), session, types, layout);
        }
    }
    PlanNodeStatsEstimate equiJoinEstimate = filterByEquiJoinClauses(crossJoinStats, node.getCriteria(), session, types);
    if (equiJoinEstimate.isOutputRowCountUnknown()) {
        return PlanNodeStatsEstimate.unknown();
    }
    if (!node.getFilter().isPresent()) {
        return equiJoinEstimate;
    }
    PlanNodeStatsEstimate filteredEquiJoinEstimate;
    if (isExpression(node.getFilter().get())) {
        filteredEquiJoinEstimate = filterStatsCalculator.filterStats(equiJoinEstimate, castToExpression(node.getFilter().get()), session, types);
    }
    else {
        filteredEquiJoinEstimate = filterStatsCalculator.filterStats(equiJoinEstimate, node.getFilter().get(), session, types, layout);
    }
    if (filteredEquiJoinEstimate.isOutputRowCountUnknown()) {
        return normalizer.normalize(equiJoinEstimate.mapOutputRowCount(rowCount -> rowCount * UNKNOWN_FILTER_COEFFICIENT), types);
    }
    return filteredEquiJoinEstimate;
}
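The last branch is the notable estimation detail: when the non-equi filter's selectivity cannot be computed, the rule scales the equi-join estimate by UNKNOWN_FILTER_COEFFICIENT instead of returning an unknown estimate. The sketch below shows only that scaling step with a hand-built estimate and an illustrative 0.9 coefficient; the real rule also passes the result through the normalizer, which is omitted here, and the import path is assumed.

import io.prestosql.cost.PlanNodeStatsEstimate;

public class UnknownFilterFallbackSketch
{
    // Illustrative value; the real constant is defined in the filter stats calculator
    private static final double UNKNOWN_FILTER_COEFFICIENT = 0.9;

    public static void main(String[] args)
    {
        PlanNodeStatsEstimate equiJoinEstimate = PlanNodeStatsEstimate.builder()
                .setOutputRowCount(1_000)
                .build();
        // Scale the row count rather than discarding the equi-join estimate
        PlanNodeStatsEstimate fallback =
                equiJoinEstimate.mapOutputRowCount(rowCount -> rowCount * UNKNOWN_FILTER_COEFFICIENT);
        System.out.println(fallback.getOutputRowCount()); // 900.0
    }
}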