Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
Example from class TestTypeValidator, method testValidProject.
@Test
public void testValidProject() {
Expression expression1 = new Cast(columnB.toSymbolReference(), toSqlType(BIGINT));
Expression expression2 = new Cast(columnC.toSymbolReference(), toSqlType(BIGINT));
Assignments assignments = Assignments.builder()
        .put(symbolAllocator.newSymbol(expression1, BIGINT), expression1)
        .put(symbolAllocator.newSymbol(expression2, BIGINT), expression2)
        .build();
PlanNode node = new ProjectNode(newId(), baseTableScan, assignments);
assertTypesValid(node);
}
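For a fixed pair of assignments, the builder used above is interchangeable with the two-pair Assignments.of overload that the later snippets rely on. A minimal sketch reusing the two Cast expressions from this test; the local names castB and castC are illustrative only:

// Sketch: equivalent two-pair form of the builder call above (castB/castC are illustrative names)
Symbol castB = symbolAllocator.newSymbol(expression1, BIGINT);
Symbol castC = symbolAllocator.newSymbol(expression2, BIGINT);
Assignments assignments = Assignments.of(
        castB, expression1,
        castC, expression2);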
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
Example from class TestConnectorPushdownRulesWithHive, method testPushdownWithDuplicateExpressions.
@Test
public void testPushdownWithDuplicateExpressions() {
String tableName = "duplicate_expressions";
tester().getQueryRunner().execute(format(
        "CREATE TABLE %s (struct_of_bigint, just_bigint) AS SELECT cast(row(5, 6) AS row(a bigint, b bigint)) AS struct_of_int, 5 AS just_bigint WHERE false",
        tableName));
PushProjectionIntoTableScan pushProjectionIntoTableScan = new PushProjectionIntoTableScan(
        tester().getPlannerContext(),
        tester().getTypeAnalyzer(),
        new ScalarStatsCalculator(tester().getPlannerContext(), tester().getTypeAnalyzer()));
HiveTableHandle hiveTable = new HiveTableHandle(
        SCHEMA_NAME,
        tableName,
        ImmutableMap.of(),
        ImmutableList.of(),
        ImmutableList.of(),
        Optional.empty());
TableHandle table = new TableHandle(new CatalogName(HIVE_CATALOG_NAME), hiveTable, new HiveTransactionHandle(false));
HiveColumnHandle bigintColumn = createBaseColumn("just_bigint", 1, toHiveType(BIGINT), BIGINT, REGULAR, Optional.empty());
HiveColumnHandle partialColumn = new HiveColumnHandle(
        "struct_of_bigint",
        0,
        toHiveType(ROW_TYPE),
        ROW_TYPE,
        Optional.of(new HiveColumnProjectionInfo(ImmutableList.of(0), ImmutableList.of("a"), toHiveType(BIGINT), BIGINT)),
        REGULAR,
        Optional.empty());
// Test projection pushdown with duplicate column references
tester().assertThat(pushProjectionIntoTableScan)
        .on(p -> {
            SymbolReference column = p.symbol("just_bigint", BIGINT).toSymbolReference();
            Expression negation = new ArithmeticUnaryExpression(MINUS, column);
            return p.project(
                    // The column reference is part of both the assignments
                    Assignments.of(
                            p.symbol("column_ref", BIGINT), column,
                            p.symbol("negated_column_ref", BIGINT), negation),
                    p.tableScan(
                            table,
                            ImmutableList.of(p.symbol("just_bigint", BIGINT)),
                            ImmutableMap.of(p.symbol("just_bigint", BIGINT), bigintColumn)));
        })
        .matches(project(
                ImmutableMap.of(
                        "column_ref", expression("just_bigint_0"),
                        "negated_column_ref", expression("- just_bigint_0")),
                tableScan(
                        hiveTable.withProjectedColumns(ImmutableSet.of(bigintColumn))::equals,
                        TupleDomain.all(),
                        ImmutableMap.of("just_bigint_0", bigintColumn::equals))));
// Test Dereference pushdown
tester().assertThat(pushProjectionIntoTableScan)
        .on(p -> {
            SubscriptExpression subscript = new SubscriptExpression(p.symbol("struct_of_bigint", ROW_TYPE).toSymbolReference(), new LongLiteral("1"));
            Expression sum = new ArithmeticBinaryExpression(ADD, subscript, new LongLiteral("2"));
            return p.project(
                    // The subscript expression instance is part of both the assignments
                    Assignments.of(
                            p.symbol("expr_deref", BIGINT), subscript,
                            p.symbol("expr_deref_2", BIGINT), sum),
                    p.tableScan(
                            table,
                            ImmutableList.of(p.symbol("struct_of_bigint", ROW_TYPE)),
                            ImmutableMap.of(p.symbol("struct_of_bigint", ROW_TYPE), partialColumn.getBaseColumn())));
        })
        .matches(project(
                ImmutableMap.of(
                        "expr_deref", expression(new SymbolReference("struct_of_bigint#a")),
                        "expr_deref_2", expression(new ArithmeticBinaryExpression(ADD, new SymbolReference("struct_of_bigint#a"), new LongLiteral("2")))),
                tableScan(
                        hiveTable.withProjectedColumns(ImmutableSet.of(partialColumn))::equals,
                        TupleDomain.all(),
                        ImmutableMap.of("struct_of_bigint#a", partialColumn::equals))));
metastore.dropTable(SCHEMA_NAME, tableName, true);
}
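Stripped of the test harness, the shape exercised by the first assertion is an Assignments map in which two outputs reuse the same input expression instance. A minimal sketch; the direct Symbol construction is illustrative rather than how the test's plan builder allocates symbols:

// Sketch only: the same SymbolReference instance feeds both assignments
SymbolReference column = new SymbolReference("just_bigint");
Assignments duplicated = Assignments.of(
        new Symbol("column_ref"), column,
        new Symbol("negated_column_ref"), new ArithmeticUnaryExpression(MINUS, column));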
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
Example from class RelationPlanner, method planJoinUsing.
private RelationPlan planJoinUsing(Join node, RelationPlan left, RelationPlan right) {
/* Given: l JOIN r USING (k1, ..., kn)

   produces:

    - project
            coalesce(l.k1, r.k1),
            ...,
            coalesce(l.kn, r.kn),
            l.v1,
            ...,
            l.vn,
            r.v1,
            ...,
            r.vn
      - join (l.k1 = r.k1 and ... l.kn = r.kn)
            - project
                    cast(l.k1 as commonType(l.k1, r.k1))
                    ...
            - project
                    cast(r.k1 as commonType(l.k1, r.k1))

   If casts are redundant (due to column type and common type being equal),
   they will be removed by optimization passes.
*/
List<Identifier> joinColumns = ((JoinUsing) node.getCriteria().orElseThrow()).getColumns();
Analysis.JoinUsingAnalysis joinAnalysis = analysis.getJoinUsing(node);
ImmutableList.Builder<JoinNode.EquiJoinClause> clauses = ImmutableList.builder();
Map<Identifier, Symbol> leftJoinColumns = new HashMap<>();
Map<Identifier, Symbol> rightJoinColumns = new HashMap<>();
Assignments.Builder leftCoercions = Assignments.builder();
Assignments.Builder rightCoercions = Assignments.builder();
leftCoercions.putIdentities(left.getRoot().getOutputSymbols());
rightCoercions.putIdentities(right.getRoot().getOutputSymbols());
for (int i = 0; i < joinColumns.size(); i++) {
Identifier identifier = joinColumns.get(i);
Type type = analysis.getType(identifier);
// compute the coercion for the field on the left to the common supertype of left & right
Symbol leftOutput = symbolAllocator.newSymbol(identifier, type);
int leftField = joinAnalysis.getLeftJoinFields().get(i);
leftCoercions.put(leftOutput, new Cast(
        left.getSymbol(leftField).toSymbolReference(),
        toSqlType(type),
        false,
        typeCoercion.isTypeOnlyCoercion(left.getDescriptor().getFieldByIndex(leftField).getType(), type)));
leftJoinColumns.put(identifier, leftOutput);
// compute the coercion for the field on the right to the common supertype of left & right
Symbol rightOutput = symbolAllocator.newSymbol(identifier, type);
int rightField = joinAnalysis.getRightJoinFields().get(i);
rightCoercions.put(rightOutput, new Cast(
        right.getSymbol(rightField).toSymbolReference(),
        toSqlType(type),
        false,
        typeCoercion.isTypeOnlyCoercion(right.getDescriptor().getFieldByIndex(rightField).getType(), type)));
rightJoinColumns.put(identifier, rightOutput);
clauses.add(new JoinNode.EquiJoinClause(leftOutput, rightOutput));
}
ProjectNode leftCoercion = new ProjectNode(idAllocator.getNextId(), left.getRoot(), leftCoercions.build());
ProjectNode rightCoercion = new ProjectNode(idAllocator.getNextId(), right.getRoot(), rightCoercions.build());
JoinNode join = new JoinNode(
        idAllocator.getNextId(),
        JoinNode.Type.typeConvert(node.getType()),
        leftCoercion,
        rightCoercion,
        clauses.build(),
        leftCoercion.getOutputSymbols(),
        rightCoercion.getOutputSymbols(),
        false,
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        ImmutableMap.of(),
        Optional.empty());
// Add a projection to produce the outputs of the columns in the USING clause,
// which are defined as coalesce(l.k, r.k)
Assignments.Builder assignments = Assignments.builder();
ImmutableList.Builder<Symbol> outputs = ImmutableList.builder();
for (Identifier column : joinColumns) {
Symbol output = symbolAllocator.newSymbol(column, analysis.getType(column));
outputs.add(output);
assignments.put(output, new CoalesceExpression(
        leftJoinColumns.get(column).toSymbolReference(),
        rightJoinColumns.get(column).toSymbolReference()));
}
for (int field : joinAnalysis.getOtherLeftFields()) {
Symbol symbol = left.getFieldMappings().get(field);
outputs.add(symbol);
assignments.put(symbol, symbol.toSymbolReference());
}
for (int field : joinAnalysis.getOtherRightFields()) {
Symbol symbol = right.getFieldMappings().get(field);
outputs.add(symbol);
assignments.put(symbol, symbol.toSymbolReference());
}
return new RelationPlan(new ProjectNode(idAllocator.getNextId(), join, assignments.build()), analysis.getScope(node), outputs.build(), outerContext);
}
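The per-field identity assignments built in the two loops above could equally use the builder's putIdentity shorthand that the coerce example below relies on. A minimal sketch of the left-side loop, assuming the same local variables as in the method:

// Sketch: putIdentity(symbol) has the same effect as put(symbol, symbol.toSymbolReference())
for (int field : joinAnalysis.getOtherLeftFields()) {
    Symbol symbol = left.getFieldMappings().get(field);
    outputs.add(symbol);
    assignments.putIdentity(symbol);
}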
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
Example from class QueryPlanner, method coerce.
public static NodeAndMappings coerce(RelationPlan plan, List<Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator) {
List<Symbol> visibleFields = visibleFields(plan);
checkArgument(visibleFields.size() == types.size());
Assignments.Builder assignments = Assignments.builder();
ImmutableList.Builder<Symbol> mappings = ImmutableList.builder();
for (int i = 0; i < types.size(); i++) {
Symbol input = visibleFields.get(i);
Type type = types.get(i);
if (!symbolAllocator.getTypes().get(input).equals(type)) {
Symbol coerced = symbolAllocator.newSymbol(input.getName(), type);
assignments.put(coerced, new Cast(input.toSymbolReference(), toSqlType(type)));
mappings.add(coerced);
} else {
assignments.putIdentity(input);
mappings.add(input);
}
}
ProjectNode coerced = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build());
return new NodeAndMappings(coerced, mappings.build());
}
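As a concrete sketch of what the loop builds (the symbol names x and y are illustrative, and symbolAllocator is the one passed to the method): for visible fields x of type BIGINT and y of type INTEGER coerced to [BIGINT, BIGINT], x gets an identity assignment while y is replaced by a fresh symbol bound to a Cast:

// Sketch of the assignments built for fields x: BIGINT, y: INTEGER coerced to [BIGINT, BIGINT]
Assignments.Builder assignments = Assignments.builder();
assignments.putIdentity(new Symbol("x"));                 // type already matches the target
Symbol coercedY = symbolAllocator.newSymbol("y", BIGINT); // fresh symbol for the coerced field
assignments.put(coercedY, new Cast(new Symbol("y").toSymbolReference(), toSqlType(BIGINT)));
// the returned mappings would then be [x, coercedY]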
Use of io.trino.sql.planner.plan.Assignments in project trino by trinodb.
Example from class TestPushProjectionIntoTableScan, method testPushProjection.
@Test
public void testPushProjection() {
try (RuleTester ruleTester = defaultRuleTester()) {
// Building context for input
String columnName = "col0";
Type columnType = ROW_TYPE;
Symbol baseColumn = new Symbol(columnName);
ColumnHandle columnHandle = new TpchColumnHandle(columnName, columnType);
// Create catalog with applyProjection enabled
MockConnectorFactory factory = createMockFactory(ImmutableMap.of(columnName, columnHandle), Optional.of(this::mockApplyProjection));
ruleTester.getQueryRunner().createCatalog(MOCK_CATALOG, factory, ImmutableMap.of());
TypeAnalyzer typeAnalyzer = createTestingTypeAnalyzer(ruleTester.getPlannerContext());
// Prepare project node symbols and types
Symbol identity = new Symbol("symbol_identity");
Symbol dereference = new Symbol("symbol_dereference");
Symbol constant = new Symbol("symbol_constant");
Symbol call = new Symbol("symbol_call");
ImmutableMap<Symbol, Type> types = ImmutableMap.of(
        baseColumn, ROW_TYPE,
        identity, ROW_TYPE,
        dereference, BIGINT,
        constant, BIGINT,
        call, VARCHAR);
// Prepare project node assignments
ImmutableMap<Symbol, Expression> inputProjections = ImmutableMap.of(
        identity, baseColumn.toSymbolReference(),
        dereference, new SubscriptExpression(baseColumn.toSymbolReference(), new LongLiteral("1")),
        constant, new LongLiteral("5"),
        call, new FunctionCall(QualifiedName.of("STARTS_WITH"), ImmutableList.of(new StringLiteral("abc"), new StringLiteral("ab"))));
// Compute expected symbols after applyProjection
TransactionId transactionId = ruleTester.getQueryRunner().getTransactionManager().beginTransaction(false);
Session session = MOCK_SESSION.beginTransactionId(transactionId, ruleTester.getQueryRunner().getTransactionManager(), ruleTester.getQueryRunner().getAccessControl());
ImmutableMap<Symbol, String> connectorNames = inputProjections.entrySet().stream()
        .collect(toImmutableMap(
                Map.Entry::getKey,
                e -> translate(session, e.getValue(), typeAnalyzer, viewOf(types), ruleTester.getPlannerContext()).get().toString()));
ImmutableMap<Symbol, String> newNames = ImmutableMap.of(
        identity, "projected_variable_" + connectorNames.get(identity),
        dereference, "projected_dereference_" + connectorNames.get(dereference),
        constant, "projected_constant_" + connectorNames.get(constant),
        call, "projected_call_" + connectorNames.get(call));
Map<String, ColumnHandle> expectedColumns = newNames.entrySet().stream()
        .collect(toImmutableMap(
                Map.Entry::getValue,
                e -> column(e.getValue(), types.get(e.getKey()))));
ruleTester.assertThat(createRule(ruleTester)).on(p -> {
// Register symbols
types.forEach((symbol, type) -> p.symbol(symbol.getName(), type));
return p.project(
        new Assignments(inputProjections),
        p.tableScan(tableScan -> tableScan
                .setTableHandle(TEST_TABLE_HANDLE)
                .setSymbols(ImmutableList.copyOf(types.keySet()))
                .setAssignments(types.keySet().stream().collect(Collectors.toMap(Function.identity(), v -> columnHandle)))
                .setStatistics(Optional.of(PlanNodeStatsEstimate.builder()
                        .setOutputRowCount(42)
                        .addSymbolStatistics(baseColumn, SymbolStatsEstimate.builder().setNullsFraction(0).setDistinctValuesCount(33).build())
                        .build()))));
}).withSession(MOCK_SESSION).matches(project(
        newNames.entrySet().stream()
                .collect(toImmutableMap(e -> e.getKey().getName(), e -> expression(symbolReference(e.getValue())))),
        tableScan(
                new MockConnectorTableHandle(
                        new SchemaTableName(TEST_SCHEMA, "projected_" + TEST_TABLE),
                        TupleDomain.all(),
                        Optional.of(ImmutableList.copyOf(expectedColumns.values())))::equals,
                TupleDomain.all(),
                expectedColumns.entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, e -> e.getValue()::equals)),
                Optional.of(PlanNodeStatsEstimate.builder()
                        .setOutputRowCount(42)
                        .addSymbolStatistics(new Symbol(newNames.get(constant)), SymbolStatsEstimate.builder().setDistinctValuesCount(1).setNullsFraction(0).setLowValue(5).setHighValue(5).build())
                        .addSymbolStatistics(new Symbol(newNames.get(call).toLowerCase(ENGLISH)), SymbolStatsEstimate.builder().setDistinctValuesCount(1).setNullsFraction(0).build())
                        .addSymbolStatistics(new Symbol(newNames.get(identity)), SymbolStatsEstimate.builder().setDistinctValuesCount(33).setNullsFraction(0).build())
                        .addSymbolStatistics(new Symbol(newNames.get(dereference)), SymbolStatsEstimate.unknown())
                        .build())::equals)));
}
}
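The projection above passes a prebuilt map straight to the Assignments constructor; with the builder used elsewhere on this page, an equivalent construction would look roughly like this (a sketch reusing the same inputProjections map):

// Sketch: build the same Assignments entry by entry instead of handing the map to the constructor
Assignments.Builder builder = Assignments.builder();
inputProjections.forEach(builder::put);   // Builder.put(Symbol, Expression) for each entry
Assignments assignments = builder.build();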