Use of io.trino.sql.tree.LambdaArgumentDeclaration in project trino by trinodb.
From class LogicalPlanner, method getInsertPlan:
private RelationPlan getInsertPlan(
        Analysis analysis,
        Table table,
        Query query,
        TableHandle tableHandle,
        List<ColumnHandle> insertColumns,
        Optional<TableLayout> newTableLayout,
        Optional<WriterTarget> materializedViewRefreshWriterTarget)
{
    TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle);

    Map<NodeRef<LambdaArgumentDeclaration>, Symbol> lambdaDeclarationToSymbolMap = buildLambdaDeclarationToSymbolMap(analysis, symbolAllocator);
    RelationPlanner planner = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, plannerContext, Optional.empty(), session, ImmutableMap.of());
    RelationPlan plan = planner.process(query, null);

    // Keep only the visible (non-hidden) fields of the source query
    ImmutableList.Builder<Symbol> builder = ImmutableList.builder();
    for (int i = 0; i < plan.getFieldMappings().size(); i++) {
        if (!plan.getDescriptor().getFieldByIndex(i).isHidden()) {
            builder.add(plan.getFieldMappings().get(i));
        }
    }
    List<Symbol> visibleFieldMappings = builder.build();

    Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, tableHandle);
    Assignments.Builder assignments = Assignments.builder();
    boolean supportsMissingColumnsOnInsert = metadata.supportsMissingColumnsOnInsert(session, tableHandle);
    ImmutableList.Builder<ColumnMetadata> insertedColumnsBuilder = ImmutableList.builder();

    for (ColumnMetadata column : tableMetadata.getColumns()) {
        if (column.isHidden()) {
            continue;
        }
        Symbol output = symbolAllocator.newSymbol(column.getName(), column.getType());
        int index = insertColumns.indexOf(columns.get(column.getName()));
        if (index < 0) {
            // Column is not part of the INSERT column list
            if (supportsMissingColumnsOnInsert) {
                continue;
            }
            Expression cast = new Cast(new NullLiteral(), toSqlType(column.getType()));
            assignments.put(output, cast);
            insertedColumnsBuilder.add(column);
        }
        else {
            Symbol input = visibleFieldMappings.get(index);
            Type tableType = column.getType();
            Type queryType = symbolAllocator.getTypes().get(input);
            if (queryType.equals(tableType) || typeCoercion.isTypeOnlyCoercion(queryType, tableType)) {
                assignments.put(output, input.toSymbolReference());
            }
            else {
                // Coerce the query value to the table column type when the types differ
                Expression cast = noTruncationCast(input.toSymbolReference(), queryType, tableType);
                assignments.put(output, cast);
            }
            insertedColumnsBuilder.add(column);
        }
    }
    ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build());

    List<ColumnMetadata> insertedColumns = insertedColumnsBuilder.build();
    List<Field> fields = insertedColumns.stream()
            .map(column -> Field.newUnqualified(column.getName(), column.getType()))
            .collect(toImmutableList());
    Scope scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(fields)).build();

    plan = new RelationPlan(projectNode, scope, projectNode.getOutputSymbols(), Optional.empty());

    plan = planner.addRowFilters(
            table,
            plan,
            failIfPredicateIsNotMeet(metadata, session, PERMISSION_DENIED, AccessDeniedException.PREFIX + "Cannot insert row that does not match to a row filter"),
            node -> {
                Scope accessControlScope = analysis.getAccessControlScope(table);
                // hidden fields are not accessible in insert
                return Scope.builder()
                        .like(accessControlScope)
                        .withRelationType(accessControlScope.getRelationId(), accessControlScope.getRelationType().withOnlyVisibleFields())
                        .build();
            });

    List<String> insertedTableColumnNames = insertedColumns.stream()
            .map(ColumnMetadata::getName)
            .collect(toImmutableList());
    String catalogName = tableHandle.getCatalogName().getCatalogName();
    TableStatisticsMetadata statisticsMetadata = metadata.getStatisticsCollectionMetadataForWrite(session, catalogName, tableMetadata.getMetadata());

    if (materializedViewRefreshWriterTarget.isPresent()) {
        return createTableWriterPlan(analysis, plan.getRoot(), plan.getFieldMappings(), materializedViewRefreshWriterTarget.get(), insertedTableColumnNames, insertedColumns, newTableLayout, statisticsMetadata);
    }

    InsertReference insertTarget = new InsertReference(tableHandle, insertedTableColumnNames.stream().map(columns::get).collect(toImmutableList()));
    return createTableWriterPlan(analysis, plan.getRoot(), plan.getFieldMappings(), insertTarget, insertedTableColumnNames, insertedColumns, newTableLayout, statisticsMetadata);
}
From class LogicalPlanner, method buildLambdaDeclarationToSymbolMap:
private static Map<NodeRef<LambdaArgumentDeclaration>, Symbol> buildLambdaDeclarationToSymbolMap(Analysis analysis, SymbolAllocator symbolAllocator)
{
    Map<Key, Symbol> allocations = new HashMap<>();
    Map<NodeRef<LambdaArgumentDeclaration>, Symbol> result = new LinkedHashMap<>();

    for (Entry<NodeRef<Expression>, Type> entry : analysis.getTypes().entrySet()) {
        if (!(entry.getKey().getNode() instanceof LambdaArgumentDeclaration)) {
            continue;
        }
        LambdaArgumentDeclaration argument = (LambdaArgumentDeclaration) entry.getKey().getNode();
        Key key = new Key(argument, entry.getValue());
        // Allocate the same symbol for all lambda argument names with a given type. This is needed to be able to
        // properly identify multiple instances of syntactically equal lambda expressions during planning as expressions
        // get rewritten via TranslationMap
        Symbol symbol = allocations.get(key);
        if (symbol == null) {
            symbol = symbolAllocator.newSymbol(argument, entry.getValue());
            allocations.put(key, symbol);
        }
        result.put(NodeRef.of(argument), symbol);
    }
    return result;
}
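
The Key type referenced above is a private helper of LogicalPlanner and is not part of this snippet. What follows is a minimal sketch of what it plausibly looks like (an assumption for illustration, not the actual Trino source): it pairs the argument declaration with its resolved type, so equal argument names of the same type end up sharing one allocated symbol.

// Hypothetical sketch only; the real Key in LogicalPlanner may differ.
// Uses java.util.Objects; equality is by argument declaration (i.e. argument name) and type,
// which is what lets syntactically equal lambda arguments map to the same symbol.
private static class Key
{
    private final LambdaArgumentDeclaration argument;
    private final Type type;

    private Key(LambdaArgumentDeclaration argument, Type type)
    {
        this.argument = argument;
        this.type = type;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Key)) {
            return false;
        }
        Key other = (Key) o;
        return Objects.equals(argument, other.argument) && Objects.equals(type, other.type);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(argument, type);
    }
}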
From class AstBuilder, method visitLambda:
@Override
public Node visitLambda(SqlBaseParser.LambdaContext context)
{
    List<LambdaArgumentDeclaration> arguments = visit(context.identifier(), Identifier.class).stream()
            .map(LambdaArgumentDeclaration::new)
            .collect(toList());
    Expression body = (Expression) visit(context.expression());
    return new LambdaExpression(getLocation(context), arguments, body);
}
From class TestSqlParser, method testLambda:
@Test
public void testLambda()
{
    assertExpression("() -> x",
            new LambdaExpression(
                    ImmutableList.of(),
                    new Identifier("x")));
    assertExpression("x -> sin(x)",
            new LambdaExpression(
                    ImmutableList.of(new LambdaArgumentDeclaration(identifier("x"))),
                    new FunctionCall(QualifiedName.of("sin"), ImmutableList.of(new Identifier("x")))));
    assertExpression("(x, y) -> mod(x, y)",
            new LambdaExpression(
                    ImmutableList.of(new LambdaArgumentDeclaration(identifier("x")), new LambdaArgumentDeclaration(identifier("y"))),
                    new FunctionCall(QualifiedName.of("mod"), ImmutableList.of(new Identifier("x"), new Identifier("y")))));
}
From class TestLambdaCaptureDesugaringRewriter, method testRewriteBasicLambda:
@Test
public void testRewriteBasicLambda()
{
    Map<Symbol, Type> symbols = ImmutableMap.of(new Symbol("a"), BigintType.BIGINT);
    SymbolAllocator allocator = new SymbolAllocator(symbols);

    assertEquals(
            rewrite(expression("x -> a + x"), allocator.getTypes(), allocator),
            new BindExpression(
                    ImmutableList.of(expression("a")),
                    new LambdaExpression(
                            Stream.of("a_0", "x")
                                    .map(Identifier::new)
                                    .map(LambdaArgumentDeclaration::new)
                                    .collect(toList()),
                            expression("a_0 + x"))));
}