Use of io.trino.sql.PlannerContext in project trino by trinodb.
The class ValuesStatsRule, method getSymbolValues.
private List<Object> getSymbolValues(ValuesNode valuesNode, int symbolId, Session session, Type rowType)
{
    Type symbolType = rowType.getTypeParameters().get(symbolId);
    if (UNKNOWN.equals(symbolType)) {
        // special casing for UNKNOWN as evaluateConstantExpression does not handle that
        return IntStream.range(0, valuesNode.getRowCount())
                .mapToObj(rowId -> null)
                .collect(toList());
    }
    checkState(valuesNode.getRows().isPresent(), "rows is empty");
    return valuesNode.getRows().get().stream()
            .map(row -> {
                Object rowValue = evaluateConstantExpression(row, rowType, plannerContext, session, new AllowAllAccessControl(), ImmutableMap.of());
                return readNativeValue(symbolType, (SingleRowBlock) rowValue, symbolId);
            })
            .collect(toList());
}
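The non-UNKNOWN branch evaluates each row as a constant ROW value and reads the field at symbolId out of the resulting block. A minimal standalone sketch of the same per-column extraction over plain Java lists, without Trino's Block API (the RowExtractor class and extractColumn method are illustrative names, not Trino code):

import java.util.List;
import static java.util.stream.Collectors.toList;

class RowExtractor
{
    // For each row, pick the value at the given column index, preserving row order.
    static List<Object> extractColumn(List<List<Object>> rows, int columnIndex)
    {
        return rows.stream()
                .map(row -> row.get(columnIndex))
                .collect(toList());
    }
}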
Use of io.trino.sql.PlannerContext in project trino by trinodb.
The class PropertyDerivations, method deriveProperties.
public static ActualProperties deriveProperties(PlanNode node, List<ActualProperties> inputProperties, PlannerContext plannerContext, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
{
    ActualProperties output = node.accept(new Visitor(plannerContext, session, types, typeAnalyzer), inputProperties);

    output.getNodePartitioning().ifPresent(partitioning ->
            verify(node.getOutputSymbols().containsAll(partitioning.getColumns()), "Node-level partitioning properties contain columns not present in node's output"));

    verify(node.getOutputSymbols().containsAll(output.getConstants().keySet()), "Node-level constant properties contain columns not present in node's output");

    Set<Symbol> localPropertyColumns = output.getLocalProperties().stream()
            .flatMap(property -> property.getColumns().stream())
            .collect(Collectors.toSet());
    verify(node.getOutputSymbols().containsAll(localPropertyColumns), "Node-level local properties contain columns not present in node's output");

    return output;
}
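Each verify call asserts that every column referenced by the derived properties is also an output symbol of the node. A standalone sketch of that containment check using Guava's Verify (the class and method names here are illustrative, not part of Trino):

import static com.google.common.base.Verify.verify;

import java.util.Collection;
import java.util.List;

class OutputCoverageCheck
{
    // Fails with a descriptive message if any referenced column is missing from the node's output.
    static void checkCovered(List<String> outputSymbols, Collection<String> referencedColumns, String propertyKind)
    {
        verify(outputSymbols.containsAll(referencedColumns),
                "Node-level %s properties contain columns not present in node's output", propertyKind);
    }
}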
Use of io.trino.sql.PlannerContext in project trino by trinodb.
The class TableProceduresPropertyManager, method getProperties.
public Map<String, Object> getProperties(CatalogName catalog, String procedureName, Map<String, Expression> sqlPropertyValues, Session session, PlannerContext plannerContext, AccessControl accessControl, Map<NodeRef<Parameter>, Expression> parameters)
{
    Map<String, PropertyMetadata<?>> supportedProperties = connectorProperties.get(new Key(catalog, procedureName));
    if (supportedProperties == null) {
        throw new TrinoException(NOT_FOUND, format("Catalog '%s' table procedure '%s' property not found", catalog, procedureName));
    }

    Map<String, Optional<Object>> propertyValues = evaluateProperties(
            sqlPropertyValues.entrySet().stream()
                    .map(entry -> new Property(new Identifier(entry.getKey()), entry.getValue()))
                    .collect(toImmutableList()),
            session,
            plannerContext,
            accessControl,
            parameters,
            true,
            supportedProperties,
            INVALID_PROCEDURE_ARGUMENT,
            format("catalog '%s' table procedure '%s' property", catalog, procedureName));

    return propertyValues.entrySet().stream()
            .filter(entry -> entry.getValue().isPresent())
            .collect(toImmutableMap(Entry::getKey, entry -> entry.getValue().orElseThrow()));
}
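After evaluateProperties returns a map of Optional values, the method keeps only the properties that were actually set and unwraps them. A self-contained sketch of that filtering idiom with Guava's toImmutableMap collector (PresentOnly is an illustrative name, not Trino code):

import static com.google.common.collect.ImmutableMap.toImmutableMap;

import java.util.Map;
import java.util.Optional;

class PresentOnly
{
    // Drops absent entries and unwraps the rest into a plain key -> value map.
    static Map<String, Object> presentValues(Map<String, Optional<Object>> evaluated)
    {
        return evaluated.entrySet().stream()
                .filter(entry -> entry.getValue().isPresent())
                .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().orElseThrow()));
    }
}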
Use of io.trino.sql.PlannerContext in project trino by trinodb.
The class PushProjectionIntoTableScan, method apply.
@Override
public Result apply(ProjectNode project, Captures captures, Context context)
{
    TableScanNode tableScan = captures.get(TABLE_SCAN);

    // Extract translatable components from projection expressions. Prepare a mapping from these internal
    // expression nodes to corresponding ConnectorExpression translations.
    Map<NodeRef<Expression>, ConnectorExpression> partialTranslations = project.getAssignments().getMap().entrySet().stream()
            .flatMap(expression -> extractPartialTranslations(
                    expression.getValue(),
                    context.getSession(),
                    typeAnalyzer,
                    context.getSymbolAllocator().getTypes(),
                    plannerContext).entrySet().stream())
            .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue, (first, ignore) -> first));

    List<NodeRef<Expression>> nodesForPartialProjections = ImmutableList.copyOf(partialTranslations.keySet());
    List<ConnectorExpression> connectorPartialProjections = ImmutableList.copyOf(partialTranslations.values());

    Map<String, Symbol> inputVariableMappings = tableScan.getAssignments().keySet().stream()
            .collect(toImmutableMap(Symbol::getName, identity()));
    Map<String, ColumnHandle> assignments = inputVariableMappings.entrySet().stream()
            .collect(toImmutableMap(Entry::getKey, entry -> tableScan.getAssignments().get(entry.getValue())));

    Optional<ProjectionApplicationResult<TableHandle>> result = plannerContext.getMetadata().applyProjection(context.getSession(), tableScan.getTable(), connectorPartialProjections, assignments);
    if (result.isEmpty()) {
        return Result.empty();
    }

    List<ConnectorExpression> newConnectorPartialProjections = result.get().getProjections();
    checkState(newConnectorPartialProjections.size() == connectorPartialProjections.size(),
            "Mismatch between input and output projections from the connector: expected %s but got %s",
            connectorPartialProjections.size(), newConnectorPartialProjections.size());

    List<Symbol> newScanOutputs = new ArrayList<>();
    Map<Symbol, ColumnHandle> newScanAssignments = new HashMap<>();
    Map<String, Symbol> variableMappings = new HashMap<>();
    for (Assignment assignment : result.get().getAssignments()) {
        Symbol symbol = context.getSymbolAllocator().newSymbol(assignment.getVariable(), assignment.getType());
        newScanOutputs.add(symbol);
        newScanAssignments.put(symbol, assignment.getColumn());
        variableMappings.put(assignment.getVariable(), symbol);
    }

    // Translate partial connector projections back to new partial projections
    List<Expression> newPartialProjections = newConnectorPartialProjections.stream()
            .map(expression -> ConnectorExpressionTranslator.translate(context.getSession(), expression, plannerContext, variableMappings, new LiteralEncoder(plannerContext)))
            .collect(toImmutableList());

    // Map internal node references to new partial projections
    ImmutableMap.Builder<NodeRef<Expression>, Expression> nodesToNewPartialProjectionsBuilder = ImmutableMap.builder();
    for (int i = 0; i < nodesForPartialProjections.size(); i++) {
        nodesToNewPartialProjectionsBuilder.put(nodesForPartialProjections.get(i), newPartialProjections.get(i));
    }
    Map<NodeRef<Expression>, Expression> nodesToNewPartialProjections = nodesToNewPartialProjectionsBuilder.buildOrThrow();

    // Stitch partial translations to form new complete projections
    Assignments.Builder newProjectionAssignments = Assignments.builder();
    project.getAssignments().entrySet().forEach(entry -> {
        newProjectionAssignments.put(entry.getKey(), replaceExpression(entry.getValue(), nodesToNewPartialProjections));
    });

    Optional<PlanNodeStatsEstimate> newStatistics = tableScan.getStatistics().map(statistics -> {
        PlanNodeStatsEstimate.Builder builder = PlanNodeStatsEstimate.builder();
        builder.setOutputRowCount(statistics.getOutputRowCount());
        for (int i = 0; i < connectorPartialProjections.size(); i++) {
            ConnectorExpression inputConnectorExpression = connectorPartialProjections.get(i);
            ConnectorExpression resultConnectorExpression = newConnectorPartialProjections.get(i);
            if (!(resultConnectorExpression instanceof Variable)) {
                continue;
            }
            String resultVariableName = ((Variable) resultConnectorExpression).getName();
            Expression inputExpression = ConnectorExpressionTranslator.translate(context.getSession(), inputConnectorExpression, plannerContext, inputVariableMappings, new LiteralEncoder(plannerContext));
            SymbolStatsEstimate symbolStatistics = scalarStatsCalculator.calculate(inputExpression, statistics, context.getSession(), context.getSymbolAllocator().getTypes());
            builder.addSymbolStatistics(variableMappings.get(resultVariableName), symbolStatistics);
        }
        return builder.build();
    });

    verifyTablePartitioning(context, tableScan, result.get().getHandle());

    return Result.ofPlanNode(new ProjectNode(
            context.getIdAllocator().getNextId(),
            new TableScanNode(
                    tableScan.getId(),
                    result.get().getHandle(),
                    newScanOutputs,
                    newScanAssignments,
                    TupleDomain.all(),
                    newStatistics,
                    tableScan.isUpdateTarget(),
                    tableScan.getUseConnectorNodePartitioning()),
            newProjectionAssignments.build()));
}
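In the partialTranslations collector above, the merge function (first, ignore) -> first keeps a single translation when the same sub-expression appears under several projection outputs. A standalone sketch of that first-wins deduplication with Guava's three-argument toImmutableMap (illustrative names, not Trino code):

import static com.google.common.collect.ImmutableMap.toImmutableMap;

import java.util.List;
import java.util.Map;

class FirstWins
{
    // Collects key/value pairs into a map, silently keeping the first value seen for a duplicate key.
    static Map<String, String> dedupe(List<Map.Entry<String, String>> entries)
    {
        return entries.stream()
                .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue, (first, ignore) -> first));
    }
}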
Use of io.trino.sql.PlannerContext in project trino by trinodb.
The class TestCreateTableTask, method testCreateLikeWithProperties.
@Test
public void testCreateLikeWithProperties()
{
    CreateTable statement = getCreatleLikeStatement(true);
    CreateTableTask createTableTask = new CreateTableTask(plannerContext, new AllowAllAccessControl(), columnPropertyManager, tablePropertyManager);
    getFutureValue(createTableTask.internalExecute(statement, testSession, List.of(), output -> {
    }));
    assertEquals(metadata.getCreateTableCallCount(), 1);
    assertThat(metadata.getReceivedTableMetadata().get(0).getColumns()).isEqualTo(PARENT_TABLE.getColumns());
    assertThat(metadata.getReceivedTableMetadata().get(0).getProperties()).isEqualTo(PARENT_TABLE.getProperties());
}
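The test mixes a plain assertEquals with AssertJ-style assertThat checks. If a uniform style were preferred, the call-count assertion could also be written with AssertJ, for example (a stylistic sketch, not a change to the original test):

    assertThat(metadata.getCreateTableCallCount()).isEqualTo(1);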