Use of io.trino.spi.connector.ProjectionApplicationResult in project trino by trinodb.
Class MetadataManager, method applyProjection.
@Override
public Optional<ProjectionApplicationResult<TableHandle>> applyProjection(Session session, TableHandle table, List<ConnectorExpression> projections, Map<String, ColumnHandle> assignments)
{
    CatalogName catalogName = table.getCatalogName();
    ConnectorMetadata metadata = getMetadata(session, catalogName);
    ConnectorSession connectorSession = session.toConnectorSession(catalogName);
    return metadata.applyProjection(connectorSession, table.getConnectorHandle(), projections, assignments).map(result -> {
        verifyProjection(table, result.getProjections(), result.getAssignments(), projections.size());
        return new ProjectionApplicationResult<>(
                new TableHandle(catalogName, result.getHandle(), table.getTransaction()),
                result.getProjections(),
                result.getAssignments(),
                result.isPrecalculateStatistics());
    });
}
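MetadataManager only verifies and re-wraps what the connector returns: a connector-level ProjectionApplicationResult<ConnectorTableHandle> is lifted to an engine-level ProjectionApplicationResult<TableHandle>. For context, here is a minimal sketch of the connector side of that contract, assuming hypothetical MyTableHandle and MyColumnHandle classes; it only acknowledges plain column references and reports the pruned column set back to the engine.

// Minimal sketch of a connector-side applyProjection. MyTableHandle and
// MyColumnHandle are hypothetical; withProjectedColumns/getProjectedColumns are
// assumed accessors on that handle.
@Override
public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjection(
        ConnectorSession session,
        ConnectorTableHandle table,
        List<ConnectorExpression> projections,
        Map<String, ColumnHandle> assignments)
{
    // Bail out unless every projection is a simple variable reference
    if (!projections.stream().allMatch(Variable.class::isInstance)) {
        return Optional.empty();
    }
    MyTableHandle handle = (MyTableHandle) table;
    Set<ColumnHandle> projectedColumns = ImmutableSet.copyOf(assignments.values());
    if (handle.getProjectedColumns().equals(projectedColumns)) {
        // Nothing new to push down
        return Optional.empty();
    }
    List<Assignment> newAssignments = assignments.entrySet().stream()
            .map(entry -> new Assignment(
                    entry.getKey(),
                    entry.getValue(),
                    ((MyColumnHandle) entry.getValue()).getType()))
            .collect(toImmutableList());
    return Optional.of(new ProjectionApplicationResult<>(
            handle.withProjectedColumns(projectedColumns),
            projections,
            newAssignments,
            false));
}

Returning Optional.empty() tells the engine that no further pushdown happened, which is what stops the optimizer from repeatedly applying the same projection.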
Use of io.trino.spi.connector.ProjectionApplicationResult in project trino by trinodb.
Class AbstractTestHive, method testApplyProjection.
@Test
public void testApplyProjection()
        throws Exception
{
    ColumnMetadata bigIntColumn0 = new ColumnMetadata("int0", BIGINT);
    ColumnMetadata bigIntColumn1 = new ColumnMetadata("int1", BIGINT);
    RowType oneLevelRowType = toRowType(ImmutableList.of(bigIntColumn0, bigIntColumn1));
    ColumnMetadata oneLevelRow0 = new ColumnMetadata("onelevelrow0", oneLevelRowType);
    RowType twoLevelRowType = toRowType(ImmutableList.of(oneLevelRow0, bigIntColumn0, bigIntColumn1));
    ColumnMetadata twoLevelRow0 = new ColumnMetadata("twolevelrow0", twoLevelRowType);
    List<ColumnMetadata> columnsForApplyProjectionTest = ImmutableList.of(bigIntColumn0, bigIntColumn1, oneLevelRow0, twoLevelRow0);

    SchemaTableName tableName = temporaryTable("apply_projection_tester");
    doCreateEmptyTable(tableName, ORC, columnsForApplyProjectionTest);

    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);

        List<ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle).values().stream()
                .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
                .collect(toList());
        assertEquals(columnHandles.size(), columnsForApplyProjectionTest.size());

        Map<String, ColumnHandle> columnHandleMap = columnHandles.stream()
                .collect(toImmutableMap(handle -> ((HiveColumnHandle) handle).getBaseColumnName(), Function.identity()));

        // Emulate symbols coming from the query plan and map them to column handles
        Map<String, ColumnHandle> columnHandlesWithSymbols = ImmutableMap.of(
                "symbol_0", columnHandleMap.get("int0"),
                "symbol_1", columnHandleMap.get("int1"),
                "symbol_2", columnHandleMap.get("onelevelrow0"),
                "symbol_3", columnHandleMap.get("twolevelrow0"));

        // Create variables for the emulated symbols
        Map<String, Variable> symbolVariableMapping = columnHandlesWithSymbols.entrySet().stream()
                .collect(toImmutableMap(
                        Map.Entry::getKey,
                        e -> new Variable(e.getKey(), ((HiveColumnHandle) e.getValue()).getBaseType())));

        // Create dereference expressions for testing
        FieldDereference symbol2Field0 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_2"), 0);
        FieldDereference symbol3Field0 = new FieldDereference(oneLevelRowType, symbolVariableMapping.get("symbol_3"), 0);
        FieldDereference symbol3Field0Field0 = new FieldDereference(BIGINT, symbol3Field0, 0);
        FieldDereference symbol3Field1 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_3"), 1);

        Map<String, ColumnHandle> inputAssignments;
        List<ConnectorExpression> inputProjections;
        Optional<ProjectionApplicationResult<ConnectorTableHandle>> projectionResult;
        List<ConnectorExpression> expectedProjections;
        Map<String, Type> expectedAssignments;

        // Test pushdown of projected columns into HiveTableHandle when all projections are variable references
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_0", "symbol_1"));
        inputProjections = ImmutableList.of(symbolVariableMapping.get("symbol_0"), symbolVariableMapping.get("symbol_1"));
        expectedAssignments = ImmutableMap.of("symbol_0", BIGINT, "symbol_1", BIGINT);
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments);

        // Empty result when the projected column handles are the same as those already present in the table handle
        projectionResult = metadata.applyProjection(session, projectionResult.get().getHandle(), inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, true, ImmutableList.of(), ImmutableMap.of());

        // Extra column handles in HiveTableHandle should get pruned
        projectionResult = metadata.applyProjection(
                session,
                ((HiveTableHandle) tableHandle).withProjectedColumns(ImmutableSet.copyOf(columnHandles)),
                inputProjections,
                inputAssignments);
        assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments);

        // Test projection pushdown for dereferences
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2", "symbol_3"));
        inputProjections = ImmutableList.of(symbol2Field0, symbol3Field0Field0, symbol3Field1);
        expectedAssignments = ImmutableMap.of(
                "onelevelrow0#f_int0", BIGINT,
                "twolevelrow0#f_onelevelrow0#f_int0", BIGINT,
                "twolevelrow0#f_int0", BIGINT);
        expectedProjections = ImmutableList.of(
                new Variable("onelevelrow0#f_int0", BIGINT),
                new Variable("twolevelrow0#f_onelevelrow0#f_int0", BIGINT),
                new Variable("twolevelrow0#f_int0", BIGINT));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);

        // Test reuse of virtual column handles
        // Round 1: input projections [symbol_2, symbol_2.int0]. A virtual handle is created for symbol_2.int0.
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2"));
        inputProjections = ImmutableList.of(symbol2Field0, symbolVariableMapping.get("symbol_2"));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), symbolVariableMapping.get("symbol_2"));
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT, "symbol_2", oneLevelRowType);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);

        // Round 2: input projections [symbol_2.int0, onelevelrow0#f_int0]. The virtual handle is reused.
        Assignment newlyCreatedColumn = getOnlyElement(projectionResult.get().getAssignments().stream()
                .filter(handle -> handle.getVariable().equals("onelevelrow0#f_int0"))
                .collect(toList()));
        inputAssignments = ImmutableMap.<String, ColumnHandle>builder()
                .putAll(getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2")))
                .put(newlyCreatedColumn.getVariable(), newlyCreatedColumn.getColumn())
                .buildOrThrow();
        inputProjections = ImmutableList.of(symbol2Field0, new Variable("onelevelrow0#f_int0", BIGINT));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), new Variable("onelevelrow0#f_int0", BIGINT));
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);
    }
    finally {
        dropTable(tableName);
    }
}
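The test relies on two helpers that are not shown in this excerpt, getColumnHandlesFor and assertProjectionResult. A plausible shape for them (not the exact AbstractTestHive implementations) is sketched below: the first picks a subset of the symbol-to-handle map, the second checks the projections and the variable-to-type assignments produced by applyProjection.

// Plausible helper sketches, assuming the Variable/FieldDereference expression
// classes define value equality (the IcebergMetadata example below keys maps by
// ConnectorExpression, which requires that).
private static Map<String, ColumnHandle> getColumnHandlesFor(Map<String, ColumnHandle> columnHandles, List<String> symbols)
{
    return columnHandles.entrySet().stream()
            .filter(entry -> symbols.contains(entry.getKey()))
            .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
}

private static void assertProjectionResult(
        Optional<ProjectionApplicationResult<ConnectorTableHandle>> result,
        boolean expectNoPushdown,
        List<ConnectorExpression> expectedProjections,
        Map<String, Type> expectedAssignments)
{
    if (expectNoPushdown) {
        assertTrue(result.isEmpty());
        return;
    }
    ProjectionApplicationResult<ConnectorTableHandle> projectionResult = result.orElseThrow();
    assertEquals(projectionResult.getProjections(), expectedProjections);
    Map<String, Type> actualAssignments = projectionResult.getAssignments().stream()
            .collect(toImmutableMap(Assignment::getVariable, Assignment::getType));
    assertEquals(actualAssignments, expectedAssignments);
}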
Use of io.trino.spi.connector.ProjectionApplicationResult in project trino by trinodb.
Class TestPruneTableScanColumns, method mockApplyProjection.
private Optional<ProjectionApplicationResult<ConnectorTableHandle>> mockApplyProjection(
        ConnectorSession session,
        ConnectorTableHandle tableHandle,
        List<ConnectorExpression> projections,
        Map<String, ColumnHandle> assignments)
{
    MockConnectorTableHandle handle = (MockConnectorTableHandle) tableHandle;

    List<Variable> variables = projections.stream()
            .map(Variable.class::cast)
            .collect(toImmutableList());
    List<ColumnHandle> newColumns = variables.stream()
            .map(variable -> assignments.get(variable.getName()))
            .collect(toImmutableList());

    if (handle.getColumns().isPresent() && newColumns.equals(handle.getColumns().get())) {
        return Optional.empty();
    }

    return Optional.of(new ProjectionApplicationResult<>(
            new MockConnectorTableHandle(handle.getTableName(), handle.getConstraint(), Optional.of(newColumns)),
            projections,
            variables.stream()
                    .map(variable -> new Assignment(
                            variable.getName(),
                            assignments.get(variable.getName()),
                            ((MockConnectorColumnHandle) assignments.get(variable.getName())).getType()))
                    .collect(toImmutableList()),
            false));
}
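This method only matters once it is registered as the applyProjection callback of the test's mock connector. A sketch of the typical wiring, assuming MockConnectorFactory's builder hooks (withGetColumns, withApplyProjection) and using illustrative column names:

// Sketch of wiring the mock into a test connector factory. The builder hook
// names and the column list are assumptions for illustration.
MockConnectorFactory factory = MockConnectorFactory.builder()
        .withGetColumns(schemaTableName -> ImmutableList.of(
                new ColumnMetadata("col_a", BIGINT),
                new ColumnMetadata("col_b", BIGINT)))
        .withApplyProjection(this::mockApplyProjection)
        .build();

The rule test then runs PruneTableScanColumns against a catalog backed by this factory, so the optimizer exercises exactly the Optional.empty() and ProjectionApplicationResult branches handled above.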
Use of io.trino.spi.connector.ProjectionApplicationResult in project trino by trinodb.
Class IcebergMetadata, method applyProjection.
@Override
public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjection(
        ConnectorSession session,
        ConnectorTableHandle handle,
        List<ConnectorExpression> projections,
        Map<String, ColumnHandle> assignments)
{
    if (!isProjectionPushdownEnabled(session)) {
        return Optional.empty();
    }

    // Create projected column representations for supported sub-expressions. Simple column references
    // and chains of dereferences on a variable are supported right now.
    Set<ConnectorExpression> projectedExpressions = projections.stream()
            .flatMap(expression -> extractSupportedProjectedColumns(expression).stream())
            .collect(toImmutableSet());
    Map<ConnectorExpression, ProjectedColumnRepresentation> columnProjections = projectedExpressions.stream()
            .collect(toImmutableMap(Function.identity(), HiveApplyProjectionUtil::createProjectedColumnRepresentation));

    IcebergTableHandle icebergTableHandle = (IcebergTableHandle) handle;

    // All references are simple variables
    if (columnProjections.values().stream().allMatch(ProjectedColumnRepresentation::isVariable)) {
        Set<IcebergColumnHandle> projectedColumns = assignments.values().stream()
                .map(IcebergColumnHandle.class::cast)
                .collect(toImmutableSet());
        if (icebergTableHandle.getProjectedColumns().equals(projectedColumns)) {
            return Optional.empty();
        }
        List<Assignment> assignmentsList = assignments.entrySet().stream()
                .map(assignment -> new Assignment(
                        assignment.getKey(),
                        assignment.getValue(),
                        ((IcebergColumnHandle) assignment.getValue()).getType()))
                .collect(toImmutableList());
        return Optional.of(new ProjectionApplicationResult<>(
                icebergTableHandle.withProjectedColumns(projectedColumns),
                projections,
                assignmentsList,
                false));
    }

    Map<String, Assignment> newAssignments = new HashMap<>();
    ImmutableMap.Builder<ConnectorExpression, Variable> newVariablesBuilder = ImmutableMap.builder();
    ImmutableSet.Builder<IcebergColumnHandle> projectedColumnsBuilder = ImmutableSet.builder();

    for (Map.Entry<ConnectorExpression, ProjectedColumnRepresentation> entry : columnProjections.entrySet()) {
        ConnectorExpression expression = entry.getKey();
        ProjectedColumnRepresentation projectedColumn = entry.getValue();

        IcebergColumnHandle baseColumnHandle = (IcebergColumnHandle) assignments.get(projectedColumn.getVariable().getName());
        IcebergColumnHandle projectedColumnHandle = createProjectedColumnHandle(baseColumnHandle, projectedColumn.getDereferenceIndices(), expression.getType());
        String projectedColumnName = projectedColumnHandle.getQualifiedName();

        Variable projectedColumnVariable = new Variable(projectedColumnName, expression.getType());
        Assignment newAssignment = new Assignment(projectedColumnName, projectedColumnHandle, expression.getType());
        newAssignments.putIfAbsent(projectedColumnName, newAssignment);

        newVariablesBuilder.put(expression, projectedColumnVariable);
        projectedColumnsBuilder.add(projectedColumnHandle);
    }

    // Modify projections to refer to new variables
    Map<ConnectorExpression, Variable> newVariables = newVariablesBuilder.buildOrThrow();
    List<ConnectorExpression> newProjections = projections.stream()
            .map(expression -> replaceWithNewVariables(expression, newVariables))
            .collect(toImmutableList());

    List<Assignment> outputAssignments = newAssignments.values().stream()
            .collect(toImmutableList());
    return Optional.of(new ProjectionApplicationResult<>(
            icebergTableHandle.withProjectedColumns(projectedColumnsBuilder.build()),
            newProjections,
            outputAssignments,
            false));
}
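The dereference branch rewrites each original projection so that it refers to the newly synthesized variables; that is what the replaceWithNewVariables call above does. A simplified sketch of that substitution, assuming FieldDereference exposes getTarget() and getField(), and covering only the variable and dereference shapes this method supports:

// Simplified substitution sketch (not the real helper invoked above): replace any
// sub-expression that has a synthesized variable, recursing into dereference targets.
private static ConnectorExpression replaceWithNewVariablesSketch(ConnectorExpression expression, Map<ConnectorExpression, Variable> substitutions)
{
    Variable replacement = substitutions.get(expression);
    if (replacement != null) {
        return replacement;
    }
    if (expression instanceof FieldDereference) {
        FieldDereference dereference = (FieldDereference) expression;
        return new FieldDereference(
                dereference.getType(),
                replaceWithNewVariablesSketch(dereference.getTarget(), substitutions),
                dereference.getField());
    }
    return expression;
}

For example, a projection FieldDereference(BIGINT, Variable("row_col", rowType), 0) mapped to Variable("row_col#f1", BIGINT) comes back as that new variable, so the engine's later projections read the already-pruned sub-field column.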
Use of io.trino.spi.connector.ProjectionApplicationResult in project trino by trinodb.
Class PruneTableScanColumns, method pruneColumns.
public static Optional<PlanNode> pruneColumns(Metadata metadata, TypeProvider types, Session session, TableScanNode node, Set<Symbol> referencedOutputs)
{
    List<Symbol> newOutputs = filteredCopy(node.getOutputSymbols(), referencedOutputs::contains);
    if (newOutputs.size() == node.getOutputSymbols().size()) {
        return Optional.empty();
    }

    List<ConnectorExpression> projections = newOutputs.stream()
            .map(symbol -> new Variable(symbol.getName(), types.get(symbol)))
            .collect(toImmutableList());

    TableHandle handle = node.getTable();
    Optional<ProjectionApplicationResult<TableHandle>> result = metadata.applyProjection(
            session,
            handle,
            projections,
            newOutputs.stream().collect(toImmutableMap(Symbol::getName, node.getAssignments()::get)));

    Map<Symbol, ColumnHandle> newAssignments;
    // Bail out if the connector does anything other than limit the list of columns (e.g., if it synthesizes arbitrary expressions)
    if (result.isPresent() && result.get().getProjections().stream().allMatch(Variable.class::isInstance)) {
        handle = result.get().getHandle();
        Map<String, ColumnHandle> assignments = result.get().getAssignments().stream()
                .collect(toImmutableMap(Assignment::getVariable, Assignment::getColumn));
        ImmutableMap.Builder<Symbol, ColumnHandle> builder = ImmutableMap.builder();
        for (int i = 0; i < newOutputs.size(); i++) {
            Variable variable = (Variable) result.get().getProjections().get(i);
            builder.put(newOutputs.get(i), assignments.get(variable.getName()));
        }
        newAssignments = builder.buildOrThrow();
    }
    else {
        newAssignments = newOutputs.stream()
                .collect(toImmutableMap(Function.identity(), node.getAssignments()::get));
    }

    Set<ColumnHandle> visibleColumns = ImmutableSet.copyOf(newAssignments.values());
    TupleDomain<ColumnHandle> enforcedConstraint = node.getEnforcedConstraint()
            .filter((columnHandle, domain) -> visibleColumns.contains(columnHandle));

    Optional<PlanNodeStatsEstimate> newStatistics = node.getStatistics().map(statistics -> new PlanNodeStatsEstimate(
            statistics.getOutputRowCount(),
            statistics.getSymbolStatistics().entrySet().stream()
                    .filter(entry -> newAssignments.containsKey(entry.getKey()))
                    .collect(toImmutableMap(Entry::getKey, Entry::getValue))));

    return Optional.of(new TableScanNode(
            node.getId(),
            handle,
            newOutputs,
            newAssignments,
            enforcedConstraint,
            newStatistics,
            node.isUpdateTarget(),
            node.getUseConnectorNodePartitioning()));
}
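pruneColumns is the static workhorse behind the PruneTableScanColumns rule. A sketch of how a rule of this shape typically wraps it, assuming the standard ProjectOffPushDownRule and Patterns plumbing; the exact hook signature can differ between Trino versions:

// Sketch of the rule wrapper around pruneColumns (hook signature is an assumption).
public class PruneTableScanColumns
        extends ProjectOffPushDownRule<TableScanNode>
{
    private final Metadata metadata;

    public PruneTableScanColumns(Metadata metadata)
    {
        super(tableScan());
        this.metadata = requireNonNull(metadata, "metadata is null");
    }

    @Override
    protected Optional<PlanNode> pushDownProjectOff(Context context, TableScanNode tableScanNode, Set<Symbol> referencedOutputs)
    {
        return pruneColumns(metadata, context.getSymbolAllocator().getTypes(), context.getSession(), tableScanNode, referencedOutputs);
    }
}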