Example usage of io.trino.spi.connector.Assignment in the Trino project (trinodb/trino): the DefaultJdbcMetadata class, applyAggregation method.
@Override
public Optional<AggregationApplicationResult<ConnectorTableHandle>> applyAggregation(ConnectorSession session, ConnectorTableHandle table, List<AggregateFunction> aggregates, Map<String, ColumnHandle> assignments, List<List<ColumnHandle>> groupingSets)
{
    // Respect the session-level toggle for aggregation pushdown.
    if (!isAggregationPushdownEnabled(session)) {
        return Optional.empty();
    }

    JdbcTableHandle handle = (JdbcTableHandle) table;
    // Global aggregation is represented by [[]]
    verify(!groupingSets.isEmpty(), "No grouping sets provided");

    if (!jdbcClient.supportsAggregationPushdown(session, handle, aggregates, assignments, groupingSets)) {
        // JDBC client implementation prevents pushdown for the given table
        return Optional.empty();
    }

    // A pending limit must be materialized as a subquery before aggregating on top of it.
    if (handle.getLimit().isPresent()) {
        handle = flushAttributesAsQuery(session, handle);
    }

    int nextSyntheticColumnId = handle.getNextSyntheticColumnId();

    ImmutableList.Builder<JdbcColumnHandle> newColumns = ImmutableList.builder();
    ImmutableList.Builder<ConnectorExpression> projections = ImmutableList.builder();
    ImmutableList.Builder<Assignment> resultAssignments = ImmutableList.builder();
    ImmutableMap.Builder<String, String> expressions = ImmutableMap.builder();

    List<List<JdbcColumnHandle>> groupingSetsAsJdbcColumnHandles = groupingSets.stream()
            .map(groupingSet -> groupingSet.stream()
                    .map(JdbcColumnHandle.class::cast)
                    .collect(toImmutableList()))
            .collect(toImmutableList());
    Optional<List<JdbcColumnHandle>> tableColumns = handle.getColumns();

    // Each distinct grouping column becomes an output column. When the handle carries an
    // explicit column list, every grouping column must already be part of that list.
    groupingSetsAsJdbcColumnHandles.stream()
            .flatMap(List::stream)
            .distinct()
            .forEach(groupKey -> {
                tableColumns.ifPresent(columns -> verify(
                        columns.contains(groupKey),
                        "applyAggregation called with a grouping column %s which was not included in the table columns: %s",
                        groupKey,
                        tableColumns));
                newColumns.add(groupKey);
            });

    for (AggregateFunction aggregate : aggregates) {
        Optional<JdbcExpression> rewritten = jdbcClient.implementAggregation(session, aggregate, assignments);
        // Every aggregate must be translatable to SQL, otherwise nothing is pushed down.
        if (rewritten.isEmpty()) {
            return Optional.empty();
        }
        JdbcExpression jdbcExpression = rewritten.get();
        // Aggregates are exposed through synthetic columns with generated, unique names.
        String syntheticName = SYNTHETIC_COLUMN_NAME_PREFIX + nextSyntheticColumnId;
        nextSyntheticColumnId++;
        JdbcColumnHandle syntheticColumn = JdbcColumnHandle.builder()
                .setColumnName(syntheticName)
                .setJdbcTypeHandle(jdbcExpression.getJdbcTypeHandle())
                .setColumnType(aggregate.getOutputType())
                .setComment(Optional.of("synthetic"))
                .build();
        newColumns.add(syntheticColumn);
        projections.add(new Variable(syntheticName, aggregate.getOutputType()));
        resultAssignments.add(new Assignment(syntheticName, syntheticColumn, aggregate.getOutputType()));
        expressions.put(syntheticName, jdbcExpression.getExpression());
    }

    List<JdbcColumnHandle> newColumnsList = newColumns.build();

    // We need to have matching column handles in JdbcTableHandle constructed below, as columns read via JDBC must match column handles list.
    // For more context see assertion in JdbcRecordSetProvider.getRecordSet
    PreparedQuery preparedQuery = jdbcClient.prepareQuery(session, handle, Optional.of(groupingSetsAsJdbcColumnHandles), newColumnsList, expressions.buildOrThrow());
    handle = new JdbcTableHandle(
            new JdbcQueryRelationHandle(preparedQuery),
            TupleDomain.all(),
            ImmutableList.of(),
            Optional.empty(),
            OptionalLong.empty(),
            Optional.of(newColumnsList),
            handle.getAllReferencedTables(),
            nextSyntheticColumnId);
    return Optional.of(new AggregationApplicationResult<>(handle, projections.build(), resultAssignments.build(), ImmutableMap.of(), false));
}
Example usage of io.trino.spi.connector.Assignment in the Trino project (trinodb/trino): the HiveMetadata class, applyProjection method.
/**
 * Pushes projection expressions into the {@link HiveTableHandle} so only the projected
 * columns are read. Supported expressions are simple column references and chains of
 * dereferences on a variable; any other expression is left for the engine to evaluate.
 */
@Override
public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjection(ConnectorSession session, ConnectorTableHandle handle, List<ConnectorExpression> projections, Map<String, ColumnHandle> assignments) {
// Respect the session-level toggle for projection pushdown.
if (!isProjectionPushdownEnabled(session)) {
return Optional.empty();
}
// Create projected column representations for supported sub expressions. Simple column references and chain of
// dereferences on a variable are supported right now.
Set<ConnectorExpression> projectedExpressions = projections.stream().flatMap(expression -> extractSupportedProjectedColumns(expression).stream()).collect(toImmutableSet());
Map<ConnectorExpression, ProjectedColumnRepresentation> columnProjections = projectedExpressions.stream().collect(toImmutableMap(Function.identity(), HiveApplyProjectionUtil::createProjectedColumnRepresentation));
HiveTableHandle hiveTableHandle = (HiveTableHandle) handle;
// all references are simple variables
if (columnProjections.values().stream().allMatch(ProjectedColumnRepresentation::isVariable)) {
Set<ColumnHandle> projectedColumns = ImmutableSet.copyOf(assignments.values());
// The same projection was already applied — returning empty avoids an optimizer loop.
if (hiveTableHandle.getProjectedColumns().equals(projectedColumns)) {
return Optional.empty();
}
// No new column handles are needed; reuse the incoming assignments as-is.
List<Assignment> assignmentsList = assignments.entrySet().stream().map(assignment -> new Assignment(assignment.getKey(), assignment.getValue(), ((HiveColumnHandle) assignment.getValue()).getType())).collect(toImmutableList());
return Optional.of(new ProjectionApplicationResult<>(hiveTableHandle.withProjectedColumns(projectedColumns), projections, assignmentsList, false));
}
// At least one dereference chain is present: build a (possibly new) column handle per
// projected column, keyed by name to de-duplicate.
Map<String, Assignment> newAssignments = new HashMap<>();
ImmutableMap.Builder<ConnectorExpression, Variable> newVariablesBuilder = ImmutableMap.builder();
ImmutableSet.Builder<ColumnHandle> projectedColumnsBuilder = ImmutableSet.builder();
for (Map.Entry<ConnectorExpression, ProjectedColumnRepresentation> entry : columnProjections.entrySet()) {
ConnectorExpression expression = entry.getKey();
ProjectedColumnRepresentation projectedColumn = entry.getValue();
ColumnHandle projectedColumnHandle;
String projectedColumnName;
// See if input already contains a columnhandle for this projected column, avoid creating duplicates.
Optional<String> existingColumn = find(assignments, projectedColumn);
if (existingColumn.isPresent()) {
projectedColumnName = existingColumn.get();
projectedColumnHandle = assignments.get(projectedColumnName);
} else {
// Create a new column handle
HiveColumnHandle oldColumnHandle = (HiveColumnHandle) assignments.get(projectedColumn.getVariable().getName());
projectedColumnHandle = createProjectedColumnHandle(oldColumnHandle, projectedColumn.getDereferenceIndices());
projectedColumnName = ((HiveColumnHandle) projectedColumnHandle).getName();
}
// Remember expression -> variable so the original projections can be rewritten below.
Variable projectedColumnVariable = new Variable(projectedColumnName, expression.getType());
Assignment newAssignment = new Assignment(projectedColumnName, projectedColumnHandle, expression.getType());
newAssignments.put(projectedColumnName, newAssignment);
newVariablesBuilder.put(expression, projectedColumnVariable);
projectedColumnsBuilder.add(projectedColumnHandle);
}
// Modify projections to refer to new variables
Map<ConnectorExpression, Variable> newVariables = newVariablesBuilder.buildOrThrow();
List<ConnectorExpression> newProjections = projections.stream().map(expression -> replaceWithNewVariables(expression, newVariables)).collect(toImmutableList());
List<Assignment> outputAssignments = newAssignments.values().stream().collect(toImmutableList());
return Optional.of(new ProjectionApplicationResult<>(hiveTableHandle.withProjectedColumns(projectedColumnsBuilder.build()), newProjections, outputAssignments, false));
}
Example usage of io.trino.spi.connector.Assignment in the Trino project (trinodb/trino): the PinotMetadata class, applyAggregation method.
// Pushes supported aggregate functions and a single grouping set into the Pinot query
// (wrapped in a DynamicTable) so the aggregation is executed by Pinot instead of Trino.
@Override
public Optional<AggregationApplicationResult<ConnectorTableHandle>> applyAggregation(ConnectorSession session, ConnectorTableHandle handle, List<AggregateFunction> aggregates, Map<String, ColumnHandle> assignments, List<List<ColumnHandle>> groupingSets) {
// Respect the session-level toggle for aggregation pushdown.
if (!isAggregationPushdownEnabled(session)) {
return Optional.empty();
}
// Global aggregation is represented by [[]]
verify(!groupingSets.isEmpty(), "No grouping sets provided");
// Pinot currently only supports simple GROUP BY clauses with a single grouping set
if (groupingSets.size() != 1) {
return Optional.empty();
}
// Grouping on array-typed columns is not supported by Pinot.
// See https://github.com/apache/pinot/issues/8353 for more details.
if (getOnlyElement(groupingSets).stream().filter(columnHandle -> ((PinotColumnHandle) columnHandle).getDataType() instanceof ArrayType).findFirst().isPresent()) {
return Optional.empty();
}
PinotTableHandle tableHandle = (PinotTableHandle) handle;
// If there is an offset then do not push the aggregation down as the results will not be correct
// Also bail out when the existing query already aggregates: a second aggregation on top
// cannot be expressed within the same Pinot query.
if (tableHandle.getQuery().isPresent() && (!tableHandle.getQuery().get().getAggregateColumns().isEmpty() || tableHandle.getQuery().get().isAggregateInProjections() || tableHandle.getQuery().get().getOffset().isPresent())) {
return Optional.empty();
}
ImmutableList.Builder<ConnectorExpression> projections = ImmutableList.builder();
ImmutableList.Builder<Assignment> resultAssignments = ImmutableList.builder();
ImmutableList.Builder<PinotColumnHandle> aggregateColumnsBuilder = ImmutableList.builder();
for (AggregateFunction aggregate : aggregates) {
// Try the generic rewriter first, then give count(distinct ...) special handling.
Optional<AggregateExpression> rewriteResult = aggregateFunctionRewriter.rewrite(session, aggregate, assignments);
rewriteResult = applyCountDistinct(session, aggregate, assignments, tableHandle, rewriteResult);
// Every aggregate must be rewritable, otherwise nothing is pushed down.
if (rewriteResult.isEmpty()) {
return Optional.empty();
}
AggregateExpression aggregateExpression = rewriteResult.get();
PinotColumnHandle pinotColumnHandle = new PinotColumnHandle(aggregateExpression.toFieldName(), aggregate.getOutputType(), aggregateExpression.toExpression(), false, true, aggregateExpression.isReturnNullOnEmptyGroup(), Optional.of(aggregateExpression.getFunction()), Optional.of(aggregateExpression.getArgument()));
aggregateColumnsBuilder.add(pinotColumnHandle);
projections.add(new Variable(pinotColumnHandle.getColumnName(), pinotColumnHandle.getDataType()));
resultAssignments.add(new Assignment(pinotColumnHandle.getColumnName(), pinotColumnHandle, pinotColumnHandle.getDataType()));
}
List<PinotColumnHandle> groupingColumns = getOnlyElement(groupingSets).stream().map(PinotColumnHandle.class::cast).map(PinotColumnHandle::fromNonAggregateColumnHandle).collect(toImmutableList());
OptionalLong limitForDynamicTable = OptionalLong.empty();
// Request one row more than the broker limit so the connector can
// know when the limit was exceeded and throw an error
if (tableHandle.getLimit().isEmpty() && !groupingColumns.isEmpty()) {
limitForDynamicTable = OptionalLong.of(maxRowsPerBrokerQuery + 1);
}
List<PinotColumnHandle> aggregationColumns = aggregateColumnsBuilder.build();
String newQuery = "";
List<PinotColumnHandle> newSelections = groupingColumns;
if (tableHandle.getQuery().isPresent()) {
// An underlying query exists: resolve grouping and aggregate columns against its
// projections so aliases defined there are honored.
newQuery = tableHandle.getQuery().get().getQuery();
Map<String, PinotColumnHandle> projectionsMap = tableHandle.getQuery().get().getProjections().stream().collect(toImmutableMap(PinotColumnHandle::getColumnName, identity()));
groupingColumns = groupingColumns.stream().map(groupIngColumn -> projectionsMap.getOrDefault(groupIngColumn.getColumnName(), groupIngColumn)).collect(toImmutableList());
ImmutableList.Builder<PinotColumnHandle> newSelectionsBuilder = ImmutableList.<PinotColumnHandle>builder().addAll(groupingColumns);
aggregationColumns = aggregationColumns.stream().map(aggregateExpression -> resolveAggregateExpressionWithAlias(aggregateExpression, projectionsMap)).collect(toImmutableList());
newSelections = newSelectionsBuilder.build();
}
DynamicTable dynamicTable = new DynamicTable(tableHandle.getTableName(), Optional.empty(), newSelections, tableHandle.getQuery().flatMap(DynamicTable::getFilter), groupingColumns, aggregationColumns, ImmutableList.of(), limitForDynamicTable, OptionalLong.empty(), newQuery);
tableHandle = new PinotTableHandle(tableHandle.getSchemaName(), tableHandle.getTableName(), tableHandle.getConstraint(), tableHandle.getLimit(), Optional.of(dynamicTable));
return Optional.of(new AggregationApplicationResult<>(tableHandle, projections.build(), resultAssignments.build(), ImmutableMap.of(), false));
}
Example usage of io.trino.spi.connector.Assignment in the Trino project (trinodb/trino): the ThriftMetadata class, applyProjection method.
@Override
public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjection(ConnectorSession session, ConnectorTableHandle table, List<ConnectorExpression> projections, Map<String, ColumnHandle> assignments)
{
    ThriftTableHandle thriftTable = (ThriftTableHandle) table;
    // Desired columns already recorded — pushing down again would be a no-op loop.
    if (thriftTable.getDesiredColumns().isPresent()) {
        return Optional.empty();
    }
    ImmutableSet.Builder<ColumnHandle> columnsBuilder = ImmutableSet.builder();
    ImmutableList.Builder<Assignment> outputAssignments = ImmutableList.builder();
    // Every assigned column is both requested from the source and re-exposed unchanged.
    for (Map.Entry<String, ColumnHandle> entry : assignments.entrySet()) {
        ColumnHandle column = entry.getValue();
        columnsBuilder.add(column);
        outputAssignments.add(new Assignment(entry.getKey(), column, ((ThriftColumnHandle) column).getColumnType()));
    }
    ThriftTableHandle newHandle = new ThriftTableHandle(thriftTable.getSchemaName(), thriftTable.getTableName(), thriftTable.getConstraint(), Optional.of(columnsBuilder.build()));
    return Optional.of(new ProjectionApplicationResult<>(newHandle, projections, outputAssignments.build(), false));
}
Example usage of io.trino.spi.connector.Assignment in the Trino project (trinodb/trino): the KuduMetadata class, applyProjection method.
/**
 * Only applies to the projection which selects a list of top-level columns.
 * <p>
 * Take this query "select col1, col2.field1 from test_table" as an example:
 * <p>
 * The optimizer calls with the following arguments:
 * <p>
 * handle = TH0 (col0, col1, col2, col3)
 * projections = [
 * col1,
 * f(col2)
 * ]
 * assignments = [
 * col1 = CH1
 * col2 = CH2
 * ]
 * <p>
 * <p>
 * This method returns:
 * <p>
 * handle = TH1 (col1, col2)
 * projections = [
 * col1,
 * f(col2)
 * ]
 * assignments = [
 * col1 = CH1
 * col2 = CH2
 * ]
 */
@Override
public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjection(ConnectorSession session, ConnectorTableHandle table, List<ConnectorExpression> projections, Map<String, ColumnHandle> assignments)
{
    KuduTableHandle kuduTable = (KuduTableHandle) table;
    // Desired columns already recorded — pushing down again would be a no-op loop.
    if (kuduTable.getDesiredColumns().isPresent()) {
        return Optional.empty();
    }
    ImmutableList.Builder<ColumnHandle> columnsBuilder = ImmutableList.builder();
    ImmutableList.Builder<Assignment> outputAssignments = ImmutableList.builder();
    // Every assigned column is both requested from Kudu and re-exposed unchanged.
    for (Map.Entry<String, ColumnHandle> entry : assignments.entrySet()) {
        ColumnHandle column = entry.getValue();
        columnsBuilder.add(column);
        outputAssignments.add(new Assignment(entry.getKey(), column, ((KuduColumnHandle) column).getType()));
    }
    KuduTableHandle newHandle = new KuduTableHandle(kuduTable.getSchemaTableName(), kuduTable.getTable(clientSession), kuduTable.getConstraint(), Optional.of(columnsBuilder.build()), kuduTable.isDeleteHandle(), kuduTable.getBucketCount(), kuduTable.getLimit());
    return Optional.of(new ProjectionApplicationResult<>(newHandle, projections, outputAssignments.build(), false));
}
Aggregations