use of io.trino.spi.connector.ConnectorTableMetadata in project trino by trinodb.
the class LogicalPlanner method createTableCreationPlan.
private RelationPlan createTableCreationPlan(Analysis analysis, Query query)
{
    Analysis.Create create = analysis.getCreate().orElseThrow();
    QualifiedObjectName destination = create.getDestination().orElseThrow();
    RelationPlan plan = createRelationPlan(analysis, query);
    if (!create.isCreateTableAsSelectWithData()) {
        PlanNode root = new LimitNode(idAllocator.getNextId(), plan.getRoot(), 0L, false);
        plan = new RelationPlan(root, plan.getScope(), plan.getFieldMappings(), Optional.empty());
    }
    ConnectorTableMetadata tableMetadata = create.getMetadata().orElseThrow();
    Optional<TableLayout> newTableLayout = create.getLayout();
    List<String> columnNames = tableMetadata.getColumns().stream()
            // TODO: this filter is redundant
            .filter(column -> !column.isHidden())
            .map(ColumnMetadata::getName)
            .collect(toImmutableList());
    TableStatisticsMetadata statisticsMetadata = metadata.getStatisticsCollectionMetadataForWrite(session, destination.getCatalogName(), tableMetadata);
    return createTableWriterPlan(
            analysis,
            plan.getRoot(),
            visibleFields(plan),
            new CreateReference(destination.getCatalogName(), tableMetadata, newTableLayout),
            columnNames,
            tableMetadata.getColumns(),
            newTableLayout,
            statisticsMetadata);
}
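In this planner path, the ConnectorTableMetadata is produced by analysis of the CREATE TABLE AS statement and simply carries the target table's name, column definitions, and properties into the write plan. For reference, a minimal sketch of constructing such an object directly, assuming the io.trino.spi.connector and io.trino.spi.type classes plus Guava's ImmutableList are imported (the schema, table, and column names are made up for illustration):
SchemaTableName target = new SchemaTableName("sales", "orders_summary");
List<ColumnMetadata> columns = ImmutableList.of(
        new ColumnMetadata("order_id", BigintType.BIGINT),
        new ColumnMetadata("total_price", DoubleType.DOUBLE));
// two-argument constructor: no table properties and no table comment
ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(target, columns);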
use of io.trino.spi.connector.ConnectorTableMetadata in project trino by trinodb.
the class TestHiveGlueMetastore method testUpdatePartitionedStatisticsOnCreate.
@Test
public void testUpdatePartitionedStatisticsOnCreate()
{
    SchemaTableName tableName = temporaryTable("update_partitioned_statistics_create");
    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        List<ColumnMetadata> columns = ImmutableList.of(
                new ColumnMetadata("a_column", BigintType.BIGINT),
                new ColumnMetadata("part_column", BigintType.BIGINT));
        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName, columns, createTableProperties(TEXTFILE, ImmutableList.of("part_column")));
        ConnectorOutputTableHandle createTableHandle = metadata.beginCreateTable(session, tableMetadata, Optional.empty(), NO_RETRIES);
        // write data
        ConnectorPageSink sink = pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, createTableHandle);
        MaterializedResult data = MaterializedResult.resultBuilder(session, BigintType.BIGINT, BigintType.BIGINT)
                .row(1L, 1L).row(2L, 1L).row(3L, 1L).row(4L, 2L).row(5L, 2L)
                .build();
        sink.appendPage(data.toPage());
        Collection<Slice> fragments = getFutureValue(sink.finish());
        // prepare statistics
        ComputedStatistics statistics1 = ComputedStatistics.builder(ImmutableList.of("part_column"), ImmutableList.of(singleValueBlock(1)))
                .addTableStatistic(TableStatisticType.ROW_COUNT, singleValueBlock(3))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", MIN_VALUE), singleValueBlock(1))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", MAX_VALUE), singleValueBlock(3))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", NUMBER_OF_DISTINCT_VALUES), singleValueBlock(3))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", NUMBER_OF_NON_NULL_VALUES), singleValueBlock(3))
                .build();
        ComputedStatistics statistics2 = ComputedStatistics.builder(ImmutableList.of("part_column"), ImmutableList.of(singleValueBlock(2)))
                .addTableStatistic(TableStatisticType.ROW_COUNT, singleValueBlock(2))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", MIN_VALUE), singleValueBlock(4))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", MAX_VALUE), singleValueBlock(5))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", NUMBER_OF_DISTINCT_VALUES), singleValueBlock(2))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", NUMBER_OF_NON_NULL_VALUES), singleValueBlock(2))
                .build();
        // finish CTAS
        metadata.finishCreateTable(session, createTableHandle, fragments, ImmutableList.of(statistics1, statistics2));
        transaction.commit();
    }
    finally {
        dropTable(tableName);
    }
}
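The test above uses the three-argument constructor, whose last argument is a connector-specific property map (built here by the createTableProperties test helper). A hedged sketch of what that call looks like with the properties written out inline; the property keys below are illustrative, and the real names are defined by the Hive connector:
Map<String, Object> properties = ImmutableMap.of(
        "format", "TEXTFILE",                               // illustrative keys; the actual names
        "partitioned_by", ImmutableList.of("part_column")); // come from the connector
ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(
        new SchemaTableName("test_schema", "update_partitioned_statistics_create"),
        ImmutableList.of(
                new ColumnMetadata("a_column", BigintType.BIGINT),
                new ColumnMetadata("part_column", BigintType.BIGINT)),
        properties);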
use of io.trino.spi.connector.ConnectorTableMetadata in project trino by trinodb.
the class TestHiveGlueMetastore method testUpdateStatisticsOnCreate.
@Test
public void testUpdateStatisticsOnCreate()
{
    SchemaTableName tableName = temporaryTable("update_statistics_create");
    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        List<ColumnMetadata> columns = ImmutableList.of(new ColumnMetadata("a_column", BigintType.BIGINT));
        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName, columns, createTableProperties(TEXTFILE));
        ConnectorOutputTableHandle createTableHandle = metadata.beginCreateTable(session, tableMetadata, Optional.empty(), NO_RETRIES);
        // write data
        ConnectorPageSink sink = pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, createTableHandle);
        MaterializedResult data = MaterializedResult.resultBuilder(session, BigintType.BIGINT)
                .row(1L).row(2L).row(3L).row(4L).row(5L)
                .build();
        sink.appendPage(data.toPage());
        Collection<Slice> fragments = getFutureValue(sink.finish());
        // prepare statistics
        ComputedStatistics statistics = ComputedStatistics.builder(ImmutableList.of(), ImmutableList.of())
                .addTableStatistic(TableStatisticType.ROW_COUNT, singleValueBlock(5))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", MIN_VALUE), singleValueBlock(1))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", MAX_VALUE), singleValueBlock(5))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", NUMBER_OF_DISTINCT_VALUES), singleValueBlock(5))
                .addColumnStatistic(new ColumnStatisticMetadata("a_column", NUMBER_OF_NON_NULL_VALUES), singleValueBlock(5))
                .build();
        // finish CTAS
        metadata.finishCreateTable(session, createTableHandle, fragments, ImmutableList.of(statistics));
        transaction.commit();
    }
    finally {
        dropTable(tableName);
    }
}
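For context, the empty grouping lists passed to ComputedStatistics.builder above mean the statistics describe the whole (unpartitioned) table, while the partitioned test earlier groups them by part_column. A hedged sketch of the accessor side a connector could use when it receives these objects in finishCreateTable, assuming statistics is the instance built above and the io.trino.spi.statistics and io.trino.spi.block types are imported:
List<String> groupingColumns = statistics.getGroupingColumns();              // empty here: whole-table statistics
Map<TableStatisticType, Block> tableStats = statistics.getTableStatistics(); // e.g. ROW_COUNT
Map<ColumnStatisticMetadata, Block> columnStats = statistics.getColumnStatistics();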
use of io.trino.spi.connector.ConnectorTableMetadata in project trino by trinodb.
the class TestDefaultJdbcMetadata method testCreateAndAlterTable.
@Test
public void testCreateAndAlterTable()
{
    SchemaTableName table = new SchemaTableName("example", "foo");
    metadata.createTable(SESSION, new ConnectorTableMetadata(table, ImmutableList.of(new ColumnMetadata("text", VARCHAR))), false);

    JdbcTableHandle handle = metadata.getTableHandle(SESSION, table);
    ConnectorTableMetadata layout = metadata.getTableMetadata(SESSION, handle);
    assertEquals(layout.getTable(), table);
    assertEquals(layout.getColumns().size(), 1);
    assertEquals(layout.getColumns().get(0), new ColumnMetadata("text", VARCHAR));

    metadata.addColumn(SESSION, handle, new ColumnMetadata("x", VARCHAR));
    layout = metadata.getTableMetadata(SESSION, handle);
    assertEquals(layout.getColumns().size(), 2);
    assertEquals(layout.getColumns().get(0), new ColumnMetadata("text", VARCHAR));
    assertEquals(layout.getColumns().get(1), new ColumnMetadata("x", VARCHAR));

    JdbcColumnHandle columnHandle = new JdbcColumnHandle("x", JDBC_VARCHAR, VARCHAR);
    metadata.dropColumn(SESSION, handle, columnHandle);
    layout = metadata.getTableMetadata(SESSION, handle);
    assertEquals(layout.getColumns().size(), 1);
    assertEquals(layout.getColumns().get(0), new ColumnMetadata("text", VARCHAR));

    SchemaTableName newTableName = new SchemaTableName("example", "bar");
    metadata.renameTable(SESSION, handle, newTableName);
    handle = metadata.getTableHandle(SESSION, newTableName);
    layout = metadata.getTableMetadata(SESSION, handle);
    assertEquals(layout.getTable(), newTableName);
    assertEquals(layout.getColumns().size(), 1);
    assertEquals(layout.getColumns().get(0), new ColumnMetadata("text", VARCHAR));
}
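getTableMetadata returns a fresh ConnectorTableMetadata describing the table's current state, which is what the assertions above inspect after each DDL step. A small sketch of the read-side accessors, assuming handle is a valid table handle for the session:
ConnectorTableMetadata current = metadata.getTableMetadata(SESSION, handle);
SchemaTableName name = current.getTable();            // schema-qualified table name
List<ColumnMetadata> columns = current.getColumns();  // ordered column definitions
Map<String, Object> properties = current.getProperties();
Optional<String> comment = current.getComment();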
use of io.trino.spi.connector.ConnectorTableMetadata in project trino by trinodb.
the class TestDefaultJdbcMetadata method testAggregationPushdownForTableHandle.
@Test
public void testAggregationPushdownForTableHandle()
{
    ConnectorSession session = TestingConnectorSession.builder()
            .setPropertyMetadata(new JdbcMetadataSessionProperties(new JdbcMetadataConfig().setAggregationPushdownEnabled(true), Optional.empty()).getSessionProperties())
            .build();
    ColumnHandle groupByColumn = metadata.getColumnHandles(session, tableHandle).get("text");
    Function<ConnectorTableHandle, Optional<AggregationApplicationResult<ConnectorTableHandle>>> applyAggregation = handle -> metadata.applyAggregation(
            session,
            handle,
            ImmutableList.of(new AggregateFunction("count", BIGINT, List.of(), List.of(), false, Optional.empty())),
            ImmutableMap.of(),
            ImmutableList.of(ImmutableList.of(groupByColumn)));
    ConnectorTableHandle baseTableHandle = metadata.getTableHandle(session, new SchemaTableName("example", "numbers"));
    Optional<AggregationApplicationResult<ConnectorTableHandle>> aggregationResult = applyAggregation.apply(baseTableHandle);
    assertThat(aggregationResult).isPresent();

    SchemaTableName noAggregationPushdownTable = new SchemaTableName("example", "no_aggregation_pushdown");
    metadata.createTable(SESSION, new ConnectorTableMetadata(noAggregationPushdownTable, ImmutableList.of(new ColumnMetadata("text", VARCHAR))), false);
    ConnectorTableHandle noAggregationPushdownTableHandle = metadata.getTableHandle(session, noAggregationPushdownTable);
    aggregationResult = applyAggregation.apply(noAggregationPushdownTableHandle);
    assertThat(aggregationResult).isEmpty();
}
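createTable is the generic ConnectorMetadata entry point that materializes a ConnectorTableMetadata; its final boolean is the ignoreExisting flag. A minimal sketch with made-up table and column names:
ConnectorTableMetadata scratch = new ConnectorTableMetadata(
        new SchemaTableName("example", "scratch"),
        ImmutableList.of(new ColumnMetadata("id", BIGINT)));
// ignoreExisting = true: do not fail if a table with this name already exists
metadata.createTable(SESSION, scratch, true);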