
Example 11 with ConnectorTableLayout

Use of io.trino.spi.connector.ConnectorTableLayout in project trino by trinodb.

From class AbstractTestHive, method testCreateBucketedTableLayout.

@Test
public void testCreateBucketedTableLayout() {
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        // Ask the connector for the write layout of a new table bucketed on column1 into 10 buckets
        Optional<ConnectorTableLayout> newTableLayout = metadata.getNewTableLayout(session, new ConnectorTableMetadata(
                new SchemaTableName("schema", "table"),
                ImmutableList.of(
                        new ColumnMetadata("column1", BIGINT),
                        new ColumnMetadata("column2", BIGINT)),
                ImmutableMap.of(
                        PARTITIONED_BY_PROPERTY, ImmutableList.of(),
                        BUCKETED_BY_PROPERTY, ImmutableList.of("column1"),
                        BUCKET_COUNT_PROPERTY, 10,
                        SORTED_BY_PROPERTY, ImmutableList.of())));
        assertTrue(newTableLayout.isPresent());
        // The layout is expected to be backed by a Hive partitioning handle describing the bucketing
        ConnectorPartitioningHandle partitioningHandle = new HivePartitioningHandle(
                BUCKETING_V1, 10, ImmutableList.of(HIVE_LONG), OptionalInt.empty(), false);
        assertEquals(newTableLayout.get().getPartitioning(), Optional.of(partitioningHandle));
        assertEquals(newTableLayout.get().getPartitionColumns(), ImmutableList.of("column1"));
        ConnectorBucketNodeMap connectorBucketNodeMap = nodePartitioningProvider.getBucketNodeMap(transaction.getTransactionHandle(), session, partitioningHandle);
        assertEquals(connectorBucketNodeMap.getBucketCount(), 10);
        assertFalse(connectorBucketNodeMap.hasFixedMapping());
    }
}
Also used : ColumnMetadata(io.trino.spi.connector.ColumnMetadata) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ConnectorBucketNodeMap(io.trino.spi.connector.ConnectorBucketNodeMap) ConnectorSession(io.trino.spi.connector.ConnectorSession) TestingConnectorSession(io.trino.testing.TestingConnectorSession) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) Test(org.testng.annotations.Test)
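
The call pattern exercised here is plain SPI and not specific to Hive. A minimal sketch, assuming a ConnectorMetadata, ConnectorSession and ConnectorTableMetadata like the ones above, of how a caller reads back the layout returned by getNewTableLayout using only the accessors the assertions rely on:

Optional<ConnectorTableLayout> layout = metadata.getNewTableLayout(session, tableMetadata);
layout.ifPresent(tableLayout -> {
    // Present only when the connector supplies a partitioning the engine must write with
    // (the mock in Example 14 below uses this to model required vs preferred write partitioning)
    Optional<ConnectorPartitioningHandle> partitioning = tableLayout.getPartitioning();
    // Column names the written data should be partitioned on
    List<String> partitionColumns = tableLayout.getPartitionColumns();
});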

Example 12 with ConnectorTableLayout

Use of io.trino.spi.connector.ConnectorTableLayout in project trino by trinodb.

From class AbstractTestHive, method insertBucketedTableLayout.

protected void insertBucketedTableLayout(boolean transactional) throws Exception {
    SchemaTableName tableName = temporaryTable("empty_bucketed_table");
    try {
        List<Column> columns = ImmutableList.of(
                new Column("column1", HIVE_STRING, Optional.empty()),
                new Column("column2", HIVE_LONG, Optional.empty()));
        // Bucket the table on column1 into 4 buckets, with no sort columns
        HiveBucketProperty bucketProperty = new HiveBucketProperty(ImmutableList.of("column1"), BUCKETING_V1, 4, ImmutableList.of());
        createEmptyTable(tableName, ORC, columns, ImmutableList.of(), Optional.of(bucketProperty), transactional);
        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();
            ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
            Optional<ConnectorTableLayout> insertLayout = metadata.getInsertLayout(session, tableHandle);
            assertTrue(insertLayout.isPresent());
            ConnectorPartitioningHandle partitioningHandle = new HivePartitioningHandle(
                    bucketProperty.getBucketingVersion(),
                    bucketProperty.getBucketCount(),
                    ImmutableList.of(HIVE_STRING),
                    OptionalInt.empty(),
                    false);
            assertEquals(insertLayout.get().getPartitioning(), Optional.of(partitioningHandle));
            assertEquals(insertLayout.get().getPartitionColumns(), ImmutableList.of("column1"));
            ConnectorBucketNodeMap connectorBucketNodeMap = nodePartitioningProvider.getBucketNodeMap(transaction.getTransactionHandle(), session, partitioningHandle);
            assertEquals(connectorBucketNodeMap.getBucketCount(), 4);
            assertFalse(connectorBucketNodeMap.hasFixedMapping());
        }
    } finally {
        dropTable(tableName);
    }
}
Also used : CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) HiveColumnHandle.createBaseColumn(io.trino.plugin.hive.HiveColumnHandle.createBaseColumn) Column(io.trino.plugin.hive.metastore.Column) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) SortingColumn(io.trino.plugin.hive.metastore.SortingColumn) ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ConnectorBucketNodeMap(io.trino.spi.connector.ConnectorBucketNodeMap) ConnectorSession(io.trino.spi.connector.ConnectorSession) TestingConnectorSession(io.trino.testing.TestingConnectorSession) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata)
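
For orientation, a hypothetical connector-side counterpart of what this test asserts: a simplified getInsertLayout that advertises a bucketed write layout when the table defines buckets. ExampleTableHandle and ExampleBucketingHandle are illustrative placeholders, not the Hive connector's actual classes (the real implementation returns the HivePartitioningHandle asserted above):

// Illustrative placeholder handles (assuming the SPI handle interfaces can be implemented as simple markers)
record ExampleTableHandle(List<String> bucketColumns, int bucketCount) implements ConnectorTableHandle {}
record ExampleBucketingHandle(int bucketCount) implements ConnectorPartitioningHandle {}

// Sketch of a ConnectorMetadata.getInsertLayout implementation for such a connector
public Optional<ConnectorTableLayout> getInsertLayout(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    ExampleTableHandle table = (ExampleTableHandle) tableHandle;
    if (table.bucketColumns().isEmpty()) {
        // Unbucketed table: no layout requirement, the engine chooses the write distribution
        return Optional.empty();
    }
    // Bucketed table: writes must be partitioned on the bucket columns using the connector's handle
    return Optional.of(new ConnectorTableLayout(
            new ExampleBucketingHandle(table.bucketCount()),
            table.bucketColumns()));
}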

Example 13 with ConnectorTableLayout

Use of io.trino.spi.connector.ConnectorTableLayout in project trino by trinodb.

From class DeltaLakeMetadata, method getLayoutForOptimize.

private Optional<ConnectorTableLayout> getLayoutForOptimize(DeltaLakeTableExecuteHandle executeHandle) {
    DeltaTableOptimizeHandle optimizeHandle = (DeltaTableOptimizeHandle) executeHandle.getProcedureHandle();
    List<String> partitionColumnNames = optimizeHandle.getMetadataEntry().getCanonicalPartitionColumns();
    if (partitionColumnNames.isEmpty()) {
        return Optional.empty();
    }
    // Index the table's columns by lower-cased name so the partition columns can be looked up case-insensitively
    Map<String, DeltaLakeColumnHandle> columnsByName = optimizeHandle.getTableColumns().stream()
            .collect(toImmutableMap(columnHandle -> columnHandle.getName().toLowerCase(Locale.ENGLISH), identity()));
    ImmutableList.Builder<DeltaLakeColumnHandle> partitioningColumns = ImmutableList.builder();
    for (String columnName : partitionColumnNames) {
        partitioningColumns.add(columnsByName.get(columnName));
    }
    DeltaLakePartitioningHandle partitioningHandle = new DeltaLakePartitioningHandle(partitioningColumns.build());
    return Optional.of(new ConnectorTableLayout(partitioningHandle, partitionColumnNames));
}
Also used : DeltaTableOptimizeHandle(io.trino.plugin.deltalake.procedure.DeltaTableOptimizeHandle) DeltaLakeTableExecuteHandle(io.trino.plugin.deltalake.procedure.DeltaLakeTableExecuteHandle) ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ImmutableList(com.google.common.collect.ImmutableList) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) Function.identity(java.util.function.Function.identity) Locale(java.util.Locale) Map(java.util.Map) List(java.util.List) Optional(java.util.Optional)

Example 14 with ConnectorTableLayout

Use of io.trino.spi.connector.ConnectorTableLayout in project trino by trinodb.

From class TestInsert, method createLocalQueryRunner.

@Override
protected LocalQueryRunner createLocalQueryRunner() {
    Session.SessionBuilder sessionBuilder = testSessionBuilder().setCatalog("mock").setSchema("schema");
    LocalQueryRunner queryRunner = LocalQueryRunner.create(sessionBuilder.build());
    queryRunner.createCatalog("mock", MockConnectorFactory.builder().withGetTableHandle((session, schemaTableName) -> {
        if (schemaTableName.getTableName().equals("test_table_preferred_partitioning")) {
            return new MockConnectorTableHandle(schemaTableName);
        }
        if (schemaTableName.getTableName().equals("test_table_required_partitioning")) {
            return new MockConnectorTableHandle(schemaTableName);
        }
        return null;
    }).withGetColumns(name -> ImmutableList.of(
            new ColumnMetadata("column1", INTEGER),
            new ColumnMetadata("column2", INTEGER)))
            .withGetInsertLayout((session, tableName) -> {
        if (tableName.getTableName().equals("test_table_preferred_partitioning")) {
            return Optional.of(new ConnectorTableLayout(ImmutableList.of("column1")));
        }
        if (tableName.getTableName().equals("test_table_required_partitioning")) {
            return Optional.of(new ConnectorTableLayout(new TpchPartitioningHandle("orders", 10), ImmutableList.of("column1")));
        }
        return Optional.empty();
    }).withGetNewTableLayout((session, tableMetadata) -> {
        if (tableMetadata.getTable().getTableName().equals("new_test_table_preferred_partitioning")) {
            return Optional.of(new ConnectorTableLayout(ImmutableList.of("column1")));
        }
        if (tableMetadata.getTable().getTableName().equals("new_test_table_required_partitioning")) {
            return Optional.of(new ConnectorTableLayout(new TpchPartitioningHandle("orders", 10), ImmutableList.of("column1")));
        }
        if (tableMetadata.getTable().getTableName().equals("new_test_table_unpartitioned")) {
            return Optional.empty();
        }
        return Optional.empty();
    }).build(), ImmutableMap.of());
    return queryRunner;
}
Also used : ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS(io.trino.SystemSessionProperties.PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS) TableWriterNode(io.trino.sql.planner.plan.TableWriterNode) Test(org.testng.annotations.Test) PlanNode(io.trino.sql.planner.plan.PlanNode) ImmutableList(com.google.common.collect.ImmutableList) MockConnectorFactory(io.trino.connector.MockConnectorFactory) PlanMatchPattern.exchange(io.trino.sql.planner.assertions.PlanMatchPattern.exchange) LocalQueryRunner(io.trino.testing.LocalQueryRunner) LOCAL(io.trino.sql.planner.plan.ExchangeNode.Scope.LOCAL) REPARTITION(io.trino.sql.planner.plan.ExchangeNode.Type.REPARTITION) INTEGER(io.trino.spi.type.IntegerType.INTEGER) BasePlanTest(io.trino.sql.planner.assertions.BasePlanTest) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) PlanMatchPattern.values(io.trino.sql.planner.assertions.PlanMatchPattern.values) StatsProvider(io.trino.cost.StatsProvider) TASK_WRITER_COUNT(io.trino.SystemSessionProperties.TASK_WRITER_COUNT) PlanMatchPattern.node(io.trino.sql.planner.assertions.PlanMatchPattern.node) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) TpchPartitioningHandle(io.trino.plugin.tpch.TpchPartitioningHandle) TestingSession.testSessionBuilder(io.trino.testing.TestingSession.testSessionBuilder) PlanMatchPattern.anyTree(io.trino.sql.planner.assertions.PlanMatchPattern.anyTree) REMOTE(io.trino.sql.planner.plan.ExchangeNode.Scope.REMOTE) MatchResult(io.trino.sql.planner.assertions.MatchResult) USE_PREFERRED_WRITE_PARTITIONING(io.trino.SystemSessionProperties.USE_PREFERRED_WRITE_PARTITIONING) Metadata(io.trino.metadata.Metadata) Matcher(io.trino.sql.planner.assertions.Matcher) Optional(java.util.Optional) SymbolAliases(io.trino.sql.planner.assertions.SymbolAliases) ExchangeNode(io.trino.sql.planner.plan.ExchangeNode) Session(io.trino.Session)
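
The two ConnectorTableLayout constructors used by this mock encode the distinction its table names suggest: a layout built from column names only carries a preferred partitioning the engine may apply, while a layout built with a connector partitioning handle carries a partitioning that writes must use. A small comparison sketch, reusing the TpchPartitioningHandle from above:

// Preferred write partitioning: partition columns only, no connector partitioning handle
ConnectorTableLayout preferred = new ConnectorTableLayout(ImmutableList.of("column1"));
// preferred.getPartitioning() is Optional.empty(); preferred.getPartitionColumns() is ["column1"]

// Required write partitioning: the connector supplies the partitioning handle to write with
ConnectorTableLayout required = new ConnectorTableLayout(
        new TpchPartitioningHandle("orders", 10),
        ImmutableList.of("column1"));
// required.getPartitioning() is Optional.of(the TPC-H handle)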

Example 15 with ConnectorTableLayout

Use of io.trino.spi.connector.ConnectorTableLayout in project trino by trinodb.

From class TestDeltaLakeMetadata, method testGetInsertLayout.

@Test
public void testGetInsertLayout() {
    DeltaLakeMetadata deltaLakeMetadata = deltaLakeMetadataFactory.create(SESSION.getIdentity());
    // Table partitioned by BIGINT_COLUMN_1 (see the assertions below)
    ConnectorTableMetadata tableMetadata = newTableMetadata(
            ImmutableList.of(BIGINT_COLUMN_1, BIGINT_COLUMN_2),
            ImmutableList.of(BIGINT_COLUMN_1));
    deltaLakeMetadata.createTable(SESSION, tableMetadata, false);
    Optional<ConnectorTableLayout> insertLayout = deltaLakeMetadata.getInsertLayout(
            SESSION,
            deltaLakeMetadata.getTableHandle(SESSION, tableMetadata.getTable()));
    assertThat(insertLayout).isPresent();
    assertThat(insertLayout.get().getPartitioning()).isNotPresent();
    assertThat(insertLayout.get().getPartitionColumns()).isEqualTo(getPartitionColumnNames(ImmutableList.of(BIGINT_COLUMN_1)));
}
Also used : ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) Test(org.testng.annotations.Test)

Aggregations

ConnectorTableLayout (io.trino.spi.connector.ConnectorTableLayout): 18 usages
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 11 usages
CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName): 10 usages
ConnectorSession (io.trino.spi.connector.ConnectorSession): 10 usages
SchemaTableName (io.trino.spi.connector.SchemaTableName): 10 usages
ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle): 8 usages
ImmutableList (com.google.common.collect.ImmutableList): 7 usages
ConnectorPartitioningHandle (io.trino.spi.connector.ConnectorPartitioningHandle): 7 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 6 usages
Column (io.trino.plugin.hive.metastore.Column): 6 usages
ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata): 6 usages
ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata): 6 usages
Test (org.testng.annotations.Test): 6 usages
ImmutableSet (com.google.common.collect.ImmutableSet): 5 usages
HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn): 5 usages
SortingColumn (io.trino.plugin.hive.metastore.SortingColumn): 5 usages
TrinoException (io.trino.spi.TrinoException): 5 usages
ConnectorOutputTableHandle (io.trino.spi.connector.ConnectorOutputTableHandle): 5 usages
TestingConnectorSession (io.trino.testing.TestingConnectorSession): 5 usages
Verify.verify (com.google.common.base.Verify.verify): 4 usages