Example 6 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in the trino project by trinodb.

From the class AbstractTestHive, method setupHive:

protected void setupHive(String databaseName) {
    database = databaseName;
    tablePartitionFormat = new SchemaTableName(database, "trino_test_partition_format");
    tableUnpartitioned = new SchemaTableName(database, "trino_test_unpartitioned");
    tableOffline = new SchemaTableName(database, "trino_test_offline");
    tableOfflinePartition = new SchemaTableName(database, "trino_test_offline_partition");
    tableNotReadable = new SchemaTableName(database, "trino_test_not_readable");
    view = new SchemaTableName(database, "trino_test_view");
    invalidTable = new SchemaTableName(database, INVALID_TABLE);
    tableBucketedStringInt = new SchemaTableName(database, "trino_test_bucketed_by_string_int");
    tableBucketedBigintBoolean = new SchemaTableName(database, "trino_test_bucketed_by_bigint_boolean");
    tableBucketedDoubleFloat = new SchemaTableName(database, "trino_test_bucketed_by_double_float");
    tablePartitionSchemaChange = new SchemaTableName(database, "trino_test_partition_schema_change");
    tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "trino_test_partition_schema_change_non_canonical");
    tableBucketEvolution = new SchemaTableName(database, "trino_test_bucket_evolution");
    invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty());
    dsColumn = createBaseColumn("ds", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty());
    fileFormatColumn = createBaseColumn("file_format", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty());
    dummyColumn = createBaseColumn("dummy", -1, HIVE_INT, INTEGER, PARTITION_KEY, Optional.empty());
    intColumn = createBaseColumn("t_int", -1, HIVE_INT, INTEGER, PARTITION_KEY, Optional.empty());
    invalidColumnHandle = createBaseColumn(INVALID_COLUMN, 0, HIVE_STRING, VARCHAR, REGULAR, Optional.empty());
    List<ColumnHandle> partitionColumns = ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn);
    tablePartitionFormatPartitions = ImmutableList.<HivePartition>builder()
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=textfile/dummy=1", ImmutableMap.<ColumnHandle, NullableValue>builder().put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29"))).put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("textfile"))).put(dummyColumn, NullableValue.of(INTEGER, 1L)).buildOrThrow()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=sequencefile/dummy=2", ImmutableMap.<ColumnHandle, NullableValue>builder().put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29"))).put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("sequencefile"))).put(dummyColumn, NullableValue.of(INTEGER, 2L)).buildOrThrow()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=rctext/dummy=3", ImmutableMap.<ColumnHandle, NullableValue>builder().put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29"))).put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rctext"))).put(dummyColumn, NullableValue.of(INTEGER, 3L)).buildOrThrow()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=rcbinary/dummy=4", ImmutableMap.<ColumnHandle, NullableValue>builder().put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29"))).put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rcbinary"))).put(dummyColumn, NullableValue.of(INTEGER, 4L)).buildOrThrow()))
            .build();
    tableUnpartitionedPartitions = ImmutableList.of(new HivePartition(tableUnpartitioned));
    tablePartitionFormatProperties = new ConnectorTableProperties(
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                    fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile")), Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile")), Range.equal(createUnboundedVarcharType(), utf8Slice("rctext")), Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false),
                    dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L), Range.equal(INTEGER, 2L), Range.equal(INTEGER, 3L), Range.equal(INTEGER, 4L)), false))),
            Optional.empty(),
            Optional.empty(),
            Optional.of(new DiscretePredicates(partitionColumns, ImmutableList.of(
                    TupleDomain.withColumnDomains(ImmutableMap.of(dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false), fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile"))), false), dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false), fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile"))), false), dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 2L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false), fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rctext"))), false), dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 3L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false), fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false), dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 4L)), false)))))),
            ImmutableList.of());
    tableUnpartitionedProperties = new ConnectorTableProperties();
}
Also used : HiveColumnHandle.bucketColumnHandle(io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle) ColumnHandle(io.trino.spi.connector.ColumnHandle) NullableValue(io.trino.spi.predicate.NullableValue) DiscretePredicates(io.trino.spi.connector.DiscretePredicates) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
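
All the examples in this section construct ConnectorTableProperties either with the no-argument constructor or with the same five positional arguments. As a quick orientation, here is a minimal sketch of both styles; the argument roles (enforced predicate, table partitioning, partitioning columns, discrete predicates, local properties) are inferred from the calls in these examples, so treat the comments as an assumption rather than a definitive API description.

// Sketch only: argument roles inferred from the surrounding examples.
ConnectorTableProperties defaults = new ConnectorTableProperties();
ConnectorTableProperties explicit = new ConnectorTableProperties(
        TupleDomain.all(),       // predicate the connector guarantees on returned rows
        Optional.empty(),        // Optional<ConnectorTablePartitioning>
        Optional.empty(),        // Optional<Set<ColumnHandle>> partitioning columns
        Optional.empty(),        // Optional<DiscretePredicates>, e.g. one TupleDomain per partition
        ImmutableList.of());     // List<LocalProperty<ColumnHandle>>, e.g. SortingProperty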

Example 7 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in the trino project by trinodb.

From the class TestPushProjectionIntoTableScan, method createMockFactory:

private MockConnectorFactory createMockFactory(Map<String, ColumnHandle> assignments, Optional<MockConnectorFactory.ApplyProjection> applyProjection) {
    List<ColumnMetadata> metadata = assignments.entrySet().stream()
            .map(entry -> new ColumnMetadata(entry.getKey(), ((TpchColumnHandle) entry.getValue()).getType()))
            .collect(toImmutableList());
    MockConnectorFactory.Builder builder = MockConnectorFactory.builder()
            .withListSchemaNames(connectorSession -> ImmutableList.of(TEST_SCHEMA))
            .withListTables((connectorSession, schema) -> TEST_SCHEMA.equals(schema) ? ImmutableList.of(TEST_SCHEMA_TABLE) : ImmutableList.of())
            .withGetColumns(schemaTableName -> metadata)
            .withGetTableProperties((session, tableHandle) -> {
                MockConnectorTableHandle mockTableHandle = (MockConnectorTableHandle) tableHandle;
                if (mockTableHandle.getTableName().getTableName().equals(TEST_TABLE)) {
                    return new ConnectorTableProperties(TupleDomain.all(), Optional.of(new ConnectorTablePartitioning(PARTITIONING_HANDLE, ImmutableList.of(column("col", VARCHAR)))), Optional.empty(), Optional.empty(), ImmutableList.of());
                }
                return new ConnectorTableProperties();
            });
    if (applyProjection.isPresent()) {
        builder = builder.withApplyProjection(applyProjection.get());
    }
    return builder.build();
}
Also used : Test(org.testng.annotations.Test) CatalogName(io.trino.connector.CatalogName) MockConnectorFactory(io.trino.connector.MockConnectorFactory) LongLiteral(io.trino.sql.tree.LongLiteral) Arrays.asList(java.util.Arrays.asList) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Map(java.util.Map) ProjectionApplicationResult(io.trino.spi.connector.ProjectionApplicationResult) FunctionCall(io.trino.sql.tree.FunctionCall) ENGLISH(java.util.Locale.ENGLISH) PlanNodeStatsEstimate(io.trino.cost.PlanNodeStatsEstimate) PlanMatchPattern.expression(io.trino.sql.planner.assertions.PlanMatchPattern.expression) TpchColumnHandle(io.trino.plugin.tpch.TpchColumnHandle) RowType(io.trino.spi.type.RowType) ImmutableMap(com.google.common.collect.ImmutableMap) Call(io.trino.spi.expression.Call) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Assignments(io.trino.sql.planner.plan.Assignments) ScalarStatsCalculator(io.trino.cost.ScalarStatsCalculator) Collectors(java.util.stream.Collectors) SchemaTableName(io.trino.spi.connector.SchemaTableName) List(java.util.List) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) TestingSession.testSessionBuilder(io.trino.testing.TestingSession.testSessionBuilder) PlanMatchPattern.anyTree(io.trino.sql.planner.assertions.PlanMatchPattern.anyTree) BIGINT(io.trino.spi.type.BigintType.BIGINT) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) SymbolReference(io.trino.sql.tree.SymbolReference) Assignment(io.trino.spi.connector.Assignment) Optional(java.util.Optional) Expression(io.trino.sql.tree.Expression) RowType.field(io.trino.spi.type.RowType.field) ConnectorTablePartitioning(io.trino.spi.connector.ConnectorTablePartitioning) ConnectorExpressionTranslator.translate(io.trino.sql.planner.ConnectorExpressionTranslator.translate) Session(io.trino.Session) PlannerContext(io.trino.sql.PlannerContext) TypeAnalyzer.createTestingTypeAnalyzer(io.trino.sql.planner.TypeAnalyzer.createTestingTypeAnalyzer) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) Type(io.trino.spi.type.Type) Variable(io.trino.spi.expression.Variable) Function(java.util.function.Function) SubscriptExpression(io.trino.sql.tree.SubscriptExpression) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) ImmutableList(com.google.common.collect.ImmutableList) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ColumnHandle(io.trino.spi.connector.ColumnHandle) Constant(io.trino.spi.expression.Constant) TypeProvider.viewOf(io.trino.sql.planner.TypeProvider.viewOf) Symbol(io.trino.sql.planner.Symbol) SymbolStatsEstimate(io.trino.cost.SymbolStatsEstimate) StringLiteral(io.trino.sql.tree.StringLiteral) RuleTester.defaultRuleTester(io.trino.sql.planner.iterative.rule.test.RuleTester.defaultRuleTester) FieldDereference(io.trino.spi.expression.FieldDereference) TestingTransactionHandle(io.trino.testing.TestingTransactionHandle) ConnectorSession(io.trino.spi.connector.ConnectorSession) RuleTester(io.trino.sql.planner.iterative.rule.test.RuleTester) TupleDomain(io.trino.spi.predicate.TupleDomain) QualifiedName(io.trino.sql.tree.QualifiedName) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) TableHandle(io.trino.metadata.TableHandle) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) TypeAnalyzer(io.trino.sql.planner.TypeAnalyzer) 
PlanMatchPattern.project(io.trino.sql.planner.assertions.PlanMatchPattern.project) TransactionId(io.trino.transaction.TransactionId) PlanMatchPattern.tableScan(io.trino.sql.planner.assertions.PlanMatchPattern.tableScan) MockConnectorFactory(io.trino.connector.MockConnectorFactory) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) ConnectorTablePartitioning(io.trino.spi.connector.ConnectorTablePartitioning) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
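
A hypothetical call site for createMockFactory might look like the following; the "col" name and the TpchColumnHandle are illustrative, chosen to match the varchar "col" column that the table properties above declare for partitioning.

// Illustrative only: one varchar column, no projection-pushdown override.
Map<String, ColumnHandle> assignments = ImmutableMap.of("col", new TpchColumnHandle("col", VARCHAR));
MockConnectorFactory factory = createMockFactory(assignments, Optional.empty());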

Example 8 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in the trino project by trinodb.

From the class TestPartialTopNWithPresortedInput, method createLocalQueryRunner:

@Override
protected LocalQueryRunner createLocalQueryRunner() {
    Session session = testSessionBuilder().setCatalog(MOCK_CATALOG).setSchema(TEST_SCHEMA).build();
    LocalQueryRunner queryRunner = LocalQueryRunner.builder(session).build();
    MockConnectorFactory mockFactory = MockConnectorFactory.builder().withGetTableProperties((connectorSession, handle) -> {
        MockConnectorTableHandle tableHandle = (MockConnectorTableHandle) handle;
        if (tableHandle.getTableName().equals(tableA)) {
            return new ConnectorTableProperties(TupleDomain.all(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableList.of(new SortingProperty<>(columnHandleA, ASC_NULLS_FIRST)));
        }
        throw new IllegalArgumentException();
    }).withGetColumns(schemaTableName -> {
        if (schemaTableName.equals(tableA)) {
            return ImmutableList.of(new ColumnMetadata(columnNameA, VARCHAR), new ColumnMetadata(columnNameB, VARCHAR));
        }
        throw new IllegalArgumentException();
    }).build();
    queryRunner.createCatalog(MOCK_CATALOG, mockFactory, ImmutableMap.of());
    return queryRunner;
}
Also used : OPTIMIZED_AND_VALIDATED(io.trino.sql.planner.LogicalPlanner.Stage.OPTIMIZED_AND_VALIDATED) PlanMatchPattern(io.trino.sql.planner.assertions.PlanMatchPattern) Test(org.testng.annotations.Test) PARTIAL(io.trino.sql.planner.plan.TopNNode.Step.PARTIAL) PlanMatchPattern.limit(io.trino.sql.planner.assertions.PlanMatchPattern.limit) MockConnectorFactory(io.trino.connector.MockConnectorFactory) LongLiteral(io.trino.sql.tree.LongLiteral) PlanMatchPattern.exchange(io.trino.sql.planner.assertions.PlanMatchPattern.exchange) LOCAL(io.trino.sql.planner.plan.ExchangeNode.Scope.LOCAL) REPARTITION(io.trino.sql.planner.plan.ExchangeNode.Type.REPARTITION) ASCENDING(io.trino.sql.tree.SortItem.Ordering.ASCENDING) ImmutableMap(com.google.common.collect.ImmutableMap) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) PlanMatchPattern.values(io.trino.sql.planner.assertions.PlanMatchPattern.values) SchemaTableName(io.trino.spi.connector.SchemaTableName) SortingProperty(io.trino.spi.connector.SortingProperty) ASC_NULLS_FIRST(io.trino.spi.connector.SortOrder.ASC_NULLS_FIRST) FIRST(io.trino.sql.tree.SortItem.NullOrdering.FIRST) List(java.util.List) ASC_NULLS_LAST(io.trino.spi.connector.SortOrder.ASC_NULLS_LAST) TestingSession.testSessionBuilder(io.trino.testing.TestingSession.testSessionBuilder) PlanMatchPattern.anyTree(io.trino.sql.planner.assertions.PlanMatchPattern.anyTree) GATHER(io.trino.sql.planner.plan.ExchangeNode.Type.GATHER) Optional(java.util.Optional) PlanMatchPattern.output(io.trino.sql.planner.assertions.PlanMatchPattern.output) Session(io.trino.Session) PlannerContext(io.trino.sql.PlannerContext) ValidateLimitWithPresortedInput(io.trino.sql.planner.sanity.ValidateLimitWithPresortedInput) TypeAnalyzer.createTestingTypeAnalyzer(io.trino.sql.planner.TypeAnalyzer.createTestingTypeAnalyzer) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) PlanMatchPattern.window(io.trino.sql.planner.assertions.PlanMatchPattern.window) PlanAssert(io.trino.sql.planner.assertions.PlanAssert) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) ImmutableList(com.google.common.collect.ImmutableList) PlanMatchPattern.sort(io.trino.sql.planner.assertions.PlanMatchPattern.sort) LocalQueryRunner(io.trino.testing.LocalQueryRunner) ColumnHandle(io.trino.spi.connector.ColumnHandle) BasePlanTest(io.trino.sql.planner.assertions.BasePlanTest) LAST(io.trino.sql.tree.SortItem.NullOrdering.LAST) Language(org.intellij.lang.annotations.Language) MockConnectorColumnHandle(io.trino.connector.MockConnectorColumnHandle) FINAL(io.trino.sql.planner.plan.TopNNode.Step.FINAL) PlanMatchPattern.topN(io.trino.sql.planner.assertions.PlanMatchPattern.topN) TupleDomain(io.trino.spi.predicate.TupleDomain) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) REMOTE(io.trino.sql.planner.plan.ExchangeNode.Scope.REMOTE) WarningCollector(io.trino.execution.warnings.WarningCollector) Plan(io.trino.sql.planner.Plan) PlanMatchPattern.tableScan(io.trino.sql.planner.assertions.PlanMatchPattern.tableScan) MockConnectorFactory(io.trino.connector.MockConnectorFactory) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) SortingProperty(io.trino.spi.connector.SortingProperty) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) LocalQueryRunner(io.trino.testing.LocalQueryRunner) Session(io.trino.Session)
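
The SortingProperty passed as a local property is what advertises the mock table as already sorted on columnHandleA, which is the precondition these partial TopN plans are tested against. A minimal sketch of declaring such an ordering, using a hypothetical column handle:

// Hypothetical handle; ASC_NULLS_FIRST is io.trino.spi.connector.SortOrder.ASC_NULLS_FIRST.
ColumnHandle sortedColumn = new MockConnectorColumnHandle("col_a", VARCHAR);
ConnectorTableProperties presorted = new ConnectorTableProperties(
        TupleDomain.all(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        ImmutableList.of(new SortingProperty<>(sortedColumn, ASC_NULLS_FIRST)));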

Example 9 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in the trino project by trinodb.

From the class TestValidateLimitWithPresortedInput, method createLocalQueryRunner:

@Override
protected LocalQueryRunner createLocalQueryRunner() {
    Session session = testSessionBuilder().setCatalog(MOCK_CATALOG).setSchema(TEST_SCHEMA).build();
    LocalQueryRunner queryRunner = LocalQueryRunner.builder(session).build();
    MockConnectorFactory mockFactory = MockConnectorFactory.builder().withGetTableProperties((connectorSession, handle) -> {
        MockConnectorTableHandle tableHandle = (MockConnectorTableHandle) handle;
        if (tableHandle.getTableName().equals(MOCK_TABLE_NAME)) {
            return new ConnectorTableProperties(TupleDomain.all(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableList.of(new SortingProperty<>(COLUMN_HANDLE_A, ASC_NULLS_FIRST), new SortingProperty<>(COLUMN_HANDLE_C, ASC_NULLS_FIRST)));
        }
        throw new IllegalArgumentException();
    }).withGetColumns(schemaTableName -> {
        if (schemaTableName.equals(MOCK_TABLE_NAME)) {
            return ImmutableList.of(new ColumnMetadata(COLUMN_NAME_A, VARCHAR), new ColumnMetadata(COLUMN_NAME_B, VARCHAR), new ColumnMetadata(COLUMN_NAME_C, VARCHAR));
        }
        throw new IllegalArgumentException();
    }).build();
    queryRunner.createCatalog(MOCK_CATALOG, mockFactory, ImmutableMap.of());
    return queryRunner;
}
Also used : TypeAnalyzer.createTestingTypeAnalyzer(io.trino.sql.planner.TypeAnalyzer.createTestingTypeAnalyzer) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) Test(org.testng.annotations.Test) Function(java.util.function.Function) PlanNode(io.trino.sql.planner.plan.PlanNode) CatalogName(io.trino.connector.CatalogName) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) ImmutableList(com.google.common.collect.ImmutableList) MockConnectorFactory(io.trino.connector.MockConnectorFactory) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) LongLiteral(io.trino.sql.tree.LongLiteral) LocalQueryRunner(io.trino.testing.LocalQueryRunner) PlanBuilder(io.trino.sql.planner.iterative.rule.test.PlanBuilder) ColumnHandle(io.trino.spi.connector.ColumnHandle) BasePlanTest(io.trino.sql.planner.assertions.BasePlanTest) VerifyException(com.google.common.base.VerifyException) ImmutableMap(com.google.common.collect.ImmutableMap) MockConnectorColumnHandle(io.trino.connector.MockConnectorColumnHandle) TestingTransactionHandle(io.trino.testing.TestingTransactionHandle) TupleDomain(io.trino.spi.predicate.TupleDomain) SchemaTableName(io.trino.spi.connector.SchemaTableName) SortingProperty(io.trino.spi.connector.SortingProperty) ASC_NULLS_FIRST(io.trino.spi.connector.SortOrder.ASC_NULLS_FIRST) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) TestingSession.testSessionBuilder(io.trino.testing.TestingSession.testSessionBuilder) TableHandle(io.trino.metadata.TableHandle) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) TypeAnalyzer(io.trino.sql.planner.TypeAnalyzer) BIGINT(io.trino.spi.type.BigintType.BIGINT) WarningCollector(io.trino.execution.warnings.WarningCollector) TypeProvider(io.trino.sql.planner.TypeProvider) Optional(java.util.Optional) PlanBuilder.expression(io.trino.sql.planner.iterative.rule.test.PlanBuilder.expression) PlanNodeIdAllocator(io.trino.sql.planner.PlanNodeIdAllocator) Session(io.trino.Session) MockConnectorFactory(io.trino.connector.MockConnectorFactory) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) SortingProperty(io.trino.spi.connector.SortingProperty) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) LocalQueryRunner(io.trino.testing.LocalQueryRunner) Session(io.trino.Session)

Example 10 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in the trino project by trinodb.

From the class KuduMetadata, method getTableProperties:

@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table) {
    KuduTableHandle handle = (KuduTableHandle) table;
    KuduTable kuduTable = handle.getTable(clientSession);
    Optional<ConnectorTablePartitioning> tablePartitioning = Optional.empty();
    Optional<Set<ColumnHandle>> partitioningColumns = Optional.empty();
    List<LocalProperty<ColumnHandle>> localProperties = ImmutableList.of();
    if (isKuduGroupedExecutionEnabled(session) && isTableSupportGroupedExecution(kuduTable)) {
        Map<String, ColumnHandle> columnMap = getColumnHandles(session, handle);
        List<Integer> bucketColumnIds = getBucketColumnIds(kuduTable);
        List<ColumnHandle> bucketColumns = getSpecifyColumns(kuduTable.getSchema(), bucketColumnIds, columnMap);
        Optional<List<KuduRangePartition>> kuduRangePartitions = getKuduRangePartitions(kuduTable);
        tablePartitioning = Optional.of(new ConnectorTablePartitioning(
                new KuduPartitioningHandle(
                        handle.getSchemaTableName().getSchemaName(),
                        handle.getSchemaTableName().getTableName(),
                        handle.getBucketCount().orElse(0),
                        bucketColumnIds,
                        bucketColumns.stream().map(KuduColumnHandle.class::cast).map(KuduColumnHandle::getType).collect(Collectors.toList()),
                        kuduRangePartitions),
                bucketColumns));
        partitioningColumns = Optional.of(ImmutableSet.copyOf(bucketColumns));
    }
    return new ConnectorTableProperties(handle.getConstraint(), tablePartitioning, partitioningColumns, Optional.empty(), localProperties);
}
Also used : ColumnHandle(io.trino.spi.connector.ColumnHandle) ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) KuduTable(org.apache.kudu.client.KuduTable) ConnectorTablePartitioning(io.trino.spi.connector.ConnectorTablePartitioning) LocalProperty(io.trino.spi.connector.LocalProperty) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
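
When Kudu grouped execution is disabled, or the table does not support it, the branch above is skipped and the three Optionals stay empty, so the method reports only the constraint already pushed into the handle. Spelled out, that fallback return is equivalent to:

// Fallback path of getTableProperties: constraint only, no partitioning or local properties.
return new ConnectorTableProperties(
        handle.getConstraint(),  // TupleDomain<ColumnHandle> from earlier predicate pushdown
        Optional.empty(),        // no table partitioning
        Optional.empty(),        // no partitioning columns
        Optional.empty(),        // no discrete predicates
        ImmutableList.of());     // no local properties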

Aggregations

ConnectorTableProperties (io.trino.spi.connector.ConnectorTableProperties): 16
ColumnHandle (io.trino.spi.connector.ColumnHandle): 13
ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle): 10
ImmutableList (com.google.common.collect.ImmutableList): 8
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 7
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 7
ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata): 7
ConnectorSession (io.trino.spi.connector.ConnectorSession): 7
SortingProperty (io.trino.spi.connector.SortingProperty): 7
SchemaTableName (io.trino.spi.connector.SchemaTableName): 6
Test (org.testng.annotations.Test): 6
ImmutableMap (com.google.common.collect.ImmutableMap): 5
Constraint (io.trino.spi.connector.Constraint): 5
TupleDomain (io.trino.spi.predicate.TupleDomain): 5
List (java.util.List): 5
Optional (java.util.Optional): 5
ImmutableSet (com.google.common.collect.ImmutableSet): 4
Slice (io.airlift.slice.Slice): 4
HiveColumnHandle.bucketColumnHandle (io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle): 4
ConnectorOutputTableHandle (io.trino.spi.connector.ConnectorOutputTableHandle): 4