
Example 11 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in project trino by trinodb.

From the class IcebergMetadata, method getTableProperties.

@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle tableHandle) {
    IcebergTableHandle table = (IcebergTableHandle) tableHandle;
    if (table.getSnapshotId().isEmpty()) {
        // A table without a snapshot has no data, so return TupleDomain.none() as the predicate
        return new ConnectorTableProperties(TupleDomain.none(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableList.of());
    }
    Table icebergTable = catalog.loadTable(session, table.getSchemaTableName());
    // Extract identity partition fields that are present in all partition specs, for creating the discrete predicates.
    Set<Integer> partitionSourceIds = identityPartitionColumnsInAllSpecs(icebergTable);
    TupleDomain<IcebergColumnHandle> enforcedPredicate = table.getEnforcedPredicate();
    DiscretePredicates discretePredicates = null;
    if (!partitionSourceIds.isEmpty()) {
        // Extract identity partition columns
        Map<Integer, IcebergColumnHandle> columns = getColumns(icebergTable.schema(), typeManager).stream()
                .filter(column -> partitionSourceIds.contains(column.getId()))
                .collect(toImmutableMap(IcebergColumnHandle::getId, Function.identity()));
        Supplier<List<FileScanTask>> lazyFiles = Suppliers.memoize(() -> {
            TableScan tableScan = icebergTable.newScan()
                    .useSnapshot(table.getSnapshotId().get())
                    .filter(toIcebergExpression(enforcedPredicate))
                    .includeColumnStats();
            try (CloseableIterable<FileScanTask> iterator = tableScan.planFiles()) {
                return ImmutableList.copyOf(iterator);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
        Iterable<FileScanTask> files = () -> lazyFiles.get().iterator();
        Iterable<TupleDomain<ColumnHandle>> discreteTupleDomain = Iterables.transform(files, fileScan -> {
            // Extract partition values in the data file
            Map<Integer, Optional<String>> partitionColumnValueStrings = getPartitionKeys(fileScan);
            Map<ColumnHandle, NullableValue> partitionValues = partitionSourceIds.stream()
                    .filter(partitionColumnValueStrings::containsKey)
                    .collect(toImmutableMap(columns::get, columnId -> {
                        IcebergColumnHandle column = columns.get(columnId);
                        Object prestoValue = deserializePartitionValue(
                                column.getType(),
                                partitionColumnValueStrings.get(columnId).orElse(null),
                                column.getName());
                        return NullableValue.of(column.getType(), prestoValue);
                    }));
            return TupleDomain.fromFixedValues(partitionValues);
        });
        discretePredicates = new DiscretePredicates(
                columns.values().stream()
                        .map(ColumnHandle.class::cast)
                        .collect(toImmutableList()),
                discreteTupleDomain);
    }
    return new ConnectorTableProperties(
            // TODO: derive the discrete predicates without iterating
            // over all tableScan.planFiles() and caching partition values in table handle.
            enforcedPredicate.transformKeys(ColumnHandle.class::cast),
            // TODO: implement table partitioning
            Optional.empty(),
            Optional.empty(),
            Optional.ofNullable(discretePredicates),
            ImmutableList.of());
}
Also used : UncheckedIOException(java.io.UncheckedIOException) Collections.singletonList(java.util.Collections.singletonList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) TableScan(org.apache.iceberg.TableScan) ColumnHandle(io.trino.spi.connector.ColumnHandle) Table(org.apache.iceberg.Table) ClassLoaderSafeSystemTable(io.trino.plugin.base.classloader.ClassLoaderSafeSystemTable) SystemTable(io.trino.spi.connector.SystemTable) Optional(java.util.Optional) DiscretePredicates(io.trino.spi.connector.DiscretePredicates) NullableValue(io.trino.spi.predicate.NullableValue) IOException(java.io.IOException) TupleDomain(io.trino.spi.predicate.TupleDomain) FileScanTask(org.apache.iceberg.FileScanTask) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
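
The memoized supplier is the detail worth noting in this example: planFiles() is expensive, so the connector defers planning until the discrete predicates are actually consumed, and the Iterable wrapper hands out fresh iterators over the one cached result. A minimal, self-contained sketch of that pattern, using only Guava (the class name LazyPlanningSketch and the file-name strings are illustrative, not from the Trino source):

import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;

import java.util.List;
import java.util.function.Supplier;

public class LazyPlanningSketch {
    public static void main(String[] args) {
        // Expensive planning step; Suppliers.memoize guarantees it runs at most once.
        Supplier<List<String>> lazyFiles = Suppliers.memoize(() -> {
            System.out.println("planning files (printed once)");
            return ImmutableList.of("data-file-1", "data-file-2");
        });
        // A fresh iterator per call, always over the same memoized list.
        Iterable<String> files = () -> lazyFiles.get().iterator();
        files.forEach(System.out::println); // triggers planning
        files.forEach(System.out::println); // reuses the cached result
    }
}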

Example 12 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in project trino by trinodb.

From the class AbstractTestHive, method testGetPartitions.

@Test
public void testGetPartitions() throws Exception {
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tablePartitionFormat);
        tableHandle = applyFilter(metadata, tableHandle, Constraint.alwaysTrue());
        ConnectorTableProperties properties = metadata.getTableProperties(newSession(), tableHandle);
        assertExpectedTableProperties(properties, tablePartitionFormatProperties);
        assertExpectedPartitions(tableHandle, tablePartitionFormatPartitions);
    }
}
Also used : ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Test(org.testng.annotations.Test)
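
The applyFilter helper used by this test is defined elsewhere in AbstractTestHive. As a rough sketch of what such a helper does against the SPI, the following pushes a constraint into the connector and keeps the original handle when the connector does not accept it (the class name FilterHelper and the single-pass behavior are assumptions; the real helper may apply the filter repeatedly until it reaches a fixpoint):

import io.trino.spi.connector.ConnectorMetadata;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorTableHandle;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.ConstraintApplicationResult;

final class FilterHelper {
    private FilterHelper() {}

    // Push the constraint into the connector; fall back to the original handle if nothing was applied.
    static ConnectorTableHandle applyFilter(ConnectorMetadata metadata, ConnectorSession session, ConnectorTableHandle handle, Constraint constraint) {
        return metadata.applyFilter(session, handle, constraint)
                .map(ConstraintApplicationResult::getHandle)
                .orElse(handle);
    }
}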

Example 13 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in project trino by trinodb.

From the class AbstractTestHive, method doTestBucketedSortedTableEvolution.

private void doTestBucketedSortedTableEvolution(SchemaTableName tableName) throws Exception {
    int rowCount = 100;
    // Create table and populate it with 3 partitions with different sort orders but same bucketing
    createEmptyTable(
            tableName,
            ORC,
            ImmutableList.of(new Column("id", HIVE_LONG, Optional.empty()), new Column("name", HIVE_STRING, Optional.empty())),
            ImmutableList.of(new Column("pk", HIVE_STRING, Optional.empty())),
            Optional.of(new HiveBucketProperty(
                    ImmutableList.of("id"),
                    BUCKETING_V1,
                    4,
                    ImmutableList.of(new SortingColumn("id", ASCENDING), new SortingColumn("name", ASCENDING)))));
    // write a 4-bucket partition sorted by id, name
    MaterializedResult.Builder sortedByIdNameBuilder = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR, VARCHAR);
    IntStream.range(0, rowCount).forEach(i -> sortedByIdNameBuilder.row((long) i, String.valueOf(i), "sorted_by_id_name"));
    insertData(tableName, sortedByIdNameBuilder.build());
    // write a 4-bucket partition sorted by name
    alterBucketProperty(tableName, Optional.of(new HiveBucketProperty(ImmutableList.of("id"), BUCKETING_V1, 4, ImmutableList.of(new SortingColumn("name", ASCENDING)))));
    MaterializedResult.Builder sortedByNameBuilder = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR, VARCHAR);
    IntStream.range(0, rowCount).forEach(i -> sortedByNameBuilder.row((long) i, String.valueOf(i), "sorted_by_name"));
    insertData(tableName, sortedByNameBuilder.build());
    // write a 4-bucket partition sorted by id
    alterBucketProperty(tableName, Optional.of(new HiveBucketProperty(ImmutableList.of("id"), BUCKETING_V1, 4, ImmutableList.of(new SortingColumn("id", ASCENDING)))));
    MaterializedResult.Builder sortedByIdBuilder = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR, VARCHAR);
    IntStream.range(0, rowCount).forEach(i -> sortedByIdBuilder.row((long) i, String.valueOf(i), "sorted_by_id"));
    insertData(tableName, sortedByIdBuilder.build());
    ConnectorTableHandle tableHandle;
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        metadata.beginQuery(session);
        tableHandle = getTableHandle(metadata, tableName);
        // read entire table
        List<ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle).values().stream().collect(toImmutableList());
        MaterializedResult result = readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.empty(), Optional.empty());
        assertEquals(result.getRowCount(), 300);
    }
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession(ImmutableMap.of("propagate_table_scan_sorting_properties", true));
        metadata.beginQuery(session);
        Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle);
        // verify local sorting property
        ConnectorTableProperties properties = metadata.getTableProperties(session, tableHandle);
        assertEquals(properties.getLocalProperties(), ImmutableList.of(new SortingProperty<>(columnHandles.get("id"), ASC_NULLS_FIRST)));
        // reading the entire table should fail with an exception
        assertThatThrownBy(() -> readTable(transaction, tableHandle, ImmutableList.copyOf(columnHandles.values()), session, TupleDomain.all(), OptionalInt.empty(), Optional.empty()))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Hive table (%s) sorting by [id] is not compatible with partition (pk=sorted_by_name) sorting by [name]." +
                        " This restriction can be avoided by disabling propagate_table_scan_sorting_properties.", tableName);
        // read only the partitions whose sorting is compatible with the table sorting
        MaterializedResult result = readTable(
                transaction,
                tableHandle,
                ImmutableList.copyOf(columnHandles.values()),
                session,
                TupleDomain.withColumnDomains(ImmutableMap.of(
                        columnHandles.get("pk"),
                        Domain.create(ValueSet.of(VARCHAR, utf8Slice("sorted_by_id_name"), utf8Slice("sorted_by_id")), false))),
                OptionalInt.empty(),
                Optional.empty());
        assertEquals(result.getRowCount(), 200);
    }
}
Also used : HiveColumnHandle.bucketColumnHandle(io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle) ColumnHandle(io.trino.spi.connector.ColumnHandle) SortingColumn(io.trino.plugin.hive.metastore.SortingColumn) SortingProperty(io.trino.spi.connector.SortingProperty) Constraint(io.trino.spi.connector.Constraint) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) HiveColumnHandle.createBaseColumn(io.trino.plugin.hive.HiveColumnHandle.createBaseColumn) Column(io.trino.plugin.hive.metastore.Column) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) TrinoException(io.trino.spi.TrinoException) ConnectorSession(io.trino.spi.connector.ConnectorSession) TestingConnectorSession(io.trino.testing.TestingConnectorSession) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) MaterializedResult(io.trino.testing.MaterializedResult) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
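
The local-property assertion above compares against SortingProperty instances built from the connector's column handles. A minimal sketch of constructing one (TestingColumnHandle is a hypothetical stand-in; ColumnHandle is just a marker interface in the SPI, so tests supply their own implementations):

import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.SortOrder;
import io.trino.spi.connector.SortingProperty;

public class SortingPropertySketch {
    // Hypothetical handle; real connectors define their own ColumnHandle types.
    record TestingColumnHandle(String name) implements ColumnHandle {}

    public static void main(String[] args) {
        ColumnHandle id = new TestingColumnHandle("id");
        // Equivalent in shape to the ASC_NULLS_FIRST property asserted in the test above.
        SortingProperty<ColumnHandle> sortedById = new SortingProperty<>(id, SortOrder.ASC_NULLS_FIRST);
        System.out.println(sortedById.getColumn() + " " + sortedById.getOrder());
    }
}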

Example 14 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in project trino by trinodb.

From the class MongoMetadata, method getTableProperties.

@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table) {
    MongoTableHandle tableHandle = (MongoTableHandle) table;
    // TODO: sharding key
    Optional<Set<ColumnHandle>> partitioningColumns = Optional.empty();
    ImmutableList.Builder<LocalProperty<ColumnHandle>> localProperties = ImmutableList.builder();
    MongoTable tableInfo = mongoSession.getTable(tableHandle.getSchemaTableName());
    Map<String, ColumnHandle> columns = getColumnHandles(session, tableHandle);
    for (MongoIndex index : tableInfo.getIndexes()) {
        for (MongodbIndexKey key : index.getKeys()) {
            if (key.getSortOrder().isEmpty()) {
                continue;
            }
            if (columns.get(key.getName()) != null) {
                localProperties.add(new SortingProperty<>(columns.get(key.getName()), key.getSortOrder().get()));
            }
        }
    }
    return new ConnectorTableProperties(TupleDomain.all(), Optional.empty(), partitioningColumns, Optional.empty(), localProperties.build());
}
Also used : ColumnHandle(io.trino.spi.connector.ColumnHandle) Set(java.util.Set) ImmutableList(com.google.common.collect.ImmutableList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) MongodbIndexKey(io.trino.plugin.mongodb.MongoIndex.MongodbIndexKey) LocalProperty(io.trino.spi.connector.LocalProperty) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
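
Note the contrast with Example 11: MongoMetadata reports TupleDomain.all(), meaning the connector enforces no predicate at all, while the empty-snapshot Iceberg path reports TupleDomain.none(), meaning no row can possibly match. A short illustrative sketch of the distinction:

import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.predicate.TupleDomain;

public class TupleDomainSketch {
    public static void main(String[] args) {
        TupleDomain<ColumnHandle> unconstrained = TupleDomain.all();   // every row may match
        TupleDomain<ColumnHandle> unsatisfiable = TupleDomain.none();  // no row can match
        System.out.println(unconstrained.isAll());   // true
        System.out.println(unsatisfiable.isNone());  // true
    }
}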

Example 15 with ConnectorTableProperties

Use of io.trino.spi.connector.ConnectorTableProperties in project trino by trinodb.

From the class PhoenixMetadata, method getTableProperties.

@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table) {
    JdbcTableHandle tableHandle = (JdbcTableHandle) table;
    List<LocalProperty<ColumnHandle>> sortingProperties = tableHandle.getSortOrder()
            .map(properties -> properties.stream()
                    .map(item -> (LocalProperty<ColumnHandle>) new SortingProperty<ColumnHandle>(item.getColumn(), item.getSortOrder()))
                    .collect(toImmutableList()))
            .orElse(ImmutableList.of());
    return new ConnectorTableProperties(TupleDomain.all(), Optional.empty(), Optional.empty(), Optional.empty(), sortingProperties);
}
Also used : DefaultJdbcMetadata(io.trino.plugin.jdbc.DefaultJdbcMetadata) ColumnHandle(io.trino.spi.connector.ColumnHandle) JdbcColumnHandle(io.trino.plugin.jdbc.JdbcColumnHandle) LocalProperty(io.trino.spi.connector.LocalProperty) SortingProperty(io.trino.spi.connector.SortingProperty) JdbcTableHandle(io.trino.plugin.jdbc.JdbcTableHandle) TupleDomain(io.trino.spi.predicate.TupleDomain) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties)
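
All five examples funnel into the same five-argument ConnectorTableProperties constructor. A minimal sketch labeling each position with the neutral defaults used above (the inline comments name the role of each argument):

import com.google.common.collect.ImmutableList;
import io.trino.spi.connector.ConnectorTableProperties;
import io.trino.spi.predicate.TupleDomain;

import java.util.Optional;

public class TablePropertiesSketch {
    public static void main(String[] args) {
        ConnectorTableProperties properties = new ConnectorTableProperties(
                TupleDomain.all(),    // predicate the connector guarantees to enforce
                Optional.empty(),     // table partitioning
                Optional.empty(),     // stream partitioning columns
                Optional.empty(),     // discrete predicates
                ImmutableList.of());  // local properties, e.g. sort order
        System.out.println(properties.getPredicate().isAll()); // true
    }
}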

Aggregations

ConnectorTableProperties (io.trino.spi.connector.ConnectorTableProperties): 16
ColumnHandle (io.trino.spi.connector.ColumnHandle): 13
ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle): 10
ImmutableList (com.google.common.collect.ImmutableList): 8
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 7
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 7
ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata): 7
ConnectorSession (io.trino.spi.connector.ConnectorSession): 7
SortingProperty (io.trino.spi.connector.SortingProperty): 7
SchemaTableName (io.trino.spi.connector.SchemaTableName): 6
Test (org.testng.annotations.Test): 6
ImmutableMap (com.google.common.collect.ImmutableMap): 5
Constraint (io.trino.spi.connector.Constraint): 5
TupleDomain (io.trino.spi.predicate.TupleDomain): 5
List (java.util.List): 5
Optional (java.util.Optional): 5
ImmutableSet (com.google.common.collect.ImmutableSet): 4
Slice (io.airlift.slice.Slice): 4
HiveColumnHandle.bucketColumnHandle (io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle): 4
ConnectorOutputTableHandle (io.trino.spi.connector.ConnectorOutputTableHandle): 4