
Example 6 with ConstraintApplicationResult

Use of io.trino.spi.connector.ConstraintApplicationResult in project trino by trinodb.

The class IcebergMetadata, method applyFilter.

@Override
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle handle, Constraint constraint) {
    IcebergTableHandle table = (IcebergTableHandle) handle;
    Table icebergTable = catalog.loadTable(session, table.getSchemaTableName());
    Set<Integer> partitionSourceIds = identityPartitionColumnsInAllSpecs(icebergTable);
    BiPredicate<IcebergColumnHandle, Domain> isIdentityPartition = (column, domain) -> partitionSourceIds.contains(column.getId());
    TupleDomain<IcebergColumnHandle> newEnforcedConstraint = constraint.getSummary()
            .transformKeys(IcebergColumnHandle.class::cast)
            .filter(isIdentityPartition)
            .intersect(table.getEnforcedPredicate());
    TupleDomain<IcebergColumnHandle> remainingConstraint = constraint.getSummary()
            .transformKeys(IcebergColumnHandle.class::cast)
            .filter(isIdentityPartition.negate());
    TupleDomain<IcebergColumnHandle> newUnenforcedConstraint = remainingConstraint
            .filter((columnHandle, predicate) -> !isStructuralType(columnHandle.getType()))
            .intersect(table.getUnenforcedPredicate());
    if (newEnforcedConstraint.equals(table.getEnforcedPredicate()) && newUnenforcedConstraint.equals(table.getUnenforcedPredicate())) {
        return Optional.empty();
    }
    return Optional.of(new ConstraintApplicationResult<>(
            new IcebergTableHandle(
                    table.getSchemaName(),
                    table.getTableName(),
                    table.getTableType(),
                    table.getSnapshotId(),
                    newUnenforcedConstraint,
                    newEnforcedConstraint,
                    table.getProjectedColumns(),
                    table.getNameMappingJson()),
            remainingConstraint.transformKeys(ColumnHandle.class::cast),
            false));
}
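
The method above follows the general ConnectorMetadata.applyFilter contract: return Optional.empty() when the constraint adds nothing the handle does not already enforce, otherwise return a new handle with the enforced part folded in, together with the filter the engine must still evaluate. Below is a minimal sketch of that shape, assuming a hypothetical ExampleTableHandle with getPredicate()/withPredicate() accessors; only the SPI types used in the examples above are real.

@Override
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle handle, Constraint constraint) {
    ExampleTableHandle table = (ExampleTableHandle) handle;
    // Fold the incoming summary into whatever predicate the handle already carries
    TupleDomain<ColumnHandle> newPredicate = constraint.getSummary().intersect(table.getPredicate());
    // Nothing new to enforce: report "no change" so the planner stops iterating
    if (newPredicate.equals(table.getPredicate())) {
        return Optional.empty();
    }
    // This sketch absorbs the whole summary, so nothing is left for the engine;
    // a real connector, like the Iceberg example above, would instead return the
    // part it cannot enforce as the remaining filter (second argument).
    return Optional.of(new ConstraintApplicationResult<>(
            table.withPredicate(newPredicate), // hypothetical copy-with method
            TupleDomain.all(),
            false));
}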

Example 7 with ConstraintApplicationResult

Use of io.trino.spi.connector.ConstraintApplicationResult in project trino by trinodb.

The class TestInformationSchemaMetadata, method testInformationSchemaPredicatePushdownForEmptyNames.

@Test
public void testInformationSchemaPredicatePushdownForEmptyNames() {
    TransactionId transactionId = transactionManager.beginTransaction(false);
    ConnectorSession session = createNewSession(transactionId);
    ConnectorMetadata metadata = new InformationSchemaMetadata("test_catalog", this.metadata);
    InformationSchemaColumnHandle tableSchemaColumn = new InformationSchemaColumnHandle("table_schema");
    InformationSchemaColumnHandle tableNameColumn = new InformationSchemaColumnHandle("table_name");
    ConnectorTableHandle tableHandle = metadata.getTableHandle(session, new SchemaTableName("information_schema", "tables"));
    // Empty schema name
    InformationSchemaTableHandle filtered = metadata.applyFilter(session, tableHandle,
            new Constraint(TupleDomain.withColumnDomains(ImmutableMap.of(tableSchemaColumn, Domain.singleValue(VARCHAR, Slices.utf8Slice(""))))))
            .map(ConstraintApplicationResult::getHandle)
            .map(InformationSchemaTableHandle.class::cast)
            .orElseThrow(AssertionError::new);
    // "" is a valid schema name and is (currently) valid for QualifiedTablePrefix
    assertEquals(filtered.getPrefixes(), ImmutableSet.of(new QualifiedTablePrefix("test_catalog", "")));
    // Empty table name
    filtered = metadata.applyFilter(session, tableHandle,
            new Constraint(TupleDomain.withColumnDomains(ImmutableMap.of(tableNameColumn, Domain.singleValue(VARCHAR, Slices.utf8Slice(""))))))
            .map(ConstraintApplicationResult::getHandle)
            .map(InformationSchemaTableHandle.class::cast)
            .orElseThrow(AssertionError::new);
    // "" is a valid table name and is (currently) valid for QualifiedTablePrefix
    // the filter is applied blindly to all visible schemas, so information_schema must be included
    assertEquals(filtered.getPrefixes(), ImmutableSet.of(
            new QualifiedTablePrefix("test_catalog", "test_schema", ""),
            new QualifiedTablePrefix("test_catalog", "information_schema", "")));
}
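
The assertions check the QualifiedTablePrefix set that the pushdown derives from the constraint. As a hypothetical illustration (not the actual InformationSchemaMetadata code), a single non-null value on the table_schema column can be turned into a schema-level prefix as sketched below, which is why the empty string ends up as ("test_catalog", "").

// Hypothetical helper: derive schema prefixes from a single-value VARCHAR domain.
private Set<QualifiedTablePrefix> schemaPrefixes(String catalogName, Domain schemaDomain) {
    if (!schemaDomain.isSingleValue()) {
        // the real implementation also handles value sets and falls back to all visible schemas
        return ImmutableSet.of(new QualifiedTablePrefix(catalogName));
    }
    String schema = ((Slice) schemaDomain.getSingleValue()).toStringUtf8();
    // "" is accepted here, producing the ("test_catalog", "") prefix asserted above
    return ImmutableSet.of(new QualifiedTablePrefix(catalogName, schema));
}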

Example 8 with ConstraintApplicationResult

Use of io.trino.spi.connector.ConstraintApplicationResult in project trino by trinodb.

The class TestJmxMetadata, method testApplyFilterWithConstraint.

@Test
public void testApplyFilterWithConstraint() {
    JmxTableHandle handle = metadata.getTableHandle(SESSION, new SchemaTableName(JMX_SCHEMA_NAME, "java.lang:*"));
    JmxColumnHandle nodeColumnHandle = new JmxColumnHandle("node", createUnboundedVarcharType());
    NullableValue nodeColumnValue = NullableValue.of(createUnboundedVarcharType(), utf8Slice(localNode.getNodeIdentifier()));
    JmxColumnHandle objectNameColumnHandle = new JmxColumnHandle("object_name", createUnboundedVarcharType());
    NullableValue objectNameColumnValue = NullableValue.of(createUnboundedVarcharType(), utf8Slice("trino.memory:type=MemoryPool,name=reserved"));
    TupleDomain<ColumnHandle> tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.of(nodeColumnHandle, nodeColumnValue, objectNameColumnHandle, objectNameColumnValue));
    Optional<ConstraintApplicationResult<ConnectorTableHandle>> result = metadata.applyFilter(SESSION, handle, new Constraint(tupleDomain));
    assertTrue(result.isPresent());
    assertEquals(result.get().getRemainingFilter(), TupleDomain.fromFixedValues(ImmutableMap.of(objectNameColumnHandle, objectNameColumnValue)));
    assertEquals(((JmxTableHandle) result.get().getHandle()).getNodeFilter(), TupleDomain.fromFixedValues(ImmutableMap.of(nodeColumnHandle, nodeColumnValue)));
}
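
The assertions show the constraint being split: the node domain is absorbed into the handle's node filter, while the object_name domain comes back as the remaining filter. A hedged sketch of that split, using the TupleDomain.filter idiom from the Iceberg example and the JmxTableHandle constructor shown in the next example; JmxColumnHandle.getColumnName() is an assumption here.

// Sketch only: partition the summary into the node column vs. everything else.
TupleDomain<ColumnHandle> summary = constraint.getSummary();
BiPredicate<ColumnHandle, Domain> isNodeColumn =
        (column, domain) -> "node".equals(((JmxColumnHandle) column).getColumnName());
// The part the connector can enforce itself (which nodes to contact)
TupleDomain<ColumnHandle> nodeFilter = summary.filter(isNodeColumn);
// The part the engine must still evaluate, e.g. object_name in the test above
TupleDomain<ColumnHandle> remainingFilter = summary.filter(isNodeColumn.negate());
return Optional.of(new ConstraintApplicationResult<>(
        new JmxTableHandle(handle.getTableName(), handle.getObjectNames(), handle.getColumnHandles(), handle.isLiveData(), nodeFilter),
        remainingFilter,
        false));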

Example 9 with ConstraintApplicationResult

Use of io.trino.spi.connector.ConstraintApplicationResult in project trino by trinodb.

The class TestJmxMetadata, method testApplyFilterWithSameConstraint.

@Test
public void testApplyFilterWithSameConstraint() {
    JmxTableHandle handle = metadata.getTableHandle(SESSION, new SchemaTableName(JMX_SCHEMA_NAME, "java.lang:*"));
    JmxColumnHandle columnHandle = new JmxColumnHandle("node", createUnboundedVarcharType());
    TupleDomain<ColumnHandle> nodeTupleDomain = TupleDomain.fromFixedValues(ImmutableMap.of(columnHandle, NullableValue.of(createUnboundedVarcharType(), utf8Slice(localNode.getNodeIdentifier()))));
    JmxTableHandle newTableHandle = new JmxTableHandle(handle.getTableName(), handle.getObjectNames(), handle.getColumnHandles(), handle.isLiveData(), nodeTupleDomain);
    Optional<ConstraintApplicationResult<ConnectorTableHandle>> result = metadata.applyFilter(SESSION, newTableHandle, new Constraint(nodeTupleDomain));
    assertFalse(result.isPresent());
}
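
This test, like the TupleDomain.all() case in the next example, relies on the same convention as the Iceberg example: when the constraint adds nothing beyond what the handle already enforces, applyFilter returns Optional.empty() so the planner stops re-applying the same filter. A sketch of the kind of guard involved, reusing getNodeFilter() from the previous test; the surrounding variables and the getColumnName() accessor are assumptions.

// Sketch only: the node part of the new constraint is already enforced by the handle.
TupleDomain<ColumnHandle> newNodeFilter = constraint.getSummary()
        .filter((column, domain) -> "node".equals(((JmxColumnHandle) column).getColumnName()))
        .intersect(handle.getNodeFilter());
if (newNodeFilter.equals(handle.getNodeFilter())) {
    return Optional.empty();
}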

Example 10 with ConstraintApplicationResult

Use of io.trino.spi.connector.ConstraintApplicationResult in project trino by trinodb.

The class TestJmxMetadata, method testApplyFilterWithoutConstraint.

@Test
public void testApplyFilterWithoutConstraint() {
    JmxTableHandle handle = metadata.getTableHandle(SESSION, new SchemaTableName(JMX_SCHEMA_NAME, "java.lang:*"));
    Optional<ConstraintApplicationResult<ConnectorTableHandle>> result = metadata.applyFilter(SESSION, handle, new Constraint(TupleDomain.all()));
    assertFalse(result.isPresent());
}
