Search in sources :

Example 71 with RowType

use of io.trino.spi.type.RowType in project trino by trinodb.

the class AvroColumnDecoder method serializeRow.

private static Block serializeRow(BlockBuilder parentBlockBuilder, Object value, Type type, String columnName) {
    if (value == null) {
        checkState(parentBlockBuilder != null, "parent block builder is null");
        parentBlockBuilder.appendNull();
        return null;
    }
    BlockBuilder blockBuilder;
    if (parentBlockBuilder != null) {
        blockBuilder = parentBlockBuilder;
    } else {
        blockBuilder = type.createBlockBuilder(null, 1);
    }
    BlockBuilder singleRowBuilder = blockBuilder.beginBlockEntry();
    GenericRecord record = (GenericRecord) value;
    List<Field> fields = ((RowType) type).getFields();
    for (Field field : fields) {
        checkState(field.getName().isPresent(), "field name not found");
        serializeObject(singleRowBuilder, record.get(field.getName().get()), field.getType(), columnName);
    }
    blockBuilder.closeEntry();
    if (parentBlockBuilder == null) {
        return blockBuilder.getObject(0, Block.class);
    }
    return null;
}
Also used : Field(io.trino.spi.type.RowType.Field) RowType(io.trino.spi.type.RowType) GenericRecord(org.apache.avro.generic.GenericRecord) BlockBuilder(io.trino.spi.block.BlockBuilder)
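
A minimal sketch of the field iteration that serializeRow performs, assuming trino-spi is on the classpath; a plain Map stands in for the Avro GenericRecord so the snippet is self-contained (that substitution and the field names are assumptions, not part of the decoder):

import io.trino.spi.type.RowType;
import io.trino.spi.type.RowType.Field;
import java.util.Map;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class RowFieldIterationSketch {
    public static void main(String[] args) {
        // row(id bigint, name varchar), analogous to the Avro record schema
        RowType rowType = RowType.rowType(
                RowType.field("id", BIGINT),
                RowType.field("name", VARCHAR));

        // Stand-in for the GenericRecord: field name -> value
        Map<String, Object> record = Map.of("id", 42L, "name", "alice");

        for (Field field : rowType.getFields()) {
            // Named fields are required, mirroring the checkState in serializeRow
            String name = field.getName().orElseThrow();
            System.out.printf("%s (%s) = %s%n", name, field.getType().getDisplayName(), record.get(name));
        }
    }
}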

Example 72 with RowType

use of io.trino.spi.type.RowType in project trino by trinodb.

the class AvroColumnDecoder method isSupportedType.

private boolean isSupportedType(Type type) {
    if (isSupportedPrimitive(type)) {
        return true;
    }
    if (type instanceof ArrayType) {
        checkArgument(type.getTypeParameters().size() == 1, "expecting exactly one type parameter for array");
        return isSupportedType(type.getTypeParameters().get(0));
    }
    if (type instanceof MapType) {
        List<Type> typeParameters = type.getTypeParameters();
        checkArgument(typeParameters.size() == 2, "expecting exactly two type parameters for map");
        checkArgument(typeParameters.get(0) instanceof VarcharType, "Unsupported column type '%s' for map key", typeParameters.get(0));
        return isSupportedType(type.getTypeParameters().get(1));
    }
    if (type instanceof RowType) {
        for (Type fieldType : type.getTypeParameters()) {
            if (!isSupportedType(fieldType)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
Also used : ArrayType(io.trino.spi.type.ArrayType) DoubleType(io.trino.spi.type.DoubleType) Type(io.trino.spi.type.Type) BooleanType(io.trino.spi.type.BooleanType) BigintType(io.trino.spi.type.BigintType) VarcharType(io.trino.spi.type.VarcharType) TinyintType(io.trino.spi.type.TinyintType) SmallintType(io.trino.spi.type.SmallintType) RowType(io.trino.spi.type.RowType) IntegerType(io.trino.spi.type.IntegerType) MapType(io.trino.spi.type.MapType) ArrayType(io.trino.spi.type.ArrayType) VarbinaryType(io.trino.spi.type.VarbinaryType) RealType(io.trino.spi.type.RealType) VarcharType(io.trino.spi.type.VarcharType) RowType(io.trino.spi.type.RowType) MapType(io.trino.spi.type.MapType)
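
A small sketch of the type-parameter shapes that isSupportedType relies on, assuming trino-spi is on the classpath; note that constructing a MapType directly needs a TypeOperators instance:

import io.trino.spi.type.ArrayType;
import io.trino.spi.type.MapType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeOperators;
import java.util.List;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class TypeParameterShapes {
    public static void main(String[] args) {
        // Array: exactly one type parameter (the element type)
        Type array = new ArrayType(BIGINT);
        // Map: exactly two type parameters (key, then value)
        Type map = new MapType(VARCHAR, BIGINT, new TypeOperators());
        // Row: one type parameter per field
        Type row = RowType.rowType(RowType.field("a", BIGINT), RowType.field("b", VARCHAR));

        for (Type type : List.of(array, map, row)) {
            System.out.println(type.getDisplayName() + " -> " + type.getTypeParameters().size() + " type parameter(s)");
        }
    }
}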

Example 73 with RowType

use of io.trino.spi.type.RowType in project trino by trinodb.

the class DeltaLakeMetadata method getUpdateRowIdColumnHandle.

// The rowId is a RowType where the first field is a BigInt containing the row number. The second field contains the values of any columns that were unmodified.
// They are needed to write the complete row after modifications in DeltaLakeUpdatablePageSource. If there are no unmodified columns, the rowId only has one field.
@Override
public ColumnHandle getUpdateRowIdColumnHandle(ConnectorSession session, ConnectorTableHandle tableHandle, List<ColumnHandle> updatedColumns) {
    DeltaLakeTableHandle handle = (DeltaLakeTableHandle) tableHandle;
    List<DeltaLakeColumnHandle> unmodifiedColumns = getUnmodifiedColumns(handle, updatedColumns);
    Type rowIdType;
    if (unmodifiedColumns.isEmpty()) {
        rowIdType = RowType.rowType(RowType.field(BIGINT));
    } else {
        List<RowType.Field> unmodifiedColumnFields = unmodifiedColumns.stream().map(columnMetadata -> RowType.field(columnMetadata.getName(), columnMetadata.getType())).collect(toImmutableList());
        rowIdType = RowType.rowType(RowType.field(BIGINT), RowType.field(RowType.from(unmodifiedColumnFields)));
    }
    return new DeltaLakeColumnHandle(ROW_ID_COLUMN_NAME, rowIdType, SYNTHESIZED);
}
Also used : TransactionLogUtil.getTransactionLogDir(io.trino.plugin.deltalake.transactionlog.TransactionLogUtil.getTransactionLogDir) FileSystem(org.apache.hadoop.fs.FileSystem) TableSnapshot(io.trino.plugin.deltalake.transactionlog.TableSnapshot) ColumnStatisticMetadata(io.trino.spi.statistics.ColumnStatisticMetadata) FileStatus(org.apache.hadoop.fs.FileStatus) DeltaLakeSchemaSupport.validateType(io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.validateType) TypeUtils.isFloatingPointNaN(io.trino.spi.type.TypeUtils.isFloatingPointNaN) RemoveFileEntry(io.trino.plugin.deltalake.transactionlog.RemoveFileEntry) ConnectorTableExecuteHandle(io.trino.spi.connector.ConnectorTableExecuteHandle) Collections.singletonList(java.util.Collections.singletonList) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) TransactionLogWriterFactory(io.trino.plugin.deltalake.transactionlog.writer.TransactionLogWriterFactory) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) ValueSet.ofRanges(io.trino.spi.predicate.ValueSet.ofRanges) Column(io.trino.plugin.hive.metastore.Column) ConnectorOutputTableHandle(io.trino.spi.connector.ConnectorOutputTableHandle) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Map(java.util.Map) PARTITIONED_BY_PROPERTY(io.trino.plugin.deltalake.DeltaLakeTableProperties.PARTITIONED_BY_PROPERTY) ProjectionApplicationResult(io.trino.spi.connector.ProjectionApplicationResult) PRESTO_QUERY_ID_NAME(io.trino.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME) ENGLISH(java.util.Locale.ENGLISH) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) HdfsEnvironment(io.trino.plugin.hive.HdfsEnvironment) Table(io.trino.plugin.hive.metastore.Table) Domain(io.trino.spi.predicate.Domain) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Set(java.util.Set) TABLE_PROVIDER_PROPERTY(io.trino.plugin.deltalake.metastore.HiveMetastoreBackedDeltaLakeMetastore.TABLE_PROVIDER_PROPERTY) HiveWriteUtils.pathExists(io.trino.plugin.hive.util.HiveWriteUtils.pathExists) MANAGED_TABLE(org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE) SchemaTableName(io.trino.spi.connector.SchemaTableName) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) Stream(java.util.stream.Stream) TrinoPrincipal(io.trino.spi.security.TrinoPrincipal) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTablePrefix(io.trino.spi.connector.SchemaTablePrefix) HyperLogLog(io.airlift.stats.cardinality.HyperLogLog) DateTimeEncoding.unpackMillisUtc(io.trino.spi.type.DateTimeEncoding.unpackMillisUtc) FILE_MODIFIED_TIME_COLUMN_NAME(io.trino.plugin.deltalake.DeltaLakeColumnHandle.FILE_MODIFIED_TIME_COLUMN_NAME) Predicate.not(java.util.function.Predicate.not) TableColumnsMetadata(io.trino.spi.connector.TableColumnsMetadata) RemoteIterator(org.apache.hadoop.fs.RemoteIterator) ANALYZE_COLUMNS_PROPERTY(io.trino.plugin.deltalake.DeltaLakeTableProperties.ANALYZE_COLUMNS_PROPERTY) REGULAR(io.trino.plugin.deltalake.DeltaLakeColumnType.REGULAR) TransactionLogParser.getMandatoryCurrentVersion(io.trino.plugin.deltalake.transactionlog.TransactionLogParser.getMandatoryCurrentVersion) DATE(io.trino.spi.type.DateType.DATE) REAL(io.trino.spi.type.RealType.REAL) Iterables(com.google.common.collect.Iterables) ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ConnectorInsertTableHandle(io.trino.spi.connector.ConnectorInsertTableHandle) 
DeltaLakeColumnHandle.fileSizeColumnHandle(io.trino.plugin.deltalake.DeltaLakeColumnHandle.fileSizeColumnHandle) Slice(io.airlift.slice.Slice) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) DeltaLakeTableProcedureId(io.trino.plugin.deltalake.procedure.DeltaLakeTableProcedureId) INVALID_ANALYZE_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) Variable(io.trino.spi.expression.Variable) DeltaLakeTableProperties.getLocation(io.trino.plugin.deltalake.DeltaLakeTableProperties.getLocation) Range.greaterThanOrEqual(io.trino.spi.predicate.Range.greaterThanOrEqual) TransactionConflictException(io.trino.plugin.deltalake.transactionlog.writer.TransactionConflictException) HiveType(io.trino.plugin.hive.HiveType) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) DeltaLakeStatisticsAccess(io.trino.plugin.deltalake.statistics.DeltaLakeStatisticsAccess) DeltaLakeSchemaSupport.extractPartitionColumns(io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.extractPartitionColumns) ColumnHandle(io.trino.spi.connector.ColumnHandle) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) INVALID_TABLE_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY) DeltaLakeSchemaSupport.serializeStatsAsJson(io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.serializeStatsAsJson) Nullable(javax.annotation.Nullable) ConstraintApplicationResult(io.trino.spi.connector.ConstraintApplicationResult) MapType(io.trino.spi.type.MapType) PARTITION_KEY(io.trino.plugin.deltalake.DeltaLakeColumnType.PARTITION_KEY) IOException(java.io.IOException) ConnectorSession(io.trino.spi.connector.ConnectorSession) DELTA_LAKE_INVALID_SCHEMA(io.trino.plugin.deltalake.DeltaLakeErrorCode.DELTA_LAKE_INVALID_SCHEMA) CheckpointWriterManager(io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointWriterManager) ROW_ID_COLUMN_TYPE(io.trino.plugin.deltalake.DeltaLakeColumnHandle.ROW_ID_COLUMN_TYPE) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) HiveUtil.isHiveSystemSchema(io.trino.plugin.hive.util.HiveUtil.isHiveSystemSchema) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) MAX_VALUE(io.trino.spi.statistics.ColumnStatisticType.MAX_VALUE) DeltaLakeSessionProperties.isTableStatisticsEnabled(io.trino.plugin.deltalake.DeltaLakeSessionProperties.isTableStatisticsEnabled) LOCATION_PROPERTY(io.trino.plugin.deltalake.DeltaLakeTableProperties.LOCATION_PROPERTY) TableStatisticsMetadata(io.trino.spi.statistics.TableStatisticsMetadata) TINYINT(io.trino.spi.type.TinyintType.TINYINT) NotADeltaLakeTableException(io.trino.plugin.deltalake.metastore.NotADeltaLakeTableException) DeltaLakeStatistics(io.trino.plugin.deltalake.statistics.DeltaLakeStatistics) HiveUtil.isDeltaLakeTable(io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable) NodeManager(io.trino.spi.NodeManager) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) Database(io.trino.plugin.hive.metastore.Database) DeltaLakeSchemaSupport.extractSchema(io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.extractSchema) SYNTHESIZED(io.trino.plugin.deltalake.DeltaLakeColumnType.SYNTHESIZED) TABLE_PROVIDER_VALUE(io.trino.plugin.deltalake.metastore.HiveMetastoreBackedDeltaLakeMetastore.TABLE_PROVIDER_VALUE) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException) 
AddFileEntry(io.trino.plugin.deltalake.transactionlog.AddFileEntry) DeltaLakeMetastore(io.trino.plugin.deltalake.metastore.DeltaLakeMetastore) Format(io.trino.plugin.deltalake.transactionlog.MetadataEntry.Format) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) Locale(java.util.Locale) CatalogSchemaName(io.trino.spi.connector.CatalogSchemaName) Path(org.apache.hadoop.fs.Path) HyperLogLogType(io.trino.spi.type.HyperLogLogType) INTEGER(io.trino.spi.type.IntegerType.INTEGER) StorageFormat(io.trino.plugin.hive.metastore.StorageFormat) RowType(io.trino.spi.type.RowType) Range.range(io.trino.spi.predicate.Range.range) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) HiveWriteUtils.isS3FileSystem(io.trino.plugin.hive.util.HiveWriteUtils.isS3FileSystem) TransactionLogWriter(io.trino.plugin.deltalake.transactionlog.writer.TransactionLogWriter) Collection(java.util.Collection) DeltaLakeTableExecuteHandle(io.trino.plugin.deltalake.procedure.DeltaLakeTableExecuteHandle) MetadataEntry(io.trino.plugin.deltalake.transactionlog.MetadataEntry) ComputedStatistics(io.trino.spi.statistics.ComputedStatistics) TrinoException(io.trino.spi.TrinoException) ArrayType(io.trino.spi.type.ArrayType) Instant(java.time.Instant) ConnectorOutputMetadata(io.trino.spi.connector.ConnectorOutputMetadata) Sets(com.google.common.collect.Sets) FileNotFoundException(java.io.FileNotFoundException) String.format(java.lang.String.format) Preconditions.checkState(com.google.common.base.Preconditions.checkState) ROW_ID_COLUMN_NAME(io.trino.plugin.deltalake.DeltaLakeColumnHandle.ROW_ID_COLUMN_NAME) INVALID_SCHEMA_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_SCHEMA_PROPERTY) DataSize(io.airlift.units.DataSize) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) List(java.util.List) BIGINT(io.trino.spi.type.BigintType.BIGINT) MetastoreUtil.buildInitialPrivilegeSet(io.trino.plugin.hive.metastore.MetastoreUtil.buildInitialPrivilegeSet) Assignment(io.trino.spi.connector.Assignment) BeginTableExecuteResult(io.trino.spi.connector.BeginTableExecuteResult) Function.identity(java.util.function.Function.identity) Optional(java.util.Optional) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) DecimalType(io.trino.spi.type.DecimalType) OPTIMIZE(io.trino.plugin.deltalake.procedure.DeltaLakeTableProcedureId.OPTIMIZE) JsonCodec(io.airlift.json.JsonCodec) Comparators(com.google.common.collect.Comparators) Constraint(io.trino.spi.connector.Constraint) Range.lessThanOrEqual(io.trino.spi.predicate.Range.lessThanOrEqual) DeltaLakeFileStatistics(io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeFileStatistics) Logger(io.airlift.log.Logger) DeltaLakeSchemaSupport.serializeSchemaAsJson(io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.serializeSchemaAsJson) DeltaLakeColumnStatistics(io.trino.plugin.deltalake.statistics.DeltaLakeColumnStatistics) Type(io.trino.spi.type.Type) HashMap(java.util.HashMap) DeltaLakeColumnHandle.pathColumnHandle(io.trino.plugin.deltalake.DeltaLakeColumnHandle.pathColumnHandle) DeltaLakeColumnHandle.fileModifiedTimeColumnHandle(io.trino.plugin.deltalake.DeltaLakeColumnHandle.fileModifiedTimeColumnHandle) AtomicReference(java.util.concurrent.atomic.AtomicReference) VarcharType(io.trino.spi.type.VarcharType) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) Objects.requireNonNull(java.util.Objects.requireNonNull) 
TableStatistics(io.trino.spi.statistics.TableStatistics) DeltaLakeSessionProperties.isExtendedStatisticsEnabled(io.trino.plugin.deltalake.DeltaLakeSessionProperties.isExtendedStatisticsEnabled) VIRTUAL_VIEW(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW) CHECKPOINT_INTERVAL_PROPERTY(io.trino.plugin.deltalake.DeltaLakeTableProperties.CHECKPOINT_INTERVAL_PROPERTY) StorageFormat.create(io.trino.plugin.hive.metastore.StorageFormat.create) MetadataEntry.buildDeltaMetadataConfiguration(io.trino.plugin.deltalake.transactionlog.MetadataEntry.buildDeltaMetadataConfiguration) TupleDomain.withColumnDomains(io.trino.spi.predicate.TupleDomain.withColumnDomains) DELTA_LAKE_BAD_WRITE(io.trino.plugin.deltalake.DeltaLakeErrorCode.DELTA_LAKE_BAD_WRITE) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) TupleDomain(io.trino.spi.predicate.TupleDomain) DeltaLakeTableProperties.getPartitionedBy(io.trino.plugin.deltalake.DeltaLakeTableProperties.getPartitionedBy) HiveWriteUtils.createDirectory(io.trino.plugin.hive.util.HiveWriteUtils.createDirectory) GENERIC_INTERNAL_ERROR(io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) SchemaTableName.schemaTableName(io.trino.spi.connector.SchemaTableName.schemaTableName) UUID.randomUUID(java.util.UUID.randomUUID) ProtocolEntry(io.trino.plugin.deltalake.transactionlog.ProtocolEntry) DeltaTableOptimizeHandle(io.trino.plugin.deltalake.procedure.DeltaTableOptimizeHandle) Collections.unmodifiableMap(java.util.Collections.unmodifiableMap) CommitInfoEntry(io.trino.plugin.deltalake.transactionlog.CommitInfoEntry) PrincipalPrivileges(io.trino.plugin.hive.metastore.PrincipalPrivileges) TypeManager(io.trino.spi.type.TypeManager) Collections(java.util.Collections) NUMBER_OF_DISTINCT_VALUES_SUMMARY(io.trino.spi.statistics.ColumnStatisticType.NUMBER_OF_DISTINCT_VALUES_SUMMARY) DeltaLakeSchemaSupport.validateType(io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.validateType) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) HiveType(io.trino.plugin.hive.HiveType) MapType(io.trino.spi.type.MapType) HyperLogLogType(io.trino.spi.type.HyperLogLogType) RowType(io.trino.spi.type.RowType) ArrayType(io.trino.spi.type.ArrayType) DecimalType(io.trino.spi.type.DecimalType) Type(io.trino.spi.type.Type) VarcharType(io.trino.spi.type.VarcharType)
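
A minimal sketch of the two rowId shapes this method produces, assuming trino-spi is on the classpath; the column names below are hypothetical stand-ins for the unmodified columns:

import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
import java.util.List;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class RowIdTypeSketch {
    public static void main(String[] args) {
        // No unmodified columns: a single anonymous bigint field holding the row number
        Type simpleRowId = RowType.rowType(RowType.field(BIGINT));

        // With unmodified columns: the row number plus a nested row of those columns
        List<RowType.Field> unmodified = List.of(
                RowType.field("col1", VARCHAR),   // hypothetical unmodified column
                RowType.field("col2", BIGINT));   // hypothetical unmodified column
        Type fullRowId = RowType.rowType(
                RowType.field(BIGINT),
                RowType.field(RowType.from(unmodified)));

        System.out.println(simpleRowId.getDisplayName()); // e.g. row(bigint)
        System.out.println(fullRowId.getDisplayName());   // e.g. row(bigint, row(col1 varchar, col2 bigint))
    }
}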

Example 74 with RowType

use of io.trino.spi.type.RowType in project trino by trinodb.

the class TypeCoercion method compatibility.

private TypeCompatibility compatibility(Type fromType, Type toType) {
    if (fromType.equals(toType)) {
        return TypeCompatibility.compatible(toType, true);
    }
    if (fromType.equals(UnknownType.UNKNOWN)) {
        return TypeCompatibility.compatible(toType, true);
    }
    if (toType.equals(UnknownType.UNKNOWN)) {
        return TypeCompatibility.compatible(fromType, false);
    }
    String fromTypeBaseName = fromType.getBaseName();
    String toTypeBaseName = toType.getBaseName();
    if (fromTypeBaseName.equals(toTypeBaseName)) {
        if (fromTypeBaseName.equals(StandardTypes.DECIMAL)) {
            Type commonSuperType = getCommonSuperTypeForDecimal((DecimalType) fromType, (DecimalType) toType);
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (fromTypeBaseName.equals(StandardTypes.VARCHAR)) {
            Type commonSuperType = getCommonSuperTypeForVarchar((VarcharType) fromType, (VarcharType) toType);
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (fromTypeBaseName.equals(StandardTypes.CHAR)) {
            Type commonSuperType = getCommonSuperTypeForChar((CharType) fromType, (CharType) toType);
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (fromTypeBaseName.equals(StandardTypes.ROW)) {
            return typeCompatibilityForRow((RowType) fromType, (RowType) toType);
        }
        if (fromTypeBaseName.equals(StandardTypes.TIMESTAMP)) {
            Type commonSuperType = createTimestampType(Math.max(((TimestampType) fromType).getPrecision(), ((TimestampType) toType).getPrecision()));
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (fromTypeBaseName.equals(StandardTypes.TIMESTAMP_WITH_TIME_ZONE)) {
            Type commonSuperType = createTimestampWithTimeZoneType(Math.max(((TimestampWithTimeZoneType) fromType).getPrecision(), ((TimestampWithTimeZoneType) toType).getPrecision()));
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (fromTypeBaseName.equals(StandardTypes.TIME)) {
            Type commonSuperType = createTimeType(Math.max(((TimeType) fromType).getPrecision(), ((TimeType) toType).getPrecision()));
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (fromTypeBaseName.equals(StandardTypes.TIME_WITH_TIME_ZONE)) {
            Type commonSuperType = createTimeWithTimeZoneType(Math.max(((TimeWithTimeZoneType) fromType).getPrecision(), ((TimeWithTimeZoneType) toType).getPrecision()));
            return TypeCompatibility.compatible(commonSuperType, commonSuperType.equals(toType));
        }
        if (isCovariantParametrizedType(fromType)) {
            return typeCompatibilityForCovariantParametrizedType(fromType, toType);
        }
        return TypeCompatibility.incompatible();
    }
    Optional<Type> coercedType = coerceTypeBase(fromType, toType.getBaseName());
    if (coercedType.isPresent()) {
        return compatibility(coercedType.get(), toType);
    }
    coercedType = coerceTypeBase(toType, fromType.getBaseName());
    if (coercedType.isPresent()) {
        TypeCompatibility typeCompatibility = compatibility(fromType, coercedType.get());
        if (!typeCompatibility.isCompatible()) {
            return TypeCompatibility.incompatible();
        }
        return TypeCompatibility.compatible(typeCompatibility.getCommonSuperType(), false);
    }
    return TypeCompatibility.incompatible();
}
Also used : TimeType(io.trino.spi.type.TimeType) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) TimeWithTimeZoneType.createTimeWithTimeZoneType(io.trino.spi.type.TimeWithTimeZoneType.createTimeWithTimeZoneType) TimeType.createTimeType(io.trino.spi.type.TimeType.createTimeType) TimestampType(io.trino.spi.type.TimestampType) CharType.createCharType(io.trino.spi.type.CharType.createCharType) VarcharType(io.trino.spi.type.VarcharType) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) TimestampType.createTimestampType(io.trino.spi.type.TimestampType.createTimestampType) SetDigestType(io.trino.type.setdigest.SetDigestType) RowType(io.trino.spi.type.RowType) DecimalType.createDecimalType(io.trino.spi.type.DecimalType.createDecimalType) MapType(io.trino.spi.type.MapType) ArrayType(io.trino.spi.type.ArrayType) TimestampWithTimeZoneType.createTimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType) CharType(io.trino.spi.type.CharType) VarcharType.createVarcharType(io.trino.spi.type.VarcharType.createVarcharType) DecimalType(io.trino.spi.type.DecimalType) TimeWithTimeZoneType(io.trino.spi.type.TimeWithTimeZoneType) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) TimestampWithTimeZoneType.createTimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType) TimestampType(io.trino.spi.type.TimestampType) TimestampType.createTimestampType(io.trino.spi.type.TimestampType.createTimestampType) TimeWithTimeZoneType.createTimeWithTimeZoneType(io.trino.spi.type.TimeWithTimeZoneType.createTimeWithTimeZoneType) TimeWithTimeZoneType(io.trino.spi.type.TimeWithTimeZoneType) TimeType(io.trino.spi.type.TimeType) TimeType.createTimeType(io.trino.spi.type.TimeType.createTimeType)
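
A simplified illustration of the row branch above (not Trino's TypeCoercion, which recurses through typeCompatibilityForRow and merges field names): two row types with the same arity are compared field by field, here using only the base names as a crude stand-in for the recursive compatibility check.

import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
import java.util.List;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.spi.type.VarcharType.createVarcharType;

public class RowCompatibilitySketch {
    static boolean fieldsCompatible(RowType from, RowType to) {
        List<Type> fromFields = from.getTypeParameters();
        List<Type> toFields = to.getTypeParameters();
        if (fromFields.size() != toFields.size()) {
            return false;
        }
        for (int i = 0; i < fromFields.size(); i++) {
            // Crude stand-in for the recursive compatibility() call: matching base names only
            if (!fromFields.get(i).getBaseName().equals(toFields.get(i).getBaseName())) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        RowType from = RowType.rowType(RowType.field("a", createVarcharType(3)), RowType.field("b", BIGINT));
        RowType to = RowType.rowType(RowType.field("a", VARCHAR), RowType.field("b", BIGINT));
        System.out.println(fieldsCompatible(from, to)); // true: varchar(3) and varchar share a base name
    }
}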

Example 75 with RowType

use of io.trino.spi.type.RowType in project trino by trinodb.

the class TestAnalyzer method setup.

@BeforeClass
public void setup() {
    LocalQueryRunner queryRunner = LocalQueryRunner.create(TEST_SESSION);
    closer.register(queryRunner);
    transactionManager = queryRunner.getTransactionManager();
    AccessControlManager accessControlManager = new AccessControlManager(transactionManager, emptyEventListenerManager(), new AccessControlConfig(), DefaultSystemAccessControl.NAME);
    accessControlManager.setSystemAccessControls(List.of(AllowAllSystemAccessControl.INSTANCE));
    this.accessControl = accessControlManager;
    queryRunner.addFunctions(InternalFunctionBundle.builder().functions(APPLY_FUNCTION).build());
    plannerContext = queryRunner.getPlannerContext();
    Metadata metadata = plannerContext.getMetadata();
    TestingMetadata testingConnectorMetadata = new TestingMetadata();
    TestingConnector connector = new TestingConnector(testingConnectorMetadata);
    queryRunner.createCatalog(TPCH_CATALOG, new StaticConnectorFactory("main", connector), ImmutableMap.of());
    tablePropertyManager = queryRunner.getTablePropertyManager();
    analyzePropertyManager = queryRunner.getAnalyzePropertyManager();
    queryRunner.createCatalog(SECOND_CATALOG, MockConnectorFactory.create("second"), ImmutableMap.of());
    queryRunner.createCatalog(THIRD_CATALOG, MockConnectorFactory.create("third"), ImmutableMap.of());
    SchemaTableName table1 = new SchemaTableName("s1", "t1");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table1, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT), new ColumnMetadata("c", BIGINT), new ColumnMetadata("d", BIGINT))), false));
    SchemaTableName table2 = new SchemaTableName("s1", "t2");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table2, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT))), false));
    SchemaTableName table3 = new SchemaTableName("s1", "t3");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table3, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT), new ColumnMetadata("x", BIGINT, null, true))), false));
    // table in different catalog
    SchemaTableName table4 = new SchemaTableName("s2", "t4");
    inSetupTransaction(session -> metadata.createTable(session, SECOND_CATALOG, new ConnectorTableMetadata(table4, ImmutableList.of(new ColumnMetadata("a", BIGINT))), false));
    // table with a hidden column
    SchemaTableName table5 = new SchemaTableName("s1", "t5");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table5, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT, null, true))), false));
    // table with a varchar column
    SchemaTableName table6 = new SchemaTableName("s1", "t6");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table6, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", VARCHAR), new ColumnMetadata("c", BIGINT), new ColumnMetadata("d", BIGINT))), false));
    // table with bigint, double, array of bigints and array of doubles column
    SchemaTableName table7 = new SchemaTableName("s1", "t7");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table7, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", DOUBLE), new ColumnMetadata("c", new ArrayType(BIGINT)), new ColumnMetadata("d", new ArrayType(DOUBLE)))), false));
    // materialized view referencing table in same schema
    MaterializedViewDefinition materializedViewData1 = new MaterializedViewDefinition("select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.of("comment"), Identity.ofUser("user"), Optional.empty(), ImmutableMap.of());
    inSetupTransaction(session -> metadata.createMaterializedView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "mv1"), materializedViewData1, false, true));
    // valid view referencing table in same schema
    ViewDefinition viewData1 = new ViewDefinition("select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.of("comment"), Optional.of(Identity.ofUser("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v1"), viewData1, false));
    // stale view (different column type)
    ViewDefinition viewData2 = new ViewDefinition("select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", VARCHAR.getTypeId())), Optional.of("comment"), Optional.of(Identity.ofUser("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v2"), viewData2, false));
    // view referencing table in different schema from itself and session
    ViewDefinition viewData3 = new ViewDefinition("select a from t4", Optional.of(SECOND_CATALOG), Optional.of("s2"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.of("comment"), Optional.of(Identity.ofUser("owner")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(THIRD_CATALOG, "s3", "v3"), viewData3, false));
    // valid view with uppercase column name
    ViewDefinition viewData4 = new ViewDefinition("select A from t1", Optional.of("tpch"), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.of("comment"), Optional.of(Identity.ofUser("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName("tpch", "s1", "v4"), viewData4, false));
    // recursive view referencing to itself
    ViewDefinition viewData5 = new ViewDefinition("select * from v5", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.of("comment"), Optional.of(Identity.ofUser("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v5"), viewData5, false));
    // type analysis for INSERT
    SchemaTableName table8 = new SchemaTableName("s1", "t8");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table8, ImmutableList.of(new ColumnMetadata("tinyint_column", TINYINT), new ColumnMetadata("integer_column", INTEGER), new ColumnMetadata("decimal_column", createDecimalType(5, 3)), new ColumnMetadata("real_column", REAL), new ColumnMetadata("char_column", createCharType(3)), new ColumnMetadata("bounded_varchar_column", createVarcharType(3)), new ColumnMetadata("unbounded_varchar_column", VARCHAR), new ColumnMetadata("tinyint_array_column", new ArrayType(TINYINT)), new ColumnMetadata("bigint_array_column", new ArrayType(BIGINT)), new ColumnMetadata("nested_bounded_varchar_column", anonymousRow(createVarcharType(3))), new ColumnMetadata("row_column", anonymousRow(TINYINT, createUnboundedVarcharType())), new ColumnMetadata("date_column", DATE))), false));
    // for identifier chain resolving tests
    queryRunner.createCatalog(CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new StaticConnectorFactory("chain", new TestingConnector(new TestingMetadata())), ImmutableMap.of());
    Type singleFieldRowType = TESTING_TYPE_MANAGER.fromSqlType("row(f1 bigint)");
    Type rowType = TESTING_TYPE_MANAGER.fromSqlType("row(f1 bigint, f2 bigint)");
    Type nestedRowType = TESTING_TYPE_MANAGER.fromSqlType("row(f1 row(f11 bigint, f12 bigint), f2 boolean)");
    Type doubleNestedRowType = TESTING_TYPE_MANAGER.fromSqlType("row(f1 row(f11 row(f111 bigint, f112 bigint), f12 boolean), f2 boolean)");
    SchemaTableName b = new SchemaTableName("a", "b");
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(b, ImmutableList.of(new ColumnMetadata("x", VARCHAR))), false));
    SchemaTableName t1 = new SchemaTableName("a", "t1");
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(t1, ImmutableList.of(new ColumnMetadata("b", rowType))), false));
    SchemaTableName t2 = new SchemaTableName("a", "t2");
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(t2, ImmutableList.of(new ColumnMetadata("a", rowType))), false));
    SchemaTableName t3 = new SchemaTableName("a", "t3");
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(t3, ImmutableList.of(new ColumnMetadata("b", nestedRowType), new ColumnMetadata("c", BIGINT))), false));
    SchemaTableName t4 = new SchemaTableName("a", "t4");
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(t4, ImmutableList.of(new ColumnMetadata("b", doubleNestedRowType), new ColumnMetadata("c", BIGINT))), false));
    SchemaTableName t5 = new SchemaTableName("a", "t5");
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(t5, ImmutableList.of(new ColumnMetadata("b", singleFieldRowType))), false));
    QualifiedObjectName tableViewAndMaterializedView = new QualifiedObjectName(TPCH_CATALOG, "s1", "table_view_and_materialized_view");
    inSetupTransaction(session -> metadata.createMaterializedView(session, tableViewAndMaterializedView, new MaterializedViewDefinition("SELECT a FROM t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.empty(), Identity.ofUser("some user"), Optional.of(new CatalogSchemaTableName(TPCH_CATALOG, "s1", "t1")), ImmutableMap.of()), false, false));
    ViewDefinition viewDefinition = new ViewDefinition("SELECT a FROM t2", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.empty(), Optional.empty());
    inSetupTransaction(session -> metadata.createView(session, tableViewAndMaterializedView, viewDefinition, false));
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(tableViewAndMaterializedView.asSchemaTableName(), ImmutableList.of(new ColumnMetadata("a", BIGINT))), false));
    QualifiedObjectName tableAndView = new QualifiedObjectName(TPCH_CATALOG, "s1", "table_and_view");
    inSetupTransaction(session -> metadata.createView(session, tableAndView, viewDefinition, false));
    inSetupTransaction(session -> metadata.createTable(session, CATALOG_FOR_IDENTIFIER_CHAIN_TESTS, new ConnectorTableMetadata(tableAndView.asSchemaTableName(), ImmutableList.of(new ColumnMetadata("a", BIGINT))), false));
    QualifiedObjectName freshMaterializedView = new QualifiedObjectName(TPCH_CATALOG, "s1", "fresh_materialized_view");
    inSetupTransaction(session -> metadata.createMaterializedView(session, freshMaterializedView, new MaterializedViewDefinition("SELECT a, b FROM t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId()), new ViewColumn("b", BIGINT.getTypeId())), Optional.empty(), Identity.ofUser("some user"), // t3 has a, b column and hidden column x
    Optional.of(new CatalogSchemaTableName(TPCH_CATALOG, "s1", "t3")), ImmutableMap.of()), false, false));
    testingConnectorMetadata.markMaterializedViewIsFresh(freshMaterializedView.asSchemaTableName());
    QualifiedObjectName freshMaterializedViewMismatchedColumnCount = new QualifiedObjectName(TPCH_CATALOG, "s1", "fresh_materialized_view_mismatched_column_count");
    inSetupTransaction(session -> metadata.createMaterializedView(session, freshMaterializedViewMismatchedColumnCount, new MaterializedViewDefinition("SELECT a FROM t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.empty(), Identity.ofUser("some user"), Optional.of(new CatalogSchemaTableName(TPCH_CATALOG, "s1", "t2")), ImmutableMap.of()), false, false));
    testingConnectorMetadata.markMaterializedViewIsFresh(freshMaterializedViewMismatchedColumnCount.asSchemaTableName());
    QualifiedObjectName freshMaterializedMismatchedColumnName = new QualifiedObjectName(TPCH_CATALOG, "s1", "fresh_materialized_view_mismatched_column_name");
    inSetupTransaction(session -> metadata.createMaterializedView(session, freshMaterializedMismatchedColumnName, new MaterializedViewDefinition("SELECT a, b as c FROM t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId()), new ViewColumn("c", BIGINT.getTypeId())), Optional.empty(), Identity.ofUser("some user"), Optional.of(new CatalogSchemaTableName(TPCH_CATALOG, "s1", "t2")), ImmutableMap.of()), false, false));
    testingConnectorMetadata.markMaterializedViewIsFresh(freshMaterializedMismatchedColumnName.asSchemaTableName());
    QualifiedObjectName freshMaterializedMismatchedColumnType = new QualifiedObjectName(TPCH_CATALOG, "s1", "fresh_materialized_view_mismatched_column_type");
    inSetupTransaction(session -> metadata.createMaterializedView(session, freshMaterializedMismatchedColumnType, new MaterializedViewDefinition("SELECT a, null b FROM t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId()), new ViewColumn("b", RowType.anonymousRow(TINYINT).getTypeId())), Optional.empty(), Identity.ofUser("some user"), Optional.of(new CatalogSchemaTableName(TPCH_CATALOG, "s1", "t2")), ImmutableMap.of()), false, false));
    testingConnectorMetadata.markMaterializedViewIsFresh(freshMaterializedMismatchedColumnType.asSchemaTableName());
}
Also used : AccessControlManager(io.trino.security.AccessControlManager) TestingAccessControlManager(io.trino.testing.TestingAccessControlManager) AccessControlConfig(io.trino.security.AccessControlConfig) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) ViewColumn(io.trino.metadata.ViewColumn) MaterializedViewDefinition(io.trino.metadata.MaterializedViewDefinition) ViewDefinition(io.trino.metadata.ViewDefinition) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) TestingMetadata(io.trino.testing.TestingMetadata) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) PropertyMetadata(io.trino.spi.session.PropertyMetadata) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) Metadata(io.trino.metadata.Metadata) SchemaTableName(io.trino.spi.connector.SchemaTableName) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) LocalQueryRunner(io.trino.testing.LocalQueryRunner) StaticConnectorFactory(io.trino.connector.StaticConnectorFactory) QualifiedObjectName(io.trino.metadata.QualifiedObjectName) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) ArrayType(io.trino.spi.type.ArrayType) CharType.createCharType(io.trino.spi.type.CharType.createCharType) DecimalType.createDecimalType(io.trino.spi.type.DecimalType.createDecimalType) VarcharType.createVarcharType(io.trino.spi.type.VarcharType.createVarcharType) RowType(io.trino.spi.type.RowType) ArrayType(io.trino.spi.type.ArrayType) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) MaterializedViewDefinition(io.trino.metadata.MaterializedViewDefinition) TestingMetadata(io.trino.testing.TestingMetadata) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) BeforeClass(org.testng.annotations.BeforeClass)
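
The row types that the test builds with TESTING_TYPE_MANAGER.fromSqlType can also be constructed directly from the SPI; a minimal sketch equivalent to two of the fromSqlType calls above, assuming trino-spi is on the classpath:

import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;

public class RowTypeConstructionSketch {
    public static void main(String[] args) {
        // row(f1 bigint, f2 bigint)
        Type rowType = RowType.rowType(
                RowType.field("f1", BIGINT),
                RowType.field("f2", BIGINT));

        // row(f1 row(f11 bigint, f12 bigint), f2 boolean)
        Type nestedRowType = RowType.rowType(
                RowType.field("f1", RowType.rowType(
                        RowType.field("f11", BIGINT),
                        RowType.field("f12", BIGINT))),
                RowType.field("f2", BOOLEAN));

        System.out.println(rowType.getDisplayName());
        System.out.println(nestedRowType.getDisplayName());
    }
}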

Aggregations

RowType (io.trino.spi.type.RowType) 90
ArrayType (io.trino.spi.type.ArrayType) 51
MapType (io.trino.spi.type.MapType) 42
Type (io.trino.spi.type.Type) 42
ImmutableList (com.google.common.collect.ImmutableList) 30
VarcharType (io.trino.spi.type.VarcharType) 28
BlockBuilder (io.trino.spi.block.BlockBuilder) 26
DecimalType (io.trino.spi.type.DecimalType) 21
Test (org.testng.annotations.Test) 21
ImmutableMap (com.google.common.collect.ImmutableMap) 20
Block (io.trino.spi.block.Block) 20
List (java.util.List) 20
Map (java.util.Map) 18
ArrayList (java.util.ArrayList) 17
Optional (java.util.Optional) 17
CharType (io.trino.spi.type.CharType) 16
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList) 15
ColumnMetadata (io.trino.spi.connector.ColumnMetadata) 12
TimestampWithTimeZoneType (io.trino.spi.type.TimestampWithTimeZoneType) 12
HashMap (java.util.HashMap) 12