Example 66 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

From class DynamicSourceUtils, method validateAndApplyMetadata.

private static void validateAndApplyMetadata(String tableDebugName, ResolvedSchema schema, DynamicTableSource source) {
    final List<MetadataColumn> metadataColumns = extractMetadataColumns(schema);
    if (metadataColumns.isEmpty()) {
        return;
    }
    if (!(source instanceof SupportsReadingMetadata)) {
        throw new ValidationException(String.format("Table '%s' declares metadata columns, but the underlying %s doesn't implement " + "the %s interface. Therefore, metadata cannot be read from the given source.", source.asSummaryString(), DynamicTableSource.class.getSimpleName(), SupportsReadingMetadata.class.getSimpleName()));
    }
    final SupportsReadingMetadata metadataSource = (SupportsReadingMetadata) source;
    final Map<String, DataType> metadataMap = metadataSource.listReadableMetadata();
    metadataColumns.forEach(c -> {
        final String metadataKey = c.getMetadataKey().orElse(c.getName());
        final LogicalType metadataType = c.getDataType().getLogicalType();
        final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
        // check that metadata key is valid
        if (expectedMetadataDataType == null) {
            throw new ValidationException(String.format("Invalid metadata key '%s' in column '%s' of table '%s'. " + "The %s class '%s' supports the following metadata keys for reading:\n%s", metadataKey, c.getName(), tableDebugName, DynamicTableSource.class.getSimpleName(), source.getClass().getName(), String.join("\n", metadataMap.keySet())));
        }
        // check that types are compatible
        if (!supportsExplicitCast(expectedMetadataDataType.getLogicalType(), metadataType)) {
            if (metadataKey.equals(c.getName())) {
                throw new ValidationException(
                        String.format(
                                "Invalid data type for metadata column '%s' of table '%s'. "
                                        + "The column cannot be declared as '%s' because the type must be "
                                        + "castable from metadata type '%s'.",
                                c.getName(),
                                tableDebugName,
                                expectedMetadataDataType.getLogicalType(),
                                metadataType));
            } else {
                throw new ValidationException(
                        String.format(
                                "Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. "
                                        + "The column cannot be declared as '%s' because the type must be "
                                        + "castable from metadata type '%s'.",
                                c.getName(),
                                metadataKey,
                                tableDebugName,
                                expectedMetadataDataType.getLogicalType(),
                                metadataType));
            }
        }
    });
    metadataSource.applyReadableMetadata(
            createRequiredMetadataKeys(schema, source),
            TypeConversions.fromLogicalToDataType(createProducedType(schema, source)));
}
Also used : MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) ValidationException(org.apache.flink.table.api.ValidationException) SupportsReadingMetadata(org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata) DataType(org.apache.flink.table.types.DataType) RelDataType(org.apache.calcite.rel.type.RelDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType)
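
For contrast, here is the connector side of the contract that the validation above checks. This is a minimal, hedged sketch: the class name MyScanSource and the two metadata keys are made up, and a real source would also implement ScanTableSource, but listReadableMetadata and applyReadableMetadata are the actual SupportsReadingMetadata methods.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata;
import org.apache.flink.table.types.DataType;

// Hypothetical source that exposes two metadata columns for reading.
class MyScanSource implements SupportsReadingMetadata {

    private List<String> appliedMetadataKeys;
    private DataType producedDataType;

    @Override
    public Map<String, DataType> listReadableMetadata() {
        // These keys are what the ValidationException above enumerates when a
        // table declares a metadata column with an unknown key.
        final Map<String, DataType> metadata = new HashMap<>();
        metadata.put("timestamp", DataTypes.TIMESTAMP_LTZ(3));
        metadata.put("partition", DataTypes.INT());
        return metadata;
    }

    @Override
    public void applyReadableMetadata(List<String> metadataKeys, DataType producedDataType) {
        // Called last by the planner, with only the keys that passed validation.
        this.appliedMetadataKeys = metadataKeys;
        this.producedDataType = producedDataType;
    }
}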

Example 67 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

From class DataFormatConverters, method getConverterForDataType.

/**
 * Get {@link DataFormatConverter} for {@link DataType}.
 *
 * @param originDataType DataFormatConverter is oriented to Java format, while LogicalType has
 *     lost its specific Java format. Only DataType retains all its Java format information.
 */
public static DataFormatConverter getConverterForDataType(DataType originDataType) {
    DataType dataType = originDataType.nullable();
    DataFormatConverter converter = TYPE_TO_CONVERTER.get(dataType);
    if (converter != null) {
        return converter;
    }
    Class<?> clazz = dataType.getConversionClass();
    LogicalType logicalType = dataType.getLogicalType();
    switch(logicalType.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            if (clazz == String.class) {
                return StringConverter.INSTANCE;
            } else if (clazz == StringData.class) {
                return StringDataConverter.INSTANCE;
            } else {
                throw new RuntimeException("Not support class for VARCHAR: " + clazz);
            }
        case BINARY:
        case VARBINARY:
            return PrimitiveByteArrayConverter.INSTANCE;
        case DECIMAL:
            Tuple2<Integer, Integer> ps = getPrecision(logicalType);
            if (clazz == BigDecimal.class) {
                return new BigDecimalConverter(ps.f0, ps.f1);
            } else if (clazz == DecimalData.class) {
                return new DecimalDataConverter(ps.f0, ps.f1);
            } else {
                throw new RuntimeException("Not support conversion class for DECIMAL: " + clazz);
            }
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            int precisionOfTS = getDateTimePrecision(logicalType);
            if (clazz == Timestamp.class) {
                return new TimestampConverter(precisionOfTS);
            } else if (clazz == LocalDateTime.class) {
                return new LocalDateTimeConverter(precisionOfTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfTS);
            } else {
                throw new RuntimeException("Not support conversion class for TIMESTAMP WITHOUT TIME ZONE: " + clazz);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            int precisionOfLZTS = getDateTimePrecision(logicalType);
            if (clazz == Instant.class) {
                return new InstantConverter(precisionOfLZTS);
            } else if (clazz == Long.class || clazz == long.class) {
                return new LongTimestampDataConverter(precisionOfLZTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfLZTS);
            } else if (clazz == Timestamp.class) {
                return new TimestampLtzConverter(precisionOfLZTS);
            } else {
                throw new RuntimeException("Not support conversion class for TIMESTAMP WITH LOCAL TIME ZONE: " + clazz);
            }
        case ARRAY:
            if (clazz == ArrayData.class) {
                return ArrayDataConverter.INSTANCE;
            } else if (clazz == boolean[].class) {
                return PrimitiveBooleanArrayConverter.INSTANCE;
            } else if (clazz == short[].class) {
                return PrimitiveShortArrayConverter.INSTANCE;
            } else if (clazz == int[].class) {
                return PrimitiveIntArrayConverter.INSTANCE;
            } else if (clazz == long[].class) {
                return PrimitiveLongArrayConverter.INSTANCE;
            } else if (clazz == float[].class) {
                return PrimitiveFloatArrayConverter.INSTANCE;
            } else if (clazz == double[].class) {
                return PrimitiveDoubleArrayConverter.INSTANCE;
            }
            if (dataType instanceof CollectionDataType) {
                return new ObjectArrayConverter(
                        ((CollectionDataType) dataType)
                                .getElementDataType()
                                .bridgedTo(clazz.getComponentType()));
            } else {
                BasicArrayTypeInfo typeInfo =
                        (BasicArrayTypeInfo)
                                ((LegacyTypeInformationType) dataType.getLogicalType())
                                        .getTypeInformation();
                return new ObjectArrayConverter(
                        fromLegacyInfoToDataType(typeInfo.getComponentInfo())
                                .bridgedTo(clazz.getComponentType()));
            }
        case MAP:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            return new MapConverter(keyValueDataType.getKeyDataType(), keyValueDataType.getValueDataType());
        case MULTISET:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            CollectionDataType collectionDataType = (CollectionDataType) dataType;
            return new MapConverter(collectionDataType.getElementDataType(), DataTypes.INT().bridgedTo(Integer.class));
        case ROW:
        case STRUCTURED_TYPE:
            TypeInformation<?> asTypeInfo = fromDataTypeToTypeInfo(dataType);
            if (asTypeInfo instanceof InternalTypeInfo && clazz == RowData.class) {
                LogicalType realLogicalType = ((InternalTypeInfo<?>) asTypeInfo).toLogicalType();
                return new RowDataConverter(getFieldCount(realLogicalType));
            }
            // legacy
            CompositeType compositeType = (CompositeType) asTypeInfo;
            DataType[] fieldTypes =
                    Stream.iterate(0, x -> x + 1)
                            .limit(compositeType.getArity())
                            .map((Function<Integer, TypeInformation>) compositeType::getTypeAt)
                            .map(TypeConversions::fromLegacyInfoToDataType)
                            .toArray(DataType[]::new);
            if (clazz == RowData.class) {
                return new RowDataConverter(compositeType.getArity());
            } else if (clazz == Row.class) {
                return new RowConverter(fieldTypes);
            } else if (Tuple.class.isAssignableFrom(clazz)) {
                return new TupleConverter((Class<Tuple>) clazz, fieldTypes);
            } else if (CaseClassConverter.PRODUCT_CLASS != null && CaseClassConverter.PRODUCT_CLASS.isAssignableFrom(clazz)) {
                return new CaseClassConverter((TupleTypeInfoBase) compositeType, fieldTypes);
            } else if (compositeType instanceof PojoTypeInfo) {
                return new PojoConverter((PojoTypeInfo) compositeType, fieldTypes);
            } else {
                throw new IllegalStateException("Cannot find a converter for type " + compositeType + ". If the target should be a converter to scala.Product, then you might have a scala classpath issue.");
            }
        case RAW:
            if (logicalType instanceof RawType) {
                final RawType<?> rawType = (RawType<?>) logicalType;
                if (clazz == RawValueData.class) {
                    return RawValueDataConverter.INSTANCE;
                } else {
                    return new GenericConverter<>(rawType.getTypeSerializer());
                }
            }
            // legacy
            TypeInformation typeInfo =
                    logicalType instanceof LegacyTypeInformationType
                            ? ((LegacyTypeInformationType) logicalType).getTypeInformation()
                            : ((TypeInformationRawType) logicalType).getTypeInformation();
            // planner type info
            if (typeInfo instanceof StringDataTypeInfo) {
                return StringDataConverter.INSTANCE;
            } else if (typeInfo instanceof DecimalDataTypeInfo) {
                DecimalDataTypeInfo decimalType = (DecimalDataTypeInfo) typeInfo;
                return new DecimalDataConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof BigDecimalTypeInfo) {
                BigDecimalTypeInfo decimalType = (BigDecimalTypeInfo) typeInfo;
                return new BigDecimalConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof TimestampDataTypeInfo) {
                TimestampDataTypeInfo timestampDataTypeInfo = (TimestampDataTypeInfo) typeInfo;
                return new TimestampDataConverter(timestampDataTypeInfo.getPrecision());
            } else if (typeInfo instanceof LegacyLocalDateTimeTypeInfo) {
                LegacyLocalDateTimeTypeInfo dateTimeType = (LegacyLocalDateTimeTypeInfo) typeInfo;
                return new LocalDateTimeConverter(dateTimeType.getPrecision());
            } else if (typeInfo instanceof LegacyTimestampTypeInfo) {
                LegacyTimestampTypeInfo timestampType = (LegacyTimestampTypeInfo) typeInfo;
                return new TimestampConverter(timestampType.getPrecision());
            } else if (typeInfo instanceof LegacyInstantTypeInfo) {
                LegacyInstantTypeInfo instantTypeInfo = (LegacyInstantTypeInfo) typeInfo;
                return new InstantConverter(instantTypeInfo.getPrecision());
            }
            if (clazz == RawValueData.class) {
                return RawValueDataConverter.INSTANCE;
            }
            return new GenericConverter(typeInfo.createSerializer(new ExecutionConfig()));
        default:
            throw new RuntimeException("Not support dataType: " + dataType);
    }
}
Also used : DecimalDataTypeInfo(org.apache.flink.table.runtime.typeutils.DecimalDataTypeInfo) LogicalType(org.apache.flink.table.types.logical.LogicalType) PojoTypeInfo(org.apache.flink.api.java.typeutils.PojoTypeInfo) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) RawType(org.apache.flink.table.types.logical.RawType) TypeInformationRawType(org.apache.flink.table.types.logical.TypeInformationRawType) BigDecimalTypeInfo(org.apache.flink.table.runtime.typeutils.BigDecimalTypeInfo) InternalTypeInfo(org.apache.flink.table.runtime.typeutils.InternalTypeInfo) LegacyTimestampTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyTimestampTypeInfo) Row(org.apache.flink.types.Row) BasicArrayTypeInfo(org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo) LocalDateTime(java.time.LocalDateTime) CollectionDataType(org.apache.flink.table.types.CollectionDataType) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) Timestamp(java.sql.Timestamp) DecimalData(org.apache.flink.table.data.DecimalData) LegacyInstantTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyInstantTypeInfo) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) TupleTypeInfoBase(org.apache.flink.api.java.typeutils.TupleTypeInfoBase) DataType(org.apache.flink.table.types.DataType) CollectionDataType(org.apache.flink.table.types.CollectionDataType) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) TypeConversions.fromLegacyInfoToDataType(org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType) TimestampDataTypeInfo(org.apache.flink.table.runtime.typeutils.TimestampDataTypeInfo) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) StringDataTypeInfo(org.apache.flink.table.runtime.typeutils.StringDataTypeInfo) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType) LegacyLocalDateTimeTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo) StringData(org.apache.flink.table.data.StringData) Tuple(org.apache.flink.api.java.tuple.Tuple) CompositeType(org.apache.flink.api.common.typeutils.CompositeType)
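
A hedged usage sketch of the method above: obtaining a converter for a DataType bridged to java.lang.String and converting between the external format (String) and the internal format (StringData). The import path matches recent Flink versions; older releases host DataFormatConverters in a different package.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.types.DataType;

public class ConverterExample {
    @SuppressWarnings({"unchecked", "rawtypes"})
    public static void main(String[] args) {
        // STRING() bridges to java.lang.String by default, so the CHAR/VARCHAR
        // branch above selects StringConverter rather than StringDataConverter.
        DataType stringType = DataTypes.STRING();
        DataFormatConverters.DataFormatConverter converter =
                DataFormatConverters.getConverterForDataType(stringType);

        Object internal = converter.toInternal("hello");  // internal StringData
        Object external = converter.toExternal(internal); // back to "hello"
        System.out.println(internal instanceof StringData); // true
        System.out.println(external);
    }
}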

Example 68 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

From class LogicalTypeCasts, method supportsStructuredCasting.

private static boolean supportsStructuredCasting(LogicalType sourceType, LogicalType targetType, BiFunction<LogicalType, LogicalType, Boolean> childPredicate) {
    final LogicalTypeRoot sourceRoot = sourceType.getTypeRoot();
    final LogicalTypeRoot targetRoot = targetType.getTypeRoot();
    if (sourceRoot != STRUCTURED_TYPE || targetRoot != STRUCTURED_TYPE) {
        return false;
    }
    final StructuredType sourceStructuredType = (StructuredType) sourceType;
    final StructuredType targetStructuredType = (StructuredType) targetType;
    // non-anonymous structured types must be fully equal
    if (sourceStructuredType.getObjectIdentifier().isPresent() || targetStructuredType.getObjectIdentifier().isPresent()) {
        return false;
    }
    // For anonymous structured types we are a bit more lenient if they provide similar fields.
    // This is necessary e.g. when a structured type derived from type information and a
    // structured type derived within the Table API differ slightly.
    final Class<?> sourceClass = sourceStructuredType.getImplementationClass().orElse(null);
    final Class<?> targetClass = targetStructuredType.getImplementationClass().orElse(null);
    if (sourceClass != targetClass) {
        return false;
    }
    final List<String> sourceNames =
            sourceStructuredType.getAttributes().stream()
                    .map(StructuredType.StructuredAttribute::getName)
                    .collect(Collectors.toList());
    final List<String> targetNames =
            targetStructuredType.getAttributes().stream()
                    .map(StructuredType.StructuredAttribute::getName)
                    .collect(Collectors.toList());
    if (!sourceNames.equals(targetNames)) {
        return false;
    }
    final List<LogicalType> sourceChildren = sourceType.getChildren();
    final List<LogicalType> targetChildren = targetType.getChildren();
    for (int i = 0; i < sourceChildren.size(); i++) {
        if (!childPredicate.apply(sourceChildren.get(i), targetChildren.get(i))) {
            return false;
        }
    }
    return true;
}
Also used : LogicalType(org.apache.flink.table.types.logical.LogicalType) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) StructuredType(org.apache.flink.table.types.logical.StructuredType)
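
To illustrate the rule above, here is a hedged sketch that builds two anonymous structured types sharing the same implementation class and attribute name, differing only in one child type, and checks castability through the public entry point. The User class is made up, and the sketch assumes the StructuredType builder accepts these attributes without further validation.

import java.util.Collections;

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.StructuredType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

public class StructuredCastExample {
    // Hypothetical implementation class shared by both structured types.
    public static class User {
        public int id;
    }

    public static void main(String[] args) {
        StructuredType source =
                StructuredType.newBuilder(User.class)
                        .attributes(Collections.singletonList(
                                new StructuredType.StructuredAttribute("id", new IntType())))
                        .build();
        StructuredType target =
                StructuredType.newBuilder(User.class)
                        .attributes(Collections.singletonList(
                                new StructuredType.StructuredAttribute("id", new BigIntType())))
                        .build();

        // Same class, same attribute names, INT widens to BIGINT implicitly,
        // so the structured types are castable as a whole.
        System.out.println(LogicalTypeCasts.supportsImplicitCast(source, target)); // true
    }
}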

Example 69 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

From class LogicalTypeCasts, method supportsConstructedCasting.

private static boolean supportsConstructedCasting(LogicalType sourceType, LogicalType targetType, boolean allowExplicit) {
    final LogicalTypeRoot sourceRoot = sourceType.getTypeRoot();
    final LogicalTypeRoot targetRoot = targetType.getTypeRoot();
    // constructed types can generally only be casted within the same type root;
    // however, rows can be converted to structured types and vice versa
    if (sourceRoot == targetRoot
            || (sourceRoot == ROW && targetRoot == STRUCTURED_TYPE)
            || (sourceRoot == STRUCTURED_TYPE && targetRoot == ROW)) {
        final List<LogicalType> sourceChildren = sourceType.getChildren();
        final List<LogicalType> targetChildren = targetType.getChildren();
        if (sourceChildren.size() != targetChildren.size()) {
            return false;
        }
        for (int i = 0; i < sourceChildren.size(); i++) {
            if (!supportsCasting(sourceChildren.get(i), targetChildren.get(i), allowExplicit)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
Also used : LogicalType(org.apache.flink.table.types.logical.LogicalType) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot)
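
The rule is easiest to see on rows. A short sketch using the public supportsImplicitCast entry point, which delegates to the method above for constructed types: casting ROW<INT, VARCHAR> to ROW<BIGINT, VARCHAR> succeeds because every child pair is castable, while a row with a different field count is rejected outright.

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

public class RowCastExample {
    public static void main(String[] args) {
        RowType source = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
        RowType wider = RowType.of(new BigIntType(), new VarCharType(VarCharType.MAX_LENGTH));
        RowType shorter = RowType.of(new IntType());

        // All children castable -> true; mismatched arity -> false.
        System.out.println(LogicalTypeCasts.supportsImplicitCast(source, wider));   // true
        System.out.println(LogicalTypeCasts.supportsImplicitCast(source, shorter)); // false
    }
}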

Example 70 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

From class LogicalTypeMerging, method findCommonRowType.

@Nullable
private static LogicalType findCommonRowType(List<LogicalType> normalizedTypes) {
    final List<LogicalType> children = findCommonChildrenTypes(normalizedTypes);
    if (children == null) {
        return null;
    }
    final RowType firstType = (RowType) normalizedTypes.get(0);
    final List<RowType.RowField> newFields = IntStream.range(0, children.size()).mapToObj(pos -> {
        final LogicalType newType = children.get(pos);
        final RowType.RowField originalField = firstType.getFields().get(pos);
        if (originalField.getDescription().isPresent()) {
            return new RowType.RowField(originalField.getName(), newType, originalField.getDescription().get());
        } else {
            return new RowType.RowField(originalField.getName(), newType);
        }
    }).collect(Collectors.toList());
    return new RowType(newFields);
}
Also used : Arrays(java.util.Arrays) INTERVAL(org.apache.flink.table.types.logical.LogicalTypeFamily.INTERVAL) AbstractList(java.util.AbstractList) MapType(org.apache.flink.table.types.logical.MapType) BINARY(org.apache.flink.table.types.logical.LogicalTypeRoot.BINARY) CharType(org.apache.flink.table.types.logical.CharType) MULTISET(org.apache.flink.table.types.logical.LogicalTypeRoot.MULTISET) NULL(org.apache.flink.table.types.logical.LogicalTypeRoot.NULL) DecimalType(org.apache.flink.table.types.logical.DecimalType) LogicalTypeChecks.getScale(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getScale) HOUR_TO_MINUTE(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.HOUR_TO_MINUTE) Map(java.util.Map) TimeType(org.apache.flink.table.types.logical.TimeType) DECIMAL(org.apache.flink.table.types.logical.LogicalTypeRoot.DECIMAL) DOUBLE(org.apache.flink.table.types.logical.LogicalTypeRoot.DOUBLE) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) INTERVAL_YEAR_MONTH(org.apache.flink.table.types.logical.LogicalTypeRoot.INTERVAL_YEAR_MONTH) DayTimeIntervalType(org.apache.flink.table.types.logical.DayTimeIntervalType) TIMESTAMP_WITHOUT_TIME_ZONE(org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE) SECOND(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.SECOND) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) NullType(org.apache.flink.table.types.logical.NullType) HOUR_TO_SECOND(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.HOUR_TO_SECOND) EXACT_NUMERIC(org.apache.flink.table.types.logical.LogicalTypeFamily.EXACT_NUMERIC) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType) List(java.util.List) LogicalType(org.apache.flink.table.types.logical.LogicalType) VARCHAR(org.apache.flink.table.types.logical.LogicalTypeRoot.VARCHAR) ARRAY(org.apache.flink.table.types.logical.LogicalTypeRoot.ARRAY) Optional(java.util.Optional) MONTH(org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution.MONTH) DAY(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY) LogicalTypeChecks.getPrecision(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision) YEAR(org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution.YEAR) TIMESTAMP_WITH_TIME_ZONE(org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE) LogicalTypeCasts.supportsImplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsImplicitCast) IntStream(java.util.stream.IntStream) BINARY_STRING(org.apache.flink.table.types.logical.LogicalTypeFamily.BINARY_STRING) TIMESTAMP(org.apache.flink.table.types.logical.LogicalTypeFamily.TIMESTAMP) BinaryType(org.apache.flink.table.types.logical.BinaryType) APPROXIMATE_NUMERIC(org.apache.flink.table.types.logical.LogicalTypeFamily.APPROXIMATE_NUMERIC) VARBINARY(org.apache.flink.table.types.logical.LogicalTypeRoot.VARBINARY) CHAR(org.apache.flink.table.types.logical.LogicalTypeRoot.CHAR) MINUTE_TO_SECOND(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.MINUTE_TO_SECOND) LogicalTypeChecks.getLength(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getLength) HashMap(java.util.HashMap) MINUTE(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.MINUTE) RowType(org.apache.flink.table.types.logical.RowType) ArrayList(java.util.ArrayList) TimestampType(org.apache.flink.table.types.logical.TimestampType) DoubleType(org.apache.flink.table.types.logical.DoubleType) DAY_TO_SECOND(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND) CHARACTER_STRING(org.apache.flink.table.types.logical.LogicalTypeFamily.CHARACTER_STRING) MAP(org.apache.flink.table.types.logical.LogicalTypeRoot.MAP) YearMonthIntervalType(org.apache.flink.table.types.logical.YearMonthIntervalType) YEAR_TO_MONTH(org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH) NUMERIC(org.apache.flink.table.types.logical.LogicalTypeFamily.NUMERIC) Nullable(javax.annotation.Nullable) ROW(org.apache.flink.table.types.logical.LogicalTypeRoot.ROW) MultisetType(org.apache.flink.table.types.logical.MultisetType) TIME(org.apache.flink.table.types.logical.LogicalTypeFamily.TIME) DateType(org.apache.flink.table.types.logical.DateType) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DATE(org.apache.flink.table.types.logical.LogicalTypeRoot.DATE) RAW(org.apache.flink.table.types.logical.LogicalTypeRoot.RAW) DAY_TO_MINUTE(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY_TO_MINUTE) DayTimeResolution(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution) YearMonthResolution(org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution) DATETIME(org.apache.flink.table.types.logical.LogicalTypeFamily.DATETIME) TIMESTAMP_WITH_LOCAL_TIME_ZONE(org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) Internal(org.apache.flink.annotation.Internal) ZonedTimestampType(org.apache.flink.table.types.logical.ZonedTimestampType) HOUR(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.HOUR) DAY_TO_HOUR(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) Collections(java.util.Collections) INTERVAL_DAY_TIME(org.apache.flink.table.types.logical.LogicalTypeRoot.INTERVAL_DAY_TIME)
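
A hedged usage sketch through the public findCommonType entry point, which dispatches to findCommonRowType when the normalized arguments are rows: merging ROW<INT> with ROW<BIGINT> widens the field to BIGINT while keeping the first row's field name.

import java.util.Arrays;
import java.util.Optional;

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.utils.LogicalTypeMerging;

public class CommonRowTypeExample {
    public static void main(String[] args) {
        RowType a = RowType.of(new LogicalType[] {new IntType()}, new String[] {"f0"});
        RowType b = RowType.of(new LogicalType[] {new BigIntType()}, new String[] {"f0"});

        // INT and BIGINT merge to BIGINT, so the common row is ROW<`f0` BIGINT>.
        Optional<LogicalType> common = LogicalTypeMerging.findCommonType(Arrays.asList(a, b));
        common.ifPresent(t -> System.out.println(t.asSummaryString()));
    }
}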

Aggregations

LogicalType (org.apache.flink.table.types.logical.LogicalType): 192
DataType (org.apache.flink.table.types.DataType): 53
RowType (org.apache.flink.table.types.logical.RowType): 53
RowData (org.apache.flink.table.data.RowData): 45
List (java.util.List): 29
ArrayList (java.util.ArrayList): 28
TableException (org.apache.flink.table.api.TableException): 25
TimestampType (org.apache.flink.table.types.logical.TimestampType): 25
Internal (org.apache.flink.annotation.Internal): 21
IntType (org.apache.flink.table.types.logical.IntType): 21
Map (java.util.Map): 20
ValidationException (org.apache.flink.table.api.ValidationException): 20
ArrayType (org.apache.flink.table.types.logical.ArrayType): 19
DecimalType (org.apache.flink.table.types.logical.DecimalType): 19
LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType): 17
Test (org.junit.Test): 17
BigIntType (org.apache.flink.table.types.logical.BigIntType): 16
LegacyTypeInformationType (org.apache.flink.table.types.logical.LegacyTypeInformationType): 16
GenericRowData (org.apache.flink.table.data.GenericRowData): 15
Arrays (java.util.Arrays): 14