Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: the class DynamicSourceUtils, method validateAndApplyMetadata.
private static void validateAndApplyMetadata(String tableDebugName, ResolvedSchema schema, DynamicTableSource source) {
    final List<MetadataColumn> metadataColumns = extractMetadataColumns(schema);
    if (metadataColumns.isEmpty()) {
        return;
    }
    if (!(source instanceof SupportsReadingMetadata)) {
        throw new ValidationException(String.format("Table '%s' declares metadata columns, but the underlying %s doesn't implement " + "the %s interface. Therefore, metadata cannot be read from the given source.", source.asSummaryString(), DynamicTableSource.class.getSimpleName(), SupportsReadingMetadata.class.getSimpleName()));
    }
    final SupportsReadingMetadata metadataSource = (SupportsReadingMetadata) source;
    final Map<String, DataType> metadataMap = metadataSource.listReadableMetadata();
    metadataColumns.forEach(c -> {
        final String metadataKey = c.getMetadataKey().orElse(c.getName());
        final LogicalType metadataType = c.getDataType().getLogicalType();
        final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
        // check that metadata key is valid
        if (expectedMetadataDataType == null) {
            throw new ValidationException(String.format("Invalid metadata key '%s' in column '%s' of table '%s'. " + "The %s class '%s' supports the following metadata keys for reading:\n%s", metadataKey, c.getName(), tableDebugName, DynamicTableSource.class.getSimpleName(), source.getClass().getName(), String.join("\n", metadataMap.keySet())));
        }
        // check that types are compatible
        if (!supportsExplicitCast(expectedMetadataDataType.getLogicalType(), metadataType)) {
            if (metadataKey.equals(c.getName())) {
                throw new ValidationException(String.format("Invalid data type for metadata column '%s' of table '%s'. " + "The column cannot be declared as '%s' because the type must be " + "castable from metadata type '%s'.", c.getName(), tableDebugName, expectedMetadataDataType.getLogicalType(), metadataType));
            } else {
                throw new ValidationException(String.format("Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. " + "The column cannot be declared as '%s' because the type must be " + "castable from metadata type '%s'.", c.getName(), metadataKey, tableDebugName, expectedMetadataDataType.getLogicalType(), metadataType));
            }
        }
    });
    metadataSource.applyReadableMetadata(createRequiredMetadataKeys(schema, source), TypeConversions.fromLogicalToDataType(createProducedType(schema, source)));
}
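The two checks the method performs (a valid metadata key, a declared type castable from the metadata type) can be illustrated in isolation. Below is a minimal stand-alone sketch; the class name, metadata key, and column type are made up for illustration, and only the map lookup and the supportsExplicitCast call mirror the validation above:

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

public class MetadataValidationSketch {
    public static void main(String[] args) {
        // what a source would return from listReadableMetadata()
        Map<String, DataType> readable = new HashMap<>();
        readable.put("timestamp", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).notNull());

        // a column declared as e.g. `ts TIMESTAMP(3) WITH LOCAL TIME ZONE METADATA FROM 'timestamp'`
        String metadataKey = "timestamp";
        LogicalType declaredType = DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).getLogicalType();

        DataType expected = readable.get(metadataKey); // key check: must be non-null
        boolean castable = LogicalTypeCasts.supportsExplicitCast(expected.getLogicalType(), declaredType);
        System.out.println(castable); // true: the declared type is castable from the metadata type
    }
}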
Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: the class DataFormatConverters, method getConverterForDataType.
/**
 * Get the {@link DataFormatConverter} for a {@link DataType}.
 *
 * @param originDataType the type to find a converter for. DataFormatConverter is oriented to a
 *     concrete Java format; a LogicalType alone no longer carries that information, so only
 *     DataType (with its conversion class) retains the full Java format information.
 */
public static DataFormatConverter getConverterForDataType(DataType originDataType) {
    DataType dataType = originDataType.nullable();
    DataFormatConverter converter = TYPE_TO_CONVERTER.get(dataType);
    if (converter != null) {
        return converter;
    }
    Class<?> clazz = dataType.getConversionClass();
    LogicalType logicalType = dataType.getLogicalType();
    switch (logicalType.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            if (clazz == String.class) {
                return StringConverter.INSTANCE;
            } else if (clazz == StringData.class) {
                return StringDataConverter.INSTANCE;
            } else {
                throw new RuntimeException("Not support class for VARCHAR: " + clazz);
            }
        case BINARY:
        case VARBINARY:
            return PrimitiveByteArrayConverter.INSTANCE;
        case DECIMAL:
            Tuple2<Integer, Integer> ps = getPrecision(logicalType);
            if (clazz == BigDecimal.class) {
                return new BigDecimalConverter(ps.f0, ps.f1);
            } else if (clazz == DecimalData.class) {
                return new DecimalDataConverter(ps.f0, ps.f1);
            } else {
                throw new RuntimeException("Not support conversion class for DECIMAL: " + clazz);
            }
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            int precisionOfTS = getDateTimePrecision(logicalType);
            if (clazz == Timestamp.class) {
                return new TimestampConverter(precisionOfTS);
            } else if (clazz == LocalDateTime.class) {
                return new LocalDateTimeConverter(precisionOfTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfTS);
            } else {
                throw new RuntimeException("Not support conversion class for TIMESTAMP WITHOUT TIME ZONE: " + clazz);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            int precisionOfLZTS = getDateTimePrecision(logicalType);
            if (clazz == Instant.class) {
                return new InstantConverter(precisionOfLZTS);
            } else if (clazz == Long.class || clazz == long.class) {
                return new LongTimestampDataConverter(precisionOfLZTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfLZTS);
            } else if (clazz == Timestamp.class) {
                return new TimestampLtzConverter(precisionOfLZTS);
            } else {
                throw new RuntimeException("Not support conversion class for TIMESTAMP WITH LOCAL TIME ZONE: " + clazz);
            }
        case ARRAY:
            if (clazz == ArrayData.class) {
                return ArrayDataConverter.INSTANCE;
            } else if (clazz == boolean[].class) {
                return PrimitiveBooleanArrayConverter.INSTANCE;
            } else if (clazz == short[].class) {
                return PrimitiveShortArrayConverter.INSTANCE;
            } else if (clazz == int[].class) {
                return PrimitiveIntArrayConverter.INSTANCE;
            } else if (clazz == long[].class) {
                return PrimitiveLongArrayConverter.INSTANCE;
            } else if (clazz == float[].class) {
                return PrimitiveFloatArrayConverter.INSTANCE;
            } else if (clazz == double[].class) {
                return PrimitiveDoubleArrayConverter.INSTANCE;
            }
            if (dataType instanceof CollectionDataType) {
                return new ObjectArrayConverter(((CollectionDataType) dataType).getElementDataType().bridgedTo(clazz.getComponentType()));
            } else {
                BasicArrayTypeInfo typeInfo = (BasicArrayTypeInfo) ((LegacyTypeInformationType) dataType.getLogicalType()).getTypeInformation();
                return new ObjectArrayConverter(fromLegacyInfoToDataType(typeInfo.getComponentInfo()).bridgedTo(clazz.getComponentType()));
            }
        case MAP:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            return new MapConverter(keyValueDataType.getKeyDataType(), keyValueDataType.getValueDataType());
        case MULTISET:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            CollectionDataType collectionDataType = (CollectionDataType) dataType;
            return new MapConverter(collectionDataType.getElementDataType(), DataTypes.INT().bridgedTo(Integer.class));
        case ROW:
        case STRUCTURED_TYPE:
            TypeInformation<?> asTypeInfo = fromDataTypeToTypeInfo(dataType);
            if (asTypeInfo instanceof InternalTypeInfo && clazz == RowData.class) {
                LogicalType realLogicalType = ((InternalTypeInfo<?>) asTypeInfo).toLogicalType();
                return new RowDataConverter(getFieldCount(realLogicalType));
            }
            // legacy
            CompositeType compositeType = (CompositeType) asTypeInfo;
            DataType[] fieldTypes = Stream.iterate(0, x -> x + 1).limit(compositeType.getArity()).map((Function<Integer, TypeInformation>) compositeType::getTypeAt).map(TypeConversions::fromLegacyInfoToDataType).toArray(DataType[]::new);
            if (clazz == RowData.class) {
                return new RowDataConverter(compositeType.getArity());
            } else if (clazz == Row.class) {
                return new RowConverter(fieldTypes);
            } else if (Tuple.class.isAssignableFrom(clazz)) {
                return new TupleConverter((Class<Tuple>) clazz, fieldTypes);
            } else if (CaseClassConverter.PRODUCT_CLASS != null && CaseClassConverter.PRODUCT_CLASS.isAssignableFrom(clazz)) {
                return new CaseClassConverter((TupleTypeInfoBase) compositeType, fieldTypes);
            } else if (compositeType instanceof PojoTypeInfo) {
                return new PojoConverter((PojoTypeInfo) compositeType, fieldTypes);
            } else {
                throw new IllegalStateException("Cannot find a converter for type " + compositeType + ". If the target should be a converter to scala.Product, then you might have a scala classpath issue.");
            }
        case RAW:
            if (logicalType instanceof RawType) {
                final RawType<?> rawType = (RawType<?>) logicalType;
                if (clazz == RawValueData.class) {
                    return RawValueDataConverter.INSTANCE;
                } else {
                    return new GenericConverter<>(rawType.getTypeSerializer());
                }
            }
            // legacy
            TypeInformation typeInfo = logicalType instanceof LegacyTypeInformationType ? ((LegacyTypeInformationType) logicalType).getTypeInformation() : ((TypeInformationRawType) logicalType).getTypeInformation();
            // planner type info
            if (typeInfo instanceof StringDataTypeInfo) {
                return StringDataConverter.INSTANCE;
            } else if (typeInfo instanceof DecimalDataTypeInfo) {
                DecimalDataTypeInfo decimalType = (DecimalDataTypeInfo) typeInfo;
                return new DecimalDataConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof BigDecimalTypeInfo) {
                BigDecimalTypeInfo decimalType = (BigDecimalTypeInfo) typeInfo;
                return new BigDecimalConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof TimestampDataTypeInfo) {
                TimestampDataTypeInfo timestampDataTypeInfo = (TimestampDataTypeInfo) typeInfo;
                return new TimestampDataConverter(timestampDataTypeInfo.getPrecision());
            } else if (typeInfo instanceof LegacyLocalDateTimeTypeInfo) {
                LegacyLocalDateTimeTypeInfo dateTimeType = (LegacyLocalDateTimeTypeInfo) typeInfo;
                return new LocalDateTimeConverter(dateTimeType.getPrecision());
            } else if (typeInfo instanceof LegacyTimestampTypeInfo) {
                LegacyTimestampTypeInfo timestampType = (LegacyTimestampTypeInfo) typeInfo;
                return new TimestampConverter(timestampType.getPrecision());
            } else if (typeInfo instanceof LegacyInstantTypeInfo) {
                LegacyInstantTypeInfo instantTypeInfo = (LegacyInstantTypeInfo) typeInfo;
                return new InstantConverter(instantTypeInfo.getPrecision());
            }
            if (clazz == RawValueData.class) {
                return RawValueDataConverter.INSTANCE;
            }
            return new GenericConverter(typeInfo.createSerializer(new ExecutionConfig()));
        default:
            throw new RuntimeException("Not support dataType: " + dataType);
    }
}
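For context, a typical lookup and round trip through the returned converter might look like the following sketch. It assumes DataFormatConverters lives in org.apache.flink.table.data.util (as in recent Flink versions); the class name is invented, and the unchecked cast is needed because getConverterForDataType returns the raw DataFormatConverter type:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.data.util.DataFormatConverters.DataFormatConverter;

public class ConverterLookupSketch {
    public static void main(String[] args) {
        // STRING() bridges to java.lang.String, so the lookup yields the String converter
        @SuppressWarnings("unchecked")
        DataFormatConverter<StringData, String> converter =
                DataFormatConverters.getConverterForDataType(DataTypes.STRING());

        StringData internal = converter.toInternal("hello"); // external -> internal format
        String external = converter.toExternal(internal);    // internal -> external format
        System.out.println(external); // hello
    }
}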
Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: the class LogicalTypeCasts, method supportsStructuredCasting.
private static boolean supportsStructuredCasting(LogicalType sourceType, LogicalType targetType, BiFunction<LogicalType, LogicalType, Boolean> childPredicate) {
    final LogicalTypeRoot sourceRoot = sourceType.getTypeRoot();
    final LogicalTypeRoot targetRoot = targetType.getTypeRoot();
    if (sourceRoot != STRUCTURED_TYPE || targetRoot != STRUCTURED_TYPE) {
        return false;
    }
    final StructuredType sourceStructuredType = (StructuredType) sourceType;
    final StructuredType targetStructuredType = (StructuredType) targetType;
    // non-anonymous structured types must be fully equal
    if (sourceStructuredType.getObjectIdentifier().isPresent() || targetStructuredType.getObjectIdentifier().isPresent()) {
        return false;
    }
    // for anonymous structured types we are a bit more lenient, if they provide similar fields
    // e.g. this is necessary when structured types derived from type information and
    // structured types derived within Table API are slightly different
    final Class<?> sourceClass = sourceStructuredType.getImplementationClass().orElse(null);
    final Class<?> targetClass = targetStructuredType.getImplementationClass().orElse(null);
    if (sourceClass != targetClass) {
        return false;
    }
    final List<String> sourceNames = sourceStructuredType.getAttributes().stream().map(StructuredType.StructuredAttribute::getName).collect(Collectors.toList());
    final List<String> targetNames = targetStructuredType.getAttributes().stream().map(StructuredType.StructuredAttribute::getName).collect(Collectors.toList());
    if (!sourceNames.equals(targetNames)) {
        return false;
    }
    final List<LogicalType> sourceChildren = sourceType.getChildren();
    final List<LogicalType> targetChildren = targetType.getChildren();
    for (int i = 0; i < sourceChildren.size(); i++) {
        if (!childPredicate.apply(sourceChildren.get(i), targetChildren.get(i))) {
            return false;
        }
    }
    return true;
}
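A hedged sketch of the lenient matching for anonymous structured types: two unregistered structured types built over the same implementation class with identical attribute names, differing only in an attribute type that is castable. The User class is made up, and the StructuredType.newBuilder usage follows the builder API as I understand it:

import java.util.Arrays;

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.StructuredType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

public class StructuredCastSketch {
    public static class User {
        public int age;
    }

    public static void main(String[] args) {
        // anonymous (unregistered) structured types: no ObjectIdentifier is present
        StructuredType source = StructuredType.newBuilder(User.class)
                .attributes(Arrays.asList(new StructuredType.StructuredAttribute("age", new IntType())))
                .build();
        StructuredType target = StructuredType.newBuilder(User.class)
                .attributes(Arrays.asList(new StructuredType.StructuredAttribute("age", new BigIntType())))
                .build();

        // same class, same attribute names, castable children (INT -> BIGINT)
        System.out.println(LogicalTypeCasts.supportsImplicitCast(source, target)); // true
    }
}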
Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: the class LogicalTypeCasts, method supportsConstructedCasting.
private static boolean supportsConstructedCasting(LogicalType sourceType, LogicalType targetType, boolean allowExplicit) {
    final LogicalTypeRoot sourceRoot = sourceType.getTypeRoot();
    final LogicalTypeRoot targetRoot = targetType.getTypeRoot();
    // constructed types can only be cast within the same type root;
    // however, rows can be converted to structured types and vice versa
    if (sourceRoot == targetRoot || (sourceRoot == ROW && targetRoot == STRUCTURED_TYPE) || (sourceRoot == STRUCTURED_TYPE && targetRoot == ROW)) {
        final List<LogicalType> sourceChildren = sourceType.getChildren();
        final List<LogicalType> targetChildren = targetType.getChildren();
        if (sourceChildren.size() != targetChildren.size()) {
            return false;
        }
        for (int i = 0; i < sourceChildren.size(); i++) {
            if (!supportsCasting(sourceChildren.get(i), targetChildren.get(i), allowExplicit)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
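A small usage sketch of the rule above for row types: the field count must match, and each field pair must be castable. The class name is invented for illustration:

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

public class ConstructedCastSketch {
    public static void main(String[] args) {
        RowType source = RowType.of(new IntType(), new VarCharType(5));
        RowType widened = RowType.of(new BigIntType(), new VarCharType(10));
        RowType narrower = RowType.of(new IntType()); // different field count

        // each field widens (INT -> BIGINT, VARCHAR(5) -> VARCHAR(10))
        System.out.println(LogicalTypeCasts.supportsImplicitCast(source, widened));  // true
        // field counts differ, so no cast is possible
        System.out.println(LogicalTypeCasts.supportsImplicitCast(source, narrower)); // false
    }
}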
Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: the class LogicalTypeMerging, method findCommonRowType.
@Nullable
private static LogicalType findCommonRowType(List<LogicalType> normalizedTypes) {
    final List<LogicalType> children = findCommonChildrenTypes(normalizedTypes);
    if (children == null) {
        return null;
    }
    final RowType firstType = (RowType) normalizedTypes.get(0);
    final List<RowType.RowField> newFields = IntStream.range(0, children.size()).mapToObj(pos -> {
        final LogicalType newType = children.get(pos);
        final RowType.RowField originalField = firstType.getFields().get(pos);
        if (originalField.getDescription().isPresent()) {
            return new RowType.RowField(originalField.getName(), newType, originalField.getDescription().get());
        } else {
            return new RowType.RowField(originalField.getName(), newType);
        }
    }).collect(Collectors.toList());
    return new RowType(newFields);
}
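A short sketch of how this surfaces through the public entry point LogicalTypeMerging.findCommonType: field names come from the first row type, and field types are merged position by position. The class name and the exact printed form are illustrative:

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import org.apache.flink.table.types.logical.DoubleType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.utils.LogicalTypeMerging;

public class CommonRowTypeSketch {
    public static void main(String[] args) {
        List<LogicalType> rowTypes = Arrays.asList(
                RowType.of(new IntType(), new VarCharType(3)),
                RowType.of(new DoubleType(), new VarCharType(5)));

        // INT and DOUBLE merge to DOUBLE; VARCHAR(3) and VARCHAR(5) merge to VARCHAR(5)
        Optional<LogicalType> common = LogicalTypeMerging.findCommonType(rowTypes);
        System.out.println(common.orElseThrow(IllegalStateException::new));
        // roughly: ROW<`f0` DOUBLE, `f1` VARCHAR(5)>
    }
}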