Use of org.apache.flink.table.types.logical.LegacyTypeInformationType in project flink by apache.
The class TableSchema, method validateAndCreateNameToTypeMapping.
/**
 * Creates a mapping from field name to data type; the field name can be a nested field. This is
 * mainly used for validating whether the rowtime attribute (which might be nested) exists in the
 * schema. While creating the mapping, it also validates that there are no duplicate field names.
 *
 * <p>For example, a field "f0" of ROW type has two nested fields "q1" and "q2". The resulting
 * mapping is ["f0" -> ROW, "f0.q1" -> INT, "f0.q2" -> STRING].
 *
 * <pre>{@code
 * f0 ROW<q1 INT, q2 STRING>
 * }</pre>
 *
 * @param fieldNameToType Field name to type mapping to update
 * @param fieldName Name of this field, e.g. "q1" or "q2" in the above example
 * @param fieldType Data type of this field
 * @param parentFieldName Field name of the parent type, e.g. "f0" in the above example
 */
private static void validateAndCreateNameToTypeMapping(
        Map<String, LogicalType> fieldNameToType,
        String fieldName,
        LogicalType fieldType,
        String parentFieldName) {
    String fullFieldName =
            parentFieldName.isEmpty() ? fieldName : parentFieldName + "." + fieldName;
    LogicalType oldType = fieldNameToType.put(fullFieldName, fieldType);
    if (oldType != null) {
        throw new ValidationException(
                "Field names must be unique. Duplicate field: '" + fullFieldName + "'");
    }
    if (isCompositeType(fieldType) && !(fieldType instanceof LegacyTypeInformationType)) {
        final List<String> fieldNames = LogicalTypeChecks.getFieldNames(fieldType);
        final List<LogicalType> fieldTypes = fieldType.getChildren();
        IntStream.range(0, fieldNames.size())
                .forEach(i -> validateAndCreateNameToTypeMapping(
                        fieldNameToType, fieldNames.get(i), fieldTypes.get(i), fullFieldName));
    }
}
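For orientation, here is a minimal sketch of the mapping this method produces for the ROW example from the Javadoc. The class name NameToTypeMappingDemo is hypothetical, and because validateAndCreateNameToTypeMapping is private, the sketch populates the map by hand to show the expected entries; it assumes flink-table-common is on the classpath.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.logical.LogicalType;

import java.util.LinkedHashMap;
import java.util.Map;

public class NameToTypeMappingDemo {
    public static void main(String[] args) {
        // f0 ROW<q1 INT, q2 STRING>, as in the Javadoc example.
        LogicalType row = DataTypes.ROW(
                        DataTypes.FIELD("q1", DataTypes.INT()),
                        DataTypes.FIELD("q2", DataTypes.STRING()))
                .getLogicalType();

        // The entries the private method would add for this schema.
        Map<String, LogicalType> mapping = new LinkedHashMap<>();
        mapping.put("f0", row);
        mapping.put("f0.q1", row.getChildren().get(0)); // INT
        mapping.put("f0.q2", row.getChildren().get(1)); // STRING

        mapping.forEach((name, type) -> System.out.println(name + " -> " + type));
    }
}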
Use of org.apache.flink.table.types.logical.LegacyTypeInformationType in project flink by apache.
The class TypeMappingUtilsTest, method testCheckPhysicalLogicalTypeCompatible.
@Test
public void testCheckPhysicalLogicalTypeCompatible() {
    TableSchema tableSchema = TableSchema.builder()
            .field("a", DataTypes.VARCHAR(2))
            .field("b", DataTypes.DECIMAL(20, 2))
            .build();
    TableSink tableSink = new TestTableSink(tableSchema);
    LegacyTypeInformationType legacyDataType =
            (LegacyTypeInformationType) tableSink.getConsumedDataType().getLogicalType();
    TypeInformation legacyTypeInfo =
            ((TupleTypeInfo) legacyDataType.getTypeInformation()).getTypeAt(1);
    DataType physicalType = TypeConversions.fromLegacyInfoToDataType(legacyTypeInfo);
    ResolvedSchema physicSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalType);
    DataType[] logicalDataTypes = tableSchema.getFieldDataTypes();
    List<DataType> physicalDataTypes = physicSchema.getColumnDataTypes();
    for (int i = 0; i < logicalDataTypes.length; i++) {
        TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
                physicalDataTypes.get(i).getLogicalType(),
                logicalDataTypes[i].getLogicalType(),
                "physicalField",
                "logicalField",
                false);
    }
}
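The check can also be driven directly. A minimal sketch (class name is hypothetical; assumes flink-table-common on the classpath): the first call mirrors the test above, pairing a physical STRING with a logical VARCHAR(2), and should pass; the second pairs types that cannot back each other, so a ValidationException is expected.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.utils.TypeMappingUtils;

public class CompatibilityCheckDemo {
    public static void main(String[] args) {
        // Physical STRING backing a logical VARCHAR(2), with the same final
        // flag as the test above: expected to be accepted.
        TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
                DataTypes.STRING().getLogicalType(),
                DataTypes.VARCHAR(2).getLogicalType(),
                "physicalField",
                "logicalField",
                false);

        try {
            // BOOLEAN and ARRAY<INT> are not compatible in either direction;
            // the check is expected to throw.
            TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
                    DataTypes.BOOLEAN().getLogicalType(),
                    DataTypes.ARRAY(DataTypes.INT()).getLogicalType(),
                    "physicalField",
                    "logicalField",
                    false);
        } catch (ValidationException e) {
            System.out.println("Incompatible: " + e.getMessage());
        }
    }
}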
Use of org.apache.flink.table.types.logical.LegacyTypeInformationType in project flink by apache.
The class DecimalDivideTypeStrategy, method inferType.
@Override
public Optional<DataType> inferType(CallContext callContext) {
    final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
    final LogicalType dividend = argumentDataTypes.get(0).getLogicalType();
    final LogicalType divisor = argumentDataTypes.get(1).getLogicalType();
    // a hack to make legacy types possible until we drop them
    if (dividend instanceof LegacyTypeInformationType) {
        return Optional.of(argumentDataTypes.get(0));
    }
    if (divisor instanceof LegacyTypeInformationType) {
        return Optional.of(argumentDataTypes.get(1));
    }
    if (!isDecimalComputation(dividend, divisor)) {
        return Optional.empty();
    }
    final DecimalType decimalType = LogicalTypeMerging.findDivisionDecimalType(
            getPrecision(dividend), getScale(dividend),
            getPrecision(divisor), getScale(divisor));
    return Optional.of(fromLogicalToDataType(decimalType));
}
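The quotient type comes from LogicalTypeMerging.findDivisionDecimalType, which applies the usual SQL-style division rule (scale = max(6, s1 + p2 + 1), precision = p1 - s1 + s2 + scale, capped at the maximum DECIMAL precision). A minimal sketch probing it directly; the class name is hypothetical and the expected DECIMAL(17, 8) result is my reading of the rule, not verified output.

import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.utils.LogicalTypeMerging;

public class DivisionTypeDemo {
    public static void main(String[] args) {
        // DECIMAL(10, 2) / DECIMAL(5, 1):
        // scale = max(6, 2 + 5 + 1) = 8, precision = 10 - 2 + 1 + 8 = 17.
        DecimalType result = LogicalTypeMerging.findDivisionDecimalType(10, 2, 5, 1);
        System.out.println(result); // expected: DECIMAL(17, 8)
    }
}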
Use of org.apache.flink.table.types.logical.LegacyTypeInformationType in project flink by apache.
The class DecimalPlusTypeStrategy, method inferType.
@Override
public Optional<DataType> inferType(CallContext callContext) {
    final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
    final LogicalType addend1 = argumentDataTypes.get(0).getLogicalType();
    final LogicalType addend2 = argumentDataTypes.get(1).getLogicalType();
    // a hack to make legacy types possible until we drop them
    if (addend1 instanceof LegacyTypeInformationType) {
        return Optional.of(argumentDataTypes.get(0));
    }
    if (addend2 instanceof LegacyTypeInformationType) {
        return Optional.of(argumentDataTypes.get(1));
    }
    if (!isDecimalComputation(addend1, addend2)) {
        return Optional.empty();
    }
    final DecimalType decimalType = LogicalTypeMerging.findAdditionDecimalType(
            getPrecision(addend1), getScale(addend1),
            getPrecision(addend2), getScale(addend2));
    return Optional.of(fromLogicalToDataType(decimalType));
}
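Addition follows the analogous rule via LogicalTypeMerging.findAdditionDecimalType (scale = max(s1, s2), precision = max(p1 - s1, p2 - s2) + scale + 1). Another hedged sketch with a hypothetical class name; DECIMAL(11, 2) is my expected result, not verified output.

import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.utils.LogicalTypeMerging;

public class AdditionTypeDemo {
    public static void main(String[] args) {
        // DECIMAL(10, 2) + DECIMAL(5, 1):
        // scale = max(2, 1) = 2, precision = max(10 - 2, 5 - 1) + 2 + 1 = 11.
        DecimalType result = LogicalTypeMerging.findAdditionDecimalType(10, 2, 5, 1);
        System.out.println(result); // expected: DECIMAL(11, 2)
    }
}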
Use of org.apache.flink.table.types.logical.LegacyTypeInformationType in project flink by apache.
The class CastInputTypeStrategy, method inferInputTypes.
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
    // check for type literal
    if (!callContext.isArgumentLiteral(1)
            || !callContext.getArgumentValue(1, DataType.class).isPresent()) {
        return Optional.empty();
    }
    final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
    final LogicalType fromType = argumentDataTypes.get(0).getLogicalType();
    final LogicalType toType = argumentDataTypes.get(1).getLogicalType();
    // A hack to support legacy types. To be removed when we drop the legacy types.
    if (fromType instanceof LegacyTypeInformationType) {
        return Optional.of(argumentDataTypes);
    }
    if (!supportsExplicitCast(fromType, toType)) {
        if (throwOnFailure) {
            throw callContext.newValidationError(
                    "Unsupported cast from '%s' to '%s'.", fromType, toType);
        }
        return Optional.empty();
    }
    return Optional.of(argumentDataTypes);
}
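The validation step delegates to LogicalTypeCasts.supportsExplicitCast, which the snippet calls via a static import. A minimal sketch calling it directly, assuming flink-table-common on the classpath; the class name is hypothetical and the printed booleans (true, then false) are my expectation of the cast rules.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

public class ExplicitCastDemo {
    public static void main(String[] args) {
        // An explicit cast from STRING to INT is allowed.
        System.out.println(LogicalTypeCasts.supportsExplicitCast(
                DataTypes.STRING().getLogicalType(),
                DataTypes.INT().getLogicalType()));
        // A MAP cannot be cast to INT, so the strategy above would report
        // "Unsupported cast from ... to ...".
        System.out.println(LogicalTypeCasts.supportsExplicitCast(
                DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()).getLogicalType(),
                DataTypes.INT().getLogicalType()));
    }
}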